-rw-r--r--  ansible/roles/convert_openrc/tasks/main.yml | 10
-rw-r--r--  ansible/roles/create_storperf_admin_rc/tasks/main.yml | 9
-rw-r--r--  ansible/roles/create_storperf_admin_rc/templates/storperf_admin-rc.j2 | 4
-rw-r--r--  api/resources/v1/env.py | 20
-rw-r--r--  dashboard/Prox_BM_L2FWD-4Port_MultiSize-1518452496550.json | 5817
-rw-r--r--  docs/testing/user/userguide/04-installation.rst | 4
-rw-r--r--  docs/testing/user/userguide/05-yardstick_plugin.rst | 9
-rw-r--r--  docs/testing/user/userguide/08-api.rst | 1
-rw-r--r--  gui/app/scripts/controllers/main.js | 4
-rw-r--r--  requirements.txt | 2
-rw-r--r--  samples/vnf_samples/nsut/prox/configs/gen_l2fwd-2.cfg | 10
-rw-r--r--  samples/vnf_samples/nsut/prox/configs/gen_l2fwd-4.cfg | 16
-rw-r--r--  samples/vnf_samples/nsut/vfw/tc_heat_rfc2544_ipv4_1rule_1flow_64B_trex_scale-up.yaml | 89
-rw-r--r--  samples/vnf_samples/nsut/vfw/vfw-tg-topology-scale-up.yaml | 52
-rw-r--r--  samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_4_ports_2_lb_1_sw.conf | 52
-rw-r--r--  samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_4_ports_4_lb_1_sw.conf | 52
-rw-r--r--  samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_6_ports_6_lb_1_sw.conf | 51
-rw-r--r--  samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_6_ports_8_lb_1_sw.conf | 52
-rw-r--r--  samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_8_ports_10_lb_1_sw.conf | 52
-rw-r--r--  samples/vnf_samples/traffic_profiles/ipv4_throughput-scale-up.yaml | 102
-rw-r--r--  samples/vnf_samples/traffic_profiles/prox_binsearch.yaml | 6
-rw-r--r--  test-requirements.txt | 1
-rwxr-xr-x  tests/ci/prepare_storperf_admin-rc.sh | 5
-rw-r--r--  tests/opnfv/test_suites/opnfv_os-odl-bgpvpn-noha_daily.yaml | 23
-rw-r--r--  tests/unit/__init__.py | 64
-rw-r--r--  tests/unit/network_services/helpers/test_dpdkbindnic_helper.py | 402
-rw-r--r--  tests/unit/network_services/traffic_profile/test_base.py | 41
-rw-r--r--  tests/unit/network_services/traffic_profile/test_ixia_rfc2544.py | 2
-rw-r--r--  tests/unit/network_services/traffic_profile/test_prox_binsearch.py | 94
-rw-r--r--  tests/unit/network_services/traffic_profile/test_rfc2544.py | 6
-rw-r--r--  tests/unit/network_services/traffic_profile/test_trex_traffic_profile.py (renamed from tests/unit/network_services/traffic_profile/test_traffic_profile.py) | 20
-rw-r--r--  tests/unit/network_services/vnf_generic/vnf/test_acl_vnf.py | 3
-rw-r--r--  tests/unit/network_services/vnf_generic/vnf/test_prox_helpers.py | 33
-rw-r--r--  tests/unit/network_services/vnf_generic/vnf/test_prox_vnf.py | 29
-rw-r--r--  tests/unit/network_services/vnf_generic/vnf/test_sample_vnf.py | 103
-rw-r--r--  tests/unit/network_services/vnf_generic/vnf/test_tg_ixload.py | 57
-rw-r--r--  tests/unit/network_services/vnf_generic/vnf/test_tg_ping.py | 2
-rw-r--r--  tests/unit/network_services/vnf_generic/vnf/test_tg_rfc2544_ixia.py | 6
-rw-r--r--  tests/unit/network_services/vnf_generic/vnf/test_vfw_vnf.py | 5
-rw-r--r--  tox.ini | 2
-rw-r--r--  yardstick/benchmark/core/task.py | 19
-rwxr-xr-x  yardstick/benchmark/runners/base.py | 24
-rw-r--r--  yardstick/benchmark/scenarios/networking/sfc_openstack.py | 16
-rw-r--r--  yardstick/benchmark/scenarios/networking/vnf_generic.py | 274
-rw-r--r--  yardstick/common/exceptions.py | 5
-rw-r--r--  yardstick/common/openstack_utils.py | 51
-rw-r--r--  yardstick/common/packages.py | 87
-rw-r--r--  yardstick/common/privsep.py | 23
-rw-r--r--  yardstick/common/utils.py | 72
-rw-r--r--  yardstick/dispatcher/influxdb.py | 34
-rw-r--r--  yardstick/error.py | 48
-rw-r--r--  yardstick/network_services/constants.py | 17
-rw-r--r--  yardstick/network_services/helpers/dpdkbindnic_helper.py | 313
-rw-r--r--  yardstick/network_services/traffic_profile/__init__.py | 33
-rw-r--r--  yardstick/network_services/traffic_profile/base.py | 12
-rw-r--r--  yardstick/network_services/traffic_profile/ixia_rfc2544.py | 2
-rw-r--r--  yardstick/network_services/traffic_profile/prox_binsearch.py | 51
-rw-r--r--  yardstick/network_services/traffic_profile/rfc2544.py | 2
-rw-r--r--  yardstick/network_services/traffic_profile/trex_traffic_profile.py (renamed from yardstick/network_services/traffic_profile/traffic_profile.py) | 6
-rw-r--r--  yardstick/network_services/utils.py | 1
-rw-r--r--  yardstick/network_services/vnf_generic/vnf/acl_vnf.py | 9
-rw-r--r--  yardstick/network_services/vnf_generic/vnf/prox_helpers.py | 47
-rw-r--r--  yardstick/network_services/vnf_generic/vnf/prox_vnf.py | 29
-rw-r--r--  yardstick/network_services/vnf_generic/vnf/sample_vnf.py | 135
-rw-r--r--  yardstick/network_services/vnf_generic/vnf/tg_ixload.py | 13
-rw-r--r--  yardstick/network_services/vnf_generic/vnf/tg_rfc2544_ixia.py | 11
-rw-r--r--  yardstick/network_services/vnf_generic/vnf/vfw_vnf.py | 8
-rw-r--r--  yardstick/network_services/vnf_generic/vnf/vnf_ssh_helper.py | 61
-rw-r--r--  yardstick/orchestrator/heat.py | 31
-rw-r--r--  yardstick/ssh.py | 68
-rw-r--r--  yardstick/tests/__init__.py | 75
-rw-r--r--  yardstick/tests/functional/base.py | 46
-rw-r--r--  yardstick/tests/functional/benchmark/__init__.py | 0
-rw-r--r--  yardstick/tests/functional/benchmark/scenarios/__init__.py | 0
-rw-r--r--  yardstick/tests/functional/benchmark/scenarios/networking/__init__.py | 0
-rw-r--r--  yardstick/tests/functional/benchmark/scenarios/networking/test_vnf_generic.py | 195
-rw-r--r--  yardstick/tests/functional/common/fake_directory_package/README.md | 2
-rw-r--r--  yardstick/tests/functional/common/fake_directory_package/setup.py | 29
-rw-r--r--  yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/__init__.py | 0
-rw-r--r--  yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/__init__.py | 0
-rw-r--r--  yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/__init__.py | 0
-rw-r--r--  yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/dummy2/__init__.py | 0
-rw-r--r--  yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/dummy2/dummy2.py | 40
-rw-r--r--  yardstick/tests/functional/common/fake_pip_package/yardstick_new_plugin-1.0.0.tar.gz | bin 0 -> 1650 bytes
-rw-r--r--  yardstick/tests/functional/common/test_packages.py | 94
-rw-r--r--  yardstick/tests/unit/__init__.py | 65
-rw-r--r--  yardstick/tests/unit/base.py | 23
-rw-r--r--  yardstick/tests/unit/benchmark/core/test_task.py | 11
-rw-r--r--  yardstick/tests/unit/benchmark/runner/test_base.py | 17
-rw-r--r--  yardstick/tests/unit/benchmark/runner/test_search.py | 11
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py | 314
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/test_base.py | 7
-rw-r--r--  yardstick/tests/unit/common/test_packages.py | 88
-rw-r--r--  yardstick/tests/unit/common/test_utils.py | 92
-rw-r--r--  yardstick/tests/unit/test_ssh.py | 54
95 files changed, 8924 insertions, 1053 deletions
diff --git a/ansible/roles/convert_openrc/tasks/main.yml b/ansible/roles/convert_openrc/tasks/main.yml
index 1606b0bc5..be621f0af 100644
--- a/ansible/roles/convert_openrc/tasks/main.yml
+++ b/ansible/roles/convert_openrc/tasks/main.yml
@@ -29,14 +29,8 @@
auth_url: "{{ openrc.OS_AUTH_URL }}"
password: "{{ openrc.OS_PASSWORD }}"
username: "{{ openrc.OS_USERNAME }}"
- project_name: "{{ openrc.OS_PROJECT_NAME }}"
-# tenant_name: "{{ openrc.OS_TENANT_NAME }}"
+ project_name: "{{ openrc.OS_PROJECT_NAME|default(openrc.OS_TENANT_NAME) }}"
project_domain_name: "{{ openrc.OS_PROJECT_DOMAIN_NAME }}"
-# user_domain_name: "{{ openrc.OS_USER_DOMAIN_NAME }}"
- # BUGS: We need to specify identity_api_version == 3, but we can't do it here
- # because it is not the write place
- # we need to set it via OS_IDENTITY_API_VERSION or clouds.yaml
-# identity_api_version: "{{ openrc.OS_IDENTITY_API_VERSION }}"
- debug: var=os_auth
@@ -44,7 +38,7 @@
clouds:
demo:
# must specify API version here
- identity_api_version: "{{ openrc.OS_IDENTITY_API_VERSION }}"
+ identity_api_version: "{{ openrc.OS_IDENTITY_API_VERSION|default(3) }}"
auth: "{{ os_auth }}"
- template:
diff --git a/ansible/roles/create_storperf_admin_rc/tasks/main.yml b/ansible/roles/create_storperf_admin_rc/tasks/main.yml
index bd1418e89..f63d1d814 100644
--- a/ansible/roles/create_storperf_admin_rc/tasks/main.yml
+++ b/ansible/roles/create_storperf_admin_rc/tasks/main.yml
@@ -12,15 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
---
-- name: Fetch TENANT_ID
- os_project_facts:
- name: admin
- environment: "{{ openrc }}"
-
-- name: Fetch TENANT_ID
- set_fact:
- os_tenant_id: "{{ openstack_projects[0].id }}"
-
- name: Create storperf_admin-rc
template:
src: storperf_admin-rc.j2
diff --git a/ansible/roles/create_storperf_admin_rc/templates/storperf_admin-rc.j2 b/ansible/roles/create_storperf_admin_rc/templates/storperf_admin-rc.j2
index 410ab24df..888e87118 100644
--- a/ansible/roles/create_storperf_admin_rc/templates/storperf_admin-rc.j2
+++ b/ansible/roles/create_storperf_admin_rc/templates/storperf_admin-rc.j2
@@ -1,7 +1,5 @@
OS_AUTH_URL="{{ openrc.OS_AUTH_URL }}"
OS_USERNAME="{{ openrc.OS_USERNAME|default('admin') }}"
OS_PASSWORD="{{ openrc.OS_PASSWORD|default('console') }}"
-OS_TENANT_NAME="{{ openrc.OS_TENANT_NAME|default('admin') }}"
OS_VOLUME_API_VERSION="{{ openrc.OS_VOLUME_API_VERSION|default('2') }}"
-OS_PROJECT_NAME="{{ openrc.OS_PROJECT_NAME|default(openrc.OS_TENANT_NAME) }}"
-OS_TENANT_ID="{{ os_tenant_id }}"
+OS_PROJECT_NAME="{{ openrc.OS_PROJECT_NAME|default(openrc.OS_TENANT_NAME)|default('admin') }}"
diff --git a/api/resources/v1/env.py b/api/resources/v1/env.py
index 7c831fd74..75c981a96 100644
--- a/api/resources/v1/env.py
+++ b/api/resources/v1/env.py
@@ -22,6 +22,8 @@ import collections
from six.moves import configparser
from oslo_serialization import jsonutils
from docker import Client
+from docker.errors import APIError
+from requests.exceptions import HTTPError
from api.database.v1.handlers import AsyncTaskHandler
from api.utils import influx
@@ -44,7 +46,7 @@ class V1Env(ApiResource):
def post(self):
return self._dispatch_post()
- def create_grafana(self, args):
+ def create_grafana(self, *args):
task_id = str(uuid.uuid4())
thread = threading.Thread(target=self._create_grafana, args=(task_id,))
@@ -82,7 +84,7 @@ class V1Env(ApiResource):
self._update_task_status(task_id)
LOG.info('Finished')
- except Exception as e:
+ except (APIError, HTTPError) as e:
self._update_task_error(task_id, str(e))
LOG.exception('Create grafana failed')
@@ -117,7 +119,7 @@ class V1Env(ApiResource):
"isDefault": True,
}
try:
- HttpClient().post(url, data, timeout=10)
+ HttpClient().post(url, data, timeout=60)
except Exception:
LOG.exception('Create datasources failed')
raise
@@ -145,7 +147,7 @@ class V1Env(ApiResource):
return any(t in a['RepoTags'][0]
for a in client.images() if a['RepoTags'])
- def create_influxdb(self, args):
+ def create_influxdb(self, *args):
task_id = str(uuid.uuid4())
thread = threading.Thread(target=self._create_influxdb, args=(task_id,))
@@ -185,7 +187,7 @@ class V1Env(ApiResource):
self._update_task_status(task_id)
LOG.info('Finished')
- except Exception as e:
+ except APIError as e:
self._update_task_error(task_id, str(e))
LOG.exception('Creating influxdb failed')
@@ -217,7 +219,7 @@ class V1Env(ApiResource):
consts.INFLUXDB_DB_NAME)
client.create_database(consts.INFLUXDB_DB_NAME)
LOG.info('Success to config influxDB')
- except Exception:
+ except HTTPError:
LOG.exception('Config influxdb failed')
def _change_output_to_influxdb(self, ip):
@@ -236,7 +238,7 @@ class V1Env(ApiResource):
with open(consts.CONF_FILE, 'w') as f:
parser.write(f)
- def prepare_env(self, args):
+ def prepare_env(self, *args):
task_id = str(uuid.uuid4())
thread = threading.Thread(target=self._prepare_env_daemon,
@@ -287,7 +289,7 @@ class V1Env(ApiResource):
self._update_task_status(task_id)
LOG.info('Finished')
- except Exception as e:
+ except (subprocess.CalledProcessError, OSError) as e:
self._update_task_error(task_id, str(e))
LOG.exception('Prepare env failed')
@@ -373,7 +375,7 @@ class V1Env(ApiResource):
LOG.info('Source openrc: Sourcing')
try:
self._source_file(consts.OPENRC)
- except Exception as e:
+ except subprocess.CalledProcessError as e:
LOG.exception('Failed to source openrc')
return result_handler(consts.API_ERROR, str(e))
LOG.info('Source openrc: Done')
diff --git a/dashboard/Prox_BM_L2FWD-4Port_MultiSize-1518452496550.json b/dashboard/Prox_BM_L2FWD-4Port_MultiSize-1518452496550.json
new file mode 100644
index 000000000..3c78ab18d
--- /dev/null
+++ b/dashboard/Prox_BM_L2FWD-4Port_MultiSize-1518452496550.json
@@ -0,0 +1,5817 @@
+{
+ "__inputs": [
+ {
+ "name": "DS_YARDSTICK",
+ "label": "yardstick",
+ "description": "",
+ "type": "datasource",
+ "pluginId": "influxdb",
+ "pluginName": "InfluxDB"
+ }
+ ],
+ "__requires": [
+ {
+ "type": "grafana",
+ "id": "grafana",
+ "name": "Grafana",
+ "version": "4.4.3"
+ },
+ {
+ "type": "panel",
+ "id": "graph",
+ "name": "Graph",
+ "version": ""
+ },
+ {
+ "type": "datasource",
+ "id": "influxdb",
+ "name": "InfluxDB",
+ "version": "1.0.0"
+ },
+ {
+ "type": "panel",
+ "id": "singlestat",
+ "name": "Singlestat",
+ "version": ""
+ },
+ {
+ "type": "panel",
+ "id": "text",
+ "name": "Text",
+ "version": ""
+ }
+ ],
+ "annotations": {
+ "list": []
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "hideControls": false,
+ "id": null,
+ "links": [],
+ "refresh": false,
+ "rows": [
+ {
+ "collapse": false,
+ "height": "100px",
+ "panels": [
+ {
+ "content": "<h5 style=\"font-family:Verdana\"> <a style=\"color:#31A7D3\"><a style=\"font: 32px '#31A7D3'\"><center>OPNFV_Yardstick_NSB_PROX_BM_L2FWD_4Port_Test</center> </a></h5>\n<center>\n<p>The application does Port forwarding without touching packets. It will take packets in from one port and forward them unmodified to another port </p>\n<p>The KPI is the number of packets per second for a specified packet size with an accepted minimal packet loss </p>\n</center>",
+ "editable": true,
+ "error": false,
+ "id": 3,
+ "links": [],
+ "mode": "html",
+ "span": 12,
+ "title": "",
+ "type": "text"
+ },
+ {
+ "content": "<h5 style=\"font-family:Verdana\"> <a style=\"color:#31A7D3\"><a style=\"font: 22px '#31A7D3'\"><center>Throughput</center> </a></h5>\n",
+ "editable": true,
+ "error": false,
+ "height": "40",
+ "id": 7,
+ "links": [],
+ "minSpan": 12,
+ "mode": "html",
+ "span": 12,
+ "title": "",
+ "type": "text"
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": false,
+ "title": "Row",
+ "titleSize": "h6"
+ },
+ {
+ "collapse": false,
+ "height": "300px",
+ "panels": [
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 4,
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {},
+ "id": 6,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "show": true,
+ "sortDesc": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 12,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+          "alias": "Cumulative Packets Sent",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.xe0.out_packets\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE $timeFilter GROUP BY time($interval) fill(null)",
+ "rawQuery": false,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "vnf__0.packets_fwd"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [
+ {
+ "colorMode": "custom",
+ "fill": true,
+ "fillColor": "rgba(216, 200, 27, 0.27)",
+ "op": "gt",
+ "value": 2
+ },
+ {
+ "colorMode": "custom",
+ "fill": true,
+ "fillColor": "rgba(234, 112, 112, 0.22)",
+ "op": "gt",
+ "value": 2
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Cumulative Load Sent by Generator",
+ "tooltip": {
+ "msResolution": true,
+ "shared": true,
+ "sort": 0,
+ "value_type": "cumulative"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Packets Per Second",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {},
+ "id": 9,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "TG xe-0 in packets",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.xe0.in_packets"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "TG xe-1 in packets",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.xe1.in_packets"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "TG xe-2 in packets",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "C",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.xe2.in_packets"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "TG xe-3 in packets",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "D",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.xe3.in_packets"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [
+ {
+ "colorMode": "custom",
+ "fill": true,
+ "fillColor": "rgba(216, 200, 27, 0.27)",
+ "op": "gt",
+ "value": 2
+ },
+ {
+ "colorMode": "custom",
+ "fill": true,
+ "fillColor": "rgba(234, 112, 112, 0.22)",
+ "op": "gt",
+ "value": 2
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Load Received by Generator",
+ "tooltip": {
+ "msResolution": true,
+ "shared": true,
+ "sort": 0,
+ "value_type": "cumulative"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Packets Per Second",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 4,
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {},
+ "id": 43,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "show": true,
+ "sortDesc": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "TG xe-0 Out packets",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.xe0.out_packets"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "TG xe-1 Out packets",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.xe1.out_packets"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "TG xe-2 Out packets",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "C",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.xe2.out_packets"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "TG xe-3 Out packets",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "D",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.xe3.out_packets"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [
+ {
+ "colorMode": "custom",
+ "fill": true,
+ "fillColor": "rgba(216, 200, 27, 0.27)",
+ "op": "gt",
+ "value": 2
+ },
+ {
+ "colorMode": "custom",
+ "fill": true,
+ "fillColor": "rgba(234, 112, 112, 0.22)",
+ "op": "gt",
+ "value": 2
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Load Sent by Generator",
+ "tooltip": {
+ "msResolution": true,
+ "shared": true,
+ "sort": 0,
+ "value_type": "cumulative"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Packets Per Second",
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ]
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": false,
+ "title": "New row",
+ "titleSize": "h6"
+ },
+ {
+ "collapse": false,
+ "height": "300px",
+ "panels": [
+ {
+ "content": "<h5 style=\"font-family:Verdana\"> <a style=\"color:#31A7D3\"><a style=\"font: 22px '#31A7D3'\"><center>Prox L2Fwd Traffic Gen stats</center> </a></h5>\n",
+ "editable": true,
+ "error": false,
+ "height": "40",
+ "id": 8,
+ "links": [],
+ "minSpan": 12,
+ "mode": "html",
+ "span": 12,
+ "title": "",
+ "type": "text"
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {},
+ "height": "300",
+ "id": 4,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "rightSide": false,
+ "show": true,
+ "sortDesc": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 1,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "SUT Packets Received",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "previous"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "C",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "vnf__0.curr_packets_in"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [
+ {
+ "colorMode": "custom",
+ "fill": true,
+ "fillColor": "rgba(216, 200, 27, 0.27)",
+ "op": "gt",
+ "value": 2
+ },
+ {
+ "colorMode": "custom",
+ "fill": true,
+ "fillColor": "rgba(234, 112, 112, 0.22)",
+ "op": "gt",
+ "value": 2
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "SUT Stats - Load Received By SUT",
+ "tooltip": {
+ "msResolution": true,
+ "shared": true,
+ "sort": 0,
+ "value_type": "cumulative"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Packets per Second",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {},
+ "height": "300",
+ "id": 39,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "rightSide": false,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 1,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "SUT Packets Sent",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "previous"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "vnf__0.curr_packets_in"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [
+ {
+ "colorMode": "custom",
+ "fill": true,
+ "fillColor": "rgba(216, 200, 27, 0.27)",
+ "op": "gt",
+ "value": 2
+ },
+ {
+ "colorMode": "custom",
+ "fill": true,
+ "fillColor": "rgba(234, 112, 112, 0.22)",
+ "op": "gt",
+ "value": 2
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "SUT Stats - Load Forwarded By SUT",
+ "tooltip": {
+ "msResolution": true,
+ "shared": true,
+ "sort": 0,
+ "value_type": "cumulative"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Packets per Second",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ]
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": false,
+ "title": "New row",
+ "titleSize": "h6"
+ },
+ {
+ "collapse": false,
+ "height": "250px",
+ "panels": [
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 4,
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {},
+ "id": 2,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "Load Requested by Generator",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.TxThroughput"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "Rx Throughput",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "hide": true,
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.RxThroughput"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [
+ {
+ "colorMode": "custom",
+ "fill": true,
+ "fillColor": "rgba(216, 200, 27, 0.27)",
+ "op": "gt",
+ "value": 2
+ },
+ {
+ "colorMode": "custom",
+ "fill": true,
+ "fillColor": "rgba(234, 112, 112, 0.22)",
+ "op": "gt",
+ "value": 2
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Theoretical Throughput",
+ "tooltip": {
+ "msResolution": true,
+ "shared": true,
+ "sort": 0,
+ "value_type": "cumulative"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Packets Per Second",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {},
+ "id": 5,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "Packet Size",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.PktSize"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [
+ {
+ "colorMode": "custom",
+ "fill": true,
+ "fillColor": "rgba(216, 200, 27, 0.27)",
+ "op": "gt",
+ "value": 2
+ },
+ {
+ "colorMode": "custom",
+ "fill": true,
+ "fillColor": "rgba(234, 112, 112, 0.22)",
+ "op": "gt",
+ "value": 2
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "Packet size",
+ "tooltip": {
+ "msResolution": true,
+ "shared": true,
+ "sort": 0,
+ "value_type": "cumulative"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "bytes",
+ "label": "Packet Size",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ]
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": false,
+ "title": "New row",
+ "titleSize": "h6"
+ },
+ {
+ "collapse": false,
+ "height": "250px",
+ "panels": [
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "fill": 1,
+ "id": 10,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "show": true,
+ "sortDesc": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "SUCCESS Tx Total",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "none"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Success_tx_total"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "SUCCESS Rx Total",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Success_rx_total"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "SUCCESS ALLOWABLE LOST PACKETS",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "C",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Success_can_be_lost"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "SUCCESS CRITERIA: TX Total = Rx Total + Tolerated Loss",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "Packets Per Second",
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ]
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "height": "35",
+ "id": 12,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 3,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.duration"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": "",
+ "title": "Test Interval",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "height": "30",
+ "id": 11,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 3,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "alias": "Test Duration",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.test_duration"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": "",
+ "title": "Test Duration",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "height": "30",
+ "id": 13,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 3,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "alias": "Test Precision",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.test_precision"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": "",
+ "title": "Test Precision",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "height": "30",
+ "id": 14,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 3,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "alias": "Tolerated Loss",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.tolerated_loss"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": "",
+ "title": "Tolerated Loss",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": false,
+ "title": "New row",
+ "titleSize": "h6"
+ },
+ {
+ "collapse": false,
+ "height": "30",
+ "panels": [
+ {
+ "content": "<center>Packet size</center>",
+ "height": "30px",
+ "id": 15,
+ "links": [],
+ "mode": "html",
+ "span": 4,
+ "title": "",
+ "type": "text"
+ },
+ {
+ "content": "<center>Theoretical Max Throughput (Million Packets Per Second)</center>",
+ "height": "30px",
+ "id": 16,
+ "links": [],
+ "mode": "html",
+ "span": 4,
+ "title": "",
+ "type": "text"
+ },
+ {
+ "content": "<center>Max Actual Throughput (Million Packets Per Second)</center>",
+ "height": "30px",
+ "id": 17,
+ "links": [],
+ "mode": "html",
+ "span": 4,
+ "title": "",
+ "type": "text"
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": false,
+ "title": "Dashboard Row",
+ "titleSize": "h6"
+ },
+ {
+ "collapse": false,
+ "height": "30px",
+ "panels": [
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 0,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "height": "30px",
+ "id": 18,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "alias": "Theoretical Max Throughput (Mpps)",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_pktSize\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 64 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_pktSize"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSize",
+ "operator": "=",
+ "value": "64"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 4,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "height": "30px",
+ "id": 19,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "alias": "Max Throughput (Mpps)",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_theor_max_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 64 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_theor_max_throughput"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSize",
+ "operator": "=",
+ "value": "64"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 4,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "height": "30px",
+ "id": 20,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_Actual_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 64 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_Actual_throughput"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSize",
+ "operator": "=",
+ "value": "64"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": false,
+ "title": "Dashboard Row",
+ "titleSize": "h3"
+ },
+ {
+ "collapse": false,
+ "height": "30",
+ "panels": [
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": null,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "height": "30px",
+ "id": 21,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_pktSize\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 128 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_pktSize"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSize",
+ "operator": "=",
+ "value": "128"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 4,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "height": "30px",
+ "id": 22,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_theor_max_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 128 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_theor_max_throughput"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSize",
+ "operator": "=",
+ "value": "128"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 4,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "height": "30px",
+ "id": 23,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_Actual_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 128 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_Actual_throughput"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSize",
+ "operator": "=",
+ "value": "128"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": false,
+ "title": "Dashboard Row",
+ "titleSize": "h6"
+ },
+ {
+ "collapse": false,
+ "height": "30px",
+ "panels": [
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 0,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "id": 24,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_pktSize\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 256 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_pktSize"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSiuze",
+ "operator": "=",
+ "value": "256"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 4,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "id": 25,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_theor_max_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 256 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_theor_max_throughput"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSize",
+ "operator": "=",
+ "value": "256"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 4,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "id": 26,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_Actual_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 256 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_Actual_throughput"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSize",
+ "operator": "=",
+ "value": "256"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": false,
+ "title": "Dashboard Row",
+ "titleSize": "h3"
+ },
+ {
+ "collapse": false,
+ "height": -82,
+ "panels": [
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "height": "30px",
+ "id": 27,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_pktSize\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 512 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_pktSize"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSize",
+ "operator": "=",
+ "value": "512"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 4,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "height": "30px",
+ "id": 28,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_theor_max_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 512 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_theor_max_throughput"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSize",
+ "operator": "=",
+ "value": "512"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 4,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "height": "30px",
+ "id": 29,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_Actual_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 512 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_Actual_throughput"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSize",
+ "operator": "=",
+ "value": "512"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": false,
+ "title": "Dashboard Row",
+ "titleSize": "h3"
+ },
+ {
+ "collapse": false,
+ "height": "30px",
+ "panels": [
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "id": 30,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_pktSize\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1024 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_pktSize"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSize",
+ "operator": "=",
+ "value": "1024"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 4,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "id": 31,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_theor_max_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1024 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_theor_max_throughput"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSize",
+ "operator": "=",
+ "value": "1024"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 4,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "id": 32,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_Actual_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1024 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_Actual_throughput"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSize",
+ "operator": "=",
+ "value": "1024"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": false,
+ "title": "Dashboard Row",
+ "titleSize": "h3"
+ },
+ {
+ "collapse": false,
+ "height": "30px",
+ "panels": [
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "height": "30px",
+ "id": 33,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_pktSize\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1280 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_pktSize"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSize",
+ "operator": "=",
+ "value": "1280"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 4,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "height": "30px",
+ "id": 34,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_theor_max_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1280 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_theor_max_throughput"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSize",
+ "operator": "=",
+ "value": "1280"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 4,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "height": "30px",
+ "id": 35,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_Actual_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1280 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.Result_Actual_throughput"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": [
+ {
+ "key": "tg__0.Result_pktSize",
+ "operator": "=",
+ "value": "1280"
+ }
+ ]
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": false,
+ "title": "Dashboard Row",
+ "titleSize": "h6"
+ },
+ {
+ "collapse": false,
+ "height": "30",
+ "panels": [
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "id": 44,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_pktSize\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1518 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 4,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "id": 45,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_theor_max_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1518 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ },
+ {
+ "cacheTimeout": null,
+ "colorBackground": false,
+ "colorValue": false,
+ "colors": [
+ "rgba(245, 54, 54, 0.9)",
+ "rgba(237, 129, 40, 0.89)",
+ "rgba(50, 172, 45, 0.97)"
+ ],
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 4,
+ "format": "none",
+ "gauge": {
+ "maxValue": 100,
+ "minValue": 0,
+ "show": false,
+ "thresholdLabels": false,
+ "thresholdMarkers": true
+ },
+ "id": 46,
+ "interval": null,
+ "links": [],
+ "mappingType": 1,
+ "mappingTypes": [
+ {
+ "name": "value to text",
+ "value": 1
+ },
+ {
+ "name": "range to text",
+ "value": 2
+ }
+ ],
+ "maxDataPoints": 100,
+ "nullPointMode": "connected",
+ "nullText": null,
+ "postfix": "",
+ "postfixFontSize": "50%",
+ "prefix": "",
+ "prefixFontSize": "50%",
+ "rangeMaps": [
+ {
+ "from": "null",
+ "text": "N/A",
+ "to": "null"
+ }
+ ],
+ "span": 4,
+ "sparkline": {
+ "fillColor": "rgba(31, 118, 189, 0.18)",
+ "full": false,
+ "lineColor": "rgb(31, 120, 193)",
+ "show": false
+ },
+ "tableColumn": "",
+ "targets": [
+ {
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "orderByTime": "ASC",
+ "policy": "default",
+ "query": "SELECT mean(\"tg__0.Result_Actual_throughput\") FROM \"tc_prox_baremetal_l2fwd-4\" WHERE \"tg__0.Result_pktSize\" = 1518 AND $timeFilter GROUP BY time($__interval) fill(null)",
+ "rawQuery": true,
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "value"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": "",
+ "title": "",
+ "type": "singlestat",
+ "valueFontSize": "80%",
+ "valueMaps": [
+ {
+ "op": "=",
+ "text": "N/A",
+ "value": "null"
+ }
+ ],
+ "valueName": "avg"
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": false,
+ "title": "Dashboard Row",
+ "titleSize": "h6"
+ },
+ {
+ "collapse": false,
+ "height": "40px",
+ "panels": [
+ {
+ "content": "<h5 style=\"font-family:Verdana\"> <a style=\"color:#31A7D3\"><a style=\"font: 22px '#31A7D3'\"><center>Latency</center> </a></h5>",
+ "height": "40",
+ "id": 41,
+ "links": [],
+ "mode": "html",
+ "span": 12,
+ "title": "",
+ "type": "text"
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": false,
+ "title": "Dashboard Row",
+ "titleSize": "h6"
+ },
+ {
+ "collapse": false,
+ "height": 250,
+ "panels": [
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "fill": 1,
+ "height": "300px",
+ "id": 47,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "show": true,
+ "sortDesc": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "xe0 Latency Avg",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "none"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.LatencyAvg.5"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "xe0 Latency Max",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.LatencyMax.5"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "xe0 Latency",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "usec",
+ "logBase": 1,
+ "max": "65000",
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ }
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "fill": 1,
+ "height": "300px",
+ "id": 48,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "xe1 Latency Avg",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "none"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.LatencyAvg.6"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "xe1 Latency Max",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.LatencyMax.6"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "xe1 Latency",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "usec",
+ "logBase": 1,
+ "max": "65000",
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "fill": 1,
+ "height": "300px",
+ "id": 49,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "xe2 Latency Avg",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "none"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.LatencyAvg.7"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "xe2 Latency Max",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.LatencyMax.7"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "xe2 Latency",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "usec",
+ "logBase": 1,
+ "max": "65000",
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "fill": 1,
+ "height": "300px",
+ "id": 50,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "show": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "xe3 Latency Avg",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "none"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.LatencyAvg.8"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ },
+ {
+ "alias": "xe3 Latency Max",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "null"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "tg__0.LatencyMax.8"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "mean"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "xe3 Latency",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "usec",
+ "logBase": 1,
+ "max": "65000",
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ]
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": false,
+ "title": "Dashboard Row",
+ "titleSize": "h6"
+ },
+ {
+ "collapse": false,
+ "height": "40px",
+ "panels": [
+ {
+ "content": "<h5 style=\"font-family:Verdana\"> <a style=\"color:#31A7D3\"><a style=\"font: 22px '#31A7D3'\"><center>SUT CPU Utilization</center> </a></h5>",
+ "height": "40px",
+ "id": 51,
+ "links": [],
+ "mode": "html",
+ "span": 12,
+ "title": "",
+ "type": "text"
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": false,
+ "title": "Dashboard Row",
+ "titleSize": "h6"
+ },
+ {
+ "collapse": false,
+ "height": 250,
+ "panels": [
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 5,
+ "fill": 1,
+ "height": "300px",
+ "id": 52,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "show": true,
+ "sortDesc": false,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "CPU 0 Utilization",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "none"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "vnf__0.collect_stats.core.cpu.0.percent-user"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "distinct"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "CPU 0 Utilization - Master Core",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "% Utilization",
+ "logBase": 1,
+ "max": "100",
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ }
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 5,
+ "fill": 1,
+ "height": "300px",
+ "id": 53,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "show": true,
+ "sortDesc": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "CPU 1 Utilization - L2FWD XE0 to XE1",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "none"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "vnf__0.collect_stats.core.cpu.1.percent-user"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "distinct"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "CPU 1 Utilization - L2FWD XE0 to XE1",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "% Utilization",
+ "logBase": 1,
+ "max": "100",
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ }
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 5,
+ "fill": 1,
+ "height": "300px",
+ "id": 54,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "show": true,
+ "sortDesc": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "CPU 2 Utilization",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "none"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "vnf__0.collect_stats.core.cpu.2.percent-user"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "distinct"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "CPU 2 Utilization - L2FWD XE1 to XE0",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "% Utilization",
+ "logBase": 1,
+ "max": "100",
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ }
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 5,
+ "fill": 1,
+ "height": "300px",
+ "id": 55,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "show": true,
+ "sortDesc": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "CPU 3 Utilization",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "none"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "vnf__0.collect_stats.core.cpu.3.percent-user"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "distinct"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "CPU 3 Utilization - L2FWD XE2 to XE3",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "% Utilization",
+ "logBase": 1,
+ "max": "100",
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ }
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 5,
+ "fill": 1,
+ "height": "300px",
+ "id": 56,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "show": true,
+ "sortDesc": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "CPU 4 Utilization",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "none"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "vnf__0.collect_stats.core.cpu.4.percent-user"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "distinct"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "CPU 4 Utilization - L2FWD XE3 to XE2",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "% Utilization",
+ "logBase": 1,
+ "max": "100",
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ }
+ ]
+ },
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": "${DS_YARDSTICK}",
+ "decimals": 5,
+ "fill": 1,
+ "height": "300px",
+ "id": 57,
+ "legend": {
+ "alignAsTable": true,
+ "avg": true,
+ "current": false,
+ "max": true,
+ "min": true,
+ "show": true,
+ "sortDesc": true,
+ "total": false,
+ "values": true
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "span": 6,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "alias": "CPU 5 Utilization",
+ "dsType": "influxdb",
+ "groupBy": [
+ {
+ "params": [
+ "$__interval"
+ ],
+ "type": "time"
+ },
+ {
+ "params": [
+ "none"
+ ],
+ "type": "fill"
+ }
+ ],
+ "measurement": "tc_prox_baremetal_l2fwd-4",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "B",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "vnf__0.collect_stats.core.cpu.5.percent-user"
+ ],
+ "type": "field"
+ },
+ {
+ "params": [],
+ "type": "distinct"
+ }
+ ]
+ ],
+ "tags": []
+ }
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "CPU 5 Utilization",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": "% Utilization",
+ "logBase": 1,
+ "max": "100",
+ "min": "0",
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": "0",
+ "show": true
+ }
+ ]
+ }
+ ],
+ "repeat": null,
+ "repeatIteration": null,
+ "repeatRowId": null,
+ "showTitle": false,
+ "title": "Dashboard Row",
+ "titleSize": "h6"
+ }
+ ],
+ "schemaVersion": 14,
+ "style": "dark",
+ "tags": [
+ "yardstick",
+ "NSB",
+ "Prox",
+ "L2fwd",
+ "4Port",
+ "BM"
+ ],
+ "templating": {
+ "list": []
+ },
+ "time": {
+ "from": "2018-02-12T15:17:27.733Z",
+ "to": "2018-02-12T16:44:28.270Z"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "5s",
+ "10s",
+ "30s",
+ "1m",
+ "5m",
+ "15m",
+ "30m",
+ "1h",
+ "2h",
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "timezone": "browser",
+ "title": "Prox_BM_L2FWD-4Port_MultiSize",
+ "version": 29
+} \ No newline at end of file
diff --git a/docs/testing/user/userguide/04-installation.rst b/docs/testing/user/userguide/04-installation.rst
index 5bb64e3bb..cac814667 100644
--- a/docs/testing/user/userguide/04-installation.rst
+++ b/docs/testing/user/userguide/04-installation.rst
@@ -172,13 +172,13 @@ Environment variables in the ``openrc`` file have to include at least::
OS_AUTH_URL
OS_USERNAME
OS_PASSWORD
- OS_TENANT_NAME
+ OS_PROJECT_NAME
EXTERNAL_NETWORK
A sample ``openrc`` file may look like this::
export OS_PASSWORD=console
- export OS_TENANT_NAME=admin
+ export OS_PROJECT_NAME=admin
export OS_AUTH_URL=http://172.16.1.222:35357/v2.0
export OS_USERNAME=admin
export OS_VOLUME_API_VERSION=2
diff --git a/docs/testing/user/userguide/05-yardstick_plugin.rst b/docs/testing/user/userguide/05-yardstick_plugin.rst
index ec0b49ff1..679ce7900 100644
--- a/docs/testing/user/userguide/05-yardstick_plugin.rst
+++ b/docs/testing/user/userguide/05-yardstick_plugin.rst
@@ -49,7 +49,7 @@ environment and other dependencies:
3. Make sure Jump Host have access to the OpenStack Controller API.
4. Make sure Jump Host must have internet connectivity for downloading docker image.
5. You need to know where to get basic openstack Keystone authorization info, such as
- OS_PASSWORD, OS_TENANT_NAME, OS_AUTH_URL, OS_USERNAME.
+ OS_PASSWORD, OS_PROJECT_NAME, OS_AUTH_URL, OS_USERNAME.
6. To run a Storperf container, you need to have OpenStack Controller environment
variables defined and passed to Storperf container. The best way to do this is to
put environment variables in a "storperf_admin-rc" file. The storperf_admin-rc
@@ -58,8 +58,6 @@ environment and other dependencies:
* OS_AUTH_URL
* OS_USERNAME
* OS_PASSWORD
-* OS_TENANT_ID
-* OS_TENANT_NAME
* OS_PROJECT_NAME
* OS_PROJECT_ID
* OS_USER_DOMAIN_ID
@@ -76,8 +74,9 @@ test/ci/prepare_storperf_admin-rc.sh
USERNAME=${OS_USERNAME:-admin}
PASSWORD=${OS_PASSWORD:-console}
+ # OS_TENANT_NAME is still present to keep backward compatibility with legacy
+ # deployments, but should be replaced by OS_PROJECT_NAME.
TENANT_NAME=${OS_TENANT_NAME:-admin}
- TENANT_ID=`openstack project show admin|grep '\bid\b' |awk -F '|' '{print $3}'|sed -e 's/^[[:space:]]*//'`
PROJECT_NAME=${OS_PROJECT_NAME:-$TENANT_NAME}
PROJECT_ID=`openstack project show admin|grep '\bid\b' |awk -F '|' '{print $3}'|sed -e 's/^[[:space:]]*//'`
USER_DOMAIN_ID=${OS_USER_DOMAIN_ID:-default}
@@ -90,8 +89,6 @@ test/ci/prepare_storperf_admin-rc.sh
echo "OS_PASSWORD="$PASSWORD >> ~/storperf_admin-rc
echo "OS_PROJECT_NAME="$PROJECT_NAME >> ~/storperf_admin-rc
echo "OS_PROJECT_ID="$PROJECT_ID >> ~/storperf_admin-rc
- echo "OS_TENANT_NAME="$TENANT_NAME >> ~/storperf_admin-rc
- echo "OS_TENANT_ID="$TENANT_ID >> ~/storperf_admin-rc
echo "OS_USER_DOMAIN_ID="$USER_DOMAIN_ID >> ~/storperf_admin-rc
diff --git a/docs/testing/user/userguide/08-api.rst b/docs/testing/user/userguide/08-api.rst
index ff6e62228..92fa408c8 100644
--- a/docs/testing/user/userguide/08-api.rst
+++ b/docs/testing/user/userguide/08-api.rst
@@ -252,7 +252,6 @@ Example::
"OS_PASSWORD": "console",
"OS_PROJECT_DOMAIN_NAME": "default",
"OS_PROJECT_NAME": "admin",
- "OS_TENANT_NAME": "admin",
"OS_USERNAME": "admin",
"OS_USER_DOMAIN_NAME": "default"
},
diff --git a/gui/app/scripts/controllers/main.js b/gui/app/scripts/controllers/main.js
index ceec83fa9..dc5c32670 100644
--- a/gui/app/scripts/controllers/main.js
+++ b/gui/app/scripts/controllers/main.js
@@ -107,7 +107,7 @@ angular.module('yardStickGui2App')
$scope.envInfo = [
{ name: 'OS_USERNAME', value: '' },
{ name: 'OS_PASSWORD', value: '' },
- { name: 'OS_TENANT_NAME', value: '' },
+ { name: 'OS_PROJECT_NAME', value: '' },
{ name: 'EXTERNAL_NETWORK', value: '' }
];
@@ -298,7 +298,7 @@ angular.module('yardStickGui2App')
$scope.envInfo = [
{ name: 'OS_USERNAME', value: '' },
{ name: 'OS_PASSWORD', value: '' },
- { name: 'OS_TENANT_NAME', value: '' },
+ { name: 'OS_PROJECT_NAME', value: '' },
{ name: 'EXTERNAL_NETWORK', value: '' }
];
$scope.displayOpenrcFile = null;
diff --git a/requirements.txt b/requirements.txt
index aacafdf93..d45e4b149 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -37,11 +37,13 @@ os-client-config==1.28.0 # OSI Approved Apache Software License
osc-lib==1.7.0 # OSI Approved Apache Software License
oslo.config==4.11.1 # OSI Approved Apache Software License
oslo.i18n==3.17.0 # OSI Approved Apache Software License
+oslo.privsep==1.22.1 # OSI Approved Apache Software License
oslo.serialization==2.20.1 # OSI Approved Apache Software License
oslo.utils==3.28.0 # OSI Approved Apache Software License
paramiko==2.2.1 # LGPL; OSI Approved GNU Library or Lesser General Public License (LGPL)
pbr==3.1.1 # OSI Approved Apache Software License; Apache License, Version 2.0
pika==0.10.0 # BSD; OSI Approved BSD License
+pip==9.0.1;python_version=='2.7' # MIT
positional==1.1.2 # OSI Approved Apache Software License
pycrypto==2.6.1 # Public Domain
pyparsing==2.2.0 # MIT License; OSI Approved MIT License
diff --git a/samples/vnf_samples/nsut/prox/configs/gen_l2fwd-2.cfg b/samples/vnf_samples/nsut/prox/configs/gen_l2fwd-2.cfg
index e7fad98bf..192f2f89a 100644
--- a/samples/vnf_samples/nsut/prox/configs/gen_l2fwd-2.cfg
+++ b/samples/vnf_samples/nsut/prox/configs/gen_l2fwd-2.cfg
@@ -49,7 +49,8 @@ mode=gen
tx port=p0
bps=1250000000
; Ethernet + IP + UDP
-pkt inline=${sut_mac0} 70 00 00 00 00 01 08 00 45 00 00 1c 00 01 00 00 40 11 f7 7d 00 00 00 01 00 00 00 02 13 88 13 88 00 08 55 7b
+pkt inline=${sut_mac0} 3c fd fe 9f a3 08 08 00 45 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+lat pos=38
[core 2]
name=gen 1
@@ -58,19 +59,20 @@ mode=gen
tx port=p1
bps=1250000000
; Ethernet + IP + UDP
-pkt inline=${sut_mac1} 70 00 00 00 00 02 08 00 45 00 00 1c 00 01 00 00 40 11 f7 7d 00 00 00 01 00 00 00 03 13 88 13 88 00 08 55 7b
+pkt inline=${sut_mac1} 3c fd fe 9f a3 08 08 00 45 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+lat pos=38
[core 3]
name=rec 0
task=0
mode=lat
rx port=p0
-lat pos=42
+lat pos=38
[core 4]
name=rec 0
task=0
mode=lat
rx port=p1
-lat pos=42
+lat pos=38
diff --git a/samples/vnf_samples/nsut/prox/configs/gen_l2fwd-4.cfg b/samples/vnf_samples/nsut/prox/configs/gen_l2fwd-4.cfg
index 5b79185a7..0db21b681 100644
--- a/samples/vnf_samples/nsut/prox/configs/gen_l2fwd-4.cfg
+++ b/samples/vnf_samples/nsut/prox/configs/gen_l2fwd-4.cfg
@@ -61,7 +61,8 @@ mode=gen
tx port=p0
bps=1250000000
; Ethernet + IP + UDP
-pkt inline=${sut_mac0} 3c fd fe 9f a3 a0 08 a0 00 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+pkt inline=${sut_mac0} 3c fd fe 9f a3 08 08 00 45 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+lat pos=38
[core 2]
name=gen 1
@@ -70,7 +71,8 @@ mode=gen
tx port=p1
bps=1250000000
; Ethernet + IP + UDP
-pkt inline=${sut_mac1} 3c fd fe 9f a5 50 08 a0 00 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+pkt inline=${sut_mac1} 3c fd fe 9f a3 08 08 00 45 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+lat pos=38
[core 3]
name=gen 2
@@ -79,7 +81,8 @@ mode=gen
tx port=p2
bps=1250000000
; Ethernet + IP + UDP
-pkt inline=${sut_mac2} 3c fd fe 9f a5 50 08 a0 00 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+pkt inline=${sut_mac2} 3c fd fe 9f a5 08 08 00 45 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+lat pos=38
[core 4]
name=gen 3
@@ -88,28 +91,33 @@ mode=gen
tx port=p3
bps=1250000000
; Ethernet + IP + UDP
-pkt inline=${sut_mac3} 3c fd fe 9f a5 50 08 a0 00 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+pkt inline=${sut_mac3} 3c fd fe 9f a5 08 08 00 45 45 00 00 1c 00 01 00 00 40 11 f7 7d c0 a8 01 01 c0 a8 01 01 13 88 13 88 00 08 55 7b
+lat pos=38
[core 5]
name=rec 0
task=0
mode=lat
rx port=p0
+lat pos=38
[core 6]
name=rec 1
task=0
mode=lat
rx port=p1
+lat pos=38
[core 7]
name=rec 2
task=0
mode=lat
rx port=p2
+lat pos=38
[core 8]
name=rec 3
task=0
mode=lat
rx port=p3
+lat pos=38 \ No newline at end of file
diff --git a/samples/vnf_samples/nsut/vfw/tc_heat_rfc2544_ipv4_1rule_1flow_64B_trex_scale-up.yaml b/samples/vnf_samples/nsut/vfw/tc_heat_rfc2544_ipv4_1rule_1flow_64B_trex_scale-up.yaml
new file mode 100644
index 000000000..eaeee7103
--- /dev/null
+++ b/samples/vnf_samples/nsut/vfw/tc_heat_rfc2544_ipv4_1rule_1flow_64B_trex_scale-up.yaml
@@ -0,0 +1,89 @@
+# Copyright (c) 2016-2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+{% set mem = mem or 20480 %}
+{% set vcpus = vcpus or 10 %}
+{% set vports = vports or 2 %}
+---
+schema: yardstick:task:0.1
+scenarios:
+- type: NSPerf
+ traffic_profile: ../../traffic_profiles/ipv4_throughput-scale-up.yaml
+ extra_args:
+ vports: {{ vports }}
+ topology: vfw-tg-topology-scale-up.yaml
+ nodes:
+ tg__0: tg_0.yardstick
+ vnf__0: vnf_0.yardstick
+ options:
+ framesize:
+ uplink: {64B: 100}
+ downlink: {64B: 100}
+ flow:
+ src_ip: [
+{% for vport in range(0,vports,2|int) %}
+ {'tg__0': 'xe{{vport}}'},
+{% endfor %} ]
+ dst_ip: [
+{% for vport in range(1,vports,2|int) %}
+ {'tg__0': 'xe{{vport}}'},
+{% endfor %} ]
+ count: 1
+ traffic_type: 4
+ rfc2544:
+ allowed_drop_rate: 0.0001 - 0.0001
+ vnf__0:
+ rules: acl_1rule.yaml
+ vnf_config: {lb_config: 'SW', file: vfw_vnf_pipeline_cores_{{vcpus}}_ports_{{vports}}_lb_1_sw.conf }
+ runner:
+ type: Iteration
+ iterations: 10
+ interval: 35
+context:
+ # put node context first, so we don't HEAT deploy if node has errors
+ name: yardstick
+ image: yardstick-samplevnfs
+ flavor:
+ vcpus: {{ vcpus }}
+ ram: {{ mem }}
+ disk: 6
+ extra_specs:
+ hw:cpu_sockets: 1
+ hw:cpu_cores: {{ vcpus }}
+ hw:cpu_threads: 1
+ user: ubuntu
+ placement_groups:
+ pgrp1:
+ policy: "availability"
+ servers:
+ tg_0:
+ floating_ip: true
+ placement: "pgrp1"
+ vnf_0:
+ floating_ip: true
+ placement: "pgrp1"
+ networks:
+ mgmt:
+ cidr: '10.0.1.0/24'
+{% for vport in range(1,vports,2|int) %}
+ uplink_{{loop.index0}}:
+ cidr: '10.1.{{vport}}.0/24'
+ gateway_ip: 'null'
+ port_security_enabled: False
+ enable_dhcp: 'false'
+ downlink_{{loop.index0}}:
+ cidr: '10.1.{{vport+1}}.0/24'
+ gateway_ip: 'null'
+ port_security_enabled: False
+ enable_dhcp: 'false'
+{% endfor %}
diff --git a/samples/vnf_samples/nsut/vfw/vfw-tg-topology-scale-up.yaml b/samples/vnf_samples/nsut/vfw/vfw-tg-topology-scale-up.yaml
new file mode 100644
index 000000000..d4bf8d6d1
--- /dev/null
+++ b/samples/vnf_samples/nsut/vfw/vfw-tg-topology-scale-up.yaml
@@ -0,0 +1,52 @@
+# Copyright (c) 2016-2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+---
+{% set vports = get(extra_args, 'vports', '2') %}
+nsd:nsd-catalog:
+ nsd:
+ - id: 3tg-topology
+ name: 3tg-topology
+ short-name: 3tg-topology
+ description: 3tg-topology
+ constituent-vnfd:
+ - member-vnf-index: '1'
+ vnfd-id-ref: tg__0
+ VNF model: ../../vnf_descriptors/tg_rfc2544_tpl.yaml #VNF type
+ - member-vnf-index: '2'
+ vnfd-id-ref: vnf__0
+ VNF model: ../../vnf_descriptors/vfw_vnf.yaml #VNF type
+
+ vld:
+{% for vport in range(0,vports,2|int) %}
+ - id: uplink_{{loop.index0}}
+ name: tg__0 to vnf__0 link {{vport + 1}}
+ type: ELAN
+ vnfd-connection-point-ref:
+ - member-vnf-index-ref: '1'
+ vnfd-connection-point-ref: xe{{vport}}
+ vnfd-id-ref: tg__0
+ - member-vnf-index-ref: '2'
+ vnfd-connection-point-ref: xe{{vport}}
+ vnfd-id-ref: vnf__0
+ - id: downlink_{{loop.index0}}
+ name: vnf__0 to tg__0 link {{vport + 2}}
+ type: ELAN
+ vnfd-connection-point-ref:
+ - member-vnf-index-ref: '2'
+ vnfd-connection-point-ref: xe{{vport+1}}
+ vnfd-id-ref: vnf__0
+ - member-vnf-index-ref: '1'
+ vnfd-connection-point-ref: xe{{vport+1}}
+ vnfd-id-ref: tg__0
+{% endfor %}
diff --git a/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_4_ports_2_lb_1_sw.conf b/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_4_ports_2_lb_1_sw.conf
new file mode 100644
index 000000000..b31d0546c
--- /dev/null
+++ b/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_4_ports_2_lb_1_sw.conf
@@ -0,0 +1,52 @@
+[PIPELINE0]
+type = MASTER
+core = 0
+
+[PIPELINE1]
+type = ARPICMP
+core = 0
+pktq_in = SWQ0
+pktq_out = SWQ1
+pktq_in_prv = RXQ1.0
+prv_to_pub_map = (1,0)
+
+[PIPELINE2]
+type = TXRX
+core = 1
+pipeline_txrx_type = RXRX
+dest_if_offset = 176
+pktq_in = RXQ1.0 RXQ0.0
+pktq_out = SWQ2 SWQ3 SWQ0
+
+[PIPELINE3]
+type = LOADB
+core = 2
+pktq_in = SWQ2 SWQ3
+pktq_out = SWQ4 SWQ5
+outport_offset = 136
+n_vnf_threads = 1
+n_lb_tuples = 5
+loadb_debug = 0
+lib_arp_debug = 0
+prv_que_handler = (0,)
+
+[PIPELINE4]
+type = VFW
+core = 3
+pktq_in = SWQ4 SWQ5
+pktq_out = SWQ6 SWQ7
+n_rules = 10
+prv_que_handler = (0)
+n_flows = 2000000
+traffic_type = 4
+pkt_type = ipv4
+tcp_be_liberal = 0
+
+[PIPELINE5]
+type = TXRX
+core = 1
+pipeline_txrx_type = TXTX
+dest_if_offset = 176
+pktq_in = SWQ6 SWQ7 SWQ1
+pktq_out = TXQ1.0 TXQ0.0
+
diff --git a/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_4_ports_4_lb_1_sw.conf b/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_4_ports_4_lb_1_sw.conf
new file mode 100644
index 000000000..3bf8dc68b
--- /dev/null
+++ b/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_4_ports_4_lb_1_sw.conf
@@ -0,0 +1,52 @@
+
+[PIPELINE0]
+type = MASTER
+core = 0
+
+[PIPELINE1]
+type = ARPICMP
+core = 0
+pktq_in = SWQ0
+pktq_out = SWQ1
+pktq_in_prv = RXQ2.0 RXQ3.0
+prv_to_pub_map = (2,0)(3,1)
+
+[PIPELINE2]
+type = TXRX
+core = 1
+pipeline_txrx_type = RXRX
+dest_if_offset = 176
+pktq_in = RXQ2.0 RXQ0.0 RXQ3.0 RXQ1.0
+pktq_out = SWQ2 SWQ3 SWQ4 SWQ5 SWQ0
+
+[PIPELINE3]
+type = LOADB
+core = 2
+pktq_in = SWQ2 SWQ3 SWQ4 SWQ5
+pktq_out = SWQ6 SWQ7 SWQ8 SWQ9
+outport_offset = 136
+n_vnf_threads = 1
+n_lb_tuples = 5
+loadb_debug = 0
+lib_arp_debug = 0
+prv_que_handler = (0,2,)
+
+[PIPELINE4]
+type = VFW
+core = 3
+pktq_in = SWQ6 SWQ7 SWQ8 SWQ9
+pktq_out = SWQ10 SWQ11 SWQ12 SWQ13
+n_rules = 10
+prv_que_handler = (0)
+n_flows = 2000000
+traffic_type = 4
+pkt_type = ipv4
+tcp_be_liberal = 0
+
+[PIPELINE5]
+type = TXRX
+core = 1
+pipeline_txrx_type = TXTX
+dest_if_offset = 176
+pktq_in = SWQ10 SWQ11 SWQ12 SWQ13 SWQ1
+pktq_out = TXQ2.0 TXQ0.0 TXQ3.0 TXQ1.0
diff --git a/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_6_ports_6_lb_1_sw.conf b/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_6_ports_6_lb_1_sw.conf
new file mode 100644
index 000000000..1d55d8855
--- /dev/null
+++ b/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_6_ports_6_lb_1_sw.conf
@@ -0,0 +1,51 @@
+[PIPELINE0]
+type = MASTER
+core = 0
+
+[PIPELINE1]
+type = ARPICMP
+core = 0
+pktq_in = SWQ0
+pktq_out = SWQ1
+pktq_in_prv = RXQ5.0 RXQ3.0 RXQ4.0
+prv_to_pub_map = (5,2)(3,0)(4,1)
+
+[PIPELINE2]
+type = TXRX
+core = 1
+pipeline_txrx_type = RXRX
+dest_if_offset = 176
+pktq_in = RXQ5.0 RXQ2.0 RXQ3.0 RXQ0.0 RXQ4.0 RXQ1.0
+pktq_out = SWQ2 SWQ3 SWQ4 SWQ5 SWQ6 SWQ7 SWQ0
+
+[PIPELINE3]
+type = LOADB
+core = 2
+pktq_in = SWQ2 SWQ3 SWQ4 SWQ5 SWQ6 SWQ7
+pktq_out = SWQ8 SWQ9 SWQ10 SWQ11 SWQ12 SWQ13
+outport_offset = 136
+n_vnf_threads = 1
+n_lb_tuples = 5
+loadb_debug = 0
+lib_arp_debug = 0
+prv_que_handler = (0,2,4,)
+
+[PIPELINE4]
+type = VFW
+core = 3
+pktq_in = SWQ8 SWQ9 SWQ10 SWQ11 SWQ12 SWQ13
+pktq_out = SWQ14 SWQ15 SWQ16 SWQ17 SWQ18 SWQ19
+n_rules = 10
+prv_que_handler = (0)
+n_flows = 2000000
+traffic_type = 4
+pkt_type = ipv4
+tcp_be_liberal = 0
+
+[PIPELINE5]
+type = TXRX
+core = 1
+pipeline_txrx_type = TXTX
+dest_if_offset = 176
+pktq_in = SWQ14 SWQ15 SWQ16 SWQ17 SWQ18 SWQ19 SWQ1
+pktq_out = TXQ5.0 TXQ2.0 TXQ3.0 TXQ0.0 TXQ4.0 TXQ1.0
diff --git a/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_6_ports_8_lb_1_sw.conf b/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_6_ports_8_lb_1_sw.conf
new file mode 100644
index 000000000..8434fee34
--- /dev/null
+++ b/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_6_ports_8_lb_1_sw.conf
@@ -0,0 +1,52 @@
+[PIPELINE0]
+type = MASTER
+core = 0
+
+[PIPELINE1]
+type = ARPICMP
+core = 0
+pktq_in = SWQ0
+pktq_out = SWQ1
+pktq_in_prv = RXQ6.0 RXQ7.0 RXQ4.0 RXQ5.0
+prv_to_pub_map = (6,2)(7,3)(4,0)(5,1)
+
+[PIPELINE2]
+type = TXRX
+core = 1
+pipeline_txrx_type = RXRX
+dest_if_offset = 176
+pktq_in = RXQ6.0 RXQ2.0 RXQ7.0 RXQ3.0 RXQ4.0 RXQ0.0 RXQ5.0 RXQ1.0
+pktq_out = SWQ2 SWQ3 SWQ4 SWQ5 SWQ6 SWQ7 SWQ8 SWQ9 SWQ0
+
+[PIPELINE3]
+type = LOADB
+core = 2
+pktq_in = SWQ2 SWQ3 SWQ4 SWQ5 SWQ6 SWQ7 SWQ8 SWQ9
+pktq_out = SWQ10 SWQ11 SWQ12 SWQ13 SWQ14 SWQ15 SWQ16 SWQ17
+outport_offset = 136
+n_vnf_threads = 1
+n_lb_tuples = 5
+loadb_debug = 0
+lib_arp_debug = 0
+prv_que_handler = (0,2,4,6,)
+
+[PIPELINE4]
+type = VFW
+core = 3
+pktq_in = SWQ10 SWQ11 SWQ12 SWQ13 SWQ14 SWQ15 SWQ16 SWQ17
+pktq_out = SWQ18 SWQ19 SWQ20 SWQ21 SWQ22 SWQ23 SWQ24 SWQ25
+n_rules = 10
+prv_que_handler = (0)
+n_flows = 2000000
+traffic_type = 4
+pkt_type = ipv4
+tcp_be_liberal = 0
+
+[PIPELINE5]
+type = TXRX
+core = 1
+pipeline_txrx_type = TXTX
+dest_if_offset = 176
+pktq_in = SWQ18 SWQ19 SWQ20 SWQ21 SWQ22 SWQ23 SWQ24 SWQ25 SWQ1
+pktq_out = TXQ6.0 TXQ2.0 TXQ7.0 TXQ3.0 TXQ4.0 TXQ0.0 TXQ5.0 TXQ1.0
+
diff --git a/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_8_ports_10_lb_1_sw.conf b/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_8_ports_10_lb_1_sw.conf
new file mode 100644
index 000000000..51d97e0f8
--- /dev/null
+++ b/samples/vnf_samples/nsut/vfw/vfw_vnf_pipeline_cores_8_ports_10_lb_1_sw.conf
@@ -0,0 +1,52 @@
+[PIPELINE0]
+type = MASTER
+core = 0
+
+[PIPELINE1]
+type = ARPICMP
+core = 0
+pktq_in = SWQ0
+pktq_out = SWQ1
+pktq_in_prv = RXQ7.0 RXQ8.0 RXQ5.0 RXQ6.0 RXQ9.0
+prv_to_pub_map = (7,2)(8,3)(5,0)(6,1)(9,4)
+
+[PIPELINE2]
+type = TXRX
+core = 1
+pipeline_txrx_type = RXRX
+dest_if_offset = 176
+pktq_in = RXQ7.0 RXQ2.0 RXQ8.0 RXQ3.0 RXQ5.0 RXQ0.0 RXQ6.0 RXQ1.0 RXQ9.0 RXQ4.0
+pktq_out = SWQ2 SWQ3 SWQ4 SWQ5 SWQ6 SWQ7 SWQ8 SWQ9 SWQ10 SWQ11 SWQ0
+
+[PIPELINE3]
+type = LOADB
+core = 2
+pktq_in = SWQ2 SWQ3 SWQ4 SWQ5 SWQ6 SWQ7 SWQ8 SWQ9 SWQ10 SWQ11
+pktq_out = SWQ12 SWQ13 SWQ14 SWQ15 SWQ16 SWQ17 SWQ18 SWQ19 SWQ20 SWQ21
+outport_offset = 136
+n_vnf_threads = 1
+n_lb_tuples = 5
+loadb_debug = 0
+lib_arp_debug = 0
+prv_que_handler = (0,2,4,6,8,)
+
+[PIPELINE4]
+type = VFW
+core = 3
+pktq_in = SWQ12 SWQ13 SWQ14 SWQ15 SWQ16 SWQ17 SWQ18 SWQ19 SWQ20 SWQ21
+pktq_out = SWQ22 SWQ23 SWQ24 SWQ25 SWQ26 SWQ27 SWQ28 SWQ29 SWQ30 SWQ31
+n_rules = 10
+prv_que_handler = (0)
+n_flows = 2000000
+traffic_type = 4
+pkt_type = ipv4
+tcp_be_liberal = 0
+
+[PIPELINE5]
+type = TXRX
+core = 1
+pipeline_txrx_type = TXTX
+dest_if_offset = 176
+pktq_in = SWQ22 SWQ23 SWQ24 SWQ25 SWQ26 SWQ27 SWQ28 SWQ29 SWQ30 SWQ31 SWQ1
+pktq_out = TXQ7.0 TXQ2.0 TXQ8.0 TXQ3.0 TXQ5.0 TXQ0.0 TXQ6.0 TXQ1.0 TXQ9.0 TXQ4.0
+
diff --git a/samples/vnf_samples/traffic_profiles/ipv4_throughput-scale-up.yaml b/samples/vnf_samples/traffic_profiles/ipv4_throughput-scale-up.yaml
new file mode 100644
index 000000000..d2cc18c15
--- /dev/null
+++ b/samples/vnf_samples/traffic_profiles/ipv4_throughput-scale-up.yaml
@@ -0,0 +1,102 @@
+# Copyright (c) 2016-2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# flow definition for ACL tests - 1K flows - ipv4 only
+#
+# the flow count limits how much of each parameter range is actually used:
+# for example, with srcip_range=1.0.0.1-1.0.0.255 and dst_ip_range=10.0.0.1-10.0.1.255
+# a flow count of 16 still results in only 16 flows being defined
+#
+# the assumption is that the generated packets will carry a random sequence of the
+# following address pairs:
+# 1. src=1.x.x.x(x.x.x =random from 1..255) dst=10.x.x.x (random from 1..512)
+# 2. src=1.x.x.x(x.x.x =random from 1..255) dst=10.x.x.x (random from 1..512)
+# ...
+# 512. src=1.x.x.x(x.x.x =random from 1..255) dst=10.x.x.x (random from 1..512)
+#
+# not every combination needs to be filled in
+# any other field given a random range is added to the flow definition
+#
+# the example.yaml file lists all of the options available for traffic generation
+#
+# the profile defines a public and a private side so that limited traffic correlation
+# between the two sides is possible, in the same way the IXIA solution does it.
+#
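+# illustrative example (the count value below is hypothetical): with
+#   src_ip '1.1.1.1-1.1.255.255', dst_ip '90.90.1.1-90.90.255.255' and count: 10,
+# the generator draws ten source/destination address pairs from those ranges and
+# replays that fixed set of flows for the duration of the run
+#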
+{% set vports = get(extra_args, 'vports', '2') %}
+---
+schema: "nsb:traffic_profile:0.1"
+
+# This file is a template; it is filled with values from tc.yaml before being passed to the traffic generator
+
+name: rfc2544
+description: Traffic profile to run RFC2544 latency
+traffic_profile:
+ traffic_type: RFC2544Profile # defines traffic behavior - constant or look for highest possible throughput
+ frame_rate: 100 # percentage of line rate
+ # that specifies a range (e.g. ipv4 address, port)
+{% set count = 0 %}
+{% for vport in range(vports|int) %}
+uplink_{{vport}}:
+ ipv4:
+ id: {{count + 1 }}
+ outer_l2:
+ framesize:
+ 64B: "{{ get(imix, 'imix.uplink.64B', '0') }}"
+ 128B: "{{ get(imix, 'imix.uplink.128B', '0') }}"
+ 256B: "{{ get(imix, 'imix.uplink.256B', '0') }}"
+ 373b: "{{ get(imix, 'imix.uplink.373B', '0') }}"
+ 512B: "{{ get(imix, 'imix.uplink.512B', '0') }}"
+ 570B: "{{ get(imix, 'imix.uplink.570B', '0') }}"
+ 1400B: "{{ get(imix, 'imix.uplink.1400B', '0') }}"
+ 1500B: "{{ get(imix, 'imix.uplink.1500B', '0') }}"
+ 1518B: "{{ get(imix, 'imix.uplink.1518B', '0') }}"
+ outer_l3v4:
+ proto: "udp"
+ srcip4: "{{ get(flow, 'flow.src_ip_{{vport}}', '1.1.1.1-1.1.255.255') }}"
+ dstip4: "{{ get(flow, 'flow.dst_ip_{{vport}}', '90.90.1.1-90.90.255.255') }}"
+ count: "{{ get(flow, 'flow.count', '1') }}"
+ ttl: 32
+ dscp: 0
+ outer_l4:
+ srcport: "{{ get(flow, 'flow.src_port_{{vport}}', '1234-4321') }}"
+ dstport: "{{ get(flow, 'flow.dst_port_{{vport}}', '2001-4001') }}"
+ count: "{{ get(flow, 'flow.count', '1') }}"
+downlink_{{vport}}:
+ ipv4:
+ id: {{count + 2}}
+ outer_l2:
+ framesize:
+ 64B: "{{ get(imix, 'imix.downlink.64B', '0') }}"
+ 128B: "{{ get(imix, 'imix.downlink.128B', '0') }}"
+ 256B: "{{ get(imix, 'imix.downlink.256B', '0') }}"
+ 373b: "{{ get(imix, 'imix.downlink.373B', '0') }}"
+ 512B: "{{ get(imix, 'imix.downlink.512B', '0') }}"
+ 570B: "{{ get(imix, 'imix.downlink.570B', '0') }}"
+ 1400B: "{{ get(imix, 'imix.downlink.1400B', '0') }}"
+ 1500B: "{{ get(imix, 'imix.downlink.1500B', '0') }}"
+ 1518B: "{{ get(imix, 'imix.downlink.1518B', '0') }}"
+
+ outer_l3v4:
+ proto: "udp"
+ srcip4: "{{ get(flow, 'flow.dst_ip_{{vport}}', '90.90.1.1-90.90.255.255') }}"
+ dstip4: "{{ get(flow, 'flow.src_ip_{{vport}}', '1.1.1.1-1.1.255.255') }}"
+ count: "{{ get(flow, 'flow.count', '1') }}"
+ ttl: 32
+ dscp: 0
+ outer_l4:
+ srcport: "{{ get(flow, 'flow.dst_port_{{vport}}', '1234-4321') }}"
+ dstport: "{{ get(flow, 'flow.src_port_{{vport}}', '2001-4001') }}"
+ count: "{{ get(flow, 'flow.count', '1') }}"
+{% set count = count + 2 %}
+{% endfor %} \ No newline at end of file
diff --git a/samples/vnf_samples/traffic_profiles/prox_binsearch.yaml b/samples/vnf_samples/traffic_profiles/prox_binsearch.yaml
index 805250ee3..e1a4f59de 100644
--- a/samples/vnf_samples/traffic_profiles/prox_binsearch.yaml
+++ b/samples/vnf_samples/traffic_profiles/prox_binsearch.yaml
@@ -21,9 +21,9 @@ traffic_profile:
traffic_type: ProxBinSearchProfile
tolerated_loss: 0.001
test_precision: 0.1
-# packet_sizes: [64, 128, 256, 512, 1024, 1280, 1518]
- packet_sizes: [64]
- duration: 10
+ packet_sizes: [64, 128, 256, 512, 1024, 1280, 1518]
+ # packet_sizes: [64]
+ duration: 30
lower_bound: 0.0
upper_bound: 100.0
diff --git a/test-requirements.txt b/test-requirements.txt
index ee9815c45..4828e98b0 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -4,6 +4,7 @@
coverage==4.4.2 # Apache 2.0; OSI Approved Apache Software License; http://www.apache.org/licenses/LICENSE-2.0; http://www.apache.org/licenses/LICENSE-2.0
fixtures==3.0.0 # OSI Approved BSD License; OSI Approved Apache Software License
+oslotest===2.17.1 # OSI Approved Apache Software License
packaging==16.8.0 # BSD or Apache License, Version 2.0
pyflakes==1.0.0 # MIT; OSI Approved MIT License
pylint==1.8.1 # GPLv2
diff --git a/tests/ci/prepare_storperf_admin-rc.sh b/tests/ci/prepare_storperf_admin-rc.sh
index 558375e62..ef7c2297e 100755
--- a/tests/ci/prepare_storperf_admin-rc.sh
+++ b/tests/ci/prepare_storperf_admin-rc.sh
@@ -15,8 +15,9 @@ AUTH_URL=${OS_AUTH_URL}
USERNAME=${OS_USERNAME:-admin}
PASSWORD=${OS_PASSWORD:-console}
+# OS_TENANT_NAME is still present to keep backward compatibility with legacy
+# deployments, but should be replaced by OS_PROJECT_NAME.
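+# For example, a keystone v3 style openrc would typically contain
+#   export OS_PROJECT_NAME=admin      (preferred)
+# rather than the legacy
+#   export OS_TENANT_NAME=admin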
TENANT_NAME=${OS_TENANT_NAME:-admin}
-TENANT_ID=`openstack project show admin|grep '\bid\b' |awk -F '|' '{print $3}'|sed -e 's/^[[:space:]]*//'`
PROJECT_NAME=${OS_PROJECT_NAME:-$TENANT_NAME}
PROJECT_ID=`openstack project show admin|grep '\bid\b' |awk -F '|' '{print $3}'|sed -e 's/^[[:space:]]*//'`
@@ -30,8 +31,6 @@ echo "OS_USERNAME="$USERNAME >> ~/storperf_admin-rc
echo "OS_PASSWORD="$PASSWORD >> ~/storperf_admin-rc
echo "OS_PROJECT_NAME="$PROJECT_NAME >> ~/storperf_admin-rc
echo "OS_PROJECT_ID="$PROJECT_ID >> ~/storperf_admin-rc
-echo "OS_TENANT_NAME="$TENANT_NAME >> ~/storperf_admin-rc
-echo "OS_TENANT_ID="$TENANT_ID >> ~/storperf_admin-rc
echo "OS_USER_DOMAIN_ID="$USER_DOMAIN_ID >> ~/storperf_admin-rc
echo "OS_PROJECT_DOMAIN_NAME="$OS_PROJECT_DOMAIN_NAME >> ~/storperf_admin-rc
echo "OS_USER_DOMAIN_NAME="$OS_USER_DOMAIN_NAME >> ~/storperf_admin-rc
diff --git a/tests/opnfv/test_suites/opnfv_os-odl-bgpvpn-noha_daily.yaml b/tests/opnfv/test_suites/opnfv_os-odl-bgpvpn-noha_daily.yaml
new file mode 100644
index 000000000..e55b83301
--- /dev/null
+++ b/tests/opnfv/test_suites/opnfv_os-odl-bgpvpn-noha_daily.yaml
@@ -0,0 +1,23 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+# os-odl-bgpvpn-noha daily task suite
+
+schema: "yardstick:suite:0.1"
+
+name: "os-odl-bgpvpn-noha"
+test_cases_dir: "tests/opnfv/test_cases/"
+test_cases:
+-
+ file_name: opnfv_yardstick_tc002.yaml
+-
+ file_name: opnfv_yardstick_tc005.yaml
+-
+ file_name: opnfv_yardstick_tc012.yaml
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
index a468b272b..5935abbac 100644
--- a/tests/unit/__init__.py
+++ b/tests/unit/__init__.py
@@ -12,65 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from __future__ import absolute_import
-import mock
+from yardstick import tests
-STL_MOCKS = {
- 'trex_stl_lib': mock.MagicMock(),
- 'trex_stl_lib.base64': mock.MagicMock(),
- 'trex_stl_lib.binascii': mock.MagicMock(),
- 'trex_stl_lib.collections': mock.MagicMock(),
- 'trex_stl_lib.copy': mock.MagicMock(),
- 'trex_stl_lib.datetime': mock.MagicMock(),
- 'trex_stl_lib.functools': mock.MagicMock(),
- 'trex_stl_lib.imp': mock.MagicMock(),
- 'trex_stl_lib.inspect': mock.MagicMock(),
- 'trex_stl_lib.json': mock.MagicMock(),
- 'trex_stl_lib.linecache': mock.MagicMock(),
- 'trex_stl_lib.math': mock.MagicMock(),
- 'trex_stl_lib.os': mock.MagicMock(),
- 'trex_stl_lib.platform': mock.MagicMock(),
- 'trex_stl_lib.pprint': mock.MagicMock(),
- 'trex_stl_lib.random': mock.MagicMock(),
- 'trex_stl_lib.re': mock.MagicMock(),
- 'trex_stl_lib.scapy': mock.MagicMock(),
- 'trex_stl_lib.socket': mock.MagicMock(),
- 'trex_stl_lib.string': mock.MagicMock(),
- 'trex_stl_lib.struct': mock.MagicMock(),
- 'trex_stl_lib.sys': mock.MagicMock(),
- 'trex_stl_lib.threading': mock.MagicMock(),
- 'trex_stl_lib.time': mock.MagicMock(),
- 'trex_stl_lib.traceback': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_async_client': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_client': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_exceptions': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_ext': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_jsonrpc_client': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_packet_builder_interface': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_packet_builder_scapy': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_port': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_stats': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_streams': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_types': mock.MagicMock(),
- 'trex_stl_lib.types': mock.MagicMock(),
- 'trex_stl_lib.utils': mock.MagicMock(),
- 'trex_stl_lib.utils.argparse': mock.MagicMock(),
- 'trex_stl_lib.utils.collections': mock.MagicMock(),
- 'trex_stl_lib.utils.common': mock.MagicMock(),
- 'trex_stl_lib.utils.json': mock.MagicMock(),
- 'trex_stl_lib.utils.os': mock.MagicMock(),
- 'trex_stl_lib.utils.parsing_opts': mock.MagicMock(),
- 'trex_stl_lib.utils.pwd': mock.MagicMock(),
- 'trex_stl_lib.utils.random': mock.MagicMock(),
- 'trex_stl_lib.utils.re': mock.MagicMock(),
- 'trex_stl_lib.utils.string': mock.MagicMock(),
- 'trex_stl_lib.utils.sys': mock.MagicMock(),
- 'trex_stl_lib.utils.text_opts': mock.MagicMock(),
- 'trex_stl_lib.utils.text_tables': mock.MagicMock(),
- 'trex_stl_lib.utils.texttable': mock.MagicMock(),
- 'trex_stl_lib.warnings': mock.MagicMock(),
- 'trex_stl_lib.yaml': mock.MagicMock(),
- 'trex_stl_lib.zlib': mock.MagicMock(),
- 'trex_stl_lib.zmq': mock.MagicMock(),
-}
+# NOTE(ralonsoh): to be removed. Replace all occurrences of
+# tests.unit.STL_MOCKS with yardstick.tests.STL_MOCKS
+STL_MOCKS = tests.STL_MOCKS
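+# Illustrative usage in a test module once the migration is done
+# (sketch only, mirroring the existing stl_patch pattern used elsewhere):
+#     from yardstick.tests import STL_MOCKS
+#     stl_patch = mock.patch.dict("sys.modules", STL_MOCKS)
+#     stl_patch.start()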
diff --git a/tests/unit/network_services/helpers/test_dpdkbindnic_helper.py b/tests/unit/network_services/helpers/test_dpdkbindnic_helper.py
index e30aee854..cc980640c 100644
--- a/tests/unit/network_services/helpers/test_dpdkbindnic_helper.py
+++ b/tests/unit/network_services/helpers/test_dpdkbindnic_helper.py
@@ -16,6 +16,14 @@
import mock
import unittest
+
+import os
+
+from yardstick.error import IncorrectConfig, SSHError
+from yardstick.error import IncorrectNodeSetup
+from yardstick.error import IncorrectSetup
+from yardstick.network_services.helpers.dpdkbindnic_helper import DpdkInterface
+from yardstick.network_services.helpers.dpdkbindnic_helper import DpdkNode
from yardstick.network_services.helpers.dpdkbindnic_helper import DpdkBindHelper
from yardstick.network_services.helpers.dpdkbindnic_helper import DpdkBindHelperException
from yardstick.network_services.helpers.dpdkbindnic_helper import NETWORK_KERNEL
@@ -26,7 +34,269 @@ from yardstick.network_services.helpers.dpdkbindnic_helper import NETWORK_OTHER
from yardstick.network_services.helpers.dpdkbindnic_helper import CRYPTO_OTHER
+NAME = "tg_0"
+
+
+class TestDpdkInterface(unittest.TestCase):
+
+ SAMPLE_NETDEVS = {
+ 'enp11s0': {
+ 'address': '0a:de:ad:be:ef:f5',
+ 'device': '0x1533',
+ 'driver': 'igb',
+ 'ifindex': '2',
+ 'interface_name': 'enp11s0',
+ 'operstate': 'down',
+ 'pci_bus_id': '0000:0b:00.0',
+ 'subsystem_device': '0x1533',
+ 'subsystem_vendor': '0x15d9',
+ 'vendor': '0x8086'
+ },
+ 'lan': {
+ 'address': '0a:de:ad:be:ef:f4',
+ 'device': '0x153a',
+ 'driver': 'e1000e',
+ 'ifindex': '3',
+ 'interface_name': 'lan',
+ 'operstate': 'up',
+ 'pci_bus_id': '0000:00:19.0',
+ 'subsystem_device': '0x153a',
+ 'subsystem_vendor': '0x15d9',
+ 'vendor': '0x8086'
+ }
+ }
+
+ SAMPLE_VM_NETDEVS = {
+ 'eth1': {
+ 'address': 'fa:de:ad:be:ef:5b',
+ 'device': '0x0001',
+ 'driver': 'virtio_net',
+ 'ifindex': '3',
+ 'interface_name': 'eth1',
+ 'operstate': 'down',
+ 'pci_bus_id': '0000:00:04.0',
+ 'vendor': '0x1af4'
+ }
+ }
+
+ def test_parse_netdev_info(self):
+ output = """\
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/ifindex:2
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/address:0a:de:ad:be:ef:f5
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/operstate:down
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/device/vendor:0x8086
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/device/device:0x1533
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/device/subsystem_vendor:0x15d9
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/device/subsystem_device:0x1533
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/driver:igb
+/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/pci_bus_id:0000:0b:00.0
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/ifindex:3
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/address:0a:de:ad:be:ef:f4
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/operstate:up
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/device/vendor:0x8086
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/device/device:0x153a
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/device/subsystem_vendor:0x15d9
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/device/subsystem_device:0x153a
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/driver:e1000e
+/sys/devices/pci0000:00/0000:00:19.0/net/lan/pci_bus_id:0000:00:19.0
+"""
+ res = DpdkBindHelper.parse_netdev_info(output)
+ self.assertDictEqual(res, self.SAMPLE_NETDEVS)
+
+ def test_parse_netdev_info_virtio(self):
+ output = """\
+/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/ifindex:3
+/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/address:fa:de:ad:be:ef:5b
+/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/operstate:down
+/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/device/vendor:0x1af4
+/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/device/device:0x0001
+/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/driver:virtio_net
+"""
+ res = DpdkBindHelper.parse_netdev_info(output)
+ self.assertDictEqual(res, self.SAMPLE_VM_NETDEVS)
+
+ def test_probe_missing_values(self):
+ mock_dpdk_node = mock.Mock()
+ mock_dpdk_node.netdevs = self.SAMPLE_NETDEVS.copy()
+
+ interface = {'local_mac': '0a:de:ad:be:ef:f5'}
+ dpdk_intf = DpdkInterface(mock_dpdk_node, interface)
+
+ dpdk_intf.probe_missing_values()
+ self.assertEqual(interface['vpci'], '0000:0b:00.0')
+
+ interface['local_mac'] = '0a:de:ad:be:ef:f4'
+ dpdk_intf.probe_missing_values()
+ self.assertEqual(interface['vpci'], '0000:00:19.0')
+
+ def test_probe_missing_values_no_update(self):
+ mock_dpdk_node = mock.Mock()
+ mock_dpdk_node.netdevs = self.SAMPLE_NETDEVS.copy()
+ del mock_dpdk_node.netdevs['enp11s0']['driver']
+ del mock_dpdk_node.netdevs['lan']['driver']
+
+ interface = {'local_mac': '0a:de:ad:be:ef:f5'}
+ dpdk_intf = DpdkInterface(mock_dpdk_node, interface)
+
+ dpdk_intf.probe_missing_values()
+ self.assertNotIn('vpci', interface)
+ self.assertNotIn('driver', interface)
+
+ def test_probe_missing_values_negative(self):
+ mock_dpdk_node = mock.Mock()
+ mock_dpdk_node.netdevs.values.side_effect = IncorrectNodeSetup
+
+ interface = {'local_mac': '0a:de:ad:be:ef:f5'}
+ dpdk_intf = DpdkInterface(mock_dpdk_node, interface)
+
+ with self.assertRaises(IncorrectConfig):
+ dpdk_intf.probe_missing_values()
+
+
+class TestDpdkNode(unittest.TestCase):
+
+ INTERFACES = [
+ {'name': 'name1',
+ 'virtual-interface': {
+ 'local_mac': 404,
+ 'vpci': 'pci10',
+ }},
+ {'name': 'name2',
+ 'virtual-interface': {
+ 'local_mac': 404,
+ 'vpci': 'pci2',
+ }},
+ {'name': 'name3',
+ 'virtual-interface': {
+ 'local_mac': 404,
+ 'vpci': 'some-pci1',
+ }},
+ ]
+
+ def test_probe_dpdk_drivers(self):
+ mock_ssh_helper = mock.Mock()
+ mock_ssh_helper.execute.return_value = 0, '', ''
+
+ interfaces = [
+ {'name': 'name1',
+ 'virtual-interface': {
+ 'local_mac': 404,
+ 'vpci': 'pci10',
+ }},
+ {'name': 'name2',
+ 'virtual-interface': {
+ 'local_mac': 404,
+ 'vpci': 'pci2',
+ }},
+ {'name': 'name3',
+ 'virtual-interface': {
+ 'local_mac': 404,
+ 'vpci': 'some-pci1',
+ }},
+ ]
+
+ dpdk_node = DpdkNode(NAME, interfaces, mock_ssh_helper)
+ dpdk_helper = dpdk_node.dpdk_helper
+
+ dpdk_helper.probe_real_kernel_drivers = mock.Mock()
+ dpdk_helper.real_kernel_interface_driver_map = {
+ 'pci1': 'driver1',
+ 'pci2': 'driver2',
+ 'pci3': 'driver3',
+ 'pci4': 'driver1',
+ 'pci6': 'driver3',
+ }
+
+ dpdk_node._probe_dpdk_drivers()
+ self.assertNotIn('driver', interfaces[0]['virtual-interface'])
+ self.assertEqual(interfaces[1]['virtual-interface']['driver'], 'driver2')
+ self.assertEqual(interfaces[2]['virtual-interface']['driver'], 'driver1')
+
+ def test_check(self):
+ def update():
+ if not mock_force_rebind.called:
+ raise IncorrectConfig
+
+ interfaces[0]['virtual-interface'].update({
+ 'vpci': '0000:01:02.1',
+ 'local_ip': '10.20.30.40',
+ 'netmask': '255.255.0.0',
+ 'driver': 'ixgbe',
+ })
+
+ mock_ssh_helper = mock.Mock()
+ mock_ssh_helper.execute.return_value = 0, '', ''
+
+ interfaces = [
+ {'name': 'name1',
+ 'virtual-interface': {
+ 'local_mac': 404,
+ }},
+ ]
+
+ dpdk_node = DpdkNode(NAME, interfaces, mock_ssh_helper)
+ dpdk_node._probe_missing_values = mock_probe_missing = mock.Mock(side_effect=update)
+ dpdk_node._force_rebind = mock_force_rebind = mock.Mock()
+
+ self.assertIsNone(dpdk_node.check())
+ self.assertEqual(mock_probe_missing.call_count, 2)
+
+ @mock.patch('yardstick.network_services.helpers.dpdkbindnic_helper.DpdkInterface')
+ def test_check_negative(self, mock_intf_type):
+ mock_ssh_helper = mock.Mock()
+ mock_ssh_helper.execute.return_value = 0, '', ''
+
+ mock_intf_type().check.side_effect = SSHError
+
+ dpdk_node = DpdkNode(NAME, self.INTERFACES, mock_ssh_helper)
+
+ with self.assertRaises(IncorrectSetup):
+ dpdk_node.check()
+
+ def test_probe_netdevs(self):
+ mock_ssh_helper = mock.Mock()
+ mock_ssh_helper.execute.return_value = 0, '', ''
+
+ expected = {'key1': 500, 'key2': 'hello world'}
+ update = {'key1': 1000, 'key3': []}
+
+ dpdk_node = DpdkNode(NAME, self.INTERFACES, mock_ssh_helper)
+ dpdk_helper = dpdk_node.dpdk_helper
+ dpdk_helper.find_net_devices = mock.Mock(side_effect=[expected, update])
+
+ self.assertDictEqual(dpdk_node.netdevs, {})
+ dpdk_node._probe_netdevs()
+ self.assertDictEqual(dpdk_node.netdevs, expected)
+
+ expected = {'key1': 1000, 'key2': 'hello world', 'key3': []}
+ dpdk_node._probe_netdevs()
+ self.assertDictEqual(dpdk_node.netdevs, expected)
+
+ def test_probe_netdevs_setup_negative(self):
+ mock_ssh_helper = mock.Mock()
+ mock_ssh_helper.execute.return_value = 0, '', ''
+
+ dpdk_node = DpdkNode(NAME, self.INTERFACES, mock_ssh_helper)
+ dpdk_helper = dpdk_node.dpdk_helper
+ dpdk_helper.find_net_devices = mock.Mock(side_effect=DpdkBindHelperException)
+
+ with self.assertRaises(DpdkBindHelperException):
+ dpdk_node._probe_netdevs()
+
+ def test_force_rebind(self):
+ mock_ssh_helper = mock.Mock()
+ mock_ssh_helper.execute.return_value = 0, '', ''
+
+ dpdk_node = DpdkNode(NAME, self.INTERFACES, mock_ssh_helper)
+ dpdk_helper = dpdk_node.dpdk_helper
+ dpdk_helper.force_dpdk_rebind = mock_helper_func = mock.Mock()
+
+ dpdk_node._force_rebind()
+ self.assertEqual(mock_helper_func.call_count, 1)
+
+
class TestDpdkBindHelper(unittest.TestCase):
+ bin_path = "/opt/nsb_bin"
EXAMPLE_OUTPUT = """
Network devices using DPDK-compatible driver
@@ -111,13 +381,15 @@ Other crypto devices
def test___init__(self):
conn = mock.Mock()
conn.provision_tool = mock.Mock(return_value='path_to_tool')
+ conn.join_bin_path.return_value = os.path.join(self.bin_path, DpdkBindHelper.DPDK_DEVBIND)
dpdk_bind_helper = DpdkBindHelper(conn)
self.assertEqual(conn, dpdk_bind_helper.ssh_helper)
self.assertEqual(self.CLEAN_STATUS, dpdk_bind_helper.dpdk_status)
self.assertIsNone(dpdk_bind_helper.status_nic_row_re)
- self.assertIsNone(dpdk_bind_helper._dpdk_devbind)
+ self.assertEqual(dpdk_bind_helper.dpdk_devbind,
+ os.path.join(self.bin_path, dpdk_bind_helper.DPDK_DEVBIND))
self.assertIsNone(dpdk_bind_helper._status_cmd_attr)
def test__dpdk_execute(self):
@@ -125,8 +397,7 @@ Other crypto devices
conn.execute = mock.Mock(return_value=(0, 'output', 'error'))
conn.provision_tool = mock.Mock(return_value='tool_path')
dpdk_bind_helper = DpdkBindHelper(conn)
- self.assertEqual((0, 'output', 'error'),
- dpdk_bind_helper._dpdk_execute('command'))
+ self.assertEqual((0, 'output', 'error'), dpdk_bind_helper._dpdk_execute('command'))
def test__dpdk_execute_failure(self):
conn = mock.Mock()
@@ -141,7 +412,7 @@ Other crypto devices
dpdk_bind_helper = DpdkBindHelper(conn)
- dpdk_bind_helper._addline(NETWORK_KERNEL, self.ONE_INPUT_LINE)
+ dpdk_bind_helper._add_line(NETWORK_KERNEL, self.ONE_INPUT_LINE)
self.assertIsNotNone(dpdk_bind_helper.dpdk_status)
self.assertEqual(self.ONE_INPUT_LINE_PARSED, dpdk_bind_helper.dpdk_status[NETWORK_KERNEL])
@@ -161,11 +432,35 @@ Other crypto devices
dpdk_bind_helper = DpdkBindHelper(conn)
- dpdk_bind_helper.parse_dpdk_status_output(self.EXAMPLE_OUTPUT)
+ dpdk_bind_helper._parse_dpdk_status_output(self.EXAMPLE_OUTPUT)
self.maxDiff = None
self.assertEqual(self.PARSED_EXAMPLE, dpdk_bind_helper.dpdk_status)
+ def test_kernel_bound_pci_addresses(self):
+ mock_ssh_helper = mock.Mock()
+ mock_ssh_helper.execute.return_value = 0, '', ''
+
+ expected = ['a', 'b', 3]
+
+ dpdk_helper = DpdkBindHelper(mock_ssh_helper)
+ dpdk_helper.dpdk_status = {
+ NETWORK_DPDK: [{'vpci': 4}, {'vpci': 5}, {'vpci': 'g'}],
+ NETWORK_KERNEL: [{'vpci': 'a'}, {'vpci': 'b'}, {'vpci': 3}],
+ CRYPTO_DPDK: [],
+ }
+
+ result = dpdk_helper.kernel_bound_pci_addresses
+ self.assertEqual(result, expected)
+
+ def test_find_net_devices_negative(self):
+ mock_ssh_helper = mock.Mock()
+ mock_ssh_helper.execute.return_value = 1, 'error', 'debug'
+
+ dpdk_helper = DpdkBindHelper(mock_ssh_helper)
+
+ self.assertDictEqual(dpdk_helper.find_net_devices(), {})
+
def test_read_status(self):
conn = mock.Mock()
conn.execute = mock.Mock(return_value=(0, self.EXAMPLE_OUTPUT, ''))
@@ -180,7 +475,7 @@ Other crypto devices
dpdk_bind_helper = DpdkBindHelper(conn)
- dpdk_bind_helper.parse_dpdk_status_output(self.EXAMPLE_OUTPUT)
+ dpdk_bind_helper._parse_dpdk_status_output(self.EXAMPLE_OUTPUT)
self.assertEqual(['0000:00:04.0', '0000:00:05.0'],
dpdk_bind_helper._get_bound_pci_addresses(NETWORK_DPDK))
@@ -192,18 +487,18 @@ Other crypto devices
dpdk_bind_helper = DpdkBindHelper(conn)
- dpdk_bind_helper.parse_dpdk_status_output(self.EXAMPLE_OUTPUT)
+ dpdk_bind_helper._parse_dpdk_status_output(self.EXAMPLE_OUTPUT)
self.assertEqual({'0000:00:04.0': 'igb_uio',
- '0000:00:03.0': 'virtio-pci',
- '0000:00:05.0': 'igb_uio',
- },
- dpdk_bind_helper.interface_driver_map)
+ '0000:00:03.0': 'virtio-pci',
+ '0000:00:05.0': 'igb_uio',
+ },
+ dpdk_bind_helper.interface_driver_map)
def test_bind(self):
conn = mock.Mock()
conn.execute = mock.Mock(return_value=(0, '', ''))
- conn.provision_tool = mock.Mock(return_value='/opt/nsb_bin/dpdk-devbind.py')
+ conn.join_bin_path.return_value = os.path.join(self.bin_path, DpdkBindHelper.DPDK_DEVBIND)
dpdk_bind_helper = DpdkBindHelper(conn)
dpdk_bind_helper.read_status = mock.Mock()
@@ -217,7 +512,7 @@ Other crypto devices
def test_bind_single_pci(self):
conn = mock.Mock()
conn.execute = mock.Mock(return_value=(0, '', ''))
- conn.provision_tool = mock.Mock(return_value='/opt/nsb_bin/dpdk-devbind.py')
+ conn.join_bin_path.return_value = os.path.join(self.bin_path, DpdkBindHelper.DPDK_DEVBIND)
dpdk_bind_helper = DpdkBindHelper(conn)
dpdk_bind_helper.read_status = mock.Mock()
@@ -257,3 +552,84 @@ Other crypto devices
}
self.assertDictEqual(expected, dpdk_bind_helper.used_drivers)
+
+ def test_force_dpdk_rebind(self):
+ mock_ssh_helper = mock.Mock()
+ mock_ssh_helper.execute.return_value = 0, '', ''
+
+ dpdk_helper = DpdkBindHelper(mock_ssh_helper, 'driver2')
+ dpdk_helper.dpdk_status = {
+ NETWORK_DPDK: [
+ {
+ 'vpci': 'pci1',
+ },
+ {
+ 'vpci': 'pci3',
+ },
+ {
+ 'vpci': 'pci6',
+ },
+ {
+ 'vpci': 'pci3',
+ },
+ ]
+ }
+ dpdk_helper.real_kernel_interface_driver_map = {
+ 'pci1': 'real_driver1',
+ 'pci2': 'real_driver2',
+ 'pci3': 'real_driver1',
+ 'pci4': 'real_driver4',
+ 'pci6': 'real_driver6',
+ }
+ dpdk_helper.load_dpdk_driver = mock.Mock()
+ dpdk_helper.read_status = mock.Mock()
+ dpdk_helper.save_real_kernel_interface_driver_map = mock.Mock()
+ dpdk_helper.save_used_drivers = mock.Mock()
+ dpdk_helper.bind = mock_bind = mock.Mock()
+
+ dpdk_helper.force_dpdk_rebind()
+ self.assertEqual(mock_bind.call_count, 2)
+
+ def test_save_real_kernel_drivers(self):
+ mock_ssh_helper = mock.Mock()
+ mock_ssh_helper.execute.return_value = 0, '', ''
+
+ dpdk_helper = DpdkBindHelper(mock_ssh_helper)
+ dpdk_helper.real_kernel_drivers = {
+ 'abc': '123',
+ }
+ dpdk_helper.real_kernel_interface_driver_map = {
+ 'abc': 'AAA',
+ 'def': 'DDD',
+ 'abs': 'AAA',
+ 'ghi': 'GGG',
+ }
+
+ # save_used_drivers must be called before save_real_kernel_drivers can be
+ with self.assertRaises(AttributeError):
+ dpdk_helper.save_real_kernel_drivers()
+
+ dpdk_helper.save_used_drivers()
+
+ expected_used_drivers = {
+ 'AAA': ['abc', 'abs'],
+ 'DDD': ['def'],
+ 'GGG': ['ghi'],
+ }
+ dpdk_helper.save_real_kernel_drivers()
+ self.assertDictEqual(dpdk_helper.used_drivers, expected_used_drivers)
+ self.assertDictEqual(dpdk_helper.real_kernel_drivers, {})
+
+ def test_get_real_kernel_driver(self):
+ mock_ssh_helper = mock.Mock()
+ mock_ssh_helper.execute.side_effect = [
+ (0, 'non-matching text', ''),
+ (0, 'pre Kernel modules: real_driver1', ''),
+ (0, 'before Ethernet middle Virtio network device after', ''),
+ ]
+
+ dpdk_helper = DpdkBindHelper(mock_ssh_helper)
+
+ self.assertIsNone(dpdk_helper.get_real_kernel_driver('abc'))
+ self.assertEqual(dpdk_helper.get_real_kernel_driver('abc'), 'real_driver1')
+ self.assertEqual(dpdk_helper.get_real_kernel_driver('abc'), DpdkBindHelper.VIRTIO_DRIVER)
diff --git a/tests/unit/network_services/traffic_profile/test_base.py b/tests/unit/network_services/traffic_profile/test_base.py
index 290610361..33f803286 100644
--- a/tests/unit/network_services/traffic_profile/test_base.py
+++ b/tests/unit/network_services/traffic_profile/test_base.py
@@ -13,16 +13,16 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-#
-# Unittest for yardstick.network_services.traffic_profile.test_base
+import sys
-from __future__ import absolute_import
-import unittest
import mock
+import unittest
-from yardstick.network_services.traffic_profile.base import \
- TrafficProfile, DummyProfile
+from yardstick.common import exceptions
+from yardstick.network_services import traffic_profile as tprofile_package
+from yardstick.network_services.traffic_profile import base
+from yardstick import tests as y_tests
class TestTrafficProfile(unittest.TestCase):
@@ -43,20 +43,33 @@ class TestTrafficProfile(unittest.TestCase):
return _mock
def test___init__(self):
- traffic_profile = TrafficProfile(self.TRAFFIC_PROFILE)
+ traffic_profile = base.TrafficProfile(self.TRAFFIC_PROFILE)
self.assertEqual(self.TRAFFIC_PROFILE, traffic_profile.params)
def test_execute(self):
- traffic_profile = TrafficProfile(self.TRAFFIC_PROFILE)
- self.assertRaises(NotImplementedError, traffic_profile.execute_traffic, {})
+ traffic_profile = base.TrafficProfile(self.TRAFFIC_PROFILE)
+ self.assertRaises(NotImplementedError,
+ traffic_profile.execute_traffic, {})
+
+ def test_get_existing_traffic_profile(self):
+ traffic_profile_list = [
+ 'RFC2544Profile', 'FixedProfile', 'TrafficProfileGenericHTTP',
+ 'IXIARFC2544Profile', 'ProxACLProfile', 'ProxBinSearchProfile',
+ 'ProxProfile', 'ProxRampProfile']
+ with mock.patch.dict(sys.modules, y_tests.STL_MOCKS):
+ tprofile_package.register_modules()
+
+ for tp in traffic_profile_list:
+ traffic_profile = base.TrafficProfile.get(
+ {'traffic_profile': {'traffic_type': tp}})
+ self.assertEqual(tp, traffic_profile.__class__.__name__)
- def test_get(self):
- traffic_profile = TrafficProfile(self.TRAFFIC_PROFILE)
- self.assertRaises(RuntimeError, traffic_profile.get,
- self.TRAFFIC_PROFILE)
+ def test_get_non_existing_traffic_profile(self):
+ self.assertRaises(exceptions.TrafficProfileNotImplemented,
+ base.TrafficProfile.get, self.TRAFFIC_PROFILE)
class TestDummyProfile(unittest.TestCase):
def test_execute(self):
- dummy_profile = DummyProfile(TrafficProfile)
+ dummy_profile = base.DummyProfile(base.TrafficProfile)
self.assertIsNone(dummy_profile.execute({}))
diff --git a/tests/unit/network_services/traffic_profile/test_ixia_rfc2544.py b/tests/unit/network_services/traffic_profile/test_ixia_rfc2544.py
index f13945abf..f83d3341d 100644
--- a/tests/unit/network_services/traffic_profile/test_ixia_rfc2544.py
+++ b/tests/unit/network_services/traffic_profile/test_ixia_rfc2544.py
@@ -29,7 +29,7 @@ stl_patch = mock.patch.dict("sys.modules", STL_MOCKS)
stl_patch.start()
if stl_patch:
- from yardstick.network_services.traffic_profile.traffic_profile \
+ from yardstick.network_services.traffic_profile.trex_traffic_profile \
import TrexProfile
from yardstick.network_services.traffic_profile.ixia_rfc2544 import \
IXIARFC2544Profile
diff --git a/tests/unit/network_services/traffic_profile/test_prox_binsearch.py b/tests/unit/network_services/traffic_profile/test_prox_binsearch.py
index c1f1c825b..1b4189b48 100644
--- a/tests/unit/network_services/traffic_profile/test_prox_binsearch.py
+++ b/tests/unit/network_services/traffic_profile/test_prox_binsearch.py
@@ -32,7 +32,7 @@ if stl_patch:
class TestProxBinSearchProfile(unittest.TestCase):
def test_execute_1(self):
- def target(*args, **kwargs):
+ def target(*args, **_):
runs.append(args[2])
if args[2] < 0 or args[2] > 100:
raise RuntimeError(' '.join([str(args), str(runs)]))
@@ -43,6 +43,8 @@ class TestProxBinSearchProfile(unittest.TestCase):
tp_config = {
'traffic_profile': {
'packet_sizes': [200],
+ 'test_precision': 2.0,
+ 'tolerated_loss': 0.001,
},
}
@@ -61,11 +63,47 @@ class TestProxBinSearchProfile(unittest.TestCase):
profile.execute_traffic(traffic_generator)
self.assertEqual(round(profile.current_lower, 2), 74.69)
- self.assertEqual(round(profile.current_upper, 2), 75.39)
- self.assertEqual(len(runs), 8)
+ self.assertEqual(round(profile.current_upper, 2), 76.09)
+ self.assertEqual(len(runs), 7)
+
+ # Result Samples inc theor_max
+ result_tuple = {"Result_Actual_throughput": 7.5e-07,
+ "Result_theor_max_throughput": 0.00012340000000000002,
+ "Result_pktSize": 200}
+ profile.queue.put.assert_called_with(result_tuple)
+
+ success_result_tuple = {"Success_CurrentDropPackets": 0.5,
+ "Success_DropPackets": 0.5,
+ "Success_LatencyAvg": 5.3,
+ "Success_LatencyMax": 5.2,
+ "Success_LatencyMin": 5.1,
+ "Success_PktSize": 200,
+ "Success_RxThroughput": 7.5e-07,
+ "Success_Throughput": 7.5e-07,
+ "Success_TxThroughput": 0.00012340000000000002}
+
+ calls = profile.queue.put(success_result_tuple)
+ profile.queue.put.assert_has_calls(calls)
+
+ success_result_tuple2 = {"Success_CurrentDropPackets": 0.5,
+ "Success_DropPackets": 0.5,
+ "Success_LatencyAvg": 5.3,
+ "Success_LatencyMax": 5.2,
+ "Success_LatencyMin": 5.1,
+ "Success_PktSize": 200,
+ "Success_RxThroughput": 7.5e-07,
+ "Success_Throughput": 7.5e-07,
+ "Success_TxThroughput": 123.4,
+ "Success_can_be_lost": 409600,
+ "Success_drop_total": 20480,
+ "Success_rx_total": 4075520,
+ "Success_tx_total": 4096000}
+
+ calls = profile.queue.put(success_result_tuple2)
+ profile.queue.put.assert_has_calls(calls)
def test_execute_2(self):
- def target(*args, **kwargs):
+ def target(*args, **_):
runs.append(args[2])
if args[2] < 0 or args[2] > 100:
raise RuntimeError(' '.join([str(args), str(runs)]))
@@ -77,6 +115,7 @@ class TestProxBinSearchProfile(unittest.TestCase):
'traffic_profile': {
'packet_sizes': [200],
'test_precision': 2.0,
+ 'tolerated_loss': 0.001,
},
}
@@ -97,3 +136,50 @@ class TestProxBinSearchProfile(unittest.TestCase):
self.assertEqual(round(profile.current_lower, 2), 24.06)
self.assertEqual(round(profile.current_upper, 2), 25.47)
self.assertEqual(len(runs), 7)
+
+ def test_execute_3(self):
+ def target(*args, **_):
+ runs.append(args[2])
+ if args[2] < 0 or args[2] > 100:
+ raise RuntimeError(' '.join([str(args), str(runs)]))
+ if args[2] > 75.0:
+ return fail_tuple, {}
+ return success_tuple, {}
+
+ tp_config = {
+ 'traffic_profile': {
+ 'packet_sizes': [200],
+ 'test_precision': 2.0,
+ 'tolerated_loss': 0.001,
+ },
+ }
+
+ runs = []
+ success_tuple = ProxTestDataTuple(10.0, 1, 2, 3, 4, [5.1, 5.2, 5.3], 995, 1000, 123.4)
+ fail_tuple = ProxTestDataTuple(10.0, 1, 2, 3, 4, [5.6, 5.7, 5.8], 850, 1000, 123.4)
+
+ traffic_generator = mock.MagicMock()
+
+ profile_helper = mock.MagicMock()
+ profile_helper.run_test = target
+
+ profile = ProxBinSearchProfile(tp_config)
+ profile.init(mock.MagicMock())
+ profile._profile_helper = profile_helper
+
+ profile.upper_bound = 100.0
+ profile.lower_bound = 99.0
+ profile.execute_traffic(traffic_generator)
+
+
+ # Result Samples
+ result_tuple = {"Result_theor_max_throughput": 0, "Result_pktSize": 200}
+ profile.queue.put.assert_called_with(result_tuple)
+
+ # Check for success_ tuple (None expected)
+ calls = profile.queue.put.mock_calls
+ for call in calls:
+ for call_detail in call[1]:
+ for k in call_detail:
+ if "Success_" in k:
+ self.assertRaises(AttributeError)
diff --git a/tests/unit/network_services/traffic_profile/test_rfc2544.py b/tests/unit/network_services/traffic_profile/test_rfc2544.py
index 221233710..fe563e9fd 100644
--- a/tests/unit/network_services/traffic_profile/test_rfc2544.py
+++ b/tests/unit/network_services/traffic_profile/test_rfc2544.py
@@ -29,7 +29,7 @@ stl_patch = mock.patch.dict("sys.modules", STL_MOCKS)
stl_patch.start()
if stl_patch:
- from yardstick.network_services.traffic_profile.traffic_profile \
+ from yardstick.network_services.traffic_profile.trex_traffic_profile \
import TrexProfile
from yardstick.network_services.traffic_profile.rfc2544 import \
RFC2544Profile
@@ -164,8 +164,6 @@ class TestRFC2544Profile(unittest.TestCase):
"in_packets": 1000,
"out_packets": 1002,
}
- tol_min = 0.0
- tolerance = 1.0
expected = {
'DropPercentage': 0.1996,
'RxThroughput': 33.333333333333336,
@@ -208,8 +206,6 @@ class TestRFC2544Profile(unittest.TestCase):
"tx_throughput_mbps": 10,
"in_packets": 1000,
"out_packets": 0}
- tol_min = 0.0
- tolerance = 0.0
r_f_c2544_profile.throughput_max = 0
expected = {
'DropPercentage': 100.0, 'RxThroughput': 100 / 3.0,
diff --git a/tests/unit/network_services/traffic_profile/test_traffic_profile.py b/tests/unit/network_services/traffic_profile/test_trex_traffic_profile.py
index 37b9a08d0..c34b97531 100644
--- a/tests/unit/network_services/traffic_profile/test_traffic_profile.py
+++ b/tests/unit/network_services/traffic_profile/test_trex_traffic_profile.py
@@ -29,16 +29,16 @@ stl_patch.start()
if stl_patch:
from yardstick.network_services.traffic_profile.base import TrafficProfile
- from yardstick.network_services.traffic_profile.traffic_profile import TrexProfile
- from yardstick.network_services.traffic_profile.traffic_profile import SRC
- from yardstick.network_services.traffic_profile.traffic_profile import DST
- from yardstick.network_services.traffic_profile.traffic_profile import ETHERNET
- from yardstick.network_services.traffic_profile.traffic_profile import IP
- from yardstick.network_services.traffic_profile.traffic_profile import IPv6
- from yardstick.network_services.traffic_profile.traffic_profile import UDP
- from yardstick.network_services.traffic_profile.traffic_profile import SRC_PORT
- from yardstick.network_services.traffic_profile.traffic_profile import DST_PORT
- from yardstick.network_services.traffic_profile.traffic_profile import TYPE_OF_SERVICE
+ from yardstick.network_services.traffic_profile.trex_traffic_profile import TrexProfile
+ from yardstick.network_services.traffic_profile.trex_traffic_profile import SRC
+ from yardstick.network_services.traffic_profile.trex_traffic_profile import DST
+ from yardstick.network_services.traffic_profile.trex_traffic_profile import ETHERNET
+ from yardstick.network_services.traffic_profile.trex_traffic_profile import IP
+ from yardstick.network_services.traffic_profile.trex_traffic_profile import IPv6
+ from yardstick.network_services.traffic_profile.trex_traffic_profile import UDP
+ from yardstick.network_services.traffic_profile.trex_traffic_profile import SRC_PORT
+ from yardstick.network_services.traffic_profile.trex_traffic_profile import DST_PORT
+ from yardstick.network_services.traffic_profile.trex_traffic_profile import TYPE_OF_SERVICE
class TestTrexProfile(unittest.TestCase):
diff --git a/tests/unit/network_services/vnf_generic/vnf/test_acl_vnf.py b/tests/unit/network_services/vnf_generic/vnf/test_acl_vnf.py
index f9a10149e..a4055bf02 100644
--- a/tests/unit/network_services/vnf_generic/vnf/test_acl_vnf.py
+++ b/tests/unit/network_services/vnf_generic/vnf/test_acl_vnf.py
@@ -21,6 +21,7 @@ import os
from tests.unit import STL_MOCKS
from tests.unit.network_services.vnf_generic.vnf.test_base import mock_ssh
+from yardstick.common import utils
STLClient = mock.MagicMock()
@@ -312,7 +313,7 @@ class TestAclApproxVnf(unittest.TestCase):
acl_approx_vnf.ssh_helper.run.assert_called_once()
@mock.patch("yardstick.network_services.vnf_generic.vnf.acl_vnf.YangModel")
- @mock.patch("yardstick.network_services.vnf_generic.vnf.acl_vnf.find_relative_file")
+ @mock.patch.object(utils, 'find_relative_file')
@mock.patch("yardstick.network_services.vnf_generic.vnf.sample_vnf.Context")
@mock.patch(SSH_HELPER)
def test_instantiate(self, ssh, *args):
diff --git a/tests/unit/network_services/vnf_generic/vnf/test_prox_helpers.py b/tests/unit/network_services/vnf_generic/vnf/test_prox_helpers.py
index 0ac46c632..ce905182c 100644
--- a/tests/unit/network_services/vnf_generic/vnf/test_prox_helpers.py
+++ b/tests/unit/network_services/vnf_generic/vnf/test_prox_helpers.py
@@ -24,6 +24,7 @@ import mock
import unittest
from tests.unit import STL_MOCKS
+from yardstick.common import utils
from yardstick.network_services.vnf_generic.vnf.base import VnfdHelper
@@ -962,7 +963,7 @@ class TestProxDpdkVnfSetupEnvHelper(unittest.TestCase):
result = setup_helper.prox_config_data
self.assertEqual(result, expected)
- @mock.patch('yardstick.network_services.vnf_generic.vnf.prox_helpers.find_relative_file')
+ @mock.patch.object(utils, 'find_relative_file')
def test_build_config_file_no_additional_file(self, mock_find_path):
vnf1 = {
'prox_args': {'-c': ""},
@@ -996,7 +997,7 @@ class TestProxDpdkVnfSetupEnvHelper(unittest.TestCase):
self.assertEqual(helper._prox_config_data, '4')
self.assertEqual(helper.remote_path, '5')
- @mock.patch('yardstick.network_services.vnf_generic.vnf.prox_helpers.find_relative_file')
+ @mock.patch.object(utils, 'find_relative_file')
def test_build_config_file_additional_file_string(self, mock_find_path):
vnf1 = {
'prox_args': {'-c': ""},
@@ -1028,7 +1029,7 @@ class TestProxDpdkVnfSetupEnvHelper(unittest.TestCase):
helper.build_config_file()
self.assertDictEqual(helper.additional_files, expected)
- @mock.patch('yardstick.network_services.vnf_generic.vnf.prox_helpers.find_relative_file')
+ @mock.patch.object(utils, 'find_relative_file')
def test_build_config_file_additional_file(self, mock_find_path):
vnf1 = {
'prox_args': {'-c': ""},
@@ -1730,7 +1731,7 @@ class TestProxProfileHelper(unittest.TestCase):
}
self.assertIsNone(helper._test_cores)
- expected = [12, 23]
+ expected = [3, 4]
result = helper.test_cores
self.assertEqual(result, expected)
self.assertIs(result, helper._test_cores)
@@ -1787,7 +1788,7 @@ class TestProxProfileHelper(unittest.TestCase):
}
self.assertIsNone(helper._latency_cores)
- expected = [12, 23]
+ expected = [3, 4]
result = helper.latency_cores
self.assertEqual(result, expected)
self.assertIs(result, helper._latency_cores)
@@ -1842,7 +1843,7 @@ class TestProxProfileHelper(unittest.TestCase):
}
}
- expected = [7, 8]
+ expected = [3, 4]
result = helper.get_cores(helper.PROX_CORE_GEN_MODE)
self.assertEqual(result, expected)
@@ -1984,8 +1985,8 @@ class TestProxMplsProfileHelper(unittest.TestCase):
}
}
- expected_tagged = [7]
- expected_plain = [8]
+ expected_tagged = [3]
+ expected_plain = [4]
self.assertIsNone(helper._cores_tuple)
self.assertEqual(helper.tagged_cores, expected_tagged)
self.assertEqual(helper.plain_cores, expected_plain)
@@ -2060,10 +2061,10 @@ class TestProxBngProfileHelper(unittest.TestCase):
}
}
- expected_cpe = [7]
- expected_inet = [8]
- expected_arp = [4, 3]
- expected_arp_task = [0, 4]
+ expected_cpe = [3]
+ expected_inet = [4]
+ expected_arp = [6, 9]
+ expected_arp_task = [0, 6]
expected_combined = (expected_cpe, expected_inet, expected_arp, expected_arp_task)
self.assertIsNone(helper._cores_tuple)
@@ -2131,8 +2132,8 @@ class TestProxVpeProfileHelper(unittest.TestCase):
}
}
- expected_cpe = [7]
- expected_inet = [8]
+ expected_cpe = [3]
+ expected_inet = [4]
expected_combined = (expected_cpe, expected_inet)
self.assertIsNone(helper._cores_tuple)
@@ -2245,8 +2246,8 @@ class TestProxlwAFTRProfileHelper(unittest.TestCase):
}
}
- expected_tun = [7]
- expected_inet = [8]
+ expected_tun = [3]
+ expected_inet = [4]
expected_combined = (expected_tun, expected_inet)
self.assertIsNone(helper._cores_tuple)
diff --git a/tests/unit/network_services/vnf_generic/vnf/test_prox_vnf.py b/tests/unit/network_services/vnf_generic/vnf/test_prox_vnf.py
index 769279066..46786a304 100644
--- a/tests/unit/network_services/vnf_generic/vnf/test_prox_vnf.py
+++ b/tests/unit/network_services/vnf_generic/vnf/test_prox_vnf.py
@@ -131,6 +131,8 @@ class TestProxApproxVnf(unittest.TestCase):
'packets_in',
'packets_fwd',
'packets_dropped',
+ 'curr_packets_fwd',
+ 'curr_packets_in'
],
},
'connection-point': [
@@ -329,7 +331,7 @@ class TestProxApproxVnf(unittest.TestCase):
'packets_in': 0,
'packets_dropped': 0,
'packets_fwd': 0,
- 'collect_stats': {'core': {}},
+ 'collect_stats': {'core': {}}
}
result = prox_approx_vnf.collect_kpi()
self.assertEqual(result, expected)
@@ -352,7 +354,11 @@ class TestProxApproxVnf(unittest.TestCase):
'collect_stats': {'core': {'result': 234}},
}
result = prox_approx_vnf.collect_kpi()
- self.assertEqual(result, expected)
+ self.assertEqual(result['packets_in'], expected['packets_in'])
+ self.assertEqual(result['packets_dropped'], expected['packets_dropped'])
+ self.assertEqual(result['packets_fwd'], expected['packets_fwd'])
+ self.assertNotEqual(result['packets_fwd'], 0)
+ self.assertNotEqual(result['packets_fwd'], 0)
@mock.patch(SSH_HELPER)
def test_collect_kpi_error(self, ssh, *args):
@@ -373,6 +379,25 @@ class TestProxApproxVnf(unittest.TestCase):
file_path = os.path.join(curr_path, filename)
return file_path
+ @mock.patch('yardstick.common.utils.open', create=True)
+ @mock.patch('yardstick.benchmark.scenarios.networking.vnf_generic.open', create=True)
+ @mock.patch('yardstick.network_services.helpers.iniparser.open', create=True)
+ @mock.patch(SSH_HELPER)
+ def test_run_prox(self, ssh, *_):
+ mock_ssh(ssh)
+
+ prox_approx_vnf = ProxApproxVnf(NAME, self.VNFD0)
+ prox_approx_vnf.scenario_helper.scenario_cfg = self.SCENARIO_CFG
+ prox_approx_vnf.ssh_helper.join_bin_path.return_value = '/tool_path12/tool_file34'
+ prox_approx_vnf.setup_helper.remote_path = 'configs/file56.cfg'
+
+ expected = "sudo bash -c 'cd /tool_path12; " \
+ "/tool_path12/tool_file34 -o cli -t -f /tmp/l3-swap-2.cfg '"
+
+ prox_approx_vnf._run()
+ result = prox_approx_vnf.ssh_helper.run.call_args[0][0]
+ self.assertEqual(result, expected)
+
@mock.patch(SSH_HELPER)
def bad_test_instantiate(self, *args):
prox_approx_vnf = ProxApproxVnf(NAME, self.VNFD0)
diff --git a/tests/unit/network_services/vnf_generic/vnf/test_sample_vnf.py b/tests/unit/network_services/vnf_generic/vnf/test_sample_vnf.py
index 25633384e..b2e3fd85b 100644
--- a/tests/unit/network_services/vnf_generic/vnf/test_sample_vnf.py
+++ b/tests/unit/network_services/vnf_generic/vnf/test_sample_vnf.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-# Copyright (c) 2017 Intel Corporation
+# Copyright (c) 2017-2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,14 +15,17 @@
# limitations under the License.
#
+from copy import deepcopy
+
import unittest
import mock
-from copy import deepcopy
+import six
from tests.unit.network_services.vnf_generic.vnf.test_base import mock_ssh
from tests.unit import STL_MOCKS
from yardstick.benchmark.contexts.base import Context
from yardstick.common import exceptions as y_exceptions
+from yardstick.common import utils
from yardstick.network_services.nfvi.resource import ResourceProfile
from yardstick.network_services.vnf_generic.vnf.base import VnfdHelper
@@ -36,7 +39,8 @@ stl_patch = mock.patch.dict("sys.modules", STL_MOCKS)
stl_patch.start()
if stl_patch:
- from yardstick.network_services.vnf_generic.vnf.sample_vnf import VnfSshHelper
+ from yardstick.network_services.vnf_generic.vnf import sample_vnf
+ from yardstick.network_services.vnf_generic.vnf.vnf_ssh_helper import VnfSshHelper
from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNFDeployHelper
from yardstick.network_services.vnf_generic.vnf.sample_vnf import ScenarioHelper
from yardstick.network_services.vnf_generic.vnf.sample_vnf import ResourceHelper
@@ -528,46 +532,25 @@ class TestDpdkVnfSetupEnvHelper(unittest.TestCase):
result = DpdkVnfSetupEnvHelper._update_traffic_type(ip_pipeline_cfg, traffic_options)
self.assertEqual(result, expected)
- def test__setup_hugepages(self):
- vnfd_helper = VnfdHelper(self.VNFD_0)
- ssh_helper = mock.Mock()
- ssh_helper.execute.return_value = 0, '', ''
- scenario_helper = mock.Mock()
- dpdk_setup_helper = DpdkVnfSetupEnvHelper(vnfd_helper, ssh_helper, scenario_helper)
-
- result = dpdk_setup_helper._setup_hugepages()
- expect_start_list = ['awk', 'awk', 'echo']
- expect_in_list = ['meminfo', 'nr_hugepages', '16']
- call_args_iter = (args[0][0] for args in ssh_helper.execute.call_args_list)
- self.assertIsNone(result)
- self.assertEqual(ssh_helper.execute.call_count, 3)
- for expect_start, expect_in, arg0 in zip(expect_start_list, expect_in_list,
- call_args_iter):
- self.assertTrue(arg0.startswith(expect_start))
- self.assertIn(expect_in, arg0)
-
- def test__setup_hugepages_2_mb(self):
- vnfd_helper = VnfdHelper(self.VNFD_0)
+ @mock.patch.object(six, 'BytesIO', return_value=six.BytesIO(b'100\n'))
+ @mock.patch.object(utils, 'read_meminfo',
+ return_value={'Hugepagesize': '2048'})
+ def test__setup_hugepages(self, mock_meminfo, *args):
ssh_helper = mock.Mock()
- ssh_helper.execute.return_value = 0, '2048kB ', ''
- scenario_helper = mock.Mock()
- dpdk_setup_helper = DpdkVnfSetupEnvHelper(vnfd_helper, ssh_helper, scenario_helper)
-
- result = dpdk_setup_helper._setup_hugepages()
- expect_start_list = ['awk', 'awk', 'echo']
- expect_in_list = ['meminfo', 'nr_hugepages', '8192']
- call_args_iter = (args[0][0] for args in ssh_helper.execute.call_args_list)
- self.assertIsNone(result)
- self.assertEqual(ssh_helper.execute.call_count, 3)
- for expect_start, expect_in, arg0 in zip(expect_start_list, expect_in_list,
- call_args_iter):
- self.assertTrue(arg0.startswith(expect_start))
- self.assertIn(expect_in, arg0)
+ dpdk_setup_helper = DpdkVnfSetupEnvHelper(
+ mock.ANY, ssh_helper, mock.ANY)
+ with mock.patch.object(sample_vnf.LOG, 'info') as mock_info:
+ dpdk_setup_helper._setup_hugepages()
+ mock_info.assert_called_once_with(
+ 'Hugepages size (kB): %s, number claimed: %s, number set: '
+ '%s', 2048, 8192, 100)
+ mock_meminfo.assert_called_once_with(ssh_helper)
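+ # Note (assumption): the 8192 claimed pages correspond to a 16 GB
+ # reservation with 2048 kB hugepages (16 * 1024 * 1024 / 2048), while the
+ # mocked BytesIO(b'100\n') supplies the number of pages actually set.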
@mock.patch('yardstick.network_services.vnf_generic.vnf.sample_vnf.open')
- @mock.patch('yardstick.network_services.vnf_generic.vnf.sample_vnf.find_relative_file')
+ @mock.patch.object(utils, 'find_relative_file')
@mock.patch('yardstick.network_services.vnf_generic.vnf.sample_vnf.MultiPortConfig')
- def test_build_config(self, mock_multi_port_config_class, mock_find, *args):
+ @mock.patch.object(utils, 'open_relative_file')
+ def test_build_config(self, mock_open_rf, mock_multi_port_config_class, mock_find, *args):
mock_multi_port_config = mock_multi_port_config_class()
vnfd_helper = VnfdHelper(self.VNFD_0)
ssh_helper = mock.Mock()
@@ -584,6 +567,20 @@ class TestDpdkVnfSetupEnvHelper(unittest.TestCase):
self.assertGreaterEqual(mock_multi_port_config.generate_config.call_count, 1)
self.assertGreaterEqual(mock_multi_port_config.generate_script.call_count, 1)
+ scenario_helper.vnf_cfg = {'file': 'fake_file'}
+ dpdk_setup_helper = DpdkVnfSetupEnvHelper(vnfd_helper, ssh_helper, scenario_helper)
+ mock_open_rf.side_effect = mock.mock_open(read_data='fake_data')
+ dpdk_setup_helper.PIPELINE_COMMAND = expected = 'pipeline command'
+
+ result = dpdk_setup_helper.build_config()
+
+ mock_open_rf.assert_called_once()
+ self.assertEqual(result, expected)
+ self.assertGreaterEqual(ssh_helper.upload_config_file.call_count, 2)
+ self.assertGreaterEqual(mock_find.call_count, 1)
+ self.assertGreaterEqual(mock_multi_port_config.generate_config.call_count, 1)
+ self.assertGreaterEqual(mock_multi_port_config.generate_script.call_count, 1)
+
def test__build_pipeline_kwargs(self):
vnfd_helper = VnfdHelper(self.VNFD_0)
ssh_helper = mock.Mock()
@@ -619,10 +616,15 @@ class TestDpdkVnfSetupEnvHelper(unittest.TestCase):
ssh_helper = mock.Mock()
ssh_helper.execute = execute
- dpdk_vnf_setup_env_helper = DpdkVnfSetupEnvHelper(vnfd_helper, ssh_helper, mock.Mock())
+ scenario_helper = mock.Mock()
+ scenario_helper.nodes = [None, None]
+ dpdk_vnf_setup_env_helper = DpdkVnfSetupEnvHelper(vnfd_helper, ssh_helper, scenario_helper)
dpdk_vnf_setup_env_helper._validate_cpu_cfg = mock.Mock(return_value=[])
- self.assertIsInstance(dpdk_vnf_setup_env_helper.setup_vnf_environment(), ResourceProfile)
+ with mock.patch.object(dpdk_vnf_setup_env_helper, '_setup_dpdk'):
+ self.assertIsInstance(
+ dpdk_vnf_setup_env_helper.setup_vnf_environment(),
+ ResourceProfile)
def test__setup_dpdk(self):
ssh_helper = mock.Mock()
@@ -638,21 +640,6 @@ class TestDpdkVnfSetupEnvHelper(unittest.TestCase):
mock.call('lsmod | grep -i igb_uio')
])
- def test__setup_dpdk_igb_uio_not_loaded(self):
- ssh_helper = mock.Mock()
- ssh_helper.execute = mock.Mock()
- ssh_helper.execute.side_effect = [(0, 0, 0), (1, 0, 0)]
- dpdk_setup_helper = DpdkVnfSetupEnvHelper(mock.ANY, ssh_helper, mock.ANY)
- with mock.patch.object(dpdk_setup_helper, '_setup_hugepages') as \
- mock_setup_hp:
- with self.assertRaises(y_exceptions.DPDKSetupDriverError):
- dpdk_setup_helper._setup_dpdk()
- mock_setup_hp.assert_called_once()
- ssh_helper.execute.assert_has_calls([
- mock.call('sudo modprobe uio && sudo modprobe igb_uio'),
- mock.call('lsmod | grep -i igb_uio')
- ])
-
@mock.patch('yardstick.ssh.SSH')
def test__setup_resources(self, _):
vnfd_helper = VnfdHelper(deepcopy(self.VNFD_0))
@@ -690,6 +677,7 @@ class TestDpdkVnfSetupEnvHelper(unittest.TestCase):
# ssh_helper.execute = mock.Mock(return_value = (0, 'text', ''))
# ssh_helper.execute.return_value = 0, 'output', ''
scenario_helper = mock.Mock()
+ scenario_helper.nodes = [None, None]
rv = ['0000:05:00.1', '0000:05:00.0']
dpdk_setup_helper = DpdkVnfSetupEnvHelper(vnfd_helper, ssh_helper, scenario_helper)
@@ -708,6 +696,7 @@ class TestDpdkVnfSetupEnvHelper(unittest.TestCase):
vnfd_helper = VnfdHelper(self.VNFD_0)
ssh_helper = mock.Mock()
scenario_helper = mock.Mock()
+ scenario_helper.nodes = [None, None]
dpdk_setup_helper = DpdkVnfSetupEnvHelper(vnfd_helper, ssh_helper, scenario_helper)
dpdk_setup_helper.dpdk_bind_helper.bind = mock.Mock()
dpdk_setup_helper.dpdk_bind_helper.used_drivers = {
@@ -1386,7 +1375,7 @@ class TestSampleVNFDeployHelper(unittest.TestCase):
@mock.patch('yardstick.network_services.vnf_generic.vnf.sample_vnf.time')
@mock.patch('subprocess.check_output')
- def test_deploy_vnfs_disabled(self, *args):
+ def test_deploy_vnfs_disabled(self, *_):
vnfd_helper = mock.Mock()
ssh_helper = mock.Mock()
ssh_helper.join_bin_path.return_value = 'joined_path'
diff --git a/tests/unit/network_services/vnf_generic/vnf/test_tg_ixload.py b/tests/unit/network_services/vnf_generic/vnf/test_tg_ixload.py
index d77068137..b93f9aad3 100644
--- a/tests/unit/network_services/vnf_generic/vnf/test_tg_ixload.py
+++ b/tests/unit/network_services/vnf_generic/vnf/test_tg_ixload.py
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
# Copyright (c) 2016-2017 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,13 +13,15 @@
# limitations under the License.
#
-from __future__ import absolute_import
+import subprocess
-import unittest
import mock
-import subprocess
+import unittest
+import six
from tests.unit import STL_MOCKS
+from yardstick import ssh
+from yardstick.common import utils
STLClient = mock.MagicMock()
@@ -147,11 +147,11 @@ class TestIxLoadTrafficGen(unittest.TestCase):
ixload_traffic_gen = IxLoadTrafficGen(NAME, vnfd)
self.assertEqual(None, ixload_traffic_gen.listen_traffic({}))
- @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.makedirs")
+ @mock.patch.object(utils, 'find_relative_file')
+ @mock.patch.object(utils, 'makedirs')
@mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.call")
@mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.shutil")
- def test_instantiate(self, call, shutil, mock_makedirs):
- # pylint: disable=unused-argument
+ def test_instantiate(self, shutil, *args):
with mock.patch("yardstick.ssh.SSH") as ssh:
ssh_mock = mock.Mock(autospec=ssh.SSH)
ssh_mock.execute = \
@@ -175,19 +175,18 @@ class TestIxLoadTrafficGen(unittest.TestCase):
'1C/1T',
'worker_threads': 1}}
}})
- with mock.patch('yardstick.benchmark.scenarios.networking.vnf_generic.open',
- create=True) as mock_open:
+ with mock.patch.object(six.moves.builtins, 'open',
+ create=True) as mock_open:
mock_open.return_value = mock.MagicMock()
ixload_traffic_gen.instantiate(scenario_cfg, {})
@mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.call")
- @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.shutil")
@mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.open")
@mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.min")
@mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.max")
@mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.len")
- def test_run_traffic(self, call, shutil, main_open, min, max, len):
- # pylint: disable=unused-argument
+ @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.shutil")
+ def test_run_traffic(self, shutil, *args):
mock_traffic_profile = mock.Mock(autospec=TrafficProfile)
mock_traffic_profile.get_traffic_definition.return_value = "64"
mock_traffic_profile.params = self.TRAFFIC_PROFILE
@@ -213,13 +212,12 @@ class TestIxLoadTrafficGen(unittest.TestCase):
self.assertIsNone(result)
@mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.call")
- @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.shutil")
@mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.open")
@mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.min")
@mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.max")
@mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.len")
- def test_run_traffic_csv(self, call, shutil, main_open, min, max, len):
- # pylint: disable=unused-argument
+ @mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.shutil")
+ def test_run_traffic_csv(self, shutil, *args):
mock_traffic_profile = mock.Mock(autospec=TrafficProfile)
mock_traffic_profile.get_traffic_definition.return_value = "64"
mock_traffic_profile.params = self.TRAFFIC_PROFILE
@@ -247,20 +245,15 @@ class TestIxLoadTrafficGen(unittest.TestCase):
self.assertIsNone(result)
@mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.call")
+ @mock.patch.object(ssh, 'SSH')
def test_terminate(self, *args):
- with mock.patch("yardstick.ssh.SSH") as ssh:
- vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
- ssh_mock = mock.Mock(autospec=ssh.SSH)
- ssh_mock.execute = \
- mock.Mock(return_value=(0, "", ""))
- ssh.from_node.return_value = ssh_mock
- ixload_traffic_gen = IxLoadTrafficGen(NAME, vnfd)
- self.assertEqual(None, ixload_traffic_gen.terminate())
+ vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
+ ixload_traffic_gen = IxLoadTrafficGen(NAME, vnfd)
+ self.assertEqual(None, ixload_traffic_gen.terminate())
- @mock.patch("yardstick.ssh.SSH")
@mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.call")
- def test_parse_csv_read(self, mock_call, mock_ssh):
- # pylint: disable=unused-argument
+ @mock.patch.object(ssh, 'SSH')
+ def test_parse_csv_read(self, mock_ssh, *args):
vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
kpi_data = {
'HTTP Total Throughput (Kbps)': 1,
@@ -282,10 +275,9 @@ class TestIxLoadTrafficGen(unittest.TestCase):
for key_left, key_right in IxLoadResourceHelper.KPI_LIST.items():
self.assertEqual(result[key_left][-1], int(kpi_data[key_right]))
- @mock.patch("yardstick.ssh.SSH")
@mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.call")
- def test_parse_csv_read_value_error(self, mock_call, mock_ssh):
- # pylint: disable=unused-argument
+ @mock.patch.object(ssh, 'SSH')
+ def test_parse_csv_read_value_error(self, mock_ssh, *args):
vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
http_reader = [{
'HTTP Total Throughput (Kbps)': 1,
@@ -305,10 +297,9 @@ class TestIxLoadTrafficGen(unittest.TestCase):
ixload_traffic_gen.resource_helper.parse_csv_read(http_reader)
self.assertDictEqual(ixload_traffic_gen.resource_helper.result, init_value)
- @mock.patch("yardstick.ssh.SSH")
@mock.patch("yardstick.network_services.vnf_generic.vnf.tg_ixload.call")
- def test_parse_csv_read_error(self, mock_call, mock_ssh):
- # pylint: disable=unused-argument
+ @mock.patch.object(ssh, 'SSH')
+ def test_parse_csv_read_error(self, mock_ssh, *args):
vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
http_reader = [{
'HTTP Total Throughput (Kbps)': 1,
diff --git a/tests/unit/network_services/vnf_generic/vnf/test_tg_ping.py b/tests/unit/network_services/vnf_generic/vnf/test_tg_ping.py
index fb26f20b5..ed2274e79 100644
--- a/tests/unit/network_services/vnf_generic/vnf/test_tg_ping.py
+++ b/tests/unit/network_services/vnf_generic/vnf/test_tg_ping.py
@@ -36,7 +36,7 @@ if stl_patch:
from yardstick.network_services.vnf_generic.vnf.tg_ping import PingTrafficGen
from yardstick.network_services.vnf_generic.vnf.tg_ping import PingResourceHelper
from yardstick.network_services.vnf_generic.vnf.tg_ping import PingSetupEnvHelper
- from yardstick.network_services.vnf_generic.vnf.sample_vnf import VnfSshHelper
+ from yardstick.network_services.vnf_generic.vnf.vnf_ssh_helper import VnfSshHelper
class TestPingResourceHelper(unittest.TestCase):
diff --git a/tests/unit/network_services/vnf_generic/vnf/test_tg_rfc2544_ixia.py b/tests/unit/network_services/vnf_generic/vnf/test_tg_rfc2544_ixia.py
index e9f718cb7..aeabc367a 100644
--- a/tests/unit/network_services/vnf_generic/vnf/test_tg_rfc2544_ixia.py
+++ b/tests/unit/network_services/vnf_generic/vnf/test_tg_rfc2544_ixia.py
@@ -16,8 +16,10 @@
#
import os
-import unittest
+
import mock
+import six
+import unittest
from tests.unit import STL_MOCKS
@@ -341,7 +343,7 @@ class TestIXIATrafficGen(unittest.TestCase):
'task_path': '/path/to/task'
}
- @mock.patch('yardstick.benchmark.scenarios.networking.vnf_generic.open', create=True)
+ @mock.patch.object(six.moves.builtins, 'open', create=True)
@mock.patch('yardstick.network_services.vnf_generic.vnf.tg_rfc2544_ixia.open',
mock.mock_open(), create=True)
@mock.patch('yardstick.network_services.vnf_generic.vnf.tg_rfc2544_ixia.LOG.exception')
diff --git a/tests/unit/network_services/vnf_generic/vnf/test_vfw_vnf.py b/tests/unit/network_services/vnf_generic/vnf/test_vfw_vnf.py
index f0a56665c..f2eca196f 100644
--- a/tests/unit/network_services/vnf_generic/vnf/test_vfw_vnf.py
+++ b/tests/unit/network_services/vnf_generic/vnf/test_vfw_vnf.py
@@ -22,6 +22,9 @@ import os
from tests.unit import STL_MOCKS
from tests.unit.network_services.vnf_generic.vnf.test_base import mock_ssh
+from yardstick.common import utils
+
+
STLClient = mock.MagicMock()
stl_patch = mock.patch.dict("sys.modules", STL_MOCKS)
stl_patch.start()
@@ -331,7 +334,7 @@ pipeline>
vfw_approx_vnf._run()
vfw_approx_vnf.ssh_helper.run.assert_called_once()
- @mock.patch("yardstick.network_services.vnf_generic.vnf.vfw_vnf.find_relative_file")
+ @mock.patch.object(utils, 'find_relative_file')
@mock.patch("yardstick.network_services.vnf_generic.vnf.vfw_vnf.YangModel")
@mock.patch("yardstick.network_services.vnf_generic.vnf.sample_vnf.Context")
@mock.patch(SSH_HELPER)
diff --git a/tox.ini b/tox.ini
index 822ffdab4..313f1eca2 100644
--- a/tox.ini
+++ b/tox.ini
@@ -6,6 +6,8 @@ envlist = py{27,3},pep8,functional{,-py3},coverage
[testenv]
usedevelop=True
passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY BRANCH
+setenv =
+ VIRTUAL_ENV={envdir}
deps =
-r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
diff --git a/yardstick/benchmark/core/task.py b/yardstick/benchmark/core/task.py
index 2c3edfe13..a4514f5f1 100644
--- a/yardstick/benchmark/core/task.py
+++ b/yardstick/benchmark/core/task.py
@@ -118,7 +118,7 @@ class Task(object): # pragma: no cover
case_name = os.path.splitext(os.path.basename(task_files[i]))[0]
try:
- data = self._run(scenarios, run_in_parallel, args.output_file)
+ data = self._run(scenarios, run_in_parallel, output_config)
except KeyboardInterrupt:
raise
except Exception: # pylint: disable=broad-except
@@ -230,11 +230,12 @@ class Task(object): # pragma: no cover
def _do_output(self, output_config, result):
dispatchers = DispatcherBase.get(output_config)
+ dispatchers = (d for d in dispatchers if d.__dispatcher_type__ != 'Influxdb')
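+ # NOTE: InfluxDB records are uploaded per record by the runner (see
+ # Runner._output_to_influxdb), so that dispatcher is skipped here,
+ # presumably to avoid posting the same records twice.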
for dispatcher in dispatchers:
dispatcher.flush_result_data(result)
- def _run(self, scenarios, run_in_parallel, output_file):
+ def _run(self, scenarios, run_in_parallel, output_config):
"""Deploys context and calls runners"""
for context in self.contexts:
context.deploy()
@@ -245,14 +246,14 @@ class Task(object): # pragma: no cover
# Start all background scenarios
for scenario in filter(_is_background_scenario, scenarios):
scenario["runner"] = dict(type="Duration", duration=1000000000)
- runner = self.run_one_scenario(scenario, output_file)
+ runner = self.run_one_scenario(scenario, output_config)
background_runners.append(runner)
runners = []
if run_in_parallel:
for scenario in scenarios:
if not _is_background_scenario(scenario):
- runner = self.run_one_scenario(scenario, output_file)
+ runner = self.run_one_scenario(scenario, output_config)
runners.append(runner)
# Wait for runners to finish
@@ -261,12 +262,12 @@ class Task(object): # pragma: no cover
if status != 0:
raise RuntimeError(
"{0} runner status {1}".format(runner.__execution_type__, status))
- LOG.info("Runner ended, output in %s", output_file)
+ LOG.info("Runner ended")
else:
# run serially
for scenario in scenarios:
if not _is_background_scenario(scenario):
- runner = self.run_one_scenario(scenario, output_file)
+ runner = self.run_one_scenario(scenario, output_config)
status = runner_join(runner, background_runners, self.outputs, result)
if status != 0:
LOG.error('Scenario NO.%s: "%s" ERROR!',
@@ -274,7 +275,7 @@ class Task(object): # pragma: no cover
scenario.get('type'))
raise RuntimeError(
"{0} runner status {1}".format(runner.__execution_type__, status))
- LOG.info("Runner ended, output in %s", output_file)
+ LOG.info("Runner ended")
# Abort background runners
for runner in background_runners:
@@ -311,10 +312,10 @@ class Task(object): # pragma: no cover
else:
return op
- def run_one_scenario(self, scenario_cfg, output_file):
+ def run_one_scenario(self, scenario_cfg, output_config):
"""run one scenario using context"""
runner_cfg = scenario_cfg["runner"]
- runner_cfg['output_filename'] = output_file
+ runner_cfg['output_config'] = output_config
options = scenario_cfg.get('options', {})
scenario_cfg['options'] = self._parse_options(options)
diff --git a/yardstick/benchmark/runners/base.py b/yardstick/benchmark/runners/base.py
index a887fa5b3..99386a440 100755
--- a/yardstick/benchmark/runners/base.py
+++ b/yardstick/benchmark/runners/base.py
@@ -23,6 +23,7 @@ import multiprocessing
import subprocess
import time
import traceback
+from subprocess import CalledProcessError
import importlib
@@ -30,6 +31,7 @@ from six.moves.queue import Empty
import yardstick.common.utils as utils
from yardstick.benchmark.scenarios import base as base_scenario
+from yardstick.dispatcher.base import Base as DispatcherBase
log = logging.getLogger(__name__)
@@ -39,7 +41,7 @@ def _execute_shell_command(command):
exitcode = 0
try:
output = subprocess.check_output(command, shell=True)
- except Exception:
+ except CalledProcessError:
exitcode = -1
output = traceback.format_exc()
log.error("exec command '%s' error:\n ", command)
@@ -137,6 +139,8 @@ class Runner(object):
Runner.release(runner)
def __init__(self, config):
+ self.task_id = None
+ self.case_name = None
self.config = config
self.periodic_action_process = None
self.output_queue = multiprocessing.Queue()
@@ -170,6 +174,8 @@ class Runner(object):
cls = getattr(module, path_split[-1])
self.config['object'] = class_name
+ self.case_name = scenario_cfg['tc']
+ self.task_id = scenario_cfg['task_id']
self.aborted.clear()
# run a potentially configured pre-start action
@@ -245,10 +251,24 @@ class Runner(object):
def get_result(self):
result = []
+
+ dispatcher = self.config['output_config']['DEFAULT']['dispatcher']
+ output_in_influxdb = 'influxdb' in dispatcher
+
while not self.result_queue.empty():
log.debug("result_queue size %s", self.result_queue.qsize())
try:
- result.append(self.result_queue.get(True, 1))
+ one_record = self.result_queue.get(True, 1)
except Empty:
pass
+ else:
+ if output_in_influxdb:
+ self._output_to_influxdb(one_record)
+
+ result.append(one_record)
return result
+
+ def _output_to_influxdb(self, record):
+ dispatchers = DispatcherBase.get(self.config['output_config'])
+ dispatcher = next((d for d in dispatchers if d.__dispatcher_type__ == 'Influxdb'))
+ dispatcher.upload_one_record(record, self.case_name, '', task_id=self.task_id)
diff --git a/yardstick/benchmark/scenarios/networking/sfc_openstack.py b/yardstick/benchmark/scenarios/networking/sfc_openstack.py
index d5feabbbe..aaab2131a 100644
--- a/yardstick/benchmark/scenarios/networking/sfc_openstack.py
+++ b/yardstick/benchmark/scenarios/networking/sfc_openstack.py
@@ -34,11 +34,13 @@ def get_credentials(service): # pragma: no cover
# The most common way to pass these info to the script is to do it through
# environment variables.
+ # NOTE(ralonsoh): OS_TENANT_NAME is deprecated.
+ project_name = os.environ.get('OS_PROJECT_NAME', 'admin')
creds.update({
"username": os.environ.get('OS_USERNAME', "admin"),
password: os.environ.get("OS_PASSWORD", 'admin'),
"auth_url": os.environ.get("OS_AUTH_URL"),
- tenant: os.environ.get("OS_TENANT_NAME", "admin"),
+ tenant: os.environ.get("OS_TENANT_NAME", project_name),
})
cacert = os.environ.get("OS_CACERT")
if cacert is not None:
@@ -59,7 +61,7 @@ def get_instances(nova_client): # pragma: no cover
try:
instances = nova_client.servers.list(search_opts={'all_tenants': 1})
return instances
- except Exception as e:
+ except Exception as e: # pylint: disable=broad-except
print("Error [get_instances(nova_client)]:", e)
return None
@@ -72,7 +74,7 @@ def get_SFs(nova_client): # pragma: no cover
if "sfc_test" not in instance.name:
SFs.append(instance)
return SFs
- except Exception as e:
+ except Exception as e: # pylint: disable=broad-except
print("Error [get_SFs(nova_client)]:", e)
return None
@@ -93,7 +95,7 @@ def create_floating_ips(neutron_client): # pragma: no cover
ip_json = neutron_client.create_floatingip({'floatingip': props})
fip_addr = ip_json['floatingip']['floating_ip_address']
ips.append(fip_addr)
- except Exception as e:
+ except Exception as e: # pylint: disable=broad-except
print("Error [create_floating_ip(neutron_client)]:", e)
return None
return ips
@@ -106,7 +108,7 @@ def floatIPtoSFs(SFs, floatips): # pragma: no cover
SF.add_floating_ip(floatips[i])
i = i + 1
return True
- except Exception as e:
+ except Exception as e: # pylint: disable=broad-except
print(("Error [add_floating_ip(nova_client, '%s', '%s')]:" %
(SF, floatips[i]), e))
return False
@@ -122,7 +124,3 @@ def get_an_IP(): # pragma: no cover
floatips = create_floating_ips(neutron_client)
floatIPtoSFs(SFs, floatips)
return floatips
-
-
-if __name__ == '__main__': # pragma: no cover
- get_an_IP()
diff --git a/yardstick/benchmark/scenarios/networking/vnf_generic.py b/yardstick/benchmark/scenarios/networking/vnf_generic.py
index b94bfc9ab..0e4785294 100644
--- a/yardstick/benchmark/scenarios/networking/vnf_generic.py
+++ b/yardstick/benchmark/scenarios/networking/vnf_generic.py
@@ -11,115 +11,38 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-""" NSPerf specific scenario definition """
-
-from __future__ import absolute_import
+import copy
import logging
-import errno
import ipaddress
-
-import copy
+from itertools import chain
import os
import sys
-import re
-from itertools import chain
import six
import yaml
-from collections import defaultdict
-from yardstick.benchmark.scenarios import base
+from yardstick.benchmark.scenarios import base as scenario_base
+from yardstick.error import IncorrectConfig
from yardstick.common.constants import LOG_DIR
from yardstick.common.process import terminate_children
-from yardstick.common.utils import import_modules_from_package, itersubclasses
-from yardstick.common.yaml_loader import yaml_load
+from yardstick.common import utils
from yardstick.network_services.collector.subscriber import Collector
from yardstick.network_services.vnf_generic import vnfdgen
from yardstick.network_services.vnf_generic.vnf.base import GenericVNF
-from yardstick.network_services.traffic_profile.base import TrafficProfile
+from yardstick.network_services import traffic_profile
+from yardstick.network_services.traffic_profile import base as tprofile_base
from yardstick.network_services.utils import get_nsb_option
from yardstick import ssh
-
-LOG = logging.getLogger(__name__)
-
-
-class SSHError(Exception):
- """Class handles ssh connection error exception"""
- pass
-
-
-class SSHTimeout(SSHError):
- """Class handles ssh connection timeout exception"""
- pass
-
-
-class IncorrectConfig(Exception):
- """Class handles incorrect configuration during setup"""
- pass
-
-
-class IncorrectSetup(Exception):
- """Class handles incorrect setup during setup"""
- pass
+traffic_profile.register_modules()
-class SshManager(object):
- def __init__(self, node, timeout=120):
- super(SshManager, self).__init__()
- self.node = node
- self.conn = None
- self.timeout = timeout
-
- def __enter__(self):
- """
- args -> network device mappings
- returns -> ssh connection ready to be used
- """
- try:
- self.conn = ssh.SSH.from_node(self.node)
- self.conn.wait(timeout=self.timeout)
- except SSHError as error:
- LOG.info("connect failed to %s, due to %s", self.node["ip"], error)
- # self.conn defaults to None
- return self.conn
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- if self.conn:
- self.conn.close()
-
-
-def find_relative_file(path, task_path):
- """
- Find file in one of places: in abs of path or
- relative to TC scenario file. In this order.
-
- :param path:
- :param task_path:
- :return str: full path to file
- """
- # fixme: create schema to validate all fields have been provided
- for lookup in [os.path.abspath(path), os.path.join(task_path, path)]:
- try:
- with open(lookup):
- return lookup
- except IOError:
- pass
- raise IOError(errno.ENOENT, 'Unable to find {} file'.format(path))
-
-
-def open_relative_file(path, task_path):
- try:
- return open(path)
- except IOError as e:
- if e.errno == errno.ENOENT:
- return open(os.path.join(task_path, path))
- raise
+LOG = logging.getLogger(__name__)
-class NetworkServiceTestCase(base.Scenario):
+class NetworkServiceTestCase(scenario_base.Scenario):
"""Class handles Generic framework to do pre-deployment VNF &
Network service testing """
@@ -130,16 +53,12 @@ class NetworkServiceTestCase(base.Scenario):
self.scenario_cfg = scenario_cfg
self.context_cfg = context_cfg
- # fixme: create schema to validate all fields have been provided
- with open_relative_file(scenario_cfg["topology"],
- scenario_cfg['task_path']) as stream:
- topology_yaml = yaml_load(stream)
-
- self.topology = topology_yaml["nsd:nsd-catalog"]["nsd"][0]
+ self._render_topology()
self.vnfs = []
self.collector = None
self.traffic_profile = None
self.node_netdevs = {}
+ self.bin_path = get_nsb_option('bin_path', '')
def _get_ip_flow_range(self, ip_start_range):
@@ -211,37 +130,47 @@ class NetworkServiceTestCase(base.Scenario):
def _get_traffic_profile(self):
profile = self.scenario_cfg["traffic_profile"]
path = self.scenario_cfg["task_path"]
- with open_relative_file(profile, path) as infile:
+ with utils.open_relative_file(profile, path) as infile:
+ return infile.read()
+
+ def _get_topology(self):
+ topology = self.scenario_cfg["topology"]
+ path = self.scenario_cfg["task_path"]
+ with utils.open_relative_file(topology, path) as infile:
return infile.read()
def _fill_traffic_profile(self):
- traffic_mapping = self._get_traffic_profile()
- traffic_map_data = {
+ tprofile = self._get_traffic_profile()
+ extra_args = self.scenario_cfg.get('extra_args', {})
+ tprofile_data = {
'flow': self._get_traffic_flow(),
'imix': self._get_traffic_imix(),
- TrafficProfile.UPLINK: {},
- TrafficProfile.DOWNLINK: {},
+ tprofile_base.TrafficProfile.UPLINK: {},
+ tprofile_base.TrafficProfile.DOWNLINK: {},
+ 'extra_args': extra_args
}
- traffic_vnfd = vnfdgen.generate_vnfd(traffic_mapping, traffic_map_data)
- self.traffic_profile = TrafficProfile.get(traffic_vnfd)
- return self.traffic_profile
+ traffic_vnfd = vnfdgen.generate_vnfd(tprofile, tprofile_data)
+ self.traffic_profile = tprofile_base.TrafficProfile.get(traffic_vnfd)
+
+ def _render_topology(self):
+ topology = self._get_topology()
+ topology_args = self.scenario_cfg.get('extra_args', {})
+ topology_data = {
+ 'extra_args': topology_args
+ }
+ topology_yaml = vnfdgen.generate_vnfd(topology, topology_data)
+ self.topology = topology_yaml["nsd:nsd-catalog"]["nsd"][0]
def _find_vnf_name_from_id(self, vnf_id):
return next((vnfd["vnfd-id-ref"]
for vnfd in self.topology["constituent-vnfd"]
if vnf_id == vnfd["member-vnf-index"]), None)
- @staticmethod
- def get_vld_networks(networks):
- # network name is vld_id
- vld_map = {}
- for name, n in networks.items():
- try:
- vld_map[n['vld_id']] = n
- except KeyError:
- vld_map[name] = n
- return vld_map
+ def _find_vnfd_from_vnf_idx(self, vnf_id):
+ return next((vnfd
+ for vnfd in self.topology["constituent-vnfd"]
+ if vnf_id == vnfd["member-vnf-index"]), None)
@staticmethod
def find_node_if(nodes, name, if_name, vld_id):
@@ -293,7 +222,9 @@ class NetworkServiceTestCase(base.Scenario):
node1_if["peer_ifname"] = node0_if_name
# just load the network
- vld_networks = self.get_vld_networks(self.context_cfg["networks"])
+ vld_networks = {n.get('vld_id', name): n for name, n in
+ self.context_cfg["networks"].items()}
+
node0_if["network"] = vld_networks.get(vld["id"], {})
node1_if["network"] = vld_networks.get(vld["id"], {})
@@ -332,10 +263,6 @@ class NetworkServiceTestCase(base.Scenario):
node0_if["peer_intf"] = node1_copy
node1_if["peer_intf"] = node0_copy
- def _find_vnfd_from_vnf_idx(self, vnf_idx):
- return next((vnfd for vnfd in self.topology["constituent-vnfd"]
- if vnf_idx == vnfd["member-vnf-index"]), None)
-
def _update_context_with_topology(self):
for vnfd in self.topology["constituent-vnfd"]:
vnf_idx = vnfd["member-vnf-index"]
@@ -343,43 +270,6 @@ class NetworkServiceTestCase(base.Scenario):
vnfd = self._find_vnfd_from_vnf_idx(vnf_idx)
self.context_cfg["nodes"][vnf_name].update(vnfd)
- def _probe_netdevs(self, node, node_dict, timeout=120):
- try:
- return self.node_netdevs[node]
- except KeyError:
- pass
-
- netdevs = {}
- cmd = "PATH=$PATH:/sbin:/usr/sbin ip addr show"
-
- with SshManager(node_dict, timeout=timeout) as conn:
- if conn:
- exit_status = conn.execute(cmd)[0]
- if exit_status != 0:
- raise IncorrectSetup("Node's %s lacks ip tool." % node)
- exit_status, stdout, _ = conn.execute(
- self.FIND_NETDEVICE_STRING)
- if exit_status != 0:
- raise IncorrectSetup(
- "Cannot find netdev info in sysfs" % node)
- netdevs = node_dict['netdevs'] = self.parse_netdev_info(stdout)
-
- self.node_netdevs[node] = netdevs
- return netdevs
-
- @classmethod
- def _probe_missing_values(cls, netdevs, network):
-
- mac_lower = network['local_mac'].lower()
- for netdev in netdevs.values():
- if netdev['address'].lower() != mac_lower:
- continue
- network.update({
- 'driver': netdev['driver'],
- 'vpci': netdev['pci_bus_id'],
- 'ifindex': netdev['ifindex'],
- })
-
def _generate_pod_yaml(self):
context_yaml = os.path.join(LOG_DIR, "pod-{}.yaml".format(self.scenario_cfg['task_id']))
# convert OrderedDict to a list
@@ -405,82 +295,16 @@ class NetworkServiceTestCase(base.Scenario):
pass
return new_node
- TOPOLOGY_REQUIRED_KEYS = frozenset({
- "vpci", "local_ip", "netmask", "local_mac", "driver"})
-
def map_topology_to_infrastructure(self):
""" This method should verify if the available resources defined in pod.yaml
match the topology.yaml file.
:return: None. Side effect: context_cfg is updated
"""
- num_nodes = len(self.context_cfg["nodes"])
- # OpenStack instance creation time is probably proportional to the number
- # of instances
- timeout = 120 * num_nodes
- for node, node_dict in self.context_cfg["nodes"].items():
-
- for network in node_dict["interfaces"].values():
- missing = self.TOPOLOGY_REQUIRED_KEYS.difference(network)
- if not missing:
- continue
-
- # only ssh probe if there are missing values
- # ssh probe won't work on Ixia, so we had better define all our values
- try:
- netdevs = self._probe_netdevs(node, node_dict, timeout=timeout)
- except (SSHError, SSHTimeout):
- raise IncorrectConfig(
- "Unable to probe missing interface fields '%s', on node %s "
- "SSH Error" % (', '.join(missing), node))
- try:
- self._probe_missing_values(netdevs, network)
- except KeyError:
- pass
- else:
- missing = self.TOPOLOGY_REQUIRED_KEYS.difference(
- network)
- if missing:
- raise IncorrectConfig(
- "Require interface fields '%s' not found, topology file "
- "corrupted" % ', '.join(missing))
-
- # we have to generate pod.yaml here so we have vpci and driver
- self._generate_pod_yaml()
# 3. Use topology file to find connections & resolve dest address
self._resolve_topology()
self._update_context_with_topology()
- FIND_NETDEVICE_STRING = r"""find /sys/devices/pci* -type d -name net -exec sh -c '{ grep -sH ^ \
-$1/ifindex $1/address $1/operstate $1/device/vendor $1/device/device \
-$1/device/subsystem_vendor $1/device/subsystem_device ; \
-printf "%s/driver:" $1 ; basename $(readlink -s $1/device/driver); } \
-' sh \{\}/* \;
-"""
- BASE_ADAPTER_RE = re.compile(
- '^/sys/devices/(.*)/net/([^/]*)/([^:]*):(.*)$', re.M)
-
- @classmethod
- def parse_netdev_info(cls, stdout):
- network_devices = defaultdict(dict)
- matches = cls.BASE_ADAPTER_RE.findall(stdout)
- for bus_path, interface_name, name, value in matches:
- dirname, bus_id = os.path.split(bus_path)
- if 'virtio' in bus_id:
- # for some stupid reason VMs include virtio1/
- # in PCI device path
- bus_id = os.path.basename(dirname)
- # remove extra 'device/' from 'device/vendor,
- # device/subsystem_vendor', etc.
- if 'device/' in name:
- name = name.split('/')[1]
- network_devices[interface_name][name] = value
- network_devices[interface_name][
- 'interface_name'] = interface_name
- network_devices[interface_name]['pci_bus_id'] = bus_id
- # convert back to regular dict
- return dict(network_devices)
-
@classmethod
def get_vnf_impl(cls, vnf_model_id):
""" Find the implementing class from vnf_model["vnf"]["name"] field
@@ -488,13 +312,14 @@ printf "%s/driver:" $1 ; basename $(readlink -s $1/device/driver); } \
:param vnf_model_id: parsed vnfd model ID field
:return: subclass of GenericVNF
"""
- import_modules_from_package(
+ utils.import_modules_from_package(
"yardstick.network_services.vnf_generic.vnf")
expected_name = vnf_model_id
classes_found = []
def impl():
- for name, class_ in ((c.__name__, c) for c in itersubclasses(GenericVNF)):
+ for name, class_ in ((c.__name__, c) for c in
+ utils.itersubclasses(GenericVNF)):
if name == expected_name:
yield class_
classes_found.append(name)
@@ -547,7 +372,7 @@ printf "%s/driver:" $1 ; basename $(readlink -s $1/device/driver); } \
context_cfg = self.context_cfg
vnfs = []
- # we assume OrderedDict for consistenct in instantiation
+ # we assume OrderedDict for consistency in instantiation
for node_name, node in context_cfg["nodes"].items():
LOG.debug(node)
try:
@@ -556,7 +381,7 @@ printf "%s/driver:" $1 ; basename $(readlink -s $1/device/driver); } \
LOG.debug("no model for %s, skipping", node_name)
continue
file_path = scenario_cfg['task_path']
- with open_relative_file(file_name, file_path) as stream:
+ with utils.open_relative_file(file_name, file_path) as stream:
vnf_model = stream.read()
vnfd = vnfdgen.generate_vnfd(vnf_model, node)
# TODO: here add extra context_cfg["nodes"] regardless of template
@@ -606,6 +431,9 @@ printf "%s/driver:" $1 ; basename $(readlink -s $1/device/driver); } \
vnf.terminate()
raise
+ # we have to generate pod.yaml here after VNF has probed so we know vpci and driver
+ self._generate_pod_yaml()
+
# 3. Run experiment
# Start listeners first to avoid losing packets
for traffic_gen in traffic_runners:
diff --git a/yardstick/common/exceptions.py b/yardstick/common/exceptions.py
index 2af4c6343..41d7b8830 100644
--- a/yardstick/common/exceptions.py
+++ b/yardstick/common/exceptions.py
@@ -68,6 +68,11 @@ class HeatTemplateError(YardstickException):
class IPv6RangeError(YardstickException):
message = 'Start IP "%(start_ip)s" is greater than end IP "%(end_ip)s"'
+
+class TrafficProfileNotImplemented(YardstickException):
+ message = 'No implementation for traffic profile %(profile_class)s.'
+
+
class DPDKSetupDriverError(YardstickException):
message = '"igb_uio" driver is not loaded'
diff --git a/yardstick/common/openstack_utils.py b/yardstick/common/openstack_utils.py
index 8f666e268..e3f67baa5 100644
--- a/yardstick/common/openstack_utils.py
+++ b/yardstick/common/openstack_utils.py
@@ -33,38 +33,22 @@ DEFAULT_API_VERSION = '2'
# CREDENTIALS
# *********************************************
def get_credentials():
- """Returns a creds dictionary filled with parsed from env"""
- creds = {}
-
- keystone_api_version = os.getenv('OS_IDENTITY_API_VERSION')
-
- if keystone_api_version is None or keystone_api_version == '2':
- keystone_v3 = False
- tenant_env = 'OS_TENANT_NAME'
- tenant = 'tenant_name'
- else:
- keystone_v3 = True
- tenant_env = 'OS_PROJECT_NAME'
- tenant = 'project_name'
-
- # The most common way to pass these info to the script is to do it
- # through environment variables.
- creds.update({
- "username": os.environ.get("OS_USERNAME"),
- "password": os.environ.get("OS_PASSWORD"),
- "auth_url": os.environ.get("OS_AUTH_URL"),
- tenant: os.environ.get(tenant_env)
- })
-
- if keystone_v3:
- if os.getenv('OS_USER_DOMAIN_NAME') is not None:
- creds.update({
- "user_domain_name": os.getenv('OS_USER_DOMAIN_NAME')
- })
- if os.getenv('OS_PROJECT_DOMAIN_NAME') is not None:
- creds.update({
- "project_domain_name": os.getenv('OS_PROJECT_DOMAIN_NAME')
- })
+ """Returns a creds dictionary filled with parsed from env
+
+ Keystone API version used is 3; v2 was deprecated in 2014 (Icehouse). Along
+ with this deprecation, environment variable 'OS_TENANT_NAME' is replaced by
+ 'OS_PROJECT_NAME'.
+ """
+ creds = {'username': os.environ.get('OS_USERNAME'),
+ 'password': os.environ.get('OS_PASSWORD'),
+ 'auth_url': os.environ.get('OS_AUTH_URL'),
+ 'project_name': os.environ.get('OS_PROJECT_NAME')
+ }
+
+ if os.getenv('OS_USER_DOMAIN_NAME'):
+ creds['user_domain_name'] = os.getenv('OS_USER_DOMAIN_NAME')
+ if os.getenv('OS_PROJECT_DOMAIN_NAME'):
+ creds['project_domain_name'] = os.getenv('OS_PROJECT_DOMAIN_NAME')
return creds
@@ -294,8 +278,7 @@ def create_instance_and_wait_for_active(json_body): # pragma: no cover
VM_BOOT_TIMEOUT = 180
nova_client = get_nova_client()
instance = create_instance(json_body)
- count = VM_BOOT_TIMEOUT / SLEEP
- for _ in range(count, -1, -1):
+ for _ in range(int(VM_BOOT_TIMEOUT / SLEEP)):
status = get_instance_status(nova_client, instance)
if status.lower() == "active":
return instance
diff --git a/yardstick/common/packages.py b/yardstick/common/packages.py
new file mode 100644
index 000000000..f20217fdc
--- /dev/null
+++ b/yardstick/common/packages.py
@@ -0,0 +1,87 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import re
+
+import pip
+from pip import exceptions as pip_exceptions
+from pip.operations import freeze
+
+from yardstick.common import privsep
+
+
+LOG = logging.getLogger(__name__)
+
+ACTION_INSTALL = 'install'
+ACTION_UNINSTALL = 'uninstall'
+
+
+@privsep.yardstick_root.entrypoint
+def _pip_main(package, action, target=None):
+ if action == ACTION_UNINSTALL:
+ cmd = [action, package, '-y']
+ elif action == ACTION_INSTALL:
+ cmd = [action, package, '--upgrade']
+ if target:
+ cmd.append('--target=%s' % target)
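+ # Illustrative resulting call (hypothetical arguments): action='install',
+ # package='foo', target='/opt' ->
+ #   pip.main(['install', 'foo', '--upgrade', '--target=/opt'])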
+ return pip.main(cmd)
+
+
+def _pip_execute_action(package, action=ACTION_INSTALL, target=None):
+ """Execute an action with a PIP package.
+
+ According to [1], a package could be a URL, a local directory, a local dist
+ file or a requirements file.
+
+ [1] https://pip.pypa.io/en/stable/reference/pip_install/#argument-handling
+ """
+ try:
+ status = _pip_main(package, action, target)
+ except pip_exceptions.PipError:
+ status = 1
+
+ if not status:
+ LOG.info('Action "%s" executed, package %s', action, package)
+ else:
+ LOG.info('Error executing action "%s", package %s', action, package)
+ return status
+
+
+def pip_remove(package):
+ """Remove an installed PIP package"""
+ return _pip_execute_action(package, action=ACTION_UNINSTALL)
+
+
+def pip_install(package, target=None):
+ """Install a PIP package"""
+ return _pip_execute_action(package, action=ACTION_INSTALL, target=target)
+
+
+def pip_list(pkg_name=None):
+ """Dict of installed PIP packages with version.
+
+ If 'pkg_name' is not None, will return only those packages matching the
+ name."""
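+ # Illustrative return value (hypothetical package set):
+ #   {'pip': '9.0.1', 'PyYAML': '3.12', 'yardstick': 'a13b2f9'}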
+ pip_regex = re.compile(r"(?P<name>.*)==(?P<version>[\w\.]+)")
+ git_regex = re.compile(r".*@(?P<version>[\w]+)#egg=(?P<name>[\w]+)")
+
+ pkg_dict = {}
+ for _pkg in freeze.freeze(local_only=True):
+ match = pip_regex.match(_pkg) or git_regex.match(_pkg)
+ if match and (not pkg_name or (
+ pkg_name and match.group('name').find(pkg_name) != -1)):
+ pkg_dict[match.group('name')] = match.group('version')
+
+ return pkg_dict
diff --git a/yardstick/common/privsep.py b/yardstick/common/privsep.py
new file mode 100644
index 000000000..4ae510489
--- /dev/null
+++ b/yardstick/common/privsep.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from oslo_privsep import capabilities as c
+from oslo_privsep import priv_context
+
+yardstick_root = priv_context.PrivContext(
+ "yardstick",
+ cfg_section="yardstick_privileged",
+ pypath=__name__ + ".yardstick_root",
+ capabilities=[c.CAP_SYS_ADMIN]
+)
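+
+# Usage sketch (assumption): functions that need root privileges are decorated
+# with @yardstick_root.entrypoint, as done for _pip_main() in
+# yardstick.common.packages.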
diff --git a/yardstick/common/utils.py b/yardstick/common/utils.py
index 8604e900f..357f66be8 100644
--- a/yardstick/common/utils.py
+++ b/yardstick/common/utils.py
@@ -22,6 +22,7 @@ import ipaddress
import logging
import os
import random
+import re
import socket
import subprocess
import sys
@@ -30,6 +31,7 @@ import six
from flask import jsonify
from six.moves import configparser
from oslo_serialization import jsonutils
+from oslo_utils import encodeutils
import yardstick
@@ -64,7 +66,7 @@ def itersubclasses(cls, _seen=None):
yield sub
-def import_modules_from_package(package):
+def import_modules_from_package(package, raise_exception=False):
"""Import modules given a package name
:param: package - Full package name. For example: rally.deploy.engines
@@ -85,10 +87,27 @@ def import_modules_from_package(package):
for module_name in missing_modules:
try:
importlib.import_module(module_name)
- except (ImportError, SyntaxError):
+ except (ImportError, SyntaxError) as exc:
+ if raise_exception:
+ raise exc
logger.exception('Unable to import module %s', module_name)
+NON_NONE_DEFAULT = object()
+
+
+def get_key_with_default(data, key, default=NON_NONE_DEFAULT):
+ value = data.get(key, default)
+ if value is NON_NONE_DEFAULT:
+ raise KeyError(key)
+ return value
+
+
+def make_dict_from_map(data, key_map):
+ return {dest_key: get_key_with_default(data, src_key, default)
+ for dest_key, (src_key, default) in key_map.items()}
+
+
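+# Illustrative example (hypothetical data):
+#   make_dict_from_map({'a': 1}, {'x': ('a', 0), 'y': ('b', 2)}) -> {'x': 1, 'y': 2}
+# A source key that is missing and has no default raises KeyError.
+
+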
def makedirs(d):
try:
os.makedirs(d)
@@ -105,13 +124,12 @@ def remove_file(path):
raise
-def execute_command(cmd):
+def execute_command(cmd, **kwargs):
exec_msg = "Executing command: '%s'" % cmd
logger.debug(exec_msg)
- output = subprocess.check_output(cmd.split()).split(os.linesep)
-
- return output
+ output = subprocess.check_output(cmd.split(), **kwargs)
+ return encodeutils.safe_decode(output, incoming='utf-8').split(os.linesep)
def source_env(env_file):
@@ -395,3 +413,45 @@ class Timer(object):
def __getattr__(self, item):
return getattr(self.delta, item)
+
+
+def read_meminfo(ssh_client):
+ """Read "/proc/meminfo" file and parse all keys and values"""
+
+ cpuinfo = six.BytesIO()
+ ssh_client.get_file_obj('/proc/meminfo', cpuinfo)
+ lines = cpuinfo.getvalue().decode('utf-8')
+ matches = re.findall(r"([\w\(\)]+):\s+(\d+)( kB)*", lines)
+ output = {}
+ for match in matches:
+ output[match[0]] = match[1]
+
+ return output
+
+
+def find_relative_file(path, task_path):
+ """
+ Find a file either at the absolute path or relative to the given task path,
+ in that order.
+
+ :param path:
+ :param task_path:
+ :return str: full path to file
+ """
+ # fixme: create schema to validate all fields have been provided
+ for lookup in [os.path.abspath(path), os.path.join(task_path, path)]:
+ try:
+ with open(lookup):
+ return lookup
+ except IOError:
+ pass
+ raise IOError(errno.ENOENT, 'Unable to find {} file'.format(path))
+
+
+def open_relative_file(path, task_path):
+ try:
+ return open(path)
+ except IOError as e:
+ if e.errno == errno.ENOENT:
+ return open(os.path.join(task_path, path))
+ raise
diff --git a/yardstick/dispatcher/influxdb.py b/yardstick/dispatcher/influxdb.py
index 632b433b5..e8c7cf57b 100644
--- a/yardstick/dispatcher/influxdb.py
+++ b/yardstick/dispatcher/influxdb.py
@@ -11,8 +11,10 @@ from __future__ import absolute_import
import logging
import time
+import os
import requests
+from requests import ConnectionError
from yardstick.common import utils
from third_party.influxdb.influxdb_line_protocol import make_lines
@@ -38,7 +40,8 @@ class InfluxdbDispatcher(DispatchBase):
self.influxdb_url = "%s/write?db=%s" % (self.target, self.db_name)
- self.task_id = -1
+ self.task_id = None
+ self.tags = None
def flush_result_data(self, data):
LOG.debug('Test result all : %s', data)
@@ -57,28 +60,41 @@ class InfluxdbDispatcher(DispatchBase):
for record in data['tc_data']:
# skip results with no data because we influxdb encode empty dicts
if record.get("data"):
- self._upload_one_record(record, case, tc_criteria)
+ self.upload_one_record(record, case, tc_criteria)
return 0
- def _upload_one_record(self, data, case, tc_criteria):
+ def upload_one_record(self, data, case, tc_criteria, task_id=None):
+ if task_id:
+ self.task_id = task_id
+
+ line = self._data_to_line_protocol(data, case, tc_criteria)
+ LOG.debug('Test result line format : %s', line)
+
try:
- line = self._data_to_line_protocol(data, case, tc_criteria)
- LOG.debug('Test result line format : %s', line)
res = requests.post(self.influxdb_url,
data=line,
auth=(self.username, self.password),
timeout=self.timeout)
+ except ConnectionError as err:
+ LOG.exception('Failed to record result data: %s', err)
+ else:
if res.status_code != 204:
LOG.error('Test result posting finished with status code'
' %d.', res.status_code)
LOG.error(res.text)
- except Exception as err:
- LOG.exception('Failed to record result data: %s', err)
-
def _data_to_line_protocol(self, data, case, criteria):
msg = {}
+
+ if not self.tags:
+ self.tags = {
+ 'deploy_scenario': os.environ.get('DEPLOY_SCENARIO', 'unknown'),
+ 'installer': os.environ.get('INSTALLER_TYPE', 'unknown'),
+ 'pod_name': os.environ.get('NODE_NAME', 'unknown'),
+ 'version': os.environ.get('YARDSTICK_BRANCH', 'unknown')
+ }
+
point = {
"measurement": case,
"fields": utils.flatten_dict_key(data["data"]),
@@ -93,7 +109,7 @@ class InfluxdbDispatcher(DispatchBase):
def _get_nano_timestamp(self, results):
try:
timestamp = results["timestamp"]
- except Exception:
+ except KeyError:
timestamp = time.time()
return str(int(float(timestamp) * 1000000000))
diff --git a/yardstick/error.py b/yardstick/error.py
new file mode 100644
index 000000000..9b84de1af
--- /dev/null
+++ b/yardstick/error.py
@@ -0,0 +1,48 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class SSHError(Exception):
+ """Class handles ssh connection error exception"""
+ pass
+
+
+class SSHTimeout(SSHError):
+ """Class handles ssh connection timeout exception"""
+ pass
+
+
+class IncorrectConfig(Exception):
+ """Class handles incorrect configuration during setup"""
+ pass
+
+
+class IncorrectSetup(Exception):
+ """Class handles incorrect setup during setup"""
+ pass
+
+
+class IncorrectNodeSetup(IncorrectSetup):
+ """Class handles incorrect setup during setup"""
+ pass
+
+
+class ErrorClass(object):
+
+ def __init__(self, *args, **kwargs):
+ if 'test' not in kwargs:
+ raise RuntimeError
+
+ def __getattr__(self, item):
+ raise AttributeError
diff --git a/yardstick/network_services/constants.py b/yardstick/network_services/constants.py
new file mode 100644
index 000000000..79951e353
--- /dev/null
+++ b/yardstick/network_services/constants.py
@@ -0,0 +1,17 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+REMOTE_TMP = "/tmp"
+DEFAULT_VNF_TIMEOUT = 3600
+PROCESS_JOIN_TIMEOUT = 3
diff --git a/yardstick/network_services/helpers/dpdkbindnic_helper.py b/yardstick/network_services/helpers/dpdkbindnic_helper.py
index 8c44b26c2..05b822c2e 100644
--- a/yardstick/network_services/helpers/dpdkbindnic_helper.py
+++ b/yardstick/network_services/helpers/dpdkbindnic_helper.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2016-2017 Intel Corporation
+# Copyright (c) 2016-2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,11 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
+import os
import re
-import itertools
+from collections import defaultdict
+from itertools import chain
-import six
+from yardstick.common.utils import validate_non_string_sequence
+from yardstick.error import IncorrectConfig
+from yardstick.error import IncorrectSetup
+from yardstick.error import IncorrectNodeSetup
+from yardstick.error import SSHTimeout
+from yardstick.error import SSHError
NETWORK_KERNEL = 'network_kernel'
NETWORK_DPDK = 'network_dpdk'
@@ -25,7 +32,6 @@ CRYPTO_KERNEL = 'crypto_kernel'
CRYPTO_DPDK = 'crypto_dpdk'
CRYPTO_OTHER = 'crypto_other'
-
LOG = logging.getLogger(__name__)
@@ -33,6 +39,166 @@ class DpdkBindHelperException(Exception):
pass
+class DpdkInterface(object):
+ TOPOLOGY_REQUIRED_KEYS = frozenset({
+ "vpci", "local_ip", "netmask", "local_mac", "driver"})
+
+ def __init__(self, dpdk_node, interface):
+ super(DpdkInterface, self).__init__()
+ self.dpdk_node = dpdk_node
+ self.interface = interface
+
+ try:
+ assert self.local_mac
+ except (AssertionError, KeyError):
+ raise IncorrectConfig
+
+ @property
+ def local_mac(self):
+ return self.interface['local_mac']
+
+ @property
+ def mac_lower(self):
+ return self.local_mac.lower()
+
+ @property
+ def missing_fields(self):
+ return self.TOPOLOGY_REQUIRED_KEYS.difference(self.interface)
+
+ @staticmethod
+ def _detect_socket(netdev):
+ try:
+ socket = netdev['numa_node']
+ except KeyError:
+ # Where is this documented?
+ # It seems for dual-sockets systems the second socket PCI bridge
+ # will have an address > 0x0f, e.g.
+ # Bridge PCI->PCI (P#524320 busid=0000:80:02.0 id=8086:6f04
+ if netdev['pci_bus_id'][5] == "0":
+ socket = 0
+ else:
+ # this doesn't handle quad-sockets
+ # TODO: fix this for quad-socket
+ socket = 1
+ return socket
+
+ def probe_missing_values(self):
+ try:
+ for netdev in self.dpdk_node.netdevs.values():
+ if netdev['address'].lower() == self.mac_lower:
+ socket = self._detect_socket(netdev)
+ self.interface.update({
+ 'vpci': netdev['pci_bus_id'],
+ 'driver': netdev['driver'],
+ 'socket': socket,
+ # don't need ifindex
+ })
+
+ except KeyError:
+ # if we don't find all the keys then don't update
+ pass
+
+ except (IncorrectNodeSetup, SSHError, SSHTimeout):
+ raise IncorrectConfig(
+ "Unable to probe missing interface fields '%s', on node %s "
+ "SSH Error" % (', '.join(self.missing_fields), self.dpdk_node.node_key))
+
+
+class DpdkNode(object):
+
+ def __init__(self, node_name, interfaces, ssh_helper, timeout=120):
+ super(DpdkNode, self).__init__()
+ self.interfaces = interfaces
+ self.ssh_helper = ssh_helper
+ self.node_key = node_name
+ self.timeout = timeout
+ self._dpdk_helper = None
+ self.netdevs = {}
+
+ try:
+ self.dpdk_interfaces = {intf['name']: DpdkInterface(self, intf['virtual-interface'])
+ for intf in self.interfaces}
+ except IncorrectConfig:
+ template = "MAC address is required for all interfaces, missing on: {}"
+ errors = (intf['name'] for intf in self.interfaces if
+ 'local_mac' not in intf['virtual-interface'])
+ raise IncorrectSetup(template.format(", ".join(errors)))
+
+ @property
+ def dpdk_helper(self):
+ if not isinstance(self._dpdk_helper, DpdkBindHelper):
+ self._dpdk_helper = DpdkBindHelper(self.ssh_helper)
+ return self._dpdk_helper
+
+ @property
+ def _interface_missing_iter(self):
+ return chain.from_iterable(self._interface_missing_map.values())
+
+ @property
+ def _interface_missing_map(self):
+ return {name: intf.missing_fields for name, intf in self.dpdk_interfaces.items()}
+
+ def _probe_netdevs(self):
+ self.netdevs.update(self.dpdk_helper.find_net_devices())
+
+ def _force_rebind(self):
+ return self.dpdk_helper.force_dpdk_rebind()
+
+ def _probe_dpdk_drivers(self):
+ self.dpdk_helper.probe_real_kernel_drivers()
+ for pci, driver in self.dpdk_helper.real_kernel_interface_driver_map.items():
+ for intf in self.interfaces:
+ vintf = intf['virtual-interface']
+ # naive substring match on the PCI address
+ # update the interface dict here, not netdev
+ if vintf['vpci'].endswith(pci):
+ vintf['driver'] = driver
+ # we can't update netdevs because we may not have netdev info
+
+ def _probe_missing_values(self):
+ for intf in self.dpdk_interfaces.values():
+ intf.probe_missing_values()
+
+ def check(self):
+ # only ssh probe if there are missing values
+ # ssh probe won't work on Ixia, so we had better define all our values
+ try:
+ missing_fields_set = set(self._interface_missing_iter)
+
+ # if only the driver is missing we may be able to get it from the
+ # kernel modules; this requires vpci
+ if missing_fields_set == {'driver'}:
+ self._probe_dpdk_drivers()
+ # we can't reprobe missing values because we may not have netdev info
+
+ # if any other fields are missing we have to probe the netdevs
+ if missing_fields_set.difference({'driver'}):
+ self._probe_netdevs()
+ try:
+ self._probe_missing_values()
+ except IncorrectConfig:
+ # ignore for now
+ pass
+
+ # check again and verify we have all the fields
+ if set(self._interface_missing_iter):
+ # last chance fallback, rebind everything and probe
+ # this probably won't work
+ self._force_rebind()
+ self._probe_netdevs()
+ self._probe_missing_values()
+
+ errors = ("{} missing: {}".format(name, ", ".join(missing_fields)) for
+ name, missing_fields in self._interface_missing_map.items() if
+ missing_fields)
+ errors = "\n".join(errors)
+ if errors:
+ raise IncorrectSetup(errors)
+
+ finally:
+ self._dpdk_helper = None
+
+
class DpdkBindHelper(object):
DPDK_STATUS_CMD = "{dpdk_devbind} --status"
DPDK_BIND_CMD = "sudo {dpdk_devbind} {force} -b {driver} {vpci}"
@@ -42,6 +208,8 @@ class DpdkBindHelper(object):
SKIP_RE = re.compile('(====|<none>|^$)')
NIC_ROW_FIELDS = ['vpci', 'dev_type', 'iface', 'driver', 'unused', 'active']
+ UIO_DRIVER = "uio"
+
HEADER_DICT_PAIRS = [
(re.compile('^Network.*DPDK.*$'), NETWORK_DPDK),
(re.compile('^Network.*kernel.*$'), NETWORK_KERNEL),
@@ -51,6 +219,42 @@ class DpdkBindHelper(object):
(re.compile('^Other crypto.*$'), CRYPTO_OTHER),
]
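+ # Shell one-liner run on the remote node: for every PCI network device it
+ # greps the sysfs attributes (ifindex, address, operstate, vendor/device ids,
+ # numa_node) and prints the bound driver, one "path:value" pair per line.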
+ FIND_NETDEVICE_STRING = r"""\
+find /sys/devices/pci* -type d -name net -exec sh -c '{ grep -sH ^ \
+$1/ifindex $1/address $1/operstate $1/device/vendor $1/device/device \
+$1/device/subsystem_vendor $1/device/subsystem_device $1/device/numa_node ; \
+printf "%s/driver:" $1 ; basename $(readlink -s $1/device/driver); } \
+' sh \{\}/* \;
+"""
+
+ BASE_ADAPTER_RE = re.compile('^/sys/devices/(.*)/net/([^/]*)/([^:]*):(.*)$', re.M)
+ DPDK_DEVBIND = "dpdk-devbind.py"
+
+ @classmethod
+ def parse_netdev_info(cls, stdout):
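+ # Turn the "path:value" lines produced by FIND_NETDEVICE_STRING into a
+ # dict keyed by interface name, roughly (illustrative values only):
+ #   {'eth0': {'address': 'fa:16:3e:..', 'driver': 'virtio-pci',
+ #             'pci_bus_id': '0000:00:03.0', 'interface_name': 'eth0', ...}}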
+ network_devices = defaultdict(dict)
+ match_iter = (match.groups() for match in cls.BASE_ADAPTER_RE.finditer(stdout))
+ for bus_path, interface_name, name, value in match_iter:
+ dir_name, bus_id = os.path.split(bus_path)
+ if 'virtio' in bus_id:
+ # VMs include virtio1/ in the PCI device path,
+ # so fall back to the parent directory for the bus id
+ bus_id = os.path.basename(dir_name)
+
+ # remove extra 'device/' from 'device/vendor,
+ # device/subsystem_vendor', etc.
+ if 'device' in name:
+ name = name.split('/')[1]
+
+ network_devices[interface_name].update({
+ name: value,
+ 'interface_name': interface_name,
+ 'pci_bus_id': bus_id,
+ })
+
+ # convert back to regular dict
+ return dict(network_devices)
+
def clean_status(self):
self.dpdk_status = {
NETWORK_KERNEL: [],
@@ -61,11 +265,17 @@ class DpdkBindHelper(object):
CRYPTO_OTHER: [],
}
- def __init__(self, ssh_helper):
+ # TODO: add support for drivers other than igb_uio
+ def __init__(self, ssh_helper, dpdk_driver="igb_uio"):
+ self.ssh_helper = ssh_helper
+ self.real_kernel_interface_driver_map = {}
+ self.dpdk_driver = dpdk_driver
self.dpdk_status = None
self.status_nic_row_re = None
- self._dpdk_devbind = None
+ self.dpdk_devbind = self.ssh_helper.join_bin_path(self.DPDK_DEVBIND)
self._status_cmd_attr = None
+ self.used_drivers = None
+ self.real_kernel_drivers = {}
self.ssh_helper = ssh_helper
self.clean_status()
@@ -73,15 +283,16 @@ class DpdkBindHelper(object):
def _dpdk_execute(self, *args, **kwargs):
res = self.ssh_helper.execute(*args, **kwargs)
if res[0] != 0:
- raise DpdkBindHelperException('{} command failed with rc={}'.format(
- self.dpdk_devbind, res[0]))
+ template = '{} command failed with rc={}'
+ raise DpdkBindHelperException(template.format(self.dpdk_devbind, res[0]))
return res
- @property
- def dpdk_devbind(self):
- if self._dpdk_devbind is None:
- self._dpdk_devbind = self.ssh_helper.provision_tool(tool_file="dpdk-devbind.py")
- return self._dpdk_devbind
+ def load_dpdk_driver(self):
+ cmd_template = "sudo modprobe {} && sudo modprobe {}"
+ self.ssh_helper.execute(cmd_template.format(self.UIO_DRIVER, self.dpdk_driver))
+
+ def check_dpdk_driver(self):
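+ # lsmod exit status: 0 if the DPDK driver module is already loaded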
+ return self.ssh_helper.execute("lsmod | grep -i {}".format(self.dpdk_driver))[0]
@property
def _status_cmd(self):
@@ -89,12 +300,14 @@ class DpdkBindHelper(object):
self._status_cmd_attr = self.DPDK_STATUS_CMD.format(dpdk_devbind=self.dpdk_devbind)
return self._status_cmd_attr
- def _addline(self, active_list, line):
+ def _add_line(self, active_list, line):
if active_list is None:
return
+
res = self.NIC_ROW_RE.match(line)
if res is None:
return
+
new_data = {k: v for k, v in zip(self.NIC_ROW_FIELDS, res.groups())}
new_data['active'] = bool(new_data['active'])
self.dpdk_status[active_list].append(new_data)
@@ -106,14 +319,14 @@ class DpdkBindHelper(object):
return a_dict
return active_dict
- def parse_dpdk_status_output(self, input):
+ def _parse_dpdk_status_output(self, output):
active_dict = None
self.clean_status()
- for a_row in input.splitlines():
+ for a_row in output.splitlines():
if self.SKIP_RE.match(a_row):
continue
active_dict = self._switch_active_dict(a_row, active_dict)
- self._addline(active_dict, a_row)
+ self._add_line(active_dict, a_row)
return self.dpdk_status
def _get_bound_pci_addresses(self, active_dict):
@@ -130,31 +343,85 @@ class DpdkBindHelper(object):
@property
def interface_driver_map(self):
return {interface['vpci']: interface['driver']
- for interface in itertools.chain.from_iterable(self.dpdk_status.values())}
+ for interface in chain.from_iterable(self.dpdk_status.values())}
def read_status(self):
- return self.parse_dpdk_status_output(self._dpdk_execute(self._status_cmd)[1])
+ return self._parse_dpdk_status_output(self._dpdk_execute(self._status_cmd)[1])
+
+ def find_net_devices(self):
+ exit_status, stdout, _ = self.ssh_helper.execute(self.FIND_NETDEVICE_STRING)
+ if exit_status != 0:
+ return {}
+
+ return self.parse_netdev_info(stdout)
def bind(self, pci_addresses, driver, force=True):
- # accept single PCI or list of PCI
- if isinstance(pci_addresses, six.string_types):
- pci_addresses = [pci_addresses]
+ # accept single PCI or sequence of PCI
+ pci_addresses = validate_non_string_sequence(pci_addresses, [pci_addresses])
+
cmd = self.DPDK_BIND_CMD.format(dpdk_devbind=self.dpdk_devbind,
driver=driver,
vpci=' '.join(list(pci_addresses)),
force='--force' if force else '')
LOG.debug(cmd)
self._dpdk_execute(cmd)
+
# update the inner status dict
self.read_status()
+ def probe_real_kernel_drivers(self):
+ self.read_status()
+ self.save_real_kernel_interface_driver_map()
+
+ def force_dpdk_rebind(self):
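+ # rebind every DPDK-bound port back to its original kernel driver so the
+ # kernel re-creates the netdevs and the missing fields can be probed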
+ self.load_dpdk_driver()
+ self.read_status()
+ self.save_real_kernel_interface_driver_map()
+ self.save_used_drivers()
+
+ real_driver_map = {}
+ # only rebind devices that are bound to DPDK
+ for pci in self.dpdk_bound_pci_addresses:
+ # group DPDK-bound PCI addresses by their original kernel driver
+ real_driver = self.real_kernel_interface_driver_map[pci]
+ real_driver_map.setdefault(real_driver, []).append(pci)
+ for real_driver, pcis in real_driver_map.items():
+ self.bind(pcis, real_driver, force=True)
+
def save_used_drivers(self):
# invert the map, so we can bind by driver type
self.used_drivers = {}
- # sort for stabililty
+ # sort for stability
for vpci, driver in sorted(self.interface_driver_map.items()):
self.used_drivers.setdefault(driver, []).append(vpci)
+ KERNEL_DRIVER_RE = re.compile(r"Kernel modules: (\S+)", re.M)
+ VIRTIO_DRIVER_RE = re.compile(r"Ethernet.*Virtio network device", re.M)
+ VIRTIO_DRIVER = "virtio-pci"
+
+ def save_real_kernel_drivers(self):
+ # invert the map, so we can bind by driver type
+ self.real_kernel_drivers = {}
+ # sort for stability
+ for vpci, driver in sorted(self.real_kernel_interface_driver_map.items()):
+ self.real_kernel_drivers.setdefault(driver, []).append(vpci)
+
+ def get_real_kernel_driver(self, pci):
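+ # parse the "Kernel modules:" line from 'lspci -k'; if it is absent but
+ # the device is a Virtio NIC, assume virtio-pci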
+ out = self.ssh_helper.execute('lspci -k -s %s' % pci)[1]
+ match = self.KERNEL_DRIVER_RE.search(out)
+ if match:
+ return match.group(1)
+
+ match = self.VIRTIO_DRIVER_RE.search(out)
+ if match:
+ return self.VIRTIO_DRIVER
+
+ return None
+
+ def save_real_kernel_interface_driver_map(self):
+ iter1 = ((pci, self.get_real_kernel_driver(pci)) for pci in self.interface_driver_map)
+ self.real_kernel_interface_driver_map = {pci: driver for pci, driver in iter1 if driver}
+
def rebind_drivers(self, force=True):
for driver, vpcis in self.used_drivers.items():
self.bind(vpcis, driver, force)
diff --git a/yardstick/network_services/traffic_profile/__init__.py b/yardstick/network_services/traffic_profile/__init__.py
index e69de29bb..356b36bd9 100644
--- a/yardstick/network_services/traffic_profile/__init__.py
+++ b/yardstick/network_services/traffic_profile/__init__.py
@@ -0,0 +1,33 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import importlib
+
+
+def register_modules():
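+ # importing the modules registers the TrafficProfile subclasses so that
+ # TrafficProfile.get() can discover them via utils.itersubclasses()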
+ modules = [
+ 'yardstick.network_services.traffic_profile.trex_traffic_profile',
+ 'yardstick.network_services.traffic_profile.fixed',
+ 'yardstick.network_services.traffic_profile.http',
+ 'yardstick.network_services.traffic_profile.http_ixload',
+ 'yardstick.network_services.traffic_profile.ixia_rfc2544',
+ 'yardstick.network_services.traffic_profile.prox_ACL',
+ 'yardstick.network_services.traffic_profile.prox_binsearch',
+ 'yardstick.network_services.traffic_profile.prox_profile',
+ 'yardstick.network_services.traffic_profile.prox_ramp',
+ 'yardstick.network_services.traffic_profile.rfc2544',
+ ]
+
+ for module in modules:
+ importlib.import_module(module)
diff --git a/yardstick/network_services/traffic_profile/base.py b/yardstick/network_services/traffic_profile/base.py
index ad256b444..162bab2bc 100644
--- a/yardstick/network_services/traffic_profile/base.py
+++ b/yardstick/network_services/traffic_profile/base.py
@@ -11,10 +11,9 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-""" Base class for the generic traffic profile implementation """
-from __future__ import absolute_import
-from yardstick.common.utils import import_modules_from_package, itersubclasses
+from yardstick.common import exceptions
+from yardstick.common import utils
class TrafficProfile(object):
@@ -33,13 +32,12 @@ class TrafficProfile(object):
:return:
"""
profile_class = tp_config["traffic_profile"]["traffic_type"]
- import_modules_from_package(
- "yardstick.network_services.traffic_profile")
try:
- return next(c for c in itersubclasses(TrafficProfile)
+ return next(c for c in utils.itersubclasses(TrafficProfile)
if c.__name__ == profile_class)(tp_config)
except StopIteration:
- raise RuntimeError("No implementation for %s", profile_class)
+ raise exceptions.TrafficProfileNotImplemented(
+ profile_class=profile_class)
def __init__(self, tp_config):
# e.g. RFC2544 start_ip, stop_ip, drop_rate,
diff --git a/yardstick/network_services/traffic_profile/ixia_rfc2544.py b/yardstick/network_services/traffic_profile/ixia_rfc2544.py
index 3ab157dc7..7f047226b 100644
--- a/yardstick/network_services/traffic_profile/ixia_rfc2544.py
+++ b/yardstick/network_services/traffic_profile/ixia_rfc2544.py
@@ -15,7 +15,7 @@
from __future__ import absolute_import
import logging
-from yardstick.network_services.traffic_profile.traffic_profile import \
+from yardstick.network_services.traffic_profile.trex_traffic_profile import \
TrexProfile
LOG = logging.getLogger(__name__)
diff --git a/yardstick/network_services/traffic_profile/prox_binsearch.py b/yardstick/network_services/traffic_profile/prox_binsearch.py
index 1fd6ec41a..5700f98e5 100644
--- a/yardstick/network_services/traffic_profile/prox_binsearch.py
+++ b/yardstick/network_services/traffic_profile/prox_binsearch.py
@@ -16,6 +16,8 @@
from __future__ import absolute_import
import logging
+import datetime
+import time
from yardstick.network_services.traffic_profile.prox_profile import ProxProfile
@@ -81,19 +83,66 @@ class ProxBinSearchProfile(ProxProfile):
# success, the binary search will complete on an integer multiple
# of the precision, rather than on a fraction of it.
+ theor_max_thruput = 0
+
+ result_samples = {}
+
+ # Store one time only value in influxdb
+ single_samples = {
+ "test_duration" : traffic_gen.scenario_helper.scenario_cfg["runner"]["duration"],
+ "test_precision" : self.params["traffic_profile"]["test_precision"],
+ "tolerated_loss" : self.params["traffic_profile"]["tolerated_loss"],
+ "duration" : duration
+ }
+ self.queue.put(single_samples)
+ self.prev_time = time.time()
+
# throughput and packet loss from the most recent successful test
successful_pkt_loss = 0.0
for test_value in self.bounds_iterator(LOG):
result, port_samples = self._profile_helper.run_test(pkt_size, duration,
test_value, self.tolerated_loss)
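+ # wall-clock time since the previous iteration, used below to convert
+ # cumulative packet counters into per-second rates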
+ self.curr_time = time.time()
+ diff_time = self.curr_time - self.prev_time
+ self.prev_time = self.curr_time
if result.success:
LOG.debug("Success! Increasing lower bound")
self.current_lower = test_value
successful_pkt_loss = result.pkt_loss
+ samples = result.get_samples(pkt_size, successful_pkt_loss, port_samples)
+ samples["TxThroughput"] = samples["TxThroughput"] * 1000 * 1000
+
+ # store results with success tag in influxdb
+ success_samples = {'Success_' + key: value for key, value in samples.items()}
+
+ success_samples["Success_rx_total"] = int(result.rx_total / diff_time)
+ success_samples["Success_tx_total"] = int(result.tx_total / diff_time)
+ success_samples["Success_can_be_lost"] = int(result.can_be_lost / diff_time)
+ success_samples["Success_drop_total"] = int(result.drop_total / diff_time)
+ self.queue.put(success_samples)
+
+ # Store Actual throughput for result samples
+ result_samples["Result_Actual_throughput"] = \
+ success_samples["Success_RxThroughput"]
else:
LOG.debug("Failure... Decreasing upper bound")
self.current_upper = test_value
+ samples = result.get_samples(pkt_size, successful_pkt_loss, port_samples)
+
+ for k in samples:
+ tmp = samples[k]
+ if isinstance(tmp, dict):
+ for k2 in tmp:
+ samples[k][k2] = int(samples[k][k2] / diff_time)
- samples = result.get_samples(pkt_size, successful_pkt_loss, port_samples)
+ if theor_max_thruput < samples["TxThroughput"]:
+ theor_max_thruput = samples["TxThroughput"]
+ self.queue.put({'theor_max_throughput': theor_max_thruput})
+
+ LOG.debug("Collect TG KPIs %s %s", datetime.datetime.now(), samples)
self.queue.put(samples)
+
+ result_samples["Result_pktSize"] = pkt_size
+ result_samples["Result_theor_max_throughput"] = theor_max_thruput/ (1000 * 1000)
+ self.queue.put(result_samples)
diff --git a/yardstick/network_services/traffic_profile/rfc2544.py b/yardstick/network_services/traffic_profile/rfc2544.py
index b1ca8a345..83020c85c 100644
--- a/yardstick/network_services/traffic_profile/rfc2544.py
+++ b/yardstick/network_services/traffic_profile/rfc2544.py
@@ -21,7 +21,7 @@ from trex_stl_lib.trex_stl_client import STLStream
from trex_stl_lib.trex_stl_streams import STLFlowLatencyStats
from trex_stl_lib.trex_stl_streams import STLTXCont
-from yardstick.network_services.traffic_profile.traffic_profile \
+from yardstick.network_services.traffic_profile.trex_traffic_profile \
import TrexProfile
LOGGING = logging.getLogger(__name__)
diff --git a/yardstick/network_services/traffic_profile/traffic_profile.py b/yardstick/network_services/traffic_profile/trex_traffic_profile.py
index 8cde5e4a7..f5e3923d5 100644
--- a/yardstick/network_services/traffic_profile/traffic_profile.py
+++ b/yardstick/network_services/traffic_profile/trex_traffic_profile.py
@@ -21,7 +21,7 @@ import ipaddress
import six
from yardstick.common import exceptions as y_exc
-from yardstick.network_services.traffic_profile.base import TrafficProfile
+from yardstick.network_services.traffic_profile import base
from trex_stl_lib.trex_stl_client import STLStream
from trex_stl_lib.trex_stl_streams import STLFlowLatencyStats
from trex_stl_lib.trex_stl_streams import STLTXCont
@@ -48,7 +48,7 @@ TYPE_OF_SERVICE = 'tos'
LOG = logging.getLogger(__name__)
-class TrexProfile(TrafficProfile):
+class TrexProfile(base.TrafficProfile):
""" This class handles Trex Traffic profile generation and execution """
PROTO_MAP = {
@@ -127,7 +127,7 @@ class TrexProfile(TrafficProfile):
self.vm_flow_vars.append(stl_vm_wr_flow_var)
return partial
- def _dscp_range_action_partial(self, *_):
+ def _dscp_range_action_partial(self, *args):
def partial(min_value, max_value, count):
# pylint: disable=unused-argument
stl_vm_flow_var = STLVmFlowVar(name="dscp",
diff --git a/yardstick/network_services/utils.py b/yardstick/network_services/utils.py
index 7a1815eb9..4b987fafe 100644
--- a/yardstick/network_services/utils.py
+++ b/yardstick/network_services/utils.py
@@ -121,7 +121,6 @@ def provision_tool(connection, tool_path, tool_file=None):
tool_path = get_nsb_option('tool_path')
if tool_file:
tool_path = os.path.join(tool_path, tool_file)
- bin_path = get_nsb_option("bin_path")
exit_status = connection.execute("which %s > /dev/null 2>&1" % tool_path)[0]
if exit_status == 0:
return encodeutils.safe_decode(tool_path, incoming='utf-8').rstrip()
diff --git a/yardstick/network_services/vnf_generic/vnf/acl_vnf.py b/yardstick/network_services/vnf_generic/vnf/acl_vnf.py
index 1390dd02e..f3cafef7a 100644
--- a/yardstick/network_services/vnf_generic/vnf/acl_vnf.py
+++ b/yardstick/network_services/vnf_generic/vnf/acl_vnf.py
@@ -12,11 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from __future__ import absolute_import
-from __future__ import print_function
import logging
-from yardstick.benchmark.scenarios.networking.vnf_generic import find_relative_file
+from yardstick.common import utils
from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNF, DpdkVnfSetupEnvHelper
from yardstick.network_services.yang_model import YangModel
@@ -62,8 +60,9 @@ class AclApproxVnf(SampleVNF):
self.acl_rules = None
def _start_vnf(self):
- yang_model_path = find_relative_file(self.scenario_helper.options['rules'],
- self.scenario_helper.task_path)
+ yang_model_path = utils.find_relative_file(
+ self.scenario_helper.options['rules'],
+ self.scenario_helper.task_path)
yang_model = YangModel(yang_model_path)
self.acl_rules = yang_model.get_rules()
super(AclApproxVnf, self)._start_vnf()
diff --git a/yardstick/network_services/vnf_generic/vnf/prox_helpers.py b/yardstick/network_services/vnf_generic/vnf/prox_helpers.py
index 285ead3b6..29f9c7bba 100644
--- a/yardstick/network_services/vnf_generic/vnf/prox_helpers.py
+++ b/yardstick/network_services/vnf_generic/vnf/prox_helpers.py
@@ -30,7 +30,6 @@ import six
from six.moves import cStringIO
from six.moves import zip, StringIO
-from yardstick.benchmark.scenarios.networking.vnf_generic import find_relative_file
from yardstick.common import utils
from yardstick.common.utils import SocketTopology, join_non_strings, try_int
from yardstick.network_services.helpers.iniparser import ConfigParser
@@ -798,7 +797,7 @@ class ProxDpdkVnfSetupEnvHelper(DpdkVnfSetupEnvHelper):
options = self.scenario_helper.options
config_path = options['prox_config']
config_file = os.path.basename(config_path)
- config_path = find_relative_file(config_path, task_path)
+ config_path = utils.find_relative_file(config_path, task_path)
self.additional_files = {}
try:
@@ -815,7 +814,7 @@ class ProxDpdkVnfSetupEnvHelper(DpdkVnfSetupEnvHelper):
prox_files = [prox_files]
for key_prox_file in prox_files:
base_prox_file = os.path.basename(key_prox_file)
- key_prox_path = find_relative_file(key_prox_file, task_path)
+ key_prox_path = utils.find_relative_file(key_prox_file, task_path)
remote_prox_file = self.copy_to_target(key_prox_path, base_prox_file)
self.additional_files[base_prox_file] = remote_prox_file
@@ -929,6 +928,7 @@ class ProxResourceHelper(ClientResourceHelper):
func = getattr(self.sut, cmd, None)
if func:
return func(*args, **kwargs)
+ return None
def _connect(self, client=None):
"""Run and connect to prox on the remote system """
@@ -1005,11 +1005,18 @@ class ProxDataHelper(object):
def samples(self):
samples = {}
for port_name, port_num in self.vnfd_helper.ports_iter():
- port_rx_total, port_tx_total = self.sut.port_stats([port_num])[6:8]
- samples[port_name] = {
- "in_packets": port_rx_total,
- "out_packets": port_tx_total,
- }
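+ # port statistics may be unavailable or malformed mid-test; report
+ # zeroed counters rather than aborting sample collection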
+ try:
+ port_rx_total, port_tx_total = self.sut.port_stats([port_num])[6:8]
+ samples[port_name] = {
+ "in_packets": port_rx_total,
+ "out_packets": port_tx_total,
+ }
+ except (KeyError, TypeError, NameError, MemoryError, ValueError,
+ SystemError, BufferError):
+ samples[port_name] = {
+ "in_packets": 0,
+ "out_packets": 0,
+ }
return samples
def __enter__(self):
@@ -1127,7 +1134,7 @@ class ProxProfileHelper(object):
for key, value in section:
if key == "mode" and value == mode:
core_tuple = CoreSocketTuple(section_name)
- core = core_tuple.find_in_topology(self.cpu_topology)
+ core = core_tuple.core_id
cores.append(core)
return cores
@@ -1149,6 +1156,10 @@ class ProxProfileHelper(object):
:return: return lat_min, lat_max, lat_avg
:rtype: list
"""
+
+ if not self._latency_cores:
+ self._latency_cores = self.get_cores(self.PROX_CORE_LAT_MODE)
+
if self._latency_cores:
return self.sut.lat_stats(self._latency_cores)
return []
@@ -1198,12 +1209,12 @@ class ProxMplsProfileHelper(ProxProfileHelper):
if item_value.startswith("tag"):
core_tuple = CoreSocketTuple(section_name)
- core_tag = core_tuple.find_in_topology(self.cpu_topology)
+ core_tag = core_tuple.core_id
cores_tagged.append(core_tag)
elif item_value.startswith("udp"):
core_tuple = CoreSocketTuple(section_name)
- core_udp = core_tuple.find_in_topology(self.cpu_topology)
+ core_udp = core_tuple.core_id
cores_plain.append(core_udp)
return cores_tagged, cores_plain
@@ -1276,23 +1287,23 @@ class ProxBngProfileHelper(ProxProfileHelper):
if item_value.startswith("cpe"):
core_tuple = CoreSocketTuple(section_name)
- cpe_core = core_tuple.find_in_topology(self.cpu_topology)
+ cpe_core = core_tuple.core_id
cpe_cores.append(cpe_core)
elif item_value.startswith("inet"):
core_tuple = CoreSocketTuple(section_name)
- inet_core = core_tuple.find_in_topology(self.cpu_topology)
+ inet_core = core_tuple.core_id
inet_cores.append(inet_core)
elif item_value.startswith("arp"):
core_tuple = CoreSocketTuple(section_name)
- arp_core = core_tuple.find_in_topology(self.cpu_topology)
+ arp_core = core_tuple.core_id
arp_cores.append(arp_core)
# We check the tasks/core separately
if item_value.startswith("arp_task"):
core_tuple = CoreSocketTuple(section_name)
- arp_task_core = core_tuple.find_in_topology(self.cpu_topology)
+ arp_task_core = core_tuple.core_id
arp_tasks_core.append(arp_task_core)
return cpe_cores, inet_cores, arp_cores, arp_tasks_core
@@ -1455,12 +1466,12 @@ class ProxVpeProfileHelper(ProxProfileHelper):
if item_value.startswith("cpe"):
core_tuple = CoreSocketTuple(section_name)
- core_tag = core_tuple.find_in_topology(self.cpu_topology)
+ core_tag = core_tuple.core_id
cpe_cores.append(core_tag)
elif item_value.startswith("inet"):
core_tuple = CoreSocketTuple(section_name)
- inet_core = core_tuple.find_in_topology(self.cpu_topology)
+ inet_core = core_tuple.core_id
inet_cores.append(inet_core)
return cpe_cores, inet_cores
@@ -1639,7 +1650,7 @@ class ProxlwAFTRProfileHelper(ProxProfileHelper):
continue
core_tuple = CoreSocketTuple(section_name)
- core_tag = core_tuple.find_in_topology(self.cpu_topology)
+ core_tag = core_tuple.core_id
for item_value in (v for k, v in section if k == 'name'):
if item_value.startswith('tun'):
tun_cores.append(core_tag)
diff --git a/yardstick/network_services/vnf_generic/vnf/prox_vnf.py b/yardstick/network_services/vnf_generic/vnf/prox_vnf.py
index b7d295eee..2cdb3f904 100644
--- a/yardstick/network_services/vnf_generic/vnf/prox_vnf.py
+++ b/yardstick/network_services/vnf_generic/vnf/prox_vnf.py
@@ -14,12 +14,15 @@
import errno
import logging
+import datetime
+import time
from yardstick.common.process import check_if_process_failed
from yardstick.network_services.vnf_generic.vnf.prox_helpers import ProxDpdkVnfSetupEnvHelper
from yardstick.network_services.vnf_generic.vnf.prox_helpers import ProxResourceHelper
-from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNF, PROCESS_JOIN_TIMEOUT
+from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNF
+from yardstick.network_services.constants import PROCESS_JOIN_TIMEOUT
LOG = logging.getLogger(__name__)
@@ -39,6 +42,9 @@ class ProxApproxVnf(SampleVNF):
if resource_helper_type is None:
resource_helper_type = ProxResourceHelper
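+ # totals from the previous KPI poll, used to derive per-second rates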
+ self.prev_packets_in = 0
+ self.prev_packets_sent = 0
+ self.prev_time = time.time()
super(ProxApproxVnf, self).__init__(name, vnfd, setup_env_helper_type,
resource_helper_type)
@@ -79,12 +85,13 @@ class ProxApproxVnf(SampleVNF):
raise RuntimeError("Failed ..Invalid no of ports .. "
"1, 2 or 4 ports only supported at this time")
- port_stats = self.vnf_execute('port_stats', range(port_count))
+ self.port_stats = self.vnf_execute('port_stats', range(port_count))
+ curr_time = time.time()
try:
- rx_total = port_stats[6]
- tx_total = port_stats[7]
+ rx_total = self.port_stats[6]
+ tx_total = self.port_stats[7]
except IndexError:
- LOG.error("port_stats parse fail %s", port_stats)
+ LOG.debug("port_stats parse fail ")
# return empty dict so we don't mess up existing KPIs
return {}
@@ -96,7 +103,17 @@ class ProxApproxVnf(SampleVNF):
# collectd KPIs here and not TG KPIs, so use a different method name
"collect_stats": self.resource_helper.collect_collectd_kpi(),
}
- LOG.debug("%s collect KPIs %s", self.APP_NAME, result)
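+ # convert the cumulative rx/tx totals into per-second rates over the
+ # interval since the previous poll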
+ curr_packets_in = int((rx_total - self.prev_packets_in) / (curr_time - self.prev_time))
+ curr_packets_fwd = int((tx_total - self.prev_packets_sent) / (curr_time - self.prev_time))
+
+ result["curr_packets_in"] = curr_packets_in
+ result["curr_packets_fwd"] = curr_packets_fwd
+
+ self.prev_packets_in = rx_total
+ self.prev_packets_sent = tx_total
+ self.prev_time = curr_time
+
+ LOG.debug("%s collect KPIs %s %s", self.APP_NAME, datetime.datetime.now(), result)
return result
def _tear_down(self):
diff --git a/yardstick/network_services/vnf_generic/vnf/sample_vnf.py b/yardstick/network_services/vnf_generic/vnf/sample_vnf.py
index d57d7e601..f16b4142d 100644
--- a/yardstick/network_services/vnf_generic/vnf/sample_vnf.py
+++ b/yardstick/network_services/vnf_generic/vnf/sample_vnf.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2016-2017 Intel Corporation
+# Copyright (c) 2016-2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,78 +16,39 @@
from collections import Mapping
import logging
from multiprocessing import Queue, Value, Process
+
import os
import posixpath
import re
-from six.moves import cStringIO
import subprocess
import time
+import six
+
from trex_stl_lib.trex_stl_client import LoggerApi
from trex_stl_lib.trex_stl_client import STLClient
from trex_stl_lib.trex_stl_exceptions import STLError
from yardstick.benchmark.contexts.base import Context
-from yardstick.benchmark.scenarios.networking.vnf_generic import find_relative_file
from yardstick.common import exceptions as y_exceptions
from yardstick.common.process import check_if_process_failed
-from yardstick.network_services.helpers.dpdkbindnic_helper import DpdkBindHelper
-from yardstick.network_services.helpers.samplevnf_helper import PortPairs
+from yardstick.common import utils
+from yardstick.network_services.constants import DEFAULT_VNF_TIMEOUT
+from yardstick.network_services.constants import PROCESS_JOIN_TIMEOUT
+from yardstick.network_services.constants import REMOTE_TMP
+from yardstick.network_services.helpers.dpdkbindnic_helper import DpdkBindHelper, DpdkNode
from yardstick.network_services.helpers.samplevnf_helper import MultiPortConfig
+from yardstick.network_services.helpers.samplevnf_helper import PortPairs
from yardstick.network_services.nfvi.resource import ResourceProfile
from yardstick.network_services.utils import get_nsb_option
-from yardstick.network_services.vnf_generic.vnf.base import GenericVNF
from yardstick.network_services.vnf_generic.vnf.base import GenericTrafficGen
+from yardstick.network_services.vnf_generic.vnf.base import GenericVNF
from yardstick.network_services.vnf_generic.vnf.base import QueueFileWrapper
-from yardstick.ssh import AutoConnectSSH
+from yardstick.network_services.vnf_generic.vnf.vnf_ssh_helper import VnfSshHelper
-DPDK_VERSION = "dpdk-16.07"
-
LOG = logging.getLogger(__name__)
-REMOTE_TMP = "/tmp"
-DEFAULT_VNF_TIMEOUT = 3600
-PROCESS_JOIN_TIMEOUT = 3
-
-
-class VnfSshHelper(AutoConnectSSH):
-
- def __init__(self, node, bin_path, wait=None):
- self.node = node
- kwargs = self.args_from_node(self.node)
- if wait:
- kwargs.setdefault('wait', wait)
-
- super(VnfSshHelper, self).__init__(**kwargs)
- self.bin_path = bin_path
-
- @staticmethod
- def get_class():
- # must return static class name, anything else refers to the calling class
- # i.e. the subclass, not the superclass
- return VnfSshHelper
-
- def copy(self):
- # this copy constructor is different from SSH classes, since it uses node
- return self.get_class()(self.node, self.bin_path)
-
- def upload_config_file(self, prefix, content):
- cfg_file = os.path.join(REMOTE_TMP, prefix)
- LOG.debug(content)
- file_obj = cStringIO(content)
- self.put_file_obj(file_obj, cfg_file)
- return cfg_file
-
- def join_bin_path(self, *args):
- return os.path.join(self.bin_path, *args)
-
- def provision_tool(self, tool_path=None, tool_file=None):
- if tool_path is None:
- tool_path = self.bin_path
- return super(VnfSshHelper, self).provision_tool(tool_path, tool_file)
-
-
class SetupEnvHelper(object):
CFG_CONFIG = os.path.join(REMOTE_TMP, "sample_config")
@@ -119,6 +80,8 @@ class DpdkVnfSetupEnvHelper(SetupEnvHelper):
APP_NAME = 'DpdkVnf'
FIND_NET_CMD = "find /sys/class/net -lname '*{}*' -printf '%f'"
+ NR_HUGEPAGES_PATH = '/proc/sys/vm/nr_hugepages'
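+ # total hugepage memory to reserve, expressed in kB (i.e. 16 GB)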
+ HUGEPAGES_KB = 1024 * 1024 * 16
@staticmethod
def _update_packet_type(ip_pipeline_cfg, traffic_options):
@@ -155,24 +118,22 @@ class DpdkVnfSetupEnvHelper(SetupEnvHelper):
self.dpdk_bind_helper = DpdkBindHelper(ssh_helper)
def _setup_hugepages(self):
- cmd = "awk '/Hugepagesize/ { print $2$3 }' < /proc/meminfo"
- hugepages = self.ssh_helper.execute(cmd)[1].rstrip()
-
- memory_path = \
- '/sys/kernel/mm/hugepages/hugepages-%s/nr_hugepages' % hugepages
- self.ssh_helper.execute("awk -F: '{ print $1 }' < %s" % memory_path)
-
- if hugepages == "2048kB":
- pages = 8192
- else:
- pages = 16
-
- self.ssh_helper.execute("echo %s | sudo tee %s" % (pages, memory_path))
+ meminfo = utils.read_meminfo(self.ssh_helper)
+ hp_size_kb = int(meminfo['Hugepagesize'])
+ nr_hugepages = int(abs(self.HUGEPAGES_KB / hp_size_kb))
+ self.ssh_helper.execute('echo %s | sudo tee %s' %
+ (nr_hugepages, self.NR_HUGEPAGES_PATH))
+ hp = six.BytesIO()
+ self.ssh_helper.get_file_obj(self.NR_HUGEPAGES_PATH, hp)
+ nr_hugepages_set = int(hp.getvalue().decode('utf-8').splitlines()[0])
+ LOG.info('Hugepages size (kB): %s, number claimed: %s, number set: %s',
+ hp_size_kb, nr_hugepages, nr_hugepages_set)
def build_config(self):
vnf_cfg = self.scenario_helper.vnf_cfg
task_path = self.scenario_helper.task_path
+ config_file = vnf_cfg.get('file')
lb_count = vnf_cfg.get('lb_count', 3)
lb_config = vnf_cfg.get('lb_config', 'SW')
worker_config = vnf_cfg.get('worker_config', '1C/1T')
@@ -185,7 +146,8 @@ class DpdkVnfSetupEnvHelper(SetupEnvHelper):
'vnf_type': self.VNF_TYPE,
}
- config_tpl_cfg = find_relative_file(self.DEFAULT_CONFIG_TPL_CFG, task_path)
+ config_tpl_cfg = utils.find_relative_file(self.DEFAULT_CONFIG_TPL_CFG,
+ task_path)
config_basename = posixpath.basename(self.CFG_CONFIG)
script_basename = posixpath.basename(self.CFG_SCRIPT)
multiport = MultiPortConfig(self.scenario_helper.topology,
@@ -200,12 +162,20 @@ class DpdkVnfSetupEnvHelper(SetupEnvHelper):
self.socket)
multiport.generate_config()
- with open(self.CFG_CONFIG) as handle:
- new_config = handle.read()
-
- new_config = self._update_traffic_type(new_config, traffic_options)
- new_config = self._update_packet_type(new_config, traffic_options)
-
+ if config_file:
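+ # user-supplied config file: prepend an [EAL] section that whitelists
+ # (-w) the PCI address of every port, then append the file contents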
+ with utils.open_relative_file(config_file, task_path) as infile:
+ new_config = ['[EAL]']
+ vpci = []
+ for port in self.vnfd_helper.port_pairs.all_ports:
+ interface = self.vnfd_helper.find_interface(name=port)
+ vpci.append(interface['virtual-interface']["vpci"])
+ new_config.extend('w = {0}'.format(item) for item in vpci)
+ new_config = '\n'.join(new_config) + '\n' + infile.read()
+ else:
+ with open(self.CFG_CONFIG) as handle:
+ new_config = handle.read()
+ new_config = self._update_traffic_type(new_config, traffic_options)
+ new_config = self._update_packet_type(new_config, traffic_options)
self.ssh_helper.upload_config_file(config_basename, new_config)
self.ssh_helper.upload_config_file(script_basename,
multiport.generate_script(self.vnfd_helper))
@@ -234,7 +204,6 @@ class DpdkVnfSetupEnvHelper(SetupEnvHelper):
def setup_vnf_environment(self):
self._setup_dpdk()
- self.bound_pci = [v['virtual-interface']["vpci"] for v in self.vnfd_helper.interfaces]
self.kill_vnf()
# bind before _setup_resources so we can use dpdk_port_num
self._detect_and_bind_drivers()
@@ -252,10 +221,11 @@ class DpdkVnfSetupEnvHelper(SetupEnvHelper):
def _setup_dpdk(self):
"""Setup DPDK environment needed for VNF to run"""
self._setup_hugepages()
- self.ssh_helper.execute('sudo modprobe uio && sudo modprobe igb_uio')
- exit_status = self.ssh_helper.execute('lsmod | grep -i igb_uio')[0]
- if exit_status:
- raise y_exceptions.DPDKSetupDriverError()
+ self.dpdk_bind_helper.load_dpdk_driver()
+
+ exit_status = self.dpdk_bind_helper.check_dpdk_driver()
+ if exit_status == 0:
+ return
def get_collectd_options(self):
options = self.scenario_helper.all_options.get("collectd", {})
@@ -282,9 +252,22 @@ class DpdkVnfSetupEnvHelper(SetupEnvHelper):
plugins=plugins, interval=collectd_options.get("interval"),
timeout=self.scenario_helper.timeout)
+ def _check_interface_fields(self):
+ num_nodes = len(self.scenario_helper.nodes)
+ # OpenStack instance creation time is probably proportional to the number
+ # of instances
+ timeout = 120 * num_nodes
+ dpdk_node = DpdkNode(self.scenario_helper.name, self.vnfd_helper.interfaces,
+ self.ssh_helper, timeout)
+ dpdk_node.check()
+
def _detect_and_bind_drivers(self):
interfaces = self.vnfd_helper.interfaces
+ self._check_interface_fields()
+ # collect the bound PCI addresses after probing
+ self.bound_pci = [v['virtual-interface']["vpci"] for v in interfaces]
+
self.dpdk_bind_helper.read_status()
self.dpdk_bind_helper.save_used_drivers()
diff --git a/yardstick/network_services/vnf_generic/vnf/tg_ixload.py b/yardstick/network_services/vnf_generic/vnf/tg_ixload.py
index 3ab30b53e..02e7803f7 100644
--- a/yardstick/network_services/vnf_generic/vnf/tg_ixload.py
+++ b/yardstick/network_services/vnf_generic/vnf/tg_ixload.py
@@ -22,10 +22,10 @@ import shutil
from collections import OrderedDict
from subprocess import call
-from yardstick.common.utils import makedirs
+from yardstick.common import utils
from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNFTrafficGen
from yardstick.network_services.vnf_generic.vnf.sample_vnf import ClientResourceHelper
-from yardstick.benchmark.scenarios.networking.vnf_generic import find_relative_file
+
LOG = logging.getLogger(__name__)
@@ -93,9 +93,10 @@ class IxLoadResourceHelper(ClientResourceHelper):
def setup(self):
# NOTE: fixup scenario_helper to handle ixia
self.resource_file_name = \
- find_relative_file(self.scenario_helper.scenario_cfg['ixia_profile'],
- self.scenario_helper.scenario_cfg["task_path"])
- makedirs(self.RESULTS_MOUNT)
+ utils.find_relative_file(
+ self.scenario_helper.scenario_cfg['ixia_profile'],
+ self.scenario_helper.scenario_cfg["task_path"])
+ utils.makedirs(self.RESULTS_MOUNT)
cmd = MOUNT_CMD.format(self.vnfd_helper.mgmt_interface, self)
LOG.debug(cmd)
@@ -103,7 +104,7 @@ class IxLoadResourceHelper(ClientResourceHelper):
call(cmd, shell=True)
shutil.rmtree(self.RESULTS_MOUNT, ignore_errors=True)
- makedirs(self.RESULTS_MOUNT)
+ utils.makedirs(self.RESULTS_MOUNT)
shutil.copy(self.resource_file_name, self.RESULTS_MOUNT)
def make_aggregates(self):
diff --git a/yardstick/network_services/vnf_generic/vnf/tg_rfc2544_ixia.py b/yardstick/network_services/vnf_generic/vnf/tg_rfc2544_ixia.py
index 12510db96..265d0b7a9 100644
--- a/yardstick/network_services/vnf_generic/vnf/tg_rfc2544_ixia.py
+++ b/yardstick/network_services/vnf_generic/vnf/tg_rfc2544_ixia.py
@@ -19,11 +19,11 @@ import os
import logging
import sys
-from yardstick.common.utils import ErrorClass
+from yardstick.common import utils
+from yardstick import error
from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNFTrafficGen
from yardstick.network_services.vnf_generic.vnf.sample_vnf import ClientResourceHelper
from yardstick.network_services.vnf_generic.vnf.sample_vnf import Rfc2544ResourceHelper
-from yardstick.benchmark.scenarios.networking.vnf_generic import find_relative_file
LOG = logging.getLogger(__name__)
@@ -36,7 +36,7 @@ sys.path.append(IXNET_LIB)
try:
from IxNet import IxNextgen
except ImportError:
- IxNextgen = ErrorClass
+ IxNextgen = error.ErrorClass
class IxiaRfc2544Helper(Rfc2544ResourceHelper):
@@ -122,8 +122,9 @@ class IxiaResourceHelper(ClientResourceHelper):
# we don't know client_file_name until runtime as instantiate
client_file_name = \
- find_relative_file(self.scenario_helper.scenario_cfg['ixia_profile'],
- self.scenario_helper.scenario_cfg["task_path"])
+ utils.find_relative_file(
+ self.scenario_helper.scenario_cfg['ixia_profile'],
+ self.scenario_helper.scenario_cfg["task_path"])
self.client.ix_load_config(client_file_name)
time.sleep(WAIT_AFTER_CFG_LOAD)
diff --git a/yardstick/network_services/vnf_generic/vnf/vfw_vnf.py b/yardstick/network_services/vnf_generic/vnf/vfw_vnf.py
index 6c95648ce..61e99855f 100644
--- a/yardstick/network_services/vnf_generic/vnf/vfw_vnf.py
+++ b/yardstick/network_services/vnf_generic/vnf/vfw_vnf.py
@@ -12,10 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from __future__ import absolute_import
import logging
-from yardstick.benchmark.scenarios.networking.vnf_generic import find_relative_file
+from yardstick.common import utils
from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNF, DpdkVnfSetupEnvHelper
from yardstick.network_services.yang_model import YangModel
@@ -60,8 +59,9 @@ class FWApproxVnf(SampleVNF):
self.vfw_rules = None
def _start_vnf(self):
- yang_model_path = find_relative_file(self.scenario_helper.options['rules'],
- self.scenario_helper.task_path)
+ yang_model_path = utils.find_relative_file(
+ self.scenario_helper.options['rules'],
+ self.scenario_helper.task_path)
yang_model = YangModel(yang_model_path)
self.vfw_rules = yang_model.get_rules()
super(FWApproxVnf, self)._start_vnf()
diff --git a/yardstick/network_services/vnf_generic/vnf/vnf_ssh_helper.py b/yardstick/network_services/vnf_generic/vnf/vnf_ssh_helper.py
new file mode 100644
index 000000000..8e02cf3ac
--- /dev/null
+++ b/yardstick/network_services/vnf_generic/vnf/vnf_ssh_helper.py
@@ -0,0 +1,61 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+
+from six.moves import StringIO
+
+from yardstick.network_services.constants import REMOTE_TMP
+from yardstick.ssh import AutoConnectSSH
+
+LOG = logging.getLogger(__name__)
+
+
+class VnfSshHelper(AutoConnectSSH):
+
+ def __init__(self, node, bin_path, wait=None):
+ self.node = node
+ kwargs = self.args_from_node(self.node)
+ if wait:
+ # if wait is defined here we want to override
+ kwargs['wait'] = wait
+
+ super(VnfSshHelper, self).__init__(**kwargs)
+ self.bin_path = bin_path
+
+ @staticmethod
+ def get_class():
+ # must return static class name, anything else refers to the calling class
+ # i.e. the subclass, not the superclass
+ return VnfSshHelper
+
+ def copy(self):
+ # this copy constructor is different from SSH classes, since it uses node
+ return self.get_class()(self.node, self.bin_path)
+
+ def upload_config_file(self, prefix, content):
+ cfg_file = os.path.join(REMOTE_TMP, prefix)
+ LOG.debug(content)
+ file_obj = StringIO(content)
+ self.put_file_obj(file_obj, cfg_file)
+ return cfg_file
+
+ def join_bin_path(self, *args):
+ return os.path.join(self.bin_path, *args)
+
+ def provision_tool(self, tool_path=None, tool_file=None):
+ if tool_path is None:
+ tool_path = self.bin_path
+ return super(VnfSshHelper, self).provision_tool(tool_path, tool_file)
diff --git a/yardstick/orchestrator/heat.py b/yardstick/orchestrator/heat.py
index 78e9c3df2..20be89f57 100644
--- a/yardstick/orchestrator/heat.py
+++ b/yardstick/orchestrator/heat.py
@@ -491,7 +491,36 @@ name (i.e. %s).
'port_range_max': '65535'},
{'remote_ip_prefix': '::/0',
'ethertype': 'IPv6',
- 'protocol': 'ipv6-icmp'}
+ 'protocol': 'ipv6-icmp'},
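+ # egress counterparts of the ingress rules above
+ # (TCP, UDP and ICMP for both IPv4 and IPv6)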
+ {'remote_ip_prefix': '0.0.0.0/0',
+ 'direction': 'egress',
+ 'protocol': 'tcp',
+ 'port_range_min': '1',
+ 'port_range_max': '65535'},
+ {'remote_ip_prefix': '0.0.0.0/0',
+ 'direction': 'egress',
+ 'protocol': 'udp',
+ 'port_range_min': '1',
+ 'port_range_max': '65535'},
+ {'remote_ip_prefix': '0.0.0.0/0',
+ 'direction': 'egress',
+ 'protocol': 'icmp'},
+ {'remote_ip_prefix': '::/0',
+ 'direction': 'egress',
+ 'ethertype': 'IPv6',
+ 'protocol': 'tcp',
+ 'port_range_min': '1',
+ 'port_range_max': '65535'},
+ {'remote_ip_prefix': '::/0',
+ 'direction': 'egress',
+ 'ethertype': 'IPv6',
+ 'protocol': 'udp',
+ 'port_range_min': '1',
+ 'port_range_max': '65535'},
+ {'remote_ip_prefix': '::/0',
+ 'direction': 'egress',
+ 'ethertype': 'IPv6',
+ 'protocol': 'ipv6-icmp'},
]
}
}
diff --git a/yardstick/ssh.py b/yardstick/ssh.py
index 6ddf327f2..d7adc0d05 100644
--- a/yardstick/ssh.py
+++ b/yardstick/ssh.py
@@ -78,7 +78,7 @@ from oslo_utils import encodeutils
from scp import SCPClient
import six
-from yardstick.common.utils import try_int
+from yardstick.common.utils import try_int, NON_NONE_DEFAULT, make_dict_from_map
from yardstick.network_services.utils import provision_tool
@@ -102,6 +102,7 @@ class SSH(object):
"""Represent ssh connection."""
SSH_PORT = paramiko.config.SSH_PORT
+ DEFAULT_WAIT_TIMEOUT = 120
@staticmethod
def gen_keys(key_filename, bit_count=2048):
@@ -120,6 +121,18 @@ class SSH(object):
# i.e. the subclass, not the superclass
return SSH
+ @classmethod
+ def get_arg_key_map(cls):
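+ # maps constructor argument -> (node dict key, default);
+ # NON_NONE_DEFAULT is a sentinel meaning the key must be present in the node dict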
+ return {
+ 'user': ('user', NON_NONE_DEFAULT),
+ 'host': ('ip', NON_NONE_DEFAULT),
+ 'port': ('ssh_port', cls.SSH_PORT),
+ 'pkey': ('pkey', None),
+ 'key_filename': ('key_filename', None),
+ 'password': ('password', None),
+ 'name': ('name', None),
+ }
+
def __init__(self, user, host, port=None, pkey=None,
key_filename=None, password=None, name=None):
"""Initialize SSH client.
@@ -137,6 +150,7 @@ class SSH(object):
else:
self.log = logging.getLogger(__name__)
+ self.wait_timeout = self.DEFAULT_WAIT_TIMEOUT
self.user = user
self.host = host
# everybody wants to debug this in the caller, do it here instead
@@ -162,16 +176,9 @@ class SSH(object):
overrides = {}
if defaults is None:
defaults = {}
+
params = ChainMap(overrides, node, defaults)
- return {
- 'user': params['user'],
- 'host': params['ip'],
- 'port': params.get('ssh_port', cls.SSH_PORT),
- 'pkey': params.get('pkey'),
- 'key_filename': params.get('key_filename'),
- 'password': params.get('password'),
- 'name': params.get('name'),
- }
+ return make_dict_from_map(params, cls.get_arg_key_map())
@classmethod
def from_node(cls, node, overrides=None, defaults=None):
@@ -186,7 +193,7 @@ class SSH(object):
return key_class.from_private_key(key)
except paramiko.SSHException as e:
errors.append(e)
- raise SSHError("Invalid pkey: %s" % (errors))
+ raise SSHError("Invalid pkey: %s" % errors)
@property
def is_connected(self):
@@ -287,7 +294,7 @@ class SSH(object):
while True:
# Block until data can be read/write.
- r, w, e = select.select([session], writes, [session], 1)
+ e = select.select([session], writes, [session], 1)[2]
if session.recv_ready():
data = encodeutils.safe_decode(session.recv(4096), 'utf-8')
@@ -361,17 +368,20 @@ class SSH(object):
stderr.seek(0)
return exit_status, stdout.read(), stderr.read()
- def wait(self, timeout=120, interval=1):
+ def wait(self, timeout=None, interval=1):
"""Wait for the host will be available via ssh."""
- start_time = time.time()
+ if timeout is None:
+ timeout = self.wait_timeout
+
+ end_time = time.time() + timeout
while True:
try:
return self.execute("uname")
except (socket.error, SSHError) as e:
self.log.debug("Ssh is still unavailable: %r", e)
time.sleep(interval)
- if time.time() > (start_time + timeout):
- raise SSHTimeout("Timeout waiting for '%s'", self.host)
+ if time.time() > end_time:
+ raise SSHTimeout("Timeout waiting for '%s'" % self.host)
def put(self, files, remote_path=b'.', recursive=False):
client = self._get_client()
@@ -447,24 +457,40 @@ class SSH(object):
class AutoConnectSSH(SSH):
+ @classmethod
+ def get_arg_key_map(cls):
+ arg_key_map = super(AutoConnectSSH, cls).get_arg_key_map()
+ arg_key_map['wait'] = ('wait', True)
+ return arg_key_map
+
# always wait or we will get OpenStack SSH errors
def __init__(self, user, host, port=None, pkey=None,
key_filename=None, password=None, name=None, wait=True):
super(AutoConnectSSH, self).__init__(user, host, port, pkey, key_filename, password, name)
- self._wait = wait
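+ # 'wait' is either True (use DEFAULT_WAIT_TIMEOUT) or a number of
+ # seconds that overrides the default wait timeout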
+ if wait and wait is not True:
+ self.wait_timeout = int(wait)
def _make_dict(self):
data = super(AutoConnectSSH, self)._make_dict()
data.update({
- 'wait': self._wait
+ 'wait': self.wait_timeout
})
return data
def _connect(self):
if not self.is_connected:
- self._get_client()
- if self._wait:
- self.wait()
+ interval = 1
+ timeout = self.wait_timeout
+
+ end_time = time.time() + timeout
+ while True:
+ try:
+ return self._get_client()
+ except (socket.error, SSHError) as e:
+ self.log.debug("Ssh is still unavailable: %r", e)
+ time.sleep(interval)
+ if time.time() > end_time:
+ raise SSHTimeout("Timeout waiting for '%s'" % self.host)
def drop_connection(self):
""" Don't close anything, just force creation of a new client """
diff --git a/yardstick/tests/__init__.py b/yardstick/tests/__init__.py
index e69de29bb..56e3106b8 100644
--- a/yardstick/tests/__init__.py
+++ b/yardstick/tests/__init__.py
@@ -0,0 +1,75 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+
+
+STL_MOCKS = {
+ 'trex_stl_lib': mock.MagicMock(),
+ 'trex_stl_lib.base64': mock.MagicMock(),
+ 'trex_stl_lib.binascii': mock.MagicMock(),
+ 'trex_stl_lib.collections': mock.MagicMock(),
+ 'trex_stl_lib.copy': mock.MagicMock(),
+ 'trex_stl_lib.datetime': mock.MagicMock(),
+ 'trex_stl_lib.functools': mock.MagicMock(),
+ 'trex_stl_lib.imp': mock.MagicMock(),
+ 'trex_stl_lib.inspect': mock.MagicMock(),
+ 'trex_stl_lib.json': mock.MagicMock(),
+ 'trex_stl_lib.linecache': mock.MagicMock(),
+ 'trex_stl_lib.math': mock.MagicMock(),
+ 'trex_stl_lib.os': mock.MagicMock(),
+ 'trex_stl_lib.platform': mock.MagicMock(),
+ 'trex_stl_lib.pprint': mock.MagicMock(),
+ 'trex_stl_lib.random': mock.MagicMock(),
+ 'trex_stl_lib.re': mock.MagicMock(),
+ 'trex_stl_lib.scapy': mock.MagicMock(),
+ 'trex_stl_lib.socket': mock.MagicMock(),
+ 'trex_stl_lib.string': mock.MagicMock(),
+ 'trex_stl_lib.struct': mock.MagicMock(),
+ 'trex_stl_lib.sys': mock.MagicMock(),
+ 'trex_stl_lib.threading': mock.MagicMock(),
+ 'trex_stl_lib.time': mock.MagicMock(),
+ 'trex_stl_lib.traceback': mock.MagicMock(),
+ 'trex_stl_lib.trex_stl_async_client': mock.MagicMock(),
+ 'trex_stl_lib.trex_stl_client': mock.MagicMock(),
+ 'trex_stl_lib.trex_stl_exceptions': mock.MagicMock(),
+ 'trex_stl_lib.trex_stl_ext': mock.MagicMock(),
+ 'trex_stl_lib.trex_stl_jsonrpc_client': mock.MagicMock(),
+ 'trex_stl_lib.trex_stl_packet_builder_interface': mock.MagicMock(),
+ 'trex_stl_lib.trex_stl_packet_builder_scapy': mock.MagicMock(),
+ 'trex_stl_lib.trex_stl_port': mock.MagicMock(),
+ 'trex_stl_lib.trex_stl_stats': mock.MagicMock(),
+ 'trex_stl_lib.trex_stl_streams': mock.MagicMock(),
+ 'trex_stl_lib.trex_stl_types': mock.MagicMock(),
+ 'trex_stl_lib.types': mock.MagicMock(),
+ 'trex_stl_lib.utils': mock.MagicMock(),
+ 'trex_stl_lib.utils.argparse': mock.MagicMock(),
+ 'trex_stl_lib.utils.collections': mock.MagicMock(),
+ 'trex_stl_lib.utils.common': mock.MagicMock(),
+ 'trex_stl_lib.utils.json': mock.MagicMock(),
+ 'trex_stl_lib.utils.os': mock.MagicMock(),
+ 'trex_stl_lib.utils.parsing_opts': mock.MagicMock(),
+ 'trex_stl_lib.utils.pwd': mock.MagicMock(),
+ 'trex_stl_lib.utils.random': mock.MagicMock(),
+ 'trex_stl_lib.utils.re': mock.MagicMock(),
+ 'trex_stl_lib.utils.string': mock.MagicMock(),
+ 'trex_stl_lib.utils.sys': mock.MagicMock(),
+ 'trex_stl_lib.utils.text_opts': mock.MagicMock(),
+ 'trex_stl_lib.utils.text_tables': mock.MagicMock(),
+ 'trex_stl_lib.utils.texttable': mock.MagicMock(),
+ 'trex_stl_lib.warnings': mock.MagicMock(),
+ 'trex_stl_lib.yaml': mock.MagicMock(),
+ 'trex_stl_lib.zlib': mock.MagicMock(),
+ 'trex_stl_lib.zmq': mock.MagicMock(),
+}
diff --git a/yardstick/tests/functional/base.py b/yardstick/tests/functional/base.py
new file mode 100644
index 000000000..51be013a1
--- /dev/null
+++ b/yardstick/tests/functional/base.py
@@ -0,0 +1,46 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import abc
+import six
+
+from oslo_config import cfg
+from oslotest import base
+
+
+CONF = cfg.CONF
+
+
+@six.add_metaclass(abc.ABCMeta)
+class BaseFunctionalTestCase(base.BaseTestCase):
+ """Base class for functional tests."""
+
+ def setUp(self):
+ super(BaseFunctionalTestCase, self).setUp()
+
+ def config(self, **kw):
+ """Override some configuration values.
+
+ The keyword arguments are the names of configuration options to
+ override and their values.
+
+ If a group argument is supplied, the overrides are applied to
+ the specified configuration option group.
+
+ All overrides are automatically cleared at the end of the current
+ test by the fixtures cleanup process.
+ """
+ group = kw.pop('group', None)
+ for k, v in kw.items():
+ CONF.set_override(k, v, group)
diff --git a/yardstick/tests/functional/benchmark/__init__.py b/yardstick/tests/functional/benchmark/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/functional/benchmark/__init__.py
diff --git a/yardstick/tests/functional/benchmark/scenarios/__init__.py b/yardstick/tests/functional/benchmark/scenarios/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/functional/benchmark/scenarios/__init__.py
diff --git a/yardstick/tests/functional/benchmark/scenarios/networking/__init__.py b/yardstick/tests/functional/benchmark/scenarios/networking/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/functional/benchmark/scenarios/networking/__init__.py
diff --git a/yardstick/tests/functional/benchmark/scenarios/networking/test_vnf_generic.py b/yardstick/tests/functional/benchmark/scenarios/networking/test_vnf_generic.py
new file mode 100644
index 000000000..38f1a978d
--- /dev/null
+++ b/yardstick/tests/functional/benchmark/scenarios/networking/test_vnf_generic.py
@@ -0,0 +1,195 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+import sys
+
+import mock
+import unittest
+
+from yardstick import tests as y_tests
+from yardstick.common import utils
+
+
+with mock.patch.dict(sys.modules, y_tests.STL_MOCKS):
+ from yardstick.benchmark.scenarios.networking import vnf_generic
+
+
+TRAFFIC_PROFILE_1 = """
+schema: nsb:traffic_profile:0.1
+name: rfc2544
+description: Traffic profile to run RFC2544 latency
+traffic_profile:
+ traffic_type : RFC2544Profile
+ frame_rate : 100
+uplink_0:
+ ipv4:
+ id: 1
+ outer_l2:
+ framesize:
+ 64B: "{{get(imix, 'imix.uplink.64B', '0') }}"
+ 128B: "{{get(imix, 'imix.uplink.128B', '0') }}"
+"""
+
+TRAFFIC_PROFILE_2 = """
+{% set vports = get(extra_args, 'vports', 1) %}
+traffic_profile:
+ traffic_type : RFC2544Profile
+{% for vport in range(vports|int) %}
+uplink_{{vport}}:
+ ipv4: 192.168.0.{{vport}}
+{% endfor %}
+"""
+
+TOPOLOGY_PROFILE = """
+{% set vports = get(extra_args, 'vports', 2) %}
+nsd:nsd-catalog:
+ nsd:
+ - id: 3tg-topology
+ vld:
+{% for vport in range(0,vports,2|int) %}
+ - id: uplink_{{loop.index0}}
+ name: tg__0 to vnf__0 link {{vport + 1}}
+ vnfd-connection-point-ref:
+ - vnfd-connection-point-ref: xe{{vport}}
+ - id: downlink_{{loop.index0}}
+ name: vnf__0 to tg__0 link {{vport + 2}}
+ vnfd-connection-point-ref:
+ - vnfd-connection-point-ref: xe{{vport+1}}
+{% endfor %}
+"""
+
+class VnfGenericTestCase(unittest.TestCase):
+
+ def setUp(self):
+ scenario_cfg = {'topology': 'fake_topology',
+ 'task_path': 'fake_path',
+ 'traffic_profile': 'fake_fprofile_path'}
+ context_cfg = {}
+ topology_yaml = {'nsd:nsd-catalog': {'nsd': [mock.Mock()]}}
+
+ with mock.patch.object(utils, 'open_relative_file') as mock_open_path:
+ mock_open_path.side_effect = mock.mock_open(read_data=str(topology_yaml))
+ self.ns_testcase = vnf_generic.NetworkServiceTestCase(scenario_cfg,
+ context_cfg)
+ self.ns_testcase._get_traffic_profile = mock.Mock()
+ self.ns_testcase._get_topology = mock.Mock()
+
+ def test__fill_traffic_profile_no_args(self):
+ traffic_profile = copy.deepcopy(TRAFFIC_PROFILE_1)
+ self.ns_testcase._get_traffic_profile.return_value = traffic_profile
+
+ self.ns_testcase._fill_traffic_profile()
+ config = self.ns_testcase.traffic_profile.params
+ self.assertEqual('nsb:traffic_profile:0.1', config['schema'])
+ self.assertEqual('rfc2544', config['name'])
+ self.assertEqual('Traffic profile to run RFC2544 latency',
+ config['description'])
+ t_profile = {'traffic_type': 'RFC2544Profile',
+ 'frame_rate': 100}
+ self.assertEqual(t_profile, config['traffic_profile'])
+ uplink_0 = {
+ 'ipv4': {'id': 1,
+ 'outer_l2': {'framesize': {'128B': '0', '64B': '0'}}
+ }
+ }
+ self.assertEqual(uplink_0, config['uplink_0'])
+
+ def test__fill_traffic_profile_with_args(self):
+ traffic_profile = copy.deepcopy(TRAFFIC_PROFILE_2)
+ self.ns_testcase._get_traffic_profile.return_value = traffic_profile
+ self.ns_testcase.scenario_cfg['extra_args'] = {'vports': 3}
+
+ self.ns_testcase._fill_traffic_profile()
+ config = self.ns_testcase.traffic_profile.params
+ self.assertEqual({'ipv4': '192.168.0.0'}, config['uplink_0'])
+ self.assertEqual({'ipv4': '192.168.0.1'}, config['uplink_1'])
+ self.assertEqual({'ipv4': '192.168.0.2'}, config['uplink_2'])
+ self.assertNotIn('uplink_3', config)
+
+ def test__fill_traffic_profile_incorrect_args(self):
+ traffic_profile = copy.deepcopy(TRAFFIC_PROFILE_2)
+ self.ns_testcase._get_traffic_profile.return_value = traffic_profile
+ self.ns_testcase.scenario_cfg['extra_args'] = {'incorrect_vports': 3}
+
+ self.ns_testcase._fill_traffic_profile()
+ config = self.ns_testcase.traffic_profile.params
+ self.assertEqual({'ipv4': '192.168.0.0'}, config['uplink_0'])
+ self.assertNotIn('uplink_1', config)
+
+ def test__render_topology_with_args(self):
+ topology_profile = copy.deepcopy(TOPOLOGY_PROFILE)
+ self.ns_testcase._get_topology.return_value = topology_profile
+ self.ns_testcase.scenario_cfg['extra_args'] = {'vports': 6}
+
+ self.ns_testcase._render_topology()
+ topology = self.ns_testcase.topology
+ self.assertEqual("3tg-topology", topology['id'])
+ vld = self.ns_testcase.topology['vld']
+ self.assertEqual(len(vld), 6)
+ for index, vport in enumerate(range(0, 6, 2)):
+ self.assertEqual('uplink_{}'.format(index), vld[vport]['id'])
+ self.assertEqual('tg__0 to vnf__0 link {}'.format(vport + 1), vld[vport]['name'])
+ self.assertEqual('xe{}'.format(vport),
+ vld[vport]['vnfd-connection-point-ref'][0]
+ ['vnfd-connection-point-ref'])
+
+ self.assertEqual('downlink_{}'.format(index), vld[vport + 1]['id'])
+ self.assertEqual('vnf__0 to tg__0 link {}'.format(vport + 2), vld[vport + 1]['name'])
+ self.assertEqual('xe{}'.format(vport + 1),
+ vld[vport + 1]['vnfd-connection-point-ref'][0]
+ ['vnfd-connection-point-ref'])
+
+ def test__render_topology_incorrect_args(self):
+ topology_profile = copy.deepcopy(TOPOLOGY_PROFILE)
+ self.ns_testcase._get_topology.return_value = topology_profile
+ self.ns_testcase.scenario_cfg['extra_args'] = {'fake_vports': 5}
+
+ self.ns_testcase._render_topology()
+
+ topology = self.ns_testcase.topology
+ self.assertEqual("3tg-topology", topology['id'])
+ vld = self.ns_testcase.topology['vld']
+ self.assertEqual(len(vld), 2)
+
+ self.assertEqual('uplink_0', vld[0]['id'])
+ self.assertEqual('tg__0 to vnf__0 link 1', vld[0]['name'])
+ self.assertEqual('xe0',
+ vld[0]['vnfd-connection-point-ref'][0]['vnfd-connection-point-ref'])
+
+ self.assertEqual('downlink_0', vld[1]['id'])
+ self.assertEqual('vnf__0 to tg__0 link 2', vld[1]['name'])
+ self.assertEqual('xe1',
+ vld[1]['vnfd-connection-point-ref'][0]['vnfd-connection-point-ref'])
+
+ def test__render_topology_no_args(self):
+ topology_profile = copy.deepcopy(TOPOLOGY_PROFILE)
+ self.ns_testcase._get_topology.return_value = topology_profile
+
+ self.ns_testcase._render_topology()
+
+ topology = self.ns_testcase.topology
+ self.assertEqual("3tg-topology", topology['id'])
+ vld = self.ns_testcase.topology['vld']
+ self.assertEqual(len(vld), 2)
+
+ self.assertEqual('uplink_0', vld[0]['id'])
+ self.assertEqual('tg__0 to vnf__0 link 1', vld[0]['name'])
+ self.assertEqual('xe0',
+ vld[0]['vnfd-connection-point-ref'][0]['vnfd-connection-point-ref'])
+
+ self.assertEqual('downlink_0', vld[1]['id'])
+ self.assertEqual('vnf__0 to tg__0 link 2', vld[1]['name'])
+ self.assertEqual('xe1',
+ vld[1]['vnfd-connection-point-ref'][0]['vnfd-connection-point-ref'])
diff --git a/yardstick/tests/functional/common/fake_directory_package/README.md b/yardstick/tests/functional/common/fake_directory_package/README.md
new file mode 100644
index 000000000..689e47039
--- /dev/null
+++ b/yardstick/tests/functional/common/fake_directory_package/README.md
@@ -0,0 +1,2 @@
+# yardstick_new_plugin_2
+Yardstick plugin
diff --git a/yardstick/tests/functional/common/fake_directory_package/setup.py b/yardstick/tests/functional/common/fake_directory_package/setup.py
new file mode 100644
index 000000000..cf938ef4f
--- /dev/null
+++ b/yardstick/tests/functional/common/fake_directory_package/setup.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from setuptools import setup, find_packages
+
+setup(
+ name='yardstick_new_plugin_2',
+ version='1.0.0',
+ packages=find_packages(),
+ include_package_data=True,
+ url='https://www.opnfv.org',
+ entry_points={
+ 'yardstick.scenarios': [
+ 'Dummy2 = yardstick_new_plugin_2.benchmark.scenarios.dummy2.dummy2:'
+ 'Dummy2',
+ ]
+ },
+)
diff --git a/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/__init__.py b/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/__init__.py
diff --git a/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/__init__.py b/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/__init__.py
diff --git a/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/__init__.py b/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/__init__.py
diff --git a/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/dummy2/__init__.py b/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/dummy2/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/dummy2/__init__.py
diff --git a/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/dummy2/dummy2.py b/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/dummy2/dummy2.py
new file mode 100644
index 000000000..a2211ec51
--- /dev/null
+++ b/yardstick/tests/functional/common/fake_directory_package/yardstick_new_plugin_2/benchmark/scenarios/dummy2/dummy2.py
@@ -0,0 +1,40 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from yardstick.benchmark.scenarios import base
+
+
+LOG = logging.getLogger(__name__)
+
+
+class Dummy2(base.Scenario):
+ """Execute Dummy (v2!) echo"""
+ __scenario_type__ = "Dummy2"
+
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
+ self.setup_done = False
+
+ def setup(self):
+ self.setup_done = True
+
+ def run(self, result):
+ if not self.setup_done:
+ self.setup()
+
+ result["hello"] = "yardstick"
+ LOG.info("Dummy (v2!) echo hello yardstick!")
diff --git a/yardstick/tests/functional/common/fake_pip_package/yardstick_new_plugin-1.0.0.tar.gz b/yardstick/tests/functional/common/fake_pip_package/yardstick_new_plugin-1.0.0.tar.gz
new file mode 100644
index 000000000..e5379a78a
--- /dev/null
+++ b/yardstick/tests/functional/common/fake_pip_package/yardstick_new_plugin-1.0.0.tar.gz
Binary files differ
diff --git a/yardstick/tests/functional/common/test_packages.py b/yardstick/tests/functional/common/test_packages.py
new file mode 100644
index 000000000..5dead4e55
--- /dev/null
+++ b/yardstick/tests/functional/common/test_packages.py
@@ -0,0 +1,94 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from os import path
+import re
+
+from yardstick.common import packages
+from yardstick.common import utils
+from yardstick.tests.functional import base
+
+
+class PipPackagesTestCase(base.BaseFunctionalTestCase):
+
+ TMP_FOLDER = '/tmp/pip_packages/'
+ PYTHONPATH = 'PYTHONPATH=%s' % TMP_FOLDER
+
+ def setUp(self):
+ super(PipPackagesTestCase, self).setUp()
+ privsep_helper = os.path.join(
+ os.getenv('VIRTUAL_ENV'), 'bin', 'privsep-helper')
+ self.config(
+ helper_command=' '.join(['sudo', '-EH', privsep_helper]),
+ group='yardstick_privileged')
+ self.addCleanup(self._cleanup)
+
+ def _cleanup(self):
+ utils.execute_command('sudo rm -rf %s' % self.TMP_FOLDER)
+
+ def _remove_package(self, package):
+ os.system('%s pip uninstall %s -y' % (self.PYTHONPATH, package))
+
+ def _list_packages(self):
+ pip_list_regex = re.compile(
+ r"(?P<name>[\w\.-]+) \((?P<version>[\w\d_\.\-]+),*.*\)")
+ pkg_dict = {}
+ pkgs = utils.execute_command('pip list',
+ env={'PYTHONPATH': self.TMP_FOLDER})
+ for line in pkgs:
+ match = pip_list_regex.match(line)
+ if match and match.group('name'):
+ pkg_dict[match.group('name')] = match.group('version')
+ return pkg_dict
+
+ def test_install_from_folder(self):
+ dirname = path.dirname(__file__)
+ package_dir = dirname + '/fake_directory_package'
+ package_name = 'yardstick-new-plugin-2'
+ self.addCleanup(self._remove_package, package_name)
+ self._remove_package(package_name)
+ self.assertFalse(package_name in self._list_packages())
+
+ self.assertEqual(0, packages.pip_install(package_dir, self.TMP_FOLDER))
+ self.assertTrue(package_name in self._list_packages())
+
+ def test_install_from_pip_package(self):
+ dirname = path.dirname(__file__)
+ package_path = (dirname +
+ '/fake_pip_package/yardstick_new_plugin-1.0.0.tar.gz')
+ package_name = 'yardstick-new-plugin'
+ self.addCleanup(self._remove_package, package_name)
+ self._remove_package(package_name)
+ self.assertFalse(package_name in self._list_packages())
+
+ self.assertEqual(0, packages.pip_install(package_path, self.TMP_FOLDER))
+ self.assertTrue(package_name in self._list_packages())
+
+ # NOTE(ralonsoh): a stable test plugin project is needed in the OPNFV git
+ # server to execute this test.
+ # def test_install_from_url(self):
+
+ def test_pip_freeze(self):
+ # NOTE(ralonsoh): the reference versions below come from requirements.txt.
+ # The best way to test this function is to parse requirements.txt and
+ # test-requirements.txt and check all packages.
+ pkgs_ref = {'Babel': '2.3.4',
+ 'SQLAlchemy': '1.1.12',
+ 'influxdb': '4.1.1',
+ 'netifaces': '0.10.6',
+ 'unicodecsv': '0.14.1'}
+ pkgs = packages.pip_list()
+ for name, version in pkgs_ref.items():
+ self.assertEqual(version, pkgs[name])
diff --git a/yardstick/tests/unit/__init__.py b/yardstick/tests/unit/__init__.py
index a468b272b..c05f91c81 100644
--- a/yardstick/tests/unit/__init__.py
+++ b/yardstick/tests/unit/__init__.py
@@ -12,65 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from __future__ import absolute_import
+import sys
+
import mock
+from yardstick import tests
+
-STL_MOCKS = {
- 'trex_stl_lib': mock.MagicMock(),
- 'trex_stl_lib.base64': mock.MagicMock(),
- 'trex_stl_lib.binascii': mock.MagicMock(),
- 'trex_stl_lib.collections': mock.MagicMock(),
- 'trex_stl_lib.copy': mock.MagicMock(),
- 'trex_stl_lib.datetime': mock.MagicMock(),
- 'trex_stl_lib.functools': mock.MagicMock(),
- 'trex_stl_lib.imp': mock.MagicMock(),
- 'trex_stl_lib.inspect': mock.MagicMock(),
- 'trex_stl_lib.json': mock.MagicMock(),
- 'trex_stl_lib.linecache': mock.MagicMock(),
- 'trex_stl_lib.math': mock.MagicMock(),
- 'trex_stl_lib.os': mock.MagicMock(),
- 'trex_stl_lib.platform': mock.MagicMock(),
- 'trex_stl_lib.pprint': mock.MagicMock(),
- 'trex_stl_lib.random': mock.MagicMock(),
- 'trex_stl_lib.re': mock.MagicMock(),
- 'trex_stl_lib.scapy': mock.MagicMock(),
- 'trex_stl_lib.socket': mock.MagicMock(),
- 'trex_stl_lib.string': mock.MagicMock(),
- 'trex_stl_lib.struct': mock.MagicMock(),
- 'trex_stl_lib.sys': mock.MagicMock(),
- 'trex_stl_lib.threading': mock.MagicMock(),
- 'trex_stl_lib.time': mock.MagicMock(),
- 'trex_stl_lib.traceback': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_async_client': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_client': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_exceptions': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_ext': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_jsonrpc_client': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_packet_builder_interface': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_packet_builder_scapy': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_port': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_stats': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_streams': mock.MagicMock(),
- 'trex_stl_lib.trex_stl_types': mock.MagicMock(),
- 'trex_stl_lib.types': mock.MagicMock(),
- 'trex_stl_lib.utils': mock.MagicMock(),
- 'trex_stl_lib.utils.argparse': mock.MagicMock(),
- 'trex_stl_lib.utils.collections': mock.MagicMock(),
- 'trex_stl_lib.utils.common': mock.MagicMock(),
- 'trex_stl_lib.utils.json': mock.MagicMock(),
- 'trex_stl_lib.utils.os': mock.MagicMock(),
- 'trex_stl_lib.utils.parsing_opts': mock.MagicMock(),
- 'trex_stl_lib.utils.pwd': mock.MagicMock(),
- 'trex_stl_lib.utils.random': mock.MagicMock(),
- 'trex_stl_lib.utils.re': mock.MagicMock(),
- 'trex_stl_lib.utils.string': mock.MagicMock(),
- 'trex_stl_lib.utils.sys': mock.MagicMock(),
- 'trex_stl_lib.utils.text_opts': mock.MagicMock(),
- 'trex_stl_lib.utils.text_tables': mock.MagicMock(),
- 'trex_stl_lib.utils.texttable': mock.MagicMock(),
- 'trex_stl_lib.warnings': mock.MagicMock(),
- 'trex_stl_lib.yaml': mock.MagicMock(),
- 'trex_stl_lib.zlib': mock.MagicMock(),
- 'trex_stl_lib.zmq': mock.MagicMock(),
-}
+mock_stl = mock.patch.dict(sys.modules, tests.STL_MOCKS)
+mock_stl.start()
diff --git a/yardstick/tests/unit/base.py b/yardstick/tests/unit/base.py
new file mode 100644
index 000000000..b943efc2b
--- /dev/null
+++ b/yardstick/tests/unit/base.py
@@ -0,0 +1,23 @@
+# Copyright 2018 Intel Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import abc
+import unittest
+
+import six
+
+
+@six.add_metaclass(abc.ABCMeta)
+class BaseUnitTestCase(unittest.TestCase):
+ """Base class for unit tests"""
diff --git a/yardstick/tests/unit/benchmark/core/test_task.py b/yardstick/tests/unit/benchmark/core/test_task.py
index e91853ffe..77e95bbec 100644
--- a/yardstick/tests/unit/benchmark/core/test_task.py
+++ b/yardstick/tests/unit/benchmark/core/test_task.py
@@ -51,8 +51,15 @@ class TaskTestCase(unittest.TestCase):
def test__do_output(self, mock_dispatcher):
t = task.Task()
output_config = {"DEFAULT": {"dispatcher": "file, http"}}
- mock_dispatcher.get = mock.MagicMock(return_value=[mock.MagicMock(),
- mock.MagicMock()])
+
+ dispatcher1 = mock.MagicMock()
+ dispatcher1.__dispatcher_type__ = 'file'
+
+ dispatcher2 = mock.MagicMock()
+ dispatcher2.__dispatcher_type__ = 'http'
+
+ mock_dispatcher.get = mock.MagicMock(return_value=[dispatcher1,
+ dispatcher2])
self.assertEqual(None, t._do_output(output_config, {}))
@mock.patch.object(task, 'Context')
diff --git a/yardstick/tests/unit/benchmark/runner/test_base.py b/yardstick/tests/unit/benchmark/runner/test_base.py
index 0fdc42347..59739c54f 100644
--- a/yardstick/tests/unit/benchmark/runner/test_base.py
+++ b/yardstick/tests/unit/benchmark/runner/test_base.py
@@ -11,6 +11,8 @@ import time
import mock
import unittest
+from subprocess import CalledProcessError
+
from yardstick.benchmark.runners import base
from yardstick.benchmark.runners import iteration
@@ -20,19 +22,19 @@ class ActionTestCase(unittest.TestCase):
@mock.patch("yardstick.benchmark.runners.base.subprocess")
def test__execute_shell_command(self, mock_subprocess):
- mock_subprocess.check_output.side_effect = Exception()
+ mock_subprocess.check_output.side_effect = CalledProcessError(-1, '')
self.assertEqual(base._execute_shell_command("")[0], -1)
@mock.patch("yardstick.benchmark.runners.base.subprocess")
def test__single_action(self, mock_subprocess):
- mock_subprocess.check_output.side_effect = Exception()
+ mock_subprocess.check_output.side_effect = CalledProcessError(-1, '')
base._single_action(0, "echo", mock.MagicMock())
@mock.patch("yardstick.benchmark.runners.base.subprocess")
def test__periodic_action(self, mock_subprocess):
- mock_subprocess.check_output.side_effect = Exception()
+ mock_subprocess.check_output.side_effect = CalledProcessError(-1, '')
base._periodic_action(0, "echo", mock.MagicMock())
@@ -40,7 +42,14 @@ class ActionTestCase(unittest.TestCase):
class RunnerTestCase(unittest.TestCase):
def setUp(self):
- self.runner = iteration.IterationRunner({})
+ config = {
+ 'output_config': {
+ 'DEFAULT': {
+ 'dispatcher': 'file'
+ }
+ }
+ }
+ self.runner = iteration.IterationRunner(config)
@mock.patch("yardstick.benchmark.runners.iteration.multiprocessing")
def test_get_output(self, *args):
diff --git a/yardstick/tests/unit/benchmark/runner/test_search.py b/yardstick/tests/unit/benchmark/runner/test_search.py
index 1bc07448d..4e5b4fe77 100644
--- a/yardstick/tests/unit/benchmark/runner/test_search.py
+++ b/yardstick/tests/unit/benchmark/runner/test_search.py
@@ -17,15 +17,8 @@ import time
import mock
import unittest
-from yardstick.tests.unit import STL_MOCKS
-
-STLClient = mock.MagicMock()
-stl_patch = mock.patch.dict("sys.modules", STL_MOCKS)
-stl_patch.start()
-
-if stl_patch:
- from yardstick.benchmark.runners.search import SearchRunner
- from yardstick.benchmark.runners.search import SearchRunnerHelper
+from yardstick.benchmark.runners.search import SearchRunner
+from yardstick.benchmark.runners.search import SearchRunnerHelper
class TestSearchRunnerHelper(unittest.TestCase):
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py
index fb55b5ea0..c7a29f27e 100644
--- a/yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
# Copyright (c) 2016-2017 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,27 +11,29 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-#
-
-# Unittest for yardstick.benchmark.scenarios.networking.test_vnf_generic
-
-from __future__ import absolute_import
+from copy import deepcopy
import os
-import errno
-import unittest
-import mock
+import sys
-from copy import deepcopy
+import mock
+import unittest
-from yardstick.tests.unit import STL_MOCKS
-from yardstick.benchmark.scenarios.networking.vnf_generic import \
- SshManager, NetworkServiceTestCase, IncorrectConfig, \
- open_relative_file
+from yardstick import tests
+from yardstick.common import utils
from yardstick.network_services.collector.subscriber import Collector
-from yardstick.network_services.vnf_generic.vnf.base import \
- GenericTrafficGen, GenericVNF
+from yardstick.network_services.traffic_profile import base
+from yardstick.network_services.vnf_generic import vnfdgen
+from yardstick.error import IncorrectConfig
+from yardstick.network_services.vnf_generic.vnf.base import GenericTrafficGen
+from yardstick.network_services.vnf_generic.vnf.base import GenericVNF
+
+
+stl_patch = mock.patch.dict(sys.modules, tests.STL_MOCKS)
+stl_patch.start()
+if stl_patch:
+ from yardstick.benchmark.scenarios.networking import vnf_generic
# pylint: disable=unused-argument
# disable this for now because I keep forgetting mock patch arg ordering
@@ -317,6 +317,7 @@ class TestNetworkServiceTestCase(unittest.TestCase):
'task_id': 'a70bdf4a-8e67-47a3-9dc1-273c14506eb7',
'tc': 'tc_ipv4_1Mflow_64B_packetsize',
'traffic_profile': 'ipv4_throughput_vpe.yaml',
+ 'extra_args': {'arg1': 'value1', 'arg2': 'value2'},
'type': 'ISB',
'tc_options': {
'rfc2544': {
@@ -345,23 +346,14 @@ class TestNetworkServiceTestCase(unittest.TestCase):
},
}
- self.s = NetworkServiceTestCase(self.scenario_cfg, self.context_cfg)
+ self.s = vnf_generic.NetworkServiceTestCase(self.scenario_cfg,
+ self.context_cfg)
def _get_file_abspath(self, filename):
curr_path = os.path.dirname(os.path.abspath(__file__))
file_path = os.path.join(curr_path, filename)
return file_path
- def test_ssh_manager(self):
- with mock.patch("yardstick.ssh.SSH") as ssh:
- ssh_mock = mock.Mock(autospec=ssh.SSH)
- ssh_mock.execute = \
- mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
- ssh.from_node.return_value = ssh_mock
- for _, node_dict in self.context_cfg["nodes"].items():
- with SshManager(node_dict) as conn:
- self.assertIsNotNone(conn)
-
def test___init__(self):
assert self.topology
@@ -415,7 +407,10 @@ class TestNetworkServiceTestCase(unittest.TestCase):
'public_ip': ['1.1.1.1'],
},
}
-
+ # NOTE(ralonsoh): check the expected output. This test could be
+ # incorrect
+ # result = {'flow': {'dst_ip0': '152.16.40.2-152.16.40.254',
+ # 'src_ip0': '152.16.100.2-152.16.100.254'}}
self.assertEqual({'flow': {}}, self.s._get_traffic_flow())
def test___get_traffic_flow_error(self):
@@ -425,16 +420,16 @@ class TestNetworkServiceTestCase(unittest.TestCase):
def test_get_vnf_imp(self):
vnfd = COMPLETE_TREX_VNFD['vnfd:vnfd-catalog']['vnfd'][0]['class-name']
- with mock.patch.dict("sys.modules", STL_MOCKS):
+ with mock.patch.dict(sys.modules, tests.STL_MOCKS):
self.assertIsNotNone(self.s.get_vnf_impl(vnfd))
- with self.assertRaises(IncorrectConfig) as raised:
- self.s.get_vnf_impl('NonExistentClass')
+ with self.assertRaises(vnf_generic.IncorrectConfig) as raised:
+ self.s.get_vnf_impl('NonExistentClass')
- exc_str = str(raised.exception)
- print(exc_str)
- self.assertIn('No implementation', exc_str)
- self.assertIn('found in', exc_str)
+ exc_str = str(raised.exception)
+ print(exc_str)
+ self.assertIn('No implementation', exc_str)
+ self.assertIn('found in', exc_str)
def test_load_vnf_models_invalid(self):
self.context_cfg["nodes"]['tg__1']['VNF model'] = \
@@ -456,39 +451,37 @@ class TestNetworkServiceTestCase(unittest.TestCase):
self.s.load_vnf_models(self.scenario_cfg, self.context_cfg))
def test_map_topology_to_infrastructure(self):
- with mock.patch("yardstick.ssh.SSH") as ssh:
- ssh_mock = mock.Mock(autospec=ssh.SSH)
- ssh_mock.execute = \
- mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
- ssh.from_node.return_value = ssh_mock
- self.s.map_topology_to_infrastructure()
+ self.s.map_topology_to_infrastructure()
nodes = self.context_cfg["nodes"]
- self.assertEqual(
- "../../vnf_descriptors/tg_rfc2544_tpl.yaml", nodes['tg__1']['VNF model'])
- self.assertEqual("../../vnf_descriptors/vpe_vnf.yaml",
+ self.assertEqual('../../vnf_descriptors/tg_rfc2544_tpl.yaml',
+ nodes['tg__1']['VNF model'])
+ self.assertEqual('../../vnf_descriptors/vpe_vnf.yaml',
nodes['vnf__1']['VNF model'])
def test_map_topology_to_infrastructure_insufficient_nodes(self):
- del self.context_cfg['nodes']['vnf__1']
- with mock.patch("yardstick.ssh.SSH") as ssh:
- ssh_mock = mock.Mock(autospec=ssh.SSH)
- ssh_mock.execute = \
- mock.Mock(return_value=(1, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
- ssh.from_node.return_value = ssh_mock
+ cfg = deepcopy(self.context_cfg)
+ del cfg['nodes']['vnf__1']
+ cfg_patch = mock.patch.object(self.s, 'context_cfg', cfg)
+ with cfg_patch:
with self.assertRaises(IncorrectConfig):
self.s.map_topology_to_infrastructure()
def test_map_topology_to_infrastructure_config_invalid(self):
- cfg = dict(self.context_cfg)
+ ssh_mock = mock.Mock()
+ ssh_mock.execute.return_value = 0, SYS_CLASS_NET + IP_ADDR_SHOW, ""
+
+ cfg = deepcopy(self.s.context_cfg)
+
+ # delete all of them, since we don't know which one will be checked first
del cfg['nodes']['vnf__1']['interfaces']['xe0']['local_mac']
- with mock.patch("yardstick.ssh.SSH") as ssh:
- ssh_mock = mock.Mock(autospec=ssh.SSH)
- ssh_mock.execute = \
- mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
- ssh.from_node.return_value = ssh_mock
+ del cfg['nodes']['vnf__1']['interfaces']['xe1']['local_mac']
+ del cfg['nodes']['tg__1']['interfaces']['xe0']['local_mac']
+ del cfg['nodes']['tg__1']['interfaces']['xe1']['local_mac']
+ config_patch = mock.patch.object(self.s, 'context_cfg', cfg)
+ with config_patch:
with self.assertRaises(IncorrectConfig):
self.s.map_topology_to_infrastructure()
@@ -503,10 +496,8 @@ class TestNetworkServiceTestCase(unittest.TestCase):
for interface in self.tg__1['interfaces'].values():
del interface['local_mac']
- with mock.patch(
- "yardstick.benchmark.scenarios.networking.vnf_generic.LOG"):
- with self.assertRaises(IncorrectConfig) as raised:
- self.s._resolve_topology()
+ with self.assertRaises(vnf_generic.IncorrectConfig) as raised:
+ self.s._resolve_topology()
self.assertIn('not found', str(raised.exception))
@@ -518,10 +509,8 @@ class TestNetworkServiceTestCase(unittest.TestCase):
self.s.topology["vld"][0]['vnfd-connection-point-ref'].append(
self.s.topology["vld"][0]['vnfd-connection-point-ref'][0])
- with mock.patch(
- "yardstick.benchmark.scenarios.networking.vnf_generic.LOG"):
- with self.assertRaises(IncorrectConfig) as raised:
- self.s._resolve_topology()
+ with self.assertRaises(vnf_generic.IncorrectConfig) as raised:
+ self.s._resolve_topology()
self.assertIn('wrong endpoint count', str(raised.exception))
@@ -529,10 +518,8 @@ class TestNetworkServiceTestCase(unittest.TestCase):
self.s.topology["vld"][0]['vnfd-connection-point-ref'] = \
self.s.topology["vld"][0]['vnfd-connection-point-ref'][:1]
- with mock.patch(
- "yardstick.benchmark.scenarios.networking.vnf_generic.LOG"):
- with self.assertRaises(IncorrectConfig) as raised:
- self.s._resolve_topology()
+ with self.assertRaises(vnf_generic.IncorrectConfig) as raised:
+ self.s._resolve_topology()
self.assertIn('wrong endpoint count', str(raised.exception))
@@ -625,15 +612,48 @@ class TestNetworkServiceTestCase(unittest.TestCase):
self.assertEqual({'imix': {'64B': 100}},
self.s._get_traffic_imix())
- def test__fill_traffic_profile(self):
- with mock.patch.dict("sys.modules", STL_MOCKS):
- self.scenario_cfg["traffic_profile"] = \
- self._get_file_abspath("ipv4_throughput_vpe.yaml")
- self.scenario_cfg["traffic_options"]["flow"] = \
- self._get_file_abspath("ipv4_1flow_Packets_vpe.yaml")
- self.scenario_cfg["traffic_options"]["imix"] = \
- self._get_file_abspath("imix_voice.yaml")
- self.assertIsNotNone(self.s._fill_traffic_profile())
+ @mock.patch.object(base.TrafficProfile, 'get')
+ @mock.patch.object(vnfdgen, 'generate_vnfd')
+ def test__fill_traffic_profile(self, mock_generate, mock_tprofile_get):
+ fake_tprofile = mock.Mock()
+ fake_vnfd = mock.Mock()
+ with mock.patch.object(self.s, '_get_traffic_profile',
+ return_value=fake_tprofile) as mock_get_tp:
+ mock_generate.return_value = fake_vnfd
+ self.s._fill_traffic_profile()
+ mock_get_tp.assert_called_once()
+ mock_generate.assert_called_once_with(
+ fake_tprofile,
+ {'downlink': {},
+ 'extra_args': {'arg1': 'value1', 'arg2': 'value2'},
+ 'flow': {'flow': {}},
+ 'imix': {'imix': {'64B': 100}},
+ 'uplink': {}}
+ )
+ mock_tprofile_get.assert_called_once_with(fake_vnfd)
+
+ @mock.patch.object(utils, 'open_relative_file')
+ def test__get_topology(self, mock_open_path):
+ self.s.scenario_cfg['topology'] = 'fake_topology'
+ self.s.scenario_cfg['task_path'] = 'fake_path'
+ mock_open_path.side_effect = mock.mock_open(read_data='fake_data')
+ self.assertEqual('fake_data', self.s._get_topology())
+ mock_open_path.assert_called_once_with('fake_topology', 'fake_path')
+
+ @mock.patch.object(vnfdgen, 'generate_vnfd')
+ def test__render_topology(self, mock_generate):
+ fake_topology = 'fake_topology'
+ mock_generate.return_value = {'nsd:nsd-catalog': {'nsd': ['fake_nsd']}}
+ with mock.patch.object(self.s, '_get_topology',
+ return_value=fake_topology) as mock_get_topology:
+ self.s._render_topology()
+ mock_get_topology.assert_called_once()
+
+ mock_generate.assert_called_once_with(
+ fake_topology,
+ {'extra_args': {'arg1': 'value1', 'arg2': 'value2'}}
+ )
+ self.assertEqual(self.s.topology, 'fake_nsd')
def test_teardown(self):
vnf = mock.Mock(autospec=GenericVNF)
@@ -658,141 +678,3 @@ class TestNetworkServiceTestCase(unittest.TestCase):
mock.Mock(return_value=True)
with self.assertRaises(RuntimeError):
self.s.teardown()
-
- SAMPLE_NETDEVS = {
- 'enp11s0': {
- 'address': '0a:de:ad:be:ef:f5',
- 'device': '0x1533',
- 'driver': 'igb',
- 'ifindex': '2',
- 'interface_name': 'enp11s0',
- 'operstate': 'down',
- 'pci_bus_id': '0000:0b:00.0',
- 'subsystem_device': '0x1533',
- 'subsystem_vendor': '0x15d9',
- 'vendor': '0x8086'
- },
- 'lan': {
- 'address': '0a:de:ad:be:ef:f4',
- 'device': '0x153a',
- 'driver': 'e1000e',
- 'ifindex': '3',
- 'interface_name': 'lan',
- 'operstate': 'up',
- 'pci_bus_id': '0000:00:19.0',
- 'subsystem_device': '0x153a',
- 'subsystem_vendor': '0x15d9',
- 'vendor': '0x8086'
- }
- }
-
- SAMPLE_VM_NETDEVS = {
- 'eth1': {
- 'address': 'fa:de:ad:be:ef:5b',
- 'device': '0x0001',
- 'driver': 'virtio_net',
- 'ifindex': '3',
- 'interface_name': 'eth1',
- 'operstate': 'down',
- 'pci_bus_id': '0000:00:04.0',
- 'vendor': '0x1af4'
- }
- }
-
- def test_parse_netdev_info(self):
- output = """\
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/ifindex:2
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/address:0a:de:ad:be:ef:f5
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/operstate:down
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/device/vendor:0x8086
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/device/device:0x1533
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/device/subsystem_vendor:0x15d9
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/device/subsystem_device:0x1533
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/driver:igb
-/sys/devices/pci0000:00/0000:00:1c.3/0000:0b:00.0/net/enp11s0/pci_bus_id:0000:0b:00.0
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/ifindex:3
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/address:0a:de:ad:be:ef:f4
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/operstate:up
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/device/vendor:0x8086
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/device/device:0x153a
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/device/subsystem_vendor:0x15d9
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/device/subsystem_device:0x153a
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/driver:e1000e
-/sys/devices/pci0000:00/0000:00:19.0/net/lan/pci_bus_id:0000:00:19.0
-"""
- res = NetworkServiceTestCase.parse_netdev_info(output)
- assert res == self.SAMPLE_NETDEVS
-
- def test_parse_netdev_info_virtio(self):
- output = """\
-/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/ifindex:3
-/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/address:fa:de:ad:be:ef:5b
-/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/operstate:down
-/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/device/vendor:0x1af4
-/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/device/device:0x0001
-/sys/devices/pci0000:00/0000:00:04.0/virtio1/net/eth1/driver:virtio_net
-"""
- res = NetworkServiceTestCase.parse_netdev_info(output)
- assert res == self.SAMPLE_VM_NETDEVS
-
- def test_probe_missing_values(self):
- netdevs = self.SAMPLE_NETDEVS.copy()
- network = {'local_mac': '0a:de:ad:be:ef:f5'}
- NetworkServiceTestCase._probe_missing_values(netdevs, network)
- assert network['vpci'] == '0000:0b:00.0'
-
- network = {'local_mac': '0a:de:ad:be:ef:f4'}
- NetworkServiceTestCase._probe_missing_values(netdevs, network)
- assert network['vpci'] == '0000:00:19.0'
-
- # TODO: Split this into several tests, for different IOError sub-types
- def test_open_relative_path(self):
- mock_open = mock.mock_open()
- mock_open_result = mock_open()
- mock_open_call_count = 1 # initial call to get result
-
- module_name = \
- 'yardstick.benchmark.scenarios.networking.vnf_generic.open'
-
- # test
- with mock.patch(module_name, mock_open, create=True):
- self.assertEqual(open_relative_file(
- 'foo', 'bar'), mock_open_result)
-
- mock_open_call_count += 1 # one more call expected
- self.assertEqual(mock_open.call_count, mock_open_call_count)
- self.assertIn('foo', mock_open.call_args_list[-1][0][0])
- self.assertNotIn('bar', mock_open.call_args_list[-1][0][0])
-
- def open_effect(*args, **kwargs):
- if kwargs.get('name', args[0]) == os.path.join('bar', 'foo'):
- return mock_open_result
- raise IOError(errno.ENOENT, 'not found')
-
- mock_open.side_effect = open_effect
- self.assertEqual(open_relative_file(
- 'foo', 'bar'), mock_open_result)
-
- mock_open_call_count += 2 # two more calls expected
- self.assertEqual(mock_open.call_count, mock_open_call_count)
- self.assertIn('foo', mock_open.call_args_list[-1][0][0])
- self.assertIn('bar', mock_open.call_args_list[-1][0][0])
-
- # test an IOError of type ENOENT
- mock_open.side_effect = IOError(errno.ENOENT, 'not found')
- with self.assertRaises(IOError):
- # the second call still raises
- open_relative_file('foo', 'bar')
-
- mock_open_call_count += 2 # two more calls expected
- self.assertEqual(mock_open.call_count, mock_open_call_count)
- self.assertIn('foo', mock_open.call_args_list[-1][0][0])
- self.assertIn('bar', mock_open.call_args_list[-1][0][0])
-
- # test an IOError other than ENOENT
- mock_open.side_effect = IOError(errno.EBUSY, 'busy')
- with self.assertRaises(IOError):
- open_relative_file('foo', 'bar')
-
- mock_open_call_count += 1 # one more call expected
- self.assertEqual(mock_open.call_count, mock_open_call_count)
diff --git a/yardstick/tests/unit/benchmark/scenarios/test_base.py b/yardstick/tests/unit/benchmark/scenarios/test_base.py
index a95e6bc86..985338532 100644
--- a/yardstick/tests/unit/benchmark/scenarios/test_base.py
+++ b/yardstick/tests/unit/benchmark/scenarios/test_base.py
@@ -13,12 +13,11 @@
# License for the specific language governing permissions and limitations
# under the License.
-import unittest
-
from yardstick.benchmark.scenarios import base
+from yardstick.tests.unit import base as ut_base
-class ScenarioTestCase(unittest.TestCase):
+class ScenarioTestCase(ut_base.BaseUnitTestCase):
def test_get_scenario_type(self):
scenario_type = 'dummy scenario'
@@ -87,7 +86,7 @@ class ScenarioTestCase(unittest.TestCase):
str(exc.exception))
-class IterScenarioClassesTestCase(unittest.TestCase):
+class IterScenarioClassesTestCase(ut_base.BaseUnitTestCase):
def test_no_scenario_type_defined(self):
some_existing_scenario_class_names = [
diff --git a/yardstick/tests/unit/common/test_packages.py b/yardstick/tests/unit/common/test_packages.py
new file mode 100644
index 000000000..ba59a3015
--- /dev/null
+++ b/yardstick/tests/unit/common/test_packages.py
@@ -0,0 +1,88 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+from pip import exceptions as pip_exceptions
+from pip.operations import freeze
+import unittest
+
+from yardstick.common import packages
+
+
+class PipExecuteActionTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_pip_main = mock.patch.object(packages, '_pip_main')
+ self.mock_pip_main = self._mock_pip_main.start()
+ self.mock_pip_main.return_value = 0
+ self._mock_freeze = mock.patch.object(freeze, 'freeze')
+ self.mock_freeze = self._mock_freeze.start()
+ self.addCleanup(self._cleanup)
+
+ def _cleanup(self):
+ self._mock_pip_main.stop()
+ self._mock_freeze.stop()
+
+ def test_pip_execute_action(self):
+ self.assertEqual(0, packages._pip_execute_action('test_package'))
+
+ def test_remove(self):
+ self.assertEqual(0, packages._pip_execute_action('test_package',
+ action='uninstall'))
+
+ def test_install(self):
+ self.assertEqual(0, packages._pip_execute_action(
+ 'test_package', action='install', target='temp_dir'))
+
+ def test_pip_execute_action_error(self):
+ self.mock_pip_main.return_value = 1
+ self.assertEqual(1, packages._pip_execute_action('test_package'))
+
+ def test_pip_execute_action_exception(self):
+ self.mock_pip_main.side_effect = pip_exceptions.PipError
+ self.assertEqual(1, packages._pip_execute_action('test_package'))
+
+ def test_pip_list(self):
+ pkg_input = [
+ 'XStatic-Rickshaw==1.5.0.0',
+ 'xvfbwrapper==0.2.9',
+ '-e git+https://git.opnfv.org/yardstick@50773a24afc02c9652b662ecca'
+ '2fc5621ea6097a#egg=yardstick',
+ 'zope.interface==4.4.3'
+ ]
+ pkg_dict = {
+ 'XStatic-Rickshaw': '1.5.0.0',
+ 'xvfbwrapper': '0.2.9',
+ 'yardstick': '50773a24afc02c9652b662ecca2fc5621ea6097a',
+ 'zope.interface': '4.4.3'
+ }
+ self.mock_freeze.return_value = pkg_input
+
+ pkg_output = packages.pip_list()
+ for pkg_name, pkg_version in pkg_output.items():
+ self.assertEqual(pkg_dict.get(pkg_name), pkg_version)
+
+ def test_pip_list_single_package(self):
+ pkg_input = [
+ 'XStatic-Rickshaw==1.5.0.0',
+ 'xvfbwrapper==0.2.9',
+ '-e git+https://git.opnfv.org/yardstick@50773a24afc02c9652b662ecca'
+ '2fc5621ea6097a#egg=yardstick',
+ 'zope.interface==4.4.3'
+ ]
+ self.mock_freeze.return_value = pkg_input
+
+ pkg_output = packages.pip_list(pkg_name='xvfbwrapper')
+ self.assertEqual(1, len(pkg_output))
+ self.assertEqual(pkg_output.get('xvfbwrapper'), '0.2.9')
diff --git a/yardstick/tests/unit/common/test_utils.py b/yardstick/tests/unit/common/test_utils.py
index 033bb0243..e71d0ff0f 100644
--- a/yardstick/tests/unit/common/test_utils.py
+++ b/yardstick/tests/unit/common/test_utils.py
@@ -19,6 +19,8 @@ from six.moves import configparser
import unittest
import yardstick
+from yardstick import ssh
+import yardstick.error
from yardstick.common import utils
from yardstick.common import constants
@@ -125,6 +127,63 @@ class CommonUtilTestCase(unittest.TestCase):
("=".join(item) for item in sorted(flattened_data.items())))
self.assertEqual(result, line)
+ def test_get_key_with_default_negative(self):
+ with self.assertRaises(KeyError):
+ utils.get_key_with_default({}, 'key1')
+
+ @mock.patch('yardstick.common.utils.open', create=True)
+ def test_find_relative_file_not_found(self, mock_open):
+ mock_open.side_effect = IOError
+
+ with self.assertRaises(IOError):
+ utils.find_relative_file('my/path', 'task/path')
+
+ self.assertEqual(mock_open.call_count, 2)
+
+ @mock.patch('yardstick.common.utils.open', create=True)
+ def test_open_relative_path(self, mock_open):
+ mock_open_result = mock_open()
+ mock_open_call_count = 1 # initial call to get result
+
+ self.assertEqual(utils.open_relative_file('foo', 'bar'), mock_open_result)
+
+ mock_open_call_count += 1 # one more call expected
+ self.assertEqual(mock_open.call_count, mock_open_call_count)
+ self.assertIn('foo', mock_open.call_args_list[-1][0][0])
+ self.assertNotIn('bar', mock_open.call_args_list[-1][0][0])
+
+ def open_effect(*args, **kwargs):
+ if kwargs.get('name', args[0]) == os.path.join('bar', 'foo'):
+ return mock_open_result
+ raise IOError(errno.ENOENT, 'not found')
+
+ mock_open.side_effect = open_effect
+ self.assertEqual(utils.open_relative_file('foo', 'bar'), mock_open_result)
+
+ mock_open_call_count += 2 # two more calls expected
+ self.assertEqual(mock_open.call_count, mock_open_call_count)
+ self.assertIn('foo', mock_open.call_args_list[-1][0][0])
+ self.assertIn('bar', mock_open.call_args_list[-1][0][0])
+
+ # test an IOError of type ENOENT
+ mock_open.side_effect = IOError(errno.ENOENT, 'not found')
+ with self.assertRaises(IOError):
+ # the second call still raises
+ utils.open_relative_file('foo', 'bar')
+
+ mock_open_call_count += 2 # two more calls expected
+ self.assertEqual(mock_open.call_count, mock_open_call_count)
+ self.assertIn('foo', mock_open.call_args_list[-1][0][0])
+ self.assertIn('bar', mock_open.call_args_list[-1][0][0])
+
+ # test an IOError other than ENOENT
+ mock_open.side_effect = IOError(errno.EBUSY, 'busy')
+ with self.assertRaises(IOError):
+ utils.open_relative_file('foo', 'bar')
+
+ mock_open_call_count += 1 # one more call expected
+ self.assertEqual(mock_open.call_count, mock_open_call_count)
+
class TestMacAddressToHex(unittest.TestCase):
@@ -930,9 +989,9 @@ class TestUtils(unittest.TestCase):
def test_error_class(self):
with self.assertRaises(RuntimeError):
- utils.ErrorClass()
+ yardstick.error.ErrorClass()
- error_instance = utils.ErrorClass(test='')
+ error_instance = yardstick.error.ErrorClass(test='')
with self.assertRaises(AttributeError):
error_instance.get_name()
@@ -1075,8 +1134,27 @@ class SafeDecodeUtf8TestCase(unittest.TestCase):
self.assertEqual('this is a byte array', out)
-def main():
- unittest.main()
-
-if __name__ == '__main__':
- main()
+class ReadMeminfoTestCase(unittest.TestCase):
+
+ MEMINFO = (b'MemTotal: 65860500 kB\n'
+ b'MemFree: 28690900 kB\n'
+ b'MemAvailable: 52873764 kB\n'
+ b'Active(anon): 3015676 kB\n'
+ b'HugePages_Total: 8\n'
+ b'Hugepagesize: 1048576 kB')
+ MEMINFO_DICT = {'MemTotal': '65860500',
+ 'MemFree': '28690900',
+ 'MemAvailable': '52873764',
+ 'Active(anon)': '3015676',
+ 'HugePages_Total': '8',
+ 'Hugepagesize': '1048576'}
+
+ def test_read_meminfo(self):
+ ssh_client = ssh.SSH('user', 'host')
+ with mock.patch.object(ssh_client, 'get_file_obj') as \
+ mock_get_file_obj, \
+ mock.patch.object(six, 'BytesIO',
+ return_value=six.BytesIO(self.MEMINFO)):
+ output = utils.read_meminfo(ssh_client)
+ mock_get_file_obj.assert_called_once_with('/proc/meminfo', mock.ANY)
+ self.assertEqual(self.MEMINFO_DICT, output)
diff --git a/yardstick/tests/unit/test_ssh.py b/yardstick/tests/unit/test_ssh.py
index dbaae8c37..615783f3e 100644
--- a/yardstick/tests/unit/test_ssh.py
+++ b/yardstick/tests/unit/test_ssh.py
@@ -21,12 +21,13 @@ import os
import socket
import unittest
from io import StringIO
+from itertools import count
import mock
from oslo_utils import encodeutils
from yardstick import ssh
-from yardstick.ssh import SSHError
+from yardstick.ssh import SSHError, SSHTimeout
from yardstick.ssh import SSH
from yardstick.ssh import AutoConnectSSH
@@ -508,13 +509,45 @@ class SSHRunTestCase(unittest.TestCase):
class TestAutoConnectSSH(unittest.TestCase):
- def test__connect_with_wait(self):
- auto_connect_ssh = AutoConnectSSH('user1', 'host1', wait=True)
- auto_connect_ssh._get_client = mock.Mock()
- auto_connect_ssh.wait = mock_wait = mock.Mock()
+ def test__connect_loop(self):
+ auto_connect_ssh = AutoConnectSSH('user1', 'host1', wait=0)
+ auto_connect_ssh._get_client = mock__get_client = mock.Mock()
auto_connect_ssh._connect()
- self.assertEqual(mock_wait.call_count, 1)
+ self.assertEqual(mock__get_client.call_count, 1)
+
+ def test___init___negative(self):
+ with self.assertRaises(TypeError):
+ AutoConnectSSH('user1', 'host1', wait=['wait'])
+
+ with self.assertRaises(ValueError):
+ AutoConnectSSH('user1', 'host1', wait='wait')
+
+ @mock.patch('yardstick.ssh.time')
+ def test__connect_loop_ssh_error(self, mock_time):
+ mock_time.time.side_effect = count()
+
+ auto_connect_ssh = AutoConnectSSH('user1', 'host1', wait=10)
+ auto_connect_ssh._get_client = mock__get_client = mock.Mock()
+ mock__get_client.side_effect = SSHError
+
+ with self.assertRaises(SSHTimeout):
+ auto_connect_ssh._connect()
+
+ self.assertEqual(mock_time.time.call_count, 12)
+
+ def test_get_file_obj(self):
+ auto_connect_ssh = AutoConnectSSH('user1', 'host1', wait=10)
+ auto_connect_ssh._get_client = mock__get_client = mock.Mock()
+ mock_client = mock__get_client()
+ mock_open_sftp = mock_client.open_sftp()
+ mock_sftp = mock.Mock()
+ mock_open_sftp.__enter__ = mock.Mock(return_value=mock_sftp)
+ mock_open_sftp.__exit__ = mock.Mock()
+
+ auto_connect_ssh.get_file_obj('remote/path', mock.Mock())
+
+ self.assertEqual(mock_sftp.getfo.call_count, 1)
def test__make_dict(self):
auto_connect_ssh = AutoConnectSSH('user1', 'host1')
@@ -527,7 +560,7 @@ class TestAutoConnectSSH(unittest.TestCase):
'key_filename': None,
'password': None,
'name': None,
- 'wait': True,
+ 'wait': AutoConnectSSH.DEFAULT_WAIT_TIMEOUT,
}
result = auto_connect_ssh._make_dict()
self.assertDictEqual(result, expected)
@@ -537,6 +570,13 @@ class TestAutoConnectSSH(unittest.TestCase):
self.assertEqual(auto_connect_ssh.get_class(), AutoConnectSSH)
+ def test_drop_connection(self):
+ auto_connect_ssh = AutoConnectSSH('user1', 'host1')
+ self.assertFalse(auto_connect_ssh._client)
+ auto_connect_ssh._client = True
+ auto_connect_ssh.drop_connection()
+ self.assertFalse(auto_connect_ssh._client)
+
@mock.patch('yardstick.ssh.SCPClient')
def test_put(self, mock_scp_client_type):
auto_connect_ssh = AutoConnectSSH('user1', 'host1')