summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--docs/index.rst52
-rwxr-xr-xdocs/templates/Yardstick_task_templates.rst48
-rw-r--r--docs/templates/testcase_description_template.rst94
-rw-r--r--docs/templates/testcase_description_v2_template.rst6
-rw-r--r--docs/user_guides/framework/index.rst9
-rw-r--r--docs/vTC/README.rst71
-rw-r--r--docs/vTC/abbreviations.rst3
-rw-r--r--docs/yardstick/index.rst21
-rw-r--r--docs/yardstick/opnfv_yardstick_tc001.rst8
-rw-r--r--docs/yardstick/opnfv_yardstick_tc002.rst6
-rw-r--r--docs/yardstick/opnfv_yardstick_tc008.rst71
-rw-r--r--docs/yardstick/opnfv_yardstick_tc009.rst70
-rw-r--r--docs/yardstick/opnfv_yardstick_tc012.rst8
-rw-r--r--samples/lmbench.yaml20
-rw-r--r--samples/ping-serial.yaml11
-rw-r--r--samples/pktgen.yaml22
-rw-r--r--tests/opnfv/test_cases/opnfv_yardstick_tc008.yaml58
-rw-r--r--tests/opnfv/test_cases/opnfv_yardstick_tc009.yaml53
-rw-r--r--tests/opnfv/test_suites/opnfv_ericsson-pod1_daily.yaml6
-rw-r--r--tests/opnfv/test_suites/opnfv_huawei-us-deploy-bare-1_daily.yaml4
-rw-r--r--tests/opnfv/test_suites/opnfv_opnfv-jump-2_daily.yaml4
-rwxr-xr-xyardstick/benchmark/runners/arithmetic.py91
-rw-r--r--yardstick/vTC/apexlake/.gitignore2
-rwxr-xr-xyardstick/vTC/apexlake/bin/run_tests.sh1
-rw-r--r--yardstick/vTC/apexlake/experimental_framework/__init__.py17
-rw-r--r--yardstick/vTC/apexlake/experimental_framework/benchmarks/__init__.py17
-rw-r--r--yardstick/vTC/apexlake/experimental_framework/common.py570
-rw-r--r--yardstick/vTC/apexlake/experimental_framework/constants/conf_file_sections.py26
-rw-r--r--yardstick/vTC/apexlake/tests/common_test.py643
-rw-r--r--yardstick/vTC/apexlake/tests/generates_template_test.py36
-rw-r--r--yardstick/vTC/apexlake/tests/heat_manager_test.py4
-rw-r--r--yardstick/vTC/apexlake/tests/instantiation_validation_bench_test.py9
-rw-r--r--yardstick/vTC/apexlake/tests/instantiation_validation_noisy_bench_test.py18
-rw-r--r--yardstick/vTC/apexlake/tests/multi_tenancy_throughput_benchmark_test.py5
-rw-r--r--yardstick/vTC/apexlake/tests/rfc2544_throughput_benchmark_test.py17
35 files changed, 1796 insertions, 305 deletions
diff --git a/docs/index.rst b/docs/index.rst
deleted file mode 100644
index 36a78321a..000000000
--- a/docs/index.rst
+++ /dev/null
@@ -1,52 +0,0 @@
-.. OPNFV Yardstick documentation master file.
- Add chapters and user guides using the
- root `toctree` directive.
-
-=================
-Yardstick Project
-=================
-
-Welcome to Yardstick's documentation !
-
-.. _Yardstick: https://wiki.opnfv.org/yardstick
-
-Yardstick_ is an OPNFV testing project.
-
-The project goal is to verify infrastructure compliance, from the perspective
-of a VNF.
-
-The project scope is the development of a test framework, test cases and test
-stimuli.
-
-This document introduces the methodology applied in Yardstick_ project, which
-decomposes typical VNF work-load performance metrics into a number of
-characteristics/performance vectors, each of them can be represented by
-distinct test-cases.
-
-The user guides and test cases delivered for the release are included.
-
-Contents:
-
-Yardstick Project Description
-=============================
-
-.. toctree::
- :numbered:
- :maxdepth: 2
-
-User Guides
-===========
-
-.. toctree::
- :maxdepth: 1
-
- user_guides/framework/03-installation
-
-Indices
-=======
-
-* :ref:`search`
-
-Revision: _sha1_
-
-Build date: |today|
diff --git a/docs/templates/Yardstick_task_templates.rst b/docs/templates/Yardstick_task_templates.rst
index 538937fd7..d2c2b7ec9 100755
--- a/docs/templates/Yardstick_task_templates.rst
+++ b/docs/templates/Yardstick_task_templates.rst
@@ -3,10 +3,12 @@ Task Template Syntax
Basic template syntax
---------------------
-A nice feature of the input task format used in Yardstick is that it supports the template syntax based on Jinja2.
-This turns out to be extremely useful when, say, you have a fixed structure of your task but you want to
-parameterize this task in some way.
-For example, imagine your input task file (task.yaml) runs a set of Ping scenarios:
+A nice feature of the input task format used in Yardstick is that it supports
+the template syntax based on Jinja2.
+This turns out to be extremely useful when, say, you have a fixed structure of
+your task but you want to parameterize this task in some way.
+For example, imagine your input task file (task.yaml) runs a set of Ping
+scenarios:
::
@@ -34,9 +36,10 @@ For example, imagine your input task file (task.yaml) runs a set of Ping scenari
context:
...
-Let's say you want to run the same set of scenarios with the same runner/context/sla,
-but you want to try another packetsize to compare the performance.
-The most elegant solution is then to turn the packetsize name into a template variable:
+Let's say you want to run the same set of scenarios with the same runner/
+context/sla, but you want to try another packetsize to compare the performance.
+The most elegant solution is then to turn the packetsize name into a template
+variable:
::
@@ -64,14 +67,17 @@ The most elegant solution is then to turn the packetsize name into a template va
context:
...
-and then pass the argument value for {{packetsize}} when starting a task with this configuration file.
+and then pass the argument value for {{packetsize}} when starting a task with
+this configuration file.
Yardstick provides you with different ways to do that:
-1.Pass the argument values directly in the command-line interface (with either a JSON or YAML dictionary):
+1.Pass the argument values directly in the command-line interface (with either
+a JSON or YAML dictionary):
::
- yardstick task start samples/ping-template.yaml --task-args '{"packetsize": "200"}'
+ yardstick task start samples/ping-template.yaml
+ --task-args'{"packetsize":"200"}'
2.Refer to a file that specifies the argument values (JSON/YAML):
@@ -81,9 +87,12 @@ Yardstick provides you with different ways to do that:
Using the default values
------------------------
-Note that the Jinja2 template syntax allows you to set the default values for your parameters.
-With default values set, your task file will work even if you don't parameterize it explicitly while starting a task.
-The default values should be set using the {% set ... %} clause (task.yaml).For example:
+Note that the Jinja2 template syntax allows you to set the default values for
+your parameters.
+With default values set, your task file will work even if you don't
+parameterize it explicitly while starting a task.
+The default values should be set using the {% set ... %} clause (task.yaml).
+For example:
::
@@ -105,13 +114,18 @@ The default values should be set using the {% set ... %} clause (task.yaml).For
interval: 1
...
-If you don't pass the value for {{packetsize}} while starting a task, the default one will be used.
+If you don't pass the value for {{packetsize}} while starting a task, the
+default one will be used.
Advanced templates
------------------
-Yardstick makes it possible to use all the power of Jinja2 template syntax, including the mechanism of built-in functions.
-As an example, let us make up a task file that will do a block storage performance test.
-The input task file (fio-template.yaml) below uses the Jinja2 for-endfor construct to accomplish that:
+
+Yardstick makes it possible to use all the power of Jinja2 template syntax,
+including the mechanism of built-in functions.
+As an example, let us make up a task file that will do a block storage
+performance test.
+The input task file (fio-template.yaml) below uses the Jinja2 for-endfor
+construct to accomplish that:
::
diff --git a/docs/templates/testcase_description_template.rst b/docs/templates/testcase_description_template.rst
deleted file mode 100644
index 1651d360c..000000000
--- a/docs/templates/testcase_description_template.rst
+++ /dev/null
@@ -1,94 +0,0 @@
-.. Template to be used for test case descriptions in Yardstick Project.
- Write one .rst per test case.
- Upload the .rst for the test case in /docs/source/yardstick directory.
- Review in Gerrit.
-
-.. image:: ../etc/opnfv-logo.png
- :height: 40
- :width: 200
- :alt: OPNFV
- :align: left
-
-******************
-Test Case <slogan>
-******************
-
-.. contents:: Table of Contents
- :depth: 3
-
----------------------
-Test Case Description
----------------------
-
-Yardstick Test Case ID
-----------------------
-
-OPNFV_YARDSTICK_TC<abc>_<slogan>
-
-where:
- - <abc>: check Jira issue for the test case
- - <slogan>: check Jira issue for the test case
-
-
-Purpose
--------
-
-Describe what is the purpose of the test case
-
-Area
-----
-
-State the area and sub-area covered by the test case.
-
-Areas: Compute, Networking, Storage
-
-Sub-areas: Performance, System limit, QoS
-
-Metrics
--------
-
-What will be measured, attribute name or collection of attributes, behavior
-
-References
-----------
-
-Reference documentation
-
---------------
-Pre-requisites
---------------
-
-Tools
------
-
-What tools are used to perform the measurements (e.g. fio, pktgen)
-
-
-Configuration
--------------
-
-State the .yaml file to use.
-
-State default configuration in the tool(s) used to perform the measurements
-(e.g. fio, pktgen).
-
-State what POD-specific configuration is required to enable running the test
-case in different PODs.
-
-
-State SLA, if applicable.
-
-State test duration.
-
--------
-Results
--------
-
-Expected outcome
-----------------
-
-State applicable graphical presentation
-
-State applicable output details
-
-State expected Value, behavior, pass/fail criteria
diff --git a/docs/templates/testcase_description_v2_template.rst b/docs/templates/testcase_description_v2_template.rst
index 0fa2359e9..da90f561e 100644
--- a/docs/templates/testcase_description_v2_template.rst
+++ b/docs/templates/testcase_description_v2_template.rst
@@ -3,12 +3,6 @@
Upload the .rst for the test case in /docs/source/yardstick directory.
Review in Gerrit.
-.. image:: ../etc/opnfv-logo.png
- :height: 40
- :width: 200
- :alt: OPNFV
- :align: left
-
*************************************
Yardstick Test Case Description TCXXX
*************************************
diff --git a/docs/user_guides/framework/index.rst b/docs/user_guides/framework/index.rst
new file mode 100644
index 000000000..f982c30ff
--- /dev/null
+++ b/docs/user_guides/framework/index.rst
@@ -0,0 +1,9 @@
+=================================
+Yardstick Framework Documentation
+=================================
+
+.. toctree::
+ :numbered:
+ :maxdepth: 2
+
+ 03-installation
diff --git a/docs/vTC/README.rst b/docs/vTC/README.rst
index 018573541..ae6fefa59 100644
--- a/docs/vTC/README.rst
+++ b/docs/vTC/README.rst
@@ -1,20 +1,24 @@
-=========
-Yardstick
-=========
+==========================
+Virtual Traffic Classifier
+==========================
-Overview of the virtual Traffic Classifier
+Overview
========
+
The virtual Traffic Classifier VNF [1], comprises in the current version of
1 VNFC [2]. The VNFC contains both the Traffic Inspection module, and the
Traffic forwarding module, needed to run the VNF. The exploitation of DPI
methods for traffic classification is built around two basic assumptions:
+
(i) third parties unaffiliated with either source or recipient are able to
inspect each IP packet’s payload and
(ii) the classifier knows the relevant syntax of each application’s packet
payloads (protocol signatures, data patterns, etc.).
-The proposed DPI based approach will only use an indicative, small number of the
-initial packets from each flow in order to identify the content and not inspect
-each packet.
+
+The proposed DPI based approach will only use an indicative, small number of
+the initial packets from each flow in order to identify the content and not
+inspect each packet.
+
In this respect it follows the Packet Based per Flow State (PBFS).
This method uses a table to track each session based on the 5-tuples
(src address,dest address,src port,dest port,transport protocol)
@@ -22,6 +26,7 @@ that is maintained for each flow.
Concepts
========
+
Traffic Inspection: The process of packet analysis and application
identification of network traffic that passes through the vTC.
@@ -29,7 +34,8 @@ Traffic Forwarding: The process of packet forwarding from an incoming
network interface to a pre-defined outgoing network interface.
Traffic Rule Application: The process of packet tagging, based on a
-predefined set of rules. Packet tagging may include e.g. ToS field modification.
+predefined set of rules. Packet tagging may include e.g. ToS field
+modification.
Architecture
============
@@ -37,44 +43,29 @@ Architecture
The Traffic Inspection module is the most computationally intensive component
of the VNF. It implements filtering and packet matching algorithms in order to
support the enhanced traffic forwarding capability of the VNF. The component
-supports a flow table (exploiting hashing algorithms for fast indexing of flows)
-and an inspection engine for traffic classification. The implementation used for
-these experiments exploits the nDPI library. The packet capturing mechanism is
-implemented using libpcap. When the DPI engine identifies a new flow, the flow
-register is updated with the appropriate information and transmitted across the
-Traffic Forwarding module, which then applies any required policy updates.
+supports a flow table (exploiting hashing algorithms for fast indexing of
+flows) and an inspection engine for traffic classification.
+
+The implementation used for these experiments exploits the nDPI library.
+The packet capturing mechanism is implemented using libpcap. When the DPI
+engine identifies a new flow, the flow register is updated with the
+appropriate information and transmitted across the Traffic Forwarding module,
+which then applies any required policy updates.
+
The Traffic Forwarding moudle is responsible for routing and packet forwarding.
It accepts incoming network traffic, consults the flow table for classification
-information for each incoming flow and then applies pre-defined policies marking
-e.g. type of Service/Differentiated Services Code Point (TOS/DSCP) multimedia
-traffic for QoS enablement on the forwarded traffic. It is assumed that the
-traffic is forwarded using the default policy until it is identified and new
-policies are enforced. The expected response delay is considered to be
-negligible,as only a small number of packets are required to identify each flow.
+information for each incoming flow and then applies pre-defined policies
+marking e.g. type of Service/Differentiated Services Code Point (TOS/DSCP)
+multimedia traffic for QoS enablement on the forwarded traffic.
+It is assumed that the traffic is forwarded using the default policy until it
+is identified and new policies are enforced.
+
+The expected response delay is considered to be negligible,as only a small
+number of packets are required to identify each flow.
Graphical Overview
==================
-+----------------------------+
-| |
-| Virtual Traffic Classifier |
-| |
-| Analysing/Forwarding |
-| +--------> |
-| ethA ethB |
-+------+--------------+------+
- | ^
- | |
- | |
- | |
- v |
-+------+--------------+------+
-| |
-| Virtual Switch |
-| |
-+----------------------------+
-
-
Install
=======
diff --git a/docs/vTC/abbreviations.rst b/docs/vTC/abbreviations.rst
index 61475415a..a713ee66b 100644
--- a/docs/vTC/abbreviations.rst
+++ b/docs/vTC/abbreviations.rst
@@ -1,6 +1,5 @@
Abbreviations for the virtual Traffic Classifier
-========
+================================================
[1] VNF - Virtual Network Function
[2] VNFC - Virtual Network Function Component
-
diff --git a/docs/yardstick/index.rst b/docs/yardstick/index.rst
new file mode 100644
index 000000000..b14670bdd
--- /dev/null
+++ b/docs/yardstick/index.rst
@@ -0,0 +1,21 @@
+======================
+Yardstick Config Guide
+======================
+
+Test Case Descriptions
+======================
+
+.. toctree::
+ :maxdepth: 1
+
+ opnfv_yardstick_tc001.rst
+ opnfv_yardstick_tc002.rst
+
+Templates
+=========
+
+.. toctree::
+ :maxdepth: 1
+
+ ../templates/Yardstick_task_templates
+ ../templates/testcase_description_v2_template
diff --git a/docs/yardstick/opnfv_yardstick_tc001.rst b/docs/yardstick/opnfv_yardstick_tc001.rst
index 72ba68e9f..16c9d2c60 100644
--- a/docs/yardstick/opnfv_yardstick_tc001.rst
+++ b/docs/yardstick/opnfv_yardstick_tc001.rst
@@ -1,9 +1,3 @@
-.. image:: ../../etc/opnfv-logo.png
- :height: 40
- :width: 200
- :alt: OPNFV
- :align: left
-
*************************************
Yardstick Test Case Description TC001
*************************************
@@ -68,4 +62,4 @@ Yardstick Test Case Description TC001
+--------------+------+----------------------------------+--------------------+
|test verdict | Fails only if SLA is not passed, or if there is a test case |
| | execution problem. |
-+--------------+--------------------------------------------------------------+ \ No newline at end of file
++--------------+--------------------------------------------------------------+
diff --git a/docs/yardstick/opnfv_yardstick_tc002.rst b/docs/yardstick/opnfv_yardstick_tc002.rst
index fb1b4694f..bc795bf38 100644
--- a/docs/yardstick/opnfv_yardstick_tc002.rst
+++ b/docs/yardstick/opnfv_yardstick_tc002.rst
@@ -1,9 +1,3 @@
-.. image:: ../../etc/opnfv-logo.png
- :height: 40
- :width: 200
- :alt: OPNFV
- :align: left
-
*************************************
Yardstick Test Case Description TC002
*************************************
diff --git a/docs/yardstick/opnfv_yardstick_tc008.rst b/docs/yardstick/opnfv_yardstick_tc008.rst
new file mode 100644
index 000000000..f4971fbad
--- /dev/null
+++ b/docs/yardstick/opnfv_yardstick_tc008.rst
@@ -0,0 +1,71 @@
+*************************************
+Yardstick Test Case Description TC008
+*************************************
++-----------------------------------------------------------------------------+
+|Network Performance |
++==============+==============================================================+
+|test case id | OPNFV_YARDSTICK_TC008_NW PERF, Packet loss Extended Test |
++--------------+--------------------------------------------------------------+
+|metric | Number of flows, packet size and throughput |
++--------------+--------------------------------------------------------------+
+|test purpose | To evaluate the IaaS network performance with regards to |
+| | flows and throughput, such as if and how different amounts |
+| | of packet sizes and flows matter for the throughput between |
+| | VMs on different compute blades. Typically e.g. the |
+| | performance of a vSwitch |
+| | depends on the number of flows running through it. Also |
+| | performance of other equipment or entities can depend |
+| | on the number of flows or the packet sizes used. |
+| | The purpose is also to be able to spot trends. Test results, |
+| | graphs ans similar shall be stored for comparison reasons and|
+| | product evolution understanding between different OPNFV |
+| | versions and/or configurations. |
++--------------+--------------------------------------------------------------+
+|configuration | file: opnfv_yardstick_tc008.yaml |
+| | |
+| | Packet size: 64, 128, 256, 512, 1024, 1280 and 1518 bytes. |
+| | |
+| | Number of ports: 1, 10, 50, 100, 500 and 1000. The amount of |
+| | configured ports map from 2 up to 1001000 flows, |
+| | respectively. Each packet_size/port_amount combination is run|
+| | ten times, for 20 seconds each. Then the next |
+| | packet_size/port_amount combination is run, and so on. |
+| | |
+| | The client and server are distributed on different HW. |
+| | |
+| | For SLA max_ppm is set to 1000. |
++--------------+--------------------------------------------------------------+
+|test tool | pktgen |
+| | |
+| | (Pktgen is not always part of a Linux distribution, hence it |
+| | needs to be installed. It is part of the Yardstick Docker |
+| | image. |
+| | As an example see the /yardstick/tools/ directory for how |
+| | to generate a Linux image with pktgen included.) |
++--------------+--------------------------------------------------------------+
+|references |https://www.kernel.org/doc/Documentation/networking/pktgen.txt|
+| | |
+| |ETSI-NFV-TST001 |
++--------------+--------------------------------------------------------------+
+|applicability | Test can be configured with different packet sizes, amount |
+| | of flows and test duration. Default values exist. |
+| | |
+| |SLA (optional): |
+| | max_ppm: The number of packets per million packets sent |
+| | that are acceptable to lose, i.e. not received. |
++--------------+--------------------------------------------------------------+
+|pre-test | The test case image needs to be installed into Glance |
+|conditions | with pktgen included in it. |
+| | |
+| | No POD specific requirements have been identified. |
++--------------+------+----------------------------------+--------------------+
+|test sequence | step | description | result |
+| +------+----------------------------------+--------------------+
+| | 1 | The hosts are installed, as | Logs are stored |
+| | | server and client. pktgen is | |
+| | | invoked and logs are produced | |
+| | | and stored. | |
++--------------+------+----------------------------------+--------------------+
+|test verdict | Fails only if SLA is not passed, or if there is a test case |
+| | execution problem. |
++--------------+--------------------------------------------------------------+
diff --git a/docs/yardstick/opnfv_yardstick_tc009.rst b/docs/yardstick/opnfv_yardstick_tc009.rst
new file mode 100644
index 000000000..07d7fbfea
--- /dev/null
+++ b/docs/yardstick/opnfv_yardstick_tc009.rst
@@ -0,0 +1,70 @@
+*************************************
+Yardstick Test Case Description TC009
+*************************************
++-----------------------------------------------------------------------------+
+|Network Performance |
++==============+==============================================================+
+|test case id | OPNFV_YARDSTICK_TC009_NW PERF, Packet loss |
++--------------+--------------------------------------------------------------+
+|metric | Number of flows and throughput |
++--------------+--------------------------------------------------------------+
+|test purpose | To evaluate the IaaS network performance with regards to |
+| | flows and throughput, such as if and how different amounts |
+| | of flows matter for the throughput between VMs on different |
+| | compute blades. |
+| | Typically e.g. the performance of a vSwitch |
+| | depends on the number of flows running through it. Also |
+| | performance of other equipment or entities can depend |
+| | on the number of flows or the packet sizes used. |
+| | The purpose is also to be able to spot trends. Test results, |
+| | graphs ans similar shall be stored for comparison reasons and|
+| | product evolution understanding between different OPNFV |
+| | versions and/or configurations. |
++--------------+--------------------------------------------------------------+
+|configuration | file: opnfv_yardstick_tc009.yaml |
+| | |
+| | Packet size: 64 bytes |
+| | |
+| | Number of ports: 1, 10, 50, 100, 500 and 1000. The amount of |
+| | configured ports map from 2 up to 1001000 flows, |
+| | respectively. Each port amount is run ten times, for 20 |
+| | seconds each. Then the next port_amount is run, and so on. |
+| | |
+| | The client and server are distributed on different HW. |
+| | |
+| | For SLA max_ppm is set to 1000. |
++--------------+--------------------------------------------------------------+
+|test tool | pktgen |
+| | |
+| | (Pktgen is not always part of a Linux distribution, hence it |
+| | needs to be installed. It is part of the Yardstick Docker |
+| | image. |
+| | As an example see the /yardstick/tools/ directory for how |
+| | to generate a Linux image with pktgen included.) |
++--------------+--------------------------------------------------------------+
+|references |https://www.kernel.org/doc/Documentation/networking/pktgen.txt|
+| | |
+| |ETSI-NFV-TST001 |
++--------------+--------------------------------------------------------------+
+|applicability | Test can be configured with different packet sizes, amount |
+| | of flows and test duration. Default values exist. |
+| | |
+| |SLA (optional): |
+| | max_ppm: The number of packets per million packets sent |
+| | that are acceptable to lose, i.e. not received. |
++--------------+--------------------------------------------------------------+
+|pre-test | The test case image needs to be installed into Glance |
+|conditions | with pktgen included in it. |
+| | |
+| | No POD specific requirements have been identified. |
++--------------+------+----------------------------------+--------------------+
+|test sequence | step | description | result |
+| +------+----------------------------------+--------------------+
+| | 1 | The hosts are installed, as | Logs are stored |
+| | | server and client. pktgen is | |
+| | | invoked and logs are produced | |
+| | | and stored. | |
++--------------+------+----------------------------------+--------------------+
+|test verdict | Fails only if SLA is not passed, or if there is a test case |
+| | execution problem. |
++--------------+--------------------------------------------------------------+
diff --git a/docs/yardstick/opnfv_yardstick_tc012.rst b/docs/yardstick/opnfv_yardstick_tc012.rst
index 0c8e9c02a..b5768c0c5 100644
--- a/docs/yardstick/opnfv_yardstick_tc012.rst
+++ b/docs/yardstick/opnfv_yardstick_tc012.rst
@@ -1,4 +1,3 @@
-
*************************************
Yardstick Test Case Description TC012
*************************************
@@ -15,7 +14,7 @@ Yardstick Test Case Description TC012
+--------------+--------------------------------------------------------------+
|configuration | File: opnfv_yardstick_tc012.yaml |
| | |
-| | * SLA (optional): 15000 (MBps) |
+| | * SLA (optional): 15000 (MBps) |
| | min_bw: The minimum amount of memory bandwidth that is |
| | accepted. |
| | * Size: 10 240 kB - test allocates twice that size (20 480kB)|
@@ -41,8 +40,8 @@ Yardstick Test Case Description TC012
|references | * http://manpages.ubuntu.com/manpages/trusty/bw_mem.8.html |
| | |
| | * McVoy, Larry W., and Carl Staelin. "lmbench: Portable Tools|
-| | for Performance Analysis." *USENIX annual technical |
-| | conference*. 1996. |
+| | for Performance Analysis." |
+| | * USENIX annual technical conference. 1996. |
+--------------+--------------------------------------------------------------+
|applicability | Test can be configured with different |
| | * memory sizes; |
@@ -50,6 +49,7 @@ Yardstick Test Case Description TC012
| | fcp, bzero, bcopy); |
| | * number of warmup iterations; |
| | * iterations and intervals. |
+| | |
| | There are default values for each above-mentioned option. |
+--------------+--------------------------------------------------------------+
|pre-test | The test case image needs to be installed into Glance |
diff --git a/samples/lmbench.yaml b/samples/lmbench.yaml
index 2b8e99084..8baa81477 100644
--- a/samples/lmbench.yaml
+++ b/samples/lmbench.yaml
@@ -9,16 +9,18 @@ scenarios:
type: Lmbench
options:
test_type: "latency"
- stride: 64
stop_size: 32
host: demeter.demo
runner:
type: Arithmetic
- name: stride
- stop: 128
- step: 64
+ iterators:
+ -
+ name: stride
+ start: 64
+ stop: 128
+ step: 64
sla:
max_latency: 35
@@ -27,16 +29,18 @@ scenarios:
type: Lmbench
options:
test_type: "bandwidth"
- size: 500
benchmark: "wr"
host: demeter.demo
runner:
type: Arithmetic
- name: size
- stop: 2000
- step: 500
+ iterators:
+ -
+ name: size
+ start: 500
+ stop: 2000
+ step: 500
sla:
min_bandwidth: 10000
diff --git a/samples/ping-serial.yaml b/samples/ping-serial.yaml
index 37ea715a2..9c492e481 100644
--- a/samples/ping-serial.yaml
+++ b/samples/ping-serial.yaml
@@ -20,16 +20,17 @@ scenarios:
action: monitor
-
type: Ping
- options:
- packetsize: 100
host: apollo.demo
target: artemis.demo
runner:
type: Arithmetic
- name: packetsize
interval: 0.2
- stop: 6000
- step: 100
+ iterators:
+ -
+ name: packetsize
+ start: 100
+ stop: 6000
+ step: 100
context:
name: demo
diff --git a/samples/pktgen.yaml b/samples/pktgen.yaml
index 11d62795e..d621cb730 100644
--- a/samples/pktgen.yaml
+++ b/samples/pktgen.yaml
@@ -1,6 +1,8 @@
---
# Sample benchmark task config file
# measure network throughput using pktgen
+# with 2 stepping parameters. One stepping
+# in positive and the other in negative direction
schema: "yardstick:task:0.1"
@@ -8,8 +10,6 @@ scenarios:
-
type: Pktgen
options:
- packetsize: 60
- number_of_ports: 10
duration: 20
host: demeter.demo
@@ -17,10 +17,20 @@ scenarios:
runner:
type: Arithmetic
- name: number_of_ports
- # run twice with values 10 and 20
- stop: 20
- step: 10
+ interval: 2
+ iter_type: nested_for_loops
+ # run with packetsize/number_of_ports: 60,20; 60,10; ... 70,10
+ iterators:
+ -
+ name: packetsize
+ start: 60
+ stop: 70
+ step: 5
+ -
+ name: number_of_ports
+ start: 20
+ stop: 10
+ step: -10
sla:
max_ppm: 1000
diff --git a/tests/opnfv/test_cases/opnfv_yardstick_tc008.yaml b/tests/opnfv/test_cases/opnfv_yardstick_tc008.yaml
new file mode 100644
index 000000000..385e530ce
--- /dev/null
+++ b/tests/opnfv/test_cases/opnfv_yardstick_tc008.yaml
@@ -0,0 +1,58 @@
+---
+# Yardstick TC008 config file
+# Measure network throughput and packet loss using Pktgen.
+# Different amount of flows, from 2 up to 1001000, in combination
+# with different packet sizes are run in each test.
+# Each combination of packet size and flow amount is run 10 times.
+# First 10 times with the smallest packet size, starting with the
+# least amount of ports/flows, then next amount of ports with same
+# packet size, and so on. The test sequence continues with the next
+# packet size, with same ports/flows sequence as before.
+
+schema: "yardstick:task:0.1"
+
+scenarios:
+{% for pkt_size in [64, 128, 256, 512, 1024, 1280, 1518] %}
+ {% for num_ports in [1, 10, 50, 100, 500, 1000] %}
+-
+ type: Pktgen
+ options:
+ packetsize: {{pkt_size}}
+ number_of_ports: {{num_ports}}
+ duration: 20
+
+ host: demeter.yardstick-TC008
+ target: poseidon.yardstick-TC008
+
+ runner:
+ type: Iteration
+ iterations: 10
+ interval: 1
+
+ sla:
+ max_ppm: 1000
+ action: monitor
+ {% endfor %}
+{% endfor %}
+
+context:
+ name: yardstick-TC008
+ image: yardstick-trusty-server
+ flavor: yardstick-flavor
+ user: ec2-user
+
+ placement_groups:
+ pgrp1:
+ policy: "availability"
+
+ servers:
+ demeter:
+ floating_ip: true
+ placement: "pgrp1"
+ poseidon:
+ floating_ip: true
+ placement: "pgrp1"
+
+ networks:
+ test:
+ cidr: '10.0.1.0/24'
diff --git a/tests/opnfv/test_cases/opnfv_yardstick_tc009.yaml b/tests/opnfv/test_cases/opnfv_yardstick_tc009.yaml
new file mode 100644
index 000000000..4d46c0336
--- /dev/null
+++ b/tests/opnfv/test_cases/opnfv_yardstick_tc009.yaml
@@ -0,0 +1,53 @@
+---
+# Yardstick TC009 config file
+# Measure network throughput and packet loss using pktgen.
+# Different amounts of flows are tested with, from 2 up to 1001000.
+# All tests are run 10 times each. First 10 times with the least
+# amount of ports, then 10 times with the next amount of ports,
+# and so on until all packet sizes have been run with.
+
+schema: "yardstick:task:0.1"
+
+scenarios:
+{% for num_ports in [1, 10, 50, 100, 500, 1000] %}
+-
+ type: Pktgen
+ options:
+ packetsize: 64
+ number_of_ports: {{num_ports}}
+ duration: 20
+
+ host: demeter.yardstick-TC009
+ target: poseidon.yardstick-TC009
+
+ runner:
+ type: Iteration
+ iterations: 10
+ interval: 1
+
+ sla:
+ max_ppm: 1000
+ action: monitor
+{% endfor %}
+
+context:
+ name: yardstick-TC009
+ image: yardstick-trusty-server
+ flavor: yardstick-flavor
+ user: ec2-user
+
+ placement_groups:
+ pgrp1:
+ policy: "availability"
+
+ servers:
+ demeter:
+ floating_ip: true
+ placement: "pgrp1"
+ poseidon:
+ floating_ip: true
+ placement: "pgrp1"
+
+ networks:
+ test:
+ cidr: '10.0.1.0/24'
diff --git a/tests/opnfv/test_suites/opnfv_ericsson-pod1_daily.yaml b/tests/opnfv/test_suites/opnfv_ericsson-pod1_daily.yaml
index 80689beb1..8279d2378 100644
--- a/tests/opnfv/test_suites/opnfv_ericsson-pod1_daily.yaml
+++ b/tests/opnfv/test_suites/opnfv_ericsson-pod1_daily.yaml
@@ -1,5 +1,5 @@
---
-# LF POD 2 daily task suite
+# ERICSSON POD1 daily task suite
schema: "yardstick:suite:0.1"
@@ -7,4 +7,8 @@ name: "opnfv_ericsson_daily"
test_cases_dir: "tests/opnfv/test_cases/"
test_cases:
-
+ file_name: opnfv_yardstick_tc001.yaml
+-
file_name: opnfv_yardstick_tc002.yaml
+-
+ file_name: opnfv_yardstick_tc012.yaml
diff --git a/tests/opnfv/test_suites/opnfv_huawei-us-deploy-bare-1_daily.yaml b/tests/opnfv/test_suites/opnfv_huawei-us-deploy-bare-1_daily.yaml
index 16b9e7eed..e883f560f 100644
--- a/tests/opnfv/test_suites/opnfv_huawei-us-deploy-bare-1_daily.yaml
+++ b/tests/opnfv/test_suites/opnfv_huawei-us-deploy-bare-1_daily.yaml
@@ -7,4 +7,8 @@ name: "opnfv_huawei_daily"
test_cases_dir: "tests/opnfv/test_cases/"
test_cases:
-
+ file_name: opnfv_yardstick_tc001.yaml
+-
file_name: opnfv_yardstick_tc002.yaml
+-
+ file_name: opnfv_yardstick_tc012.yaml
diff --git a/tests/opnfv/test_suites/opnfv_opnfv-jump-2_daily.yaml b/tests/opnfv/test_suites/opnfv_opnfv-jump-2_daily.yaml
index 022aeeceb..4dece13f2 100644
--- a/tests/opnfv/test_suites/opnfv_opnfv-jump-2_daily.yaml
+++ b/tests/opnfv/test_suites/opnfv_opnfv-jump-2_daily.yaml
@@ -7,4 +7,8 @@ name: "opnfv_lf_daily"
test_cases_dir: "tests/opnfv/test_cases/"
test_cases:
-
+ file_name: opnfv_yardstick_tc001.yaml
+-
file_name: opnfv_yardstick_tc002.yaml
+-
+ file_name: opnfv_yardstick_tc012.yaml
diff --git a/yardstick/benchmark/runners/arithmetic.py b/yardstick/benchmark/runners/arithmetic.py
index 4eab6643e..b1446e01c 100755
--- a/yardstick/benchmark/runners/arithmetic.py
+++ b/yardstick/benchmark/runners/arithmetic.py
@@ -7,8 +7,12 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-'''A runner that every run arithmetically steps a specified input value to
-the scenario. This just means a step value is added to the previous value.
+'''A runner that every run arithmetically steps specified input value(s) to
+the scenario. This just means step value(s) is added to the previous value(s).
+It is possible to combine several named input values and run with those either
+as nested for loops or combine each i:th index of each "input value list"
+until the end of the shortest list is reached (optimally all lists should be
+defined with the same number of values when using such iter_type).
'''
import os
@@ -16,6 +20,7 @@ import multiprocessing
import logging
import traceback
import time
+import itertools
from yardstick.benchmark.runners import base
@@ -30,16 +35,15 @@ def _worker_process(queue, cls, method_name, scenario_cfg,
runner_cfg = scenario_cfg['runner']
interval = runner_cfg.get("interval", 1)
- arg_name = runner_cfg.get('name')
- stop = runner_cfg.get('stop')
- step = runner_cfg.get('step')
- options = scenario_cfg['options']
- start = options.get(arg_name, 0)
+ if 'options' in scenario_cfg:
+ options = scenario_cfg['options']
+ else: # options must be instantiated if not present in yaml
+ options = {}
+ scenario_cfg['options'] = options
runner_cfg['runner_id'] = os.getpid()
- LOG.info("worker START, step(%s, %d, %d, %d), class %s",
- arg_name, start, stop, step, cls)
+ LOG.info("worker START, class %s", cls)
benchmark = cls(scenario_cfg, context_cfg)
benchmark.setup()
@@ -52,18 +56,39 @@ def _worker_process(queue, cls, method_name, scenario_cfg,
sla_action = None
if "sla" in scenario_cfg:
sla_action = scenario_cfg["sla"].get("action", "assert")
- margin = 1 if step > 0 else -1
- for value in range(start, stop+margin, step):
+ # To both be able to include the stop value and handle backwards stepping
+ margin = lambda start, stop: -1 if start > stop else 1
+
+ param_iters = \
+ [xrange(d['start'], d['stop'] + margin(d['start'], d['stop']),
+ d['step']) for d in runner_cfg['iterators']]
+ param_names = [d['name'] for d in runner_cfg['iterators']]
+
+ iter_type = runner_cfg.get("iter_type", "nested_for_loops")
+
+ if iter_type == 'nested_for_loops':
+ # Create a complete combination set of all parameter lists
+ loop_iter = itertools.product(*param_iters)
+ elif iter_type == 'tuple_loops':
+ # Combine each i:th index of respective parameter list
+ loop_iter = itertools.izip(*param_iters)
+ else:
+ LOG.warning("iter_type unrecognized: %s", iter_type)
+ raise TypeError("iter_type unrecognized: %s" % iter_type)
+
+ # Populate options and run the requested method for each value combination
+ for comb_values in loop_iter:
if aborted.is_set():
break
- options[arg_name] = value
-
LOG.debug("runner=%(runner)s seq=%(sequence)s START" %
{"runner": runner_cfg["runner_id"], "sequence": sequence})
+ for i, value in enumerate(comb_values):
+ options[param_names[i]] = value
+
data = {}
errors = ""
@@ -107,29 +132,39 @@ def _worker_process(queue, cls, method_name, scenario_cfg,
class ArithmeticRunner(base.Runner):
- '''Run a scenario arithmetically stepping an input value
+ '''Run a scenario arithmetically stepping input value(s)
Parameters
interval - time to wait between each scenario invocation
type: int
unit: seconds
default: 1 sec
- name - name of scenario option that will be increased for each invocation
+ iter_type: - Iteration type of input parameter(s): nested_for_loops
+ or tuple_loops
type: string
unit: na
- default: none
- start - value to use in first invocation of scenario
- type: int
- unit: na
- default: none
- step - value added to start value in next invocation of scenario
- type: int
- unit: na
- default: none
- stop - value indicating end of invocation
- type: int
- unit: na
- default: none
+ default: nested_for_loops
+ -
+ name - name of scenario option that will be increased for each invocation
+ type: string
+ unit: na
+ default: na
+ start - value to use in first invocation of scenario
+ type: int
+ unit: na
+ default: none
+ stop - value indicating end of invocation. Can be set to same
+ value as start for one single value.
+ type: int
+ unit: na
+ default: none
+ step - value added to start value in next invocation of scenario.
+ Must not be set to zero. Can be set negative if start > stop
+ type: int
+ unit: na
+ default: none
+ -
+ name - and so on......
'''
__execution_type__ = 'Arithmetic'
diff --git a/yardstick/vTC/apexlake/.gitignore b/yardstick/vTC/apexlake/.gitignore
new file mode 100644
index 000000000..ddcd58683
--- /dev/null
+++ b/yardstick/vTC/apexlake/.gitignore
@@ -0,0 +1,2 @@
+benchmark.log
+
diff --git a/yardstick/vTC/apexlake/bin/run_tests.sh b/yardstick/vTC/apexlake/bin/run_tests.sh
index 01592ddc7..6707ad75e 100755
--- a/yardstick/vTC/apexlake/bin/run_tests.sh
+++ b/yardstick/vTC/apexlake/bin/run_tests.sh
@@ -1 +1,2 @@
+export PYTHONPATH=`pwd`
nosetests --with-coverage --cover-erase --cover-package experimental_framework
diff --git a/yardstick/vTC/apexlake/experimental_framework/__init__.py b/yardstick/vTC/apexlake/experimental_framework/__init__.py
new file mode 100644
index 000000000..d4ab29e9d
--- /dev/null
+++ b/yardstick/vTC/apexlake/experimental_framework/__init__.py
@@ -0,0 +1,17 @@
+# Copyright (c) 2015 Intel Research and Development Ireland Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''
+Experimental Framework
+'''
diff --git a/yardstick/vTC/apexlake/experimental_framework/benchmarks/__init__.py b/yardstick/vTC/apexlake/experimental_framework/benchmarks/__init__.py
new file mode 100644
index 000000000..99635a45a
--- /dev/null
+++ b/yardstick/vTC/apexlake/experimental_framework/benchmarks/__init__.py
@@ -0,0 +1,17 @@
+# Copyright (c) 2015 Intel Research and Development Ireland Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Benchmarks to be executed within the framework
+"""
diff --git a/yardstick/vTC/apexlake/experimental_framework/common.py b/yardstick/vTC/apexlake/experimental_framework/common.py
new file mode 100644
index 000000000..97f5bee93
--- /dev/null
+++ b/yardstick/vTC/apexlake/experimental_framework/common.py
@@ -0,0 +1,570 @@
+# Copyright (c) 2015 Intel Research and Development Ireland Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import re
+import ConfigParser
+import logging
+import fileinput
+from experimental_framework.constants import conf_file_sections as cf
+from experimental_framework.constants import framework_parameters as fp
+
+
+# ------------------------------------------------------
+# List of common variables
+# ------------------------------------------------------
+
+LOG = None
+CONF_FILE = None
+DEPLOYMENT_UNIT = None
+ITERATIONS = None
+
+BASE_DIR = None
+RESULT_DIR = None
+TEMPLATE_DIR = None
+TEMPLATE_NAME = None
+TEMPLATE_FILE_EXTENSION = None
+
+PKTGEN = None
+PKTGEN_DIR = None
+PKTGEN_DPDK_DIRECTORY = None
+PKTGEN_PROGRAM = None
+PKTGEN_COREMASK = None
+PKTGEN_MEMCHANNEL = None
+PKTGEN_BUS_SLOT_NIC_1 = None
+PKTGEN_BUS_SLOT_NIC_2 = None
+
+INFLUXDB_IP = None
+INFLUXDB_PORT = None
+INFLUXDB_DB_NAME = None
+
+
+# ------------------------------------------------------
+# Initialization and Input validation
+# ------------------------------------------------------
+
+def init(api=False):
+ global BASE_DIR
+ BASE_DIR = os.getcwd()
+ BASE_DIR = BASE_DIR.replace('/experimental_framework', '')
+ BASE_DIR = InputValidation.validate_directory_exist_and_format(
+ BASE_DIR, "Error 000001")
+
+ init_conf_file(api)
+ init_general_vars()
+ init_log()
+ if len(CONF_FILE.get_variable_list(cf.CFS_PKTGEN)) > 0:
+ init_pktgen()
+
+
+def init_conf_file(api=False):
+ global CONF_FILE
+ if api:
+ CONF_FILE = ConfigurationFile(cf.get_sections_api())
+ else:
+ CONF_FILE = ConfigurationFile(cf.get_sections())
+
+
+def init_general_vars():
+ global TEMPLATE_FILE_EXTENSION
+ global TEMPLATE_NAME
+ global TEMPLATE_DIR
+ global RESULT_DIR
+ global ITERATIONS
+
+ TEMPLATE_FILE_EXTENSION = '.yaml'
+
+ # Check Section in Configuration File
+ InputValidation.\
+ validate_configuration_file_section(
+ cf.CFS_GENERAL,
+ "Section " + cf.CFS_GENERAL +
+ " is not present in configuration file")
+
+ TEMPLATE_DIR = BASE_DIR + 'heat_templates/'
+
+ # Validate template name
+ InputValidation.\
+ validate_configuration_file_parameter(
+ cf.CFS_GENERAL,
+ cf.CFSG_TEMPLATE_NAME,
+ "Parameter " + cf.CFSG_TEMPLATE_NAME +
+ " is not present in configuration file")
+
+ TEMPLATE_NAME = CONF_FILE.get_variable(cf.CFS_GENERAL,
+ cf.CFSG_TEMPLATE_NAME)
+ InputValidation.validate_file_exist(
+ TEMPLATE_DIR + TEMPLATE_NAME,
+ "The provided template file does not exist")
+ RESULT_DIR = BASE_DIR + 'results/'
+
+ # Validate and assign Iterations
+ if cf.CFSG_ITERATIONS in CONF_FILE.get_variable_list(cf.CFS_GENERAL):
+ ITERATIONS = int(CONF_FILE.get_variable(cf.CFS_GENERAL,
+ cf.CFSG_ITERATIONS))
+ else:
+ ITERATIONS = 1
+
+
+def init_log():
+ global LOG
+ if cf.CFSG_DEBUG in CONF_FILE.get_variable_list(cf.CFS_GENERAL) and \
+ CONF_FILE.get_variable(cf.CFS_GENERAL, cf.CFSG_DEBUG):
+ logging.basicConfig(level=logging.DEBUG)
+ else:
+ logging.basicConfig(level=logging.INFO)
+
+ LOG = logging.getLogger()
+ log_formatter = logging.Formatter("%(asctime)s --- %(message)s")
+ file_handler = logging.FileHandler("{0}/{1}.log".format("./", "benchmark"))
+ file_handler.setFormatter(log_formatter)
+ file_handler.setLevel(logging.DEBUG)
+ LOG.addHandler(file_handler)
+
+
+# ------------------------------------------------------
+# InfluxDB conf variables
+# ------------------------------------------------------
+def init_influxdb():
+ global INFLUXDB_IP
+ global INFLUXDB_PORT
+ global INFLUXDB_DB_NAME
+
+ INFLUXDB_IP = CONF_FILE.get_variable(cf.CFS_INFLUXDB, cf.CFSI_IDB_IP)
+ INFLUXDB_PORT = CONF_FILE.get_variable(cf.CFS_INFLUXDB, cf.CFSI_IDB_PORT)
+ INFLUXDB_DB_NAME = CONF_FILE.get_variable(cf.CFS_INFLUXDB,
+ cf.CFSI_IDB_DB_NAME)
+
+
+# ------------------------------------------------------
+# Packet Generator conf variables
+# ------------------------------------------------------
+def init_pktgen():
+ global PKTGEN
+ global PKTGEN_DIR
+ global PKTGEN_PROGRAM
+ global PKTGEN_COREMASK
+ global PKTGEN_MEMCHANNEL
+ global PKTGEN_BUS_SLOT_NIC_1
+ global PKTGEN_BUS_SLOT_NIC_2
+ global PKTGEN_DPDK_DIRECTORY
+
+ msg = "Section {} is not present in the configuration file".\
+ format(cf.CFS_PKTGEN)
+ InputValidation.validate_configuration_file_section(cf.CFS_PKTGEN, msg)
+
+ pktgen_var_list = CONF_FILE.get_variable_list(cf.CFS_PKTGEN)
+ PKTGEN = 'dpdk_pktgen' # default value
+ if cf.CFSP_PACKET_GENERATOR in pktgen_var_list:
+ msg = "Parameter {} is not present in section {}".format(
+ cf.CFSP_PACKET_GENERATOR, cf.CFS_PKTGEN)
+ InputValidation.validate_configuration_file_parameter(
+ cf.CFS_PKTGEN, cf.CFSP_PACKET_GENERATOR, msg)
+ PKTGEN = CONF_FILE.get_variable(
+ cf.CFS_PKTGEN, cf.CFSP_PACKET_GENERATOR)
+
+ if PKTGEN not in fp.get_supported_packet_generators():
+ raise ValueError('The specified packet generator is not supported '
+ 'by the framework')
+
+ # Check if the packet gen is dpdk_pktgen
+ if PKTGEN == cf.CFSP_PG_DPDK:
+ # Validation of DPDK pktgen directory
+ msg = "Parameter {} is not present in section {}".format(
+ cf.CFSP_DPDK_PKTGEN_DIRECTORY, cf.CFS_PKTGEN)
+ InputValidation.validate_configuration_file_parameter(
+ cf.CFS_PKTGEN, cf.CFSP_DPDK_PKTGEN_DIRECTORY, msg)
+ PKTGEN_DIR = CONF_FILE.get_variable(
+ cf.CFS_PKTGEN, cf.CFSP_DPDK_PKTGEN_DIRECTORY)
+ msg = "The directory {} does not exist.".format(PKTGEN_DIR)
+ PKTGEN_DIR = InputValidation.validate_directory_exist_and_format(
+ PKTGEN_DIR, msg)
+
+ # Validation of the DPDK program name
+ msg = "Parameter {} is not present in section {}".format(
+ cf.CFSP_DPDK_PROGRAM_NAME, cf.CFS_PKTGEN)
+ InputValidation.validate_configuration_file_parameter(
+ cf.CFS_PKTGEN, cf.CFSP_DPDK_PROGRAM_NAME, msg)
+ PKTGEN_PROGRAM = CONF_FILE.get_variable(
+ cf.CFS_PKTGEN, cf.CFSP_DPDK_PROGRAM_NAME)
+
+ # Validation of the DPDK Coremask parameter
+ msg = "Parameter {} is not present in section {}".format(
+ cf.CFSP_DPDK_COREMASK, cf.CFS_PKTGEN)
+ InputValidation.validate_configuration_file_parameter(
+ cf.CFS_PKTGEN, cf.CFSP_DPDK_COREMASK, msg)
+ PKTGEN_COREMASK = CONF_FILE.get_variable(
+ cf.CFS_PKTGEN, cf.CFSP_DPDK_COREMASK)
+
+ # Validation of the DPDK Memory Channel parameter
+ msg = "Parameter {} is not present in section {}".format(
+ cf.CFSP_DPDK_MEMORY_CHANNEL, cf.CFS_PKTGEN)
+ InputValidation.validate_configuration_file_parameter(
+ cf.CFS_PKTGEN, cf.CFSP_DPDK_MEMORY_CHANNEL, msg)
+ PKTGEN_MEMCHANNEL = CONF_FILE.get_variable(
+ cf.CFS_PKTGEN, cf.CFSP_DPDK_MEMORY_CHANNEL)
+
+ # Validation of the DPDK Bus Slot 1
+ msg = "Parameter {} is not present in section {}".format(
+ cf.CFSP_DPDK_BUS_SLOT_NIC_1, cf.CFS_PKTGEN)
+ InputValidation.validate_configuration_file_parameter(
+ cf.CFS_PKTGEN, cf.CFSP_DPDK_BUS_SLOT_NIC_1, msg)
+ PKTGEN_BUS_SLOT_NIC_1 = CONF_FILE.get_variable(
+ cf.CFS_PKTGEN, cf.CFSP_DPDK_BUS_SLOT_NIC_1)
+
+ # Validation of the DPDK Bus Slot 2
+ msg = "Parameter {} is not present in section {}".format(
+ cf.CFSP_DPDK_BUS_SLOT_NIC_2, cf.CFS_PKTGEN)
+ InputValidation.validate_configuration_file_parameter(
+ cf.CFS_PKTGEN, cf.CFSP_DPDK_BUS_SLOT_NIC_2, msg)
+ PKTGEN_BUS_SLOT_NIC_2 = CONF_FILE.get_variable(
+ cf.CFS_PKTGEN, cf.CFSP_DPDK_BUS_SLOT_NIC_2)
+
+ # Validation of DPDK directory parameter
+ msg = "Parameter {} is not present in section {}".format(
+ cf.CFSP_DPDK_DPDK_DIRECTORY, cf.CFS_PKTGEN)
+ InputValidation.validate_configuration_file_parameter(
+ cf.CFS_PKTGEN, cf.CFSP_DPDK_DPDK_DIRECTORY, msg)
+ PKTGEN_DPDK_DIRECTORY = CONF_FILE.get_variable(
+ cf.CFS_PKTGEN, cf.CFSP_DPDK_DPDK_DIRECTORY)
+ msg = "Directory {} does not exist".format(
+ cf.CFSP_DPDK_DPDK_DIRECTORY)
+ PKTGEN_DPDK_DIRECTORY = InputValidation.\
+ validate_directory_exist_and_format(PKTGEN_DPDK_DIRECTORY, msg)
+
+
+# ------------------------------------------------------
+# Configuration file access
+# ------------------------------------------------------
+
+class ConfigurationFile:
+ """
+ Used to extract data from the configuration file
+ """
+
+ def __init__(self, sections, config_file='conf.cfg'):
+ """
+ Reads configuration file sections
+
+ :param sections: list of strings representing the sections to be
+ loaded
+ :param config_file: name of the configuration file (string)
+ :return: None
+ """
+ InputValidation.validate_string(
+ config_file, "The configuration file name must be a string")
+ config_file = BASE_DIR + config_file
+ InputValidation.validate_file_exist(
+ config_file, 'The provided configuration file does not exist')
+ self.config = ConfigParser.ConfigParser()
+ self.config.read(config_file)
+ for section in sections:
+ setattr(
+ self, section, ConfigurationFile.
+ _config_section_map(section, self.config))
+
+ @staticmethod
+ def _config_section_map(section, config_file):
+ """
+ Returns a dictionary with the configuration values for the specific
+ section
+
+ :param section: section to be loaded (string)
+ :param config_file: name of the configuration file (string)
+ :return: dict
+ """
+ dict1 = dict()
+ options = config_file.options(section)
+ for option in options:
+ dict1[option] = config_file.get(section, option)
+ return dict1
+
+ def get_variable(self, section, variable_name):
+ """
+ Returns the value correspondent to a variable
+
+ :param section: section to be loaded (string)
+ :param variable_name: name of the variable (string)
+ :return: string
+ """
+ message = "The variable name must be a string"
+ InputValidation.validate_string(variable_name, message)
+ if variable_name in self.get_variable_list(section):
+ sect = getattr(self, section)
+ return sect[variable_name]
+ else:
+ exc_msg = 'Parameter {} is not in the {} section of the conf file'
+ exc_msg = exc_msg.format(variable_name, section)
+ raise ValueError(exc_msg)
+
+ def get_variable_list(self, section):
+ """
+ Returns the list of the available variables in a section
+ :param section: section to be loaded (string)
+ :return: list
+ """
+ try:
+ return getattr(self, section)
+ except AttributeError:
+ msg = 'Section {} not found in the configuration file'.\
+ format(section)
+ raise ValueError(msg)
+
+
+# ------------------------------------------------------
+# Get OpenStack Credentials
+# ------------------------------------------------------
+def get_credentials():
+ """
+ Returns the credentials for OpenStack access from the configuration file
+ :return: dictionary
+ """
+ credentials = dict()
+ credentials[cf.CFSO_IP_CONTROLLER] = CONF_FILE.get_variable(
+ cf.CFS_OPENSTACK, cf.CFSO_IP_CONTROLLER)
+ credentials[cf.CFSO_HEAT_URL] = CONF_FILE.get_variable(
+ cf.CFS_OPENSTACK, cf.CFSO_HEAT_URL)
+ credentials[cf.CFSO_USER] = CONF_FILE.get_variable(
+ cf.CFS_OPENSTACK, cf.CFSO_USER)
+ credentials[cf.CFSO_PASSWORD] = CONF_FILE.get_variable(
+ cf.CFS_OPENSTACK, cf.CFSO_PASSWORD)
+ credentials[cf.CFSO_AUTH_URI] = CONF_FILE.get_variable(
+ cf.CFS_OPENSTACK, cf.CFSO_AUTH_URI)
+ credentials[cf.CFSO_PROJECT] = CONF_FILE.get_variable(
+ cf.CFS_OPENSTACK, cf.CFSO_PROJECT)
+ return credentials
+
+
+# ------------------------------------------------------
+# Manage files
+# ------------------------------------------------------
+
+def get_heat_template_params():
+ """
+ Returns the list of deployment parameters from the configuration file
+ for the heat template
+
+ :return: dict
+ """
+ heat_parameters_list = CONF_FILE.get_variable_list(
+ cf.CFS_DEPLOYMENT_PARAMETERS)
+ testcase_parameters = dict()
+ for param in heat_parameters_list:
+ testcase_parameters[param] = CONF_FILE.get_variable(
+ cf.CFS_DEPLOYMENT_PARAMETERS, param)
+ return testcase_parameters
+
+
+def get_testcase_params():
+ """
+ Returns the list of testcase parameters from the configuration file
+
+ :return: dict
+ """
+ testcase_parameters = dict()
+ parameters = CONF_FILE.get_variable_list(cf.CFS_TESTCASE_PARAMETERS)
+ for param in parameters:
+ testcase_parameters[param] = CONF_FILE.get_variable(
+ cf.CFS_TESTCASE_PARAMETERS, param)
+ return testcase_parameters
+
+
+def get_file_first_line(file_name):
+ """
+ Returns the first line of a file
+
+ :param file_name: name of the file to be read (str)
+ :return: str
+ """
+ message = "The name of the file must be a string"
+ InputValidation.validate_string(file_name, message)
+ message = 'The file {} does not exist'.format(file_name)
+ InputValidation.validate_file_exist(file_name, message)
+ with open(file_name, 'r') as res:
+     return res.readline()
+
+
+def replace_in_file(file, text_to_search, text_to_replace):
+ """
+ Replaces a string within a file
+
+ :param file: name of the file (str)
+ :param text_to_search: text to be replaced
+ :param text_to_replace: new text that will replace the previous
+ :return: None
+ """
+ message = 'The text to be replaced in the file must be a string'
+ InputValidation.validate_string(text_to_search, message)
+ message = 'The text to replace in the file must be a string'
+ InputValidation.validate_string(text_to_replace, message)
+ message = "The name of the file must be a string"
+ InputValidation.validate_string(file, message)
+ message = "The file does not exist"
+ InputValidation.validate_file_exist(file, message)
+ for line in fileinput.input(file, inplace=True):
+ print(line.replace(text_to_search, text_to_replace).rstrip())
+
+
+# ------------------------------------------------------
+# Shell interaction
+# ------------------------------------------------------
+def run_command(command):
+ LOG.info("Running command: " + command)
+ return os.system(command)
+
+
+def push_data_influxdb(data):
+ ip = INFLUXDB_IP
+ port = INFLUXDB_PORT
+ db_name = INFLUXDB_DB_NAME
+ command = "curl -i -XPOST 'http://{}:{}/write?db={}' " \
+ "--data-binary {}".format(ip, port, db_name, data)
+ run_command(command)
+
+
+# ------------------------------------------------------
+# Expose variables to other modules
+# ------------------------------------------------------
+
+def get_base_dir():
+ return BASE_DIR
+
+
+def get_template_dir():
+ return TEMPLATE_DIR
+
+
+def get_dpdk_pktgen_vars():
+ if not (PKTGEN == 'dpdk_pktgen'):
+ return dict()
+ ret_val = dict()
+ ret_val[cf.CFSP_DPDK_PKTGEN_DIRECTORY] = PKTGEN_DIR
+ ret_val[cf.CFSP_DPDK_PROGRAM_NAME] = PKTGEN_PROGRAM
+ ret_val[cf.CFSP_DPDK_COREMASK] = PKTGEN_COREMASK
+ ret_val[cf.CFSP_DPDK_MEMORY_CHANNEL] = PKTGEN_MEMCHANNEL
+ ret_val[cf.CFSP_DPDK_BUS_SLOT_NIC_1] = PKTGEN_BUS_SLOT_NIC_1
+ ret_val[cf.CFSP_DPDK_BUS_SLOT_NIC_2] = PKTGEN_BUS_SLOT_NIC_2
+ ret_val[cf.CFSP_DPDK_DPDK_DIRECTORY] = PKTGEN_DPDK_DIRECTORY
+ return ret_val
+
+
+# ------------------------------------------------------
+# Configuration Variables from Config File
+# ------------------------------------------------------
+def get_deployment_configuration_variables_from_conf_file():
+ variables = dict()
+ types = dict()
+ all_variables = CONF_FILE.get_variable_list(cf.CFS_EXPERIMENT_VNF)
+ for var in all_variables:
+ v = CONF_FILE.get_variable(cf.CFS_EXPERIMENT_VNF, var)
+ var_type = re.findall(r'@\w*', v)
+ values = re.findall(r'\"(.+?)\"', v)
+ variables[var] = values
+ try:
+ types[var] = var_type[0][1:]  # NOTE(review): 'types' is built but never returned -- confirm intent
+ except IndexError:
+ LOG.debug("No type has been specified for variable " + var)
+ return variables
+
+
+# ------------------------------------------------------
+# benchmarks from Config File
+# ------------------------------------------------------
+def get_benchmarks_from_conf_file():
+ requested_benchmarks = list()
+ benchmarks = \
+ CONF_FILE.get_variable(cf.CFS_GENERAL, cf.CFSG_BENCHMARKS).split(', ')
+ for benchmark in benchmarks:
+ requested_benchmarks.append(benchmark)
+ return requested_benchmarks
+
+
+class InputValidation(object):
+
+ @staticmethod
+ def validate_string(param, message):
+ if not isinstance(param, str):
+ raise ValueError(message)
+ return True
+
+ @staticmethod
+ def validate_integer(param, message):
+ if not isinstance(param, int):
+ raise ValueError(message)
+ return True
+
+ @staticmethod
+ def validate_dictionary(param, message):
+ if not isinstance(param, dict):
+ raise ValueError(message)
+ return True
+
+ @staticmethod
+ def validate_file_exist(file_name, message):
+ if not os.path.isfile(file_name):
+ raise ValueError(message + ' ' + file_name)
+ return True
+
+ @staticmethod
+ def validate_directory_exist_and_format(directory, message):
+ if not os.path.isdir(directory):
+ raise ValueError(message)
+ if not directory.endswith('/'):
+ return directory + '/'
+ return directory
+
+ @staticmethod
+ def validate_configuration_file_parameter(section, parameter, message):
+ params = CONF_FILE.get_variable_list(section)
+ if parameter not in params:
+ raise ValueError(message)
+ return True
+
+ @staticmethod
+ def validate_configuration_file_section(section, message):
+ if section not in cf.get_sections():
+ raise ValueError(message)
+ return True
+
+ @staticmethod
+ def validate_boolean(boolean, message):
+ if isinstance(boolean, bool):
+ return boolean
+ if isinstance(boolean, str):
+ if boolean == 'True':
+ return True
+ if boolean == 'False':
+ return False
+ raise ValueError(message)
+
+ @staticmethod
+ def validate_os_credentials(credentials):
+ if not isinstance(credentials, dict):
+ raise ValueError(
+ 'The provided openstack_credentials '
+ 'variable must be in dictionary format')
+
+ credential_keys = ['ip_controller', 'heat_url', 'user', 'password',
+ 'auth_uri', 'project']
+ missing = [
+ credential_key
+ for credential_key in credential_keys
+ if credential_key not in credentials.keys()
+ ]
+ if len(missing) == 0:
+ return True
+ msg = 'OpenStack Credentials Error! ' \
+ 'The following parameters are missing: {}'.\
+ format(", ".join(missing))
+ raise ValueError(msg)
diff --git a/yardstick/vTC/apexlake/experimental_framework/constants/conf_file_sections.py b/yardstick/vTC/apexlake/experimental_framework/constants/conf_file_sections.py
index eed00bce0..f397984e9 100644
--- a/yardstick/vTC/apexlake/experimental_framework/constants/conf_file_sections.py
+++ b/yardstick/vTC/apexlake/experimental_framework/constants/conf_file_sections.py
@@ -23,6 +23,7 @@ CFS_EXPERIMENT_VNF = 'Experiment-VNF'
CFS_EXPERIMENT_GENERIC = 'Experiment-generic'
CFS_TESTCASE_PARAMETERS = 'Testcase-parameters'
CFS_DEPLOYMENT_PARAMETERS = 'Deployment-parameters'
+CFS_INFLUXDB = 'InfluxDB'
def get_sections():
@@ -31,9 +32,10 @@ def get_sections():
CFS_GENERAL,
CFS_OPENSTACK,
CFS_EXPERIMENT_VNF,
- CFS_EXPERIMENT_GENERIC,
+ # CFS_EXPERIMENT_GENERIC,
CFS_TESTCASE_PARAMETERS,
- CFS_DEPLOYMENT_PARAMETERS
+ CFS_DEPLOYMENT_PARAMETERS,
+ CFS_INFLUXDB
# Add here eventually new sections in configuration file ...
]
@@ -42,8 +44,7 @@ def get_sections_api():
return [
CFS_PKTGEN,
CFS_GENERAL,
- # TODO: TO BE REMOVED AFTER TESTING THE API
- CFS_OPENSTACK
+ CFS_INFLUXDB
# Add here eventually new sections in configuration file ...
]
@@ -55,17 +56,30 @@ CFSG_TEMPLATE_DIR = 'template_dir'
CFSG_TEMPLATE_NAME = 'template_base_name'
CFSG_RESULT_DIRECTORY = 'results_directory'
CFSG_BENCHMARKS = 'benchmarks'
+CFSG_DEBUG = 'debug'
+
+
+# ------------------------------------------------------
+# InfluxDB
+# ------------------------------------------------------
+CFSI_IDB_IP = 'influxdb_ip_address'
+CFSI_IDB_PORT = 'influxdb_port'
+CFSI_IDB_DB_NAME = 'influxdb_db_name'
# ------------------------------------------------------
# Packet generator section parameters
# ------------------------------------------------------
CFSP_PACKET_GENERATOR = 'packet_generator'
-CFSP_DPDK_DIRECTORY = 'directory'
+CFSP_DPDK_PKTGEN_DIRECTORY = 'pktgen_directory'
+CFSP_DPDK_DPDK_DIRECTORY = 'dpdk_directory'
CFSP_DPDK_PROGRAM_NAME = 'program_name'
CFSP_DPDK_COREMASK = 'coremask'
CFSP_DPDK_MEMORY_CHANNEL = 'memory_channels'
-CFSP_DPDK_CORE_NICS = 'core_nics'
+CFSP_DPDK_BUS_SLOT_NIC_1 = 'bus_slot_nic_1'
+CFSP_DPDK_BUS_SLOT_NIC_2 = 'bus_slot_nic_2'
+CFSP_DPDK_NAME_IF_1 = 'name_if_1'
+CFSP_DPDK_NAME_IF_2 = 'name_if_2'
# ------------------------------------------------------
diff --git a/yardstick/vTC/apexlake/tests/common_test.py b/yardstick/vTC/apexlake/tests/common_test.py
new file mode 100644
index 000000000..a80672522
--- /dev/null
+++ b/yardstick/vTC/apexlake/tests/common_test.py
@@ -0,0 +1,643 @@
+__author__ = 'vmricco'
+
+import unittest
+import mock
+import os
+import logging
+import ConfigParser
+import experimental_framework.common as common
+import experimental_framework.constants.conf_file_sections as cf
+
+
def reset_common():
    """Restore every module-level global of ``common`` to ``None``.

    Prevents state set up by one test case from leaking into the next,
    since experimental_framework.common keeps its configuration in
    module globals.
    """
    cleared_globals = (
        'LOG', 'CONF_FILE', 'DEPLOYMENT_UNIT', 'ITERATIONS',
        'BASE_DIR', 'RESULT_DIR', 'TEMPLATE_DIR', 'TEMPLATE_NAME',
        'TEMPLATE_FILE_EXTENSION', 'PKTGEN', 'PKTGEN_DIR',
        'PKTGEN_DPDK_DIRECTORY', 'PKTGEN_PROGRAM', 'PKTGEN_COREMASK',
        'PKTGEN_MEMCHANNEL', 'PKTGEN_BUS_SLOT_NIC_1',
        'PKTGEN_BUS_SLOT_NIC_2', 'INFLUXDB_IP', 'INFLUXDB_PORT',
        'INFLUXDB_DB_NAME',
    )
    for name in cleared_globals:
        setattr(common, name, None)
+
+
class DummyConfigurationFile(common.ConfigurationFile):
    """Minimal ConfigurationFile stub.

    Skips all file parsing and answers every lookup with fixed values:
    a single known variable name and a single template file name.
    """

    def __init__(self, sections):
        # Deliberately do NOT call the parent constructor: no config
        # file is read for these tests.
        pass

    def get_variable(self, section, variable_name):
        """Return the canned template name regardless of the query."""
        return 'vTC.yaml'

    def get_variable_list(self, section):
        """Pretend only 'template_base_name' is configured."""
        return ['template_base_name']
+
+
class DummyConfigurationFile2(common.ConfigurationFile):
    """ConfigurationFile stub describing a full dpdk_pktgen setup.

    The packet-generator variable is special: the first lookup returns
    the supported 'dpdk_pktgen' value, every later lookup returns
    'non_supported' so that failure paths can be exercised.
    """

    def __init__(self, sections):
        # Track how many times the packet generator name was queried.
        self.pktgen_counter = 0

    def get_variable(self, section, variable_name):
        """Return canned values for every known configuration key."""
        if variable_name == cf.CFSP_PACKET_GENERATOR:
            if self.pktgen_counter == 1:
                return 'non_supported'
            self.pktgen_counter += 1
            return 'dpdk_pktgen'
        fixed = {
            cf.CFSG_TEMPLATE_NAME: 'vTC.yaml',
            cf.CFSG_ITERATIONS: 2,
            cf.CFSG_DEBUG: True,
            cf.CFSP_DPDK_PKTGEN_DIRECTORY: os.getcwd(),
            cf.CFSP_DPDK_PROGRAM_NAME: 'program',
            cf.CFSP_DPDK_COREMASK: 'coremask',
            cf.CFSP_DPDK_MEMORY_CHANNEL: 'memchannel',
            cf.CFSP_DPDK_BUS_SLOT_NIC_1: 'bus_slot_nic_1',
            cf.CFSP_DPDK_BUS_SLOT_NIC_2: 'bus_slot_nic_2',
            cf.CFSP_DPDK_DPDK_DIRECTORY: os.getcwd(),
        }
        # Unknown keys fall through to None, as in a plain if-chain.
        return fixed.get(variable_name)

    def get_variable_list(self, section):
        """List the variables of the pktgen or the general section."""
        if section == cf.CFS_PKTGEN:
            return [
                cf.CFSP_DPDK_NAME_IF_2,
                cf.CFSP_DPDK_NAME_IF_1,
                cf.CFSP_DPDK_BUS_SLOT_NIC_1,
                cf.CFSP_DPDK_BUS_SLOT_NIC_2,
                cf.CFSP_DPDK_COREMASK,
                cf.CFSP_DPDK_DPDK_DIRECTORY,
                cf.CFSP_DPDK_PKTGEN_DIRECTORY,
                cf.CFSP_DPDK_MEMORY_CHANNEL,
                cf.CFSP_DPDK_PROGRAM_NAME,
                cf.CFSP_PACKET_GENERATOR
            ]
        return [
            'template_base_name',
            'iterations',
            cf.CFSG_DEBUG
        ]
+
+
class TestCommonInit(unittest.TestCase):
    """Tests for common.init() and common.init_general_vars()."""

    def setUp(self):
        common.CONF_FILE = DummyConfigurationFile('')
        self.dir = '{}/{}'.format(os.getcwd(),
                                  'experimental_framework/')

    def tearDown(self):
        # Clear all common module globals touched by the tests.
        reset_common()

    @mock.patch('os.getcwd')
    @mock.patch('experimental_framework.common.init_conf_file')
    @mock.patch('experimental_framework.common.init_general_vars')
    @mock.patch('experimental_framework.common.init_log')
    @mock.patch('experimental_framework.common.init_pktgen')
    @mock.patch('experimental_framework.common.CONF_FILE')
    def test_init_for_success(self, mock_conf_file, init_pkgen, init_log,
                              init_general_vars, init_conf_file, mock_getcwd):
        mock_getcwd.return_value = self.dir
        common.init(True)
        # BUGFIX: the previous 'x.assert_called_once()' calls were
        # silent no-ops with the py2 mock library -- any assert_*
        # attribute access on a Mock simply creates a child mock and
        # "passes".  Check call_count explicitly instead.
        self.assertEqual(init_pkgen.call_count, 1)
        self.assertEqual(init_conf_file.call_count, 1)
        self.assertEqual(init_general_vars.call_count, 1)
        self.assertEqual(init_log.call_count, 1)
        # init() must derive BASE_DIR by stripping the trailing
        # 'experimental_framework/' component from the cwd.
        expected = self.dir.split('experimental_framework/')[0]
        self.assertEqual(common.BASE_DIR, expected)

    def test_init_general_vars_for_success(self):
        common.BASE_DIR = "{}/".format(os.getcwd())
        common.init_general_vars()
        # Defaults derived from the DummyConfigurationFile stub.
        self.assertEqual(common.TEMPLATE_FILE_EXTENSION, '.yaml')
        heat_dir = self.dir.split('experimental_framework/')[0]
        self.assertEqual(common.TEMPLATE_DIR,
                         '{}{}'.format(heat_dir, 'heat_templates/'))
        self.assertEqual(common.TEMPLATE_NAME, 'vTC.yaml')
        self.assertEqual(common.RESULT_DIR,
                         '{}{}'.format(heat_dir, 'results/'))
        # The stub does not expose 'iterations', so the default of 1
        # is expected here.
        self.assertEqual(common.ITERATIONS, 1)
+
+
class TestCommonInit2(unittest.TestCase):
    """Tests for common init helpers backed by DummyConfigurationFile2,
    which describes a complete dpdk_pktgen configuration."""

    def setUp(self):
        common.CONF_FILE = DummyConfigurationFile2('')
        self.dir = '{}/{}'.format(os.getcwd(), 'experimental_framework/')

    def tearDown(self):
        reset_common()
        common.CONF_FILE = None

    def test_init_general_vars_2_for_success(self):
        common.BASE_DIR = "{}/".format(os.getcwd())
        common.init_general_vars()
        self.assertEqual(common.TEMPLATE_FILE_EXTENSION, '.yaml')
        heat_dir = self.dir.split('experimental_framework/')[0]
        self.assertEqual(common.TEMPLATE_DIR,
                         '{}{}'.format(heat_dir, 'heat_templates/'))
        self.assertEqual(common.TEMPLATE_NAME, 'vTC.yaml')
        self.assertEqual(common.RESULT_DIR,
                         '{}{}'.format(heat_dir, 'results/'))
        # DummyConfigurationFile2 exposes 'iterations' = 2, unlike the
        # simpler stub used in TestCommonInit.
        self.assertEqual(common.ITERATIONS, 2)

    def test_init_log_2_for_success(self):
        common.init_log()
        self.assertIsInstance(common.LOG, logging.RootLogger)

    def test_init_pktgen_for_success(self):
        # First get_variable() call on the packet generator key returns
        # the supported 'dpdk_pktgen' value (see the stub's counter).
        common.init_pktgen()
        self.assertEqual(common.PKTGEN, 'dpdk_pktgen')
        directory = self.dir.split('experimental_framework/')[0]
        self.assertEqual(common.PKTGEN_DIR, directory)
        self.assertEqual(common.PKTGEN_PROGRAM, 'program')
        self.assertEqual(common.PKTGEN_COREMASK, 'coremask')
        self.assertEqual(common.PKTGEN_MEMCHANNEL, 'memchannel')
        self.assertEqual(common.PKTGEN_BUS_SLOT_NIC_1, 'bus_slot_nic_1')
        self.assertEqual(common.PKTGEN_BUS_SLOT_NIC_2, 'bus_slot_nic_2')
        expected_dir = "{}/".format(os.getcwd())
        self.assertEqual(common.PKTGEN_DPDK_DIRECTORY, expected_dir)

    def test_init_pktgen_for_failure(self):
        # Burn the stub's first (supported) answer so that init_pktgen()
        # sees 'non_supported' and must raise ValueError.
        common.CONF_FILE.get_variable('', cf.CFSP_PACKET_GENERATOR)
        self.assertRaises(ValueError, common.init_pktgen)
+
+
class TestConfFileInitialization(unittest.TestCase):
    """Tests for conf-file, log and InfluxDB initialisation helpers."""

    def setUp(self):
        pass

    def tearDown(self):
        reset_common()

    @mock.patch('experimental_framework.common.ConfigurationFile',
                side_effect=DummyConfigurationFile)
    def test_init_conf_file_for_success(self, conf_file):
        # Both API mode and non-API mode must build a ConfigurationFile.
        for api_mode in (False, True):
            common.CONF_FILE = None
            common.init_conf_file(api_mode)
            self.assertIsInstance(common.CONF_FILE,
                                  DummyConfigurationFile)

    @mock.patch('experimental_framework.common.CONF_FILE')
    def test_init_log_for_success(self, mock_conf_file):
        mock_conf_file.get_variable_list.return_value = 'value'
        common.init_log()
        self.assertIsInstance(common.LOG, logging.RootLogger)

    @mock.patch('experimental_framework.common.CONF_FILE')
    def test_init_influxdb_for_success(self, mock_conf_file):
        mock_conf_file.get_variable.return_value = 'value'
        common.init_influxdb()
        # All three InfluxDB globals come from the same mocked lookup.
        for actual in (common.INFLUXDB_IP,
                       common.INFLUXDB_PORT,
                       common.INFLUXDB_DB_NAME):
            self.assertEqual(actual, 'value')
+
+
class DummyConfigurationFile3(common.ConfigurationFile):
    """ConfigurationFile stub counting _config_section_map invocations.

    The counter is a class attribute on purpose: it accumulates across
    instances so a test can verify how many sections were mapped.
    """

    counter = 0

    def __init__(self, sections, config_file='conf.cfg'):
        common.ConfigurationFile.__init__(self, sections, config_file)

    @staticmethod
    def _config_section_map(section, config_file, get_counter=None):
        """Count calls; with get_counter set, report the count instead."""
        if get_counter:
            return DummyConfigurationFile3.counter
        DummyConfigurationFile3.counter += 1
        return dict()
+
+
class TestConfigFileClass(unittest.TestCase):
    """Tests for common.ConfigurationFile against the on-disk fixture
    tests/data/common/conf.cfg."""

    def setUp(self):
        self.sections = [
            'General',
            'OpenStack',
            'Experiment-VNF',
            'PacketGen',
            'Deployment-parameters',
            'Testcase-parameters'
        ]
        # NOTE(review): leading-slash path is presumably joined onto
        # BASE_DIR by ConfigurationFile -- confirm against common.py.
        c_file = '/tests/data/common/conf.cfg'
        common.BASE_DIR = os.getcwd()
        self.conf_file = common.ConfigurationFile(self.sections, c_file)

    def tearDown(self):
        reset_common()
        common.BASE_DIR = None

    @mock.patch('experimental_framework.common.ConfigurationFile.'
                '_config_section_map',
                side_effect=DummyConfigurationFile3._config_section_map)
    def test___init___for_success(self, mock_conf_map):
        sections = ['General', 'OpenStack', 'Experiment-VNF', 'PacketGen',
                    'Deployment-parameters', 'Testcase-parameters']
        c = DummyConfigurationFile3(
            sections, config_file='/tests/data/common/conf.cfg')
        # The constructor must map each of the 6 sections exactly once
        # (the stub's counter was incremented once per call).
        self.assertEqual(
            DummyConfigurationFile3._config_section_map('', '', True),
            6)
        # Each section becomes an attribute holding its mapped dict.
        for section in sections:
            self.assertEqual(getattr(c, section), dict())

    def test__config_section_map_for_success(self):
        general_section = 'General'
        # openstack_section = 'OpenStack'
        config_file = 'tests/data/common/conf.cfg'
        config = ConfigParser.ConfigParser()
        config.read(config_file)

        # Expected key/value pairs of the [General] section in the
        # fixture file.
        expected = {
            'benchmarks': 'b_marks',
            'iterations': '1',
            'template_base_name': 't_name'
        }
        output = common.\
            ConfigurationFile._config_section_map(general_section, config)
        self.assertEqual(expected, output)

    @mock.patch('experimental_framework.common.'
                'ConfigurationFile.get_variable_list')
    def test_get_variable_for_success(self, mock_get_var_list):
        section = self.sections[0]
        variable_name = 'template_base_name'
        expected = 't_name'
        mock_get_var_list.return_value = [variable_name]
        output = self.conf_file.get_variable(section, variable_name)
        self.assertEqual(expected, output)

    @mock.patch('experimental_framework.common.'
                'ConfigurationFile.get_variable_list')
    def test_get_variable_for_failure(self, mock_get_var_list):
        section = self.sections[0]
        # NOTE(review): mock_get_var_list is intentionally left
        # unconfigured; 'something_else in MagicMock()' is False, so
        # get_variable must raise ValueError.
        variable_name = 'something_else'
        self.assertRaises(
            ValueError,
            self.conf_file.get_variable,
            section, variable_name
        )

    def test_get_variable_list_for_success(self):
        section = self.sections[0]
        # Full contents of the [General] fixture section.
        expected = {
            'benchmarks': 'b_marks',
            'iterations': '1',
            'template_base_name': 't_name'
        }
        output = self.conf_file.get_variable_list(section)
        self.assertEqual(expected, output)

    def test_get_variable_list_for_failure(self):
        # A section absent from the config file must raise ValueError.
        section = 'something_else'
        self.assertRaises(
            ValueError,
            self.conf_file.get_variable_list,
            section)
+
+
class DummyConfigurationFile4(common.ConfigurationFile):
    """ConfigurationFile stub with canned answers for variable lookups.

    'vnic2_type' and the benchmark list get dedicated values; every
    other variable resolves to the marker string '@string "value"'.
    """

    def get_variable(self, section, variable_name):
        special = {
            'vnic2_type': '"value"',
            cf.CFSG_BENCHMARKS: "BenchmarkClass1, BenchmarkClass2",
        }
        return special.get(variable_name, '@string "value"')
+
+
class TestCommonMethods(unittest.TestCase):
    """Tests for the free helper functions in common.py, backed by the
    DummyConfigurationFile4 stub and the on-disk fixtures under
    tests/data/common/."""

    def setUp(self):
        self.sections = [
            'General',
            'OpenStack',
            'Experiment-VNF',
            'PacketGen',
            'Deployment-parameters',
            'Testcase-parameters'
        ]
        config_file = '/tests/data/common/conf.cfg'
        common.BASE_DIR = os.getcwd()
        common.CONF_FILE = DummyConfigurationFile4(self.sections, config_file)

    def tearDown(self):
        reset_common()
        common.CONF_FILE = None

    def test_get_credentials_for_success(self):
        # Every credential field comes from the stub's default answer.
        expected = {
            'ip_controller': '@string "value"',
            'project': '@string "value"',
            'auth_uri': '@string "value"',
            'user': '@string "value"',
            'heat_url': '@string "value"',
            'password': '@string "value"'
        }
        output = common.get_credentials()
        self.assertEqual(expected, output)

    def test_get_heat_template_params_for_success(self):
        expected = {
            'param_1': '@string "value"',
            'param_2': '@string "value"',
            'param_3': '@string "value"',
            'param_4': '@string "value"'
        }
        output = common.get_heat_template_params()
        self.assertEqual(expected, output)

    def test_get_testcase_params_for_success(self):
        expected = {'test_case_param': '@string "value"'}
        output = common.get_testcase_params()
        self.assertEqual(expected, output)

    def test_get_file_first_line_for_success(self):
        # NOTE(review): 'file' shadows the py2 builtin; harmless here.
        file = 'tests/data/common/conf.cfg'
        expected = '[General]\n'
        output = common.get_file_first_line(file)
        self.assertEqual(expected, output)

    def test_replace_in_file_for_success(self):
        # Replace a substring in the fixture, verify, then restore the
        # original content so the fixture stays unchanged on disk.
        filename = 'tests/data/common/file_replacement.txt'
        text_to_search = 'replacement of'
        text_to_replace = '***'
        common.replace_in_file(filename, text_to_search, text_to_replace)
        after = open(filename, 'r').readline()
        self.assertEqual(after, 'Test for the *** strings into a file\n')
        text_to_search = '***'
        text_to_replace = 'replacement of'
        common.replace_in_file(filename, text_to_search, text_to_replace)

    @mock.patch('os.system')
    @mock.patch('experimental_framework.common.LOG')
    def test_run_command_for_success(self, mock_log, mock_os_system):
        # run_command must delegate the raw string to os.system.
        command = 'command to be run'
        common.run_command(command)
        mock_os_system.assert_called_once_with(command)

    @mock.patch('experimental_framework.common.run_command')
    def test_push_data_influxdb_for_success(self, mock_run_cmd):
        # INFLUXDB_* globals are None after reset, hence the 'None'
        # placeholders in the expected curl command line.
        data = 'string that describes the data'
        expected = "curl -i -XPOST 'http://None:None/write?db=None' " \
                   "--data-binary string that describes the data"
        common.push_data_influxdb(data)
        mock_run_cmd.assert_called_once_with(expected)

    def test_get_base_dir_for_success(self):
        # Save/restore the global around the accessor check.
        base_dir = common.BASE_DIR
        common.BASE_DIR = 'base_dir'
        expected = 'base_dir'
        output = common.get_base_dir()
        self.assertEqual(expected, output)
        common.BASE_DIR = base_dir

    def test_get_template_dir_for_success(self):
        template_dir = common.TEMPLATE_DIR
        common.TEMPLATE_DIR = 'base_dir'
        expected = 'base_dir'
        output = common.get_template_dir()
        self.assertEqual(expected, output)
        common.TEMPLATE_DIR = template_dir

    def test_get_dpdk_pktgen_vars_test(self):
        # Test 1: with PKTGEN set to the supported generator the full
        # variable dict must be returned.
        common.PKTGEN = 'dpdk_pktgen'
        common.PKTGEN_DIR = 'var'
        common.PKTGEN_PROGRAM = 'var'
        common.PKTGEN_COREMASK = 'var'
        common.PKTGEN_MEMCHANNEL = 'var'
        common.PKTGEN_BUS_SLOT_NIC_1 = 'var'
        common.PKTGEN_BUS_SLOT_NIC_2 = 'var'
        common.PKTGEN_DPDK_DIRECTORY = 'var'
        expected = {
            'bus_slot_nic_1': 'var',
            'bus_slot_nic_2': 'var',
            'coremask': 'var',
            'dpdk_directory': 'var',
            'memory_channels': 'var',
            'pktgen_directory': 'var',
            'program_name': 'var'
        }
        output = common.get_dpdk_pktgen_vars()
        self.assertEqual(expected, output)

        # Test 2: an unsupported generator yields an empty dict.
        common.PKTGEN = 'something_else'
        common.PKTGEN_DIR = 'var'
        common.PKTGEN_PROGRAM = 'var'
        common.PKTGEN_COREMASK = 'var'
        common.PKTGEN_MEMCHANNEL = 'var'
        common.PKTGEN_BUS_SLOT_NIC_1 = 'var'
        common.PKTGEN_BUS_SLOT_NIC_2 = 'var'
        common.PKTGEN_DPDK_DIRECTORY = 'var'
        expected = {}
        output = common.get_dpdk_pktgen_vars()
        self.assertEqual(expected, output)

    @mock.patch('experimental_framework.common.LOG')
    def test_get_deployment_configuration_variables_for_success(self,
                                                                mock_log):
        # 'vnic2_type' is the stub's special case: '"value"' with the
        # surrounding quotes stripped by the parser.
        expected = {
            'vcpu': ['value'],
            'vnic1_type': ['value'],
            'ram': ['value'],
            'vnic2_type': ['value']
        }
        output = common.get_deployment_configuration_variables_from_conf_file()
        self.assertEqual(expected, output)

    def test_get_benchmarks_from_conf_file_for_success(self):
        # The comma-separated stub value must be split into a list.
        expected = ['BenchmarkClass1', 'BenchmarkClass2']
        output = common.get_benchmarks_from_conf_file()
        self.assertEqual(expected, output)
+
+
class TestinputValidation(unittest.TestCase):
    """Tests for the common.InputValidation static validators."""

    def setUp(self):
        pass

    def tearDown(self):
        reset_common()

    def test_validate_string_for_success(self):
        output = common.InputValidation.validate_string('string', '')
        self.assertTrue(output)

    def test_validate_string_for_failure(self):
        # Non-string input must raise ValueError.
        self.assertRaises(
            ValueError,
            common.InputValidation.validate_string,
            list(), ''
        )

    def test_validate_int_for_success(self):
        output = common.InputValidation.validate_integer(1111, '')
        self.assertTrue(output)

    def test_validate_int_for_failure(self):
        self.assertRaises(
            ValueError,
            common.InputValidation.validate_integer,
            list(), ''
        )

    def test_validate_dict_for_success(self):
        output = common.InputValidation.validate_dictionary(dict(), '')
        self.assertTrue(output)

    def test_validate_dict_for_failure(self):
        self.assertRaises(
            ValueError,
            common.InputValidation.validate_dictionary,
            list(), ''
        )

    def test_validate_file_exist_for_success(self):
        filename = 'tests/data/common/file_replacement.txt'
        output = common.InputValidation.validate_file_exist(filename, '')
        self.assertTrue(output)

    def test_validate_file_exist_for_failure(self):
        # Missing extension -> file does not exist -> ValueError.
        filename = 'tests/data/common/file_replacement'
        self.assertRaises(
            ValueError,
            common.InputValidation.validate_file_exist,
            filename, ''
        )

    def test_validate_directory_exist_and_format_for_success(self):
        directory = 'tests/data/common/'
        output = common.InputValidation.\
            validate_directory_exist_and_format(directory, '')
        self.assertTrue(output)

    def test_validate_directory_exist_and_format_for_failure(self):
        directory = 'tests/data/com/'
        self.assertRaises(
            ValueError,
            common.InputValidation.validate_directory_exist_and_format,
            directory, ''
        )

    @mock.patch('experimental_framework.common.CONF_FILE')
    def test_validate_configuration_file_parameter_for_success(self,
                                                               mock_conf):
        mock_conf.get_variable_list.return_value = ['param']
        section = ''
        parameter = 'param'
        message = ''
        output = common.InputValidation.\
            validate_configuration_file_parameter(section, parameter, message)
        self.assertTrue(output)

    @mock.patch('experimental_framework.common.CONF_FILE')
    def test_validate_configuration_file_parameter_for_failure(
            self, mock_conf_file):
        section = ''
        parameter = 'something_else'
        message = ''
        # BUGFIX: the original line called the mock's return_value
        # ("...return_value(['parameter'])") instead of assigning it,
        # leaving the variable list unconfigured.
        mock_conf_file.get_variable_list.return_value = ['parameter']
        self.assertRaises(
            ValueError,
            common.InputValidation.
            validate_configuration_file_parameter,
            section, parameter, message
        )

    def test_validate_configuration_file_section_for_success(self):
        section = 'General'
        message = ''
        output = common.InputValidation.\
            validate_configuration_file_section(section, message)
        self.assertTrue(output)

    def test_validate_configuration_file_section_for_failure(self):
        section = 'Something-Else'
        message = ''
        self.assertRaises(
            ValueError,
            common.InputValidation.validate_configuration_file_section,
            section, message
        )

    def test_validate_boolean_for_success(self):
        # Accepts real booleans and the strings 'True'/'False', which
        # are converted to their boolean value.
        message = ''
        boolean = True
        output = common.InputValidation.validate_boolean(boolean, message)
        self.assertTrue(output)

        boolean = 'True'
        output = common.InputValidation.validate_boolean(boolean, message)
        self.assertTrue(output)

        boolean = 'False'
        output = common.InputValidation.validate_boolean(boolean, message)
        self.assertFalse(output)

    def test_validate_boolean_for_failure(self):
        message = ''
        boolean = 'string'
        self.assertRaises(
            ValueError,
            common.InputValidation.validate_boolean,
            boolean, message
        )

    def test_validate_os_credentials_for_failure(self):
        # Test 1: credentials must be a dictionary.
        credentials = list()
        self.assertRaises(ValueError,
                          common.InputValidation.validate_os_credentials,
                          credentials)

        # Test 2: all fields are required -- 'project' is omitted here.
        credentials = dict()
        credentials['ip_controller'] = ''
        credentials['heat_url'] = ''
        credentials['user'] = ''
        credentials['password'] = ''
        credentials['auth_uri'] = ''
        self.assertRaises(ValueError,
                          common.InputValidation.validate_os_credentials,
                          credentials)

    def test_validate_os_credentials_for_success(self):
        # A dict with every required key validates successfully.
        credentials = dict()
        credentials['ip_controller'] = ''
        credentials['heat_url'] = ''
        credentials['user'] = ''
        credentials['password'] = ''
        credentials['auth_uri'] = ''
        credentials['project'] = ''
        self.assertTrue(
            common.InputValidation.validate_os_credentials(credentials))
diff --git a/yardstick/vTC/apexlake/tests/generates_template_test.py b/yardstick/vTC/apexlake/tests/generates_template_test.py
index 85435db6a..67e17cd37 100644
--- a/yardstick/vTC/apexlake/tests/generates_template_test.py
+++ b/yardstick/vTC/apexlake/tests/generates_template_test.py
@@ -19,6 +19,29 @@ import os
import experimental_framework.common as common
+def reset_common():
+ common.LOG = None
+ common.CONF_FILE = None
+ common.DEPLOYMENT_UNIT = None
+ common.ITERATIONS = None
+ common.BASE_DIR = None
+ common.RESULT_DIR = None
+ common.TEMPLATE_DIR = None
+ common.TEMPLATE_NAME = None
+ common.TEMPLATE_FILE_EXTENSION = None
+ common.PKTGEN = None
+ common.PKTGEN_DIR = None
+ common.PKTGEN_DPDK_DIRECTORY = None
+ common.PKTGEN_PROGRAM = None
+ common.PKTGEN_COREMASK = None
+ common.PKTGEN_MEMCHANNEL = None
+ common.PKTGEN_BUS_SLOT_NIC_1 = None
+ common.PKTGEN_BUS_SLOT_NIC_2 = None
+ common.INFLUXDB_IP = None
+ common.INFLUXDB_PORT = None
+ common.INFLUXDB_DB_NAME = None
+
+
class TestGeneratesTemplate(unittest.TestCase):
def setUp(self):
self.deployment_configuration = {
@@ -27,16 +50,15 @@ class TestGeneratesTemplate(unittest.TestCase):
'vcpus': ['2']
}
self.template_name = 'VTC_base_single_vm_wait.tmp'
- common.init()
-
- def test_dummy(self):
- self.assertTrue(True)
+ # common.init()
def tearDown(self):
- pass
+ reset_common()
+ @mock.patch('experimental_framework.common.LOG')
@mock.patch('experimental_framework.common.get_template_dir')
- def test_generates_template_for_success(self, mock_template_dir):
+ def test_generates_template_for_success(self, mock_template_dir,
+ mock_log):
generated_templates_dir = 'tests/data/generated_templates/'
mock_template_dir.return_value = generated_templates_dir
test_templates = 'tests/data/test_templates/'
@@ -50,7 +72,7 @@ class TestGeneratesTemplate(unittest.TestCase):
generated.readlines())
t_name = '/tests/data/generated_templates/VTC_base_single_vm_wait.tmp'
- self.template_name = os.getcwd() + t_name
+ self.template_name = "{}{}".format(os.getcwd(), t_name)
heat_gen.generates_templates(self.template_name,
self.deployment_configuration)
for dirname, dirnames, filenames in os.walk(test_templates):
diff --git a/yardstick/vTC/apexlake/tests/heat_manager_test.py b/yardstick/vTC/apexlake/tests/heat_manager_test.py
index f89835cc7..9191a17f8 100644
--- a/yardstick/vTC/apexlake/tests/heat_manager_test.py
+++ b/yardstick/vTC/apexlake/tests/heat_manager_test.py
@@ -144,11 +144,13 @@ class TestHeatManager(unittest.TestCase):
def test_delete_stack_for_success_2(self):
self.assertTrue(self.heat_manager.delete_stack('stack_1'))
+ @mock.patch('experimental_framework.common.LOG')
@mock.patch('heatclient.common.template_utils.get_template_contents')
@mock.patch('heatclient.client.Client')
# @mock.patch('heatclient.client.Client', side_effect=DummyHeatClient)
def test_create_stack_for_success(self, mock_stack_create,
- mock_get_template_contents):
+ mock_get_template_contents,
+ mock_log):
return_value = ({'template': 'template'}, 'template')
mock_get_template_contents.return_value = return_value
self.heat_manager.create_stack('template', 'stack_n', 'parameters')
diff --git a/yardstick/vTC/apexlake/tests/instantiation_validation_bench_test.py b/yardstick/vTC/apexlake/tests/instantiation_validation_bench_test.py
index 569d24c5a..a44c1f26b 100644
--- a/yardstick/vTC/apexlake/tests/instantiation_validation_bench_test.py
+++ b/yardstick/vTC/apexlake/tests/instantiation_validation_bench_test.py
@@ -14,7 +14,9 @@
import unittest
import mock
+import os
import experimental_framework.constants.conf_file_sections as cfs
+import experimental_framework.common as common
import experimental_framework.benchmarks.\
instantiation_validation_benchmark as iv_module
from experimental_framework.benchmarks.\
@@ -152,11 +154,12 @@ class DummyInstantiaionValidationBenchmark(InstantiationValidationBenchmark):
class InstantiationValidationInitTest(unittest.TestCase):
def setUp(self):
+ common.BASE_DIR = os.getcwd()
self.iv = InstantiationValidationBenchmark('InstantiationValidation',
dict())
def tearDown(self):
- pass
+ common.BASE_DIR = None
@mock.patch('experimental_framework.common.get_base_dir')
def test___init___for_success(self, mock_base_dir):
@@ -301,11 +304,13 @@ class InstantiationValidationInitTest(unittest.TestCase):
self.assertEqual(dummy_replace_in_file('', '', '', True),
[0, 0, 0, 1, 1, 1])
+ @mock.patch('experimental_framework.common.LOG')
@mock.patch('experimental_framework.packet_generators.'
'dpdk_packet_generator.DpdkPacketGenerator',
side_effect=DummyDpdkPacketGenerator)
@mock.patch('experimental_framework.common.get_dpdk_pktgen_vars')
- def test_run_for_success(self, mock_common_get_vars, mock_pktgen):
+ def test_run_for_success(self, mock_common_get_vars, mock_pktgen,
+ mock_log):
rval = dict()
rval[cfs.CFSP_DPDK_BUS_SLOT_NIC_2] = 'bus_2'
rval[cfs.CFSP_DPDK_NAME_IF_2] = 'if_2'
diff --git a/yardstick/vTC/apexlake/tests/instantiation_validation_noisy_bench_test.py b/yardstick/vTC/apexlake/tests/instantiation_validation_noisy_bench_test.py
index bbdf73947..cdcce37e3 100644
--- a/yardstick/vTC/apexlake/tests/instantiation_validation_noisy_bench_test.py
+++ b/yardstick/vTC/apexlake/tests/instantiation_validation_noisy_bench_test.py
@@ -14,8 +14,9 @@
import unittest
import mock
-
-
+import os
+import experimental_framework.common as common
+import experimental_framework.deployment_unit as deploy
import experimental_framework.benchmarks.\
instantiation_validation_noisy_neighbors_benchmark as mut
@@ -25,11 +26,22 @@ class InstantiationValidationInitTest(unittest.TestCase):
def setUp(self):
name = 'instantiation_validation_noisy'
params = {'param': 'value'}
+ openstack_credentials = dict()
+ openstack_credentials['ip_controller'] = ''
+ openstack_credentials['project'] = ''
+ openstack_credentials['auth_uri'] = ''
+ openstack_credentials['user'] = ''
+ openstack_credentials['heat_url'] = ''
+ openstack_credentials['password'] = ''
+ common.DEPLOYMENT_UNIT = deploy.DeploymentUnit(openstack_credentials)
+ common.BASE_DIR = os.getcwd()
+ common.TEMPLATE_DIR = 'tests/data/generated_templates'
self.iv = mut.\
InstantiationValidationNoisyNeighborsBenchmark(name, params)
def tearDown(self):
- pass
+ common.BASE_DIR = None
+ common.TEMPLATE_DIR = None
@mock.patch('experimental_framework.benchmarks.'
'instantiation_validation_benchmark.'
diff --git a/yardstick/vTC/apexlake/tests/multi_tenancy_throughput_benchmark_test.py b/yardstick/vTC/apexlake/tests/multi_tenancy_throughput_benchmark_test.py
index 78aff35ba..60d597513 100644
--- a/yardstick/vTC/apexlake/tests/multi_tenancy_throughput_benchmark_test.py
+++ b/yardstick/vTC/apexlake/tests/multi_tenancy_throughput_benchmark_test.py
@@ -12,11 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-__author__ = 'gpetralx'
-
import unittest
import mock
+import os
+import experimental_framework.common as common
from experimental_framework.benchmarks \
import multi_tenancy_throughput_benchmark as bench
@@ -37,6 +37,7 @@ class TestMultiTenancyThroughputBenchmark(unittest.TestCase):
def setUp(self):
name = 'benchmark'
params = dict()
+ common.BASE_DIR = os.getcwd()
self.benchmark = bench.MultiTenancyThroughputBenchmark(name, params)
def tearDown(self):
diff --git a/yardstick/vTC/apexlake/tests/rfc2544_throughput_benchmark_test.py b/yardstick/vTC/apexlake/tests/rfc2544_throughput_benchmark_test.py
index bef9b7f30..15d0f3040 100644
--- a/yardstick/vTC/apexlake/tests/rfc2544_throughput_benchmark_test.py
+++ b/yardstick/vTC/apexlake/tests/rfc2544_throughput_benchmark_test.py
@@ -12,11 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-__author__ = 'vmriccox'
-
import unittest
import mock
+import os
from experimental_framework.benchmarks import rfc2544_throughput_benchmark \
as mut
import experimental_framework.common as common
@@ -29,11 +28,11 @@ class RFC2544ThroughputBenchmarkRunTest(unittest.TestCase):
params = dict()
params[mut.VLAN_SENDER] = '1'
params[mut.VLAN_RECEIVER] = '2'
+ common.BASE_DIR = os.getcwd()
self.benchmark = mut.RFC2544ThroughputBenchmark(name, params)
- common.init_log()
def tearDown(self):
- pass
+ common.BASE_DIR = None
def test_get_features_for_sanity(self):
output = self.benchmark.get_features()
@@ -51,6 +50,7 @@ class RFC2544ThroughputBenchmarkRunTest(unittest.TestCase):
def test_finalize(self):
self.assertEqual(self.benchmark.finalize(), None)
+ @mock.patch('experimental_framework.common.LOG')
@mock.patch('experimental_framework.benchmarks.'
'rfc2544_throughput_benchmark.RFC2544ThroughputBenchmark.'
'_reset_lua_file')
@@ -67,7 +67,7 @@ class RFC2544ThroughputBenchmarkRunTest(unittest.TestCase):
'rfc2544_throughput_benchmark.dpdk.DpdkPacketGenerator')
def test_run_for_success(self, mock_dpdk, mock_get_results,
mock_extract_size, conf_lua_file_mock,
- reset_lua_file_mock):
+ reset_lua_file_mock, mock_common_log):
expected = {'results': 0, 'packet_size': '1'}
mock_extract_size.return_value = '1'
mock_get_results.return_value = {'results': 0}
@@ -88,10 +88,11 @@ class RFC2544ThroughputBenchmarkOthers(unittest.TestCase):
def setUp(self):
name = 'benchmark'
params = {'packet_size': '128'}
+ common.BASE_DIR = os.getcwd()
self.benchmark = mut.RFC2544ThroughputBenchmark(name, params)
def tearDown(self):
- pass
+ common.BASE_DIR = None
def test__extract_packet_size_from_params_for_success(self):
expected = '128'
@@ -121,10 +122,10 @@ class RFC2544ThroughputBenchmarkOthers(unittest.TestCase):
class RFC2544ThroughputBenchmarkGetResultsTest(unittest.TestCase):
def setUp(self):
- pass
+ common.BASE_DIR = os.getcwd()
def tearDown(self):
- pass
+ common.BASE_DIR = None
@mock.patch('experimental_framework.common.get_file_first_line')
def test__get_results_for_success(self, mock_common_file_line):