Diffstat (limited to 'tests/unit/benchmark/scenarios/networking')
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_iperf3.py                              | 23
-rwxr-xr-x  tests/unit/benchmark/scenarios/networking/test_netperf.py                             | 13
-rwxr-xr-x  tests/unit/benchmark/scenarios/networking/test_netperf_node.py                        | 13
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_netutilization.py                      |  4
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_networkcapacity.py                     | 27
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_ping.py                                |  5
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_ping6.py                               | 52
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_pktgen.py                              | 11
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py                         | 13
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_sfc.py                                 |  6
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_vsperf.py                              | 26
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation.py       |  8
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation_noisy.py |  9
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_vtc_throughput.py                      |  1
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_vtc_throughput_noisy_test.py          |  1
15 files changed, 126 insertions, 86 deletions
diff --git a/tests/unit/benchmark/scenarios/networking/test_iperf3.py b/tests/unit/benchmark/scenarios/networking/test_iperf3.py
index 91f800b60..ea53cb9ab 100644
--- a/tests/unit/benchmark/scenarios/networking/test_iperf3.py
+++ b/tests/unit/benchmark/scenarios/networking/test_iperf3.py
@@ -11,10 +11,13 @@
# Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
-import mock
-import unittest
+from __future__ import absolute_import
+
import os
-import json
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
from yardstick.benchmark.scenarios.networking import iperf3
@@ -78,7 +81,7 @@ class IperfTestCase(unittest.TestCase):
sample_output = self._read_sample_output(self.output_name_tcp)
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- expected_result = json.loads(sample_output)
+ expected_result = jsonutils.loads(sample_output)
p.run(result)
self.assertEqual(result, expected_result)
@@ -97,7 +100,7 @@ class IperfTestCase(unittest.TestCase):
sample_output = self._read_sample_output(self.output_name_tcp)
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- expected_result = json.loads(sample_output)
+ expected_result = jsonutils.loads(sample_output)
p.run(result)
self.assertEqual(result, expected_result)
@@ -119,8 +122,7 @@ class IperfTestCase(unittest.TestCase):
self.assertRaises(AssertionError, p.run, result)
def test_iperf_successful_sla_jitter(self, mock_ssh):
-
- options = {"udp":"udp","bandwidth":"20m"}
+ options = {"udp": "udp", "bandwidth": "20m"}
args = {
'options': options,
'sla': {'jitter': 10}
@@ -133,13 +135,12 @@ class IperfTestCase(unittest.TestCase):
sample_output = self._read_sample_output(self.output_name_udp)
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- expected_result = json.loads(sample_output)
+ expected_result = jsonutils.loads(sample_output)
p.run(result)
self.assertEqual(result, expected_result)
def test_iperf_unsuccessful_sla_jitter(self, mock_ssh):
-
- options = {"udp":"udp","bandwidth":"20m"}
+ options = {"udp": "udp", "bandwidth": "20m"}
args = {
'options': options,
'sla': {'jitter': 0.0001}
@@ -167,7 +168,7 @@ class IperfTestCase(unittest.TestCase):
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
self.assertRaises(RuntimeError, p.run, result)
- def _read_sample_output(self,filename):
+ def _read_sample_output(self, filename):
curr_path = os.path.dirname(os.path.abspath(__file__))
output = os.path.join(curr_path, filename)
with open(output) as f:
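
Note: the json -> oslo_serialization swap above (and in the netperf, networkcapacity and pktgen files below) is mechanical; jsonutils.loads is a drop-in for json.loads on these sample outputs. A minimal standalone sketch of the equivalence being relied on, with an illustrative JSON string rather than the real iperf3 sample file:

    from oslo_serialization import jsonutils

    # Illustrative payload only, not the sample output shipped with the tests
    sample_output = '{"end": {"sum_received": {"bits_per_second": 1000000.0}}}'
    expected_result = jsonutils.loads(sample_output)   # behaves like json.loads here
    assert expected_result["end"]["sum_received"]["bits_per_second"] == 1000000.0
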
diff --git a/tests/unit/benchmark/scenarios/networking/test_netperf.py b/tests/unit/benchmark/scenarios/networking/test_netperf.py
index 3f224733c..1b5dd6472 100755
--- a/tests/unit/benchmark/scenarios/networking/test_netperf.py
+++ b/tests/unit/benchmark/scenarios/networking/test_netperf.py
@@ -11,10 +11,13 @@
# Unittest for yardstick.benchmark.scenarios.networking.netperf.Netperf
-import mock
-import unittest
+from __future__ import absolute_import
+
import os
-import json
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
from yardstick.benchmark.scenarios.networking import netperf
@@ -59,7 +62,7 @@ class NetperfTestCase(unittest.TestCase):
sample_output = self._read_sample_output()
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- expected_result = json.loads(sample_output)
+ expected_result = jsonutils.loads(sample_output)
p.run(result)
self.assertEqual(result, expected_result)
@@ -78,7 +81,7 @@ class NetperfTestCase(unittest.TestCase):
sample_output = self._read_sample_output()
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- expected_result = json.loads(sample_output)
+ expected_result = jsonutils.loads(sample_output)
p.run(result)
self.assertEqual(result, expected_result)
diff --git a/tests/unit/benchmark/scenarios/networking/test_netperf_node.py b/tests/unit/benchmark/scenarios/networking/test_netperf_node.py
index 1c39b292b..29a7edf67 100755
--- a/tests/unit/benchmark/scenarios/networking/test_netperf_node.py
+++ b/tests/unit/benchmark/scenarios/networking/test_netperf_node.py
@@ -12,10 +12,13 @@
# Unittest for
# yardstick.benchmark.scenarios.networking.netperf_node.NetperfNode
-import mock
-import unittest
+from __future__ import absolute_import
+
import os
-import json
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
from yardstick.benchmark.scenarios.networking import netperf_node
@@ -59,7 +62,7 @@ class NetperfNodeTestCase(unittest.TestCase):
sample_output = self._read_sample_output()
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- expected_result = json.loads(sample_output)
+ expected_result = jsonutils.loads(sample_output)
p.run(result)
self.assertEqual(result, expected_result)
@@ -78,7 +81,7 @@ class NetperfNodeTestCase(unittest.TestCase):
sample_output = self._read_sample_output()
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- expected_result = json.loads(sample_output)
+ expected_result = jsonutils.loads(sample_output)
p.run(result)
self.assertEqual(result, expected_result)
diff --git a/tests/unit/benchmark/scenarios/networking/test_netutilization.py b/tests/unit/benchmark/scenarios/networking/test_netutilization.py
index eb6626fea..7c04f5e9a 100644
--- a/tests/unit/benchmark/scenarios/networking/test_netutilization.py
+++ b/tests/unit/benchmark/scenarios/networking/test_netutilization.py
@@ -9,8 +9,10 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-# Unittest for yardstick.benchmark.scenarios.networking.netutilization.NetUtilization
+# Unittest for
+# yardstick.benchmark.scenarios.networking.netutilization.NetUtilization
+from __future__ import absolute_import
import mock
import unittest
import os
diff --git a/tests/unit/benchmark/scenarios/networking/test_networkcapacity.py b/tests/unit/benchmark/scenarios/networking/test_networkcapacity.py
index e42832f1b..3f8d84e54 100644
--- a/tests/unit/benchmark/scenarios/networking/test_networkcapacity.py
+++ b/tests/unit/benchmark/scenarios/networking/test_networkcapacity.py
@@ -9,27 +9,32 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-# Unittest for yardstick.benchmark.scenarios.networking.networkcapacity.NetworkCapacity
+# Unittest for
+# yardstick.benchmark.scenarios.networking.networkcapacity.NetworkCapacity
+
+from __future__ import absolute_import
-import mock
import unittest
-import os
-import json
+
+import mock
+from oslo_serialization import jsonutils
from yardstick.benchmark.scenarios.networking import networkcapacity
-SAMPLE_OUTPUT = '{"Number of connections":"308","Number of frames received": "166503"}'
+SAMPLE_OUTPUT = \
+ '{"Number of connections":"308","Number of frames received": "166503"}'
+
@mock.patch('yardstick.benchmark.scenarios.networking.networkcapacity.ssh')
class NetworkCapacityTestCase(unittest.TestCase):
def setUp(self):
self.ctx = {
- 'host': {
- 'ip': '172.16.0.137',
- 'user': 'cirros',
- 'password': "root"
- },
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'password': "root"
+ },
}
self.result = {}
@@ -46,7 +51,7 @@ class NetworkCapacityTestCase(unittest.TestCase):
mock_ssh.SSH().execute.return_value = (0, SAMPLE_OUTPUT, '')
c.run(self.result)
- expected_result = json.loads(SAMPLE_OUTPUT)
+ expected_result = jsonutils.loads(SAMPLE_OUTPUT)
self.assertEqual(self.result, expected_result)
def test_capacity_unsuccessful_script_error(self, mock_ssh):
diff --git a/tests/unit/benchmark/scenarios/networking/test_ping.py b/tests/unit/benchmark/scenarios/networking/test_ping.py
index 8d35b8490..5535a79a9 100644
--- a/tests/unit/benchmark/scenarios/networking/test_ping.py
+++ b/tests/unit/benchmark/scenarios/networking/test_ping.py
@@ -11,6 +11,7 @@
# Unittest for yardstick.benchmark.scenarios.networking.ping.Ping
+from __future__ import absolute_import
import mock
import unittest
@@ -37,7 +38,7 @@ class PingTestCase(unittest.TestCase):
args = {
'options': {'packetsize': 200},
'target': 'ares.demo'
- }
+ }
result = {}
p = ping.Ping(args, self.ctx)
@@ -53,7 +54,7 @@ class PingTestCase(unittest.TestCase):
'options': {'packetsize': 200},
'sla': {'max_rtt': 150},
'target': 'ares.demo'
- }
+ }
result = {}
p = ping.Ping(args, self.ctx)
diff --git a/tests/unit/benchmark/scenarios/networking/test_ping6.py b/tests/unit/benchmark/scenarios/networking/test_ping6.py
index 0b8fba268..e22cacb36 100644
--- a/tests/unit/benchmark/scenarios/networking/test_ping6.py
+++ b/tests/unit/benchmark/scenarios/networking/test_ping6.py
@@ -11,6 +11,7 @@
# Unittest for yardstick.benchmark.scenarios.networking.ping.Ping
+from __future__ import absolute_import
import mock
import unittest
@@ -21,37 +22,37 @@ class PingTestCase(unittest.TestCase):
def setUp(self):
self.ctx = {
- 'nodes':{
- 'host1': {
- 'ip': '172.16.0.137',
- 'user': 'cirros',
- 'role': "Controller",
- 'key_filename': "mykey.key",
- 'password': "root"
+ 'nodes': {
+ 'host1': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'role': "Controller",
+ 'key_filename': "mykey.key",
+ 'password': "root"
},
- 'host2': {
- "ip": "172.16.0.138",
- "key_filename": "/root/.ssh/id_rsa",
- "role": "Compute",
- "name": "node3.IPV6",
- "user": "root"
+ 'host2': {
+ "ip": "172.16.0.138",
+ "key_filename": "/root/.ssh/id_rsa",
+ "role": "Compute",
+ "name": "node3.IPV6",
+ "user": "root"
},
}
}
def test_get_controller_node(self):
args = {
- 'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
'sla': {'max_rtt': 50}
}
p = ping6.Ping6(args, self.ctx)
- controller_node = p._get_controller_node(['host1','host2'])
+ controller_node = p._get_controller_node(['host1', 'host2'])
self.assertEqual(controller_node, 'host1')
@mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
def test_ping_successful_setup(self, mock_ssh):
args = {
- 'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
'sla': {'max_rtt': 50}
}
p = ping6.Ping6(args, self.ctx)
@@ -63,58 +64,57 @@ class PingTestCase(unittest.TestCase):
@mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
def test_ping_successful_no_sla(self, mock_ssh):
args = {
- 'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
}
result = {}
p = ping6.Ping6(args, self.ctx)
p.client = mock_ssh.SSH()
- mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''),(0, 100, '')]
+ mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''), (0, 100, '')]
p.run(result)
self.assertEqual(result, {'rtt': 100.0})
@mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
def test_ping_successful_sla(self, mock_ssh):
-
args = {
- 'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
'sla': {'max_rtt': 150}
}
result = {}
p = ping6.Ping6(args, self.ctx)
p.client = mock_ssh.SSH()
- mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''),(0, 100, '')]
+ mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''), (0, 100, '')]
p.run(result)
self.assertEqual(result, {'rtt': 100.0})
@mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
def test_ping_unsuccessful_sla(self, mock_ssh):
-
args = {
- 'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
'sla': {'max_rtt': 50}
}
result = {}
p = ping6.Ping6(args, self.ctx)
p.client = mock_ssh.SSH()
- mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''),(0, 100, '')]
+ mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''), (0, 100, '')]
self.assertRaises(AssertionError, p.run, result)
@mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
def test_ping_unsuccessful_script_error(self, mock_ssh):
args = {
- 'options': {'host': 'host1','packetsize': 200, 'ping_count': 5},
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
'sla': {'max_rtt': 150}
}
result = {}
p = ping6.Ping6(args, self.ctx)
p.client = mock_ssh.SSH()
- mock_ssh.SSH().execute.side_effect = [(0, 'host1', ''),(1, '', 'FOOBAR')]
+ mock_ssh.SSH().execute.side_effect = [
+ (0, 'host1', ''), (1, '', 'FOOBAR')]
self.assertRaises(RuntimeError, p.run, result)
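
Note: the execute.side_effect lists reformatted above make the mocked SSH client return a different tuple on each successive call (first the controller-node lookup, then the ping output). A minimal sketch of that mock behavior, independent of the patch and using unittest.mock for a self-contained Python 3 example:

    from unittest import mock   # the tests themselves import the external mock package

    ssh = mock.Mock()
    ssh.execute.side_effect = [(0, 'host1', ''), (0, 100, '')]
    assert ssh.execute() == (0, 'host1', '')   # first call: controller node lookup
    assert ssh.execute() == (0, 100, '')       # second call: ping RTT output
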
diff --git a/tests/unit/benchmark/scenarios/networking/test_pktgen.py b/tests/unit/benchmark/scenarios/networking/test_pktgen.py
index 13a4c1bd4..f50fa108c 100644
--- a/tests/unit/benchmark/scenarios/networking/test_pktgen.py
+++ b/tests/unit/benchmark/scenarios/networking/test_pktgen.py
@@ -11,9 +11,12 @@
# Unittest for yardstick.benchmark.scenarios.networking.pktgen.Pktgen
-import mock
+from __future__ import absolute_import
+
import unittest
-import json
+
+import mock
+from oslo_serialization import jsonutils
from yardstick.benchmark.scenarios.networking import pktgen
@@ -133,7 +136,7 @@ class PktgenTestCase(unittest.TestCase):
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
p.run(result)
- expected_result = json.loads(sample_output)
+ expected_result = jsonutils.loads(sample_output)
expected_result["packets_received"] = 149300
self.assertEqual(result, expected_result)
@@ -159,7 +162,7 @@ class PktgenTestCase(unittest.TestCase):
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
p.run(result)
- expected_result = json.loads(sample_output)
+ expected_result = jsonutils.loads(sample_output)
expected_result["packets_received"] = 149300
self.assertEqual(result, expected_result)
diff --git a/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py b/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py
index afc87abfb..7ba4db9d9 100644
--- a/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py
+++ b/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py
@@ -11,12 +11,14 @@
# Unittest for yardstick.benchmark.scenarios.networking.pktgen.Pktgen
-import mock
+from __future__ import absolute_import
import unittest
-import json
+
+import mock
from yardstick.benchmark.scenarios.networking import pktgen_dpdk
+
@mock.patch('yardstick.benchmark.scenarios.networking.pktgen_dpdk.ssh')
class PktgenDPDKLatencyTestCase(unittest.TestCase):
@@ -116,7 +118,11 @@ class PktgenDPDKLatencyTestCase(unittest.TestCase):
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
p.run(result)
- self.assertEqual(result, {"avg_latency": 132})
+ # with python 3 we get float, might be due python division changes
+ # AssertionError: {'avg_latency': 132.33333333333334} != {
+ # 'avg_latency': 132}
+ delta = result['avg_latency'] - 132
+ self.assertLessEqual(delta, 1)
def test_pktgen_dpdk_successful_sla(self, mock_ssh):
@@ -169,5 +175,6 @@ class PktgenDPDKLatencyTestCase(unittest.TestCase):
def main():
unittest.main()
+
if __name__ == '__main__':
main()
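
Note: the comment in the hunk above points at Python 3 true division: averaging the latency samples now yields a float, so the exact-match assertion is relaxed to a tolerance check. A standalone sketch of the behavior, with illustrative numbers rather than the actual sample output:

    # Python 2: 397 / 3 == 132 (floor division on ints)
    # Python 3: 397 / 3 == 132.333... (true division)
    latencies = [132, 132, 133]
    avg_latency = sum(latencies) / len(latencies)
    delta = avg_latency - 132
    assert delta <= 1   # mirrors the assertLessEqual(delta, 1) check in the test
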
diff --git a/tests/unit/benchmark/scenarios/networking/test_sfc.py b/tests/unit/benchmark/scenarios/networking/test_sfc.py
index 618efc32e..224a43bd8 100644
--- a/tests/unit/benchmark/scenarios/networking/test_sfc.py
+++ b/tests/unit/benchmark/scenarios/networking/test_sfc.py
@@ -11,6 +11,7 @@
# Unittest for yardstick.benchmark.scenarios.networking.sfc
+from __future__ import absolute_import
import mock
import unittest
@@ -27,7 +28,7 @@ class SfcTestCase(unittest.TestCase):
context_cfg['target'] = dict()
context_cfg['target']['user'] = 'root'
context_cfg['target']['password'] = 'opnfv'
- context_cfg['target']['ip'] = '127.0.0.1'
+ context_cfg['target']['ip'] = '127.0.0.1'
# Used in Sfc.run()
context_cfg['host'] = dict()
@@ -58,7 +59,8 @@ class SfcTestCase(unittest.TestCase):
@mock.patch('yardstick.benchmark.scenarios.networking.sfc.subprocess')
def test2_run_for_success(self, mock_subprocess, mock_openstack, mock_ssh):
# Mock a successfull SSH in Sfc.setup() and Sfc.run()
- mock_ssh.SSH().execute.return_value = (0, 'vxlan_tool.py', 'succeeded timed out')
+ mock_ssh.SSH().execute.return_value = (
+ 0, 'vxlan_tool.py', 'succeeded timed out')
mock_openstack.get_an_IP.return_value = "127.0.0.1"
mock_subprocess.call.return_value = 'mocked!'
diff --git a/tests/unit/benchmark/scenarios/networking/test_vsperf.py b/tests/unit/benchmark/scenarios/networking/test_vsperf.py
index 25d52212b..76d2afdc0 100644
--- a/tests/unit/benchmark/scenarios/networking/test_vsperf.py
+++ b/tests/unit/benchmark/scenarios/networking/test_vsperf.py
@@ -16,17 +16,20 @@
# Unittest for yardstick.benchmark.scenarios.networking.vsperf.Vsperf
-import mock
+from __future__ import absolute_import
+try:
+ from unittest import mock
+except ImportError:
+ import mock
import unittest
-import os
-import subprocess
from yardstick.benchmark.scenarios.networking import vsperf
@mock.patch('yardstick.benchmark.scenarios.networking.vsperf.subprocess')
@mock.patch('yardstick.benchmark.scenarios.networking.vsperf.ssh')
-@mock.patch("__builtin__.open", return_value=None)
+@mock.patch("yardstick.benchmark.scenarios.networking.vsperf.open",
+ mock.mock_open())
class VsperfTestCase(unittest.TestCase):
def setUp(self):
@@ -58,7 +61,7 @@ class VsperfTestCase(unittest.TestCase):
}
}
- def test_vsperf_setup(self, mock_open, mock_ssh, mock_subprocess):
+ def test_vsperf_setup(self, mock_ssh, mock_subprocess):
p = vsperf.Vsperf(self.args, self.ctx)
mock_ssh.SSH().execute.return_value = (0, '', '')
mock_subprocess.call().execute.return_value = None
@@ -67,7 +70,7 @@ class VsperfTestCase(unittest.TestCase):
self.assertIsNotNone(p.client)
self.assertEqual(p.setup_done, True)
- def test_vsperf_teardown(self, mock_open, mock_ssh, mock_subprocess):
+ def test_vsperf_teardown(self, mock_ssh, mock_subprocess):
p = vsperf.Vsperf(self.args, self.ctx)
# setup() specific mocks
@@ -81,7 +84,7 @@ class VsperfTestCase(unittest.TestCase):
p.teardown()
self.assertEqual(p.setup_done, False)
- def test_vsperf_run_ok(self, mock_open, mock_ssh, mock_subprocess):
+ def test_vsperf_run_ok(self, mock_ssh, mock_subprocess):
p = vsperf.Vsperf(self.args, self.ctx)
# setup() specific mocks
@@ -90,14 +93,16 @@ class VsperfTestCase(unittest.TestCase):
# run() specific mocks
mock_ssh.SSH().execute.return_value = (0, '', '')
- mock_ssh.SSH().execute.return_value = (0, 'throughput_rx_fps\r\n14797660.000\r\n', '')
+ mock_ssh.SSH().execute.return_value = (
+ 0, 'throughput_rx_fps\r\n14797660.000\r\n', '')
result = {}
p.run(result)
self.assertEqual(result['throughput_rx_fps'], '14797660.000')
- def test_vsperf_run_falied_vsperf_execution(self, mock_open, mock_ssh, mock_subprocess):
+ def test_vsperf_run_falied_vsperf_execution(self, mock_ssh,
+ mock_subprocess):
p = vsperf.Vsperf(self.args, self.ctx)
# setup() specific mocks
@@ -110,7 +115,7 @@ class VsperfTestCase(unittest.TestCase):
result = {}
self.assertRaises(RuntimeError, p.run, result)
- def test_vsperf_run_falied_csv_report(self, mock_open, mock_ssh, mock_subprocess):
+ def test_vsperf_run_falied_csv_report(self, mock_ssh, mock_subprocess):
p = vsperf.Vsperf(self.args, self.ctx)
# setup() specific mocks
@@ -128,5 +133,6 @@ class VsperfTestCase(unittest.TestCase):
def main():
unittest.main()
+
if __name__ == '__main__':
main()
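
Note: patching "__builtin__.open" only works on Python 2; the hunk above instead patches open where the vsperf module looks it up and supplies mock.mock_open() directly, which is also why the extra mock_open argument disappears from each test method signature. A minimal sketch of the mock_open pattern, with a hypothetical file name and contents that are not part of the patch:

    from unittest import mock

    m = mock.mock_open(read_data='throughput_rx_fps\n14797660.000\n')
    with mock.patch('builtins.open', m):
        with open('vsperf_result.csv') as f:        # returns the mocked file object
            assert f.read().splitlines()[-1] == '14797660.000'
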
diff --git a/tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation.py b/tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation.py
index 418dd39e6..07b3da992 100644
--- a/tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation.py
+++ b/tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation.py
@@ -11,10 +11,11 @@
# Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
-import mock
+from __future__ import absolute_import
import unittest
-from yardstick.benchmark.scenarios.networking import vtc_instantiation_validation
+from yardstick.benchmark.scenarios.networking import \
+ vtc_instantiation_validation
class VtcInstantiationValidationTestCase(unittest.TestCase):
@@ -34,7 +35,8 @@ class VtcInstantiationValidationTestCase(unittest.TestCase):
scenario['options']['vlan_sender'] = ''
scenario['options']['vlan_receiver'] = ''
- self.vt = vtc_instantiation_validation.VtcInstantiationValidation(scenario, '')
+ self.vt = vtc_instantiation_validation.VtcInstantiationValidation(
+ scenario, '')
def test_run_for_success(self):
result = {}
diff --git a/tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation_noisy.py b/tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation_noisy.py
index e0a46241c..34f3610b1 100644
--- a/tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation_noisy.py
+++ b/tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation_noisy.py
@@ -11,10 +11,11 @@
# Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
-import mock
+from __future__ import absolute_import
import unittest
-from yardstick.benchmark.scenarios.networking import vtc_instantiation_validation_noisy
+from yardstick.benchmark.scenarios.networking import \
+ vtc_instantiation_validation_noisy
class VtcInstantiationValidationiNoisyTestCase(unittest.TestCase):
@@ -37,7 +38,9 @@ class VtcInstantiationValidationiNoisyTestCase(unittest.TestCase):
scenario['options']['amount_of_ram'] = '1G'
scenario['options']['number_of_cores'] = '1'
- self.vt = vtc_instantiation_validation_noisy.VtcInstantiationValidationNoisy(scenario, '')
+ self.vt = \
+ vtc_instantiation_validation_noisy.VtcInstantiationValidationNoisy(
+ scenario, '')
def test_run_for_success(self):
result = {}
diff --git a/tests/unit/benchmark/scenarios/networking/test_vtc_throughput.py b/tests/unit/benchmark/scenarios/networking/test_vtc_throughput.py
index ecdf555d2..a73fad5a8 100644
--- a/tests/unit/benchmark/scenarios/networking/test_vtc_throughput.py
+++ b/tests/unit/benchmark/scenarios/networking/test_vtc_throughput.py
@@ -11,6 +11,7 @@
# Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
+from __future__ import absolute_import
import mock
import unittest
diff --git a/tests/unit/benchmark/scenarios/networking/test_vtc_throughput_noisy_test.py b/tests/unit/benchmark/scenarios/networking/test_vtc_throughput_noisy_test.py
index 98957b1de..e1b162c79 100644
--- a/tests/unit/benchmark/scenarios/networking/test_vtc_throughput_noisy_test.py
+++ b/tests/unit/benchmark/scenarios/networking/test_vtc_throughput_noisy_test.py
@@ -11,6 +11,7 @@
# Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
+from __future__ import absolute_import
import mock
import unittest