Diffstat (limited to 'yardstick/tests/unit/benchmark')
-rw-r--r--  yardstick/tests/unit/benchmark/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/nodes_duplicate_sample.yaml  21
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/nodes_duplicate_sample_new.yaml  32
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/nodes_duplicate_sample_ovs.yaml  63
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/nodes_sample.yaml  33
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/nodes_sample_new.yaml  96
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/nodes_sample_new_sriov.yaml  82
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/nodes_sample_ovs.yaml  104
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/nodes_sample_ovsdpdk.yaml  104
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/standalone/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/standalone/nodes_duplicate_sample.yaml  37
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/standalone/nodes_ovs_dpdk_sample.yaml  40
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/standalone/nodes_sample.yaml  33
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/standalone/nodes_sriov_sample.yaml  40
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/standalone/test_model.py  640
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/standalone/test_ovs_dpdk.py  500
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/standalone/test_sriov.py  362
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/standalone_duplicate_sample.yaml  135
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/standalone_sample.yaml  112
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/test_base.py  174
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/test_dummy.py  86
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/test_heat.py  893
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/test_kubernetes.py  273
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/test_model.py  586
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/test_node.py  392
-rw-r--r--  yardstick/tests/unit/benchmark/core/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/core/no_constraint_no_args_scenario_sample.yaml  21
-rw-r--r--  yardstick/tests/unit/benchmark/core/no_constraint_with_args_scenario_sample.yaml  23
-rw-r--r--  yardstick/tests/unit/benchmark/core/test_plugin.py  148
-rw-r--r--  yardstick/tests/unit/benchmark/core/test_report.py  584
-rw-r--r--  yardstick/tests/unit/benchmark/core/test_task.py  644
-rw-r--r--  yardstick/tests/unit/benchmark/core/test_testcase.py  35
-rw-r--r--  yardstick/tests/unit/benchmark/core/with_constraint_no_args_scenario_sample.yaml  24
-rw-r--r--  yardstick/tests/unit/benchmark/core/with_constraint_with_args_scenario_sample.yaml  26
-rw-r--r--  yardstick/tests/unit/benchmark/runner/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/runner/test_arithmetic.py  446
-rw-r--r--  yardstick/tests/unit/benchmark/runner/test_base.py  119
-rw-r--r--  yardstick/tests/unit/benchmark/runner/test_duration.py  315
-rw-r--r--  yardstick/tests/unit/benchmark/runner/test_iteration.py  45
-rw-r--r--  yardstick/tests/unit/benchmark/runner/test_proxduration.py  286
-rw-r--r--  yardstick/tests/unit/benchmark/runner/test_search.py  192
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py  93
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_general.py  56
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_process.py  53
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_baseattacker.py  36
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_basemonitor.py  119
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_baseoperation.py  79
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_baseresultchecker.py  89
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_director.py  106
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_command.py  95
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_general.py  82
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py  82
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py  73
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_operation_general.py  74
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_result_checker_general.py  118
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_scenario_general.py  76
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_serviceha.py  131
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_util.py  56
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/cachestat_sample_output.txt  5
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/cpuload_sample_output1.txt  9
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/cpuload_sample_output2.txt  2
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/memload_sample_output.txt  3
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_cachestat.py  95
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_computecapacity.py  64
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_cpuload.py  262
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_cyclictest.py  167
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_lmbench.py  192
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_memload.py  109
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_plugintest.py  60
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_qemumigrate.py  158
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_ramspeed.py  236
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu.py  75
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu_for_vm.py  76
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_unixbench.py  163
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/dummy/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/dummy/test_dummy.py  32
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/energy/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_chassis_output.txt  14
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_power_metrics.txt  300
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/energy/test_energy.py  182
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_add_memory_load.py  57
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_attach_volume.py  56
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_check_connectivity.py  78
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_check_numa_info.py  76
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_check_value.py  63
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_flavor.py  29
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_floating_ip.py  57
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_image.py  55
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_keypair.py  57
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_network.py  57
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_port.py  27
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_router.py  57
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_sec_group.py  59
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_server.py  59
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_subnet.py  58
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_volume.py  58
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_flavor.py  27
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_floating_ip.py  55
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_image.py  52
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_keypair.py  51
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_network.py  54
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_port.py  25
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router.py  54
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_gateway.py  27
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_interface.py  56
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_server.py  54
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_volume.py  52
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_detach_volume.py  57
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_get_flavor.py  57
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_get_migrate_target_host.py  43
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_get_numa_info.py  103
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_get_server.py  57
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_get_server_ip.py  33
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/imix_voice.yaml  41
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/iperf3_sample_output.json  1
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/iperf3_sample_output_udp.json  1
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/ipv4_1flow_Packets_vpe.yaml  18
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/ipv4_throughput_vpe.yaml  101
-rwxr-xr-x  yardstick/tests/unit/benchmark/scenarios/networking/netperf_sample_output.json  1
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/netutilization_sample_output1.txt  9
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/netutilization_sample_output2.txt  13
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_iperf3.py  190
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_moongen_testpmd.py  353
-rwxr-xr-x  yardstick/tests/unit/benchmark/scenarios/networking/test_netperf.py  122
-rwxr-xr-x  yardstick/tests/unit/benchmark/scenarios/networking/test_netperf_node.py  122
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_netutilization.py  225
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_networkcapacity.py  59
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_nstat.py  105
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_ping.py  107
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_ping6.py  117
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen.py  453
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py  136
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk_throughput.py  194
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_sfc.py  68
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py  873
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf.py  196
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf_dpdk.py  181
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/tg_trex_tpl.yaml  75
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/vpe_vnf_topology.yaml  50
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/parser/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/parser/test_parser.py  70
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/fio_read_sample_output.json  1
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/fio_rw_sample_output.json  1
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/fio_write_sample_output.json  1
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/test_bonnie.py  63
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/test_fio.py  280
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/test_storagecapacity.py  99
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/test_storperf.py  513
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/test_base.py  135
156 files changed, 18102 insertions, 0 deletions
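
Editor's note (not part of the change): the files listed above form a plain unittest/mock test tree, so they can be collected with the standard library's test discovery. The sketch below is a minimal illustration and assumes it is run from the repository root with yardstick and its test dependencies (mock, PyYAML, netaddr) importable; the project itself may drive these tests through tox instead.

    # Minimal sketch: discover and run the unit tests added under
    # yardstick/tests/unit/benchmark with the stdlib runner.
    import unittest

    if __name__ == "__main__":
        suite = unittest.defaultTestLoader.discover(
            start_dir="yardstick/tests/unit/benchmark",
            top_level_dir=".")
        unittest.TextTestRunner(verbosity=2).run(suite)
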
diff --git a/yardstick/tests/unit/benchmark/__init__.py b/yardstick/tests/unit/benchmark/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/__init__.py
diff --git a/yardstick/tests/unit/benchmark/contexts/__init__.py b/yardstick/tests/unit/benchmark/contexts/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/__init__.py
diff --git a/yardstick/tests/unit/benchmark/contexts/nodes_duplicate_sample.yaml b/yardstick/tests/unit/benchmark/contexts/nodes_duplicate_sample.yaml
new file mode 100644
index 000000000..dbdd3700d
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/nodes_duplicate_sample.yaml
@@ -0,0 +1,21 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+nodes:
+-
+ name: node1
+ role: Controller
+ ip: 10.229.47.137
+ user: root
+ key_filename: /root/.yardstick_key
+-
+ name: node1
+ role: Controller
+ ip: 10.229.47.138
+ user: root
+ key_filename: /root/.yardstick_key
diff --git a/yardstick/tests/unit/benchmark/contexts/nodes_duplicate_sample_new.yaml b/yardstick/tests/unit/benchmark/contexts/nodes_duplicate_sample_new.yaml
new file mode 100644
index 000000000..306915ca1
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/nodes_duplicate_sample_new.yaml
@@ -0,0 +1,32 @@
+nodes:
+-
+ name: sriov
+ role: Sriov1
+ ip: 10.123.123.122
+ user: root
+ auth_type: password
+ password: password
+ vf_macs:
+ - "00:00:00:00:00:00"
+ - "00:00:00:00:00:00"
+ phy_ports: # Physical ports to configure sriov
+ - "0000:06:00.0"
+ - "0000:06:00.1"
+ phy_driver: i40e # kernel driver
+ images: "/var/lib/libvirt/images/ubuntu1.img"
+-
+ name: sriov
+ role: Sriov1
+ ip: 10.123.123.111
+ user: root
+ auth_type: password
+ password: password
+ vf_macs:
+ - "00:00:00:00:00:00"
+ - "00:00:00:00:00:00"
+ phy_ports: # Physical ports to configure sriov
+ - "0000:06:00.0"
+ - "0000:06:00.1"
+ phy_driver: i40e # kernel driver
+ images: "/var/lib/libvirt/images/ubuntu1.img"
+
diff --git a/yardstick/tests/unit/benchmark/contexts/nodes_duplicate_sample_ovs.yaml b/yardstick/tests/unit/benchmark/contexts/nodes_duplicate_sample_ovs.yaml
new file mode 100644
index 000000000..65449c91c
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/nodes_duplicate_sample_ovs.yaml
@@ -0,0 +1,63 @@
+# Copyright (c) 2016 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+nodes:
+-
+ name: ovs
+ role: test
+ ip: 10.223.197.222
+ user: root
+ auth_type: password
+ password: intel123
+ vpath: "/usr/local/"
+ vports:
+ - dpdkvhostuser0
+ - dpdkvhostuser1
+ vports_mac:
+ - "00:00:00:00:00:03"
+ - "00:00:00:00:00:04"
+ phy_ports: # Physical ports to configure ovs
+ - "0000:06:00.0"
+ - "0000:06:00.1"
+ flow:
+ - ovs-ofctl add-flow br0 in_port=1,action=output:3
+ - ovs-ofctl add-flow br0 in_port=3,action=output:1
+ - ovs-ofctl add-flow br0 in_port=4,action=output:2
+ - ovs-ofctl add-flow br0 in_port=2,action=output:4
+ phy_driver: i40e # kernel driver
+ images: "/var/lib/libvirt/images/ubuntu1.img"
+-
+ name: ovs
+ role: test
+ ip: 10.223.197.112
+ user: root
+ auth_type: password
+ password: intel123
+ vpath: "/usr/local/"
+ vports:
+ - dpdkvhostuser0
+ - dpdkvhostuser1
+ vports_mac:
+ - "00:00:00:00:00:03"
+ - "00:00:00:00:00:04"
+ phy_ports: # Physical ports to configure ovs
+ - "0000:06:00.0"
+ - "0000:06:00.1"
+ flow:
+ - ovs-ofctl add-flow br0 in_port=1,action=output:3
+ - ovs-ofctl add-flow br0 in_port=3,action=output:1
+ - ovs-ofctl add-flow br0 in_port=4,action=output:2
+ - ovs-ofctl add-flow br0 in_port=2,action=output:4
+ phy_driver: i40e # kernel driver
+ images: "/var/lib/libvirt/images/ubuntu1.img"
+
diff --git a/yardstick/tests/unit/benchmark/contexts/nodes_sample.yaml b/yardstick/tests/unit/benchmark/contexts/nodes_sample.yaml
new file mode 100644
index 000000000..8d50c3aea
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/nodes_sample.yaml
@@ -0,0 +1,33 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+nodes:
+-
+ name: node1
+ role: Controller
+ ip: 10.229.47.137
+ user: root
+ key_filename: /root/.yardstick_key
+-
+ name: node2
+ role: Controller
+ ip: 10.229.47.138
+ user: root
+ key_filename: /root/.yardstick_key
+-
+ name: node3
+ role: Compute
+ ip: 10.229.47.139
+ user: root
+ key_filename: /root/.yardstick_key
+-
+ name: node4
+ role: Baremetal
+ ip: 10.229.47.140
+ user: root
+ key_filename: /root/.yardstick_key
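
Editor's note (not part of the change): the pod sample above, like the other nodes_*.yaml fixtures in this commit, is plain YAML with a top-level "nodes" list, and the standalone-context tests exercise such files through StandaloneContextHelper.parse_pod_file. The sketch below only shows the shape of the data when loaded directly with PyYAML; load_nodes is a hypothetical helper, not project code.

    # Illustrative only: load a pod sample such as nodes_sample.yaml and
    # inspect its node entries.
    import yaml

    def load_nodes(path):
        with open(path) as handle:
            pod = yaml.safe_load(handle)
        return pod.get("nodes", [])

    nodes = load_nodes(
        "yardstick/tests/unit/benchmark/contexts/nodes_sample.yaml")
    for node in nodes:
        # Each entry carries at least a name, role, management IP and
        # SSH credentials (key_filename or password).
        print(node["name"], node["role"], node["ip"])
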
diff --git a/yardstick/tests/unit/benchmark/contexts/nodes_sample_new.yaml b/yardstick/tests/unit/benchmark/contexts/nodes_sample_new.yaml
new file mode 100644
index 000000000..a400bec03
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/nodes_sample_new.yaml
@@ -0,0 +1,96 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+nodes:
+-
+ name: trafficgen_1
+ role: TrafficGen
+ ip: 10.123.123.123
+ user: root
+ auth_type: password
+ password: password
+ interfaces:
+ xe0: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:03:00.0"
+ driver: ixgbe
+ dpdk_port_num: 0
+ local_ip: "152.16.100.20"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:00"
+ xe1: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:03:00.1"
+ driver: ixgbe
+ dpdk_port_num: 1
+ local_ip: "152.16.100.21"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:00"
+-
+ name: sriov
+ role: Sriov
+ ip: 10.123.123.122
+ user: root
+ auth_type: password
+ password: password
+ vf_macs:
+ - "00:00:00:00:00:00"
+ - "00:00:00:00:00:00"
+ phy_ports: # Physical ports to configure sriov
+ - "0000:06:00.0"
+ - "0000:06:00.1"
+ phy_driver: i40e # kernel driver
+ images: "/var/lib/libvirt/images/ubuntu1.img"
+
+-
+ name: vnf
+ role: vnf
+ ip: 10.123.123.121
+ user: root
+ auth_type: password
+ password: password
+ host: 10.123.123.121 #BM host == ip, SRIOV & ovs-dpdk host == compute node.
+ interfaces:
+ xe0: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:06:00.0"
+ driver: i40e
+ dpdk_port_num: 0
+ local_ip: "152.16.100.19"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:00"
+
+ xe1: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:06:00.1"
+ driver: i40e
+ dpdk_port_num: 1
+ local_ip: "152.16.40.19"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:00"
+ routing_table:
+ - network: "152.16.100.20"
+ netmask: "255.255.255.0"
+ gateway: "152.16.100.20"
+ if: "xe0"
+ - network: "152.16.40.20"
+ netmask: "255.255.255.0"
+ gateway: "152.16.40.20"
+ if: "xe1"
+ nd_route_tbl:
+ - network: "0064:ff9b:0:0:0:0:9810:6414"
+ netmask: "112"
+ gateway: "0064:ff9b:0:0:0:0:9810:6414"
+ if: "xe0"
+ - network: "0064:ff9b:0:0:0:0:9810:2814"
+ netmask: "112"
+ gateway: "0064:ff9b:0:0:0:0:9810:2814"
+ if: "xe1"
+
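Editor's note (not part of the change): each routing_table and nd_route_tbl entry above pairs a network with a netmask, written as a dotted quad for IPv4 and a prefix length for IPv6. netaddr, which the standalone model tests below already import, normalises both spellings into CIDR form; the snippet is an illustration only and assumes netaddr is installed.

    # Illustration only: normalise route entries from the sample above with
    # netaddr, the same library the standalone model tests use.
    import netaddr

    ipv4_route = {"network": "152.16.100.20", "netmask": "255.255.255.0"}
    ipv6_route = {"network": "0064:ff9b:0:0:0:0:9810:6414", "netmask": "112"}

    print(netaddr.IPNetwork("%(network)s/%(netmask)s" % ipv4_route))  # 152.16.100.20/24
    print(netaddr.IPNetwork("%(network)s/%(netmask)s" % ipv6_route))  # normalised IPv6 CIDR
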
diff --git a/yardstick/tests/unit/benchmark/contexts/nodes_sample_new_sriov.yaml b/yardstick/tests/unit/benchmark/contexts/nodes_sample_new_sriov.yaml
new file mode 100644
index 000000000..55ff2e778
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/nodes_sample_new_sriov.yaml
@@ -0,0 +1,82 @@
+nodes:
+-
+ name: trafficgen_1
+ role: TrafficGen
+ ip: 10.123.123.123
+ user: root
+ auth_type: password
+ password: password
+ interfaces:
+ xe0: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:03:00.0"
+ driver: ixgbe
+ dpdk_port_num: 0
+ local_ip: "152.16.100.20"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:00"
+ xe1: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:03:00.1"
+ driver: ixgbe
+ dpdk_port_num: 1
+ local_ip: "152.16.100.21"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:00"
+-
+ name: sriov
+ role: Sriov1
+ ip: 10.123.123.122
+ user: root
+ auth_type: password
+ password: password
+ vf_macs:
+ - "00:00:00:00:00:00"
+ - "00:00:00:00:00:00"
+ phy_ports: # Physical ports to configure sriov
+ - "0000:06:00.0"
+ - "0000:06:00.1"
+ phy_driver: i40e # kernel driver
+ images: "/var/lib/libvirt/images/ubuntu1.img"
+
+-
+ name: vnf
+ role: vnf
+ ip: 10.123.123.121
+ user: root
+ auth_type: password
+ password: password
+ host: 10.123.123.121 #BM host == ip, SRIOV & ovs-dpdk host == compute node.
+ interfaces:
+ xe0: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:06:00.0"
+ driver: i40e
+ dpdk_port_num: 0
+ local_ip: "152.16.100.19"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:00"
+
+ xe1: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:06:00.1"
+ driver: i40e
+ dpdk_port_num: 1
+ local_ip: "152.16.40.19"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:00"
+ routing_table:
+ - network: "152.16.100.20"
+ netmask: "255.255.255.0"
+ gateway: "152.16.100.20"
+ if: "xe0"
+ - network: "152.16.40.20"
+ netmask: "255.255.255.0"
+ gateway: "152.16.40.20"
+ if: "xe1"
+ nd_route_tbl:
+ - network: "0064:ff9b:0:0:0:0:9810:6414"
+ netmask: "112"
+ gateway: "0064:ff9b:0:0:0:0:9810:6414"
+ if: "xe0"
+ - network: "0064:ff9b:0:0:0:0:9810:2814"
+ netmask: "112"
+ gateway: "0064:ff9b:0:0:0:0:9810:2814"
+ if: "xe1"
+
diff --git a/yardstick/tests/unit/benchmark/contexts/nodes_sample_ovs.yaml b/yardstick/tests/unit/benchmark/contexts/nodes_sample_ovs.yaml
new file mode 100644
index 000000000..b1da1ea9f
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/nodes_sample_ovs.yaml
@@ -0,0 +1,104 @@
+# Copyright (c) 2016 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+nodes:
+-
+ name: trafficgen_1
+ role: TrafficGen
+ ip: 10.223.197.182
+ user: root
+ auth_type: password
+ password: intel123
+ interfaces:
+ xe0: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:03:00.0"
+ driver: ixgbe
+ dpdk_port_num: 0
+ local_ip: "152.16.100.20"
+ netmask: "255.255.255.0"
+ local_mac: "90:e2:ba:77:ce:68"
+ xe1: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:03:00.1"
+ driver: ixgbe
+ dpdk_port_num: 1
+ local_ip: "152.16.100.21"
+ netmask: "255.255.255.0"
+ local_mac: "90:e2:ba:77:ce:69"
+-
+ name: ovs
+ role: Ovsdpdk
+ ip: 10.223.197.222
+ user: root
+ auth_type: password
+ password: intel123
+ vpath: "/usr/local/"
+ vports:
+ - dpdkvhostuser0
+ - dpdkvhostuser1
+ vports_mac:
+ - "00:00:00:00:00:03"
+ - "00:00:00:00:00:04"
+ phy_ports: # Physical ports to configure ovs
+ - "0000:06:00.0"
+ - "0000:06:00.1"
+ flow:
+ - ovs-ofctl add-flow br0 in_port=1,action=output:3
+ - ovs-ofctl add-flow br0 in_port=3,action=output:1
+ - ovs-ofctl add-flow br0 in_port=4,action=output:2
+ - ovs-ofctl add-flow br0 in_port=2,action=output:4
+ phy_driver: i40e # kernel driver
+ images: "/var/lib/libvirt/images/ubuntu1.img"
+
+-
+ name: vnf
+ role: vnf
+ ip: 10.223.197.155
+ user: root
+ auth_type: password
+ password: intel123
+ host: 10.223.197.140
+ interfaces:
+ xe0: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:00:04.0"
+ driver: virtio-pci
+ dpdk_port_num: 0
+ local_ip: "152.16.100.19"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:03"
+
+ xe1: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:00:05.0"
+ driver: virtio-pci
+ dpdk_port_num: 1
+ local_ip: "152.16.40.19"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:04"
+ routing_table:
+ - network: "152.16.100.20"
+ netmask: "255.255.255.0"
+ gateway: "152.16.100.20"
+ if: "xe0"
+ - network: "152.16.40.20"
+ netmask: "255.255.255.0"
+ gateway: "152.16.40.20"
+ if: "xe1"
+ nd_route_tbl:
+ - network: "0064:ff9b:0:0:0:0:9810:6414"
+ netmask: "112"
+ gateway: "0064:ff9b:0:0:0:0:9810:6414"
+ if: "xe0"
+ - network: "0064:ff9b:0:0:0:0:9810:2814"
+ netmask: "112"
+ gateway: "0064:ff9b:0:0:0:0:9810:2814"
+ if: "xe1"
diff --git a/yardstick/tests/unit/benchmark/contexts/nodes_sample_ovsdpdk.yaml b/yardstick/tests/unit/benchmark/contexts/nodes_sample_ovsdpdk.yaml
new file mode 100644
index 000000000..c02849a05
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/nodes_sample_ovsdpdk.yaml
@@ -0,0 +1,104 @@
+# Copyright (c) 2016 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+nodes:
+-
+ name: trafficgen_1
+ role: TrafficGen
+ ip: 10.223.197.182
+ user: root
+ auth_type: password
+ password: intel123
+ interfaces:
+ xe0: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:03:00.0"
+ driver: ixgbe
+ dpdk_port_num: 0
+ local_ip: "152.16.100.20"
+ netmask: "255.255.255.0"
+ local_mac: "90:e2:ba:77:ce:68"
+ xe1: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:03:00.1"
+ driver: ixgbe
+ dpdk_port_num: 1
+ local_ip: "152.16.100.21"
+ netmask: "255.255.255.0"
+ local_mac: "90:e2:ba:77:ce:69"
+-
+ name: ovs
+ role: Ovsdpdk1
+ ip: 10.223.197.222
+ user: root
+ auth_type: password
+ password: intel123
+ vpath: "/usr/local/"
+ vports:
+ - dpdkvhostuser0
+ - dpdkvhostuser1
+ vports_mac:
+ - "00:00:00:00:00:03"
+ - "00:00:00:00:00:04"
+ phy_ports: # Physical ports to configure ovs
+ - "0000:06:00.0"
+ - "0000:06:00.1"
+ flow:
+ - ovs-ofctl add-flow br0 in_port=1,action=output:3
+ - ovs-ofctl add-flow br0 in_port=3,action=output:1
+ - ovs-ofctl add-flow br0 in_port=4,action=output:2
+ - ovs-ofctl add-flow br0 in_port=2,action=output:4
+ phy_driver: i40e # kernel driver
+ images: "/var/lib/libvirt/images/ubuntu1.img"
+
+-
+ name: vnf
+ role: vnf
+ ip: 10.223.197.155
+ user: root
+ auth_type: password
+ password: intel123
+ host: 10.223.197.140
+ interfaces:
+ xe0: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:00:04.0"
+ driver: virtio-pci
+ dpdk_port_num: 0
+ local_ip: "152.16.100.19"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:03"
+
+ xe1: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:00:05.0"
+ driver: virtio-pci
+ dpdk_port_num: 1
+ local_ip: "152.16.40.19"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:04"
+ routing_table:
+ - network: "152.16.100.20"
+ netmask: "255.255.255.0"
+ gateway: "152.16.100.20"
+ if: "xe0"
+ - network: "152.16.40.20"
+ netmask: "255.255.255.0"
+ gateway: "152.16.40.20"
+ if: "xe1"
+ nd_route_tbl:
+ - network: "0064:ff9b:0:0:0:0:9810:6414"
+ netmask: "112"
+ gateway: "0064:ff9b:0:0:0:0:9810:6414"
+ if: "xe0"
+ - network: "0064:ff9b:0:0:0:0:9810:2814"
+ netmask: "112"
+ gateway: "0064:ff9b:0:0:0:0:9810:2814"
+ if: "xe1"
diff --git a/yardstick/tests/unit/benchmark/contexts/standalone/__init__.py b/yardstick/tests/unit/benchmark/contexts/standalone/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/standalone/__init__.py
diff --git a/yardstick/tests/unit/benchmark/contexts/standalone/nodes_duplicate_sample.yaml b/yardstick/tests/unit/benchmark/contexts/standalone/nodes_duplicate_sample.yaml
new file mode 100644
index 000000000..2e501a6af
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/standalone/nodes_duplicate_sample.yaml
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+nodes:
+-
+ name: node1
+ role: Controller
+ ip: 10.229.47.137
+ user: root
+ key_filename: /root/.yardstick_key
+-
+ name: node1
+ role: Controller
+ ip: 10.229.47.138
+ user: root
+ key_filename: /root/.yardstick_key
+
+-
+ name: node5
+ role: Sriov
+ ip: 10.229.47.140
+ user: root
+ password: password
+ key_filename: /root/.yardstick_key
+
+-
+ name: node5
+ role: OvsDpdk
+ ip: 10.229.47.140
+ user: root
+ password: password
+ key_filename: /root/.yardstick_key
diff --git a/yardstick/tests/unit/benchmark/contexts/standalone/nodes_ovs_dpdk_sample.yaml b/yardstick/tests/unit/benchmark/contexts/standalone/nodes_ovs_dpdk_sample.yaml
new file mode 100644
index 000000000..0f51dbe63
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/standalone/nodes_ovs_dpdk_sample.yaml
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+nodes:
+-
+ name: node1
+ role: Controller
+ ip: 10.229.47.137
+ user: root
+ key_filename: /root/.yardstick_key
+-
+ name: node2
+ role: Controller
+ ip: 10.229.47.138
+ user: root
+ key_filename: /root/.yardstick_key
+-
+ name: node3
+ role: Compute
+ ip: 10.229.47.139
+ user: root
+ key_filename: /root/.yardstick_key
+-
+ name: node4
+ role: Baremetal
+ ip: 10.229.47.140
+ user: root
+ key_filename: /root/.yardstick_key
+-
+ name: node5
+ role: OvsDpdk
+ ip: 10.229.47.140
+ user: root
+ password: password
+ key_filename: /root/.yardstick_key
diff --git a/yardstick/tests/unit/benchmark/contexts/standalone/nodes_sample.yaml b/yardstick/tests/unit/benchmark/contexts/standalone/nodes_sample.yaml
new file mode 100644
index 000000000..8d50c3aea
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/standalone/nodes_sample.yaml
@@ -0,0 +1,33 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+nodes:
+-
+ name: node1
+ role: Controller
+ ip: 10.229.47.137
+ user: root
+ key_filename: /root/.yardstick_key
+-
+ name: node2
+ role: Controller
+ ip: 10.229.47.138
+ user: root
+ key_filename: /root/.yardstick_key
+-
+ name: node3
+ role: Compute
+ ip: 10.229.47.139
+ user: root
+ key_filename: /root/.yardstick_key
+-
+ name: node4
+ role: Baremetal
+ ip: 10.229.47.140
+ user: root
+ key_filename: /root/.yardstick_key
diff --git a/yardstick/tests/unit/benchmark/contexts/standalone/nodes_sriov_sample.yaml b/yardstick/tests/unit/benchmark/contexts/standalone/nodes_sriov_sample.yaml
new file mode 100644
index 000000000..1c43b8725
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/standalone/nodes_sriov_sample.yaml
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+nodes:
+-
+ name: node1
+ role: Controller
+ ip: 10.229.47.137
+ user: root
+ key_filename: /root/.yardstick_key
+-
+ name: node2
+ role: Controller
+ ip: 10.229.47.138
+ user: root
+ key_filename: /root/.yardstick_key
+-
+ name: node3
+ role: Compute
+ ip: 10.229.47.139
+ user: root
+ key_filename: /root/.yardstick_key
+-
+ name: node4
+ role: Baremetal
+ ip: 10.229.47.140
+ user: root
+ key_filename: /root/.yardstick_key
+-
+ name: node5
+ role: Sriov
+ ip: 10.229.47.140
+ user: root
+ password: password
+ key_filename: /root/.yardstick_key
diff --git a/yardstick/tests/unit/benchmark/contexts/standalone/test_model.py b/yardstick/tests/unit/benchmark/contexts/standalone/test_model.py
new file mode 100644
index 000000000..e76a3ca27
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/standalone/test_model.py
@@ -0,0 +1,640 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+import os
+import uuid
+
+import mock
+import netaddr
+import unittest
+from xml.etree import ElementTree
+
+from yardstick import ssh
+from yardstick.benchmark.contexts.standalone import model
+from yardstick.common import exceptions
+from yardstick import constants
+from yardstick.network_services import utils
+
+
+XML_SAMPLE = """<?xml version="1.0"?>
+<domain type="kvm">
+ <devices>
+ </devices>
+</domain>
+"""
+
+XML_SAMPLE_INTERFACE = """<?xml version="1.0"?>
+<domain type="kvm">
+ <devices>
+ <interface>
+ </interface>
+ </devices>
+</domain>
+"""
+
+
+class ModelLibvirtTestCase(unittest.TestCase):
+
+ XML_STR = model.VM_TEMPLATE.format(
+ vm_name="vm_name",
+ random_uuid=uuid.uuid4(),
+ mac_addr="00:01:02:03:04:05",
+ memory=2048, vcpu=2, cpu=2,
+ numa_cpus=0 - 10,
+ socket=1, threads=1,
+ vm_image="/var/lib/libvirt/images/yardstick-nsb-image.img",
+ cpuset=2 - 10, cputune='', machine='pc')
+
+ def setUp(self):
+ self.pci_address_str = '0001:04:03.2'
+ self.pci_address = utils.PciAddress(self.pci_address_str)
+ self.mac = '00:00:00:00:00:01'
+ self._mock_ssh = mock.Mock()
+ self.mock_ssh = self._mock_ssh.start()
+ self.addCleanup(self._cleanup)
+
+ def _cleanup(self):
+ self._mock_ssh.stop()
+
+ # TODO: Remove mocking of yardstick.ssh.SSH (here and elsewhere)
+ # In this case, we are mocking a param to be passed into other methods
+ # It can be a generic Mock() with return values set for the right methods
+ def test_check_if_vm_exists_and_delete(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = mock.Mock(return_value=(0, "a", ""))
+ ssh.return_value = ssh_mock
+ # NOTE(ralonsoh): this test doesn't cover function execution.
+ model.Libvirt.check_if_vm_exists_and_delete('vm-0', ssh_mock)
+
+ def test_virsh_create_vm(self):
+ self.mock_ssh.execute = mock.Mock(return_value=(0, 0, 0))
+ model.Libvirt.virsh_create_vm(self.mock_ssh, 'vm-0')
+ self.mock_ssh.execute.assert_called_once_with('virsh create vm-0')
+
+ def test_virsh_create_vm_error(self):
+ self.mock_ssh.execute = mock.Mock(return_value=(1, 0, 'error_create'))
+ with self.assertRaises(exceptions.LibvirtCreateError) as exc:
+ model.Libvirt.virsh_create_vm(self.mock_ssh, 'vm-0')
+ self.assertEqual('Error creating the virtual machine. Error: '
+ 'error_create.', str(exc.exception))
+ self.mock_ssh.execute.assert_called_once_with('virsh create vm-0')
+
+ def test_virsh_destroy_vm(self):
+ self.mock_ssh.execute = mock.Mock(return_value=(0, 0, 0))
+ model.Libvirt.virsh_destroy_vm('vm-0', self.mock_ssh)
+ self.mock_ssh.execute.assert_called_once_with('virsh destroy vm-0')
+
+ @mock.patch.object(model, 'LOG')
+ def test_virsh_destroy_vm_error(self, mock_logger):
+ self.mock_ssh.execute = mock.Mock(return_value=(1, 0, 'error_destroy'))
+ mock_logger.warning = mock.Mock()
+ model.Libvirt.virsh_destroy_vm('vm-0', self.mock_ssh)
+ mock_logger.warning.assert_called_once_with(
+ 'Error destroying VM %s. Error: %s', 'vm-0', 'error_destroy')
+ self.mock_ssh.execute.assert_called_once_with('virsh destroy vm-0')
+
+ def test_add_interface_address(self):
+ xml = ElementTree.ElementTree(
+ element=ElementTree.fromstring(XML_SAMPLE_INTERFACE))
+ interface = xml.find('devices').find('interface')
+ result = model.Libvirt._add_interface_address(interface, self.pci_address)
+ self.assertEqual('pci', result.get('type'))
+ self.assertEqual('0x{}'.format(self.pci_address.domain),
+ result.get('domain'))
+ self.assertEqual('0x{}'.format(self.pci_address.bus),
+ result.get('bus'))
+ self.assertEqual('0x{}'.format(self.pci_address.slot),
+ result.get('slot'))
+ self.assertEqual('0x{}'.format(self.pci_address.function),
+ result.get('function'))
+
+ def test_add_ovs_interfaces(self):
+ xml_input = copy.deepcopy(XML_SAMPLE)
+ xml_output = model.Libvirt.add_ovs_interface(
+ '/usr/local', 0, self.pci_address_str, self.mac, xml_input, 4)
+
+ root = ElementTree.fromstring(xml_output)
+ et_out = ElementTree.ElementTree(element=root)
+ interface = et_out.find('devices').find('interface')
+ self.assertEqual('vhostuser', interface.get('type'))
+ mac = interface.find('mac')
+ self.assertEqual(self.mac, mac.get('address'))
+ source = interface.find('source')
+ self.assertEqual('unix', source.get('type'))
+ self.assertEqual('/usr/local/var/run/openvswitch/dpdkvhostuser0',
+ source.get('path'))
+ self.assertEqual('client', source.get('mode'))
+ _model = interface.find('model')
+ self.assertEqual('virtio', _model.get('type'))
+ driver = interface.find('driver')
+ self.assertEqual('4', driver.get('queues'))
+ host = driver.find('host')
+ self.assertEqual('off', host.get('mrg_rxbuf'))
+ self.assertIsNotNone(interface.find('address'))
+
+ def test_add_sriov_interfaces(self):
+ xml_input = copy.deepcopy(XML_SAMPLE)
+ vm_pci = '0001:05:04.2'
+ xml_output = model.Libvirt.add_sriov_interfaces(
+ vm_pci, self.pci_address_str, self.mac, xml_input)
+ root = ElementTree.fromstring(xml_output)
+ et_out = ElementTree.ElementTree(element=root)
+ interface = et_out.find('devices').find('interface')
+ self.assertEqual('yes', interface.get('managed'))
+ self.assertEqual('hostdev', interface.get('type'))
+ mac = interface.find('mac')
+ self.assertEqual(self.mac, mac.get('address'))
+ source = interface.find('source')
+ source_address = source.find('address')
+ self.assertIsNotNone(source.find('address'))
+
+ self.assertEqual('pci', source_address.get('type'))
+ self.assertEqual('0x' + self.pci_address_str.split(':')[0],
+ source_address.get('domain'))
+ self.assertEqual('0x' + self.pci_address_str.split(':')[1],
+ source_address.get('bus'))
+ self.assertEqual('0x' + self.pci_address_str.split(':')[2].split('.')[0],
+ source_address.get('slot'))
+ self.assertEqual('0x' + self.pci_address_str.split(':')[2].split('.')[1],
+ source_address.get('function'))
+
+ interface_address = interface.find('address')
+ self.assertEqual('pci', interface_address.get('type'))
+ self.assertEqual('0x' + vm_pci.split(':')[0],
+ interface_address.get('domain'))
+ self.assertEqual('0x' + vm_pci.split(':')[1],
+ interface_address.get('bus'))
+ self.assertEqual('0x' + vm_pci.split(':')[2].split('.')[0],
+ interface_address.get('slot'))
+ self.assertEqual('0x' + vm_pci.split(':')[2].split('.')[1],
+ interface_address.get('function'))
+
+ def test_add_cdrom(self):
+ xml_input = copy.deepcopy(XML_SAMPLE)
+ xml_output = model.Libvirt.add_cdrom('/var/lib/libvirt/images/data.img', xml_input)
+
+ root = ElementTree.fromstring(xml_output)
+ et_out = ElementTree.ElementTree(element=root)
+ disk = et_out.find('devices').find('disk')
+ self.assertEqual('file', disk.get('type'))
+ self.assertEqual('cdrom', disk.get('device'))
+ driver = disk.find('driver')
+ self.assertEqual('qemu', driver.get('name'))
+ self.assertEqual('raw', driver.get('type'))
+ source = disk.find('source')
+ self.assertEqual('/var/lib/libvirt/images/data.img', source.get('file'))
+ target = disk.find('target')
+ self.assertEqual('hdb', target.get('dev'))
+ self.assertIsNotNone(disk.find('readonly'))
+
+ def test_gen_cdrom_image(self):
+ self.mock_ssh.execute = mock.Mock(return_value=(0, 0, 0))
+ root = ElementTree.fromstring(self.XML_STR)
+ hostname = root.find('name').text
+ meta_data = "/tmp/meta-data"
+ user_data = "/tmp/user-data"
+ network_data = "/tmp/network-config"
+ file_path = "/tmp/cdrom-0.img"
+ key_filename = "id_rsa"
+ pub_key_str = "KEY"
+ user = 'root'
+ mac = "00:11:22:33:44:55"
+ ip = "1.1.1.7/24"
+ user_config = [" - name: {user_name}",
+ " ssh_authorized_keys:",
+ " - {pub_key_str}"]
+
+ user_conf = os.linesep.join(user_config).format(pub_key_str=pub_key_str, user_name=user)
+ with mock.patch('six.moves.builtins.open', mock.mock_open(read_data=pub_key_str),
+ create=True) as mock_file:
+ with open(key_filename, "r") as h:
+ result = h.read()
+ model.Libvirt.gen_cdrom_image(self.mock_ssh, file_path, hostname, user, key_filename,
+ mac, ip)
+ mock_file.assert_called_with(".".join([key_filename, "pub"]), "r")
+ self.assertEqual(result, pub_key_str)
+
+ self.mock_ssh.execute.assert_has_calls([
+ mock.call("touch %s" % meta_data),
+ mock.call(model.USER_DATA_TEMPLATE.format(user_file=user_data, host=hostname,
+ user_config=user_conf)),
+ mock.call(model.NETWORK_DATA_TEMPLATE.format(network_file=network_data,
+ mac_address=mac, ip_address=ip)),
+ mock.call("genisoimage -output {0} -volid cidata"
+ " -joliet -r {1} {2} {3}".format(file_path, meta_data, user_data,
+ network_data)),
+ mock.call("rm {0} {1} {2}".format(meta_data, user_data, network_data))
+ ])
+
+ def test_create_snapshot_qemu(self):
+ self.mock_ssh.execute = mock.Mock(return_value=(0, 0, 0))
+ index = 1
+ vm_image = '/var/lib/libvirt/images/%s.qcow2' % index
+ base_image = '/tmp/base_image'
+
+ model.Libvirt.create_snapshot_qemu(self.mock_ssh, index, base_image)
+ self.mock_ssh.execute.assert_has_calls([
+ mock.call('rm -- "%s"' % vm_image),
+ mock.call('test -r %s' % base_image),
+ mock.call('qemu-img create -f qcow2 -o backing_file=%s %s' %
+ (base_image, vm_image))
+ ])
+
+ @mock.patch.object(os.path, 'basename', return_value='base_image')
+ @mock.patch.object(os.path, 'normpath')
+ @mock.patch.object(os, 'access', return_value=True)
+ def test_create_snapshot_qemu_no_image_remote(self,
+ mock_os_access, mock_normpath, mock_basename):
+ self.mock_ssh.execute = mock.Mock(
+ side_effect=[(0, 0, 0), (1, 0, 0), (0, 0, 0), (0, 0, 0)])
+ index = 1
+ vm_image = '/var/lib/libvirt/images/%s.qcow2' % index
+ base_image = '/tmp/base_image'
+ mock_normpath.return_value = base_image
+
+ model.Libvirt.create_snapshot_qemu(self.mock_ssh, index, base_image)
+ self.mock_ssh.execute.assert_has_calls([
+ mock.call('rm -- "%s"' % vm_image),
+ mock.call('test -r %s' % base_image),
+ mock.call('mv -- "/tmp/%s" "%s"' % ('base_image', base_image)),
+ mock.call('qemu-img create -f qcow2 -o backing_file=%s %s' %
+ (base_image, vm_image))
+ ])
+ mock_os_access.assert_called_once_with(base_image, os.R_OK)
+ mock_normpath.assert_called_once_with(base_image)
+ mock_basename.assert_has_calls([mock.call(base_image)])
+ self.mock_ssh.put_file.assert_called_once_with(base_image,
+ '/tmp/base_image')
+
+ @mock.patch.object(model.Libvirt, 'gen_cdrom_image')
+ def test_check_update_key(self, mock_gen_cdrom_image):
+ node = {
+ 'user': 'defuser',
+ 'key_filename': '/home/ubuntu/id_rsa',
+ 'ip': '1.1.1.7',
+ 'netmask': '255.255.255.0'}
+ cdrom_img = "/var/lib/libvirt/images/data.img"
+ id_name = 'fake_name'
+ key_filename = node.get('key_filename')
+ root = ElementTree.fromstring(self.XML_STR)
+ hostname = root.find('name').text
+ mac = "00:11:22:33:44:55"
+ ip = "{0}/{1}".format(node.get('ip'), node.get('netmask'))
+ ip = "{0}/{1}".format(node.get('ip'), netaddr.IPNetwork(ip).prefixlen)
+ model.StandaloneContextHelper.check_update_key(self.mock_ssh, node, hostname, id_name,
+ cdrom_img, mac)
+ mock_gen_cdrom_image.assert_called_once_with(self.mock_ssh, cdrom_img, hostname,
+ node.get('user'), key_filename, mac, ip)
+
+ @mock.patch.object(os, 'access', return_value=False)
+ def test_create_snapshot_qemu_no_image_local(self, mock_os_access):
+ self.mock_ssh.execute = mock.Mock(side_effect=[(0, 0, 0), (1, 0, 0)])
+ base_image = '/tmp/base_image'
+
+ with self.assertRaises(exceptions.LibvirtQemuImageBaseImageNotPresent):
+ model.Libvirt.create_snapshot_qemu(self.mock_ssh, 3, base_image)
+ mock_os_access.assert_called_once_with(base_image, os.R_OK)
+
+ def test_create_snapshot_qemu_error_qemuimg_command(self):
+ self.mock_ssh.execute = mock.Mock(
+ side_effect=[(0, 0, 0), (0, 0, 0), (1, 0, 0)])
+ index = 1
+ vm_image = '/var/lib/libvirt/images/%s.qcow2' % index
+ base_image = '/tmp/base_image'
+
+ with self.assertRaises(exceptions.LibvirtQemuImageCreateError):
+ model.Libvirt.create_snapshot_qemu(self.mock_ssh, index,
+ base_image)
+ self.mock_ssh.execute.assert_has_calls([
+ mock.call('rm -- "%s"' % vm_image),
+ mock.call('test -r %s' % base_image),
+ mock.call('qemu-img create -f qcow2 -o backing_file=%s %s' %
+ (base_image, vm_image))
+ ])
+
+ @mock.patch.object(model.Libvirt, 'pin_vcpu_for_perf', return_value='4,5')
+ @mock.patch.object(model.Libvirt, 'create_snapshot_qemu',
+ return_value='qemu_image')
+ def test_build_vm_xml(self, mock_create_snapshot_qemu,
+ mock_pin_vcpu_for_perf):
+ extra_specs = {'hw:cpu_cores': '4',
+ 'hw:cpu_sockets': '3',
+ 'hw:cpu_threads': '2',
+ 'cputune': 'cool'}
+ flavor = {'ram': '1024',
+ 'extra_specs': extra_specs,
+ 'hw_socket': '1',
+ 'images': 'images'}
+ mac = model.StandaloneContextHelper.get_mac_address(0x00)
+ _uuid = uuid.uuid4()
+ connection = mock.Mock()
+ cdrom_img = '/tmp/cdrom-0.img'
+ with mock.patch.object(model.StandaloneContextHelper,
+ 'get_mac_address', return_value=mac) as \
+ mock_get_mac_address, \
+ mock.patch.object(uuid, 'uuid4', return_value=_uuid):
+ xml_out, mac = model.Libvirt.build_vm_xml(
+ connection, flavor, 'vm_name', 100, cdrom_img)
+
+ xml_ref = model.VM_TEMPLATE.format(vm_name='vm_name',
+ random_uuid=_uuid, mac_addr=mac, memory='1024', vcpu='8', cpu='4',
+ numa_cpus='0-7', socket='3', threads='2',
+ vm_image='qemu_image', cpuset='4,5', cputune='cool',
+ machine='pc-i440fx-xenial')
+ xml_ref = model.Libvirt.add_cdrom(cdrom_img, xml_ref)
+ self.assertEqual(xml_out, xml_ref)
+ mock_get_mac_address.assert_called_once_with(0x00)
+ mock_create_snapshot_qemu.assert_called_once_with(
+ connection, 100, 'images')
+ mock_pin_vcpu_for_perf.assert_called_once_with(connection, '1')
+
+ # TODO: Edit this test to test state instead of output
+ # update_interrupts_hugepages_perf does not return anything
+ def test_update_interrupts_hugepages_perf(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, "a", ""))
+ ssh.return_value = ssh_mock
+ # NOTE(ralonsoh): 'update_interrupts_hugepages_perf' always return
+ # None, this check is trivial.
+ #status = Libvirt.update_interrupts_hugepages_perf(ssh_mock)
+ #self.assertIsNone(status)
+ model.Libvirt.update_interrupts_hugepages_perf(ssh_mock)
+
+ @mock.patch.object(model, 'CpuSysCores')
+ @mock.patch.object(model.Libvirt, 'update_interrupts_hugepages_perf')
+ def test_pin_vcpu_for_perf(self, *args):
+ # NOTE(ralonsoh): test mocked methods/variables.
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, "a", ""))
+ ssh.return_value = ssh_mock
+ status = model.Libvirt.pin_vcpu_for_perf(ssh_mock, 4)
+ self.assertIsNotNone(status)
+
+
+class StandaloneContextHelperTestCase(unittest.TestCase):
+
+ NODE_SAMPLE = "nodes_sample.yaml"
+ NODE_SRIOV_SAMPLE = "nodes_sriov_sample.yaml"
+
+ NETWORKS = {
+ 'mgmt': {'cidr': '152.16.100.10/24'},
+ 'private_0': {
+ 'phy_port': "0000:05:00.0",
+ 'vpci': "0000:00:07.0",
+ 'cidr': '152.16.100.10/24',
+ 'gateway_ip': '152.16.100.20'},
+ 'public_0': {
+ 'phy_port': "0000:05:00.1",
+ 'vpci': "0000:00:08.0",
+ 'cidr': '152.16.40.10/24',
+ 'gateway_ip': '152.16.100.20'}
+ }
+
+ def setUp(self):
+ self.helper = model.StandaloneContextHelper()
+
+ def test___init__(self):
+ self.assertIsNone(self.helper.file_path)
+
+ def test_install_req_libs(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(1, "a", ""))
+ ssh.return_value = ssh_mock
+ # NOTE(ralonsoh): this test doesn't cover function execution. This test
+ # should also check mocked function calls.
+ model.StandaloneContextHelper.install_req_libs(ssh_mock)
+
+ def test_get_kernel_module(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(1, "i40e", ""))
+ ssh.return_value = ssh_mock
+ # NOTE(ralonsoh): this test doesn't cover function execution. This test
+ # should also check mocked function calls.
+ model.StandaloneContextHelper.get_kernel_module(
+ ssh_mock, "05:00.0", None)
+
+ @mock.patch.object(model.StandaloneContextHelper, 'get_kernel_module')
+ def test_get_nic_details(self, mock_get_kernel_module):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = mock.Mock(return_value=(1, "i40e ixgbe", ""))
+ ssh.return_value = ssh_mock
+ mock_get_kernel_module.return_value = "i40e"
+ # NOTE(ralonsoh): this test doesn't cover function execution. This test
+ # should also check mocked function calls.
+ model.StandaloneContextHelper.get_nic_details(
+ ssh_mock, self.NETWORKS, 'dpdk-devbind.py')
+
+ def test_get_virtual_devices(self):
+ pattern = "PCI_SLOT_NAME=0000:05:00.0"
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(1, pattern, ""))
+ ssh.return_value = ssh_mock
+ # NOTE(ralonsoh): this test doesn't cover function execution. This test
+ # should also check mocked function calls.
+ model.StandaloneContextHelper.get_virtual_devices(
+ ssh_mock, '0000:00:05.0')
+
+ def _get_file_abspath(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ file_path = os.path.join(curr_path, filename)
+ return file_path
+
+ def test_parse_pod_file(self):
+ self.helper.file_path = self._get_file_abspath("dummy")
+ self.assertRaises(IOError, self.helper.parse_pod_file,
+ self.helper.file_path)
+
+ self.helper.file_path = self._get_file_abspath(self.NODE_SAMPLE)
+ self.assertRaises(TypeError, self.helper.parse_pod_file,
+ self.helper.file_path)
+
+ self.helper.file_path = self._get_file_abspath(self.NODE_SRIOV_SAMPLE)
+ self.assertIsNotNone(self.helper.parse_pod_file(self.helper.file_path))
+
+ def test_get_mac_address(self):
+ status = model.StandaloneContextHelper.get_mac_address()
+ self.assertIsNotNone(status)
+
+ @mock.patch('yardstick.ssh.SSH')
+ def test_get_mgmt_ip(self, *args):
+ # NOTE(ralonsoh): test mocked methods/variables.
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = mock.Mock(
+ return_value=(1, "1.2.3.4 00:00:00:00:00:01", ""))
+ ssh.return_value = ssh_mock
+ # NOTE(ralonsoh): this test doesn't cover function execution. This test
+ # should also check mocked function calls.
+ status = model.StandaloneContextHelper.get_mgmt_ip(
+ ssh_mock, "00:00:00:00:00:01", "1.1.1.1/24", {})
+ self.assertIsNotNone(status)
+
+ @mock.patch('yardstick.ssh.SSH')
+ def test_get_mgmt_ip_no(self, *args):
+ # NOTE(ralonsoh): test mocked methods/variables.
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(1, "", ""))
+ ssh.return_value = ssh_mock
+ # NOTE(ralonsoh): this test doesn't cover function execution. This test
+ # should also check mocked function calls.
+ model.WAIT_FOR_BOOT = 0
+ status = model.StandaloneContextHelper.get_mgmt_ip(
+ ssh_mock, "99", "1.1.1.1/24", {})
+ self.assertIsNone(status)
+
+
+class ServerTestCase(unittest.TestCase):
+
+ NETWORKS = {
+ 'mgmt': {'cidr': '152.16.100.10/24'},
+ 'private_0': {
+ 'phy_port': "0000:05:00.0",
+ 'vpci': "0000:00:07.0",
+ 'driver': 'i40e',
+ 'mac': '',
+ 'cidr': '152.16.100.10/24',
+ 'gateway_ip': '152.16.100.20'},
+ 'public_0': {
+ 'phy_port': "0000:05:00.1",
+ 'vpci': "0000:00:08.0",
+ 'driver': 'i40e',
+ 'mac': '',
+ 'cidr': '152.16.40.10/24',
+ 'gateway_ip': '152.16.100.20'}
+ }
+
+ def setUp(self):
+ self.server = model.Server()
+
+ def test___init__(self):
+ self.assertIsNotNone(self.server)
+
+ def test_build_vnf_interfaces(self):
+ vnf = {
+ "network_ports": {
+ 'mgmt': {'cidr': '152.16.100.10/24'},
+ 'xe0': ['private_0'],
+ 'xe1': ['public_0'],
+ }
+ }
+ status = model.Server.build_vnf_interfaces(vnf, self.NETWORKS)
+ self.assertIsNotNone(status)
+
+ def test_generate_vnf_instance(self):
+ vnf = {
+ "network_ports": {
+ 'mgmt': {'cidr': '152.16.100.10/24'},
+ 'xe0': ['private_0'],
+ 'xe1': ['public_0'],
+ }
+ }
+ status = self.server.generate_vnf_instance(
+ {}, self.NETWORKS, '1.1.1.1/24', 'vm-0', vnf, '00:00:00:00:00:01')
+ self.assertIsNotNone(status)
+
+
+class OvsDeployTestCase(unittest.TestCase):
+
+ OVS_DETAILS = {'version': {'ovs': 'ovs_version', 'dpdk': 'dpdk_version'}}
+
+ def setUp(self):
+ self._mock_ssh = mock.patch.object(ssh, 'SSH')
+ self.mock_ssh = self._mock_ssh.start()
+ self.ovs_deploy = model.OvsDeploy(self.mock_ssh,
+ '/tmp/dpdk-devbind.py',
+ self.OVS_DETAILS)
+ self._mock_path_isfile = mock.patch.object(os.path, 'isfile')
+ self._mock_path_join = mock.patch.object(os.path, 'join')
+ self.mock_path_isfile = self._mock_path_isfile.start()
+ self.mock_path_join = self._mock_path_join.start()
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_ssh.stop()
+ self._mock_path_isfile.stop()
+ self._mock_path_join.stop()
+
+ @mock.patch.object(model.StandaloneContextHelper, 'install_req_libs')
+ def test_prerequisite(self, mock_install_req_libs):
+ pkgs = ["git", "build-essential", "pkg-config", "automake",
+ "autotools-dev", "libltdl-dev", "cmake", "libnuma-dev",
+ "libpcap-dev"]
+ self.ovs_deploy.prerequisite()
+ mock_install_req_libs.assert_called_once_with(
+ self.ovs_deploy.connection, pkgs)
+
+ def test_ovs_deploy_no_file(self):
+ self.mock_path_isfile.return_value = False
+ mock_file = mock.Mock()
+ self.mock_path_join.return_value = mock_file
+
+ self.ovs_deploy.ovs_deploy()
+ self.mock_path_isfile.assert_called_once_with(mock_file)
+ self.mock_path_join.assert_called_once_with(
+ constants.YARDSTICK_ROOT_PATH,
+ 'yardstick/resources/scripts/install/',
+ self.ovs_deploy.OVS_DEPLOY_SCRIPT)
+
+ @mock.patch.object(os.environ, 'get', return_value='test_proxy')
+ def test_ovs_deploy(self, mock_env_get):
+ self.mock_path_isfile.return_value = True
+ mock_deploy_file = mock.Mock()
+ mock_remove_ovs_deploy = mock.Mock()
+ self.mock_path_join.side_effect = [mock_deploy_file,
+ mock_remove_ovs_deploy]
+ dpdk_version = self.OVS_DETAILS['version']['dpdk']
+ ovs_version = self.OVS_DETAILS['version']['ovs']
+
+ with mock.patch.object(self.ovs_deploy.connection, 'put') as \
+ mock_put, \
+ mock.patch.object(self.ovs_deploy.connection, 'execute') as \
+ mock_execute, \
+ mock.patch.object(self.ovs_deploy, 'prerequisite'):
+ mock_execute.return_value = (0, 0, 0)
+ self.ovs_deploy.ovs_deploy()
+
+ self.mock_path_isfile.assert_called_once_with(mock_deploy_file)
+ self.mock_path_join.assert_has_calls([
+ mock.call(constants.YARDSTICK_ROOT_PATH,
+ 'yardstick/resources/scripts/install/',
+ self.ovs_deploy.OVS_DEPLOY_SCRIPT),
+ mock.call(self.ovs_deploy.bin_path,
+ self.ovs_deploy.OVS_DEPLOY_SCRIPT)
+ ])
+ mock_put.assert_called_once_with(mock_deploy_file,
+ mock_remove_ovs_deploy)
+ cmd = ("sudo -E %(remote_ovs_deploy)s --ovs='%(ovs_version)s' "
+ "--dpdk='%(dpdk_version)s' -p='%(proxy)s'" %
+ {'remote_ovs_deploy': mock_remove_ovs_deploy,
+ 'ovs_version': ovs_version,
+ 'dpdk_version': dpdk_version,
+ 'proxy': 'test_proxy'})
+ mock_execute.assert_called_once_with(cmd)
+ mock_env_get.assert_has_calls([mock.call('http_proxy', '')])
diff --git a/yardstick/tests/unit/benchmark/contexts/standalone/test_ovs_dpdk.py b/yardstick/tests/unit/benchmark/contexts/standalone/test_ovs_dpdk.py
new file mode 100644
index 000000000..413bb68b7
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/standalone/test_ovs_dpdk.py
@@ -0,0 +1,500 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import io
+import os
+
+import mock
+import six
+import unittest
+
+from yardstick.benchmark import contexts
+from yardstick.benchmark.contexts import base
+from yardstick.benchmark.contexts.standalone import model
+from yardstick.benchmark.contexts.standalone import ovs_dpdk
+from yardstick.common import exceptions
+from yardstick.common import utils as common_utils
+from yardstick.network_services import utils
+
+
+class OvsDpdkContextTestCase(unittest.TestCase):
+
+ NODES_SAMPLE = "nodes_sample.yaml"
+    NODES_OVS_DPDK_SAMPLE = "nodes_ovs_dpdk_sample.yaml"
+ NODES_DUPLICATE_SAMPLE = "nodes_duplicate_sample.yaml"
+
+ NETWORKS = {
+ 'private_0': {
+ 'phy_port': "0000:05:00.0",
+ 'vpci': "0000:00:07.0",
+ 'cidr': '152.16.100.10/24',
+ 'interface': 'if0',
+ 'mac': "00:00:00:00:00:01",
+ 'vf_pci': {'vf_pci': 0},
+ 'gateway_ip': '152.16.100.20'},
+ 'public_0': {
+ 'phy_port': "0000:05:00.1",
+ 'vpci': "0000:00:08.0",
+ 'cidr': '152.16.40.10/24',
+ 'interface': 'if0',
+ 'vf_pci': {'vf_pci': 0},
+ 'mac': "00:00:00:00:00:01",
+ 'gateway_ip': '152.16.100.20'},
+ }
+
+ def setUp(self):
+ self.attrs = {
+ 'name': 'foo',
+ 'task_id': '1234567890',
+            'file': self._get_file_abspath(self.NODES_OVS_DPDK_SAMPLE)
+ }
+ self.ovs_dpdk = ovs_dpdk.OvsDpdkContext()
+ self._mock_log = mock.patch.object(ovs_dpdk, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.addCleanup(self._remove_contexts)
+ self.addCleanup(self._stop_mocks)
+
+ @staticmethod
+ def _remove_contexts():
+ for context in base.Context.list:
+ context._delete_context()
+ base.Context.list = []
+
+ def _stop_mocks(self):
+ self._mock_log.stop()
+
+ @mock.patch('yardstick.benchmark.contexts.standalone.model.Server')
+ @mock.patch('yardstick.benchmark.contexts.standalone.model.StandaloneContextHelper')
+ def test___init__(self, mock_helper, mock_server):
+ self.ovs_dpdk.helper = mock_helper
+ self.ovs_dpdk.vnf_node = mock_server
+ self.assertIsNone(self.ovs_dpdk.file_path)
+ self.assertTrue(self.ovs_dpdk.first_run)
+
+ def test_init(self):
+ ATTRS = {
+ 'name': contexts.CONTEXT_STANDALONEOVSDPDK,
+ 'task_id': '1234567890',
+ 'file': 'pod',
+ 'flavor': {},
+ 'servers': {},
+ 'networks': {},
+ }
+
+ self.ovs_dpdk.helper.parse_pod_file = mock.Mock(
+ return_value=[{}, {}, {}])
+ self.assertIsNone(self.ovs_dpdk.init(ATTRS))
+
+ def test_setup_ovs(self):
+ fake_path = '/fake_path'
+ fake_dpdk_nic_bind = 'dpdk_tool.py'
+ self.ovs_dpdk.ovs_properties = {'vpath': fake_path}
+ self.ovs_dpdk.dpdk_devbind = fake_dpdk_nic_bind
+ self.ovs_dpdk.networks = self.NETWORKS
+ self.ovs_dpdk.connection = mock.Mock()
+ self.ovs_dpdk.connection.execute = mock.Mock(return_value=(0, 0, 0))
+ create_from = fake_path + '/etc/openvswitch/conf.db'
+ create_to = fake_path + '/share/openvswitch/vswitch.ovsschema'
+ cmd_list = [
+ 'killall -r "ovs.*" -q | true',
+ 'mkdir -p {0}/etc/openvswitch'.format(fake_path),
+ 'mkdir -p {0}/var/run/openvswitch'.format(fake_path),
+ 'rm {0}/etc/openvswitch/conf.db | true'.format(fake_path),
+ 'ovsdb-tool create {0} {1}'.format(create_from, create_to),
+ 'modprobe vfio-pci',
+ 'chmod a+x /dev/vfio',
+ 'chmod 0666 /dev/vfio/*',
+ '{0} --force -b vfio-pci {1}'.format(fake_dpdk_nic_bind,
+ self.ovs_dpdk.networks['private_0']['phy_port']),
+ '{0} --force -b vfio-pci {1}'.format(fake_dpdk_nic_bind,
+ self.ovs_dpdk.networks['public_0']['phy_port'])
+ ]
+ calls = [mock.call(cmd, timeout=self.ovs_dpdk.CMD_TIMEOUT)
+ for cmd in cmd_list]
+
+ self.ovs_dpdk.setup_ovs()
+ self.ovs_dpdk.connection.execute.assert_has_calls(calls,
+ any_order=True)
+
+ def test_setup_ovs_exception(self):
+ self.ovs_dpdk.networks = self.NETWORKS
+ self.ovs_dpdk.connection = mock.Mock()
+ self.ovs_dpdk.connection.execute = mock.Mock(return_value=(1, 0, 0))
+
+ with self.assertRaises(exceptions.OVSSetupError):
+ self.ovs_dpdk.setup_ovs()
+
+ def test_start_ovs_serverswitch(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, "a", ""))
+ ssh.return_value = ssh_mock
+ self.ovs_dpdk.connection = ssh_mock
+ self.ovs_dpdk.networks = self.NETWORKS
+ self.ovs_dpdk.ovs_properties = {}
+ self.ovs_dpdk.wait_for_vswitchd = 0
+ self.assertIsNone(self.ovs_dpdk.start_ovs_serverswitch())
+
+ def test_setup_ovs_bridge_add_flows(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, "a", ""))
+ ssh.return_value = ssh_mock
+ self.ovs_dpdk.connection = ssh_mock
+ self.ovs_dpdk.networks = self.NETWORKS
+ self.ovs_dpdk.ovs_properties = {
+ 'version': {'ovs': '2.7.0'}
+ }
+ self.ovs_dpdk.wait_for_vswitchd = 0
+ self.assertIsNone(self.ovs_dpdk.setup_ovs_bridge_add_flows())
+ self.ovs_dpdk.ovs_properties.update(
+ {'dpdk_pmd-rxq-affinity': {'0': "0:1"}})
+ self.ovs_dpdk.ovs_properties.update(
+ {'vhost_pmd-rxq-affinity': {'0': "0:1"}})
+ self.NETWORKS['private_0'].update({'port_num': '0'})
+ self.NETWORKS['public_0'].update({'port_num': '1'})
+ self.ovs_dpdk.setup_ovs_bridge_add_flows()
+
+ @mock.patch("yardstick.ssh.SSH")
+ def test_cleanup_ovs_dpdk_env(self, mock_ssh):
+ mock_ssh.execute.return_value = 0, "a", ""
+ self.ovs_dpdk.connection = mock_ssh
+ self.ovs_dpdk.networks = self.NETWORKS
+ self.ovs_dpdk.ovs_properties = {
+ 'version': {'ovs': '2.7.0'}
+ }
+ self.ovs_dpdk.wait_for_vswitchd = 0
+ self.assertIsNone(self.ovs_dpdk.cleanup_ovs_dpdk_env())
+
+ @mock.patch.object(utils, 'get_nsb_option')
+ @mock.patch.object(model.OvsDeploy, 'ovs_deploy')
+ def test_check_ovs_dpdk_env(self, mock_ovs_deploy, mock_get_nsb_option):
+ self.ovs_dpdk.connection = mock.Mock()
+ self.ovs_dpdk.connection.execute = mock.Mock(
+ return_value=(1, 0, 0))
+ self.ovs_dpdk.networks = self.NETWORKS
+ self.ovs_dpdk.ovs_properties = {
+ 'version': {'ovs': '2.7.0', 'dpdk': '16.11.1'}
+ }
+ self.ovs_dpdk.wait_for_vswitchd = 0
+ self.ovs_dpdk.cleanup_ovs_dpdk_env = mock.Mock()
+ mock_get_nsb_option.return_value = 'fake_path'
+
+ self.ovs_dpdk.check_ovs_dpdk_env()
+ mock_ovs_deploy.assert_called_once()
+ mock_get_nsb_option.assert_called_once_with('bin_path')
+
+ def test_check_ovs_dpdk_env_wrong_version(self):
+ self.ovs_dpdk.connection = mock.Mock()
+ self.ovs_dpdk.connection.execute = mock.Mock(
+ return_value=(1, 0, 0))
+ self.ovs_dpdk.networks = self.NETWORKS
+ self.ovs_dpdk.ovs_properties = {
+ 'version': {'ovs': '0.0.1', 'dpdk': '9.8.7'}
+ }
+ self.ovs_dpdk.wait_for_vswitchd = 0
+ self.ovs_dpdk.cleanup_ovs_dpdk_env = mock.Mock()
+
+ with self.assertRaises(exceptions.OVSUnsupportedVersion):
+ self.ovs_dpdk.check_ovs_dpdk_env()
+
+ @mock.patch('yardstick.ssh.SSH')
+ def test_deploy(self, *args):
+ self.ovs_dpdk.vm_deploy = False
+ self.assertIsNone(self.ovs_dpdk.deploy())
+
+ self.ovs_dpdk.vm_deploy = True
+ self.ovs_dpdk.host_mgmt = {}
+ self.ovs_dpdk.install_req_libs = mock.Mock()
+ self.ovs_dpdk.helper.get_nic_details = mock.Mock(return_value={})
+ self.ovs_dpdk.check_ovs_dpdk_env = mock.Mock(return_value={})
+ self.ovs_dpdk.setup_ovs = mock.Mock(return_value={})
+ self.ovs_dpdk.start_ovs_serverswitch = mock.Mock(return_value={})
+ self.ovs_dpdk.setup_ovs_bridge_add_flows = mock.Mock(return_value={})
+ self.ovs_dpdk.setup_ovs_dpdk_context = mock.Mock(return_value={})
+ self.ovs_dpdk.wait_for_vnfs_to_start = mock.Mock(return_value={})
+        # TODO(elfoley): This test should check states/side effects instead of
+ # output.
+ self.assertIsNone(self.ovs_dpdk.deploy())
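+        # One way to address the TODO above (sketch only, not part of this
+        # change): assert on the mocked side effects instead, e.g.
+        #     self.assertTrue(self.ovs_dpdk.setup_ovs.called)
+        #     self.assertTrue(self.ovs_dpdk.setup_ovs_dpdk_context.called)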
+
+ @mock.patch.object(model.Libvirt, 'check_if_vm_exists_and_delete')
+ def test_undeploy(self, mock_libvirt):
+ self.ovs_dpdk.vm_deploy = True
+ self.ovs_dpdk.connection = mock.Mock()
+ self.ovs_dpdk.vm_names = ['vm-0', 'vm-1']
+ self.ovs_dpdk.drivers = ['vm-0', 'vm-1']
+ self.ovs_dpdk.cleanup_ovs_dpdk_env = mock.Mock()
+ self.ovs_dpdk.networks = self.NETWORKS
+ self.ovs_dpdk.undeploy()
+ mock_libvirt.assert_has_calls([
+ mock.call(self.ovs_dpdk.vm_names[0], self.ovs_dpdk.connection),
+ mock.call(self.ovs_dpdk.vm_names[1], self.ovs_dpdk.connection)
+ ])
+
+ def _get_file_abspath(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ file_path = os.path.join(curr_path, filename)
+ return file_path
+
+ def test__get_server_with_dic_attr_name(self):
+
+ self.ovs_dpdk.init(self.attrs)
+
+ attr_name = {'name': 'foo.bar'}
+ result = self.ovs_dpdk._get_server(attr_name)
+
+        self.assertIsNone(result)
+
+ def test__get_server_not_found(self):
+
+ self.ovs_dpdk.helper.parse_pod_file = mock.Mock(
+ return_value=[{}, {}, {}])
+ self.ovs_dpdk.init(self.attrs)
+
+ attr_name = 'bar.foo'
+ result = self.ovs_dpdk._get_server(attr_name)
+
+        self.assertIsNone(result)
+
+ def test__get_server_mismatch(self):
+
+ self.ovs_dpdk.init(self.attrs)
+
+ attr_name = 'bar.foo1'
+ result = self.ovs_dpdk._get_server(attr_name)
+
+        self.assertIsNone(result)
+
+ def test__get_server_duplicate(self):
+
+ self.attrs['file'] = self._get_file_abspath(self.NODES_DUPLICATE_SAMPLE)
+
+ self.ovs_dpdk.init(self.attrs)
+
+ attr_name = 'node1.foo-12345678'
+ with self.assertRaises(ValueError):
+ self.ovs_dpdk._get_server(attr_name)
+
+ def test__get_server_found(self):
+
+ self.ovs_dpdk.init(self.attrs)
+
+ attr_name = 'node1.foo-12345678'
+ result = self.ovs_dpdk._get_server(attr_name)
+
+ self.assertEqual(result['ip'], '10.229.47.137')
+ self.assertEqual(result['name'], 'node1.foo-12345678')
+ self.assertEqual(result['user'], 'root')
+ self.assertEqual(result['key_filename'], '/root/.yardstick_key')
+
+ def test__get_physical_node_for_server(self):
+ attrs = self.attrs
+ attrs.update({'servers': {'server1': {}}})
+ self.ovs_dpdk.init(attrs)
+
+ # When server is not from this context
+ result = self.ovs_dpdk._get_physical_node_for_server('server1.another-context')
+ self.assertIsNone(result)
+
+ # When node_name is not from this context
+ result = self.ovs_dpdk._get_physical_node_for_server('fake.foo-12345678')
+ self.assertIsNone(result)
+
+ result = self.ovs_dpdk._get_physical_node_for_server('server1.foo-12345678')
+ self.assertEqual(result, 'node5.foo')
+
+ # TODO(elfoley): Split this test for networks that exist and networks that
+ # don't
+ def test__get_network(self):
+ network1 = {
+ 'name': 'net_1',
+ 'vld_id': 'vld111',
+ 'segmentation_id': 'seg54',
+ 'network_type': 'type_a',
+ 'physical_network': 'phys',
+ }
+ network2 = {
+ 'name': 'net_2',
+ 'vld_id': 'vld999',
+ }
+ self.ovs_dpdk.networks = {
+ 'a': network1,
+ 'b': network2,
+ }
+
+ # Tests for networks that do not exist
+ attr_name = {}
+ self.assertIsNone(self.ovs_dpdk._get_network(attr_name))
+
+ attr_name = {'vld_id': 'vld777'}
+ self.assertIsNone(self.ovs_dpdk._get_network(attr_name))
+
+ self.assertIsNone(self.ovs_dpdk._get_network(None))
+
+ # TODO(elfoley): Split this test
+ attr_name = 'vld777'
+ self.assertIsNone(self.ovs_dpdk._get_network(attr_name))
+
+ # Tests for networks that exist
+ attr_name = {'vld_id': 'vld999'}
+ expected = {
+ "name": 'net_2',
+ "vld_id": 'vld999',
+ "segmentation_id": None,
+ "network_type": None,
+ "physical_network": None,
+ }
+ result = self.ovs_dpdk._get_network(attr_name)
+ self.assertDictEqual(result, expected)
+
+ attr_name = 'a'
+ expected = network1
+ result = self.ovs_dpdk._get_network(attr_name)
+ self.assertDictEqual(result, expected)
+
+ def test_configure_nics_for_ovs_dpdk(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, "a", ""))
+ ssh.return_value = ssh_mock
+ self.ovs_dpdk.vm_deploy = True
+ self.ovs_dpdk.connection = ssh_mock
+ self.ovs_dpdk.vm_names = ['vm-0', 'vm-1']
+ self.ovs_dpdk.drivers = []
+ self.ovs_dpdk.networks = self.NETWORKS
+ self.ovs_dpdk.helper.get_mac_address = mock.Mock(return_value="")
+ self.ovs_dpdk.get_vf_datas = mock.Mock(return_value="")
+ self.assertIsNone(self.ovs_dpdk.configure_nics_for_ovs_dpdk())
+
+ @mock.patch.object(model.Libvirt, 'add_ovs_interface')
+ def test__enable_interfaces(self, mock_add_ovs_interface):
+ self.ovs_dpdk.vm_deploy = True
+ self.ovs_dpdk.connection = mock.Mock()
+ self.ovs_dpdk.vm_names = ['vm-0', 'vm-1']
+ self.ovs_dpdk.drivers = []
+ self.ovs_dpdk.networks = self.NETWORKS
+ self.ovs_dpdk.ovs_properties = {'vpath': 'fake_path'}
+ self.ovs_dpdk.get_vf_datas = mock.Mock(return_value="")
+ self.ovs_dpdk._enable_interfaces(0, ["private_0"], 'test')
+ mock_add_ovs_interface.assert_called_once_with(
+ 'fake_path', 0, self.NETWORKS['private_0']['vpci'],
+ self.NETWORKS['private_0']['mac'], 'test', 1)
+
+ @mock.patch.object(ovs_dpdk.OvsDpdkContext, '_check_hugepages')
+ @mock.patch.object(common_utils, 'setup_hugepages')
+ @mock.patch.object(model.StandaloneContextHelper, 'check_update_key')
+ @mock.patch.object(model.Libvirt, 'write_file')
+ @mock.patch.object(model.Libvirt, 'build_vm_xml')
+ @mock.patch.object(model.Libvirt, 'check_if_vm_exists_and_delete')
+ @mock.patch.object(model.Libvirt, 'virsh_create_vm')
+ def test_setup_ovs_dpdk_context(self, mock_create_vm, mock_check_if_exists,
+ mock_build_xml, mock_write_file,
+ mock_check_update_key,
+ mock_setup_hugepages,
+ mock__check_hugepages):
+ self.ovs_dpdk.vm_deploy = True
+ self.ovs_dpdk.connection = mock.Mock()
+ self.ovs_dpdk.vm_names = ['vm-0', 'vm-1']
+ self.ovs_dpdk.drivers = []
+ self.ovs_dpdk.servers = {
+ 'vnf_0': {
+ 'network_ports': {
+ 'mgmt': {'cidr': '152.16.100.10/24'},
+ 'xe0': ['private_0'],
+ 'xe1': ['public_0']
+ }
+ }
+ }
+ self.ovs_dpdk.networks = self.NETWORKS
+ self.ovs_dpdk.host_mgmt = {}
+ self.ovs_dpdk.vm_flavor = {'ram': '1024'}
+ self.ovs_dpdk.file_path = '/var/lib/libvirt/images/cdrom-0.img'
+ self.ovs_dpdk.configure_nics_for_ovs_dpdk = mock.Mock(return_value="")
+ self.ovs_dpdk._name_task_id = 'fake_name'
+ xml_str = 'vm-0'
+ self.ovs_dpdk.mac = '00:00:00:00:00:01'
+ mock_build_xml.return_value = (xml_str, self.ovs_dpdk.mac)
+ self.ovs_dpdk._enable_interfaces = mock.Mock(return_value=xml_str)
+ vnf_instance = mock.Mock()
+ vnf_instance_2 = mock.Mock()
+ mock_check_update_key.return_value = vnf_instance_2
+ self.ovs_dpdk.vnf_node.generate_vnf_instance = mock.Mock(
+ return_value=vnf_instance)
+
+ self.assertEqual([vnf_instance_2],
+ self.ovs_dpdk.setup_ovs_dpdk_context())
+ mock_setup_hugepages.assert_called_once_with(self.ovs_dpdk.connection,
+ (1024 + 4096) * 1024) # ram + dpdk_socket0_mem + dpdk_socket1_mem
+ mock__check_hugepages.assert_called_once()
+ mock_create_vm.assert_called_once_with(
+ self.ovs_dpdk.connection, '/tmp/vm_ovs_0.xml')
+ mock_check_if_exists.assert_called_once_with(
+ 'vm-0', self.ovs_dpdk.connection)
+ mock_build_xml.assert_called_once_with(
+ self.ovs_dpdk.connection, self.ovs_dpdk.vm_flavor, 'vm-0', 0, self.ovs_dpdk.file_path)
+ mock_write_file.assert_called_once_with('/tmp/vm_ovs_0.xml', xml_str)
+ mock_check_update_key.assert_called_once_with(self.ovs_dpdk.connection,
+ vnf_instance,
+ xml_str,
+ self.ovs_dpdk._name_task_id,
+ self.ovs_dpdk.file_path,
+ self.ovs_dpdk.mac)
+
+ @mock.patch.object(io, 'BytesIO')
+ def test__check_hugepages(self, mock_bytesio):
+ data = six.BytesIO('HugePages_Total: 20\n'
+ 'HugePages_Free: 20\n'
+ 'HugePages_Rsvd: 0\n'
+ 'HugePages_Surp: 0\n'
+ 'Hugepagesize: 1048576 kB'.encode())
+ mock_bytesio.return_value = data
+ self.ovs_dpdk.connection = mock.Mock()
+ self.ovs_dpdk._check_hugepages()
+
+ @mock.patch.object(io, 'BytesIO')
+ def test__check_hugepages_no_info(self, mock_bytesio):
+ data = six.BytesIO(''.encode())
+ mock_bytesio.return_value = data
+ self.ovs_dpdk.connection = mock.Mock()
+ with self.assertRaises(exceptions.OVSHugepagesInfoError):
+ self.ovs_dpdk._check_hugepages()
+
+ @mock.patch.object(io, 'BytesIO')
+ def test__check_hugepages_no_total_hp(self, mock_bytesio):
+ data = six.BytesIO('HugePages_Total: 0\n'
+ 'HugePages_Free: 0\n'
+ 'HugePages_Rsvd: 0\n'
+ 'HugePages_Surp: 0\n'
+ 'Hugepagesize: 1048576 kB'.encode())
+ mock_bytesio.return_value = data
+ self.ovs_dpdk.connection = mock.Mock()
+ with self.assertRaises(exceptions.OVSHugepagesNotConfigured):
+ self.ovs_dpdk._check_hugepages()
+
+ @mock.patch.object(io, 'BytesIO')
+ def test__check_hugepages_no_free_hp(self, mock_bytesio):
+ data = six.BytesIO('HugePages_Total: 20\n'
+ 'HugePages_Free: 0\n'
+ 'HugePages_Rsvd: 0\n'
+ 'HugePages_Surp: 0\n'
+ 'Hugepagesize: 1048576 kB'.encode())
+ mock_bytesio.return_value = data
+ self.ovs_dpdk.connection = mock.Mock()
+ with self.assertRaises(exceptions.OVSHugepagesZeroFree) as exc:
+ self.ovs_dpdk._check_hugepages()
+ self.assertEqual('There are no HugePages free in this system. Total '
+ 'HugePages configured: 20', exc.exception.msg)
diff --git a/yardstick/tests/unit/benchmark/contexts/standalone/test_sriov.py b/yardstick/tests/unit/benchmark/contexts/standalone/test_sriov.py
new file mode 100644
index 000000000..0809a983a
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/standalone/test_sriov.py
@@ -0,0 +1,362 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+import mock
+import unittest
+
+from yardstick import ssh
+from yardstick.benchmark import contexts
+from yardstick.benchmark.contexts import base
+from yardstick.benchmark.contexts.standalone import model
+from yardstick.benchmark.contexts.standalone import sriov
+from yardstick.common import utils
+
+
+class SriovContextTestCase(unittest.TestCase):
+
+ NODES_SAMPLE = "nodes_sample.yaml"
+ NODES_SRIOV_SAMPLE = "nodes_sriov_sample.yaml"
+ NODES_DUPLICATE_SAMPLE = "nodes_duplicate_sample.yaml"
+
+ ATTRS = {
+ 'name': contexts.CONTEXT_STANDALONESRIOV,
+ 'task_id': '1234567890',
+ 'file': 'pod',
+ 'flavor': {},
+ 'servers': {},
+ 'networks': {},
+ }
+
+ NETWORKS = {
+ 'mgmt': {'cidr': '152.16.100.10/24'},
+ 'private_0': {
+ 'phy_port': "0000:05:00.0",
+ 'vpci': "0000:00:07.0",
+ 'cidr': '152.16.100.10/24',
+ 'interface': 'if0',
+ 'mac': "00:00:00:00:00:01",
+ 'vf_pci': {'vf_pci': 0},
+ 'gateway_ip': '152.16.100.20'},
+ 'public_0': {
+ 'phy_port': "0000:05:00.1",
+ 'vpci': "0000:00:08.0",
+ 'cidr': '152.16.40.10/24',
+ 'interface': 'if0',
+ 'vf_pci': {'vf_pci': 0},
+ 'mac': "00:00:00:00:00:01",
+ 'gateway_ip': '152.16.100.20'},
+ }
+
+ def setUp(self):
+ self.attrs = {
+ 'name': 'foo',
+ 'task_id': '1234567890',
+ 'file': self._get_file_abspath(self.NODES_SRIOV_SAMPLE),
+ }
+ self.sriov = sriov.SriovContext()
+ self.addCleanup(self._remove_contexts)
+
+ @staticmethod
+ def _remove_contexts():
+ for context in base.Context.list:
+ context._delete_context()
+ base.Context.list = []
+
+ @mock.patch.object(model, 'StandaloneContextHelper')
+ @mock.patch.object(model, 'Libvirt')
+ @mock.patch.object(model, 'Server')
+ def test___init__(self, mock_helper, mock_libvirt, mock_server):
+ # pylint: disable=unused-argument
+ # NOTE(ralonsoh): this test doesn't cover function execution.
+ self.sriov.helper = mock_helper
+ self.sriov.vnf_node = mock_server
+ self.assertIsNone(self.sriov.file_path)
+ self.assertTrue(self.sriov.first_run)
+
+ def test_init(self):
+ self.sriov.helper.parse_pod_file = mock.Mock(return_value=[{}, {}, {}])
+ self.assertIsNone(self.sriov.init(self.ATTRS))
+
+ @mock.patch.object(ssh, 'SSH', return_value=(0, "a", ""))
+ def test_deploy(self, *args):
+ # NOTE(ralonsoh): this test doesn't cover function execution.
+ self.sriov.vm_deploy = False
+ self.assertIsNone(self.sriov.deploy())
+
+ self.sriov.vm_deploy = True
+ self.sriov.host_mgmt = {}
+ self.sriov.install_req_libs = mock.Mock()
+ self.sriov.get_nic_details = mock.Mock(return_value={})
+ self.sriov.setup_sriov_context = mock.Mock(return_value={})
+ self.sriov.wait_for_vnfs_to_start = mock.Mock(return_value={})
+ self.assertIsNone(self.sriov.deploy())
+
+ @mock.patch.object(ssh, 'SSH', return_value=(0, "a", ""))
+ @mock.patch.object(model, 'Libvirt')
+ def test_undeploy(self, mock_libvirt, mock_ssh):
+ # pylint: disable=unused-argument
+ # NOTE(ralonsoh): the pylint exception should be removed.
+ self.sriov.vm_deploy = False
+ self.assertIsNone(self.sriov.undeploy())
+
+ self.sriov.vm_deploy = True
+ self.sriov.connection = mock_ssh
+ self.sriov.vm_names = ['vm-0', 'vm-1']
+ self.sriov.drivers = ['vm-0', 'vm-1']
+ self.assertIsNone(self.sriov.undeploy())
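+        # A possible way to drop the pylint exception noted above (sketch
+        # only, not part of this change): use the mocked Libvirt class in an
+        # assertion, e.g.
+        #     self.assertTrue(
+        #         mock_libvirt.check_if_vm_exists_and_delete.called)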
+
+ def _get_file_abspath(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ file_path = os.path.join(curr_path, filename)
+ return file_path
+
+ def test__get_server_with_dic_attr_name(self):
+
+ self.sriov.init(self.attrs)
+
+ attr_name = {'name': 'foo.bar'}
+ result = self.sriov._get_server(attr_name)
+
+        self.assertIsNone(result)
+
+ def test__get_server_not_found(self):
+
+ self.sriov.helper.parse_pod_file = mock.Mock(return_value=[{}, {}, {}])
+ self.sriov.init(self.attrs)
+
+ attr_name = 'bar.foo'
+ result = self.sriov._get_server(attr_name)
+
+        self.assertIsNone(result)
+
+ def test__get_server_mismatch(self):
+
+ self.sriov.init(self.attrs)
+
+ attr_name = 'bar.foo1'
+ result = self.sriov._get_server(attr_name)
+
+        self.assertIsNone(result)
+
+ def test__get_server_duplicate(self):
+
+ self.attrs['file'] = self._get_file_abspath(self.NODES_DUPLICATE_SAMPLE)
+
+ self.sriov.init(self.attrs)
+
+ attr_name = 'node1.foo-12345678'
+ with self.assertRaises(ValueError):
+ self.sriov._get_server(attr_name)
+
+ def test__get_server_found(self):
+
+ self.sriov.init(self.attrs)
+
+ attr_name = 'node1.foo-12345678'
+ result = self.sriov._get_server(attr_name)
+
+ self.assertEqual(result['ip'], '10.229.47.137')
+ self.assertEqual(result['name'], 'node1.foo-12345678')
+ self.assertEqual(result['user'], 'root')
+ self.assertEqual(result['key_filename'], '/root/.yardstick_key')
+
+ def test__get_physical_node_for_server(self):
+ attrs = self.attrs
+ attrs.update({'servers': {'server1': {}}})
+ self.sriov.init(attrs)
+
+ # When server is not from this context
+ result = self.sriov._get_physical_node_for_server('server1.another-context')
+ self.assertIsNone(result)
+
+ # When node_name is not from this context
+ result = self.sriov._get_physical_node_for_server('fake.foo-12345678')
+ self.assertIsNone(result)
+
+ result = self.sriov._get_physical_node_for_server('server1.foo-12345678')
+ self.assertEqual(result, 'node5.foo')
+
+ def test__get_server_no_task_id(self):
+ self.attrs['flags'] = {'no_setup': True}
+ self.sriov.init(self.attrs)
+
+ attr_name = 'node1.foo'
+ result = self.sriov._get_server(attr_name)
+
+ self.assertEqual(result['ip'], '10.229.47.137')
+ self.assertEqual(result['name'], 'node1.foo')
+ self.assertEqual(result['user'], 'root')
+ self.assertEqual(result['key_filename'], '/root/.yardstick_key')
+
+ # TODO(elfoley): Split this test
+ # There are at least two sets of inputs/outputs
+ def test__get_network(self):
+ network1 = {
+ 'name': 'net_1',
+ 'vld_id': 'vld111',
+ 'segmentation_id': 'seg54',
+ 'network_type': 'type_a',
+ 'physical_network': 'phys',
+ }
+ network2 = {
+ 'name': 'net_2',
+ 'vld_id': 'vld999',
+ }
+ self.sriov.networks = {
+ 'a': network1,
+ 'b': network2,
+ }
+
+ attr_name = {}
+ self.assertIsNone(self.sriov._get_network(attr_name))
+
+ attr_name = {'vld_id': 'vld777'}
+ self.assertIsNone(self.sriov._get_network(attr_name))
+
+ self.assertIsNone(self.sriov._get_network(None))
+
+ attr_name = 'vld777'
+ self.assertIsNone(self.sriov._get_network(attr_name))
+
+ attr_name = {'vld_id': 'vld999'}
+ expected = {
+ "name": 'net_2',
+ "vld_id": 'vld999',
+ "segmentation_id": None,
+ "network_type": None,
+ "physical_network": None,
+ }
+ result = self.sriov._get_network(attr_name)
+ self.assertDictEqual(result, expected)
+
+ attr_name = 'a'
+ expected = network1
+ result = self.sriov._get_network(attr_name)
+ self.assertDictEqual(result, expected)
+
+ def test_configure_nics_for_sriov(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, "a", ""))
+ ssh.return_value = ssh_mock
+ self.sriov.vm_deploy = True
+ self.sriov.connection = ssh_mock
+ self.sriov.vm_names = ['vm-0', 'vm-1']
+ self.sriov.drivers = []
+ self.sriov.networks = self.NETWORKS
+ self.sriov.helper.get_mac_address = mock.Mock(return_value="")
+ self.sriov._get_vf_data = mock.Mock(return_value="")
+ self.assertIsNone(self.sriov.configure_nics_for_sriov())
+
+ @mock.patch.object(ssh, 'SSH', return_value=(0, "a", ""))
+ @mock.patch.object(model.Libvirt, 'add_sriov_interfaces',
+ return_value='out_xml')
+ def test__enable_interfaces(self, mock_add_sriov, mock_ssh):
+ self.sriov.vm_deploy = True
+ self.sriov.connection = mock_ssh
+ self.sriov.vm_names = ['vm-0', 'vm-1']
+ self.sriov.drivers = []
+ self.sriov.networks = self.NETWORKS
+ self.assertEqual(
+ 'out_xml',
+ self.sriov._enable_interfaces(0, 0, ['private_0'], 'test'))
+ mock_add_sriov.assert_called_once_with(
+ '0000:00:0a.0', 0, self.NETWORKS['private_0']['mac'], 'test')
+
+ @mock.patch.object(utils, 'setup_hugepages')
+ @mock.patch.object(model.StandaloneContextHelper, 'check_update_key')
+ @mock.patch.object(model.Libvirt, 'build_vm_xml')
+ @mock.patch.object(model.Libvirt, 'check_if_vm_exists_and_delete')
+ @mock.patch.object(model.Libvirt, 'write_file')
+ @mock.patch.object(model.Libvirt, 'virsh_create_vm')
+ def test_setup_sriov_context(self, mock_create_vm, mock_write_file,
+ mock_check, mock_build_vm_xml,
+ mock_check_update_key, mock_setup_hugepages):
+ self.sriov.servers = {
+ 'vnf_0': {
+ 'network_ports': {
+ 'mgmt': {'cidr': '152.16.100.10/24'},
+ 'xe0': ['private_0'],
+ 'xe1': ['public_0']
+ }
+ }
+ }
+ connection = mock.Mock()
+ self.sriov.connection = connection
+ self.sriov.host_mgmt = {'ip': '1.2.3.4'}
+ self.sriov.vm_flavor = {'ram': '1024'}
+ self.sriov.networks = 'networks'
+ self.sriov.configure_nics_for_sriov = mock.Mock()
+ self.sriov._name_task_id = 'fake_name'
+ cfg = '/tmp/vm_sriov_0.xml'
+ vm_name = 'vm-0'
+ mac = '00:00:00:00:00:01'
+ xml_out = mock.Mock()
+ mock_build_vm_xml.return_value = (xml_out, mac)
+ mock_check_update_key.return_value = 'node_2'
+ cdrom_img = '/var/lib/libvirt/images/cdrom-0.img'
+
+ with mock.patch.object(self.sriov, 'vnf_node') as mock_vnf_node, \
+ mock.patch.object(self.sriov, '_enable_interfaces') as \
+ mock_enable_interfaces:
+ mock_enable_interfaces.return_value = 'out_xml'
+ mock_vnf_node.generate_vnf_instance = mock.Mock(
+ return_value='node_1')
+ nodes_out = self.sriov.setup_sriov_context()
+ mock_setup_hugepages.assert_called_once_with(connection, 1024*1024)
+ mock_check_update_key.assert_called_once_with(connection, 'node_1', vm_name,
+ self.sriov._name_task_id, cdrom_img,
+ mac)
+ self.assertEqual(['node_2'], nodes_out)
+ mock_vnf_node.generate_vnf_instance.assert_called_once_with(
+ self.sriov.vm_flavor, 'networks', '1.2.3.4', 'vnf_0',
+ self.sriov.servers['vnf_0'], '00:00:00:00:00:01')
+ mock_build_vm_xml.assert_called_once_with(
+ connection, self.sriov.vm_flavor, vm_name, 0, cdrom_img)
+ mock_create_vm.assert_called_once_with(connection, cfg)
+ mock_check.assert_called_once_with(vm_name, connection)
+ mock_write_file.assert_called_once_with(cfg, 'out_xml')
+ mock_enable_interfaces.assert_has_calls([
+ mock.call(0, mock.ANY, ['private_0'], mock.ANY),
+ mock.call(0, mock.ANY, ['public_0'], mock.ANY)], any_order=True)
+
+ def test__get_vf_data(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, "a", ""))
+ ssh_mock.put = \
+ mock.Mock(return_value=(0, "a", ""))
+ ssh.return_value = ssh_mock
+ self.sriov.vm_deploy = True
+ self.sriov.connection = ssh_mock
+ self.sriov.vm_names = ['vm-0', 'vm-1']
+ self.sriov.drivers = []
+ self.sriov.servers = {
+ 'vnf_0': {
+ 'network_ports': {
+ 'mgmt': {'cidr': '152.16.100.10/24'},
+ 'xe0': ['private_0'],
+ 'xe1': ['public_0']
+ }
+ }
+ }
+ self.sriov.networks = self.NETWORKS
+ self.sriov.helper.get_virtual_devices = mock.Mock(
+ return_value={'0000:00:01.0': ''})
+ self.assertIsNotNone(self.sriov._get_vf_data(
+ '0000:00:01.0', '00:00:00:00:00:01', 'if0'))
diff --git a/yardstick/tests/unit/benchmark/contexts/standalone_duplicate_sample.yaml b/yardstick/tests/unit/benchmark/contexts/standalone_duplicate_sample.yaml
new file mode 100644
index 000000000..e468d0465
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/standalone_duplicate_sample.yaml
@@ -0,0 +1,135 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+---
+# Sample config file about the POD information, including the
+# name/IP/user/ssh key of Bare Metal and Controllers/Computes
+#
+# The options of this config file include:
+# name: the name of this node
+# role: the node's role; supported roles: Master/Controller/Compute/BareMetal
+# ip: the node's IP address
+# user: the username for login
+# key_filename: the path of the private key file for login
+
+nodes:
+-
+ name: node1
+ role: TrafficGen
+ ip: 1.1.1.1
+ user: root
+ password: r00t
+ interfaces:
+ xe0: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:05:00.0"
+ driver: i40e
+ dpdk_port_num: 0
+ local_ip: "152.16.100.20"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:01"
+ xe1: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:05:00.1"
+ driver: i40e
+ dpdk_port_num: 1
+ local_ip: "152.16.100.21"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:02"
+-
+ name: node2
+ role: nfvi_node
+ class: OvsDpdk
+ ip: 1.1.1.2
+ user: root
+ password: r00t
+ vports:
+ - dpdkvhostuser0
+ - dpdkvhostuser1
+ vports_mac:
+ - "00:00:00:00:00:03"
+ - "00:00:00:00:00:04"
+ phy_ports: # Physical ports to configure sriov
+ - "0000:05:00.0"
+ - "0000:05:00.1"
+ flow:
+ - ovs-ofctl add-flow br0 in_port=1,action=output:3
+ - ovs-ofctl add-flow br0 in_port=3,action=output:1
+ - ovs-ofctl add-flow br0 in_port=4,action=output:2
+ - ovs-ofctl add-flow br0 in_port=2,action=output:4
+ phy_driver: i40e # kernel driver
+ images: "/var/lib/libvirt/images/ubuntu.qcow2"
+-
+ name: node2
+ role: nfvi_node
+ class: OvsDpdk
+ ip: 1.1.1.5
+ user: root
+ password: r00t
+ vports:
+ - dpdkvhostuser0
+ - dpdkvhostuser1
+ vports_mac:
+ - "00:00:00:00:00:03"
+ - "00:00:00:00:00:04"
+ phy_ports: # Physical ports to configure sriov
+ - "0000:05:00.0"
+ - "0000:05:00.1"
+ flow:
+ - ovs-ofctl add-flow br0 in_port=1,action=output:3
+ - ovs-ofctl add-flow br0 in_port=3,action=output:1
+ - ovs-ofctl add-flow br0 in_port=4,action=output:2
+ - ovs-ofctl add-flow br0 in_port=2,action=output:4
+ phy_driver: i40e # kernel driver
+ images: "/var/lib/libvirt/images/ubuntu.qcow2"
+
+-
+ name: node3
+ role: vnf
+ ip: 1.1.1.3
+ user: root
+ password: r00t
+ host: 1.1.1.1
+ interfaces:
+ xe0: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:00:04.0"
+ driver: virtio-pci
+ dpdk_port_num: 0
+ local_ip: "152.16.100.19"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:05"
+
+ xe1: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:00:05.0"
+ driver: virtio-pci
+ dpdk_port_num: 1
+ local_ip: "152.16.40.19"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:06"
+ routing_table:
+ - network: "152.16.100.20"
+ netmask: "255.255.255.0"
+ gateway: "152.16.100.20"
+ if: "xe0"
+ - network: "152.16.40.20"
+ netmask: "255.255.255.0"
+ gateway: "152.16.40.20"
+ if: "xe1"
+ nd_route_tbl:
+ - network: "0064:ff9b:0:0:0:0:9810:6414"
+ netmask: "112"
+ gateway: "0064:ff9b:0:0:0:0:9810:6414"
+ if: "xe0"
+ - network: "0064:ff9b:0:0:0:0:9810:2814"
+ netmask: "112"
+ gateway: "0064:ff9b:0:0:0:0:9810:2814"
+ if: "xe1"
diff --git a/yardstick/tests/unit/benchmark/contexts/standalone_sample.yaml b/yardstick/tests/unit/benchmark/contexts/standalone_sample.yaml
new file mode 100644
index 000000000..95e12d62f
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/standalone_sample.yaml
@@ -0,0 +1,112 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+---
+# Sample config file about the POD information, including the
+# name/IP/user/ssh key of Bare Metal and Controllers/Computes
+#
+# The options of this config file include:
+# name: the name of this node
+# role: the node's role; supported roles: Master/Controller/Compute/BareMetal
+# ip: the node's IP address
+# user: the username for login
+# key_filename: the path of the private key file for login
+
+nodes:
+-
+ name: node1
+ role: TrafficGen
+ ip: 1.1.1.1
+ user: root
+ password: r00t
+ interfaces:
+ xe0: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:05:00.0"
+ driver: i40e
+ dpdk_port_num: 0
+ local_ip: "152.16.100.20"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:01"
+ xe1: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:05:00.1"
+ driver: i40e
+ dpdk_port_num: 1
+ local_ip: "152.16.100.21"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:02"
+-
+ name: node2
+ role: nfvi_node
+ class: OvsDpdk
+ ip: 1.1.1.2
+ user: root
+ password: r00t
+ vports:
+ - dpdkvhostuser0
+ - dpdkvhostuser1
+ vports_mac:
+ - "00:00:00:00:00:03"
+ - "00:00:00:00:00:04"
+ phy_ports: # Physical ports to configure sriov
+ - "0000:05:00.0"
+ - "0000:05:00.1"
+ flow:
+ - ovs-ofctl add-flow br0 in_port=1,action=output:3
+ - ovs-ofctl add-flow br0 in_port=3,action=output:1
+ - ovs-ofctl add-flow br0 in_port=4,action=output:2
+ - ovs-ofctl add-flow br0 in_port=2,action=output:4
+ phy_driver: i40e # kernel driver
+ images: "/var/lib/libvirt/images/ubuntu.qcow2"
+
+-
+ name: node3
+ role: vnf
+ ip: 1.1.1.3
+ user: root
+ password: r00t
+ host: 1.1.1.1
+ interfaces:
+ xe0: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:00:04.0"
+ driver: virtio-pci
+ dpdk_port_num: 0
+ local_ip: "152.16.100.19"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:05"
+
+ xe1: # logical name from topology.yaml and vnfd.yaml
+ vpci: "0000:00:05.0"
+ driver: virtio-pci
+ dpdk_port_num: 1
+ local_ip: "152.16.40.19"
+ netmask: "255.255.255.0"
+ local_mac: "00:00:00:00:00:06"
+ routing_table:
+ - network: "152.16.100.20"
+ netmask: "255.255.255.0"
+ gateway: "152.16.100.20"
+ if: "xe0"
+ - network: "152.16.40.20"
+ netmask: "255.255.255.0"
+ gateway: "152.16.40.20"
+ if: "xe1"
+ nd_route_tbl:
+ - network: "0064:ff9b:0:0:0:0:9810:6414"
+ netmask: "112"
+ gateway: "0064:ff9b:0:0:0:0:9810:6414"
+ if: "xe0"
+ - network: "0064:ff9b:0:0:0:0:9810:2814"
+ netmask: "112"
+ gateway: "0064:ff9b:0:0:0:0:9810:2814"
+ if: "xe1"
diff --git a/yardstick/tests/unit/benchmark/contexts/test_base.py b/yardstick/tests/unit/benchmark/contexts/test_base.py
new file mode 100644
index 000000000..5fd7352f5
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/test_base.py
@@ -0,0 +1,174 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import errno
+
+import mock
+
+from yardstick.benchmark.contexts import base
+from yardstick.benchmark.contexts.base import Context
+from yardstick.common import yaml_loader
+from yardstick.tests.unit import base as ut_base
+from yardstick.common.constants import YARDSTICK_ROOT_PATH
+
+
+class DummyContextClass(Context):
+
+ __context_type__ = "Dummy"
+
+ def __init__(self, host_name_separator='.'):
+        super(DummyContextClass, self).__init__(
+            host_name_separator=host_name_separator)
+ self.nodes = []
+ self.controllers = []
+ self.computes = []
+ self.baremetals = []
+
+ def _get_network(self, *args):
+ pass
+
+ def _get_server(self, *args):
+ pass
+
+ def deploy(self):
+ pass
+
+ def undeploy(self):
+ pass
+
+ def _get_physical_nodes(self):
+ pass
+
+ def _get_physical_node_for_server(self, server_name):
+ pass
+
+
+class FlagsTestCase(ut_base.BaseUnitTestCase):
+
+ def setUp(self):
+ self.flags = base.Flags()
+
+ def test___init__(self):
+ self.assertFalse(self.flags.no_setup)
+ self.assertFalse(self.flags.no_teardown)
+ self.assertEqual({'verify': False}, self.flags.os_cloud_config)
+
+ def test___init__with_flags(self):
+ flags = base.Flags(no_setup=True)
+ self.assertTrue(flags.no_setup)
+ self.assertFalse(flags.no_teardown)
+
+ def test_parse(self):
+ self.flags.parse(no_setup=True, no_teardown='False',
+ os_cloud_config={'verify': True})
+
+ self.assertTrue(self.flags.no_setup)
+ self.assertEqual('False', self.flags.no_teardown)
+ self.assertEqual({'verify': True}, self.flags.os_cloud_config)
+
+ def test_parse_forbidden_flags(self):
+ self.flags.parse(foo=42)
+ with self.assertRaises(AttributeError):
+ _ = self.flags.foo
+
+
+class ContextTestCase(ut_base.BaseUnitTestCase):
+
+ @staticmethod
+ def _remove_ctx(ctx_obj):
+ if ctx_obj in base.Context.list:
+ base.Context.list.remove(ctx_obj)
+
+ def test_split_host_name(self):
+ ctx_obj = DummyContextClass()
+ self.addCleanup(self._remove_ctx, ctx_obj)
+ config_name = 'host_name.ctx_name'
+ self.assertEqual(('host_name', 'ctx_name'),
+ ctx_obj.split_host_name(config_name))
+
+ def test_split_host_name_wrong_separator(self):
+ ctx_obj = DummyContextClass()
+ self.addCleanup(self._remove_ctx, ctx_obj)
+ config_name = 'host_name-ctx_name'
+ self.assertEqual((None, None),
+ ctx_obj.split_host_name(config_name))
+
+ def test_split_host_name_other_separator(self):
+ ctx_obj = DummyContextClass(host_name_separator='-')
+ self.addCleanup(self._remove_ctx, ctx_obj)
+ config_name = 'host_name-ctx_name'
+ self.assertEqual(('host_name', 'ctx_name'),
+ ctx_obj.split_host_name(config_name))
+
+ def test_get_physical_nodes(self):
+ ctx_obj = DummyContextClass()
+ self.addCleanup(self._remove_ctx, ctx_obj)
+
+ result = Context.get_physical_nodes()
+
+ self.assertEqual(result, {None: None})
+
+ @mock.patch.object(Context, 'get_context_from_server')
+ def test_get_physical_node_from_server(self, mock_get_ctx):
+ ctx_obj = DummyContextClass()
+ self.addCleanup(self._remove_ctx, ctx_obj)
+
+ mock_get_ctx.return_value = ctx_obj
+
+ result = Context.get_physical_node_from_server("mock_server")
+
+ mock_get_ctx.assert_called_once()
+ self.assertIsNone(result)
+
+ @mock.patch.object(yaml_loader, 'read_yaml_file')
+ def test_read_pod_file(self, mock_read_yaml_file):
+ attrs = {'name': 'foo',
+ 'task_id': '12345678',
+ 'file': 'pod.yaml'
+ }
+
+ ctx_obj = DummyContextClass()
+ cfg = {"nodes": [
+ {
+ "name": "node1",
+ "role": "Controller",
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": "/root/.yardstick_key"
+ },
+ {
+ "name": "node2",
+ "role": "Compute",
+ "ip": "10.229.47.139",
+ "user": "root",
+ "key_filename": "/root/.yardstick_key"
+ }
+ ]
+ }
+
+ mock_read_yaml_file.return_value = cfg
+ result = ctx_obj.read_pod_file(attrs)
+ self.assertEqual(result, cfg)
+
+ mock_read_yaml_file.side_effect = IOError(errno.EPERM, '')
+ with self.assertRaises(IOError):
+ ctx_obj.read_pod_file(attrs)
+
+ mock_read_yaml_file.side_effect = IOError(errno.ENOENT, '')
+ with self.assertRaises(IOError):
+ ctx_obj.read_pod_file(attrs)
+
+ file_path = os.path.join(YARDSTICK_ROOT_PATH, 'pod.yaml')
+ self.assertEqual(ctx_obj.file_path, file_path)
diff --git a/yardstick/tests/unit/benchmark/contexts/test_dummy.py b/yardstick/tests/unit/benchmark/contexts/test_dummy.py
new file mode 100644
index 000000000..33832375f
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/test_dummy.py
@@ -0,0 +1,86 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import unittest
+
+from yardstick.benchmark.contexts import base
+from yardstick.benchmark.contexts import dummy
+
+
+class DummyContextTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.attrs = {
+ 'name': 'foo',
+ 'task_id': '1234567890',
+ }
+ self.test_context = dummy.DummyContext()
+ self.addCleanup(self._delete_contexts)
+
+ @staticmethod
+ def _delete_contexts():
+ for context in base.Context.list:
+ context._delete_context()
+
+ def test___init__(self):
+ self.assertFalse(self.test_context._flags.no_setup)
+ self.assertFalse(self.test_context._flags.no_teardown)
+ self.assertIsNone(self.test_context._name)
+ self.assertIsNone(self.test_context._task_id)
+
+ def test_init(self):
+ self.test_context.init(self.attrs)
+ self.assertEqual(self.test_context._name, 'foo')
+ self.assertEqual(self.test_context._task_id, '1234567890')
+ self.assertFalse(self.test_context._flags.no_setup)
+ self.assertFalse(self.test_context._flags.no_teardown)
+
+ self.assertEqual(self.test_context.name, 'foo-12345678')
+ self.assertEqual(self.test_context.assigned_name, 'foo')
+
+ def test_init_flags_no_setup(self):
+ self.attrs['flags'] = {'no_setup': True, 'no_teardown': False}
+
+ self.test_context.init(self.attrs)
+
+ self.assertEqual(self.test_context._name, 'foo')
+ self.assertEqual(self.test_context._task_id, '1234567890')
+ self.assertTrue(self.test_context._flags.no_setup)
+ self.assertFalse(self.test_context._flags.no_teardown)
+
+ self.assertEqual(self.test_context.name, 'foo')
+ self.assertEqual(self.test_context.assigned_name, 'foo')
+
+ def test_init_flags_no_teardown(self):
+ self.attrs['flags'] = {'no_setup': False, 'no_teardown': True}
+
+ self.test_context.init(self.attrs)
+
+ self.assertFalse(self.test_context._flags.no_setup)
+ self.assertTrue(self.test_context._flags.no_teardown)
+
+ self.assertEqual(self.test_context.name, 'foo')
+ self.assertEqual(self.test_context.assigned_name, 'foo')
+
+ def test__get_server(self):
+ self.test_context.init(self.attrs)
+ self.test_context.deploy()
+
+ result = self.test_context._get_server(None)
+ self.assertEqual(result, None)
+
+ self.test_context.undeploy()
+
+ def test__get_physical_nodes(self):
+ result = self.test_context._get_physical_nodes()
+ self.assertIsNone(result)
+
+ def test__get_physical_node_for_server(self):
+ result = self.test_context._get_physical_node_for_server("fake")
+ self.assertIsNone(result)
diff --git a/yardstick/tests/unit/benchmark/contexts/test_heat.py b/yardstick/tests/unit/benchmark/contexts/test_heat.py
new file mode 100644
index 000000000..96946cded
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/test_heat.py
@@ -0,0 +1,893 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from collections import OrderedDict
+import logging
+import os
+
+import mock
+import unittest
+import collections
+
+from yardstick.benchmark.contexts import base
+from yardstick.benchmark.contexts import heat
+from yardstick.benchmark.contexts import model
+from yardstick.common import constants as consts
+from yardstick.common import exceptions as y_exc
+from yardstick.common import openstack_utils
+from yardstick.common import yaml_loader
+from yardstick import ssh
+
+
+LOG = logging.getLogger(__name__)
+
+
+class HeatContextTestCase(unittest.TestCase):
+
+ HEAT_POD_SAMPLE = {
+ "nodes": [
+ {
+ "name": "node1",
+ "role": "Controller",
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": "/root/.yardstick_key"
+ },
+ {
+ "name": "node2",
+ "role": "Compute",
+ "ip": "10.229.47.139",
+ "user": "root",
+ "key_filename": "/root/.yardstick_key"
+ }
+ ]
+ }
+
+    def __init__(self, *args, **kwargs):
+        super(HeatContextTestCase, self).__init__(*args, **kwargs)
+
+ def setUp(self):
+ self.test_context = heat.HeatContext()
+ self.addCleanup(self._remove_contexts)
+ self.mock_context = mock.Mock(spec=heat.HeatContext())
+
+ def _remove_contexts(self):
+ if self.test_context in self.test_context.list:
+ self.test_context._delete_context()
+
+ def test___init__(self):
+ self.assertIsNone(self.test_context._name)
+ self.assertIsNone(self.test_context._task_id)
+ self.assertFalse(self.test_context._flags.no_setup)
+ self.assertFalse(self.test_context._flags.no_teardown)
+ self.assertIsNone(self.test_context.stack)
+ self.assertEqual(self.test_context.networks, OrderedDict())
+ self.assertEqual(self.test_context.servers, [])
+ self.assertEqual(self.test_context.placement_groups, [])
+ self.assertEqual(self.test_context.server_groups, [])
+ self.assertIsNone(self.test_context.keypair_name)
+ self.assertIsNone(self.test_context.secgroup_name)
+ self.assertIsNone(self.test_context.security_group)
+ self.assertEqual(self.test_context._server_map, {})
+ self.assertIsNone(self.test_context._image)
+ self.assertIsNone(self.test_context._flavor)
+ self.assertIsNone(self.test_context._user)
+ self.assertIsNone(self.test_context.template_file)
+ self.assertIsNone(self.test_context.heat_parameters)
+ self.assertIsNone(self.test_context.key_filename)
+ self.assertTrue(self.test_context.yardstick_gen_key_file)
+
+ @mock.patch.object(yaml_loader, 'read_yaml_file')
+ @mock.patch('yardstick.benchmark.contexts.heat.PlacementGroup')
+ @mock.patch('yardstick.benchmark.contexts.heat.ServerGroup')
+ @mock.patch('yardstick.benchmark.contexts.heat.Network')
+ @mock.patch('yardstick.benchmark.contexts.heat.Server')
+ def test_init(self, mock_server, mock_network, mock_sg, mock_pg,
+ mock_read_yaml):
+
+ mock_read_yaml.return_value = self.HEAT_POD_SAMPLE
+ pgs = {'pgrp1': {'policy': 'availability'}}
+ sgs = {'servergroup1': {'policy': 'affinity'}}
+ networks = {'bar': {'cidr': '10.0.1.0/24'}}
+ servers = {'baz': {'floating_ip': True, 'placement': 'pgrp1'}}
+ attrs = {'name': 'foo',
+ 'file': 'pod.yaml',
+ 'task_id': '1234567890',
+ 'placement_groups': pgs,
+ 'server_groups': sgs,
+ 'networks': networks,
+ 'servers': servers}
+
+ with mock.patch.object(openstack_utils, 'get_shade_client'), \
+ mock.patch.object(openstack_utils, 'get_shade_operator_client'):
+ self.test_context.init(attrs)
+
+ self.assertFalse(self.test_context._flags.no_setup)
+ self.assertFalse(self.test_context._flags.no_teardown)
+ self.assertEqual(self.test_context._name, "foo")
+ self.assertEqual(self.test_context._task_id, '1234567890')
+ self.assertEqual(self.test_context.name, "foo-12345678")
+ self.assertEqual(self.test_context.keypair_name, "foo-12345678-key")
+ self.assertEqual(self.test_context.secgroup_name, "foo-12345678-secgroup")
+
+ mock_pg.assert_called_with('pgrp1', self.test_context,
+ pgs['pgrp1']['policy'])
+ mock_sg.assert_called_with('servergroup1', self.test_context,
+ sgs['servergroup1']['policy'])
+ self.assertEqual(len(self.test_context.placement_groups), 1)
+ self.assertEqual(len(self.test_context.server_groups), 1)
+
+ mock_network.assert_called_with(
+ 'bar', self.test_context, networks['bar'])
+ self.assertEqual(len(self.test_context.networks), 1)
+
+ mock_server.assert_called_with('baz', self.test_context,
+ servers['baz'])
+ self.assertEqual(len(self.test_context.servers), 1)
+
+ def test_init_no_name_or_task_id(self):
+ attrs = {}
+ self.assertRaises(KeyError, self.test_context.init, attrs)
+
+ def test_name(self):
+ self.test_context._name = 'foo'
+ self.test_context._task_id = '1234567890'
+ self.test_context._name_task_id = '{}-{}'.format(
+ self.test_context._name, self.test_context._task_id[:8])
+ self.assertEqual(self.test_context.name, 'foo-12345678')
+ self.assertEqual(self.test_context.assigned_name, 'foo')
+
+ def test_name_flags(self):
+ self.test_context._flags = base.Flags(
+ **{"no_setup": True, "no_teardown": True})
+ self.test_context._name = 'foo'
+ self.test_context._task_id = '1234567890'
+
+ self.assertEqual(self.test_context.name, 'foo')
+ self.assertEqual(self.test_context.assigned_name, 'foo')
+
+ def test_init_no_setup_no_teardown(self):
+
+ attrs = {'name': 'foo',
+ 'task_id': '1234567890',
+ 'placement_groups': {},
+ 'server_groups': {},
+ 'networks': {},
+ 'servers': {},
+ 'file': "pod.yaml",
+ 'flags': {
+ 'no_setup': True,
+ 'no_teardown': True,
+ },
+ }
+
+ with mock.patch.object(openstack_utils, 'get_shade_client'), \
+ mock.patch.object(openstack_utils, 'get_shade_operator_client'):
+ self.test_context.init(attrs)
+
+ self.assertTrue(self.test_context._flags.no_setup)
+ self.assertTrue(self.test_context._flags.no_teardown)
+
+ def test_init_key_filename(self):
+ attrs = {'name': 'foo',
+ 'file': 'pod.yaml',
+ 'task_id': '1234567890',
+ 'server_groups': {},
+ 'networks': {},
+ 'servers': {},
+ 'heat_template': "/root/clearwater.yaml",
+ 'key_filename': '/etc/yardstick/yardstick.pem'}
+
+ with mock.patch.object(openstack_utils, 'get_shade_client'), \
+ mock.patch.object(openstack_utils, 'get_shade_operator_client'):
+ self.test_context.init(attrs)
+
+ self.assertIsNotNone(self.test_context.key_filename)
+ self.assertFalse(self.test_context.yardstick_gen_key_file)
+
+ @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
+ def test__add_resources_to_template_no_servers(self, mock_template):
+ self.test_context._name = 'ctx'
+ self.test_context._task_id = '1234567890'
+ self.test_context._name_task_id = '{}-{}'.format(
+ self.test_context._name, self.test_context._task_id[:8])
+ self.test_context.keypair_name = "ctx-key"
+ self.test_context.secgroup_name = "ctx-secgroup"
+ self.test_context.key_uuid = "2f2e4997-0a8e-4eb7-9fa4-f3f8fbbc393b"
+ netattrs = {'cidr': '10.0.0.0/24', 'provider': None,
+ 'external_network': 'ext_net'}
+
+ self.test_context.networks = OrderedDict(
+ {"mynet": model.Network("mynet", self.test_context,
+ netattrs)})
+
+ self.test_context._add_resources_to_template(mock_template)
+ mock_template.add_keypair.assert_called_with(
+ "ctx-key",
+ "ctx-12345678")
+ mock_template.add_security_group.assert_called_with("ctx-secgroup", None)
+ mock_template.add_network.assert_called_with(
+ "ctx-12345678-mynet", 'physnet1', None, None, None, None)
+ mock_template.add_router.assert_called_with(
+ "ctx-12345678-mynet-router",
+ netattrs["external_network"],
+ "ctx-12345678-mynet-subnet")
+ mock_template.add_router_interface.assert_called_with(
+ "ctx-12345678-mynet-router-if0",
+ "ctx-12345678-mynet-router",
+ "ctx-12345678-mynet-subnet")
+
+ @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
+ def test_attrs_get(self, *args):
+ image, flavor, user = expected_tuple = 'foo1', 'foo2', 'foo3'
+ self.assertNotEqual(self.test_context.image, image)
+ self.assertNotEqual(self.test_context.flavor, flavor)
+ self.assertNotEqual(self.test_context.user, user)
+ self.test_context._image = image
+ self.test_context._flavor = flavor
+ self.test_context._user = user
+ attr_tuple = self.test_context.image, self.test_context.flavor, self.test_context.user
+ self.assertEqual(attr_tuple, expected_tuple)
+
+ @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
+ def test_attrs_set_negative(self, *args):
+ with self.assertRaises(AttributeError):
+ self.test_context.image = 'foo'
+
+ with self.assertRaises(AttributeError):
+ self.test_context.flavor = 'foo'
+
+ with self.assertRaises(AttributeError):
+ self.test_context.user = 'foo'
+
+ def test__create_new_stack(self):
+ template = mock.Mock()
+ self.test_context._create_new_stack(template)
+ template.create.assert_called_once()
+
+ def test__create_new_stack_stack_create_failed(self):
+ template = mock.Mock()
+ template.create.side_effect = y_exc.HeatTemplateError
+
+ self.assertRaises(y_exc.HeatTemplateError,
+ self.test_context._create_new_stack,
+ template)
+
+ def test__create_new_stack_keyboard_interrupt(self):
+ template = mock.Mock()
+ template.create.side_effect = KeyboardInterrupt
+ self.assertRaises(y_exc.StackCreationInterrupt,
+ self.test_context._create_new_stack,
+ template)
+
+ @mock.patch.object(os.path, 'exists', return_value=True)
+ @mock.patch.object(heat.HeatContext, '_add_resources_to_template')
+ @mock.patch.object(heat.HeatContext, '_create_new_stack')
+ def test_deploy_stack_creation_failed(self, mock_create,
+ mock_resources_template, mock_path_exists):
+ self.test_context._name = 'foo'
+ self.test_context._task_id = '1234567890'
+ self.test_context._name_task_id = 'foo-12345678'
+ mock_create.side_effect = y_exc.HeatTemplateError
+ self.assertRaises(y_exc.HeatTemplateError,
+ self.test_context.deploy)
+
+ mock_path_exists.assert_called()
+ mock_resources_template.assert_called_once()
+
+ @mock.patch.object(os.path, 'exists', return_value=False)
+ @mock.patch.object(ssh.SSH, 'gen_keys')
+ @mock.patch.object(heat, 'HeatTemplate')
+ def test_deploy(self, mock_template, mock_genkeys, mock_path_exists):
+ self.test_context._name = 'foo'
+ self.test_context._task_id = '1234567890'
+ self.test_context._name_task_id = '{}-{}'.format(
+ self.test_context._name, self.test_context._task_id[:8])
+ self.test_context.template_file = '/bar/baz/some-heat-file'
+ self.test_context.heat_parameters = {'image': 'cirros'}
+ self.test_context.get_neutron_info = mock.MagicMock()
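+ # os.path.exists is patched to return False, so deploy() should
+ # generate a fresh SSH key pair for this context.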
+ self.test_context.deploy()
+
+ mock_template.assert_called_with(
+ 'foo-12345678', template_file='/bar/baz/some-heat-file',
+ heat_parameters={'image': 'cirros'},
+ os_cloud_config=self.test_context._flags.os_cloud_config)
+ self.assertIsNotNone(self.test_context.stack)
+ key_filename = ''.join(
+ [consts.YARDSTICK_ROOT_PATH,
+ 'yardstick/resources/files/yardstick_key-',
+ self.test_context._name_task_id])
+ mock_genkeys.assert_called_once_with(key_filename)
+ mock_path_exists.assert_any_call(key_filename)
+
+ @mock.patch.object(heat, 'HeatTemplate')
+ @mock.patch.object(os.path, 'exists', return_value=False)
+ @mock.patch.object(ssh.SSH, 'gen_keys')
+ @mock.patch.object(heat.HeatContext, '_retrieve_existing_stack')
+ @mock.patch.object(heat.HeatContext, '_create_new_stack')
+ def test_deploy_no_setup(self, mock_create_new_stack,
+ mock_retrieve_existing_stack, mock_genkeys, mock_path_exists,
+ *args):
+ self.test_context._name = 'foo'
+ self.test_context._task_id = '1234567890'
+ self.test_context.template_file = '/bar/baz/some-heat-file'
+ self.test_context.heat_parameters = {'image': 'cirros'}
+ self.test_context.get_neutron_info = mock.MagicMock()
+ self.test_context._flags.no_setup = True
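+ # With the no_setup flag the context should reuse an existing stack
+ # instead of creating a new one.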
+ self.test_context.deploy()
+
+ mock_create_new_stack.assert_not_called()
+ mock_retrieve_existing_stack.assert_called_with(self.test_context.name)
+ self.assertIsNotNone(self.test_context.stack)
+ key_filename = ''.join(
+ [consts.YARDSTICK_ROOT_PATH,
+ 'yardstick/resources/files/yardstick_key-',
+ self.test_context._name])
+ mock_genkeys.assert_called_once_with(key_filename)
+ mock_path_exists.assert_any_call(key_filename)
+
+ @mock.patch.object(heat, 'HeatTemplate')
+ @mock.patch.object(os.path, 'exists', return_value=False)
+ @mock.patch.object(ssh.SSH, 'gen_keys')
+ @mock.patch.object(heat.HeatContext, '_create_new_stack')
+ @mock.patch.object(heat.HeatContext, '_retrieve_existing_stack',
+ return_value=None)
+ def test_deploy_try_retrieve_context_does_not_exist(self,
+ mock_retrieve_stack, mock_create_new_stack, mock_genkeys,
+ mock_path_exists, *args):
+ self.test_context._name = 'demo'
+ self.test_context._task_id = '1234567890'
+ self.test_context._flags.no_setup = True
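+ # no_setup is set but no existing stack can be retrieved, so deploy()
+ # should fall back to creating a new stack.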
+ self.test_context.template_file = '/bar/baz/some-heat-file'
+ self.test_context.get_neutron_info = mock.MagicMock()
+ self.test_context.deploy()
+
+ mock_retrieve_stack.assert_called_once_with(self.test_context._name)
+ mock_create_new_stack.assert_called()
+ key_filename = ''.join(
+ [consts.YARDSTICK_ROOT_PATH,
+ 'yardstick/resources/files/yardstick_key-',
+ self.test_context._name])
+ mock_genkeys.assert_called_once_with(key_filename)
+ mock_path_exists.assert_any_call(key_filename)
+
+ @mock.patch.object(heat, 'HeatTemplate', return_value='heat_template')
+ @mock.patch.object(heat.HeatContext, '_add_resources_to_template')
+ @mock.patch.object(os.path, 'exists', return_value=False)
+ @mock.patch.object(ssh.SSH, 'gen_keys')
+ def test_deploy_ssh_key_before_adding_resources(self, mock_genkeys,
+ mock_path_exists, mock_add_resources, *args):
+ mock_manager = mock.Mock()
+ mock_manager.attach_mock(mock_add_resources,
+ '_add_resources_to_template')
+ mock_manager.attach_mock(mock_genkeys, 'gen_keys')
+ mock_manager.reset_mock()
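+ # Attaching both mocks to a single manager records their calls in one
+ # ordered list, so the relative order of gen_keys() and
+ # _add_resources_to_template() can be asserted below.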
+ self.test_context._name_task_id = 'demo-12345678'
+ self.test_context.get_neutron_info = mock.Mock()
+ with mock.patch.object(self.test_context, '_create_new_stack') as \
+ mock_create_stack, \
+ mock.patch.object(self.test_context, 'get_neutron_info') as \
+ mock_neutron_info:
+ self.test_context.deploy()
+
+ mock_neutron_info.assert_called_once()
+ mock_create_stack.assert_called_once()
+ key_filename = ''.join(
+ [consts.YARDSTICK_ROOT_PATH,
+ 'yardstick/resources/files/yardstick_key-',
+ self.test_context._name_task_id])
+ mock_genkeys.assert_called_once_with(key_filename)
+ mock_path_exists.assert_any_call(key_filename)
+
+ mock_call_gen_keys = mock.call.gen_keys(key_filename)
+ mock_call_add_resources = (
+ mock.call._add_resources_to_template('heat_template'))
+ self.assertTrue(mock_manager.mock_calls.index(mock_call_gen_keys) <
+ mock_manager.mock_calls.index(mock_call_add_resources))
+
+ @mock.patch.object(heat, 'HeatTemplate')
+ @mock.patch.object(ssh.SSH, 'gen_keys')
+ @mock.patch.object(heat.HeatContext, '_create_new_stack')
+ def test_deploy_with_key_filename_provided(self, mock_create_new_stack,
+ mock_gen_keys, *args):
+ self.test_context._name = 'foo'
+ self.test_context._task_id = '1234567890'
+ self.test_context._name_task_id = '{}-{}'.format(
+ self.test_context._name, self.test_context._task_id[:8])
+ self.test_context.template_file = '/bar/baz/some-heat-file'
+ self.test_context.heat_parameters = {'image': 'cirros'}
+ self.test_context.yardstick_gen_key_file = False
+ self.test_context.key_filename = '/etc/yardstick/yardstick.pem'
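+ # A key file is provided by the user, so gen_keys() must not be called.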
+ self.test_context.get_neutron_info = mock.MagicMock()
+ self.test_context.deploy()
+
+ mock_create_new_stack.assert_called()
+ mock_gen_keys.assert_not_called()
+
+ def test_check_for_context(self):
+ pass
+ # check that the context exists
+
+ def test_add_server_port(self):
+ network1 = mock.MagicMock()
+ network2 = mock.MagicMock()
+ self.test_context._name = 'foo'
+ self.test_context._task_id = '1234567890'
+ self.test_context._name_task_id = '{}-{}'.format(
+ self.test_context._name, self.test_context._task_id[:8])
+ self.test_context.stack = mock.MagicMock()
+ self.test_context.networks = {
+ 'a': network1,
+ 'c': network2,
+ }
+ self.test_context.stack.outputs = {
+ u'b': u'10.20.30.45',
+ u'b-subnet_id': 1,
+ u'foo-12345678-a-subnet-cidr': u'10.20.0.0/15',
+ u'foo-12345678-a-subnet-gateway_ip': u'10.20.30.1',
+ u'b-mac_address': u'00:01',
+ u'b-device_id': u'dev21',
+ u'b-network_id': u'net789',
+ u'd': u'40.30.20.15',
+ u'd-subnet_id': 2,
+ u'foo-12345678-c-subnet-cidr': u'40.30.0.0/18',
+ u'foo-12345678-c-subnet-gateway_ip': u'40.30.20.254',
+ u'd-mac_address': u'00:10',
+ u'd-device_id': u'dev43',
+ u'd-network_id': u'net987',
+ u'e': u'40.30.20.15',
+ u'e-subnet_id': 2,
+ u'e-mac_address': u'00:10',
+ u'e-device_id': u'dev43',
+ u'e-network_id': u'net987',
+ }
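+ # Per-port outputs are keyed by the port's stack name ('b', 'd', 'e'),
+ # while per-subnet outputs use '<name>-<task>-<network>-subnet-*',
+ # which appears to mirror how the Heat template names its outputs.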
+ server = mock.MagicMock()
+ server.private_ip = None
+ server.ports = OrderedDict([
+ ('a', [{'stack_name': 'b', 'port': 'port_a'}]),
+ ('c', [{'stack_name': 'd', 'port': 'port_c'},
+ {'stack_name': 'e', 'port': 'port_f'}]),
+ ])
+
+ expected = {
+ "private_ip": '10.20.30.45',
+ "subnet_id": 1,
+ "subnet_cidr": '10.20.0.0/15',
+ "network": '10.20.0.0',
+ "netmask": '255.254.0.0',
+ "name": "port_a",
+ "gateway_ip": '10.20.30.1',
+ "mac_address": '00:01',
+ "device_id": 'dev21',
+ "network_id": 'net789',
+ "network_name": 'a',
+ "local_mac": '00:01',
+ "local_ip": '10.20.30.45',
+ }
+ self.test_context.add_server_port(server)
+ self.assertEqual(server.private_ip, '10.20.30.45')
+ self.assertEqual(len(server.interfaces), 3)
+ self.assertDictEqual(server.interfaces['port_a'], expected)
+
+ @mock.patch('yardstick.benchmark.contexts.heat.os')
+ @mock.patch.object(heat.HeatContext, '_delete_key_file')
+ @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
+ def test_undeploy(self, mock_template, mock_delete_key, *args):
+ self.test_context.stack = mock_template
+ self.test_context._name = 'foo'
+ self.test_context._task_id = '1234567890'
+ self.test_context._name_task_id = '{}-{}'.format(
+ self.test_context._name, self.test_context._task_id[:8])
+ # mock_os.path.exists.return_value = True
+ self.test_context.key_filename = 'foo/bar/foobar'
+ self.test_context.undeploy()
+ mock_delete_key.assert_called()
+ mock_template.delete.assert_called_once()
+
+ @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
+ def test_undeploy_no_teardown(self, mock_template):
+ self.test_context.stack = mock_template
+ self.test_context._name = 'foo'
+ self.test_context._task_id = '1234567890'
+ self.test_context._flags.no_teardown = True
+ self.test_context.undeploy()
+
+ mock_template.delete.assert_not_called()
+
+ @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
+ @mock.patch('yardstick.benchmark.contexts.heat.os')
+ def test_undeploy_key_filename(self, mock_os, mock_template):
+ self.test_context.stack = mock_template
+ self.test_context._name = 'foo'
+ self.test_context._task_id = '1234567890'
+ self.test_context._name_task_id = '{}-{}'.format(
+ self.test_context._name, self.test_context._task_id)
+ mock_os.path.exists.return_value = True
+ self.test_context.key_filename = 'foo/bar/foobar'
+ self.assertIsNone(self.test_context.undeploy())
+
+ @mock.patch("yardstick.benchmark.contexts.heat.pkg_resources")
+ def test__get_server_found_dict(self, *args):
+ """
+ Use HeatContext._get_server to get a server that matches
+ based on a dictionary input.
+ """
+ foo2_server = mock.Mock()
+ foo2_server.key_filename = None
+ foo2_server.private_ip = '10.0.0.2'
+ foo2_server.public_ip = '127.0.0.2'
+ foo2_server.context.user = 'oof'
+
+ baz3_server = mock.Mock()
+ baz3_server.key_filename = None
+ baz3_server.private_ip = '10.0.0.3'
+ baz3_server.public_ip = '127.0.0.3'
+ baz3_server.context.user = 'zab'
+
+ self.test_context._name = 'bar'
+ self.test_context._task_id = '1234567890'
+ self.test_context._name_task_id = '{}-{}'.format(
+ self.test_context._name, self.test_context._task_id[:8])
+ self.test_context._user = 'bot'
+ self.test_context.stack = mock.Mock()
+ self.test_context.stack.outputs = {
+ 'private_ip': '10.0.0.1',
+ 'public_ip': '127.0.0.1',
+ }
+ self.test_context._server_map = {
+ 'baz3': baz3_server,
+ 'foo2': foo2_server,
+ }
+
+ attr_name = {
+ 'name': 'foo.bar-12345678',
+ 'private_ip_attr': 'private_ip',
+ 'public_ip_attr': 'public_ip',
+ }
+ self.test_context.key_uuid = 'foo-42'
+ result = self.test_context._get_server(attr_name)
+ self.assertEqual(result['user'], 'bot')
+ self.assertEqual(result['ip'], '127.0.0.1')
+ self.assertEqual(result['private_ip'], '10.0.0.1')
+
+ @mock.patch("yardstick.benchmark.contexts.heat.pkg_resources")
+ def test__get_server_found_dict_no_attrs(self, *args):
+ """
+ Use HeatContext._get_server to get a server that matches based on
+ a dictionary input that has no IP attribute mappings.
+ """
+ foo2_server = mock.Mock()
+ foo2_server.private_ip = '10.0.0.2'
+ foo2_server.public_ip = '127.0.0.2'
+ foo2_server.context.user = 'oof'
+
+ baz3_server = mock.Mock()
+ baz3_server.private_ip = '10.0.0.3'
+ baz3_server.public_ip = '127.0.0.3'
+ baz3_server.context.user = 'zab'
+
+ self.test_context._name = 'bar'
+ self.test_context._task_id = '1234567890'
+ self.test_context._name_task_id = '{}-{}'.format(
+ self.test_context._name, self.test_context._task_id[:8])
+ self.test_context._user = 'bot'
+ self.test_context.stack = mock.Mock()
+ self.test_context.stack.outputs = {
+ 'private_ip': '10.0.0.1',
+ 'public_ip': '127.0.0.1',
+ }
+ self.test_context._server_map = {
+ 'baz3': baz3_server,
+ 'foo2': foo2_server,
+ }
+
+ attr_name = {
+ 'name': 'foo.bar-12345678',
+ }
+
+ self.test_context.key_uuid = 'foo-42'
+ result = self.test_context._get_server(attr_name)
+ self.assertEqual(result['user'], 'bot')
+ # without a 'private_ip_attr' mapping the result carries a None value
+ self.assertIsNone(result['private_ip'])
+ # without a 'public_ip_attr' mapping the 'ip' key is absent from the result
+ self.assertNotIn('ip', result)
+
+ @mock.patch("yardstick.benchmark.contexts.heat.pkg_resources")
+ def test__get_server_found_not_dict(self, *args):
+ """
+ Use HeatContext._get_server to get a server that matches
+ based on a non-dictionary input
+ """
+ foo2_server = mock.Mock()
+ foo2_server.private_ip = '10.0.0.2'
+ foo2_server.public_ip = '127.0.0.2'
+ foo2_server.context.user = 'oof'
+
+ baz3_server = mock.Mock()
+ baz3_server.private_ip = '10.0.0.3'
+ baz3_server.public_ip = None
+ baz3_server.context.user = 'zab'
+
+ self.test_context._name = 'bar1'
+ self.test_context._task_id = '1234567890'
+ self.test_context._name_task_id = 'bar1-12345678'
+ self.test_context.stack = mock.Mock()
+ self.test_context.stack.outputs = {
+ 'private_ip': '10.0.0.1',
+ 'public_ip': '127.0.0.1',
+ }
+ self.test_context.generate_routing_table = mock.MagicMock(return_value=[])
+
+ self.test_context._server_map = {
+ 'baz3': baz3_server,
+ 'foo2': foo2_server,
+ }
+
+ attr_name = 'baz3'
+ result = self.test_context._get_server(attr_name)
+ self.assertEqual(result['user'], 'zab')
+ self.assertEqual(result['private_ip'], '10.0.0.3')
+ # no public_ip on the server results in no value in the result
+ self.assertNotIn('public_ip', result)
+
+ @mock.patch("yardstick.benchmark.contexts.heat.pkg_resources")
+ def test__get_server_none_found_not_dict(self, *args):
+ """
+ Use HeatContext._get_server to not get a server due to
+ None value associated with the match to a non-dictionary
+ input
+ """
+ foo2_server = mock.Mock()
+ foo2_server.private_ip = '10.0.0.2'
+ foo2_server.public_ip = '127.0.0.2'
+ foo2_server.context.user = 'oof'
+
+ baz3_server = mock.Mock()
+ baz3_server.private_ip = '10.0.0.3'
+ baz3_server.public_ip = None
+ baz3_server.context.user = 'zab'
+
+ self.test_context._name = 'bar1'
+ self.test_context.stack = mock.Mock()
+ self.test_context.stack.outputs = {
+ 'private_ip': '10.0.0.1',
+ 'public_ip': '127.0.0.1',
+ }
+ self.test_context._server_map = {
+ 'baz3': baz3_server,
+ 'foo2': foo2_server,
+ 'wow4': None,
+ }
+
+ self.test_context.key_uuid = 'foo-42'
+ attr_name = 'wow4'
+ result = self.test_context._get_server(attr_name)
+ self.assertIsNone(result)
+
+ @mock.patch("yardstick.benchmark.contexts.heat.pkg_resources")
+ def test__get_server_not_found_dict(self, *args):
+ """
+ Use HeatContext._get_server to not get a server for lack
+ of a match to a dictionary input
+ """
+ foo2_server = mock.Mock()
+ foo2_server.private_ip = '10.0.0.2'
+ foo2_server.public_ip = '127.0.0.2'
+ foo2_server.context.user = 'oof'
+
+ baz3_server = mock.Mock()
+ baz3_server.private_ip = '10.0.0.3'
+ baz3_server.public_ip = None
+ baz3_server.context.user = 'zab'
+
+ self.test_context._name = 'bar1'
+ self.test_context._task_id = '1235467890'
+ self.test_context._name_task_id = '{}-{}'.format(
+ self.test_context._name, self.test_context._task_id[:8])
+ self.test_context.stack = mock.Mock()
+ self.test_context.stack.outputs = {
+ 'private_ip': '10.0.0.1',
+ 'public_ip': '127.0.0.1',
+ }
+ self.test_context._server_map = {
+ 'baz3': baz3_server,
+ 'foo2': foo2_server,
+ }
+
+ self.test_context.key_uuid = 'foo-42'
+ attr_name = {
+ 'name': 'foo.wow4',
+ 'private_ip_attr': 'private_ip',
+ 'public_ip_attr': 'public_ip',
+ }
+ result = self.test_context._get_server(attr_name)
+ self.assertIsNone(result)
+
+ @mock.patch("yardstick.benchmark.contexts.heat.pkg_resources")
+ def test__get_server_not_found_not_dict(self, *args):
+ """
+ Use HeatContext._get_server to not get a server for lack
+ of a match to a non-dictionary input
+ """
+ foo2_server = mock.Mock()
+ foo2_server.private_ip = '10.0.0.2'
+ foo2_server.public_ip = '127.0.0.2'
+ foo2_server.context.user = 'oof'
+
+ baz3_server = mock.Mock()
+ baz3_server.private_ip = '10.0.0.3'
+ baz3_server.public_ip = None
+ baz3_server.context.user = 'zab'
+
+ self.test_context._name = 'bar1'
+ self.test_context.stack = mock.Mock()
+ self.test_context.stack.outputs = {
+ 'private_ip': '10.0.0.1',
+ 'public_ip': '127.0.0.1',
+ }
+ self.test_context._server_map = {
+ 'baz3': baz3_server,
+ 'foo2': foo2_server,
+ }
+
+ self.test_context.key_uuid = 'foo-42'
+ attr_name = 'foo.wow4'
+ result = self.test_context._get_server(attr_name)
+ self.assertIsNone(result)
+
+ @mock.patch("yardstick.benchmark.contexts.heat.pkg_resources")
+ def test__get_server_found_dict_found_interfaces_dict(self, *args):
+ """
+ Use HeatContext._get_server to get a server that matches based on
+ a dictionary input which also defines an 'interfaces' mapping.
+ """
+ self.test_context._name = 'bar'
+ self.test_context._task_id = '1234567890'
+ self.test_context._name_task_id = '{}-{}'.format(
+ self.test_context._name, self.test_context._task_id[:8])
+ self.test_context._user = 'bot'
+ self.test_context.stack = mock.Mock()
+ self.test_context.stack.outputs = {
+ 'private_ip': '10.0.0.1',
+ 'public_ip': '127.0.0.1',
+ 'local_mac_addr': '64:00:6a:18:0f:d6',
+ 'private_netmask': '255.255.255.0',
+ 'private_net_name': 'private_network',
+ 'private_net_gateway': '127.0.0.254'
+ }
+
+ attr_name = {
+ 'name': 'foo.bar-12345678',
+ 'private_ip_attr': 'private_ip',
+ 'public_ip_attr': 'public_ip',
+ 'interfaces': {
+ 'data_net': {
+ 'local_ip': 'private_ip',
+ 'local_mac': 'local_mac_addr',
+ 'netmask': 'private_netmask',
+ 'network': 'private_net_name',
+ 'gateway_ip': 'private_net_gateway'
+ }
+ }
+ }
+ self.test_context.key_uuid = 'foo-42'
+ result = self.test_context._get_server(attr_name)
+ self.assertIsInstance(result['interfaces'], collections.Mapping)
+ for key in attr_name.get("interfaces").keys():
+ self.assertEqual(result['interfaces'][key]['local_ip'], '10.0.0.1')
+ self.assertEqual(result['interfaces'][key]['local_mac'], '64:00:6a:18:0f:d6')
+ self.assertEqual(result['interfaces'][key]['netmask'], '255.255.255.0')
+ self.assertEqual(result['interfaces'][key]['gateway_ip'], '127.0.0.254')
+
+ # TODO: Split this into more granular tests
+ def test__get_network(self):
+ network1 = mock.MagicMock()
+ network1.name = 'net_1'
+ network1.vld_id = 'vld111'
+ network1.segmentation_id = 'seg54'
+ network1.network_type = 'type_a'
+ network1.physical_network = 'phys'
+
+ network2 = mock.MagicMock()
+ network2.name = 'net_2'
+ network2.segmentation_id = 'seg45'
+ network2.network_type = 'type_b'
+ network2.physical_network = 'virt'
+
+ self.test_context.networks = {
+ 'a': network1,
+ 'b': network2,
+ }
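+ # _get_network matches either by network name (string) or by attribute
+ # lookup (dict); lookups that match nothing should return None.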
+
+ attr_name = None
+ self.assertIsNone(self.test_context._get_network(attr_name))
+
+ attr_name = {}
+ self.assertIsNone(self.test_context._get_network(attr_name))
+
+ attr_name = {'network_type': 'nosuch'}
+ self.assertIsNone(self.test_context._get_network(attr_name))
+
+ attr_name = 'vld777'
+ self.assertIsNone(self.test_context._get_network(attr_name))
+
+ attr_name = {'segmentation_id': 'seg45'}
+ expected = {
+ "name": 'net_2',
+ "segmentation_id": 'seg45',
+ "network_type": 'type_b',
+ "physical_network": 'virt',
+ }
+ result = self.test_context._get_network(attr_name)
+ self.assertDictEqual(result, expected)
+
+ attr_name = 'a'
+ expected = {
+ "name": 'net_1',
+ "segmentation_id": 'seg54',
+ "network_type": 'type_a',
+ "physical_network": 'phys',
+ }
+ result = self.test_context._get_network(attr_name)
+ self.assertDictEqual(result, expected)
+
+ def _get_file_abspath(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ file_path = os.path.join(curr_path, filename)
+ return file_path
+
+ def test__get_physical_nodes(self):
+ self.test_context.nodes = {}
+ nodes = self.test_context._get_physical_nodes()
+ self.assertEqual(nodes, {})
+
+ @mock.patch.object(yaml_loader, 'read_yaml_file')
+ def test__get_physical_node_for_server(self, mock_read_yaml):
+ attrs = {'name': 'foo',
+ 'task_id': '12345678',
+ 'file': "pod.yaml",
+ 'servers': {'vnf': {}},
+ 'networks': {'mgmt': {'cidr': '10.0.1.0/24'}}
+ }
+
+ with mock.patch.object(openstack_utils, 'get_shade_client'), \
+ mock.patch.object(openstack_utils, 'get_shade_operator_client'):
+ mock_read_yaml.return_value = self.HEAT_POD_SAMPLE
+ self.test_context.init(attrs)
+
+ with mock.patch('yardstick.common.openstack_utils.get_server') as mock_get_server:
+ mock_get_server.return_value = {'vnf': {}}
+
+ # When server is not from this context
+ result = self.test_context._get_physical_node_for_server('node1.foo-context')
+ self.assertIsNone(result)
+
+ # When node_name is not from this context
+ result = self.test_context._get_physical_node_for_server('fake.foo-12345678')
+ self.assertIsNone(result)
+
+ mock_munch = mock.Mock()
+ mock_munch.toDict = mock.Mock(return_value={
+ 'OS-EXT-SRV-ATTR:hypervisor_hostname': 'hypervisor_hostname'
+ })
+ mock_get_server.return_value = mock_munch
+
+ hypervisor = mock.Mock()
+ hypervisor.hypervisor_hostname = 'hypervisor_hostname'
+ hypervisor.host_ip = '10.229.47.137'
+
+ self.test_context.operator_client.list_hypervisors = mock.Mock(
+ return_value=[hypervisor])
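+ # The server's hypervisor_hostname is matched against the operator
+ # client's hypervisor list; its host_ip presumably maps back to 'node1'
+ # in HEAT_POD_SAMPLE, hence the expected 'node1.foo' result.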
+
+ result = self.test_context._get_physical_node_for_server('vnf.foo-12345678')
+ self.assertEqual(result, 'node1.foo')
diff --git a/yardstick/tests/unit/benchmark/contexts/test_kubernetes.py b/yardstick/tests/unit/benchmark/contexts/test_kubernetes.py
new file mode 100644
index 000000000..b526e7cc7
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/test_kubernetes.py
@@ -0,0 +1,273 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import collections
+import time
+
+import mock
+import unittest
+
+from yardstick.benchmark import contexts
+from yardstick.benchmark.contexts import base
+from yardstick.benchmark.contexts import kubernetes
+from yardstick.common import constants
+from yardstick.common import exceptions
+from yardstick.common import kubernetes_utils as k8s_utils
+from yardstick.orchestrator import kubernetes as orchestrator_kubernetes
+
+
+CONTEXT_CFG = {
+ 'type': contexts.CONTEXT_KUBERNETES,
+ 'name': 'k8s',
+ 'task_id': '1234567890',
+ 'servers': {
+ 'host': {
+ 'image': 'openretriever/yardstick',
+ 'command': '/bin/bash',
+ 'args': ['-c', 'chmod 700 ~/.ssh; chmod 600 ~/.ssh/*; '
+ 'service ssh restart;while true ; do sleep 10000; done']
+ },
+ 'target': {
+ 'image': 'openretriever/yardstick',
+ 'command': '/bin/bash',
+ 'args': ['-c', 'chmod 700 ~/.ssh; chmod 600 ~/.ssh/*; '
+ 'service ssh restart;while true ; do sleep 10000; done']
+ }
+ },
+ 'networks': {
+ 'flannel': {
+ 'args': 'flannel_args',
+ 'plugin': 'flannel'
+ },
+ 'sriov01': {
+ 'args': 'sriov_args',
+ 'plugin': 'sriov'
+ },
+ }
+}
+
+
+class NodePort(object):
+ def __init__(self):
+ self.node_port = 30000
+ self.port = constants.SSH_PORT
+ self.name = 'port_name'
+ self.protocol = 'TCP'
+ self.target_port = constants.SSH_PORT
+
+
+class Service(object):
+ def __init__(self):
+ self.ports = [NodePort()]
+
+
+class Status(object):
+ def __init__(self):
+ self.pod_ip = '172.16.10.131'
+
+
+class Pod(object):
+ def __init__(self):
+ self.status = Status()
+
+
+class KubernetesTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.k8s_context = kubernetes.KubernetesContext()
+ self.addCleanup(self._remove_contexts)
+ self.k8s_context.init(CONTEXT_CFG)
+
+ @staticmethod
+ def _remove_contexts():
+ for context in base.Context.list:
+ context._delete_context()
+ base.Context.list = []
+
+ @mock.patch.object(kubernetes.KubernetesContext, '_delete_services')
+ @mock.patch.object(kubernetes.KubernetesContext, '_delete_ssh_key')
+ @mock.patch.object(kubernetes.KubernetesContext, '_delete_rcs')
+ @mock.patch.object(kubernetes.KubernetesContext, '_delete_pods')
+ @mock.patch.object(kubernetes.KubernetesContext, '_delete_networks')
+ @mock.patch.object(kubernetes.KubernetesContext, '_delete_crd')
+ def test_undeploy(self, mock_delete_crd, mock_delete_networks,
+ mock_delete_pods, mock_delete_rcs,
+ mock_delete_ssh, mock_delete_services):
+
+ self.k8s_context.undeploy()
+ mock_delete_ssh.assert_called_once()
+ mock_delete_rcs.assert_called_once()
+ mock_delete_pods.assert_called_once()
+ mock_delete_services.assert_called_once()
+ mock_delete_networks.assert_called_once()
+ mock_delete_crd.assert_called_once()
+
+ @mock.patch.object(kubernetes.KubernetesContext, '_create_services')
+ @mock.patch.object(kubernetes.KubernetesContext, '_wait_until_running')
+ @mock.patch.object(orchestrator_kubernetes.KubernetesTemplate,
+ 'get_rc_pods')
+ @mock.patch.object(kubernetes.KubernetesContext, '_create_rcs')
+ @mock.patch.object(kubernetes.KubernetesContext, '_set_ssh_key')
+ @mock.patch.object(kubernetes.KubernetesContext, '_create_networks')
+ @mock.patch.object(kubernetes.KubernetesContext, '_create_crd')
+ def test_deploy(self, mock_create_crd, mock_create_networks,
+ mock_set_ssh_key, mock_create_rcs, mock_get_rc_pods,
+ mock_wait_until_running, mock_create_services):
+
+ with mock.patch.object(time, 'sleep'):
+ self.k8s_context.deploy()
+ mock_set_ssh_key.assert_called_once()
+ mock_create_rcs.assert_called_once()
+ mock_create_services.assert_called_once()
+ mock_get_rc_pods.assert_called_once()
+ mock_wait_until_running.assert_called_once()
+ mock_create_networks.assert_called_once()
+ mock_create_crd.assert_called_once()
+
+ @mock.patch.object(kubernetes, 'paramiko', **{"resource_filename.return_value": ""})
+ @mock.patch.object(kubernetes, 'pkg_resources', **{"resource_filename.return_value": ""})
+ @mock.patch.object(kubernetes, 'utils')
+ @mock.patch.object(kubernetes, 'open', create=True)
+ @mock.patch.object(k8s_utils, 'delete_config_map')
+ @mock.patch.object(k8s_utils, 'create_config_map')
+ def test_ssh_key(self, mock_create, mock_delete, *args):
+ self.k8s_context._set_ssh_key()
+ self.k8s_context._delete_ssh_key()
+
+ mock_create.assert_called_once()
+ mock_delete.assert_called_once()
+
+ @mock.patch.object(k8s_utils, 'read_pod_status')
+ def test_wait_until_running(self, mock_read_pod_status):
+
+ self.k8s_context.template.pods = ['server']
+ mock_read_pod_status.return_value = 'Running'
+ self.k8s_context._wait_until_running()
+
+ @mock.patch.object(k8s_utils, 'get_pod_by_name')
+ @mock.patch.object(kubernetes.KubernetesContext, '_get_node_ip')
+ def test_get_server(self, mock_get_node_ip, mock_get_pod_by_name):
+ mock_get_pod_by_name.return_value = Pod()
+ mock_get_node_ip.return_value = '172.16.10.131'
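+ # SSH is exposed through a NodePort service, so the returned server
+ # dict should carry the node port (30000) rather than the pod SSH port.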
+ with mock.patch.object(self.k8s_context, '_get_service_ports') as \
+ mock_get_sports:
+ mock_get_sports.return_value = [
+ {'port': constants.SSH_PORT, 'node_port': 30000}]
+ server = self.k8s_context._get_server('server_name')
+ self.assertEqual('server_name', server['name'])
+ self.assertEqual(30000, server['ssh_port'])
+
+ @mock.patch.object(kubernetes.KubernetesContext, '_create_rc')
+ def test_create_rcs(self, mock_create_rc):
+ self.k8s_context._create_rcs()
+ mock_create_rc.assert_called()
+
+ @mock.patch.object(k8s_utils, 'create_replication_controller')
+ def test_create_rc(self, mock_create_replication_controller):
+ self.k8s_context._create_rc({})
+ mock_create_replication_controller.assert_called_once()
+
+ @mock.patch.object(kubernetes.KubernetesContext, '_delete_rc')
+ def test_delete_rcs(self, mock_delete_rc):
+ self.k8s_context._delete_rcs()
+ mock_delete_rc.assert_called()
+
+ @mock.patch.object(k8s_utils, 'delete_replication_controller')
+ def test_delete_rc(self, mock_delete_replication_controller):
+ self.k8s_context._delete_rc({})
+ mock_delete_replication_controller.assert_called_once()
+
+ @mock.patch.object(k8s_utils, 'get_node_list')
+ def test_get_node_ip(self, mock_get_node_list):
+ self.k8s_context._get_node_ip()
+ mock_get_node_list.assert_called_once()
+
+ @mock.patch.object(orchestrator_kubernetes.ServiceNodePortObject, 'create')
+ def test_create_services(self, mock_create):
+ self.k8s_context._create_services()
+ mock_create.assert_called()
+
+ @mock.patch.object(orchestrator_kubernetes.ServiceNodePortObject, 'delete')
+ def test_delete_services(self, mock_delete):
+ self.k8s_context._delete_services()
+ mock_delete.assert_called()
+
+ def test_init(self):
+ self.k8s_context._delete_context()
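+ # Re-create the context with KubernetesTemplate mocked out so the
+ # wiring done by init() (template object, parsed networks) can be
+ # checked in isolation.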
+ with mock.patch.object(orchestrator_kubernetes, 'KubernetesTemplate',
+ return_value='fake_template') as mock_k8stemplate:
+ self.k8s_context = kubernetes.KubernetesContext()
+ self.k8s_context.init(CONTEXT_CFG)
+ mock_k8stemplate.assert_called_once_with(self.k8s_context.name,
+ CONTEXT_CFG)
+ self.assertEqual('fake_template', self.k8s_context.template)
+ self.assertEqual(2, len(self.k8s_context._networks))
+ self.assertIn('flannel', self.k8s_context._networks.keys())
+ self.assertIn('sriov01', self.k8s_context._networks.keys())
+
+ def test__get_physical_nodes(self):
+ result = self.k8s_context._get_physical_nodes()
+ self.assertIsNone(result)
+
+ def test__get_physical_node_for_server(self):
+ result = self.k8s_context._get_physical_node_for_server("fake")
+ self.assertIsNone(result)
+
+ def test__get_network(self):
+ networks = collections.OrderedDict([('n1', 'data1'), ('n2', 'data2')])
+ self.k8s_context._networks = networks
+ self.assertEqual({'name': 'n1'}, self.k8s_context._get_network('n1'))
+ self.assertEqual({'name': 'n2'}, self.k8s_context._get_network('n2'))
+ self.assertIsNone(self.k8s_context._get_network('n3'))
+
+ @mock.patch.object(orchestrator_kubernetes.KubernetesTemplate,
+ 'get_rc_by_name')
+ def test__get_interfaces(self, mock_get_rc):
+ rc = orchestrator_kubernetes.ReplicationControllerObject('rc_name')
+ rc._networks = ['net1', 'net2']
+ mock_get_rc.return_value = rc
+ expected = {'net1': {'network_name': 'net1',
+ 'local_mac': None,
+ 'local_ip': None},
+ 'net2': {'network_name': 'net2',
+ 'local_mac': None,
+ 'local_ip': None}}
+ self.assertEqual(expected, self.k8s_context._get_interfaces('rc_name'))
+
+ @mock.patch.object(orchestrator_kubernetes.KubernetesTemplate,
+ 'get_rc_by_name')
+ def test__get_interfaces_no_networks(self, mock_get_rc):
+ rc = orchestrator_kubernetes.ReplicationControllerObject('rc_name')
+ mock_get_rc.return_value = rc
+ self.assertEqual({}, self.k8s_context._get_interfaces('rc_name'))
+
+ @mock.patch.object(orchestrator_kubernetes.KubernetesTemplate,
+ 'get_rc_by_name', return_value=None)
+ def test__get_interfaces_no_rc(self, *args):
+ self.assertEqual({}, self.k8s_context._get_interfaces('rc_name'))
+
+ @mock.patch.object(k8s_utils, 'get_service_by_name',
+ return_value=Service())
+ def test__get_service_ports(self, mock_get_service_by_name):
+ name = 'rc_name'
+ service_ports = self.k8s_context._get_service_ports(name)
+ mock_get_service_by_name.assert_called_once_with(name + '-service')
+ expected = {'node_port': 30000,
+ 'port': constants.SSH_PORT,
+ 'name': 'port_name',
+ 'protocol': 'TCP',
+ 'target_port': constants.SSH_PORT}
+ self.assertEqual(expected, service_ports[0])
+
+ @mock.patch.object(k8s_utils, 'get_service_by_name',
+ return_value=None)
+ def test__get_service_ports_exception(self, *args):
+ name = 'rc_name'
+ with self.assertRaises(exceptions.KubernetesServiceObjectNotDefined):
+ self.k8s_context._get_service_ports(name)
diff --git a/yardstick/tests/unit/benchmark/contexts/test_model.py b/yardstick/tests/unit/benchmark/contexts/test_model.py
new file mode 100644
index 000000000..20cc00b4e
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/test_model.py
@@ -0,0 +1,586 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.contexts.model
+
+from __future__ import absolute_import
+import unittest
+import mock
+
+from yardstick.benchmark.contexts import model
+
+
+class ObjectTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.mock_context = mock.Mock()
+
+ def test_construct(self):
+
+ test_object = model.Object('foo', self.mock_context)
+
+ self.assertEqual(test_object.name, 'foo')
+ self.assertEqual(test_object._context, self.mock_context)
+ self.assertIsNone(test_object.stack_name)
+ self.assertIsNone(test_object.stack_id)
+
+ def test_dn(self):
+
+ self.mock_context.name = 'bar'
+ test_object = model.Object('foo', self.mock_context)
+
+ self.assertEqual('foo.bar', test_object.dn)
+
+
+class PlacementGroupTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.mock_context = mock.Mock()
+ self.mock_context.name = 'bar'
+
+ def tearDown(self):
+ model.PlacementGroup.map = {}
+
+ def test_successful_construct(self):
+
+ test_pg = model.PlacementGroup('foo', self.mock_context, 'affinity')
+
+ self.assertEqual(test_pg.name, 'foo')
+ self.assertEqual(test_pg.members, set())
+ self.assertEqual(test_pg.stack_name, 'bar-foo')
+ self.assertEqual(test_pg.policy, 'affinity')
+
+ test_map = {'foo': test_pg}
+ self.assertEqual(model.PlacementGroup.map, test_map)
+
+ def test_wrong_policy_in_construct(self):
+
+ self.assertRaises(ValueError, model.PlacementGroup, 'foo',
+ self.mock_context, 'baz')
+
+ def test_add_member(self):
+
+ test_pg = model.PlacementGroup('foo', self.mock_context, 'affinity')
+ test_pg.add_member('foo')
+
+ self.assertEqual(test_pg.members, set(['foo']))
+
+ def test_get_name_successful(self):
+
+ model.PlacementGroup.map = {'foo': True}
+ self.assertTrue(model.PlacementGroup.get('foo'))
+
+ def test_get_name_unsuccessful(self):
+
+ self.assertIsNone(model.PlacementGroup.get('foo'))
+
+
+class RouterTestCase(unittest.TestCase):
+
+ def test_construct(self):
+
+ mock_context = mock.Mock()
+ mock_context.name = 'baz'
+ test_router = model.Router('foo', 'bar', mock_context, 'qux')
+
+ self.assertEqual(test_router.stack_name, 'baz-bar-foo')
+ self.assertEqual(test_router.stack_if_name, 'baz-bar-foo-if0')
+ self.assertEqual(test_router.external_gateway_info, 'qux')
+
+
+class NetworkTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.mock_context = mock.Mock()
+ self.mock_context.name = 'bar'
+
+ def tearDown(self):
+ model.Network.list = []
+
+ def test_construct_no_external_network(self):
+
+ attrs = {'cidr': '10.0.0.0/24'}
+ test_network = model.Network('foo', self.mock_context, attrs)
+
+ self.assertEqual(test_network.stack_name, 'bar-foo')
+ self.assertEqual(test_network.subnet_stack_name, 'bar-foo-subnet')
+ self.assertEqual(test_network.subnet_cidr, attrs['cidr'])
+ self.assertIsNone(test_network.router)
+ self.assertIn(test_network, model.Network.list)
+
+ def test_construct_has_external_network(self):
+
+ attrs = {'external_network': 'ext_net'}
+ test_network = model.Network('foo', self.mock_context, attrs)
+ exp_router = model.Router('router', 'foo', self.mock_context,
+ 'ext_net')
+
+ self.assertEqual(test_network.router.stack_name, exp_router.stack_name)
+ self.assertEqual(test_network.router.stack_if_name,
+ exp_router.stack_if_name)
+ self.assertEqual(test_network.router.external_gateway_info,
+ exp_router.external_gateway_info)
+
+ def test_has_route_to(self):
+
+ attrs = {'external_network': 'ext_net'}
+ test_network = model.Network('foo', self.mock_context, attrs)
+
+ self.assertTrue(test_network.has_route_to('ext_net'))
+
+ def test_has_no_route_to(self):
+
+ attrs = {}
+ test_network = model.Network('foo', self.mock_context, attrs)
+
+ self.assertFalse(test_network.has_route_to('ext_net'))
+
+ @mock.patch('yardstick.benchmark.contexts.model.Network.has_route_to')
+ def test_find_by_route_to(self, mock_has_route_to):
+
+ mock_network = mock.Mock()
+ model.Network.list = [mock_network]
+ mock_has_route_to.return_value = True
+
+ self.assertIs(mock_network, model.Network.find_by_route_to('foo'))
+
+ def test_find_external_network(self):
+
+ mock_network = mock.Mock()
+ mock_network.router = mock.Mock() #pylint ignore assignment-from-none
+ mock_network.router.external_gateway_info = 'ext_net'
+ model.Network.list = [mock_network]
+
+ self.assertEqual(model.Network.find_external_network(), 'ext_net')
+
+ def test_construct_gateway_ip_is_null(self):
+
+ attrs = {'gateway_ip': 'null'}
+ test_network = model.Network('foo', self.mock_context, attrs)
+ self.assertEqual(test_network.gateway_ip, 'null')
+
+ def test_construct_gateway_ip_is_none(self):
+
+ attrs = {'gateway_ip': None}
+ test_network = model.Network('foo', self.mock_context, attrs)
+ self.assertEqual(test_network.gateway_ip, 'null')
+
+ def test_construct_gateway_ip_is_absent(self):
+
+ attrs = {}
+ test_network = model.Network('foo', self.mock_context, attrs)
+ self.assertIsNone(test_network.gateway_ip)
+
+
+class ServerTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.mock_context = mock.Mock()
+ self.mock_context.name = 'bar'
+ self.mock_context.keypair_name = 'some-keys'
+ self.mock_context.secgroup_name = 'some-secgroup'
+ self.mock_context.user = "some-user"
+ netattrs = {'cidr': '10.0.0.0/24', 'provider': None, 'external_network': 'ext_net'}
+ self.mock_context.networks = [model.Network("some-network", self.mock_context, netattrs)]
+
+ def test_construct_defaults(self):
+
+ attrs = None
+ test_server = model.Server('foo', self.mock_context, attrs)
+
+ self.assertEqual(test_server.stack_name, 'foo.bar')
+ self.assertEqual(test_server.keypair_name, 'some-keys')
+ self.assertEqual(test_server.secgroup_name, 'some-secgroup')
+ self.assertEqual(test_server.placement_groups, [])
+ self.assertIsNone(test_server.server_group)
+ self.assertEqual(test_server.instances, 1)
+ self.assertIsNone(test_server.floating_ip)
+ self.assertIsNone(test_server._image)
+ self.assertIsNone(test_server._flavor)
+ self.assertIn(test_server, model.Server.list)
+
+ @mock.patch('yardstick.benchmark.contexts.model.PlacementGroup')
+ def test_construct_get_wrong_placement_group(self, mock_pg):
+
+ attrs = {'placement': 'baz'}
+ mock_pg.get.return_value = None
+
+ self.assertRaises(ValueError, model.Server, 'foo',
+ self.mock_context, attrs)
+
+ @mock.patch('yardstick.benchmark.contexts.model.PlacementGroup')
+ def test_construct_get_wrong_server_group(self, mock_sg):
+
+ attrs = {'server_group': 'baz'}
+ mock_sg.get.return_value = None
+
+ self.assertRaises(ValueError, model.Server, 'foo',
+ self.mock_context, attrs)
+
+ @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
+ def test__add_instance(self, mock_template):
+ attrs = {'image': 'some-image', 'flavor': 'some-flavor', 'floating_ip': '192.168.1.10',
+ 'floating_ip_assoc': 'some-vm',
+ 'availability_zone': 'zone'}
+ test_server = model.Server('foo', self.mock_context, attrs)
+
+ self.mock_context.flavors = ['flavor1', 'flavor2', 'some-flavor']
+
+ mock_network = mock.Mock()
+ mock_network.name = 'some-network'
+ mock_network.stack_name = 'some-network-stack'
+ mock_network.allowed_address_pairs = ["1", "2"]
+ mock_network.vnic_type = 'normal'
+ mock_network.subnet_stack_name = 'some-network-stack-subnet'
+ mock_network.provider = 'sriov'
+ mock_network.net_flags = {}
+ mock_network.external_network = 'ext_net'
+ mock_network.router = model.Router('some-router', 'some-network', self.mock_context,
+ 'ext_net')
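+ # The network advertises an external network and a router, so besides
+ # the port a floating IP and its association are expected to be added.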
+
+ test_server._add_instance(mock_template, 'some-server',
+ [mock_network], 'hints')
+
+ mock_template.add_port.assert_called_with(
+ 'some-server-some-network-port',
+ mock_network,
+ sec_group_id=self.mock_context.secgroup_name,
+ provider=mock_network.provider,
+ allowed_address_pairs=mock_network.allowed_address_pairs)
+
+ mock_template.add_floating_ip.assert_called_with(
+ 'some-server-fip',
+ mock_network.external_network,
+ 'some-server-some-network-port',
+ 'bar-some-network-some-router-if0',
+ 'some-secgroup'
+ )
+
+ mock_template.add_floating_ip_association.assert_called_with(
+ 'some-server-fip-assoc',
+ 'some-server-fip',
+ 'some-server-some-network-port'
+ )
+
+ mock_template.add_server.assert_called_with(
+ 'some-server', 'some-image',
+ flavor='some-flavor',
+ flavors=['flavor1', 'flavor2', 'some-flavor'],
+ ports=['some-server-some-network-port'],
+ user=self.mock_context.user,
+ key_name=self.mock_context.keypair_name,
+ user_data='',
+ scheduler_hints='hints',
+ availability_zone='zone')
+
+ def test_override_ip(self):
+ network_ports = {
+ 'mgmt': ['mgmt'],
+ 'uplink_0': [
+ {'xe0': {'local_ip': '10.44.0.20', 'netmask': '255.255.255.0'}},
+ ],
+ 'downlink_0': [
+ {'xe1': {'local_ip': '10.44.0.30', 'netmask': '255.255.255.0'}},
+ ],
+ }
+ attrs = {
+ 'image': 'some-image', 'flavor': 'some-flavor',
+ }
+ test_server = model.Server('foo', self.mock_context, attrs)
+ test_server.interfaces = {
+ "xe0": {
+ "local_ip": "1.2.3.4",
+ "netmask": "255.255.255.0",
+ },
+ "xe1": {
+ "local_ip": "1.2.3.5",
+ "netmask": "255.255.255.0"
+ }
+ }
+ test_server.network_ports = network_ports
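+ # override_ip should replace xe0's address data with the values
+ # declared under 'uplink_0' in network_ports.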
+
+ test_server.override_ip("uplink_0", {"port": "xe0"})
+ self.assertEqual(test_server.interfaces["xe0"], network_ports["uplink_0"][0]["xe0"])
+
+ def test_override_ip_multiple(self):
+ network_ports = {
+ 'mgmt': ['mgmt'],
+ 'uplink_0': [
+ {'xe0': {'local_ip': '10.44.0.20', 'netmask': '255.255.255.0'}},
+ {'xe0': {'local_ip': '10.44.0.21', 'netmask': '255.255.255.0'}},
+ ],
+ 'downlink_0': [
+ {'xe1': {'local_ip': '10.44.0.30', 'netmask': '255.255.255.0'}},
+ ],
+ }
+ attrs = {
+ 'image': 'some-image', 'flavor': 'some-flavor',
+ }
+ test_server = model.Server('foo', self.mock_context, attrs)
+ test_server.interfaces = {
+ "xe0": {
+ "local_ip": "1.2.3.4",
+ "netmask": "255.255.255.0",
+ },
+ "xe1": {
+ "local_ip": "1.2.3.5",
+ "netmask": "255.255.255.0"
+ }
+ }
+ test_server.network_ports = network_ports
+ test_server.override_ip("uplink_0", {"port": "xe0"})
+ self.assertEqual(test_server.interfaces["xe0"], network_ports["uplink_0"][0]["xe0"])
+
+ def test_override_ip_mixed(self):
+ network_ports = {
+ 'mgmt': ['mgmt'],
+ 'uplink_0': [
+ 'xe0',
+ {'xe0': {'local_ip': '10.44.0.21', 'netmask': '255.255.255.0'}},
+ ],
+ 'downlink_0': [
+ {'xe1': {'local_ip': '10.44.0.30', 'netmask': '255.255.255.0'}},
+ ],
+ }
+ attrs = {
+ 'image': 'some-image', 'flavor': 'some-flavor',
+ }
+ test_server = model.Server('foo', self.mock_context, attrs)
+ test_server.interfaces = {
+ "xe0": {
+ "local_ip": "1.2.3.4",
+ "netmask": "255.255.255.0",
+ },
+ "xe1": {
+ "local_ip": "1.2.3.5",
+ "netmask": "255.255.255.0"
+ }
+ }
+ test_server.network_ports = network_ports
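+ # In a mixed list the bare 'xe0' entry carries no override data, so the
+ # dict entry at index 1 is the one expected to be applied.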
+ test_server.override_ip("uplink_0", {"port": "xe0"})
+ self.assertEqual(test_server.interfaces["xe0"], network_ports["uplink_0"][1]["xe0"])
+
+ @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
+ def test__add_instance_with_ip_override_invalid_syntax(self, mock_template):
+ network_ports = {
+ 'mgmt': ['mgmt'],
+ 'uplink_0': 'xe0',
+ 'downlink_0': [
+ {'xe1': {'local_ip': '10.44.0.30', 'netmask': '255.255.255.0'}},
+ ],
+ }
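+ # 'uplink_0' is a bare string instead of a list of port mappings, which
+ # _add_instance is expected to reject with a SyntaxError.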
+ attrs = {
+ 'image': 'some-image', 'flavor': 'some-flavor',
+ }
+ test_server = model.Server('foo', self.mock_context, attrs)
+ test_server.network_ports = network_ports
+ context = type("Context", (object,), {})
+ # can't use Mock because Mock.name is reserved
+ context.name = "context"
+ networks = [model.Network(n, context, {}) for n in network_ports]
+
+ with self.assertRaises(SyntaxError):
+ test_server._add_instance(mock_template, 'some-server',
+ networks, 'hints')
+
+ @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
+ def test__add_instance_with_ip_override(self, mock_template):
+ network_ports = {
+ 'mgmt': ['mgmt'],
+ 'uplink_0': [
+ {'xe0': {'local_ip': '10.44.0.20', 'netmask': '255.255.255.0'}},
+ ],
+ 'downlink_0': [
+ {'xe1': {'local_ip': '10.44.0.30', 'netmask': '255.255.255.0'}},
+ ],
+ }
+ attrs = {
+ 'image': 'some-image', 'flavor': 'some-flavor',
+ }
+ test_server = model.Server('foo', self.mock_context, attrs)
+ test_server.network_ports = network_ports
+ context = type("Context", (object,), {})
+ # can't use Mock because Mock.name is reserved
+ context.name = "context"
+ networks = [model.Network(n, context, {}) for n in network_ports]
+
+ test_server._add_instance(mock_template, 'some-server',
+ networks, 'hints')
+ self.assertEqual(test_server.ports, {
+ 'downlink_0': [{'port': 'xe1', 'stack_name': 'some-server-xe1-port'}],
+ 'mgmt': [{'port': 'mgmt', 'stack_name': 'some-server-mgmt-port'}],
+ 'uplink_0': [{'port': 'xe0', 'stack_name': 'some-server-xe0-port'}]
+ })
+
+ @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
+ def test__add_instance_with_multiple_ip_override(self, mock_template):
+ network_ports = {
+ 'mgmt': ['mgmt'],
+ 'uplink_0': [
+ {'xe0': {'local_ip': '10.44.0.20', 'netmask': '255.255.255.0'}},
+ {'xe0': {'local_ip': '10.44.0.21', 'netmask': '255.255.255.0'}},
+ ],
+ 'downlink_0': [
+ {'xe1': {'local_ip': '10.44.0.30', 'netmask': '255.255.255.0'}},
+ ],
+ }
+ attrs = {
+ 'image': 'some-image', 'flavor': 'some-flavor',
+ }
+ test_server = model.Server('foo', self.mock_context, attrs)
+ test_server.network_ports = network_ports
+ context = type("Context", (object,), {})
+ # can't use Mock because Mock.name is reserved
+ context.name = "context"
+ networks = [model.Network(n, context, {}) for n in network_ports]
+
+ test_server._add_instance(mock_template, 'some-server',
+ networks, 'hints')
+ self.assertEqual(test_server.ports, {
+ 'downlink_0': [{'port': 'xe1', 'stack_name': 'some-server-xe1-port'}],
+ 'mgmt': [{'port': 'mgmt', 'stack_name': 'some-server-mgmt-port'}],
+ 'uplink_0': [{'port': 'xe0', 'stack_name': 'some-server-xe0-port'},
+ # producing duplicate port entries here is intentional; it is left
+ # to Heat to detect the duplicate ports and raise an error
+ {'port': 'xe0', 'stack_name': 'some-server-xe0-port'}]
+ })
+
+ @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
+ def test__add_instance_with_user_data(self, mock_template):
+ user_data = "USER_DATA"
+ attrs = {
+ 'image': 'some-image', 'flavor': 'some-flavor',
+ 'user_data': user_data,
+ }
+ test_server = model.Server('foo', self.mock_context, attrs)
+
+ test_server._add_instance(mock_template, 'some-server',
+ [], 'hints')
+
+ mock_template.add_server.assert_called_with(
+ 'some-server', 'some-image',
+ flavor='some-flavor',
+ flavors=self.mock_context.flavors,
+ ports=[],
+ user=self.mock_context.user,
+ key_name=self.mock_context.keypair_name,
+ user_data=user_data,
+ scheduler_hints='hints',
+ availability_zone=None)
+
+ @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
+ def test__add_instance_with_availability_zone(self, mock_template):
+ attrs = {
+ 'image': 'some-image', 'flavor': 'some-flavor',
+ 'availability_zone': 'zone',
+ }
+ test_server = model.Server('foo', self.mock_context, attrs)
+
+ test_server._add_instance(mock_template, 'some-server',
+ [], 'hints')
+
+ mock_template.add_server.assert_called_with(
+ 'some-server', 'some-image',
+ flavor='some-flavor',
+ flavors=self.mock_context.flavors,
+ ports=[],
+ user=self.mock_context.user,
+ key_name=self.mock_context.keypair_name,
+ user_data='',
+ scheduler_hints='hints',
+ availability_zone='zone')
+
+ @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
+ def test__add_instance_plus_flavor(self, mock_template):
+
+ user_data = ''
+ attrs = {
+ 'image': 'some-image', 'flavor': 'flavor1',
+ 'flavors': ['flavor2'], 'user_data': user_data
+ }
+ test_server = model.Server('ServerFlavor-2', self.mock_context, attrs)
+
+ self.mock_context.flavors = ['flavor2']
+ mock_network = mock.Mock()
+ mock_network.allowed_address_pairs = ["1", "2"]
+ mock_network.vnic_type = 'normal'
+ mock_network.net_flags = {}
+ mock_network.configure_mock(name='some-network', stack_name='some-network-stack',
+ subnet_stack_name='some-network-stack-subnet',
+ provider='some-provider')
+
+ test_server._add_instance(mock_template, 'ServerFlavor-2',
+ [mock_network], 'hints')
+
+ mock_template.add_port.assert_called_with(
+ 'ServerFlavor-2-some-network-port',
+ mock_network,
+ provider=mock_network.provider,
+ sec_group_id=self.mock_context.secgroup_name,
+ allowed_address_pairs=mock_network.allowed_address_pairs)
+
+ mock_template.add_server.assert_called_with(
+ 'ServerFlavor-2', 'some-image',
+ flavor='flavor1',
+ flavors=['flavor2'],
+ ports=['ServerFlavor-2-some-network-port'],
+ user=self.mock_context.user,
+ key_name=self.mock_context.keypair_name,
+ user_data=user_data,
+ scheduler_hints='hints',
+ availability_zone=None)
+
+ @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
+ def test__add_instance_misc(self, mock_template):
+
+ user_data = ''
+ attrs = {
+ 'image': 'some-image', 'flavor': 'flavor1',
+ 'flavors': ['flavor2'], 'user_data': user_data
+ }
+ test_server = model.Server('ServerFlavor-3', self.mock_context, attrs)
+
+ self.mock_context.flavors = ['flavor2']
+ self.mock_context.flavor = {'vcpus': 4}
+ mock_network = mock.Mock()
+ mock_network.name = 'some-network'
+ mock_network.stack_name = 'some-network-stack'
+ mock_network.subnet_stack_name = 'some-network-stack-subnet'
+ mock_network.net_flags = {}
+
+ test_server._add_instance(mock_template, 'ServerFlavor-3',
+ [mock_network], 'hints')
+
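+ # NOTE: the add_port()/add_flavor() calls below are plain calls on the
+ # mock template, not assertions; only add_server is actually verified.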
+ mock_template.add_port(
+ 'ServerFlavor-3-some-network-port',
+ mock_network.stack_name,
+ mock_network.subnet_stack_name,
+ sec_group_id=self.mock_context.secgroup_name)
+
+ mock_template.add_flavor(
+ vcpus=4,
+ ram=2048,
+ disk=1)
+
+ mock_template.add_flavor(
+ vcpus=4,
+ ram=2048,
+ disk=1,
+ extra_specs={'cat': 1, 'dog': 2, 'dragon': 1000})
+
+ mock_template.add_server.assert_called_with(
+ 'ServerFlavor-3', 'some-image',
+ flavor='flavor1',
+ flavors=['flavor2'],
+ ports=['ServerFlavor-3-some-network-port'],
+ user=self.mock_context.user,
+ key_name=self.mock_context.keypair_name,
+ user_data=user_data,
+ scheduler_hints='hints',
+ availability_zone=None)
diff --git a/yardstick/tests/unit/benchmark/contexts/test_node.py b/yardstick/tests/unit/benchmark/contexts/test_node.py
new file mode 100644
index 000000000..da16074d9
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/contexts/test_node.py
@@ -0,0 +1,392 @@
+##############################################################################
+# Copyright (c) 2015-2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import os
+import errno
+
+import mock
+import unittest
+
+from yardstick.benchmark.contexts import base
+from yardstick.benchmark.contexts import node
+from yardstick.common import constants as consts
+from yardstick.common import exceptions
+from yardstick.common import yaml_loader
+
+
+class NodeContextTestCase(unittest.TestCase):
+
+ PREFIX = 'yardstick.benchmark.contexts.node'
+
+ NODES_SAMPLE = "nodes_sample.yaml"
+ NODES_DUPLICATE_SAMPLE = "nodes_duplicate_sample.yaml"
+
+ def setUp(self):
+ self.test_context = node.NodeContext()
+ self.addCleanup(self._remove_contexts)
+ self.os_path_join = os.path.join
+ self.attrs = {
+ 'name': 'foo',
+ 'task_id': '1234567890',
+ 'file': self._get_file_abspath(self.NODES_SAMPLE)
+ }
+
+ @staticmethod
+ def _remove_contexts():
+ for context in base.Context.list:
+ context._delete_context()
+ base.Context.list = []
+
+ def _get_file_abspath(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ file_path = self.os_path_join(curr_path, filename)
+ return file_path
+
+ def test___init__(self):
+ self.assertIsNone(self.test_context._name)
+ self.assertIsNone(self.test_context.file_path)
+ self.assertEqual(self.test_context.nodes, [])
+ self.assertEqual(self.test_context.controllers, [])
+ self.assertEqual(self.test_context.computes, [])
+ self.assertEqual(self.test_context.baremetals, [])
+ self.assertEqual(self.test_context.env, {})
+ self.assertEqual(self.test_context.attrs, {})
+
+ @mock.patch.object(yaml_loader, 'read_yaml_file')
+ @mock.patch('{}.os.path.join'.format(PREFIX))
+ def test_init_negative(self, mock_path_join, read_mock):
+ special_path = '/foo/bar/error_file'
+ error_path = self._get_file_abspath("error_file")
+
+ def path_join(*args):
+ if args == (consts.YARDSTICK_ROOT_PATH, error_path):
+ return special_path
+ return self.os_path_join(*args)
+
+ # we can't count mock_path_join calls because
+ # it can catch join calls for .pyc files.
+ mock_path_join.side_effect = path_join
+ read_calls = 0
+
+ with self.assertRaises(KeyError):
+ self.test_context.init({})
+
+ self.assertEqual(read_mock.call_count, read_calls)
+
+ attrs = {
+ 'name': 'foo',
+ 'task_id': '1234567890',
+ 'file': error_path,
+ }
+ read_mock.side_effect = IOError(errno.EBUSY, 'busy')
+ with self.assertRaises(IOError) as raised:
+ self.test_context.init(attrs)
+
+ read_calls += 1
+ self.assertEqual(read_mock.call_count, read_calls)
+ self.assertIn(attrs['file'], self.test_context.file_path)
+ self.assertEqual(raised.exception.errno, errno.EBUSY)
+ self.assertEqual(str(raised.exception), str(read_mock.side_effect))
+
+ read_mock.side_effect = IOError(errno.ENOENT, 'not found')
+ with self.assertRaises(IOError) as raised:
+ self.test_context.init(attrs)
+
+ read_calls += 2
+ self.assertEqual(read_mock.call_count, read_calls)
+ self.assertEqual(self.test_context.file_path, special_path)
+ self.assertEqual(raised.exception.errno, errno.ENOENT)
+ self.assertEqual(str(raised.exception), str(read_mock.side_effect))
+
+ def test__dispatch_script(self):
+ self.test_context.init(self.attrs)
+
+ self.test_context.env = {'bash': [{'script': 'dummy'}]}
+ self.test_context._execute_script = mock.Mock()
+ self.assertIsNone(self.test_context._dispatch_script('bash'))
+
+ def test__dispatch_ansible(self):
+ self.test_context.init(self.attrs)
+
+ self.test_context.env = {'ansible': [{'script': 'dummy'}]}
+ self.test_context._do_ansible_job = mock.Mock()
+ self.assertIsNone(self.test_context._dispatch_ansible('ansible'))
+ self.test_context.env = {}
+ self.assertIsNone(self.test_context._dispatch_ansible('ansible'))
+
+ @mock.patch("{}.AnsibleCommon".format(PREFIX))
+ def test__do_ansible_job(self, *args):
+ self.assertIsNone(self.test_context._do_ansible_job('dummy'))
+
+ def test_init(self):
+ self.test_context.init(self.attrs)
+
+ self.assertEqual(self.test_context.name, "foo-12345678")
+ self.assertEqual(len(self.test_context.nodes), 4)
+ self.assertEqual(len(self.test_context.controllers), 2)
+ self.assertEqual(len(self.test_context.computes), 1)
+ self.assertEqual(self.test_context.computes[0]["name"], "node3")
+ self.assertEqual(len(self.test_context.baremetals), 1)
+ self.assertEqual(self.test_context.baremetals[0]["name"], "node4")
+
+ def test__get_server_with_dict_attr_name(self):
+ self.test_context.init(self.attrs)
+ result = self.test_context._get_server({'name': 'node1.foo-12345678'})
+
+ self.assertIsNone(result)
+
+ def test__get_server_not_found(self):
+ self.test_context.init(self.attrs)
+
+ self.assertIsNone(self.test_context._get_server('bar.foo-12345678'))
+
+ def test__get_server_mismatch(self):
+ self.test_context.init(self.attrs)
+
+ self.assertIsNone(self.test_context._get_server('bar.foo1'))
+
+ def test__get_server_duplicate(self):
+ self.attrs['file'] = self._get_file_abspath(
+ self.NODES_DUPLICATE_SAMPLE)
+ self.test_context.init(self.attrs)
+
+ with self.assertRaises(ValueError):
+ self.test_context._get_server('node1.foo-12345678')
+
+ def test__get_server_found(self):
+ self.test_context.init(self.attrs)
+
+ result = self.test_context._get_server('node1.foo-12345678')
+
+ self.assertEqual(result['ip'], '10.229.47.137')
+ self.assertEqual(result['name'], 'node1.foo-12345678')
+ self.assertEqual(result['user'], 'root')
+ self.assertEqual(result['key_filename'], '/root/.yardstick_key')
+
+ def test__get_physical_nodes(self):
+ self.test_context.init(self.attrs)
+ nodes = self.test_context._get_physical_nodes()
+ self.assertEqual(nodes, self.test_context.nodes)
+
+ def test__get_physical_node_for_server(self):
+ self.test_context.init(self.attrs)
+
+ # When server is not from this context
+ result = self.test_context._get_physical_node_for_server('node1.another-context')
+ self.assertIsNone(result)
+
+ # When node_name is not from this context
+ result = self.test_context._get_physical_node_for_server('fake.foo-12345678')
+ self.assertIsNone(result)
+
+ result = self.test_context._get_physical_node_for_server('node1.foo-12345678')
+ self.assertEqual(result, 'node1.foo')
+
+ def test_update_collectd_options_for_node(self):
+ self.test_context.init(self.attrs)
+ options = {'collectd': {'interval': 5}}
+
+ with self.assertRaises(exceptions.ContextUpdateCollectdForNodeError):
+ self.test_context.update_collectd_options_for_node(options, 'fake.foo-12345678')
+
+ self.test_context.update_collectd_options_for_node(options, 'node1.foo-12345678')
+
+ node_collectd_options = [node for node in self.test_context.nodes
+ if node['name'] == 'node1'][0]['collectd']
+
+ self.assertEqual(node_collectd_options, options)
+
+ @mock.patch('{}.NodeContext._dispatch_script'.format(PREFIX))
+ def test_deploy(self, dispatch_script_mock):
+ obj = node.NodeContext()
+ self.addCleanup(obj._delete_context)
+ obj.env = {
+ 'type': 'script'
+ }
+ obj.deploy()
+ dispatch_script_mock.assert_called_once()
+
+ @mock.patch('{}.NodeContext._dispatch_ansible'.format(PREFIX))
+ def test_deploy_ansible(self, dispatch_ansible_mock):
+ obj = node.NodeContext()
+ self.addCleanup(obj._delete_context)
+ obj.env = {
+ 'type': 'ansible'
+ }
+ obj.deploy()
+ dispatch_ansible_mock.assert_called_once()
+
+ @mock.patch('{}.NodeContext._dispatch_script'.format(PREFIX))
+ def test_undeploy(self, dispatch_script_mock):
+ obj = node.NodeContext()
+ obj.env = {
+ 'type': 'script'
+ }
+ obj.undeploy()
+ dispatch_script_mock.assert_called_once()
+
+ @mock.patch('{}.NodeContext._dispatch_ansible'.format(PREFIX))
+ def test_undeploy_ansible(self, dispatch_ansible_mock):
+ obj = node.NodeContext()
+ obj.env = {
+ 'type': 'ansible'
+ }
+ obj.undeploy()
+ dispatch_ansible_mock.assert_called_once()
+
+ @mock.patch('{}.ssh.SSH._put_file_shell'.format(PREFIX))
+ @mock.patch('{}.ssh.SSH.execute'.format(PREFIX))
+ def test_execute_remote_script(self, execute_mock, put_file_mock):
+ obj = node.NodeContext()
+ self.addCleanup(obj._delete_context)
+ obj.env = {'prefix': 'yardstick.benchmark.scenarios.compute'}
+ node_name_args = 'node5'
+ obj.nodes = [{
+ 'name': node_name_args,
+ 'user': 'ubuntu',
+ 'ip': '10.10.10.10',
+ 'pwd': 'ubuntu',
+ }]
+
+ info = {'script': 'computecapacity.bash'}
+ execute_mock.return_value = (0, '', '')
+ obj._execute_remote_script('node5', info)
+
+ put_file_mock.assert_called_once()
+ execute_mock.assert_called()
+
+ @mock.patch('{}.NodeContext._execute_local_script'.format(PREFIX))
+ def test_execute_script_local(self, local_execute_mock):
+ node_name = 'local'
+ info = {}
+ obj = node.NodeContext()
+ self.addCleanup(obj._delete_context)
+ obj._execute_script(node_name, info)
+ local_execute_mock.assert_called_once()
+
+ @mock.patch('{}.NodeContext._execute_remote_script'.format(PREFIX))
+ def test_execute_script_remote(self, remote_execute_mock):
+ node_name = 'node5'
+ info = {}
+ obj = node.NodeContext()
+ self.addCleanup(obj._delete_context)
+ obj._execute_script(node_name, info)
+ remote_execute_mock.assert_called_once()
+
+ def test_get_script(self):
+ script_args = 'hello.bash'
+ info_args = {
+ 'script': script_args
+ }
+ obj = node.NodeContext()
+ self.addCleanup(obj._delete_context)
+ script, options = obj._get_script(info_args)
+ self.assertEqual(script_args, script)
+ self.assertEqual('', options)
+
+ def test_node_info(self):
+ node_name_args = 'node5'
+ obj = node.NodeContext()
+ self.addCleanup(obj._delete_context)
+ obj.nodes = [{'name': node_name_args, 'check': node_name_args}]
+ node_info = obj._get_node_info(node_name_args)
+ self.assertEqual(node_info.get('check'), node_name_args)
+
+ @mock.patch('{}.ssh.SSH.wait'.format(PREFIX))
+ def test_get_client(self, wait_mock):
+ node_name_args = 'node5'
+ obj = node.NodeContext()
+ self.addCleanup(obj._delete_context)
+ obj.nodes = [{
+ 'name': node_name_args,
+ 'user': 'ubuntu',
+ 'ip': '10.10.10.10',
+ 'pwd': 'ubuntu',
+ }]
+ obj._get_client(node_name_args)
+ wait_mock.assert_called_once()
+
+ def test_get_server(self):
+ self.test_context.init(self.attrs)
+ self.test_context._name = 'foo'
+ self.test_context._task_id = '1234567890'
+ self.test_context._name_task_id = '{}-{}'.format(
+ self.test_context._name, self.test_context._task_id[:8])
+ self.assertEqual('foo-12345678', self.test_context.name)
+ self.assertIsNotNone(self.test_context._task_id)
+
+ result = self.test_context.get_server('node1.foo-12345678')
+
+ self.assertEqual(result['ip'], '10.229.47.137')
+ self.assertEqual(result['name'], 'node1.foo-12345678')
+ self.assertEqual(result['user'], 'root')
+ self.assertEqual(result['key_filename'], '/root/.yardstick_key')
+
+ def test_get_server_server_not_in_context(self):
+ self.test_context.init(self.attrs)
+
+ with self.assertRaises(ValueError):
+ self.test_context.get_server('my2.foo-12345678')
+
+ def test_get_context_from_server(self):
+ self.test_context._name = 'vnf1'
+ self.test_context._task_id = '1234567890'
+ self.test_context._name_task_id = '{}-{}'.format(
+ self.test_context._name, self.test_context._task_id[:8])
+ self.test_context.nodes = [{'name': 'my', 'value': 100}]
+ self.test_context.attrs = {'attr1': 200}
+
+ self.assertIs(
+ self.test_context.get_context_from_server('my.vnf1-12345678'),
+ self.test_context)
+
+ # TODO: Split this into more granular tests
+ def test__get_network(self):
+ network1 = {
+ 'name': 'net_1',
+ 'vld_id': 'vld111',
+ 'segmentation_id': 'seg54',
+ 'network_type': 'type_a',
+ 'physical_network': 'phys',
+ }
+ network2 = {
+ 'name': 'net_2',
+ 'vld_id': 'vld999',
+ }
+ self.test_context.networks = {
+ 'a': network1,
+ 'b': network2,
+ }
+
+ attr_name = {}
+ self.assertIsNone(self.test_context._get_network(attr_name))
+
+ attr_name = {'vld_id': 'vld777'}
+ self.assertIsNone(self.test_context._get_network(attr_name))
+
+ self.assertIsNone(self.test_context._get_network(None))
+
+ attr_name = 'vld777'
+ self.assertIsNone(self.test_context._get_network(attr_name))
+
+ attr_name = {'vld_id': 'vld999'}
+ expected = {
+ "name": 'net_2',
+ "vld_id": 'vld999',
+ "segmentation_id": None,
+ "network_type": None,
+ "physical_network": None,
+ }
+ result = self.test_context._get_network(attr_name)
+ self.assertDictEqual(result, expected)
+
+ attr_name = 'a'
+ expected = network1
+ result = self.test_context._get_network(attr_name)
+ self.assertDictEqual(result, expected)
diff --git a/yardstick/tests/unit/benchmark/core/__init__.py b/yardstick/tests/unit/benchmark/core/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/core/__init__.py
diff --git a/yardstick/tests/unit/benchmark/core/no_constraint_no_args_scenario_sample.yaml b/yardstick/tests/unit/benchmark/core/no_constraint_no_args_scenario_sample.yaml
new file mode 100644
index 000000000..44c4a31ff
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/core/no_constraint_no_args_scenario_sample.yaml
@@ -0,0 +1,21 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+# Huawei US bare daily task suite
+
+schema: "yardstick:suite:0.1"
+
+name: "os-nosdn-nofeature-ha"
+test_cases_dir: "tests/opnfv/test_cases/"
+test_cases:
+-
+ file_name: opnfv_yardstick_tc037.yaml
+-
+ file_name: opnfv_yardstick_tc043.yaml
+
diff --git a/yardstick/tests/unit/benchmark/core/no_constraint_with_args_scenario_sample.yaml b/yardstick/tests/unit/benchmark/core/no_constraint_with_args_scenario_sample.yaml
new file mode 100644
index 000000000..ced13f19e
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/core/no_constraint_with_args_scenario_sample.yaml
@@ -0,0 +1,23 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+# Huawei US bare daily task suite
+
+schema: "yardstick:suite:0.1"
+
+name: "os-nosdn-nofeature-ha"
+test_cases_dir: "tests/opnfv/test_cases/"
+test_cases:
+-
+ file_name: opnfv_yardstick_tc037.yaml
+-
+ file_name: opnfv_yardstick_tc043.yaml
+ task_args:
+ huawei-pod1: '{"host": "node1.LF","target": "node2.LF"}'
+
diff --git a/yardstick/tests/unit/benchmark/core/test_plugin.py b/yardstick/tests/unit/benchmark/core/test_plugin.py
new file mode 100644
index 000000000..53621316b
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/core/test_plugin.py
@@ -0,0 +1,148 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import copy
+import os
+import pkg_resources
+
+import mock
+import six
+import testtools
+
+from yardstick import ssh
+from yardstick.benchmark.core import plugin
+from yardstick.tests import fixture
+
+
+class PluginTestCase(testtools.TestCase):
+
+ FILE = """
+schema: "yardstick:plugin:0.1"
+
+plugins:
+ name: sample
+
+deployment:
+ ip: 10.1.0.50
+ user: root
+ password: root
+"""
+
+ NAME = 'sample'
+ DEPLOYMENT = {'ip': '10.1.0.50', 'user': 'root', 'password': 'root'}
+
+ def setUp(self):
+ super(PluginTestCase, self).setUp()
+ self.plugin_parser = plugin.PluginParser(mock.Mock())
+ self.plugin = plugin.Plugin()
+ self.useFixture(fixture.PluginParserFixture(PluginTestCase.FILE))
+
+ self._mock_ssh_from_node = mock.patch.object(ssh.SSH, 'from_node')
+ self.mock_ssh_from_node = self._mock_ssh_from_node.start()
+ self.mock_ssh_obj = mock.Mock()
+ self.mock_ssh_from_node.return_value = self.mock_ssh_obj
+ self.mock_ssh_obj.wait = mock.Mock()
+ self.mock_ssh_obj._put_file_shell = mock.Mock()
+ self._mock_log_info = mock.patch.object(plugin.LOG, 'info')
+ self.mock_log_info = self._mock_log_info.start()
+
+ self.addCleanup(self._cleanup)
+
+ def _cleanup(self):
+ self._mock_ssh_from_node.stop()
+ self._mock_log_info.stop()
+
+ @mock.patch.object(six.moves.builtins, 'print')
+ def test_install(self, *args):
+ args = mock.Mock()
+ args.input_file = [mock.Mock()]
+ with mock.patch.object(self.plugin, '_install_setup') as \
+ mock_install, \
+ mock.patch.object(self.plugin, '_run') as mock_run:
+ self.plugin.install(args)
+ mock_install.assert_called_once_with(PluginTestCase.NAME,
+ PluginTestCase.DEPLOYMENT)
+ mock_run.assert_called_once_with(PluginTestCase.NAME)
+
+ @mock.patch.object(six.moves.builtins, 'print')
+ def test_remove(self, *args):
+ args = mock.Mock()
+ args.input_file = [mock.Mock()]
+ with mock.patch.object(self.plugin, '_remove_setup') as \
+ mock_remove, \
+ mock.patch.object(self.plugin, '_run') as mock_run:
+ self.plugin.remove(args)
+ mock_remove.assert_called_once_with(PluginTestCase.NAME,
+ PluginTestCase.DEPLOYMENT)
+ mock_run.assert_called_once_with(PluginTestCase.NAME)
+
+ @mock.patch.object(pkg_resources, 'resource_filename',
+ return_value='script')
+ def test__install_setup(self, mock_resource_filename):
+ plugin_name = 'plugin_name'
+ self.plugin._install_setup(plugin_name, PluginTestCase.DEPLOYMENT)
+ mock_resource_filename.assert_called_once_with(
+ 'yardstick.resources', 'scripts/install/' + plugin_name + '.bash')
+ self.mock_ssh_from_node.assert_called_once_with(
+ PluginTestCase.DEPLOYMENT)
+ self.mock_ssh_obj.wait.assert_called_once_with(timeout=600)
+ self.mock_ssh_obj._put_file_shell.assert_called_once_with(
+ 'script', '~/{0}.sh'.format(plugin_name))
+
+ @mock.patch.object(pkg_resources, 'resource_filename',
+ return_value='script')
+ @mock.patch.object(os, 'environ', return_value='1.2.3.4')
+ def test__install_setup_with_ip_local(self, mock_os_environ,
+ mock_resource_filename):
+ plugin_name = 'plugin_name'
+ deployment = copy.deepcopy(PluginTestCase.DEPLOYMENT)
+ deployment['ip'] = 'local'
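+ # With a 'local' deployment IP, the jump host address taken from the
+ # (mocked) environment is expected to be passed as an SSH override below.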
+ self.plugin._install_setup(plugin_name, deployment)
+ mock_os_environ.__getitem__.assert_called_once_with('JUMP_HOST_IP')
+ mock_resource_filename.assert_called_once_with(
+ 'yardstick.resources',
+ 'scripts/install/' + plugin_name + '.bash')
+ self.mock_ssh_from_node.assert_called_once_with(
+ deployment, overrides={'ip': os.environ["JUMP_HOST_IP"]})
+ self.mock_ssh_obj.wait.assert_called_once_with(timeout=600)
+ self.mock_ssh_obj._put_file_shell.assert_called_once_with(
+ 'script', '~/{0}.sh'.format(plugin_name))
+
+ @mock.patch.object(pkg_resources, 'resource_filename',
+ return_value='script')
+ def test__remove_setup(self, mock_resource_filename):
+ plugin_name = 'plugin_name'
+ self.plugin._remove_setup(plugin_name, PluginTestCase.DEPLOYMENT)
+ mock_resource_filename.assert_called_once_with(
+ 'yardstick.resources',
+ 'scripts/remove/' + plugin_name + '.bash')
+ self.mock_ssh_from_node.assert_called_once_with(
+ PluginTestCase.DEPLOYMENT)
+ self.mock_ssh_obj.wait.assert_called_once_with(timeout=600)
+ self.mock_ssh_obj._put_file_shell.assert_called_once_with(
+ 'script', '~/{0}.sh'.format(plugin_name))
+
+ @mock.patch.object(pkg_resources, 'resource_filename',
+ return_value='script')
+ @mock.patch.object(os, 'environ', return_value='1.2.3.4')
+ def test__remove_setup_with_ip_local(self, mock_os_environ,
+ mock_resource_filename):
+ plugin_name = 'plugin_name'
+ deployment = copy.deepcopy(PluginTestCase.DEPLOYMENT)
+ deployment['ip'] = 'local'
+ self.plugin._remove_setup(plugin_name, deployment)
+ mock_os_environ.__getitem__.assert_called_once_with('JUMP_HOST_IP')
+ mock_resource_filename.assert_called_once_with(
+ 'yardstick.resources',
+ 'scripts/remove/' + plugin_name + '.bash')
+ self.mock_ssh_from_node.assert_called_once_with(
+ deployment, overrides={'ip': os.environ["JUMP_HOST_IP"]})
+ self.mock_ssh_obj.wait.assert_called_once_with(timeout=600)
+ self.mock_ssh_obj._put_file_shell.assert_called_once_with(
+ 'script', '~/{0}.sh'.format(plugin_name))
diff --git a/yardstick/tests/unit/benchmark/core/test_report.py b/yardstick/tests/unit/benchmark/core/test_report.py
new file mode 100644
index 000000000..89fb1e90a
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/core/test_report.py
@@ -0,0 +1,584 @@
+##############################################################################
+# Copyright (c) 2017 Rajesh Kudaka.
+# Copyright (c) 2018-2019 Intel Corporation.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import six
+import unittest
+import uuid
+
+from api.utils import influx
+from yardstick.benchmark.core import report
+from yardstick.cmd.commands import change_osloobj_to_paras
+
+GOOD_YAML_NAME = 'fake_name'
+GOOD_TASK_ID = str(uuid.uuid4())
+GOOD_DB_FIELDKEYS = [{'fieldKey': 'fake_key'}]
+GOOD_DB_METRICS = [{
+ 'fake_key': 1.234,
+ 'time': '0000-00-00T12:34:56.789012Z',
+ }]
+GOOD_TIMESTAMP = ['12:34:56.789012']
+BAD_YAML_NAME = 'F@KE_NAME'
+BAD_TASK_ID = 'aaaaaa-aaaaaaaa-aaaaaaaaaa-aaaaaa'
+MORE_DB_FIELDKEYS = [
+ {'fieldKey': 'fake_key'},
+ {'fieldKey': 'str_str'},
+ {'fieldKey': u'str_unicode'},
+ {u'fieldKey': 'unicode_str'},
+ {u'fieldKey': u'unicode_unicode'},
+ ]
+MORE_DB_METRICS = [{
+ 'fake_key': None,
+ 'time': '0000-00-00T00:00:00.000000Z',
+ }, {
+ 'fake_key': 123,
+ 'time': '0000-00-00T00:00:01.000000Z',
+ }, {
+ 'fake_key': 4.56,
+ 'time': '0000-00-00T00:00:02.000000Z',
+ }, {
+ 'fake_key': 9876543210987654321,
+ 'time': '0000-00-00T00:00:03.000000Z',
+ }, {
+ 'fake_key': 'str_str value',
+ 'time': '0000-00-00T00:00:04.000000Z',
+ }, {
+ 'fake_key': u'str_unicode value',
+ 'time': '0000-00-00T00:00:05.000000Z',
+ }, {
+ u'fake_key': 'unicode_str value',
+ 'time': '0000-00-00T00:00:06.000000Z',
+ }, {
+ u'fake_key': u'unicode_unicode value',
+ 'time': '0000-00-00T00:00:07.000000Z',
+ }, {
+ 'fake_key': '7.89',
+ 'time': '0000-00-00T00:00:08.000000Z',
+ }, {
+ 'fake_key': '1011',
+ 'time': '0000-00-00T00:00:09.000000Z',
+ }, {
+ 'fake_key': '9876543210123456789',
+ 'time': '0000-00-00T00:00:10.000000Z',
+ }]
+MORE_TIMESTAMP = ['00:00:%02d.000000' % n for n in range(len(MORE_DB_METRICS))]
+MORE_EMPTY_DATA = [None] * len(MORE_DB_METRICS)
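+# The PY2 branches below are the float approximations of the oversized
+# integers, reflecting the coercion expected from the report code on Python 2.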
+MORE_EXPECTED_TABLE_VALS = {
+ 'Timestamp': MORE_TIMESTAMP,
+ 'fake_key': [
+ None,
+ 123,
+ 4.56,
+ 9876543210987654321 if six.PY3 else 9.876543210987655e+18,
+ None,
+ None,
+ None,
+ None,
+ 7.89,
+ 1011,
+ 9876543210123456789 if six.PY3 else 9.876543210123457e+18,
+ ],
+ 'str_str': MORE_EMPTY_DATA,
+ 'str_unicode': MORE_EMPTY_DATA,
+ 'unicode_str': MORE_EMPTY_DATA,
+ 'unicode_unicode': MORE_EMPTY_DATA,
+ }
+MORE_EXPECTED_DATASETS = [{
+ 'label': key,
+ 'data': MORE_EXPECTED_TABLE_VALS[key],
+ }
+ for key in map(str, [field['fieldKey'] for field in MORE_DB_FIELDKEYS])
+ ]
+
+
+class JSTreeTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.jstree = report.JSTree()
+
+ def test__create_node(self):
+ _id = "tg__0.DropPackets"
+
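+ # Creating a dotted id is expected to also create every missing ancestor
+ # node, rooted at '#'.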
+ expected_data = [
+ {"id": "tg__0", "text": "tg__0", "parent": "#"},
+ {"id": "tg__0.DropPackets", "text": "DropPackets", "parent": "tg__0"}
+ ]
+ self.jstree._create_node(_id)
+
+ self.assertEqual(self.jstree._created_nodes, ['#', 'tg__0', 'tg__0.DropPackets'])
+ self.assertEqual(self.jstree.jstree_data, expected_data)
+
+ def test_format_for_jstree(self):
+ data = [
+ 'tg__0.DropPackets',
+ 'tg__0.LatencyAvg.5', 'tg__0.LatencyAvg.6',
+ 'tg__0.LatencyMax.5', 'tg__0.LatencyMax.6',
+ 'tg__0.RxThroughput', 'tg__0.TxThroughput',
+ 'tg__1.DropPackets',
+ 'tg__1.LatencyAvg.5', 'tg__1.LatencyAvg.6',
+ 'tg__1.LatencyMax.5', 'tg__1.LatencyMax.6',
+ 'tg__1.RxThroughput', 'tg__1.TxThroughput',
+ 'vnf__0.curr_packets_in', 'vnf__0.packets_dropped', 'vnf__0.packets_fwd',
+ ]
+
+ expected_output = [
+ {"id": "tg__0", "text": "tg__0", "parent": "#"},
+ {"id": "tg__0.DropPackets", "text": "DropPackets", "parent": "tg__0"},
+ {"id": "tg__0.LatencyAvg", "text": "LatencyAvg", "parent": "tg__0"},
+ {"id": "tg__0.LatencyAvg.5", "text": "5", "parent": "tg__0.LatencyAvg"},
+ {"id": "tg__0.LatencyAvg.6", "text": "6", "parent": "tg__0.LatencyAvg"},
+ {"id": "tg__0.LatencyMax", "text": "LatencyMax", "parent": "tg__0"},
+ {"id": "tg__0.LatencyMax.5", "text": "5", "parent": "tg__0.LatencyMax"},
+ {"id": "tg__0.LatencyMax.6", "text": "6", "parent": "tg__0.LatencyMax"},
+ {"id": "tg__0.RxThroughput", "text": "RxThroughput", "parent": "tg__0"},
+ {"id": "tg__0.TxThroughput", "text": "TxThroughput", "parent": "tg__0"},
+ {"id": "tg__1", "text": "tg__1", "parent": "#"},
+ {"id": "tg__1.DropPackets", "text": "DropPackets", "parent": "tg__1"},
+ {"id": "tg__1.LatencyAvg", "text": "LatencyAvg", "parent": "tg__1"},
+ {"id": "tg__1.LatencyAvg.5", "text": "5", "parent": "tg__1.LatencyAvg"},
+ {"id": "tg__1.LatencyAvg.6", "text": "6", "parent": "tg__1.LatencyAvg"},
+ {"id": "tg__1.LatencyMax", "text": "LatencyMax", "parent": "tg__1"},
+ {"id": "tg__1.LatencyMax.5", "text": "5", "parent": "tg__1.LatencyMax"},
+ {"id": "tg__1.LatencyMax.6", "text": "6", "parent": "tg__1.LatencyMax"},
+ {"id": "tg__1.RxThroughput", "text": "RxThroughput", "parent": "tg__1"},
+ {"id": "tg__1.TxThroughput", "text": "TxThroughput", "parent": "tg__1"},
+ {"id": "vnf__0", "text": "vnf__0", "parent": "#"},
+ {"id": "vnf__0.curr_packets_in", "text": "curr_packets_in", "parent": "vnf__0"},
+ {"id": "vnf__0.packets_dropped", "text": "packets_dropped", "parent": "vnf__0"},
+ {"id": "vnf__0.packets_fwd", "text": "packets_fwd", "parent": "vnf__0"},
+ ]
+
+ result = self.jstree.format_for_jstree(data)
+ self.assertEqual(expected_output, result)
+
+
+class ReportTestCase(unittest.TestCase):
+
+ def setUp(self):
+ super(ReportTestCase, self).setUp()
+ self.param = change_osloobj_to_paras({})
+ self.param.yaml_name = [GOOD_YAML_NAME]
+ self.param.task_id = [GOOD_TASK_ID]
+ self.rep = report.Report()
+
+ def test___init__(self):
+ self.assertEqual([], self.rep.Timestamp)
+ self.assertEqual("", self.rep.yaml_name)
+ self.assertEqual("", self.rep.task_id)
+
+ def test__validate(self):
+ self.rep._validate(GOOD_YAML_NAME, GOOD_TASK_ID)
+ self.assertEqual(GOOD_YAML_NAME, self.rep.yaml_name)
+ self.assertEqual(GOOD_TASK_ID, str(self.rep.task_id))
+
+ def test__validate_invalid_yaml_name(self):
+ with six.assertRaisesRegex(self, ValueError, "yaml*"):
+ self.rep._validate(BAD_YAML_NAME, GOOD_TASK_ID)
+
+ def test__validate_invalid_task_id(self):
+ with six.assertRaisesRegex(self, ValueError, "task*"):
+ self.rep._validate(GOOD_YAML_NAME, BAD_TASK_ID)
+
+ @mock.patch.object(influx, 'query')
+ def test__get_fieldkeys(self, mock_query):
+ mock_query.return_value = GOOD_DB_FIELDKEYS
+ self.rep.yaml_name = GOOD_YAML_NAME
+ self.rep.task_id = GOOD_TASK_ID
+ self.assertEqual(GOOD_DB_FIELDKEYS, self.rep._get_fieldkeys())
+
+ @mock.patch.object(influx, 'query')
+ def test__get_fieldkeys_nodbclient(self, mock_query):
+ mock_query.side_effect = RuntimeError
+ self.assertRaises(RuntimeError, self.rep._get_fieldkeys)
+
+ @mock.patch.object(influx, 'query')
+ def test__get_fieldkeys_testcase_not_found(self, mock_query):
+ mock_query.return_value = []
+ self.rep.yaml_name = GOOD_YAML_NAME
+ self.rep.task_id = GOOD_TASK_ID
+ six.assertRaisesRegex(self, KeyError, "Test case", self.rep._get_fieldkeys)
+
+ @mock.patch.object(influx, 'query')
+ def test__get_metrics(self, mock_query):
+ mock_query.return_value = GOOD_DB_METRICS
+ self.rep.yaml_name = GOOD_YAML_NAME
+ self.rep.task_id = GOOD_TASK_ID
+ self.assertEqual(GOOD_DB_METRICS, self.rep._get_metrics())
+
+ @mock.patch.object(influx, 'query')
+ def test__get_metrics_task_not_found(self, mock_query):
+ mock_query.return_value = []
+ self.rep.yaml_name = GOOD_YAML_NAME
+ self.rep.task_id = GOOD_TASK_ID
+ six.assertRaisesRegex(self, KeyError, "Task ID", self.rep._get_metrics)
+
+ @mock.patch.object(influx, 'query')
+ def test__get_task_start_time(self, mock_query):
+ self.rep.yaml_name = GOOD_YAML_NAME
+ self.rep.task_id = GOOD_TASK_ID
+ mock_query.return_value = [{
+ u'free.memory0.used': u'9789088',
+ u'free.memory0.available': u'22192984',
+ u'free.memory0.shared': u'219152',
+ u'time': u'2019-01-22T16:20:14.568075776Z',
+ }]
+ expected = "2019-01-22T16:20:14.568075776Z"
+
+ self.assertEqual(
+ expected,
+ self.rep._get_task_start_time()
+ )
+
+ def test__get_task_start_time_task_not_found(self):
+ pass
+
+ @mock.patch.object(influx, 'query')
+ def test__get_task_end_time(self, mock_query):
+ self.rep.yaml_name = GOOD_YAML_NAME
+ self.rep.task_id = GOOD_TASK_ID
+ # TODO(elfoley): write this test!
+ mock_query.return_value = [{
+
+ }]
+
+ @mock.patch.object(influx, 'query')
+ def test__get_baro_metrics(self, mock_query):
+ self.rep.yaml_name = GOOD_YAML_NAME
+ self.rep.task_id = GOOD_TASK_ID
+ self.rep._get_task_start_time = mock.Mock(return_value=0)
+ self.rep._get_task_end_time = mock.Mock(return_value=0)
+
+ influx_return_values = ([{
+ u'value': 324050, u'instance': u'0', u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:25.383698038Z',
+ u'type_instance': u'user', u'type': u'cpu',
+ }, {
+ u'value': 193798, u'instance': u'0', u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:25.383712594Z',
+ u'type_instance': u'system', u'type': u'cpu',
+ }, {
+ u'value': 324051, u'instance': u'0', u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:35.383696624Z',
+ u'type_instance': u'user', u'type': u'cpu',
+ }, {
+ u'value': 193800, u'instance': u'0', u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:35.383713481Z',
+ u'type_instance': u'system', u'type': u'cpu',
+ }, {
+ u'value': 324054, u'instance': u'0', u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:45.3836966789Z',
+ u'type_instance': u'user', u'type': u'cpu',
+ }, {
+ u'value': 193801, u'instance': u'0', u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:45.383716296Z',
+ u'type_instance': u'system', u'type': u'cpu',
+ }],
+ [{
+ u'value': 3598453000, u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:25.383698038Z',
+ u'type_instance': u'0', u'type': u'cpufreq',
+ }, {
+ u'value': 3530250000, u'type_instance': u'0', u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:35.383712594Z', u'type': u'cpufreq',
+ }, {
+ u'value': 3600281000, u'type_instance': u'0', u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:45.383696624Z', u'type': u'cpufreq',
+ }],
+ )
+
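+ # Serve the prepared result sets first, then keep returning empty lists
+ # for any additional influx queries.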
+ def ret_vals(vals):
+ for x in vals:
+ yield x
+ while True:
+ yield []
+
+ mock_query.side_effect = ret_vals(influx_return_values)
+
+ BARO_EXPECTED_METRICS = {
+ 'Timestamp': [
+ '14:11:25.3836', '14:11:25.3837',
+ '14:11:35.3836', '14:11:35.3837',
+ '14:11:45.3836', '14:11:45.3837'],
+ 'myhostname.cpu_value.cpu.user.0': {
+ '14:11:25.3836': 324050,
+ '14:11:35.3836': 324051,
+ '14:11:45.3836': 324054,
+ },
+ 'myhostname.cpu_value.cpu.system.0': {
+ '14:11:25.3837': 193798,
+ '14:11:35.3837': 193800,
+ '14:11:45.3837': 193801,
+ },
+ 'myhostname.cpufreq_value.cpufreq.0': {
+ '14:11:25.3836': 3598453000,
+ '14:11:35.3837': 3530250000,
+ '14:11:45.3836': 3600281000,
+ }
+ }
+ self.assertEqual(
+ BARO_EXPECTED_METRICS,
+ self.rep._get_baro_metrics()
+ )
+
+ def test__get_timestamps(self):
+
+ metrics = MORE_DB_METRICS
+ self.assertEqual(
+ MORE_TIMESTAMP,
+ self.rep._get_timestamps(metrics)
+ )
+
+ def test__format_datasets(self):
+ metric_name = "free.memory0.used"
+ metrics = [{
+ u'free.memory1.free': u'1958664',
+ u'free.memory0.used': u'9789560',
+ }, {
+ u'free.memory1.free': u'1958228',
+ u'free.memory0.used': u'9789790',
+ }, {
+ u'free.memory1.free': u'1956156',
+ u'free.memory0.used': u'9791092',
+ }, {
+ u'free.memory1.free': u'1956280',
+ u'free.memory0.used': u'9790796',
+ }]
+ self.assertEqual(
+ [9789560, 9789790, 9791092, 9790796],
+ self.rep._format_datasets(metric_name, metrics)
+ )
+
+ def test__format_datasets_val_none(self):
+ metric_name = "free.memory0.used"
+ metrics = [{
+ u'free.memory1.free': u'1958664',
+ u'free.memory0.used': 9876543109876543210,
+ }, {
+ u'free.memory1.free': u'1958228',
+ }, {
+ u'free.memory1.free': u'1956156',
+ u'free.memory0.used': u'9791092',
+ }, {
+ u'free.memory1.free': u'1956280',
+ u'free.memory0.used': u'9790796',
+ }]
+
+ exp0 = 9876543109876543210 if six.PY3 else 9.876543109876543e+18
+ self.assertEqual(
+ [exp0, None, 9791092, 9790796],
+ self.rep._format_datasets(metric_name, metrics)
+ )
+
+ def test__format_datasets_val_incompatible(self):
+ metric_name = "free.memory0.used"
+ metrics = [{
+ u'free.memory0.used': "some incompatible value",
+ }, {
+ }]
+ self.assertEqual(
+ [None, None],
+ self.rep._format_datasets(metric_name, metrics)
+ )
+
+ def test__combine_times(self):
+ yard_times = [
+ '00:00:00.000000',
+ '00:00:01.000000',
+ '00:00:02.000000',
+ '00:00:06.000000',
+ '00:00:08.000000',
+ '00:00:09.000000',
+ ]
+ baro_times = [
+ '00:00:01.000000',
+ '00:00:03.000000',
+ '00:00:04.000000',
+ '00:00:05.000000',
+ '00:00:07.000000',
+ '00:00:10.000000',
+ ]
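+ # The expected result is the sorted union of both lists, with the shared
+ # timestamp kept only once.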
+ expected_combo = [
+ '00:00:00.000000',
+ '00:00:01.000000',
+ '00:00:02.000000',
+ '00:00:03.000000',
+ '00:00:04.000000',
+ '00:00:05.000000',
+ '00:00:06.000000',
+ '00:00:07.000000',
+ '00:00:08.000000',
+ '00:00:09.000000',
+ '00:00:10.000000',
+ ]
+
+ actual_combo = self.rep._combine_times(yard_times, baro_times)
+ self.assertEqual(len(expected_combo), len(actual_combo))
+
+ self.assertEqual(
+ expected_combo,
+ actual_combo,
+ )
+
+ def test__combine_times_2(self):
+ time1 = ['14:11:25.383698', '14:11:25.383712', '14:11:35.383696',]
+ time2 = [
+ '16:20:14.568075', '16:20:24.575083',
+ '16:20:34.580989', '16:20:44.586801', ]
+ time_exp = [
+ '14:11:25.383698', '14:11:25.383712', '14:11:35.383696',
+ '16:20:14.568075', '16:20:24.575083', '16:20:34.580989',
+ '16:20:44.586801',
+ ]
+ self.assertEqual(time_exp, self.rep._combine_times(time1, time2))
+
+ def test__combine_metrics(self):
+ BARO_METRICS = {
+ 'myhostname.cpu_value.cpu.user.0': {
+ '14:11:25.3836': 324050, '14:11:35.3836': 324051,
+ '14:11:45.3836': 324054,
+ },
+ 'myhostname.cpu_value.cpu.system.0': {
+ '14:11:25.3837': 193798, '14:11:35.3837': 193800,
+ '14:11:45.3837': 193801,
+ }
+ }
+ BARO_TIMES = [
+ '14:11:25.3836', '14:11:25.3837', '14:11:35.3836',
+ '14:11:35.3837', '14:11:45.3836', '14:11:45.3837',
+ ]
+ YARD_METRICS = {
+ 'free.memory9.free': {
+ '16:20:14.5680': 1958244, '16:20:24.5750': 1955964,
+ '16:20:34.5809': 1956040, '16:20:44.5868': 1956428,
+ },
+ 'free.memory7.used': {
+ '16:20:14.5680': 9789068, '16:20:24.5750': 9791284,
+ '16:20:34.5809': 9791228, '16:20:44.5868': 9790692,
+ },
+ 'free.memory2.total': {
+ '16:20:14.5680': 32671288, '16:20:24.5750': 32671288,
+ '16:20:34.5809': 32671288, '16:20:44.5868': 32671288,
+ },
+ 'free.memory7.free': {
+ '16:20:14.5680': 1958368, '16:20:24.5750': 1956104,
+ '16:20:34.5809': 1956040, '16:20:44.5868': 1956552,
+ },
+ 'free.memory1.used': {
+ '16:20:14.5680': 9788872, '16:20:24.5750': 9789212,
+ '16:20:34.5809': 9791168, '16:20:44.5868': 9790996,
+ },
+ }
+ YARD_TIMES = [
+ '16:20:14.5680', '16:20:24.5750',
+ '16:20:34.5809', '16:20:44.5868',
+ ]
+
+ expected_output = {
+ 'myhostname.cpu_value.cpu.user.0': [{
+ 'x': '14:11:25.3836', 'y': 324050, }, {
+ 'x': '14:11:35.3836', 'y': 324051, }, {
+ 'x': '14:11:45.3836', 'y': 324054, }],
+ 'myhostname.cpu_value.cpu.system.0' : [{
+ 'x': '14:11:25.3837', 'y': 193798, }, {
+ 'x': '14:11:35.3837', 'y': 193800, }, {
+ 'x': '14:11:45.3837', 'y': 193801, }],
+ 'free.memory9.free': [{
+ 'x': '16:20:14.5680', 'y': 1958244, }, {
+ 'x': '16:20:24.5750', 'y': 1955964, }, {
+ 'x': '16:20:34.5809', 'y': 1956040, }, {
+ 'x': '16:20:44.5868', 'y': 1956428, }],
+ 'free.memory7.used': [{
+ 'x': '16:20:14.5680', 'y': 9789068, }, {
+ 'x': '16:20:24.5750', 'y': 9791284, }, {
+ 'x': '16:20:34.5809', 'y': 9791228, }, {
+ 'x': '16:20:44.5868', 'y': 9790692, }],
+ 'free.memory2.total': [{
+ 'x': '16:20:14.5680', 'y': 32671288, }, {
+ 'x': '16:20:24.5750', 'y': 32671288, }, {
+ 'x': '16:20:34.5809', 'y': 32671288, }, {
+ 'x': '16:20:44.5868', 'y': 32671288, }],
+ 'free.memory7.free': [{
+ 'x': '16:20:14.5680', 'y': 1958368, }, {
+ 'x': '16:20:24.5750', 'y': 1956104, }, {
+ 'x': '16:20:34.5809', 'y': 1956040, }, {
+ 'x': '16:20:44.5868', 'y': 1956552, }],
+ 'free.memory1.used': [{
+ 'x': '16:20:14.5680', 'y': 9788872, }, {
+ 'x': '16:20:24.5750', 'y': 9789212, }, {
+ 'x': '16:20:34.5809', 'y': 9791168, }, {
+ 'x': '16:20:44.5868', 'y': 9790996, }],
+ }
+
+ actual_output, _, _ = self.rep._combine_metrics(
+ BARO_METRICS, BARO_TIMES, YARD_METRICS, YARD_TIMES
+ )
+ self.assertEqual(
+ sorted(expected_output.keys()),
+ sorted(actual_output.keys())
+ )
+
+ self.assertEqual(
+ expected_output,
+ actual_output,
+ )
+
+ @mock.patch.object(report.Report, '_get_metrics')
+ @mock.patch.object(report.Report, '_get_fieldkeys')
+ def test__generate_common(self, mock_keys, mock_metrics):
+ mock_metrics.return_value = MORE_DB_METRICS
+ mock_keys.return_value = MORE_DB_FIELDKEYS
+ datasets, table_vals = self.rep._generate_common(self.param)
+ self.assertEqual(MORE_EXPECTED_DATASETS, datasets)
+ self.assertEqual(MORE_EXPECTED_TABLE_VALS, table_vals)
+
+ @mock.patch.object(report.Report, '_get_metrics')
+ @mock.patch.object(report.Report, '_get_fieldkeys')
+ @mock.patch.object(report.Report, '_validate')
+ def test_generate(self, mock_valid, mock_keys, mock_metrics):
+ mock_metrics.return_value = GOOD_DB_METRICS
+ mock_keys.return_value = GOOD_DB_FIELDKEYS
+ self.rep.generate(self.param)
+ mock_valid.assert_called_once_with(GOOD_YAML_NAME, GOOD_TASK_ID)
+ mock_metrics.assert_called_once_with()
+ mock_keys.assert_called_once_with()
+ self.assertEqual(GOOD_TIMESTAMP, self.rep.Timestamp)
+
+ @mock.patch.object(report.Report, '_get_baro_metrics')
+ @mock.patch.object(report.Report, '_get_metrics')
+ @mock.patch.object(report.Report, '_get_fieldkeys')
+ @mock.patch.object(report.Report, '_validate')
+ def test_generate_nsb(
+ self, mock_valid, mock_keys, mock_metrics, mock_baro_metrics):
+
+ mock_metrics.return_value = GOOD_DB_METRICS
+ mock_keys.return_value = GOOD_DB_FIELDKEYS
+ BARO_METRICS = {
+ # TODO: is timestamp needed here?
+ 'Timestamp': [
+ '14:11:25.383698', '14:11:25.383712', '14:11:35.383696',
+ '14:11:35.383713', '14:11:45.383700', '14:11:45.383716'],
+ 'myhostname.cpu_value.cpu.user.0': {
+ '14:11:25.383698': 324050,
+ '14:11:35.383696': 324051,
+ '14:11:45.383700': 324054,
+ },
+ 'myhostname.cpu_value.cpu.system.0': {
+ '14:11:25.383712': 193798,
+ '14:11:35.383713': 193800,
+ '14:11:45.383716': 193801,
+ }
+ }
+ mock_baro_metrics.return_value = BARO_METRICS
+
+ self.rep.generate_nsb(self.param)
+ mock_valid.assert_called_once_with(GOOD_YAML_NAME, GOOD_TASK_ID)
+ mock_metrics.assert_called_once_with()
+ mock_keys.assert_called_once_with()
+ self.assertEqual(GOOD_TIMESTAMP, self.rep.Timestamp)
diff --git a/yardstick/tests/unit/benchmark/core/test_task.py b/yardstick/tests/unit/benchmark/core/test_task.py
new file mode 100644
index 000000000..0f09b3e59
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/core/test_task.py
@@ -0,0 +1,644 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import copy
+import io
+import logging
+import os
+import sys
+
+import mock
+import six
+from six.moves import builtins
+import unittest
+import uuid
+import collections
+
+from yardstick.benchmark.contexts import base
+from yardstick.benchmark.contexts import dummy
+from yardstick.benchmark.core import task
+from yardstick.common import constants as consts
+from yardstick.common import exceptions
+from yardstick.common import task_template
+from yardstick.common import utils
+
+
+class TaskTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_log = mock.patch.object(task, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_log.stop()
+
+ @mock.patch.object(base, 'Context')
+ def test_parse_nodes_with_context_same_context(self, mock_context):
+ scenario_cfg = {
+ "nodes": {
+ "host": "node1.LF",
+ "target": "node2.LF"
+ }
+ }
+ server_info = {
+ "ip": "10.20.0.3",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ mock_context.get_server.return_value = server_info
+
+ context_cfg = task.parse_nodes_with_context(scenario_cfg)
+
+ self.assertEqual(context_cfg["host"], server_info)
+ self.assertEqual(context_cfg["target"], server_info)
+
+ def test_set_dispatchers(self):
+ t = task.Task()
+ output_config = {"DEFAULT": {"dispatcher": "file, http"}}
+ t._set_dispatchers(output_config)
+ self.assertEqual(output_config, output_config)
+
+ @mock.patch.object(task, 'DispatcherBase')
+ def test__do_output(self, mock_dispatcher):
+ t = task.Task()
+ output_config = {"DEFAULT": {"dispatcher": "file, http"}}
+
+ dispatcher1 = mock.MagicMock()
+ dispatcher1.__dispatcher_type__ = 'file'
+
+ dispatcher2 = mock.MagicMock()
+ dispatcher2.__dispatcher_type__ = 'http'
+
+ mock_dispatcher.get = mock.MagicMock(return_value=[dispatcher1,
+ dispatcher2])
+ self.assertIsNone(t._do_output(output_config, {}))
+
+ @mock.patch.object(base, 'Context')
+ def test_parse_networks_from_nodes(self, mock_context):
+ nodes = {
+ 'node1': {
+ 'interfaces': {
+ 'mgmt': {
+ 'network_name': 'mgmt',
+ },
+ 'xe0': {
+ 'network_name': 'uplink_0',
+ },
+ 'xe1': {
+ 'network_name': 'downlink_0',
+ },
+ },
+ },
+ 'node2': {
+ 'interfaces': {
+ 'mgmt': {
+ 'network_name': 'mgmt',
+ },
+ 'uplink_0': {
+ 'network_name': 'uplink_0',
+ },
+ 'downlink_0': {
+ 'network_name': 'downlink_0',
+ },
+ },
+ },
+ }
+
+ mock_context.get_network.side_effect = iter([
+ None,
+ {
+ 'name': 'mgmt',
+ 'network_type': 'flat',
+ },
+ {},
+ {
+ 'name': 'uplink_0',
+ 'subnet_cidr': '10.20.0.0/16',
+ },
+ {
+ 'name': 'downlink_0',
+ 'segmentation_id': '1001',
+ },
+ {
+ 'name': 'uplink_1',
+ },
+ ])
+
+ # one for each interface
+ expected_get_network_calls = 6
+ expected = {
+ 'mgmt': {'name': 'mgmt', 'network_type': 'flat'},
+ 'uplink_0': {'name': 'uplink_0', 'subnet_cidr': '10.20.0.0/16'},
+ 'uplink_1': {'name': 'uplink_1'},
+ 'downlink_0': {'name': 'downlink_0', 'segmentation_id': '1001'},
+ }
+
+ networks = task.get_networks_from_nodes(nodes)
+ self.assertEqual(mock_context.get_network.call_count, expected_get_network_calls)
+ self.assertDictEqual(networks, expected)
+
+ @mock.patch.object(base, 'Context')
+ @mock.patch.object(task, 'base_runner')
+ def test_run(self, mock_base_runner, *args):
+ scenario = {
+ 'host': 'athena.demo',
+ 'target': 'ares.demo',
+ 'runner': {
+ 'duration': 60,
+ 'interval': 1,
+ 'type': 'Duration'
+ },
+ 'type': 'Ping'
+ }
+
+ t = task.Task()
+ runner = mock.Mock()
+ runner.join.return_value = 0
+ runner.get_output.return_value = {}
+ runner.get_result.return_value = []
+ mock_base_runner.Runner.get.return_value = runner
+ t._run([scenario], False, "yardstick.out")
+ runner.run.assert_called_once()
+
+ @mock.patch.object(base, 'Context')
+ @mock.patch.object(task, 'base_runner')
+ def test_run_ProxDuration(self, mock_base_runner, *args):
+ scenario = {
+ 'host': 'athena.demo',
+ 'target': 'ares.demo',
+ 'runner': {
+ 'duration': 60,
+ 'interval': 1,
+ 'sampled': 'yes',
+ 'confirmation': 1,
+ 'type': 'ProxDuration'
+ },
+ 'type': 'Ping'
+ }
+
+ t = task.Task()
+ runner = mock.Mock()
+ runner.join.return_value = 0
+ runner.get_output.return_value = {}
+ runner.get_result.return_value = []
+ mock_base_runner.Runner.get.return_value = runner
+ t._run([scenario], False, "yardstick.out")
+ runner.run.assert_called_once()
+
+ @mock.patch.object(os, 'environ')
+ def test_check_precondition(self, mock_os_environ):
+ cfg = {
+ 'precondition': {
+ 'installer_type': 'compass',
+ 'deploy_scenarios': 'os-nosdn',
+ 'pod_name': 'huawei-pod1'
+ }
+ }
+
+ t = task.TaskParser('/opt')
+ mock_os_environ.get.side_effect = ['compass',
+ 'os-nosdn',
+ 'huawei-pod1']
+ result = t._check_precondition(cfg)
+ self.assertTrue(result)
+
+ def test_parse_suite_no_constraint_no_args(self):
+ SAMPLE_SCENARIO_PATH = "no_constraint_no_args_scenario_sample.yaml"
+ t = task.TaskParser(self._get_file_abspath(SAMPLE_SCENARIO_PATH))
+ with mock.patch.object(os, 'environ',
+ new={'NODE_NAME': 'huawei-pod1', 'INSTALLER_TYPE': 'compass'}):
+ task_files, task_args, task_args_fnames = t.parse_suite()
+
+ self.assertEqual(task_files[0], self.change_to_abspath(
+ 'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml'))
+ self.assertEqual(task_files[1], self.change_to_abspath(
+ 'tests/opnfv/test_cases/opnfv_yardstick_tc043.yaml'))
+ self.assertIsNone(task_args[0])
+ self.assertIsNone(task_args[1])
+ self.assertIsNone(task_args_fnames[0])
+ self.assertIsNone(task_args_fnames[1])
+
+ def test_parse_suite_no_constraint_with_args(self):
+ SAMPLE_SCENARIO_PATH = "no_constraint_with_args_scenario_sample.yaml"
+ t = task.TaskParser(self._get_file_abspath(SAMPLE_SCENARIO_PATH))
+ with mock.patch.object(os, 'environ',
+ new={'NODE_NAME': 'huawei-pod1', 'INSTALLER_TYPE': 'compass'}):
+ task_files, task_args, task_args_fnames = t.parse_suite()
+
+ self.assertEqual(task_files[0], self.change_to_abspath(
+ 'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml'))
+ self.assertEqual(task_files[1], self.change_to_abspath(
+ 'tests/opnfv/test_cases/opnfv_yardstick_tc043.yaml'))
+ self.assertIsNone(task_args[0])
+ self.assertEqual(task_args[1],
+ '{"host": "node1.LF","target": "node2.LF"}')
+ self.assertIsNone(task_args_fnames[0])
+ self.assertIsNone(task_args_fnames[1])
+
+ def test_parse_suite_with_constraint_no_args(self):
+ SAMPLE_SCENARIO_PATH = "with_constraint_no_args_scenario_sample.yaml"
+ t = task.TaskParser(self._get_file_abspath(SAMPLE_SCENARIO_PATH))
+ with mock.patch.object(os, 'environ',
+ new={'NODE_NAME': 'huawei-pod1', 'INSTALLER_TYPE': 'compass'}):
+ task_files, task_args, task_args_fnames = t.parse_suite()
+ self.assertEqual(task_files[0], self.change_to_abspath(
+ 'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml'))
+ self.assertEqual(task_files[1], self.change_to_abspath(
+ 'tests/opnfv/test_cases/opnfv_yardstick_tc043.yaml'))
+ self.assertIsNone(task_args[0])
+ self.assertIsNone(task_args[1])
+ self.assertIsNone(task_args_fnames[0])
+ self.assertIsNone(task_args_fnames[1])
+
+ def test_parse_suite_with_constraint_with_args(self):
+ SAMPLE_SCENARIO_PATH = "with_constraint_with_args_scenario_sample.yaml"
+ t = task.TaskParser(self._get_file_abspath(SAMPLE_SCENARIO_PATH))
+ with mock.patch('os.environ',
+ new={'NODE_NAME': 'huawei-pod1', 'INSTALLER_TYPE': 'compass'}):
+ task_files, task_args, task_args_fnames = t.parse_suite()
+
+ self.assertEqual(task_files[0], self.change_to_abspath(
+ 'tests/opnfv/test_cases/opnfv_yardstick_tc037.yaml'))
+ self.assertEqual(task_files[1], self.change_to_abspath(
+ 'tests/opnfv/test_cases/opnfv_yardstick_tc043.yaml'))
+ self.assertIsNone(task_args[0])
+ self.assertEqual(task_args[1],
+ '{"host": "node1.LF","target": "node2.LF"}')
+ self.assertIsNone(task_args_fnames[0])
+ self.assertIsNone(task_args_fnames[1])
+
+ def test_parse_options(self):
+ options = {
+ 'openstack': {
+ 'EXTERNAL_NETWORK': '$network'
+ },
+ 'nodes': ['node1', '$node'],
+ 'host': '$host'
+ }
+
+ t = task.Task()
+ t.outputs = {
+ 'network': 'ext-net',
+ 'node': 'node2',
+ 'host': 'server.yardstick'
+ }
+
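+ # The '$network', '$node' and '$host' placeholders are expected to be
+ # resolved from the task outputs defined above.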
+ expected_result = {
+ 'openstack': {
+ 'EXTERNAL_NETWORK': 'ext-net'
+ },
+ 'nodes': ['node1', 'node2'],
+ 'host': 'server.yardstick'
+ }
+
+ actual_result = t._parse_options(options)
+ self.assertEqual(expected_result, actual_result)
+
+ def test_parse_options_no_teardown(self):
+ options = {
+ 'openstack': {
+ 'EXTERNAL_NETWORK': '$network'
+ },
+ 'nodes': ['node1', '$node'],
+ 'host': '$host',
+ 'contexts': {'name': "my-context",
+ 'no_teardown': True}
+ }
+
+ t = task.Task()
+ t.outputs = {
+ 'network': 'ext-net',
+ 'node': 'node2',
+ 'host': 'server.yardstick'
+ }
+
+ expected_result = {
+ 'openstack': {
+ 'EXTERNAL_NETWORK': 'ext-net'
+ },
+ 'nodes': ['node1', 'node2'],
+ 'host': 'server.yardstick',
+ 'contexts': {'name': 'my-context',
+ 'no_teardown': True,
+ }
+ }
+
+ actual_result = t._parse_options(options)
+ self.assertEqual(expected_result, actual_result)
+
+ @mock.patch.object(builtins, 'open', side_effect=mock.mock_open())
+ @mock.patch.object(task, 'utils')
+ @mock.patch.object(logging, 'root')
+ def test_set_log(self, mock_logging_root, *args):
+ task_obj = task.Task()
+ task_obj.task_id = 'task_id'
+ task_obj._set_log()
+ mock_logging_root.addHandler.assert_called()
+
+ def _get_file_abspath(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ file_path = os.path.join(curr_path, filename)
+ return file_path
+
+ def change_to_abspath(self, filepath):
+ return os.path.join(consts.YARDSTICK_ROOT_PATH, filepath)
+
+
+class TaskParserTestCase(unittest.TestCase):
+
+ TASK = """
+{% set value1 = value1 or 'var1' %}
+{% set value2 = value2 or 'var2' %}
+key1: {{ value1 }}
+key2:
+ - {{ value2 }}"""
+
+ TASK_RENDERED_1 = u"""
+
+
+key1: var1
+key2:
+ - var2"""
+
+ TASK_RENDERED_2 = u"""
+
+
+key1: var3
+key2:
+ - var4"""
+
+ def setUp(self):
+ self.parser = task.TaskParser('fake/path')
+ self.scenario = {
+ 'host': 'athena.demo',
+ 'target': 'kratos.demo',
+ 'targets': [
+ 'ares.demo', 'mars.demo'
+ ],
+ 'options': {
+ 'server_name': {
+ 'host': 'jupiter.demo',
+ 'target': 'saturn.demo',
+ },
+ },
+ 'nodes': {
+ 'tg__0': 'tg_0.demo',
+ 'vnf__0': 'vnf_0.demo',
+ }
+ }
+
+ @staticmethod
+ def _remove_contexts():
+ for context in base.Context.list:
+ context._delete_context()
+ base.Context.list = []
+
+ def test__change_node_names(self):
+
+ ctx_attrs = {
+ 'name': 'demo',
+ 'task_id': '1234567890',
+ 'servers': [
+ 'athena', 'kratos',
+ 'ares', 'mars',
+ 'jupiter', 'saturn',
+ 'tg_0', 'vnf_0'
+ ]
+ }
+
+ my_context = dummy.DummyContext()
+ self.addCleanup(self._remove_contexts)
+ my_context.init(ctx_attrs)
+
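+ # Every server reference is expected to gain the '-<first 8 chars of the
+ # task id>' suffix taken from the context name.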
+ expected_scenario = {
+ 'host': 'athena.demo-12345678',
+ 'target': 'kratos.demo-12345678',
+ 'targets': [
+ 'ares.demo-12345678', 'mars.demo-12345678'
+ ],
+ 'options': {
+ 'server_name': {
+ 'host': 'jupiter.demo-12345678',
+ 'target': 'saturn.demo-12345678',
+ },
+ },
+ 'nodes': {
+ 'tg__0': 'tg_0.demo-12345678',
+ 'vnf__0': 'vnf_0.demo-12345678',
+ }
+ }
+
+ scenario = copy.deepcopy(self.scenario)
+
+ self.parser._change_node_names(scenario, [my_context])
+ self.assertEqual(scenario, expected_scenario)
+
+ def test__change_node_names_context_not_found(self):
+ scenario = copy.deepcopy(self.scenario)
+ self.assertRaises(exceptions.ScenarioConfigContextNameNotFound,
+ self.parser._change_node_names,
+ scenario, [])
+
+ def test__change_node_names_context_name_unchanged(self):
+ ctx_attrs = {
+ 'name': 'demo',
+ 'task_id': '1234567890',
+ 'flags': {
+ 'no_setup': True,
+ 'no_teardown': True
+ }
+ }
+
+ my_context = dummy.DummyContext()
+ self.addCleanup(self._remove_contexts)
+ my_context.init(ctx_attrs)
+
+ scenario = copy.deepcopy(self.scenario)
+ expected_scenario = copy.deepcopy(self.scenario)
+
+ self.parser._change_node_names(scenario, [my_context])
+ self.assertEqual(scenario, expected_scenario)
+
+ def test__change_node_names_options_empty(self):
+ ctx_attrs = {
+ 'name': 'demo',
+ 'task_id': '1234567890'
+ }
+
+ my_context = dummy.DummyContext()
+ self.addCleanup(self._remove_contexts)
+ my_context.init(ctx_attrs)
+ scenario = copy.deepcopy(self.scenario)
+ scenario['options'] = None
+
+ self.parser._change_node_names(scenario, [my_context])
+ self.assertIsNone(scenario['options'])
+
+ def test__change_node_names_options_server_name_empty(self):
+ ctx_attrs = {
+ 'name': 'demo',
+ 'task_id': '1234567890'
+ }
+
+ my_context = dummy.DummyContext()
+ self.addCleanup(self._remove_contexts)
+ my_context.init(ctx_attrs)
+ scenario = copy.deepcopy(self.scenario)
+ scenario['options']['server_name'] = None
+
+ self.parser._change_node_names(scenario, [my_context])
+ self.assertIsNone(scenario['options']['server_name'])
+
+ def test__change_node_names_target_map(self):
+ ctx_attrs = {
+ 'name': 'demo',
+ 'task_id': '1234567890'
+ }
+ my_context = dummy.DummyContext()
+ self.addCleanup(self._remove_contexts)
+ my_context.init(ctx_attrs)
+ scenario = copy.deepcopy(self.scenario)
+ scenario['nodes'] = {
+ 'tg__0': {
+ 'name': 'tg__0.demo',
+ 'public_ip_attr': "1.1.1.1",
+ },
+ 'vnf__0': {
+ 'name': 'vnf__0.demo',
+ 'public_ip_attr': "2.2.2.2",
+ }
+ }
+ self.parser._change_node_names(scenario, [my_context])
+ for target in scenario['nodes'].values():
+ self.assertIsInstance(target, collections.Mapping)
+
+ def test__change_node_names_not_target_map(self):
+ ctx_attrs = {
+ 'name': 'demo',
+ 'task_id': '1234567890'
+ }
+ my_context = dummy.DummyContext()
+ self.addCleanup(self._remove_contexts)
+ my_context.init(ctx_attrs)
+ scenario = copy.deepcopy(self.scenario)
+ self.parser._change_node_names(scenario, [my_context])
+ for target in scenario['nodes'].values():
+ self.assertNotIsInstance(target, collections.Mapping)
+
+ def test__parse_tasks(self):
+ task_obj = task.Task()
+ _uuid = uuid.uuid4()
+ task_obj.task_id = _uuid
+ task_files = ['/directory/task_file_name.yml']
+ mock_parser = mock.Mock()
+ mock_parser.parse_task.return_value = {'rendered': 'File content'}
+ mock_args = mock.Mock()
+ mock_args.render_only = False
+
+ tasks = task_obj._parse_tasks(mock_parser, task_files, mock_args,
+ ['arg1'], ['file_arg1'])
+ self.assertEqual(
+ [{'rendered': 'File content', 'case_name': 'task_file_name'}],
+ tasks)
+ mock_parser.parse_task.assert_called_once_with(
+ _uuid, 'arg1', 'file_arg1')
+
+ @mock.patch.object(sys, 'exit')
+ @mock.patch.object(utils, 'write_file')
+ @mock.patch.object(utils, 'makedirs')
+ def test__parse_tasks_render_only(self, mock_makedirs, mock_write_file,
+ mock_exit):
+ task_obj = task.Task()
+ _uuid = uuid.uuid4()
+ task_obj.task_id = _uuid
+ task_files = ['/directory/task_file_name.yml']
+ mock_parser = mock.Mock()
+ mock_parser.parse_task.return_value = {'rendered': 'File content'}
+ mock_args = mock.Mock()
+ mock_args.render_only = '/output_directory'
+
+ task_obj._parse_tasks(mock_parser, task_files, mock_args,
+ ['arg1'], ['file_arg1'])
+ mock_makedirs.assert_called_once_with('/output_directory')
+ mock_write_file.assert_called_once_with(
+ '/output_directory/000-task_file_name.yml', 'File content')
+ mock_exit.assert_called_once_with(0)
+
+ def test__render_task_no_args(self):
+ task_parser = task.TaskParser('task_file')
+ task_str = io.StringIO(six.text_type(self.TASK))
+ with mock.patch.object(six.moves.builtins, 'open',
+ return_value=task_str) as mock_open:
+ parsed, rendered = task_parser._render_task(None, None)
+
+ self.assertEqual(self.TASK_RENDERED_1, rendered)
+ self.assertEqual({'key1': 'var1', 'key2': ['var2']}, parsed)
+ mock_open.assert_called_once_with('task_file')
+
+ def test__render_task_arguments(self):
+ task_parser = task.TaskParser('task_file')
+ task_str = io.StringIO(six.text_type(self.TASK))
+ with mock.patch.object(six.moves.builtins, 'open',
+ return_value=task_str) as mock_open:
+ parsed, rendered = task_parser._render_task('value1: "var1"', None)
+
+ self.assertEqual(self.TASK_RENDERED_1, rendered)
+ self.assertEqual({'key1': 'var1', 'key2': ['var2']}, parsed)
+ mock_open.assert_called_once_with('task_file')
+
+ def test__render_task_file_arguments(self):
+ task_parser = task.TaskParser('task_file')
+ with mock.patch.object(six.moves.builtins, 'open') as mock_open:
+ mock_open.side_effect = (
+ io.StringIO(six.text_type('value2: var4')),
+ io.StringIO(six.text_type(self.TASK))
+ )
+ parsed, rendered = task_parser._render_task('value1: "var3"',
+ 'args_file')
+
+ self.assertEqual(self.TASK_RENDERED_2, rendered)
+ self.assertEqual({'key1': 'var3', 'key2': ['var4']}, parsed)
+ mock_open.assert_has_calls([mock.call('args_file'),
+ mock.call('task_file')])
+
+ @mock.patch.object(builtins, 'print')
+ def test__render_task_error_arguments(self, *args):
+ with self.assertRaises(exceptions.TaskRenderArgumentError):
+ task.TaskParser('task_file')._render_task('value1="var3"', None)
+
+ def test__render_task_error_task_file(self):
+ task_parser = task.TaskParser('task_file')
+ with mock.patch.object(six.moves.builtins, 'open') as mock_open:
+ mock_open.side_effect = (
+ io.StringIO(six.text_type('value2: var4')),
+ IOError()
+ )
+ with self.assertRaises(exceptions.TaskReadError):
+ task_parser._render_task('value1: "var3"', 'args_file')
+
+ mock_open.assert_has_calls([mock.call('args_file'),
+ mock.call('task_file')])
+
+ def test__render_task_render_error(self):
+ task_parser = task.TaskParser('task_file')
+ with mock.patch.object(six.moves.builtins, 'open') as mock_open, \
+ mock.patch.object(task_template.TaskTemplate, 'render',
+ side_effect=TypeError) as mock_render:
+ mock_open.side_effect = (
+ io.StringIO(six.text_type('value2: var4')),
+ io.StringIO(six.text_type(self.TASK))
+ )
+ with self.assertRaises(exceptions.TaskRenderError):
+ task_parser._render_task('value1: "var3"', 'args_file')
+
+ mock_open.assert_has_calls([mock.call('args_file'),
+ mock.call('task_file')])
+ mock_render.assert_has_calls(
+ [mock.call(self.TASK, value1='var3', value2='var4')])
diff --git a/yardstick/tests/unit/benchmark/core/test_testcase.py b/yardstick/tests/unit/benchmark/core/test_testcase.py
new file mode 100644
index 000000000..077848d77
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/core/test_testcase.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+from six.moves import builtins
+
+from yardstick.benchmark.core import testcase
+from yardstick.tests.unit import base as ut_base
+
+
+class Arg(object):
+
+ def __init__(self):
+ self.casename = ('opnfv_yardstick_tc001', )
+
+
+class TestcaseTestCase(ut_base.BaseUnitTestCase):
+
+ def test_list_all(self):
+ t = testcase.Testcase()
+ result = t.list_all("")
+ self.assertIsInstance(result, list)
+
+ @mock.patch.object(builtins, 'print')
+ def test_show(self, *args):
+ t = testcase.Testcase()
+ casename = Arg()
+ result = t.show(casename)
+ self.assertTrue(result)
diff --git a/yardstick/tests/unit/benchmark/core/with_constraint_no_args_scenario_sample.yaml b/yardstick/tests/unit/benchmark/core/with_constraint_no_args_scenario_sample.yaml
new file mode 100644
index 000000000..168d4b01a
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/core/with_constraint_no_args_scenario_sample.yaml
@@ -0,0 +1,24 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+# Huawei US bare daily task suite
+
+schema: "yardstick:suite:0.1"
+
+name: "os-nosdn-nofeature-ha"
+test_cases_dir: "tests/opnfv/test_cases/"
+test_cases:
+-
+ file_name: opnfv_yardstick_tc037.yaml
+-
+ file_name: opnfv_yardstick_tc043.yaml
+ constraint:
+ installer: compass
+ pod: huawei-pod1
+
diff --git a/yardstick/tests/unit/benchmark/core/with_constraint_with_args_scenario_sample.yaml b/yardstick/tests/unit/benchmark/core/with_constraint_with_args_scenario_sample.yaml
new file mode 100644
index 000000000..299e5de56
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/core/with_constraint_with_args_scenario_sample.yaml
@@ -0,0 +1,26 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+# Huawei US bare daily task suite
+
+schema: "yardstick:suite:0.1"
+
+name: "os-nosdn-nofeature-ha"
+test_cases_dir: "tests/opnfv/test_cases/"
+test_cases:
+-
+ file_name: opnfv_yardstick_tc037.yaml
+-
+ file_name: opnfv_yardstick_tc043.yaml
+ constraint:
+ installer: compass
+ pod: huawei-pod1
+ task_args:
+ huawei-pod1: '{"host": "node1.LF","target": "node2.LF"}'
+
diff --git a/yardstick/tests/unit/benchmark/runner/__init__.py b/yardstick/tests/unit/benchmark/runner/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/runner/__init__.py
diff --git a/yardstick/tests/unit/benchmark/runner/test_arithmetic.py b/yardstick/tests/unit/benchmark/runner/test_arithmetic.py
new file mode 100644
index 000000000..35d935cd5
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/runner/test_arithmetic.py
@@ -0,0 +1,446 @@
+##############################################################################
+# Copyright (c) 2018 Nokia and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+import multiprocessing
+import os
+import time
+
+from yardstick.benchmark.runners import arithmetic
+from yardstick.common import exceptions as y_exc
+
+
+class ArithmeticRunnerTest(unittest.TestCase):
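+    # Helper callable used in place of a real scenario method: every call
+    # increments a counter, records it in the shared data dict, and can be
+    # configured to raise an SLA validation error or a broad
+    # YardstickException so the runner's error handling can be exercised.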
+ class MyMethod(object):
+ SLA_VALIDATION_ERROR_SIDE_EFFECT = 1
+ BROAD_EXCEPTION_SIDE_EFFECT = 2
+
+ def __init__(self, side_effect=0):
+ self.count = 101
+ self.side_effect = side_effect
+
+ def __call__(self, data):
+ self.count += 1
+ data['my_key'] = self.count
+ if self.side_effect == self.SLA_VALIDATION_ERROR_SIDE_EFFECT:
+ raise y_exc.SLAValidationError(case_name='My Case',
+ error_msg='my error message')
+ elif self.side_effect == self.BROAD_EXCEPTION_SIDE_EFFECT:
+ raise y_exc.YardstickException
+ return self.count
+
+ def setUp(self):
+ self.scenario_cfg = {
+ 'runner': {
+ 'interval': 0,
+ 'iter_type': 'nested_for_loops',
+ 'iterators': [
+ {
+ 'name': 'stride',
+ 'start': 64,
+ 'stop': 128,
+ 'step': 64
+ },
+ {
+ 'name': 'size',
+ 'start': 500,
+ 'stop': 2000,
+ 'step': 500
+ }
+ ]
+ },
+ 'type': 'some_type'
+ }
+
+ self.benchmark = mock.Mock()
+ self.benchmark_cls = mock.Mock(return_value=self.benchmark)
+
+ def _assert_defaults__worker_process_run_setup_and_teardown(self):
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.teardown.assert_called_once()
+
+ @mock.patch.object(os, 'getpid')
+ @mock.patch.object(multiprocessing, 'Process')
+ def test__run_benchmark_called_with(self, mock_multiprocessing_process,
+ mock_os_getpid):
+ mock_os_getpid.return_value = 101
+
+ runner = arithmetic.ArithmeticRunner({})
+ benchmark_cls = mock.Mock()
+ runner._run_benchmark(benchmark_cls, 'my_method', self.scenario_cfg,
+ {})
+ mock_multiprocessing_process.assert_called_once_with(
+ name='Arithmetic-some_type-101',
+ target=arithmetic._worker_process,
+ args=(runner.result_queue, benchmark_cls, 'my_method',
+ self.scenario_cfg, {}, runner.aborted, runner.output_queue))
+
+ @mock.patch.object(os, 'getpid')
+ def test__worker_process_runner_id(self, mock_os_getpid):
+ mock_os_getpid.return_value = 101
+
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self.assertEqual(self.scenario_cfg['runner']['runner_id'], 101)
+
+ @mock.patch.object(time, 'sleep')
+ def test__worker_process_calls_nested_for_loops(self, mock_time_sleep):
+ self.scenario_cfg['runner']['interval'] = 99
+
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.benchmark.my_method.assert_has_calls([mock.call({})] * 8)
+ self.assertEqual(self.benchmark.my_method.call_count, 8)
+ mock_time_sleep.assert_has_calls([mock.call(99)] * 8)
+ self.assertEqual(mock_time_sleep.call_count, 8)
+
+ @mock.patch.object(time, 'sleep')
+ def test__worker_process_calls_tuple_loops(self, mock_time_sleep):
+ self.scenario_cfg['runner']['interval'] = 99
+ self.scenario_cfg['runner']['iter_type'] = 'tuple_loops'
+
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.benchmark.my_method.assert_has_calls([mock.call({})] * 2)
+ self.assertEqual(self.benchmark.my_method.call_count, 2)
+ mock_time_sleep.assert_has_calls([mock.call(99)] * 2)
+ self.assertEqual(mock_time_sleep.call_count, 2)
+
+ def test__worker_process_stored_options_nested_for_loops(self):
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 128, 'size': 2000})
+
+ def test__worker_process_stored_options_tuple_loops(self):
+ self.scenario_cfg['runner']['iter_type'] = 'tuple_loops'
+
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 128, 'size': 1000})
+
+ def test__worker_process_aborted_set_early(self):
+ aborted = multiprocessing.Event()
+ aborted.set()
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ aborted, mock.Mock())
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.scenario_cfg['options'], {})
+ self.benchmark.my_method.assert_not_called()
+
+ def test__worker_process_output_queue_nested_for_loops(self):
+ self.benchmark.my_method = self.MyMethod()
+
+ output_queue = multiprocessing.Queue()
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.01)
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.count, 109)
+ result = []
+ while not output_queue.empty():
+ result.append(output_queue.get())
+ self.assertListEqual(result, [102, 103, 104, 105, 106, 107, 108, 109])
+
+ def test__worker_process_output_queue_tuple_loops(self):
+ self.scenario_cfg['runner']['iter_type'] = 'tuple_loops'
+ self.benchmark.my_method = self.MyMethod()
+
+ output_queue = multiprocessing.Queue()
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.01)
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.count, 103)
+ result = []
+ while not output_queue.empty():
+ result.append(output_queue.get())
+ self.assertListEqual(result, [102, 103])
+
+ def test__worker_process_queue_nested_for_loops(self):
+ self.benchmark.my_method = self.MyMethod()
+
+ queue = multiprocessing.Queue()
+ timestamp = time.time()
+ arithmetic._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ time.sleep(0.01)
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.count, 109)
+ count = 0
+ while not queue.empty():
+ count += 1
+ result = queue.get()
+ self.assertEqual(result['errors'], '')
+ self.assertEqual(result['data'], {'my_key': count + 101})
+ self.assertEqual(result['sequence'], count)
+ self.assertGreater(result['timestamp'], timestamp)
+ timestamp = result['timestamp']
+
+ def test__worker_process_queue_tuple_loops(self):
+ self.scenario_cfg['runner']['iter_type'] = 'tuple_loops'
+ self.benchmark.my_method = self.MyMethod()
+
+ queue = multiprocessing.Queue()
+ timestamp = time.time()
+ arithmetic._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ time.sleep(0.01)
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.count, 103)
+ count = 0
+ while not queue.empty():
+ count += 1
+ result = queue.get()
+ self.assertEqual(result['errors'], '')
+ self.assertEqual(result['data'], {'my_key': count + 101})
+ self.assertEqual(result['sequence'], count)
+ self.assertGreater(result['timestamp'], timestamp)
+ timestamp = result['timestamp']
+
+ def test__worker_process_except_sla_validation_error_no_sla_cfg(self):
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.call_count, 8)
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 128, 'size': 2000})
+
+ def test__worker_process_output_on_sla_validation_error_no_sla_cfg(self):
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.SLA_VALIDATION_ERROR_SIDE_EFFECT)
+
+ queue = multiprocessing.Queue()
+ output_queue = multiprocessing.Queue()
+ timestamp = time.time()
+ arithmetic._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.01)
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.count, 109)
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 128, 'size': 2000})
+ count = 0
+ while not queue.empty():
+ count += 1
+ result = queue.get()
+ self.assertEqual(result['errors'], '')
+ self.assertEqual(result['data'], {'my_key': count + 101})
+ self.assertEqual(result['sequence'], count)
+ self.assertGreater(result['timestamp'], timestamp)
+ timestamp = result['timestamp']
+ self.assertEqual(count, 8)
+ self.assertTrue(output_queue.empty())
+
+ def test__worker_process_except_sla_validation_error_sla_cfg_monitor(self):
+ self.scenario_cfg['sla'] = {'action': 'monitor'}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.call_count, 8)
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 128, 'size': 2000})
+
+ def test__worker_process_output_sla_validation_error_sla_cfg_monitor(self):
+ self.scenario_cfg['sla'] = {'action': 'monitor'}
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.SLA_VALIDATION_ERROR_SIDE_EFFECT)
+
+ queue = multiprocessing.Queue()
+ output_queue = multiprocessing.Queue()
+ timestamp = time.time()
+ arithmetic._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.01)
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.count, 109)
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 128, 'size': 2000})
+ count = 0
+ while not queue.empty():
+ count += 1
+ result = queue.get()
+ self.assertEqual(result['errors'],
+ ('My Case SLA validation failed. '
+ 'Error: my error message',))
+ self.assertEqual(result['data'], {'my_key': count + 101})
+ self.assertEqual(result['sequence'], count)
+ self.assertGreater(result['timestamp'], timestamp)
+ timestamp = result['timestamp']
+ self.assertEqual(count, 8)
+ self.assertTrue(output_queue.empty())
+
+ def test__worker_process_raise_sla_validation_error_sla_cfg_assert(self):
+ self.scenario_cfg['sla'] = {'action': 'assert'}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+
+ with self.assertRaises(y_exc.SLAValidationError):
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.my_method.assert_called_once()
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.teardown.assert_not_called()
+
+ def test__worker_process_output_sla_validation_error_sla_cfg_assert(self):
+ self.scenario_cfg['sla'] = {'action': 'assert'}
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.SLA_VALIDATION_ERROR_SIDE_EFFECT)
+
+ queue = multiprocessing.Queue()
+ output_queue = multiprocessing.Queue()
+        with self.assertRaisesRegex(
+ y_exc.SLAValidationError,
+ 'My Case SLA validation failed. Error: my error message'):
+ arithmetic._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.01)
+
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.assertEqual(self.benchmark.my_method.count, 102)
+ self.benchmark.teardown.assert_not_called()
+ self.assertTrue(queue.empty())
+ self.assertTrue(output_queue.empty())
+
+ def test__worker_process_broad_exception_no_sla_cfg_early_exit(self):
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.YardstickException)
+
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.benchmark.my_method.assert_called_once()
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 64, 'size': 500})
+
+ def test__worker_process_output_on_broad_exception_no_sla_cfg(self):
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.BROAD_EXCEPTION_SIDE_EFFECT)
+
+ queue = multiprocessing.Queue()
+ output_queue = multiprocessing.Queue()
+ timestamp = time.time()
+ arithmetic._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.01)
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.count, 102)
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 64, 'size': 500})
+ self.assertEqual(queue.qsize(), 1)
+ result = queue.get()
+ self.assertGreater(result['timestamp'], timestamp)
+ self.assertEqual(result['data'], {'my_key': 102})
+        self.assertRegex(
+ result['errors'],
+ 'YardstickException: An unknown exception occurred.')
+ self.assertEqual(result['sequence'], 1)
+ self.assertTrue(output_queue.empty())
+
+ def test__worker_process_broad_exception_sla_cfg_not_none(self):
+ self.scenario_cfg['sla'] = {'action': 'some action'}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.YardstickException)
+
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.call_count, 8)
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 128, 'size': 2000})
+
+ def test__worker_process_output_on_broad_exception_sla_cfg_not_none(self):
+ self.scenario_cfg['sla'] = {'action': 'some action'}
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.BROAD_EXCEPTION_SIDE_EFFECT)
+
+ queue = multiprocessing.Queue()
+ output_queue = multiprocessing.Queue()
+ timestamp = time.time()
+ arithmetic._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.01)
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.count, 109)
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 128, 'size': 2000})
+ self.assertTrue(output_queue.empty())
+ count = 0
+ while not queue.empty():
+ count += 1
+ result = queue.get()
+ self.assertGreater(result['timestamp'], timestamp)
+ self.assertEqual(result['data'], {'my_key': count + 101})
+            self.assertRegex(
+ result['errors'],
+ 'YardstickException: An unknown exception occurred.')
+ self.assertEqual(result['sequence'], count)
+
+ def test__worker_process_benchmark_teardown_on_broad_exception(self):
+ self.benchmark.teardown = mock.Mock(
+ side_effect=y_exc.YardstickException)
+
+ with self.assertRaises(SystemExit) as raised:
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ self.assertEqual(raised.exception.code, 1)
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.call_count, 8)
diff --git a/yardstick/tests/unit/benchmark/runner/test_base.py b/yardstick/tests/unit/benchmark/runner/test_base.py
new file mode 100644
index 000000000..07d6f1843
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/runner/test_base.py
@@ -0,0 +1,119 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import time
+
+import mock
+import subprocess
+
+from yardstick.benchmark.runners import base as runner_base
+from yardstick.benchmark.runners import iteration
+from yardstick.tests.unit import base as ut_base
+
+
+class ActionTestCase(ut_base.BaseUnitTestCase):
+
+ def setUp(self):
+ self._mock_log = mock.patch.object(runner_base.log, 'error')
+ self.mock_log = self._mock_log.start()
+ self.addCleanup(self._stop_mocks)
+
+ def _stop_mocks(self):
+ self._mock_log.stop()
+
+ @mock.patch.object(subprocess, 'check_output')
+ def test__execute_shell_command(self, mock_subprocess):
+ mock_subprocess.side_effect = subprocess.CalledProcessError(-1, '')
+ self.assertEqual(runner_base._execute_shell_command("")[0], -1)
+
+ @mock.patch.object(subprocess, 'check_output')
+ def test__single_action(self, mock_subprocess):
+ mock_subprocess.side_effect = subprocess.CalledProcessError(-1, '')
+ runner_base._single_action(0, 'echo', mock.Mock())
+
+ @mock.patch.object(subprocess, 'check_output')
+ def test__periodic_action(self, mock_subprocess):
+ mock_subprocess.side_effect = subprocess.CalledProcessError(-1, '')
+ runner_base._periodic_action(0, 'echo', mock.Mock())
+
+
+class ScenarioOutputTestCase(ut_base.BaseUnitTestCase):
+
+ def setUp(self):
+ self.output_queue = mock.Mock()
+ self.scenario_output = runner_base.ScenarioOutput(self.output_queue,
+ sequence=1)
+
+ @mock.patch.object(time, 'time')
+ def test_push(self, mock_time):
+ mock_time.return_value = 2
+ data = {"value1": 1}
+ self.scenario_output.push(data)
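+        # push() stamps the record with the (mocked) time and the sequence
+        # number, then puts it on the output queue (block=True, 10 s timeout).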
+ self.output_queue.put.assert_called_once_with({'timestamp': 2,
+ 'sequence': 1,
+ 'data': data}, True, 10)
+
+ def test_push_no_timestamp(self):
+ self.scenario_output["value1"] = 1
+ self.scenario_output.push(None, False)
+ self.output_queue.put.assert_called_once_with({'sequence': 1,
+ 'value1': 1}, True, 10)
+
+
+class RunnerTestCase(ut_base.BaseUnitTestCase):
+
+ def setUp(self):
+ config = {
+ 'output_config': {
+ 'DEFAULT': {
+ 'dispatcher': 'file'
+ }
+ }
+ }
+ self.runner = iteration.IterationRunner(config)
+
+ @mock.patch("yardstick.benchmark.runners.iteration.multiprocessing")
+ def test_get_output(self, *args):
+ self.runner.output_queue.put({'case': 'opnfv_yardstick_tc002'})
+ self.runner.output_queue.put({'criteria': 'PASS'})
+
+ idle_result = {
+ 'case': 'opnfv_yardstick_tc002',
+ 'criteria': 'PASS'
+ }
+
+ for _ in range(1000):
+ time.sleep(0.01)
+ if not self.runner.output_queue.empty():
+ break
+ actual_result = self.runner.get_output()
+ self.assertEqual(idle_result, actual_result)
+
+ @mock.patch("yardstick.benchmark.runners.iteration.multiprocessing")
+ def test_get_result(self, *args):
+ self.runner.result_queue.put({'case': 'opnfv_yardstick_tc002'})
+ self.runner.result_queue.put({'criteria': 'PASS'})
+
+ idle_result = [
+ {'case': 'opnfv_yardstick_tc002'},
+ {'criteria': 'PASS'}
+ ]
+
+ for _ in range(1000):
+ time.sleep(0.01)
+ if not self.runner.result_queue.empty():
+ break
+ actual_result = self.runner.get_result()
+ self.assertEqual(idle_result, actual_result)
+
+ def test__run_benchmark(self):
+ runner = runner_base.Runner(mock.Mock())
+
+ with self.assertRaises(NotImplementedError):
+            runner._run_benchmark(mock.Mock(), mock.Mock(), mock.Mock(),
+                                  mock.Mock())
diff --git a/yardstick/tests/unit/benchmark/runner/test_duration.py b/yardstick/tests/unit/benchmark/runner/test_duration.py
new file mode 100644
index 000000000..fa47e96bf
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/runner/test_duration.py
@@ -0,0 +1,315 @@
+##############################################################################
+# Copyright (c) 2018 Nokia and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+import multiprocessing
+import os
+import time
+
+from yardstick.benchmark.runners import duration
+from yardstick.common import exceptions as y_exc
+
+
+class DurationRunnerTest(unittest.TestCase):
+ class MyMethod(object):
+ SLA_VALIDATION_ERROR_SIDE_EFFECT = 1
+ BROAD_EXCEPTION_SIDE_EFFECT = 2
+
+ def __init__(self, side_effect=0):
+ self.count = 101
+ self.side_effect = side_effect
+
+ def __call__(self, data):
+ self.count += 1
+ data['my_key'] = self.count
+ if self.side_effect == self.SLA_VALIDATION_ERROR_SIDE_EFFECT:
+ raise y_exc.SLAValidationError(case_name='My Case',
+ error_msg='my error message')
+ elif self.side_effect == self.BROAD_EXCEPTION_SIDE_EFFECT:
+ raise y_exc.YardstickException
+ return self.count
+
+ def setUp(self):
+ self.scenario_cfg = {
+ 'runner': {'interval': 0, "duration": 0},
+ 'type': 'some_type'
+ }
+
+ self.benchmark = mock.Mock()
+ self.benchmark_cls = mock.Mock(return_value=self.benchmark)
+
+ def _assert_defaults__worker_run_setup_and_teardown(self):
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.teardown.assert_called_once()
+
+ def _assert_defaults__worker_run_one_iteration(self):
+ self.benchmark.pre_run_wait_time.assert_called_once_with(0)
+ self.benchmark.my_method.assert_called_once_with({})
+ self.benchmark.post_run_wait_time.assert_called_once_with(0)
+
+ @mock.patch.object(os, 'getpid')
+ @mock.patch.object(multiprocessing, 'Process')
+ def test__run_benchmark_called_with(self, mock_multiprocessing_process,
+ mock_os_getpid):
+ mock_os_getpid.return_value = 101
+
+ runner = duration.DurationRunner({})
+ benchmark_cls = mock.Mock()
+ runner._run_benchmark(benchmark_cls, 'my_method', self.scenario_cfg,
+ {})
+ mock_multiprocessing_process.assert_called_once_with(
+ name='Duration-some_type-101',
+ target=duration._worker_process,
+ args=(runner.result_queue, benchmark_cls, 'my_method',
+ self.scenario_cfg, {}, runner.aborted, runner.output_queue))
+
+ @mock.patch.object(os, 'getpid')
+ def test__worker_process_runner_id(self, mock_os_getpid):
+ mock_os_getpid.return_value = 101
+
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self.assertEqual(self.scenario_cfg['runner']['runner_id'], 101)
+
+ def test__worker_process_called_with_cfg(self):
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self._assert_defaults__worker_run_one_iteration()
+
+ def test__worker_process_called_with_cfg_loop(self):
+ self.scenario_cfg['runner']['duration'] = 0.01
+
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self.assertGreater(self.benchmark.pre_run_wait_time.call_count, 0)
+ self.assertGreater(self.benchmark.my_method.call_count, 0)
+ self.assertGreater(self.benchmark.post_run_wait_time.call_count, 0)
+
+ def test__worker_process_called_without_cfg(self):
+ scenario_cfg = {'runner': {}}
+ aborted = multiprocessing.Event()
+ aborted.set()
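+        # With an empty runner config the worker falls back to its defaults
+        # (1 s pre/post wait) and still completes a single iteration even
+        # though the abort event is already set.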
+
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ scenario_cfg, {}, aborted, mock.Mock())
+
+ self.benchmark_cls.assert_called_once_with(scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.pre_run_wait_time.assert_called_once_with(1)
+ self.benchmark.my_method.assert_called_once_with({})
+ self.benchmark.post_run_wait_time.assert_called_once_with(1)
+ self.benchmark.teardown.assert_called_once()
+
+ def test__worker_process_output_queue(self):
+ self.benchmark.my_method = mock.Mock(return_value='my_result')
+
+ output_queue = multiprocessing.Queue()
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.1)
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self._assert_defaults__worker_run_one_iteration()
+        self.assertEqual(output_queue.get(), 'my_result')
+
+ def test__worker_process_output_queue_multiple_iterations(self):
+ self.scenario_cfg['runner']['duration'] = 0.01
+ self.benchmark.my_method = self.MyMethod()
+
+ output_queue = multiprocessing.Queue()
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.1)
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self.assertGreater(self.benchmark.pre_run_wait_time.call_count, 0)
+ self.assertGreater(self.benchmark.my_method.count, 1)
+ self.assertGreater(self.benchmark.post_run_wait_time.call_count, 0)
+
+ count = 101
+ while not output_queue.empty():
+ count += 1
+            self.assertEqual(output_queue.get(), count)
+
+ def test__worker_process_queue(self):
+ self.benchmark.my_method = self.MyMethod()
+
+ queue = multiprocessing.Queue()
+ timestamp = time.time()
+ duration._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ time.sleep(0.1)
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self.benchmark.pre_run_wait_time.assert_called_once_with(0)
+ self.benchmark.post_run_wait_time.assert_called_once_with(0)
+
+ result = queue.get()
+ self.assertGreater(result['timestamp'], timestamp)
+ self.assertEqual(result['errors'], '')
+ self.assertEqual(result['data'], {'my_key': 102})
+ self.assertEqual(result['sequence'], 1)
+
+ def test__worker_process_queue_multiple_iterations(self):
+ self.scenario_cfg['runner']['duration'] = 0.5
+ self.benchmark.my_method = self.MyMethod()
+
+ queue = multiprocessing.Queue()
+ timestamp = time.time()
+ duration._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ time.sleep(0.1)
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self.assertGreater(self.benchmark.pre_run_wait_time.call_count, 0)
+ self.assertGreater(self.benchmark.my_method.count, 1)
+ self.assertGreater(self.benchmark.post_run_wait_time.call_count, 0)
+
+ count = 0
+ while not queue.empty():
+ count += 1
+ result = queue.get()
+ self.assertGreater(result['timestamp'], timestamp)
+ self.assertEqual(result['errors'], '')
+ self.assertEqual(result['data'], {'my_key': count + 101})
+ self.assertEqual(result['sequence'], count)
+
+ def test__worker_process_except_sla_validation_error_no_sla_cfg(self):
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self._assert_defaults__worker_run_one_iteration()
+
+ def test__worker_process_except_sla_validation_error_sla_cfg_monitor(self):
+ self.scenario_cfg['sla'] = {'action': 'monitor'}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self._assert_defaults__worker_run_one_iteration()
+
+ def test__worker_process_raise_sla_validation_error_sla_cfg_default(self):
+ self.scenario_cfg['sla'] = {}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+
+ with self.assertRaises(y_exc.SLAValidationError):
+ duration._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.pre_run_wait_time.assert_called_once_with(0)
+ self.benchmark.my_method.assert_called_once_with({})
+
+ def test__worker_process_raise_sla_validation_error_sla_cfg_assert(self):
+ self.scenario_cfg['sla'] = {'action': 'assert'}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+
+ with self.assertRaises(y_exc.SLAValidationError):
+ duration._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.pre_run_wait_time.assert_called_once_with(0)
+ self.benchmark.my_method.assert_called_once_with({})
+
+ def test__worker_process_queue_on_sla_validation_error_monitor(self):
+ self.scenario_cfg['sla'] = {'action': 'monitor'}
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.SLA_VALIDATION_ERROR_SIDE_EFFECT)
+
+ queue = multiprocessing.Queue()
+ timestamp = time.time()
+ duration._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ time.sleep(0.1)
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self.benchmark.pre_run_wait_time.assert_called_once_with(0)
+ self.benchmark.post_run_wait_time.assert_called_once_with(0)
+
+ result = queue.get()
+ self.assertGreater(result['timestamp'], timestamp)
+ self.assertEqual(result['errors'], ('My Case SLA validation failed. '
+ 'Error: my error message',))
+ self.assertEqual(result['data'], {'my_key': 102})
+ self.assertEqual(result['sequence'], 1)
+
+ def test__worker_process_broad_exception(self):
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.YardstickException)
+
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self._assert_defaults__worker_run_one_iteration()
+
+ def test__worker_process_queue_on_broad_exception(self):
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.BROAD_EXCEPTION_SIDE_EFFECT)
+
+ queue = multiprocessing.Queue()
+ timestamp = time.time()
+ duration._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ time.sleep(0.1)
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self.benchmark.pre_run_wait_time.assert_called_once_with(0)
+ self.benchmark.post_run_wait_time.assert_called_once_with(0)
+
+ result = queue.get()
+ self.assertGreater(result['timestamp'], timestamp)
+ self.assertNotEqual(result['errors'], '')
+ self.assertEqual(result['data'], {'my_key': 102})
+ self.assertEqual(result['sequence'], 1)
+
+ def test__worker_process_benchmark_teardown_on_broad_exception(self):
+ self.benchmark.teardown = mock.Mock(
+ side_effect=y_exc.YardstickException)
+
+ with self.assertRaises(SystemExit) as raised:
+ duration._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ self.assertEqual(raised.exception.code, 1)
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self._assert_defaults__worker_run_one_iteration()
diff --git a/yardstick/tests/unit/benchmark/runner/test_iteration.py b/yardstick/tests/unit/benchmark/runner/test_iteration.py
new file mode 100644
index 000000000..783b236f5
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/runner/test_iteration.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2018 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+import multiprocessing
+from yardstick.benchmark.runners import iteration
+from yardstick.common import exceptions as y_exc
+
+
+class IterationRunnerTest(unittest.TestCase):
+ def setUp(self):
+ self.scenario_cfg = {
+ 'runner': {'interval': 0, "duration": 0},
+ 'type': 'some_type'
+ }
+
+ self.benchmark = mock.Mock()
+ self.benchmark_cls = mock.Mock(return_value=self.benchmark)
+
+ def _assert_defaults__worker_run_setup_and_teardown(self):
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+
+ def _assert_defaults__worker_run_one_iteration(self):
+ self.benchmark.pre_run_wait_time.assert_called_once_with(0)
+ self.benchmark.my_method.assert_called_once_with({})
+
+ def test__worker_process_broad_exception(self):
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.YardstickException)
+
+ with self.assertRaises(Exception):
+ iteration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_one_iteration()
+ self._assert_defaults__worker_run_setup_and_teardown()
diff --git a/yardstick/tests/unit/benchmark/runner/test_proxduration.py b/yardstick/tests/unit/benchmark/runner/test_proxduration.py
new file mode 100644
index 000000000..056195fd3
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/runner/test_proxduration.py
@@ -0,0 +1,286 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+import unittest
+import multiprocessing
+import os
+
+from yardstick.benchmark.runners import proxduration
+from yardstick.common import constants
+from yardstick.common import exceptions as y_exc
+
+
+class ProxDurationRunnerTest(unittest.TestCase):
+
+ class MyMethod(object):
+ SLA_VALIDATION_ERROR_SIDE_EFFECT = 1
+ BROAD_EXCEPTION_SIDE_EFFECT = 2
+
+ def __init__(self, side_effect=0):
+ self.count = 101
+ self.side_effect = side_effect
+
+ def __call__(self, data):
+ self.count += 1
+ data['my_key'] = self.count
+ if self.side_effect == self.SLA_VALIDATION_ERROR_SIDE_EFFECT:
+ raise y_exc.SLAValidationError(case_name='My Case',
+ error_msg='my error message')
+ elif self.side_effect == self.BROAD_EXCEPTION_SIDE_EFFECT:
+ raise y_exc.YardstickException
+ return self.count
+
+ def setUp(self):
+ self.scenario_cfg = {
+ 'runner': {'interval': 0, "duration": 0},
+ 'type': 'some_type'
+ }
+
+ self.benchmark = mock.Mock()
+ self.benchmark_cls = mock.Mock(return_value=self.benchmark)
+
+ def _assert_defaults__worker_run_setup_and_teardown(self):
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.teardown.assert_called_once()
+
+ @mock.patch.object(os, 'getpid')
+ @mock.patch.object(multiprocessing, 'Process')
+ def test__run_benchmark_called_with(self, mock_multiprocessing_process,
+ mock_os_getpid):
+ mock_os_getpid.return_value = 101
+
+ runner = proxduration.ProxDurationRunner({})
+ benchmark_cls = mock.Mock()
+ runner._run_benchmark(benchmark_cls, 'my_method', self.scenario_cfg,
+ {})
+ mock_multiprocessing_process.assert_called_once_with(
+ name='ProxDuration-some_type-101',
+ target=proxduration._worker_process,
+ args=(runner.result_queue, benchmark_cls, 'my_method',
+ self.scenario_cfg, {}, runner.aborted, runner.output_queue))
+
+ @mock.patch.object(os, 'getpid')
+ def test__worker_process_runner_id(self, mock_os_getpid):
+ mock_os_getpid.return_value = 101
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method', self.scenario_cfg,
+ {}, multiprocessing.Event(), mock.Mock())
+
+ self.assertEqual(101, self.scenario_cfg['runner']['runner_id'])
+
+ def test__worker_process_called_with_cfg(self):
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method', self.scenario_cfg,
+ {}, multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+
+ def test__worker_process_called_with_cfg_loop(self):
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method', self.scenario_cfg,
+ {}, multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self.assertGreater(self.benchmark.my_method.call_count, 0)
+
+ def test__worker_process_called_without_cfg(self):
+ scenario_cfg = {'runner': {}}
+ aborted = multiprocessing.Event()
+ aborted.set()
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method', scenario_cfg, {},
+ aborted, mock.Mock())
+
+ self.benchmark_cls.assert_called_once_with(scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.teardown.assert_called_once()
+
+ def test__worker_process_output_queue(self):
+ self.benchmark.my_method = mock.Mock(return_value='my_result')
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ output_queue = mock.Mock()
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method', self.scenario_cfg,
+ {}, multiprocessing.Event(), output_queue)
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ output_queue.put.assert_has_calls(
+ [mock.call('my_result', True, constants.QUEUE_PUT_TIMEOUT)])
+
+ def test__worker_process_output_queue_multiple_iterations(self):
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ self.benchmark.my_method = self.MyMethod()
+ output_queue = mock.Mock()
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method', self.scenario_cfg,
+ {}, multiprocessing.Event(), output_queue)
+
+ self._assert_defaults__worker_run_setup_and_teardown()
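+        # MyMethod starts counting at 101, so the first value put on the
+        # output queue is 102; expect one put call per completed iteration.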
+ for idx in range(102, 101 + len(output_queue.method_calls)):
+ output_queue.put.assert_has_calls(
+ [mock.call(idx, True, constants.QUEUE_PUT_TIMEOUT)])
+
+ def test__worker_process_queue(self):
+ self.benchmark.my_method = self.MyMethod()
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ queue = mock.Mock()
+ proxduration._worker_process(
+ queue, self.benchmark_cls, 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ benchmark_output = {'timestamp': mock.ANY,
+ 'sequence': 1,
+ 'data': {'my_key': 102},
+ 'errors': ''}
+ queue.put.assert_has_calls(
+ [mock.call(benchmark_output, True, constants.QUEUE_PUT_TIMEOUT)])
+
+ def test__worker_process_queue_multiple_iterations(self):
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ self.benchmark.my_method = self.MyMethod()
+ queue = mock.Mock()
+ proxduration._worker_process(
+ queue, self.benchmark_cls, 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ for idx in range(102, 101 + len(queue.method_calls)):
+ benchmark_output = {'timestamp': mock.ANY,
+ 'sequence': idx - 101,
+ 'data': {'my_key': idx},
+ 'errors': ''}
+ queue.put.assert_has_calls(
+ [mock.call(benchmark_output, True,
+ constants.QUEUE_PUT_TIMEOUT)])
+
+ def test__worker_process_except_sla_validation_error_no_sla_cfg(self):
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method', self.scenario_cfg,
+ {}, multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+
+ @mock.patch.object(proxduration.LOG, 'warning')
+ def test__worker_process_except_sla_validation_error_sla_cfg_monitor(
+ self, *args):
+ self.scenario_cfg['sla'] = {'action': 'monitor'}
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method', self.scenario_cfg,
+ {}, multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+
+ def test__worker_process_raise_sla_validation_error_sla_cfg_default(self):
+ self.scenario_cfg['sla'] = {}
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+ with self.assertRaises(y_exc.SLAValidationError):
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {}, multiprocessing.Event(), mock.Mock())
+
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.my_method.assert_called_once_with({})
+
+ def test__worker_process_raise_sla_validation_error_sla_cfg_assert(self):
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ self.scenario_cfg['sla'] = {'action': 'assert'}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+
+ with self.assertRaises(y_exc.SLAValidationError):
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {}, multiprocessing.Event(), mock.Mock())
+
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.my_method.assert_called_once_with({})
+
+ @mock.patch.object(proxduration.LOG, 'warning')
+ def test__worker_process_queue_on_sla_validation_error_monitor(
+ self, *args):
+ self.scenario_cfg['sla'] = {'action': 'monitor'}
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.SLA_VALIDATION_ERROR_SIDE_EFFECT)
+ queue = mock.Mock()
+ proxduration._worker_process(
+ queue, self.benchmark_cls, 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ benchmark_output = {'timestamp': mock.ANY,
+ 'sequence': 1,
+ 'data': {'my_key': 102},
+ 'errors': ('My Case SLA validation failed. '
+ 'Error: my error message', )}
+ queue.put.assert_has_calls(
+ [mock.call(benchmark_output, True, constants.QUEUE_PUT_TIMEOUT)])
+
+ @mock.patch.object(proxduration.LOG, 'exception')
+ def test__worker_process_broad_exception(self, *args):
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.YardstickException)
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {}, multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+
+ @mock.patch.object(proxduration.LOG, 'exception')
+ def test__worker_process_queue_on_broad_exception(self, *args):
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.BROAD_EXCEPTION_SIDE_EFFECT)
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ queue = mock.Mock()
+ proxduration._worker_process(
+ queue, self.benchmark_cls, 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ benchmark_output = {'timestamp': mock.ANY,
+ 'sequence': 1,
+ 'data': {'my_key': 102},
+ 'errors': mock.ANY}
+ queue.put.assert_has_calls(
+ [mock.call(benchmark_output, True, constants.QUEUE_PUT_TIMEOUT)])
+
+ @mock.patch.object(proxduration.LOG, 'exception')
+ def test__worker_process_benchmark_teardown_on_broad_exception(
+ self, *args):
+ self.benchmark.teardown = mock.Mock(
+ side_effect=y_exc.YardstickException)
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+
+ with self.assertRaises(SystemExit) as raised:
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {}, multiprocessing.Event(), mock.Mock())
+ self.assertEqual(1, raised.exception.code)
+ self._assert_defaults__worker_run_setup_and_teardown()
diff --git a/yardstick/tests/unit/benchmark/runner/test_search.py b/yardstick/tests/unit/benchmark/runner/test_search.py
new file mode 100644
index 000000000..d5d1b8ded
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/runner/test_search.py
@@ -0,0 +1,192 @@
+# Copyright (c) 2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import time
+
+import mock
+import unittest
+
+from yardstick.benchmark.runners.search import SearchRunner
+from yardstick.benchmark.runners.search import SearchRunnerHelper
+from yardstick.common import exceptions as y_exc
+
+
+class TestSearchRunnerHelper(unittest.TestCase):
+
+ def test___call__(self):
+ scenario_cfg = {
+ 'runner': {},
+ }
+
+ benchmark = mock.Mock()
+ method = getattr(benchmark(), 'my_method')
+ helper = SearchRunnerHelper(
+ benchmark, 'my_method', scenario_cfg, {}, mock.Mock())
+
+ with helper.get_benchmark_instance():
+ helper()
+
+ method.assert_called_once()
+
+ def test___call___error(self):
+ scenario_cfg = {
+ 'runner': {},
+ }
+
+ helper = SearchRunnerHelper(
+ mock.Mock(), 'my_method', scenario_cfg, {}, mock.Mock())
+
+ with self.assertRaises(RuntimeError):
+ helper()
+
+ @mock.patch.object(time, 'sleep')
+ @mock.patch.object(time, 'time')
+ def test_is_not_done(self, mock_time, *args):
+ scenario_cfg = {
+ 'runner': {},
+ }
+
+ mock_time.side_effect = range(1000)
+
+ helper = SearchRunnerHelper(
+ mock.Mock(), 'my_method', scenario_cfg, {}, mock.Mock())
+
+ index = -1
+ for index in helper.is_not_done():
+ if index >= 10:
+ break
+
+ self.assertGreaterEqual(index, 10)
+
+ @mock.patch.object(time, 'sleep')
+ def test_is_not_done_immediate_stop(self, *args):
+ scenario_cfg = {
+ 'runner': {
+ 'run_step': '',
+ },
+ }
+
+ helper = SearchRunnerHelper(
+ mock.Mock(), 'my_method', scenario_cfg, {}, mock.Mock())
+
+ index = -1
+ for index in helper.is_not_done():
+ if index >= 10:
+ break
+
+ self.assertEqual(index, -1)
+
+
+class TestSearchRunner(unittest.TestCase):
+
+ def test__worker_run_once(self):
+ def update(*args):
+ args[-1].update(data)
+
+ data = {
+ 'key1': {
+ 'inner1': 'value1',
+ 'done': 0,
+ },
+ 'key2': {
+ 'done': None,
+ },
+ }
+
+ runner = SearchRunner({})
+ runner.worker_helper = mock.Mock(side_effect=update)
+
+ self.assertFalse(runner._worker_run_once('sequence 1'))
+
+ def test__worker_run_once_done(self):
+ def update(*args):
+ args[-1].update(data)
+
+ data = {
+ 'key1': {
+ 'inner1': 'value1',
+ 'done': 0,
+ },
+ 'key2': {
+ 'done': None,
+ },
+ 'key3': {
+ 'done': True,
+ },
+ 'key4': [],
+ 'key5': 'value5',
+ }
+
+ runner = SearchRunner({})
+ runner.worker_helper = mock.Mock(side_effect=update)
+
+ self.assertTrue(runner._worker_run_once('sequence 1'))
+
+ def test__worker_run_once_assertion_error_assert(self):
+ runner = SearchRunner({})
+ runner.sla_action = 'assert'
+ runner.worker_helper = mock.Mock(side_effect=y_exc.SLAValidationError)
+
+ with self.assertRaises(y_exc.SLAValidationError):
+ runner._worker_run_once('sequence 1')
+
+ def test__worker_run_once_assertion_error_monitor(self):
+ runner = SearchRunner({})
+ runner.sla_action = 'monitor'
+ runner.worker_helper = mock.Mock(side_effect=y_exc.SLAValidationError)
+
+ self.assertFalse(runner._worker_run_once('sequence 1'))
+
+ def test__worker_run_once_non_assertion_error_none(self):
+ runner = SearchRunner({})
+ runner.worker_helper = mock.Mock(side_effect=RuntimeError)
+
+ self.assertTrue(runner._worker_run_once('sequence 1'))
+
+ def test__worker_run_once_non_assertion_error(self):
+ runner = SearchRunner({})
+ runner.sla_action = 'monitor'
+ runner.worker_helper = mock.Mock(side_effect=RuntimeError)
+
+ self.assertFalse(runner._worker_run_once('sequence 1'))
+
+ def test__worker_run(self):
+ scenario_cfg = {
+ 'runner': {'interval': 0, 'timeout': 1},
+ }
+
+ runner = SearchRunner({})
+ runner._worker_run_once = mock.Mock(side_effect=[0, 0, 1])
+
+ runner._worker_run(mock.Mock(), 'my_method', scenario_cfg, {})
+
+ def test__worker_run_immediate_stop(self):
+ scenario_cfg = {
+ 'runner': {
+ 'run_step': '',
+ },
+ }
+
+ runner = SearchRunner({})
+ runner._worker_run(mock.Mock(), 'my_method', scenario_cfg, {})
+
+ @mock.patch('yardstick.benchmark.runners.search.multiprocessing')
+ def test__run_benchmark(self, mock_multi_process):
+ scenario_cfg = {
+ 'runner': {},
+ }
+
+ runner = SearchRunner({})
+ runner._run_benchmark(mock.Mock(), 'my_method', scenario_cfg, {})
+ mock_multi_process.Process.assert_called_once()
diff --git a/yardstick/tests/unit/benchmark/scenarios/__init__.py b/yardstick/tests/unit/benchmark/scenarios/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/__init__.py b/yardstick/tests/unit/benchmark/scenarios/availability/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py
new file mode 100644
index 000000000..35455a49c
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py
@@ -0,0 +1,93 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.attacker import \
+ attacker_baremetal
+
+
+class ExecuteShellTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_subprocess = mock.patch.object(attacker_baremetal,
+ 'subprocess')
+ self.mock_subprocess = self._mock_subprocess.start()
+
+ self.addCleanup(self._stop_mocks)
+
+ def _stop_mocks(self):
+ self._mock_subprocess.stop()
+
+ def test__execute_shell_command_successful(self):
+ self.mock_subprocess.check_output.return_value = (0, 'unittest')
+ exitcode, _ = attacker_baremetal._execute_shell_command("env")
+ self.assertEqual(exitcode, 0)
+
+ @mock.patch.object(attacker_baremetal, 'LOG')
+ def test__execute_shell_command_fail_cmd_exception(self, mock_log):
+ self.mock_subprocess.check_output.side_effect = RuntimeError
+ exitcode, _ = attacker_baremetal._execute_shell_command("env")
+ self.assertEqual(exitcode, -1)
+ mock_log.error.assert_called_once()
+
+
+class AttackerBaremetalTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_ssh = mock.patch.object(attacker_baremetal, 'ssh')
+ self.mock_ssh = self._mock_ssh.start()
+ self._mock_subprocess = mock.patch.object(attacker_baremetal,
+ 'subprocess')
+ self.mock_subprocess = self._mock_subprocess.start()
+ self.addCleanup(self._stop_mocks)
+
+ self.mock_ssh.SSH.from_node().execute.return_value = (
+ 0, "running", '')
+
+ host = {
+ "ipmi_ip": "10.20.0.5",
+ "ipmi_user": "root",
+ "ipmi_password": "123456",
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.attacker_cfg = {
+ 'fault_type': 'bear-metal-down',
+ 'host': 'node1',
+ }
+
+ self.ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg,
+ self.context)
+
+ def _stop_mocks(self):
+ self._mock_ssh.stop()
+ self._mock_subprocess.stop()
+
+ def test__attacker_baremetal_all_successful(self):
+ self.ins.setup()
+ self.ins.inject_fault()
+ self.ins.recover()
+
+ def test__attacker_baremetal_check_failure(self):
+ self.mock_ssh.SSH.from_node().execute.return_value = (
+ 0, "error check", '')
+ self.ins.setup()
+
+ def test__attacker_baremetal_recover_successful(self):
+ self.attacker_cfg["jump_host"] = 'node1'
+ self.context["node1"]["password"] = "123456"
+ ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg,
+ self.context)
+
+ ins.setup()
+ ins.recover()
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_general.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_general.py
new file mode 100644
index 000000000..c1b3c0d72
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_general.py
@@ -0,0 +1,56 @@
+##############################################################################
+# Copyright (c) 2016 Juan Qiu and others
+# juan_qiu@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.attacker
+# .attacker_general
+
+from __future__ import absolute_import
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.attacker import baseattacker
+
+
+@mock.patch('yardstick.benchmark.scenarios.availability.attacker.'
+ 'attacker_general.ssh')
+class GeneralAttackerServiceTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.attacker_cfg = {
+ 'fault_type': 'general-attacker',
+ 'action_parameter': {'process_name': 'nova_api'},
+ 'rollback_parameter': {'process_name': 'nova_api'},
+ 'key': 'stop-service',
+ 'attack_key': 'stop-service',
+ 'host': 'node1',
+ }
+
+ def test__attacker_service_all_successful(self, mock_ssh):
+
+ cls = baseattacker.BaseAttacker.get_attacker_cls(self.attacker_cfg)
+ ins = cls(self.attacker_cfg, self.context)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "running", '')
+ ins.setup()
+ ins.inject_fault()
+ ins.recover()
+
+    def test__attacker_service_check_failure(self, mock_ssh):
+
+ cls = baseattacker.BaseAttacker.get_attacker_cls(self.attacker_cfg)
+ ins = cls(self.attacker_cfg, self.context)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "error check", '')
+ ins.setup()
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_process.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_process.py
new file mode 100644
index 000000000..2e9f1c6bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_process.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for
+# yardstick.benchmark.scenarios.availability.attacker.attacker_process
+
+from __future__ import absolute_import
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.attacker import baseattacker
+
+
+@mock.patch(
+ 'yardstick.benchmark.scenarios.availability.attacker.attacker_process.ssh')
+class AttackerServiceTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.attacker_cfg = {
+ 'fault_type': 'kill-process',
+ 'process_name': 'nova-api',
+ 'host': 'node1',
+ }
+
+ def test__attacker_service_all_successful(self, mock_ssh):
+
+ cls = baseattacker.BaseAttacker.get_attacker_cls(self.attacker_cfg)
+ ins = cls(self.attacker_cfg, self.context)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "10", '')
+ ins.setup()
+ ins.inject_fault()
+ ins.recover()
+
+ def test__attacker_service_check_failure(self, mock_ssh):
+
+ cls = baseattacker.BaseAttacker.get_attacker_cls(self.attacker_cfg)
+ ins = cls(self.attacker_cfg, self.context)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, None, '')
+ ins.setup()
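
The attacker tests above share one stubbing pattern: the module-level ssh import is patched and SSH.from_node().execute is primed with the (status, stdout, stderr) triple that the code under test unpacks. A minimal standalone sketch of that pattern follows; it uses only mock.MagicMock, makes no yardstick imports, and every name in it is illustrative.

import mock


def demo_ssh_stub():
    # Prime the chained call the same way the tests above do; yardstick's
    # ssh.SSH.execute() returns a (status, stdout, stderr) triple.
    mock_ssh = mock.MagicMock()
    mock_ssh.SSH.from_node().execute.return_value = (0, 'running', '')
    status, stdout, stderr = mock_ssh.SSH.from_node().execute('check process')
    assert (status, stdout, stderr) == (0, 'running', '')


demo_ssh_stub()
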
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_baseattacker.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_baseattacker.py
new file mode 100644
index 000000000..74f86983b
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_baseattacker.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2018 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import unittest
+
+from yardstick.benchmark.scenarios.availability.attacker import baseattacker
+
+
+class BaseAttackerTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.attacker_cfg = {
+ 'fault_type': 'test-attacker',
+ 'action_parameter': {'process_name': 'nova_api'},
+ 'rollback_parameter': {'process_name': 'nova_api'},
+ 'key': 'stop-service',
+ 'attack_key': 'stop-service',
+ 'host': 'node1',
+ }
+ self.base_attacker = baseattacker.BaseAttacker({}, {})
+
+ def test__init__(self):
+ self.assertEqual(self.base_attacker.data, {})
+ self.assertFalse(self.base_attacker.mandatory)
+ self.assertEqual(self.base_attacker.intermediate_variables, {})
+ self.assertFalse(self.base_attacker.mandatory)
+
+ def test_get_attacker_cls(self):
+ with self.assertRaises(RuntimeError):
+ baseattacker.BaseAttacker.get_attacker_cls(self.attacker_cfg)
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_basemonitor.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_basemonitor.py
new file mode 100644
index 000000000..8d042c406
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_basemonitor.py
@@ -0,0 +1,119 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import time
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.monitor import basemonitor
+
+
+class MonitorMgrTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.monitor_configs = [
+ {
+ "monitor_type": "openstack-cmd",
+ "command_name": "openstack router list",
+ "monitor_time": 10,
+ "monitor_number": 3,
+ "sla": {
+ "max_outage_time": 5
+ }
+ },
+ {
+ "monitor_type": "process",
+ "process_name": "neutron-server",
+ "host": "node1",
+ "monitor_time": 20,
+ "monitor_number": 3,
+ "sla": {
+ "max_recover_time": 20
+ }
+ }
+ ]
+ self.MonitorMgr = basemonitor.MonitorMgr([])
+ self.MonitorMgr.init_monitors(self.monitor_configs, None)
+ self.monitor_list = self.MonitorMgr._monitor_list
+ for mo in self.monitor_list:
+ mo._result = {"outage_time": 10}
+
+ @mock.patch.object(basemonitor, 'BaseMonitor')
+ def test__MonitorMgr_setup_successful(self, *args):
+ instance = basemonitor.MonitorMgr({"nova-api": 10})
+ instance.init_monitors(self.monitor_configs, None)
+ instance.start_monitors()
+ instance.wait_monitors()
+
+ # TODO(elfoley): Check the return value
+ ret = instance.verify_SLA() # pylint: disable=unused-variable
+
+ @mock.patch.object(basemonitor, 'BaseMonitor')
+ def test_MonitorMgr_getitem(self, *args):
+ monitorMgr = basemonitor.MonitorMgr({"nova-api": 10})
+ monitorMgr.init_monitors(self.monitor_configs, None)
+
+ @mock.patch.object(basemonitor, 'BaseMonitor')
+ def test_store_result(self, *args):
+ expect = {'process_neutron-server_outage_time': 10,
+ 'openstack-router-list_outage_time': 10}
+ result = {}
+ self.MonitorMgr.store_result(result)
+ self.assertDictEqual(result, expect)
+
+
+class BaseMonitorTestCase(unittest.TestCase):
+
+ class MonitorSimple(basemonitor.BaseMonitor):
+ __monitor_type__ = "MonitorForTest"
+
+ def setup(self):
+ self.monitor_result = False
+
+ def monitor_func(self):
+ return self.monitor_result
+
+ def setUp(self):
+ self.monitor_cfg = {
+ 'monitor_type': 'MonitorForTest',
+ 'command_name': 'nova image-list',
+ 'monitor_time': 0.01,
+ 'sla': {'max_outage_time': 5}
+ }
+
+ def _close_queue(self, instance):
+ time.sleep(0.1)
+ instance._queue.close()
+
+ def test__basemonitor_start_wait_successful(self):
+ ins = basemonitor.BaseMonitor(self.monitor_cfg, None, {"nova-api": 10})
+ self.addCleanup(self._close_queue, ins)
+ ins.start_monitor()
+ ins.wait_monitor()
+
+ def test__basemonitor_all_successful(self):
+ ins = self.MonitorSimple(self.monitor_cfg, None, {"nova-api": 10})
+ self.addCleanup(self._close_queue, ins)
+ ins.setup()
+ ins.run()
+ ins.verify_SLA()
+
+ @mock.patch.object(basemonitor, 'multiprocessing')
+ def test__basemonitor_func_false(self, mock_multiprocess):
+ ins = self.MonitorSimple(self.monitor_cfg, None, {"nova-api": 10})
+ self.addCleanup(self._close_queue, ins)
+ ins.setup()
+ mock_multiprocess.Event().is_set.return_value = False
+ ins.run()
+ ins.verify_SLA()
+
+ def test__basemonitor_getmonitorcls_fail(self):
+ with self.assertRaises(RuntimeError):
+ basemonitor.BaseMonitor.get_monitor_cls(self.monitor_cfg)
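
MonitorSimple above shows the extension contract these base-class tests rely on: a concrete monitor declares a type string and implements setup() and monitor_func(), while starting, waiting and SLA checking come from BaseMonitor, and get_monitor_cls() raises RuntimeError when no registered type matches. The sketch below is a simplified, standalone illustration of that lookup-by-type idea, not the yardstick implementation; all class names in it are invented.

class PluginBase(object):
    __plugin_type__ = None

    @classmethod
    def get_cls(cls, type_name):
        # Resolve a concrete subclass by its advertised type string.
        for sub in cls.__subclasses__():
            if sub.__plugin_type__ == type_name:
                return sub
        raise RuntimeError("No plugin registered for type '%s'" % type_name)


class EchoMonitor(PluginBase):
    __plugin_type__ = "echo"


assert PluginBase.get_cls("echo") is EchoMonitor
try:
    PluginBase.get_cls("missing")
except RuntimeError:
    pass  # unknown types fail loudly, as the tests above expect
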
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_baseoperation.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_baseoperation.py
new file mode 100644
index 000000000..b7c9f62ff
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_baseoperation.py
@@ -0,0 +1,79 @@
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.operation import baseoperation
+
+
+class OperationMgrTestCase(unittest.TestCase):
+
+ def setUp(self):
+ config = {
+ 'operation_type': 'general-operation',
+ 'key': 'service-status'
+ }
+
+ self.operation_configs = []
+ self.operation_configs.append(config)
+
+ @mock.patch.object(baseoperation, 'BaseOperation')
+ def test_all_successful(self, *args):
+ mgr_ins = baseoperation.OperationMgr()
+ mgr_ins.init_operations(self.operation_configs, None)
+ _ = mgr_ins["service-status"]
+ mgr_ins.rollback()
+
+ @mock.patch.object(baseoperation, 'BaseOperation')
+ def test_getitem_fail(self, *args):
+ mgr_ins = baseoperation.OperationMgr()
+ mgr_ins.init_operations(self.operation_configs, None)
+ with self.assertRaises(KeyError):
+ _ = mgr_ins["operation-not-exist"]
+
+
+class TestOperation(baseoperation.BaseOperation):
+ __operation__type__ = "test-operation"
+
+ def setup(self):
+ pass
+
+ def run(self):
+ pass
+
+ def rollback(self):
+ pass
+
+
+class BaseOperationTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.config = {
+ 'operation_type': 'general-operation',
+ 'key': 'service-status'
+ }
+ self.base_ins = baseoperation.BaseOperation(self.config, None)
+
+ def test_all_successful(self):
+ self.base_ins.setup()
+ self.base_ins.run()
+ self.base_ins.rollback()
+
+ def test_get_script_fullpath(self):
+ self.base_ins.get_script_fullpath("ha_tools/test.bash")
+
+ # TODO(elfoley): Fix test to check on expected outputs
+ # pylint: disable=unused-variable
+ def test_get_operation_cls_successful(self):
+ operation_ins = self.base_ins.get_operation_cls("test-operation")
+
+ def test_get_operation_cls_fail(self):
+ with self.assertRaises(RuntimeError):
+ self.base_ins.get_operation_cls("operation-not-exist")
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_baseresultchecker.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_baseresultchecker.py
new file mode 100644
index 000000000..d4df02819
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_baseresultchecker.py
@@ -0,0 +1,89 @@
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.result_checker import \
+ baseresultchecker
+
+
+class ResultCheckerMgrTestCase(unittest.TestCase):
+
+ def setUp(self):
+ config = {
+ 'checker_type': 'general-result-checker',
+ 'key': 'process-checker'
+ }
+
+ self.checker_configs = []
+ self.checker_configs.append(config)
+
+ self.mgr_ins = baseresultchecker.ResultCheckerMgr()
+
+ self._mock_basechecker = mock.patch.object(baseresultchecker,
+ 'BaseResultChecker')
+ self.mock_basechecker = self._mock_basechecker.start()
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_basechecker.stop()
+
+ def test_ResultCheckerMgr_setup_successful(self):
+ self.mgr_ins.verify()
+
+ def test_getitem_successful(self):
+ self.mgr_ins.init_ResultChecker(self.checker_configs, None)
+ _ = self.mgr_ins["process-checker"]
+
+ def test_getitem_fail(self):
+ self.mgr_ins.init_ResultChecker(self.checker_configs, None)
+ with self.assertRaises(KeyError):
+ _ = self.mgr_ins["checker-not-exist"]
+
+
+class BaseResultCheckerTestCase(unittest.TestCase):
+
+ class ResultCheckerSimple(baseresultchecker.BaseResultChecker):
+ __result_checker__type__ = "ResultCheckerForTest"
+
+ def setup(self):
+ self.success = False
+
+ def verify(self):
+ return self.success
+
+ def setUp(self):
+ self.checker_cfg = {
+ 'checker_type': 'general-result-checker',
+ 'key': 'process-checker'
+ }
+ self.ins = baseresultchecker.BaseResultChecker(self.checker_cfg, None)
+
+ def test_baseresultchecker_setup_verify_successful(self):
+ self.ins.setup()
+ self.ins.verify()
+
+ def test_baseresultchecker_verify_pass(self):
+ self.ins.setup()
+ self.ins.actualResult = True
+ self.ins.expectedResult = True
+ self.ins.verify()
+
+ def test_get_script_fullpath(self):
+ self.ins.get_script_fullpath("test.bash")
+
+ def test_get_resultchecker_cls_successful(self):
+ baseresultchecker.BaseResultChecker.get_resultchecker_cls(
+ "ResultCheckerForTest")
+
+ def test_get_resultchecker_cls_fail(self):
+ with self.assertRaises(RuntimeError):
+ baseresultchecker.BaseResultChecker.get_resultchecker_cls(
+ "ResultCheckerNotExist")
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_director.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_director.py
new file mode 100644
index 000000000..e49544e1c
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_director.py
@@ -0,0 +1,106 @@
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.director
+
+from __future__ import absolute_import
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.director import Director
+
+
+# pylint: disable=unused-argument
+# disable this for now because I keep forgetting mock patch arg ordering
+
+
+@mock.patch('yardstick.benchmark.scenarios.availability.director.basemonitor')
+@mock.patch('yardstick.benchmark.scenarios.availability.director.baseattacker')
+@mock.patch(
+ 'yardstick.benchmark.scenarios.availability.director.baseoperation')
+@mock.patch(
+ 'yardstick.benchmark.scenarios.availability.director.baseresultchecker')
+class DirectorTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.scenario_cfg = {
+ 'type': "general_scenario",
+ 'options': {
+ 'attackers': [{
+ 'fault_type': "general-attacker",
+ 'key': "kill-process"}],
+ 'monitors': [{
+ 'monitor_type': "general-monitor",
+ 'key': "service-status"}],
+ 'operations': [{
+ 'operation_type': 'general-operation',
+ 'key': 'service-status'}],
+ 'resultCheckers': [{
+ 'checker_type': 'general-result-checker',
+ 'key': 'process-checker', }],
+ 'steps': [
+ {
+ 'actionKey': "service-status",
+ 'actionType': "operation",
+ 'index': 1},
+ {
+ 'actionKey': "kill-process",
+ 'actionType': "attacker",
+ 'index': 2},
+ {
+ 'actionKey': "process-checker",
+ 'actionType': "resultchecker",
+ 'index': 3},
+ {
+ 'actionKey': "service-status",
+ 'actionType': "monitor",
+ 'index': 4},
+ ]
+ }
+ }
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.ctx = {"nodes": {"node1": host}}
+
+ def test_director_all_successful(self, mock_checker, mock_operation,
+ mock_attacker, mock_monitor):
+ ins = Director(self.scenario_cfg, self.ctx)
+ operation_action = ins.createActionPlayer("operation", "service-status")
+ attacker_action = ins.createActionPlayer("attacker", "kill-process")
+ checker_action = ins.createActionPlayer("resultchecker",
+ "process-checker")
+ monitor_action = ins.createActionPlayer("monitor", "service-status")
+
+ operation_rollback = ins.createActionRollbacker("operation",
+ "service-status")
+ attacker_rollback = ins.createActionRollbacker("attacker",
+ "kill-process")
+ ins.executionSteps.append(operation_rollback)
+ ins.executionSteps.append(attacker_rollback)
+
+ operation_action.action()
+ attacker_action.action()
+ checker_action.action()
+ monitor_action.action()
+
+ attacker_rollback.rollback()
+ operation_rollback.rollback()
+
+ ins.stopMonitors()
+ ins.verify()
+ ins.knockoff()
+
+ def test_director_get_wrong_item(self, mock_checker, mock_operation,
+ mock_attacker, mock_monitor):
+ ins = Director(self.scenario_cfg, self.ctx)
+ ins.createActionPlayer("wrong_type", "wrong_key")
+ ins.createActionRollbacker("wrong_type", "wrong_key")
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_command.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_command.py
new file mode 100644
index 000000000..1aebcc85b
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_command.py
@@ -0,0 +1,95 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.monitor import monitor_command
+
+
+class ExecuteShellTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_subprocess = mock.patch.object(monitor_command, 'subprocess')
+ self.mock_subprocess = self._mock_subprocess.start()
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_subprocess.stop()
+
+ def test__fun_execute_shell_command_successful(self):
+ cmd = "env"
+ self.mock_subprocess.check_output.return_value = (0, 'unittest')
+ exitcode, _t = monitor_command._execute_shell_command(cmd)
+ self.assertEqual(exitcode, 0)
+
+ @mock.patch.object(monitor_command, 'LOG')
+ def test__fun_execute_shell_command_fail_cmd_exception(self, mock_log):
+ cmd = "env"
+ self.mock_subprocess.check_output.side_effect = RuntimeError
+ exitcode, _ = monitor_command._execute_shell_command(cmd)
+ self.assertEqual(exitcode, -1)
+ mock_log.error.assert_called_once()
+
+
+class MonitorOpenstackCmdTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.config = {
+ 'monitor_type': 'openstack-api',
+ 'command_name': 'nova image-list',
+ 'monitor_time': 1,
+ 'sla': {'max_outage_time': 5}
+ }
+ self._mock_subprocess = mock.patch.object(monitor_command, 'subprocess')
+ self.mock_subprocess = self._mock_subprocess.start()
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_subprocess.stop()
+
+ def test__monitor_command_monitor_func_successful(self):
+
+ instance = monitor_command.MonitorOpenstackCmd(self.config, None, {"nova-api": 10})
+ instance.setup()
+ self.mock_subprocess.check_output.return_value = (0, 'unittest')
+ ret = instance.monitor_func()
+ self.assertTrue(ret)
+ instance._result = {"outage_time": 0}
+ instance.verify_SLA()
+
+ @mock.patch.object(monitor_command, 'LOG')
+ def test__monitor_command_monitor_func_failure(self, mock_log):
+ self.mock_subprocess.check_output.return_value = (1, 'unittest')
+ instance = monitor_command.MonitorOpenstackCmd(self.config, None, {"nova-api": 10})
+ instance.setup()
+ self.mock_subprocess.check_output.side_effect = RuntimeError
+ ret = instance.monitor_func()
+ self.assertFalse(ret)
+ mock_log.error.assert_called_once()
+ instance._result = {"outage_time": 10}
+ instance.verify_SLA()
+
+ @mock.patch.object(monitor_command, 'ssh')
+ def test__monitor_command_ssh_monitor_successful(self, mock_ssh):
+
+ self.mock_subprocess.check_output.return_value = (0, 'unittest')
+ self.config["host"] = "node1"
+ instance = monitor_command.MonitorOpenstackCmd(
+ self.config, self.context, {"nova-api": 10})
+ instance.setup()
+ mock_ssh.SSH.from_node().execute.return_value = (0, "0", '')
+ ret = instance.monitor_func()
+ self.assertTrue(ret)
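
test_monitor_command.py patches monitor_command.subprocess with mock.patch.object started in setUp() and stopped through addCleanup(), which keeps the patch active for the whole test and guarantees it is undone even when an assertion fails. A small self-contained sketch of the same start()/addCleanup() style follows; it patches time.sleep purely as a stand-in target.

import time

import mock
import unittest


class PatchObjectPatternTestCase(unittest.TestCase):

    def setUp(self):
        self._mock_sleep = mock.patch.object(time, 'sleep')
        self.mock_sleep = self._mock_sleep.start()
        # addCleanup() runs even when the test fails, so the patch never
        # leaks into other tests.
        self.addCleanup(self._mock_sleep.stop)

    def test_sleep_is_stubbed(self):
        time.sleep(100)  # returns immediately because sleep is now a mock
        self.mock_sleep.assert_called_once_with(100)
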
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_general.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_general.py
new file mode 100644
index 000000000..5907c8b6a
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_general.py
@@ -0,0 +1,82 @@
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.monitor
+# .monitor_general
+
+from __future__ import absolute_import
+import mock
+import unittest
+from yardstick.benchmark.scenarios.availability.monitor import monitor_general
+
+
+# pylint: disable=unused-argument
+# disable this for now because I keep forgetting mock patch arg ordering
+
+
+@mock.patch('yardstick.benchmark.scenarios.availability.monitor.'
+ 'monitor_general.ssh')
+@mock.patch('yardstick.benchmark.scenarios.availability.monitor.'
+ 'monitor_general.open')
+class GeneralMonitorServiceTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.monitor_cfg = {
+ 'monitor_type': 'general-monitor',
+ 'key': 'service-status',
+ 'monitor_key': 'service-status',
+ 'host': 'node1',
+ 'monitor_time': 3,
+ 'parameter': {'serviceName': 'haproxy'},
+ 'sla': {'max_outage_time': 1}
+ }
+ self.monitor_cfg_noparam = {
+ 'monitor_type': 'general-monitor',
+ 'key': 'service-status',
+ 'monitor_key': 'service-status',
+ 'host': 'node1',
+ 'monitor_time': 3,
+ 'sla': {'max_outage_time': 1}
+ }
+
+ def test__monitor_general_all_successful(self, mock_open, mock_ssh):
+ ins = monitor_general.GeneralMonitor(self.monitor_cfg, self.context, {"nova-api": 10})
+
+ ins.setup()
+ mock_ssh.SSH.from_node().execute.return_value = (0, "running", '')
+ ins.monitor_func()
+ ins._result = {'outage_time': 0}
+ ins.verify_SLA()
+
+ def test__monitor_general_all_successful_noparam(self, mock_open,
+ mock_ssh):
+ ins = monitor_general.GeneralMonitor(
+ self.monitor_cfg_noparam, self.context, {"nova-api": 10})
+
+ ins.setup()
+ mock_ssh.SSH.from_node().execute.return_value = (0, "running", '')
+ ins.monitor_func()
+ ins._result = {'outage_time': 0}
+ ins.verify_SLA()
+
+ def test__monitor_general_failure(self, mock_open, mock_ssh):
+ ins = monitor_general.GeneralMonitor(
+ self.monitor_cfg_noparam, self.context, {"nova-api": 10})
+
+ ins.setup()
+ mock_ssh.SSH.from_node().execute.return_value = (1, "error", 'error')
+ ins.monitor_func()
+ ins._result = {'outage_time': 2}
+ ins.verify_SLA()
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py
new file mode 100644
index 000000000..dc3a4b99a
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py
@@ -0,0 +1,82 @@
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.monitor
+# .monitor_multi
+
+from __future__ import absolute_import
+import mock
+import unittest
+from yardstick.benchmark.scenarios.availability.monitor import monitor_multi
+
+
+# pylint: disable=unused-argument
+# disable this for now because I keep forgetting mock patch arg ordering
+
+
+@mock.patch('yardstick.benchmark.scenarios.availability.monitor.'
+ 'monitor_general.ssh')
+@mock.patch('yardstick.benchmark.scenarios.availability.monitor.'
+ 'monitor_general.open')
+class MultiMonitorServiceTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.monitor_cfg = {
+ 'monitor_type': 'general-monitor',
+ 'monitor_number': 3,
+ 'key': 'service-status',
+ 'monitor_key': 'service-status',
+ 'host': 'node1',
+ 'monitor_time': 0.1,
+ 'parameter': {'serviceName': 'haproxy'},
+ 'sla': {'max_outage_time': 1}
+ }
+
+ def test__monitor_multi_all_successful(self, mock_open, mock_ssh):
+ ins = monitor_multi.MultiMonitor(
+ self.monitor_cfg, self.context, {"nova-api": 10})
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "running", '')
+
+ ins.start_monitor()
+ ins.wait_monitor()
+ ins.verify_SLA()
+
+ def test__monitor_multi_all_fail(self, mock_open, mock_ssh):
+ ins = monitor_multi.MultiMonitor(
+ self.monitor_cfg, self.context, {"nova-api": 10})
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "running", '')
+
+ ins.start_monitor()
+ ins.wait_monitor()
+ ins.verify_SLA()
+
+ def test__monitor_multi_no_sla(self, mock_open, mock_ssh):
+ monitor_cfg = {
+ 'monitor_type': 'general-monitor',
+ 'monitor_number': 3,
+ 'key': 'service-status',
+ 'monitor_key': 'service-status',
+ 'host': 'node1',
+ 'monitor_time': 0.1,
+ 'parameter': {'serviceName': 'haproxy'}
+ }
+ ins = monitor_multi.MultiMonitor(
+ monitor_cfg, self.context, {"nova-api": 10})
+ mock_ssh.SSH.from_node().execute.return_value = (0, "running", '')
+ ins.start_monitor()
+ ins.wait_monitor()
+ self.assertTrue(ins.verify_SLA())
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py
new file mode 100644
index 000000000..8c73bf221
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py
@@ -0,0 +1,73 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for
+# yardstick.benchmark.scenarios.availability.monitor.monitor_process
+
+from __future__ import absolute_import
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.monitor import monitor_process
+
+
+@mock.patch(
+ 'yardstick.benchmark.scenarios.availability.monitor.monitor_process.ssh')
+class MonitorProcessTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.monitor_cfg = {
+ 'monitor_type': 'process',
+ 'process_name': 'nova-api',
+ 'host': "node1",
+ 'monitor_time': 1,
+ 'sla': {'max_recover_time': 5}
+ }
+
+ def test__monitor_process_all_successful(self, mock_ssh):
+
+ ins = monitor_process.MonitorProcess(self.monitor_cfg, self.context, {"nova-api": 10})
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "1", '')
+ ins.setup()
+ ins.monitor_func()
+ ins._result = {"outage_time": 0}
+ ins.verify_SLA()
+
+ def test__monitor_process_down_failure(self, mock_ssh):
+
+ ins = monitor_process.MonitorProcess(self.monitor_cfg, self.context, {"nova-api": 10})
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "0", '')
+ ins.setup()
+ ins.monitor_func()
+ ins._result = {"outage_time": 10}
+ ins.verify_SLA()
+
+ def test__monitor_process_no_sla(self, mock_ssh):
+
+ monitor_cfg = {
+ 'monitor_type': 'process',
+ 'process_name': 'nova-api',
+ 'host': "node1",
+ 'monitor_time': 1,
+ }
+ ins = monitor_process.MonitorProcess(monitor_cfg, self.context, {"nova-api": 10})
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "0", '')
+ ins.setup()
+ ins.monitor_func()
+ ins._result = {"outage_time": 10}
+ self.assertTrue(ins.verify_SLA())
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_operation_general.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_operation_general.py
new file mode 100644
index 000000000..2b09c0385
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_operation_general.py
@@ -0,0 +1,74 @@
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.operation
+# .operation_general
+
+from __future__ import absolute_import
+import mock
+import unittest
+from yardstick.benchmark.scenarios.availability.operation import \
+ operation_general
+
+
+# pylint: disable=unused-argument
+# disable this for now because I keep forgetting mock patch arg ordering
+
+
+@mock.patch('yardstick.benchmark.scenarios.availability.operation.'
+ 'operation_general.ssh')
+@mock.patch('yardstick.benchmark.scenarios.availability.operation.'
+ 'operation_general.open')
+class GeneralOperationTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.operation_cfg = {
+ 'operation_type': 'general-operation',
+ 'action_parameter': {'ins_cup': 2},
+ 'rollback_parameter': {'ins_id': 'id123456'},
+ 'key': 'nova-create-instance',
+ 'operation_key': 'nova-create-instance',
+ 'host': 'node1',
+ }
+ self.operation_cfg_noparam = {
+ 'operation_type': 'general-operation',
+ 'key': 'nova-create-instance',
+ 'operation_key': 'nova-create-instance',
+ 'host': 'node1',
+ }
+
+ def test__operation_successful(self, mock_open, mock_ssh):
+ ins = operation_general.GeneralOperaion(self.operation_cfg,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "success", '')
+ ins.setup()
+ ins.run()
+ ins.rollback()
+
+ def test__operation_successful_noparam(self, mock_open, mock_ssh):
+ ins = operation_general.GeneralOperaion(self.operation_cfg_noparam,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "success", '')
+ ins.setup()
+ ins.run()
+ ins.rollback()
+
+ def test__operation_fail(self, mock_open, mock_ssh):
+ ins = operation_general.GeneralOperaion(self.operation_cfg,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (1, "failed", '')
+ ins.setup()
+ ins.run()
+ ins.rollback()
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_result_checker_general.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_result_checker_general.py
new file mode 100644
index 000000000..324a5bda2
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_result_checker_general.py
@@ -0,0 +1,118 @@
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.result_checker
+# .result_checker_general
+
+from __future__ import absolute_import
+import mock
+import unittest
+import copy
+
+from yardstick.benchmark.scenarios.availability.result_checker import \
+ result_checker_general
+
+
+# pylint: disable=unused-argument
+# disable this for now because I keep forgetting mock patch arg ordering
+
+
+@mock.patch('yardstick.benchmark.scenarios.availability.result_checker.'
+ 'result_checker_general.ssh')
+@mock.patch('yardstick.benchmark.scenarios.availability.result_checker.'
+ 'result_checker_general.open')
+class GeneralResultCheckerTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.checker_cfg = {
+ 'parameter': {'processname': 'process'},
+ 'checker_type': 'general-result-checker',
+ 'condition': 'eq',
+ 'expectedValue': 1,
+ 'key': 'process-checker',
+ 'checker_key': 'process-checker',
+ 'host': 'node1'
+ }
+
+ def test__result_checker_eq(self, mock_open, mock_ssh):
+ ins = result_checker_general.GeneralResultChecker(self.checker_cfg,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "1", '')
+ ins.setup()
+ self.assertTrue(ins.verify())
+
+ def test__result_checker_gt(self, mock_open, mock_ssh):
+ config = copy.deepcopy(self.checker_cfg)
+ config['condition'] = 'gt'
+ ins = result_checker_general.GeneralResultChecker(config,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "2", '')
+ ins.setup()
+ self.assertTrue(ins.verify())
+
+ def test__result_checker_gt_eq(self, mock_open, mock_ssh):
+ config = copy.deepcopy(self.checker_cfg)
+ config['condition'] = 'gt_eq'
+ ins = result_checker_general.GeneralResultChecker(config,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "1", '')
+ ins.setup()
+ self.assertTrue(ins.verify())
+
+ def test__result_checker_lt(self, mock_open, mock_ssh):
+ config = copy.deepcopy(self.checker_cfg)
+ config['condition'] = 'lt'
+ ins = result_checker_general.GeneralResultChecker(config,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "0", '')
+ ins.setup()
+ self.assertTrue(ins.verify())
+
+ def test__result_checker_lt_eq(self, mock_open, mock_ssh):
+ config = copy.deepcopy(self.checker_cfg)
+ config['condition'] = 'lt_eq'
+ ins = result_checker_general.GeneralResultChecker(config,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "1", '')
+ ins.setup()
+ self.assertTrue(ins.verify())
+
+ def test__result_checker_in(self, mock_open, mock_ssh):
+ config = copy.deepcopy(self.checker_cfg)
+ config['condition'] = 'in'
+ config['expectedValue'] = "value"
+ ins = result_checker_general.GeneralResultChecker(config,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "value return", '')
+ ins.setup()
+ self.assertTrue(ins.verify())
+
+ def test__result_checker_wrong(self, mock_open, mock_ssh):
+ config = copy.deepcopy(self.checker_cfg)
+ config['condition'] = 'wrong'
+ ins = result_checker_general.GeneralResultChecker(config,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "1", '')
+ ins.setup()
+ self.assertFalse(ins.verify())
+
+ def test__result_checker_fail(self, mock_open, mock_ssh):
+ config = copy.deepcopy(self.checker_cfg)
+ config.pop('parameter')
+ ins = result_checker_general.GeneralResultChecker(config,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (1, "fail", '')
+ ins.setup()
+ ins.verify()
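
Each condition case above clones the baseline checker config with copy.deepcopy before mutating it, so one test's change to 'condition' or 'expectedValue' can never leak into the shared fixture or into another case. A minimal sketch of that clone-then-mutate pattern (with an invented config) follows.

import copy

baseline = {'condition': 'eq', 'expectedValue': 1, 'host': 'node1'}

gt_case = copy.deepcopy(baseline)
gt_case['condition'] = 'gt'

# The baseline is untouched; only the per-case copy changed.
assert baseline['condition'] == 'eq'
assert gt_case['condition'] == 'gt'
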
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_scenario_general.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_scenario_general.py
new file mode 100644
index 000000000..dbf3d83b2
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_scenario_general.py
@@ -0,0 +1,76 @@
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability import scenario_general
+from yardstick.common import exceptions as y_exc
+
+
+class ScenarioGeneralTestCase(unittest.TestCase):
+
+ @mock.patch.object(scenario_general, 'Director')
+ def setUp(self, *args):
+ self.scenario_cfg = {
+ 'type': "general_scenario",
+ 'options': {
+ 'attackers': [{
+ 'fault_type': "general-attacker",
+ 'key': "kill-process"}],
+ 'monitors': [{
+ 'monitor_type': "general-monitor",
+ 'key': "service-status"}],
+ 'steps': [
+ {
+ 'actionKey': "kill-process",
+ 'actionType': "attacker",
+ 'index': 1},
+ {
+ 'actionKey': "service-status",
+ 'actionType': "monitor",
+ 'index': 2}]
+ }
+ }
+ self.instance = scenario_general.ScenarioGeneral(self.scenario_cfg,
+ None)
+ self.instance.setup()
+ self.instance.director.verify.return_value = True
+
+ def test_scenario_general_all_successful(self):
+ ret = {}
+ self.instance.run(ret)
+ self.instance.teardown()
+ self.assertEqual(ret['sla_pass'], 1)
+
+ @mock.patch.object(scenario_general.LOG, 'exception')
+ def test_scenario_general_exception(self, *args):
+ self.instance.director.createActionPlayer.side_effect = (
+ KeyError('Wrong'))
+ self.instance.director.data = {}
+ ret = {}
+ self.instance.run(ret)
+ self.instance.teardown()
+ self.assertEqual(ret['sla_pass'], 1)
+
+ def test_scenario_general_case_fail(self):
+ self.instance.director.verify.return_value = False
+ self.instance.director.data = {}
+ ret = {}
+ self.assertRaises(y_exc.SLAValidationError, self.instance.run, ret)
+ self.instance.teardown()
+ self.assertEqual(ret['sla_pass'], 0)
+
+ def test_scenario_general_case_service_not_found_fail(self):
+ self.instance.director.verify.return_value = True
+ self.instance.director.data = {"general-attacker": 0}
+ ret = {}
+ self.assertRaises(y_exc.SLAValidationError, self.instance.run, ret)
+ self.instance.teardown()
+ self.assertEqual(ret['sla_pass'], 0)
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_serviceha.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_serviceha.py
new file mode 100644
index 000000000..d61fa67c7
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_serviceha.py
@@ -0,0 +1,131 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability import serviceha
+from yardstick.common import exceptions as y_exc
+
+
+class ServicehaTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.ctx = {"nodes": {"node1": host}}
+ attacker_cfg = {
+ "fault_type": "kill-process",
+ "process_name": "nova-api",
+ "host": "node1"
+ }
+ attacker_cfgs = []
+ attacker_cfgs.append(attacker_cfg)
+ monitor_cfg = {
+ "monitor_cmd": "nova image-list",
+ "monitor_time": 0.1
+ }
+ monitor_cfgs = []
+ monitor_cfgs.append(monitor_cfg)
+
+ options = {
+ "attackers": attacker_cfgs,
+ "monitors": monitor_cfgs
+ }
+ sla = {"outage_time": 5}
+ self.args = {"options": options, "sla": sla}
+ self.test__serviceha = serviceha.ServiceHA(self.args, self.ctx)
+
+ def test___init__(self):
+
+ self.assertEqual(self.test__serviceha.data, {})
+ self.assertFalse(self.test__serviceha.setup_done)
+ self.assertFalse(self.test__serviceha.sla_pass)
+
+ # NOTE(elfoley): This should be split into test_setup and test_run
+ # NOTE(elfoley): This should explicitly test outcomes and states
+ @mock.patch.object(serviceha, 'baseattacker')
+ @mock.patch.object(serviceha, 'basemonitor')
+ def test__serviceha_setup_run_successful(self, mock_monitor, *args):
+ p = serviceha.ServiceHA(self.args, self.ctx)
+
+ p.setup()
+ self.assertTrue(p.setup_done)
+ mock_monitor.MonitorMgr().verify_SLA.return_value = True
+ ret = {}
+ p.run(ret)
+ p.teardown()
+
+ p.setup()
+ self.assertTrue(p.setup_done)
+
+ @mock.patch.object(serviceha, 'baseattacker')
+ @mock.patch.object(serviceha, 'basemonitor')
+ def test__serviceha_run_sla_error(self, mock_monitor, *args):
+ p = serviceha.ServiceHA(self.args, self.ctx)
+
+ p.setup()
+ self.assertEqual(p.setup_done, True)
+
+ mock_monitor.MonitorMgr().verify_SLA.return_value = False
+
+ ret = {}
+ self.assertRaises(y_exc.SLAValidationError, p.run, ret)
+ self.assertEqual(ret['sla_pass'], 0)
+
+ @mock.patch.object(serviceha, 'baseattacker')
+ @mock.patch.object(serviceha, 'basemonitor')
+ def test__serviceha_run_service_not_found_sla_error(self, mock_monitor,
+ *args):
+ p = serviceha.ServiceHA(self.args, self.ctx)
+
+ p.setup()
+ self.assertTrue(p.setup_done)
+ p.data["kill-process"] = 0
+
+ mock_monitor.MonitorMgr().verify_SLA.return_value = True
+
+ ret = {}
+ self.assertRaises(y_exc.SLAValidationError, p.run, ret)
+ self.assertEqual(ret['sla_pass'], 0)
+
+ @mock.patch.object(serviceha, 'baseattacker')
+ @mock.patch.object(serviceha, 'basemonitor')
+ def test__serviceha_no_teardown_when_sla_pass(self, mock_monitor,
+ *args):
+ p = serviceha.ServiceHA(self.args, self.ctx)
+ p.setup()
+ self.assertTrue(p.setup_done)
+ mock_monitor.MonitorMgr().verify_SLA.return_value = True
+ ret = {}
+ p.run(ret)
+ attacker = mock.Mock()
+ attacker.mandatory = False
+ p.attackers = [attacker]
+ p.teardown()
+ attacker.recover.assert_not_called()
+
+ @mock.patch.object(serviceha, 'baseattacker')
+ @mock.patch.object(serviceha, 'basemonitor')
+ def test__serviceha_teardown_when_mandatory(self, mock_monitor,
+ *args):
+ p = serviceha.ServiceHA(self.args, self.ctx)
+ p.setup()
+ self.assertTrue(p.setup_done)
+ mock_monitor.MonitorMgr().verify_SLA.return_value = True
+ ret = {}
+ p.run(ret)
+ attacker = mock.Mock()
+ attacker.mandatory = True
+ p.attackers = [attacker]
+ p.teardown()
+ attacker.recover.assert_called_once()
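
The two teardown tests above replace the attackers list with a plain mock.Mock() whose mandatory attribute steers the branch under test, then assert on recover() with assert_not_called()/assert_called_once(). The standalone sketch below shows that assertion style against a simplified stand-in for the teardown logic; the teardown() function here is invented for illustration and is not the ServiceHA implementation.

import mock


def teardown(attackers):
    # Simplified stand-in: only mandatory attackers get recovered.
    for attacker in attackers:
        if attacker.mandatory:
            attacker.recover()


optional_attacker = mock.Mock(mandatory=False)
mandatory_attacker = mock.Mock(mandatory=True)

teardown([optional_attacker, mandatory_attacker])

optional_attacker.recover.assert_not_called()
mandatory_attacker.recover.assert_called_once()
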
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_util.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_util.py
new file mode 100644
index 000000000..4d97585d4
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_util.py
@@ -0,0 +1,56 @@
+##############################################################################
+# Copyright (c) 2016 Kanglin Yin and others
+# 14_ykl@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.util
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability import util
+
+
+class ExecuteShellTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.param_config = {'serviceName': '@serviceName', 'value': 1}
+ self.intermediate_variables = {'@serviceName': 'nova-api'}
+ self.std_output = '| id | 1 |'
+ self.cmd_config = {'cmd': 'ls', 'param': '-a'}
+
+ self._mock_subprocess = mock.patch.object(util, 'subprocess')
+ self.mock_subprocess = self._mock_subprocess.start()
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_subprocess.stop()
+
+ def test_util_build_command_shell(self):
+ result = util.build_shell_command(self.param_config, True,
+ self.intermediate_variables)
+ self.assertIn("nova-api", result)
+
+ def test_read_stdout_item(self):
+ result = util.read_stdout_item(self.std_output, 'id')
+ self.assertEqual('1', result)
+
+ def test_buildshellparams(self):
+ result = util.buildshellparams(self.cmd_config, True)
+ self.assertEqual('/bin/bash -s {0} {1}', result)
+
+ def test__fun_execute_shell_command_successful(self):
+ cmd = "env"
+ self.mock_subprocess.check_output.return_value = (0, 'unittest')
+ exitcode, _ = util.execute_shell_command(cmd)
+ self.assertEqual(exitcode, 0)
+
+ def test__fun_execute_shell_command_fail_cmd_exception(self):
+ cmd = "env"
+ self.mock_subprocess.check_output.side_effect = RuntimeError
+ exitcode, _ = util.execute_shell_command(cmd)
+ self.assertEqual(exitcode, -1)
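
test_util_build_command_shell feeds util.build_shell_command() a parameter dict whose values are '@name' placeholders plus an intermediate_variables mapping, and only checks that the substituted value ('nova-api') ends up in the result. The snippet below is a simplified, standalone illustration of that placeholder substitution idea, not yardstick's util implementation; substitute() is an invented helper.

def substitute(params, intermediate_variables):
    # Replace any value that names an intermediate variable (for example
    # '@serviceName') with the concrete value recorded for it.
    resolved = {}
    for key, value in params.items():
        resolved[key] = intermediate_variables.get(value, value)
    return resolved


params = {'serviceName': '@serviceName', 'value': 1}
variables = {'@serviceName': 'nova-api'}
assert substitute(params, variables)['serviceName'] == 'nova-api'
assert substitute(params, variables)['value'] == 1
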
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/__init__.py b/yardstick/tests/unit/benchmark/scenarios/compute/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/cachestat_sample_output.txt b/yardstick/tests/unit/benchmark/scenarios/compute/cachestat_sample_output.txt
new file mode 100644
index 000000000..e2c79a9b1
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/cachestat_sample_output.txt
@@ -0,0 +1,5 @@
+Counting cache functions... Output every 1 seconds.
+ HITS MISSES DIRTIES RATIO BUFFERS_MB CACHE_MB
+ 6462 0 29 100.0% 1157 66782
+
+Ending tracing...
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/cpuload_sample_output1.txt b/yardstick/tests/unit/benchmark/scenarios/compute/cpuload_sample_output1.txt
new file mode 100644
index 000000000..723e64bcb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/cpuload_sample_output1.txt
@@ -0,0 +1,9 @@
+Linux 3.13.0-68-generic (elxg482ls42) 11/30/2015 _x86_64_ (1 CPU)
+
+04:34:26 PM CPU %usr %nice %sys %iowait %irq %soft %steal %guest %gnice %idle
+04:34:26 PM all 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 100.00
+04:34:26 PM 0 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 100.00
+
+Average: CPU %usr %nice %sys %iowait %irq %soft %steal %guest %gnice %idle
+Average: all 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 100.00
+Average: 0 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 100.00
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/cpuload_sample_output2.txt b/yardstick/tests/unit/benchmark/scenarios/compute/cpuload_sample_output2.txt
new file mode 100644
index 000000000..c66520a27
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/cpuload_sample_output2.txt
@@ -0,0 +1,2 @@
+cpu 245813227 366650 17338727 1195600354 2652765 178 177114 0 80439531 0
+cpu0 32334587 35782 1659040 87008833 401178 60 73571 0 8030817 0
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/memload_sample_output.txt b/yardstick/tests/unit/benchmark/scenarios/compute/memload_sample_output.txt
new file mode 100644
index 000000000..1793e2f10
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/memload_sample_output.txt
@@ -0,0 +1,3 @@
+ total used free shared buff/cache available
+Mem: 263753976 76737332 187016644 2844 853528 67252400
+Swap: 268029948 0 268029948
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_cachestat.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_cachestat.py
new file mode 100644
index 000000000..6f66c30f9
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_cachestat.py
@@ -0,0 +1,95 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.cachestat.CACHEstat
+
+from __future__ import absolute_import
+import mock
+import unittest
+import os
+
+from yardstick.benchmark.scenarios.compute import cachestat
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.cachestat.ssh')
+class CACHEstatTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ def test_cachestat_successful_setup(self, mock_ssh):
+ c = cachestat.CACHEstat({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ c.setup()
+ self.assertIsNotNone(c.client)
+ self.assertTrue(c.setup_done)
+
+ def test_execute_command_success(self, mock_ssh):
+ c = cachestat.CACHEstat({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ expected_result = 'abcdefg'
+ mock_ssh.SSH.from_node().execute.return_value = (0, expected_result, '')
+ result = c._execute_command("foo")
+ self.assertEqual(result, expected_result)
+
+ def test_execute_command_failed(self, mock_ssh):
+ c = cachestat.CACHEstat({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = (127, '', 'Failed executing \
+ command')
+ self.assertRaises(RuntimeError, c._execute_command,
+ "cat /proc/meminfo")
+
+ def test_get_cache_usage_successful(self, mock_ssh):
+ options = {
+ "interval": 1,
+ }
+ args = {"options": options}
+ c = cachestat.CACHEstat(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ output = self._read_file("cachestat_sample_output.txt")
+ mock_ssh.SSH.from_node().execute.return_value = (0, output, '')
+ result = c._get_cache_usage()
+ expected_result = {"cachestat": {"cache0": {"HITS": "6462",
+ "DIRTIES": "29",
+ "RATIO": "100.0%",
+ "MISSES": "0",
+ "BUFFERS_MB": "1157",
+ "CACHE_MB": "66782"}},
+ "average": {"HITS": 6462, "DIRTIES": 29,
+ "RATIO": "100.0%",
+ "MISSES": 0, "BUFFERS_MB": 1157,
+ "CACHE_MB": 66782},
+ "max": {"HITS": 6462,
+ "DIRTIES": 29, "RATIO": 100.0, "MISSES": 0,
+ "BUFFERS_MB": 1157, "CACHE_MB": 66782}}
+
+ self.assertEqual(result, expected_result)
+
+ def _read_file(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, filename)
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_computecapacity.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_computecapacity.py
new file mode 100644
index 000000000..4bef589f4
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_computecapacity.py
@@ -0,0 +1,64 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for
+# yardstick.benchmark.scenarios.compute.computecapacity.ComputeCapacity
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.compute import computecapacity
+
+SAMPLE_OUTPUT = '{"Cpu_number": "2", "Core_number": "24",\
+ "Memory_size": "263753976 kB", "Thread_number": "48",\
+ "Cache_size": "30720 KB", "HT_Open": "0"}'
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.computecapacity.ssh')
+class ComputeCapacityTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'nodes': {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key",
+ 'password': "root"
+ },
+ }
+ }
+
+ self.result = {}
+
+ def test_capacity_successful_setup(self, mock_ssh):
+ c = computecapacity.ComputeCapacity({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ c.setup()
+ self.assertIsNotNone(c.client)
+ self.assertTrue(c.setup_done)
+
+ def test_capacity_successful(self, mock_ssh):
+ c = computecapacity.ComputeCapacity({}, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, SAMPLE_OUTPUT, '')
+ c.run(self.result)
+ expected_result = jsonutils.loads(SAMPLE_OUTPUT)
+ self.assertEqual(self.result, expected_result)
+
+ def test_capacity_unsuccessful_script_error(self, mock_ssh):
+ c = computecapacity.ComputeCapacity({}, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, c.run, self.result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_cpuload.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_cpuload.py
new file mode 100644
index 000000000..da6e6a22e
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_cpuload.py
@@ -0,0 +1,262 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.cpuload.CPULoad
+
+from __future__ import absolute_import
+import mock
+import unittest
+import os
+
+from yardstick.benchmark.scenarios.compute import cpuload
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.cpuload.ssh')
+class CPULoadTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ def test_setup_mpstat_installed(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+
+ args = {'options': options}
+
+ l = cpuload.CPULoad(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ l.setup()
+ self.assertIsNotNone(l.client)
+ self.assertTrue(l.setup_done)
+ self.assertTrue(l.has_mpstat)
+
+ def test_setup_mpstat_not_installed(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+
+ args = {'options': options}
+
+ l = cpuload.CPULoad(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (127, '', '')
+
+ l.setup()
+ self.assertIsNotNone(l.client)
+ self.assertTrue(l.setup_done)
+ self.assertFalse(l.has_mpstat)
+
+ def test_execute_command_success(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+
+ args = {'options': options}
+
+ l = cpuload.CPULoad(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ l.setup()
+
+ expected_result = 'abcdefg'
+ mock_ssh.SSH.from_node().execute.return_value = (0, expected_result, '')
+ result = l._execute_command("foo")
+ self.assertEqual(result, expected_result)
+
+ def test_execute_command_failed(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+
+ args = {'options': options}
+
+ l = cpuload.CPULoad(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ l.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = (127, '', 'abcdefg')
+ self.assertRaises(RuntimeError, l._execute_command,
+ "cat /proc/loadavg")
+
+ def test_get_loadavg(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+
+ args = {'options': options}
+
+ l = cpuload.CPULoad(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ l.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = \
+ (0, '1.50 1.45 1.51 3/813 14322', '')
+ result = l._get_loadavg()
+ expected_result = \
+ {'loadavg': ['1.50', '1.45', '1.51', '3/813', '14322']}
+ self.assertEqual(result, expected_result)
+
+ def test_get_cpu_usage_mpstat(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+
+ args = {'options': options}
+
+ l = cpuload.CPULoad(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ l.setup()
+
+ l.interval = 1
+ l.count = 1
+ mpstat_output = self._read_file("cpuload_sample_output1.txt")
+ mock_ssh.SSH.from_node().execute.return_value = (0, mpstat_output, '')
+ result = l._get_cpu_usage_mpstat()
+
+ expected_result = \
+ {"mpstat_minimum":
+ {"cpu": {"%steal": "0.00", "%usr": "0.00", "%gnice": "0.00",
+ "%idle": "100.00", "%guest": "0.00",
+ "%iowait": "0.00", "%sys": "0.00", "%soft": "0.00",
+ "%irq": "0.00", "%nice": "0.00"},
+ "cpu0": {"%steal": "0.00", "%usr": "0.00", "%gnice": "0.00",
+ "%idle": "100.00", "%guest": "0.00",
+ "%iowait": "0.00", "%sys": "0.00", "%soft": "0.00",
+ "%irq": "0.00", "%nice": "0.00"}},
+ "mpstat_average":
+ {"cpu": {"%steal": "0.00", "%usr": "0.00", "%gnice": "0.00",
+ "%idle": "100.00", "%guest": "0.00",
+ "%iowait": "0.00", "%sys": "0.00", "%soft": "0.00",
+ "%irq": "0.00", "%nice": "0.00"},
+ "cpu0": {"%steal": "0.00", "%usr": "0.00", "%gnice": "0.00",
+ "%idle": "100.00", "%guest": "0.00",
+ "%iowait": "0.00", "%sys": "0.00", "%soft": "0.00",
+ "%irq": "0.00", "%nice": "0.00"}},
+ "mpstat_maximun":
+ {"cpu": {"%steal": "0.00", "%usr": "0.00", "%gnice": "0.00",
+ "%idle": "100.00", "%guest": "0.00",
+ "%iowait": "0.00", "%sys": "0.00", "%soft": "0.00",
+ "%irq": "0.00", "%nice": "0.00"},
+ "cpu0": {"%steal": "0.00", "%usr": "0.00", "%gnice": "0.00",
+ "%idle": "100.00", "%guest": "0.00",
+ "%iowait": "0.00", "%sys": "0.00", "%soft": "0.00",
+ "%irq": "0.00", "%nice": "0.00"}}}
+
+ self.assertDictEqual(result, expected_result)
+
+ def test_get_cpu_usage(self, mock_ssh):
+ options = {
+ "interval": 0,
+ "count": 1
+ }
+
+ args = {'options': options}
+
+ l = cpuload.CPULoad(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ l.setup()
+
+ l.interval = 0
+ output = self._read_file("cpuload_sample_output2.txt")
+ mock_ssh.SSH.from_node().execute.return_value = (0, output, '')
+ result = l._get_cpu_usage()
+
+ expected_result = \
+ {'mpstat':
+ {'cpu':
+ {'%steal': '0.00',
+ '%usr': '11.31',
+ '%gnice': '0.00',
+ '%idle': '81.78',
+ '%iowait': '0.18',
+ '%guest': '5.50',
+ '%sys': '1.19',
+ '%soft': '0.01',
+ '%irq': '0.00',
+ '%nice': '0.03'},
+ 'cpu0':
+ {'%steal': '0.00',
+ '%usr': '20.00',
+ '%gnice': '0.00',
+ '%idle': '71.60',
+ '%iowait': '0.33',
+ '%guest': '6.61',
+ '%sys': '1.37',
+ '%soft': '0.06',
+ '%irq': '0.00',
+ '%nice': '0.03'}}}
+
+ self.assertDictEqual(result, expected_result)
+
+ def test_run_proc_stat(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+
+ args = {'options': options}
+
+ l = cpuload.CPULoad(args, self.ctx)
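+ # a non-zero exit status from the mpstat availability check leaves has_mpstat False,
+ # so run() falls back to reading /proc/stat (hence the test name)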
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
+ l.setup()
+
+ l.interval = 0
+ stat_output = self._read_file("cpuload_sample_output2.txt")
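+ # side_effect feeds consecutive execute() calls: the loadavg read first, then the CPU usage sample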
+ mock_ssh.SSH.from_node().execute.side_effect = \
+ [(0, '1.50 1.45 1.51 3/813 14322', ''), (0, stat_output, '')]
+
+ l.run(self.result)
+ expected_result = {
+ 'loadavg': ['1.50', '1.45', '1.51', '3/813', '14322'],
+ 'mpstat':
+ {'cpu':
+ {'%steal': '0.00',
+ '%usr': '11.31',
+ '%gnice': '0.00',
+ '%idle': '81.78',
+ '%iowait': '0.18',
+ '%guest': '5.50',
+ '%sys': '1.19',
+ '%soft': '0.01',
+ '%irq': '0.00',
+ '%nice': '0.03'},
+ 'cpu0':
+ {'%steal': '0.00',
+ '%usr': '20.00',
+ '%gnice': '0.00',
+ '%idle': '71.60',
+ '%iowait': '0.33',
+ '%guest': '6.61',
+ '%sys': '1.37',
+ '%soft': '0.06',
+ '%irq': '0.00',
+ '%nice': '0.03'}}}
+
+ self.assertDictEqual(self.result, expected_result)
+
+ def _read_file(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, filename)
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_cyclictest.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_cyclictest.py
new file mode 100644
index 000000000..4fadde4dc
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_cyclictest.py
@@ -0,0 +1,167 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and other.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.cyclictest.Cyclictest
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.compute import cyclictest
+from yardstick.common import exceptions as y_exc
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.cyclictest.ssh')
+class CyclictestTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.scenario_cfg = {
+ "host": "kvm.LF",
+ "setup_options": {
+ "rpm_dir": "/opt/rpm",
+ "host_setup_seqs": [
+ "host-setup0.sh",
+ "host-setup1.sh",
+ "host-run-qemu.sh"
+ ],
+ "script_dir": "/opt/scripts",
+ "image_dir": "/opt/image",
+ "guest_setup_seqs": [
+ "guest-setup0.sh",
+ "guest-setup1.sh"
+ ]
+ },
+ "sla": {
+ "action": "monitor",
+ "max_min_latency": 50,
+ "max_avg_latency": 100,
+ "max_max_latency": 1000
+ },
+ "options": {
+ "priority": 99,
+ "threads": 1,
+ "loops": 1000,
+ "affinity": 1,
+ "interval": 1000,
+ "histogram": 90
+ }
+ }
+ self.context_cfg = {
+ "host": {
+ "ip": "10.229.43.154",
+ "key_filename": "/yardstick/resources/files/yardstick_key",
+ "role": "BareMetal",
+ "name": "kvm.LF",
+ "user": "root"
+ }
+ }
+
+ def test_cyclictest_successful_setup(self, mock_ssh):
+
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ c.setup()
+ self.assertIsNotNone(c.guest)
+ self.assertIsNotNone(c.host)
+ self.assertTrue(c.setup_done)
+
+ def test_cyclictest_successful_no_sla(self, mock_ssh):
+ result = {}
+ self.scenario_cfg.pop("sla", None)
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ c.guest = mock_ssh.SSH.from_node()
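+ # run() collects the benchmark output through the guest SSH connection, so reuse the mocked client here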
+ sample_output = '{"min": 100, "avg": 500, "max": 1000}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ c.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(result, expected_result)
+
+ def test_cyclictest_successful_sla(self, mock_ssh):
+ result = {}
+ self.scenario_cfg.update({"sla": {
+ "action": "monitor",
+ "max_min_latency": 100,
+ "max_avg_latency": 500,
+ "max_max_latency": 1000
+ }
+ })
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ c.guest = mock_ssh.SSH.from_node()
+ sample_output = '{"min": 100, "avg": 500, "max": 1000}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ c.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(result, expected_result)
+
+ def test_cyclictest_unsuccessful_sla_min_latency(self, mock_ssh):
+
+ result = {}
+ self.scenario_cfg.update({"sla": {"max_min_latency": 10}})
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ c.guest = mock_ssh.SSH.from_node()
+ sample_output = '{"min": 100, "avg": 500, "max": 1000}'
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, c.run, result)
+
+ def test_cyclictest_unsuccessful_sla_avg_latency(self, mock_ssh):
+
+ result = {}
+ self.scenario_cfg.update({"sla": {"max_avg_latency": 10}})
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ c.guest = mock_ssh.SSH.from_node()
+ sample_output = '{"min": 100, "avg": 500, "max": 1000}'
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, c.run, result)
+
+ def test_cyclictest_unsuccessful_sla_max_latency(self, mock_ssh):
+
+ result = {}
+ self.scenario_cfg.update({"sla": {"max_max_latency": 10}})
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ c.guest = mock_ssh.SSH.from_node()
+ sample_output = '{"min": 100, "avg": 500, "max": 1000}'
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, c.run, result)
+
+ def test_cyclictest_unsuccessful_script_error(self, mock_ssh):
+
+ result = {}
+ self.scenario_cfg.update({"sla": {"max_max_latency": 10}})
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ c.guest = mock_ssh.SSH.from_node()
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, c.run, result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_lmbench.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_lmbench.py
new file mode 100644
index 000000000..ba63e5f9e
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_lmbench.py
@@ -0,0 +1,192 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.compute import lmbench
+from yardstick.common import exceptions as y_exc
+from yardstick import ssh
+
+
+class LmbenchTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ self._mock_ssh = mock.patch.object(ssh, 'SSH')
+ self.mock_ssh = self._mock_ssh.start()
+ self.addCleanup(self._stop_mocks)
+
+ def _stop_mocks(self):
+ self._mock_ssh.stop()
+
+ def test_successful_setup(self):
+
+ l = lmbench.Lmbench({}, self.ctx)
+ self.mock_ssh.from_node().execute.return_value = (0, '', '')
+
+ l.setup()
+ self.assertIsNotNone(l.client)
+ self.assertTrue(l.setup_done)
+
+ def test_unsuccessful_unknown_type_run(self):
+
+ options = {
+ "test_type": "foo"
+ }
+ args = {'options': options}
+
+ l = lmbench.Lmbench(args, self.ctx)
+
+ self.assertRaises(RuntimeError, l.run, self.result)
+
+ def test_successful_latency_run_no_sla(self):
+
+ options = {
+ "test_type": "latency",
+ "stride": 64,
+ "stop_size": 16
+ }
+ args = {'options': options}
+ l = lmbench.Lmbench(args, self.ctx)
+
+ sample_output = '[{"latency": 4.944, "size": 0.00049}]'
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
+ l.run(self.result)
+ expected_result = {"latencies0.latency": 4.944, "latencies0.size": 0.00049}
+ self.assertEqual(self.result, expected_result)
+
+ def test_successful_bandwidth_run_no_sla(self):
+
+ options = {
+ "test_type": "bandwidth",
+ "size": 500,
+ "benchmark": "rd",
+ "warmup": 0
+ }
+ args = {"options": options}
+ l = lmbench.Lmbench(args, self.ctx)
+
+ sample_output = '{"size(MB)": 0.262144, "bandwidth(MBps)": 11025.5}'
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
+ l.run(self.result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(self.result, expected_result)
+
+ def test_successful_latency_run_sla(self):
+
+ options = {
+ "test_type": "latency",
+ "stride": 64,
+ "stop_size": 16
+ }
+ args = {
+ "options": options,
+ "sla": {"max_latency": 35}
+ }
+ l = lmbench.Lmbench(args, self.ctx)
+
+ sample_output = '[{"latency": 4.944, "size": 0.00049}]'
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
+ l.run(self.result)
+ expected_result = {"latencies0.latency": 4.944, "latencies0.size": 0.00049}
+ self.assertEqual(self.result, expected_result)
+
+ def test_successful_bandwidth_run_sla(self):
+
+ options = {
+ "test_type": "bandwidth",
+ "size": 500,
+ "benchmark": "rd",
+ "warmup": 0
+ }
+ args = {
+ "options": options,
+ "sla": {"min_bandwidth": 10000}
+ }
+ l = lmbench.Lmbench(args, self.ctx)
+
+ sample_output = '{"size(MB)": 0.262144, "bandwidth(MBps)": 11025.5}'
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
+ l.run(self.result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(self.result, expected_result)
+
+ def test_unsuccessful_latency_run_sla(self):
+
+ options = {
+ "test_type": "latency",
+ "stride": 64,
+ "stop_size": 16
+ }
+ args = {
+ "options": options,
+ "sla": {"max_latency": 35}
+ }
+ l = lmbench.Lmbench(args, self.ctx)
+
+ sample_output = '[{"latency": 37.5, "size": 0.00049}]'
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, l.run, self.result)
+
+ def test_unsuccessful_bandwidth_run_sla(self):
+
+ options = {
+ "test_type": "bandwidth",
+ "size": 500,
+ "benchmark": "rd",
+ "warmup": 0
+ }
+ args = {
+ "options": options,
+ "sla": {"min_bandwidth": 10000}
+ }
+ l = lmbench.Lmbench(args, self.ctx)
+
+ sample_output = '{"size(MB)": 0.262144, "bandwidth(MBps)": 9925.5}'
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, l.run, self.result)
+
+ def test_successful_latency_for_cache_run_sla(self):
+
+ options = {
+ "test_type": "latency_for_cache",
+ "repetition": 1,
+ "warmup": 0
+ }
+ args = {
+ "options": options,
+ "sla": {"max_latency": 35}
+ }
+ l = lmbench.Lmbench(args, self.ctx)
+
+ sample_output = "{\"L1cache\": 1.6}"
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
+ l.run(self.result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(self.result, expected_result)
+
+ def test_unsuccessful_script_error(self):
+
+ options = {"test_type": "bandwidth"}
+ args = {"options": options}
+ l = lmbench.Lmbench(args, self.ctx)
+
+ self.mock_ssh.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, l.run, self.result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_memload.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_memload.py
new file mode 100644
index 000000000..8213d4490
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_memload.py
@@ -0,0 +1,109 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.memload.MEMLoad
+
+from __future__ import absolute_import
+import mock
+import unittest
+import os
+
+from yardstick.benchmark.scenarios.compute import memload
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.memload.ssh')
+class MEMLoadTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ def test_memload_successful_setup(self, mock_ssh):
+ m = memload.MEMLoad({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ m.setup()
+ self.assertIsNotNone(m.client)
+ self.assertTrue(m.setup_done)
+
+ def test_execute_command_success(self, mock_ssh):
+ m = memload.MEMLoad({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ m.setup()
+
+ expected_result = 'abcdefg'
+ mock_ssh.SSH.from_node().execute.return_value = (0, expected_result, '')
+ result = m._execute_command("foo")
+ self.assertEqual(result, expected_result)
+
+ def test_execute_command_failed(self, mock_ssh):
+ m = memload.MEMLoad({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ m.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = (127, '', 'Failed executing \
+ command')
+ self.assertRaises(RuntimeError, m._execute_command,
+ "cat /proc/meminfo")
+
+ def test_get_mem_usage_successful(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+ args = {"options": options}
+ m = memload.MEMLoad(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ m.setup()
+
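+ # feed a canned memory usage sample so _get_mem_usage() parses deterministic values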
+ output = self._read_file("memload_sample_output.txt")
+ mock_ssh.SSH.from_node().execute.return_value = (0, output, '')
+ result = m._get_mem_usage()
+ expected_result = {
+ "max": {
+ 'shared': 2844,
+ 'buff/cache': 853528,
+ 'total': 263753976,
+ 'free': 187016644,
+ 'used': 76737332
+ },
+ "average": {
+ 'shared': 2844,
+ 'buff/cache': 853528,
+ 'total': 263753976,
+ 'free': 187016644,
+ 'used': 76737332
+ },
+ "free": {
+ "memory0": {
+ "used": "76737332",
+ "buff/cache": "853528",
+ "free": "187016644",
+ "shared": "2844",
+ "total": "263753976",
+ "available": "67252400"
+ }
+ }
+ }
+
+ self.assertEqual(result, expected_result)
+
+ def _read_file(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, filename)
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_plugintest.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_plugintest.py
new file mode 100644
index 000000000..875301729
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_plugintest.py
@@ -0,0 +1,60 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.plugintest.PluginTest
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.compute import plugintest
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.plugintest.ssh')
+class PluginTestTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'nodes': {
+ 'host1': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key",
+ 'password': "root"
+ },
+ }
+ }
+
+ self.result = {}
+
+ def test_sample_successful_setup(self, mock_ssh):
+ s = plugintest.PluginTest({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ s.setup()
+ self.assertIsNotNone(s.client)
+ self.assertTrue(s.setup_done)
+
+ def test_sample_successful(self, mock_ssh):
+ s = plugintest.PluginTest({}, self.ctx)
+
+ sample_output = '{"Test Output": "Hello world!"}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ s.run(self.result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(self.result, expected_result)
+
+ def test_sample_unsuccessful_script_error(self, mock_ssh):
+ s = plugintest.PluginTest({}, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, s.run, self.result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_qemumigrate.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_qemumigrate.py
new file mode 100644
index 000000000..02040ca01
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_qemumigrate.py
@@ -0,0 +1,158 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and other.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.qemu_migrate.QemuMigrate
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.compute import qemu_migrate
+from yardstick.common import exceptions as y_exc
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.qemu_migrate.ssh')
+class QemuMigrateTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.scenario_cfg = {
+ "host": "kvm.LF",
+ "setup_options": {
+ "rpm_dir": "/opt/rpm",
+ "script_dir": "/opt/scripts",
+ "image_dir": "/opt/image",
+ "host_setup_seqs": [
+ "host-setup0.sh",
+ "host-setup1.sh",
+ "setup-ovsdpdk.sh",
+ "host-install-qemu.sh",
+ "host-run-qemu4lm.sh"
+ ]
+ },
+ "sla": {
+ "action": "monitor",
+ "max_totaltime": 10,
+ "max_downtime": 0.10,
+ "max_setuptime": 0.50
+ },
+ "options": {
+ "smp": 99,
+ "migrate_to_port": 4444,
+ "incoming_ip": 0,
+ "qmp_src_path": "/tmp/qmp-sock-src",
+ "qmp_dst_path": "/tmp/qmp-sock-dst",
+ "max_down_time": "0.10"
+ }
+ }
+ self.context_cfg = {
+ "host": {
+ "ip": "10.229.43.154",
+ "key_filename": "/yardstick/resources/files/yardstick_key",
+ "role": "BareMetal",
+ "name": "kvm.LF",
+ "user": "root"
+ }
+ }
+
+ def test_qemu_migrate_successful_setup(self, mock_ssh):
+
+ q = qemu_migrate.QemuMigrate(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ q.setup()
+ self.assertIsNotNone(q.host)
+ self.assertTrue(q.setup_done)
+
+ def test_qemu_migrate_successful_no_sla(self, mock_ssh):
+ result = {}
+ self.scenario_cfg.pop("sla", None)
+ q = qemu_migrate.QemuMigrate(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ q.setup()
+
+ sample_output = '{"totaltime": 15, "downtime": 2, "setuptime": 1}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ q.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(result, expected_result)
+
+ def test_qemu_migrate_successful_sla(self, mock_ssh):
+ result = {}
+ self.scenario_cfg.update({"sla": {
+ "action": "monitor",
+ "max_totaltime": 15,
+ "max_downtime": 2,
+ "max_setuptime": 1
+ }
+ })
+ q = qemu_migrate.QemuMigrate(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ q.setup()
+
+ sample_output = '{"totaltime": 15, "downtime": 2, "setuptime": 1}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ q.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(result, expected_result)
+
+ def test_qemu_migrate_unsuccessful_sla_totaltime(self, mock_ssh):
+
+ result = {}
+ self.scenario_cfg.update({"sla": {"max_totaltime": 10}})
+ q = qemu_migrate.QemuMigrate(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ q.setup()
+
+ sample_output = '{"totaltime": 15, "downtime": 2, "setuptime": 1}'
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, q.run, result)
+
+ def test_qemu_migrate_unsuccessful_sla_downtime(self, mock_ssh):
+
+ result = {}
+ self.scenario_cfg.update({"sla": {"max_downtime": 0.10}})
+ q = qemu_migrate.QemuMigrate(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ q.setup()
+
+ sample_output = '{"totaltime": 15, "downtime": 2, "setuptime": 1}'
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, q.run, result)
+
+ def test_qemu_migrate_unsuccessful_sla_setuptime(self, mock_ssh):
+
+ result = {}
+ self.scenario_cfg.update({"sla": {"max_setuptime": 0.50}})
+ q = qemu_migrate.QemuMigrate(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ q.setup()
+
+ sample_output = '{"totaltime": 15, "downtime": 2, "setuptime": 1}'
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, q.run, result)
+
+ def test_qemu_migrate_unsuccessful_script_error(self, mock_ssh):
+
+ result = {}
+ self.scenario_cfg.update({"sla": {"max_totaltime": 10}})
+ q = qemu_migrate.QemuMigrate(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ q.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, q.run, result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_ramspeed.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_ramspeed.py
new file mode 100644
index 000000000..9e055befe
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_ramspeed.py
@@ -0,0 +1,236 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.ramspeed.Ramspeed
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.common import utils
+from yardstick.benchmark.scenarios.compute import ramspeed
+from yardstick.common import exceptions as y_exc
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.ramspeed.ssh')
+class RamspeedTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ def test_ramspeed_successful_setup(self, mock_ssh):
+
+ r = ramspeed.Ramspeed({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ r.setup()
+ self.assertIsNotNone(r.client)
+ self.assertTrue(r.setup_done)
+
+ def test_ramspeed_successful_run_no_sla(self, mock_ssh):
+
+ options = {
+ "test_id": 1,
+ "load": 16,
+ "block_size": 32
+ }
+ args = {"options": options}
+ r = ramspeed.Ramspeed(args, self.ctx)
+
+ sample_output = '{"Result": [{"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 1, "Bandwidth(MBps)": 19909.18}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 2, "Bandwidth(MBps)": 19873.89},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 4, "Bandwidth(MBps)":\
+ 19907.56}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 8,\
+ "Bandwidth(MBps)": 19906.94}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 16, "Bandwidth(MBps)": 19881.74}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 32, "Bandwidth(MBps)": 19395.65},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 64, "Bandwidth(MBps)":\
+ 17623.14}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 128,\
+ "Bandwidth(MBps)": 17677.36}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 256, "Bandwidth(MBps)": 16113.49}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 512, "Bandwidth(MBps)": 14659.19},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 1024, "Bandwidth(MBps)":\
+ 14680.75}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 2048,\
+ "Bandwidth(MBps)": 14756.45}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 4096, "Bandwidth(MBps)": 14604.44}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 8192, "Bandwidth(MBps)": 14159.86},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 16384,\
+ "Bandwidth(MBps)": 14128.94}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 32768, "Bandwidth(MBps)": 8340.85}]}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ r.run(self.result)
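+ # the scenario flattens the nested JSON result, so build the expected dict with the same helper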
+ expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
+ self.assertEqual(self.result, expected_result)
+
+ def test_ramspeed_successful_run_sla(self, mock_ssh):
+
+ options = {
+ "test_id": 1,
+ "load": 16,
+ "block_size": 32
+ }
+ args = {"options": options, "sla": {"min_bandwidth": 6000}}
+ r = ramspeed.Ramspeed(args, self.ctx)
+
+ sample_output = '{"Result": [{"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 1, "Bandwidth(MBps)": 19909.18}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 2, "Bandwidth(MBps)": 19873.89},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 4, "Bandwidth(MBps)":\
+ 19907.56}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 8,\
+ "Bandwidth(MBps)": 19906.94}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 16, "Bandwidth(MBps)": 19881.74}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 32, "Bandwidth(MBps)": 19395.65},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 64, "Bandwidth(MBps)":\
+ 17623.14}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 128,\
+ "Bandwidth(MBps)": 17677.36}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 256, "Bandwidth(MBps)": 16113.49}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 512, "Bandwidth(MBps)": 14659.19},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 1024, "Bandwidth(MBps)":\
+ 14680.75}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 2048,\
+ "Bandwidth(MBps)": 14756.45}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 4096, "Bandwidth(MBps)": 14604.44}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 8192, "Bandwidth(MBps)": 14159.86},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 16384,\
+ "Bandwidth(MBps)": 14128.94}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 32768, "Bandwidth(MBps)": 8340.85}]}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ r.run(self.result)
+ expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
+ self.assertEqual(self.result, expected_result)
+
+ def test_ramspeed_unsuccessful_run_sla(self, mock_ssh):
+ options = {
+ "test_id": 1,
+ "load": 8,
+ "block_size": 64
+ }
+ args = {"options": options, "sla": {"min_bandwidth": 100000}}
+ r = ramspeed.Ramspeed(args, self.ctx)
+
+ sample_output = '{"Result": [{"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 1, "Bandwidth(MBps)": 5000.18}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 2, "Bandwidth(MBps)": 5000.89},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 4,\
+ "Bandwidth(MBps)": 5000.56}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 8, "Bandwidth(MBps)": 19906.94}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 16, "Bandwidth(MBps)": 19881.74},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 32,\
+ "Bandwidth(MBps)": 19395.65}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 64, "Bandwidth(MBps)": 17623.14}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 128, "Bandwidth(MBps)": 17677.36},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 256, "Bandwidth(MBps)":\
+ 16113.49}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 512,\
+ "Bandwidth(MBps)": 14659.19}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 1024, "Bandwidth(MBps)": 14680.75}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 2048, "Bandwidth(MBps)": 14756.45},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 4096, "Bandwidth(MBps)":\
+ 14604.44}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 8192,\
+ "Bandwidth(MBps)": 14159.86}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 16384, "Bandwidth(MBps)": 14128.94}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 32768, "Bandwidth(MBps)": 8340.85}]}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, r.run, self.result)
+
+ def test_ramspeed_unsuccessful_script_error(self, mock_ssh):
+ options = {
+ "test_id": 1,
+ "load": 16,
+ "block_size": 32
+ }
+ args = {"options": options}
+ r = ramspeed.Ramspeed(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, r.run, self.result)
+
+ def test_ramspeed_mem_successful_run_no_sla(self, mock_ssh):
+ options = {
+ "test_id": 3,
+ "load": 16,
+ "block_size": 32,
+ "iteration": 1
+ }
+ args = {"options": options}
+ r = ramspeed.Ramspeed(args, self.ctx)
+
+ sample_output = '{"Result": [{"Test_type": "INTEGER Copy:",\
+ "Bandwidth(MBps)": 8353.97}, {"Test_type": "INTEGER Scale:",\
+ "Bandwidth(MBps)": 9078.59}, {"Test_type": "INTEGER Add:",\
+ "Bandwidth(MBps)": 10057.48}, {"Test_type": "INTEGER Triad:",\
+ "Bandwidth(MBps)": 10116.27}, {"Test_type": "INTEGER AVERAGE:",\
+ "Bandwidth(MBps)": 9401.58}]}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ r.run(self.result)
+ expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
+ self.assertEqual(self.result, expected_result)
+
+ def test_ramspeed_mem_successful_run_sla(self, mock_ssh):
+ options = {
+ "test_id": 3,
+ "load": 16,
+ "block_size": 32,
+ "iteration": 1
+ }
+ args = {"options": options, "sla": {"min_bandwidth": 6000}}
+ r = ramspeed.Ramspeed(args, self.ctx)
+
+ sample_output = '{"Result": [{"Test_type": "INTEGER Copy:",\
+ "Bandwidth(MBps)": 8353.97}, {"Test_type": "INTEGER Scale:",\
+ "Bandwidth(MBps)": 9078.59}, {"Test_type": "INTEGER Add:",\
+ "Bandwidth(MBps)": 10057.48}, {"Test_type": "INTEGER Triad:",\
+ "Bandwidth(MBps)": 10116.27}, {"Test_type": "INTEGER AVERAGE:",\
+ "Bandwidth(MBps)": 9401.58}]}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ r.run(self.result)
+ expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
+ self.assertEqual(self.result, expected_result)
+
+ def test_ramspeed_mem_unsuccessful_run_sla(self, mock_ssh):
+ options = {
+ "test_id": 3,
+ "load": 16,
+ "block_size": 32,
+ "iteration": 1
+ }
+ args = {"options": options, "sla": {"min_bandwidth": 86000}}
+ r = ramspeed.Ramspeed(args, self.ctx)
+
+ sample_output = '{"Result": [{"Test_type": "INTEGER Copy:",\
+ "Bandwidth(MBps)": 4000.97}, {"Test_type": "INTEGER Scale:",\
+ "Bandwidth(MBps)": 4400.59}, {"Test_type": "INTEGER Add:",\
+ "Bandwidth(MBps)": 4300.48}, {"Test_type": "INTEGER Triad:",\
+ "Bandwidth(MBps)": 1300.27}, {"Test_type": "INTEGER AVERAGE:",\
+ "Bandwidth(MBps)": 2401.58}]}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, r.run, self.result)
+
+ def test_ramspeed_unsuccessful_unknown_type_run(self, mock_ssh):
+ options = {
+ "test_id": 30,
+ "load": 16,
+ "block_size": 32
+ }
+ args = {'options': options}
+ r = ramspeed.Ramspeed(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'No such type_id: 30 for \
+ Ramspeed scenario')
+ self.assertRaises(RuntimeError, r.run, self.result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu.py
new file mode 100644
index 000000000..643e1eae2
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu.py
@@ -0,0 +1,75 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.spec_cpu.SpecCPU
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+
+from yardstick.benchmark.scenarios.compute import spec_cpu
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.spec_cpu.ssh')
+class SpecCPUTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ def test_spec_cpu_successful_setup(self, mock_ssh):
+
+ options = {
+ "SPECint_benchmark": "perlbench",
+ "output_format": "all",
+ "runspec_iterations": "1",
+ "runspec_tune": "base",
+ "runspec_size": "test"
+ }
+ args = {"options": options}
+ s = spec_cpu.SpecCPU(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ s.setup()
+ self.assertIsNotNone(s.client)
+ self.assertTrue(s.setup_done)
+
+ def test_spec_cpu_successful_run_no_sla(self, mock_ssh):
+
+ options = {
+ "SPECint_benchmark": "perlbench",
+ "runspec_tune": "all",
+ "output_format": "all"
+ }
+ args = {"options": options}
+ s = spec_cpu.SpecCPU(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ s.run(self.result)
+ expected_result = {}
+ self.assertEqual(self.result, expected_result)
+
+ def test_spec_cpu_unsuccessful_script_error(self, mock_ssh):
+ options = {
+ "benchmark_subset": "int"
+ }
+ args = {"options": options}
+ s = spec_cpu.SpecCPU(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, s.run, self.result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu_for_vm.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu_for_vm.py
new file mode 100644
index 000000000..74ef576b6
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu_for_vm.py
@@ -0,0 +1,76 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.spec_cpu_for_vm.SpecCPUforVM
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+
+from yardstick.benchmark.scenarios.compute import spec_cpu_for_vm
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.spec_cpu_for_vm.ssh')
+class SpecCPUforVMTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ def test_spec_cpu_successful_setup(self, mock_ssh):
+
+ options = {
+ "SPECint_benchmark": "perlbench",
+ "runspec_tune": "all",
+ "output_format": "all",
+ "runspec_iterations": "1",
+ "runspec_size": "test"
+ }
+ args = {"options": options}
+ s = spec_cpu_for_vm.SpecCPUforVM(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ s.setup()
+ self.assertIsNotNone(s.client)
+ self.assertTrue(s.setup_done)
+
+ def test_spec_cpu_successful_run_no_sla(self, mock_ssh):
+
+ options = {
+ "SPECint_benchmark": "perlbench",
+ "runspec_tune": "all",
+ "output_format": "all"
+ }
+ args = {"options": options}
+ s = spec_cpu_for_vm.SpecCPUforVM(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
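+ # run() also calls get() on the SSH client (retrieving the SPEC result), so it is mocked too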
+ mock_ssh.SSH.from_node().get.return_value = (0, '', '')
+ s.run(self.result)
+ expected_result = {'SPEC_CPU_result': ''}
+ self.assertEqual(self.result, expected_result)
+
+ def test_spec_cpu_unsuccessful_script_error(self, mock_ssh):
+ options = {
+ "benchmark_subset": "int"
+ }
+ args = {"options": options}
+ s = spec_cpu_for_vm.SpecCPUforVM(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, s.run, self.result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_unixbench.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_unixbench.py
new file mode 100644
index 000000000..e4a8d6e26
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_unixbench.py
@@ -0,0 +1,163 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and other.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.unixbench.Unixbench
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.compute import unixbench
+from yardstick.common import exceptions as y_exc
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.unixbench.ssh')
+class UnixbenchTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ "host": {
+ "ip": "192.168.50.28",
+ "user": "root",
+ "key_filename": "mykey.key"
+ }
+ }
+
+ def test_unixbench_successful_setup(self, mock_ssh):
+
+ u = unixbench.Unixbench({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ u.setup()
+ self.assertIsNotNone(u.client)
+ self.assertTrue(u.setup_done)
+
+ def test_unixbench_successful_no_sla(self, mock_ssh):
+
+ options = {
+ "test_type": 'dhry2reg',
+ "run_mode": 'verbose'
+ }
+ args = {
+ "options": options,
+ }
+ u = unixbench.Unixbench(args, self.ctx)
+ result = {}
+
+ u.server = mock_ssh.SSH.from_node()
+
+ sample_output = '{"Score":"4425.4"}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ u.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(result, expected_result)
+
+ def test_unixbench_successful_in_quiet_mode(self, mock_ssh):
+
+ options = {
+ "test_type": 'dhry2reg',
+ "run_mode": 'quiet',
+ "copies": 1
+ }
+ args = {
+ "options": options,
+ }
+ u = unixbench.Unixbench(args, self.ctx)
+ result = {}
+
+ u.server = mock_ssh.SSH.from_node()
+
+ sample_output = '{"Score":"4425.4"}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ u.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(result, expected_result)
+
+ def test_unixbench_successful_sla(self, mock_ssh):
+
+ options = {
+ "test_type": 'dhry2reg',
+ "run_mode": 'verbose'
+ }
+ sla = {
+ "single_score": '100',
+ "parallel_score": '500'
+ }
+ args = {
+ "options": options,
+ "sla": sla
+ }
+ u = unixbench.Unixbench(args, self.ctx)
+ result = {}
+
+ u.server = mock_ssh.SSH.from_node()
+
+ sample_output = '{"signle_score":"2251.7","parallel_score":"4395.9"}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ u.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(result, expected_result)
+
+ def test_unixbench_unsuccessful_sla_single_score(self, mock_ssh):
+
+ args = {
+ "options": {},
+ "sla": {"single_score": "500"}
+ }
+ u = unixbench.Unixbench(args, self.ctx)
+ result = {}
+
+ u.server = mock_ssh.SSH.from_node()
+ sample_output = '{"single_score":"200.7","parallel_score":"4395.9"}'
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, u.run, result)
+
+ def test_unixbench_unsuccessful_sla_parallel_score(self, mock_ssh):
+
+ args = {
+ "options": {},
+ "sla": {"parallel_score": "4000"}
+ }
+ u = unixbench.Unixbench(args, self.ctx)
+ result = {}
+
+ u.server = mock_ssh.SSH.from_node()
+ sample_output = '{"signle_score":"2251.7","parallel_score":"3395.9"}'
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, u.run, result)
+
+ def test_unixbench_unsuccessful_script_error(self, mock_ssh):
+
+ options = {
+ "test_type": 'dhry2reg',
+ "run_mode": 'verbose'
+ }
+ sla = {
+ "single_score": '100',
+ "parallel_score": '500'
+ }
+ args = {
+ "options": options,
+ "sla": sla
+ }
+ u = unixbench.Unixbench(args, self.ctx)
+ result = {}
+
+ u.server = mock_ssh.SSH.from_node()
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, u.run, result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/dummy/__init__.py b/yardstick/tests/unit/benchmark/scenarios/dummy/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/dummy/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/dummy/test_dummy.py b/yardstick/tests/unit/benchmark/scenarios/dummy/test_dummy.py
new file mode 100644
index 000000000..875302da8
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/dummy/test_dummy.py
@@ -0,0 +1,32 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.dummy.dummy
+
+from __future__ import absolute_import
+import unittest
+
+from yardstick.benchmark.scenarios.dummy import dummy
+
+
+class DummyTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.test_context = dummy.Dummy(None, None)
+
+ self.assertIsNone(self.test_context.scenario_cfg)
+ self.assertIsNone(self.test_context.context_cfg)
+ self.assertFalse(self.test_context.setup_done)
+
+ def test_run(self):
+ result = {}
+ self.test_context.run(result)
+
+ self.assertEqual(result["hello"], "yardstick")
+ self.assertTrue(self.test_context.setup_done)
diff --git a/yardstick/tests/unit/benchmark/scenarios/energy/__init__.py b/yardstick/tests/unit/benchmark/scenarios/energy/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/energy/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_chassis_output.txt b/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_chassis_output.txt
new file mode 100644
index 000000000..9b3afd1fb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_chassis_output.txt
@@ -0,0 +1,14 @@
+{
+ "@odata.id": "/redfish/v1/Chassis",
+ "Name": "ChassisCollection",
+ "@odata.context": "/redfish/v1/$metadata#ChassisCollection.ChassisCollection",
+ "Members": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.type": "#ChassisCollection.ChassisCollection",
+ "@odata.etag": "\"af5a94479815eb5f87fe91ea08fde0ac\"",
+ "Members@odata.count": 1,
+ "Description": "A collection of Chassis resource instances."
+}
diff --git a/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_power_metrics.txt b/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_power_metrics.txt
new file mode 100644
index 000000000..343ed3667
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_power_metrics.txt
@@ -0,0 +1,300 @@
+{
+ "PowerControl@odata.count": 1,
+ "@odata.id": "/redfish/v1/Chassis/1/Power",
+ "Redundancy@odata.count": 1,
+ "@odata.context": "/redfish/v1/$metadata#Power.Power",
+ "Voltages": [
+ {
+ "MaxReadingRange": 14.28,
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Systems/1"
+ },
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/Voltages/0",
+ "Status": {
+ "State": "Enabled"
+ },
+ "SensorNumber": 140,
+ "Name": "SysBrd 12V",
+ "PhysicalContext": "VoltageRegulator",
+ "LowerThresholdCritical": 10.81,
+ "RelatedItem@odata.count": 2,
+ "MemberId": "0",
+ "MinReadingRange": null,
+ "ReadingVolts": 12.15,
+ "UpperThresholdCritical": 13.22
+ },
+ {
+ "MaxReadingRange": 3.95,
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Systems/1"
+ },
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/Voltages/1",
+ "Status": {
+ "State": "Enabled"
+ },
+ "SensorNumber": 141,
+ "Name": "SysBrd 3.3V",
+ "PhysicalContext": "VoltageRegulator",
+ "LowerThresholdCritical": 2.98,
+ "RelatedItem@odata.count": 2,
+ "MemberId": "1",
+ "MinReadingRange": null,
+ "UpperThresholdCritical": 3.63,
+ "ReadingVolts": 3.36
+ },
+ {
+ "MaxReadingRange": 5.97,
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Systems/1"
+ },
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/Voltages/2",
+ "Status": {
+ "State": "Enabled"
+ },
+ "SensorNumber": 142,
+ "Name": "SysBrd 5V",
+ "PhysicalContext": "VoltageRegulator",
+ "LowerThresholdCritical": 4.49,
+ "RelatedItem@odata.count": 2,
+ "MemberId": "2",
+ "MinReadingRange": null,
+ "UpperThresholdCritical": 5.5,
+ "ReadingVolts": 5.03
+ },
+ {
+ "MaxReadingRange": 3.32,
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Systems/1"
+ },
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/Voltages/3",
+ "Status": {
+ "State": "Enabled"
+ },
+ "SensorNumber": 3,
+ "Name": "CMOS Battery",
+ "PhysicalContext": "VoltageRegulator",
+ "LowerThresholdCritical": 2.25,
+ "RelatedItem@odata.count": 2,
+ "MemberId": "3",
+ "MinReadingRange": null,
+ "LowerThresholdNonCritical": 2.39,
+ "ReadingVolts": 3.12
+ }
+ ],
+ "Voltages@odata.count": 4,
+ "Redundancy": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/Redundancy/0",
+ "Status": {
+ "State": "Enabled",
+ "Health": "OK"
+ },
+ "Name": "PSU Redundancy",
+ "MinNumNeeded": 2,
+ "Oem": {
+ "Lenovo": {
+ "NonRedundantAvailablePower": 1100,
+ "@odata.type": "#LenovoRedundancy.v1_0_0.LenovoRedundancyProperties",
+ "PowerRedundancySettings": {
+ "EstimatedUsage": "58.55%",
+ "MaxPowerLimitWatts": 1100,
+ "PowerFailureLimit": 0,
+ "PowerRedundancyPolicy": "RedundantWithThrottling"
+ }
+ }
+ },
+ "RedundancyEnabled": true,
+ "RedundancySet": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/PowerSupplies/0"
+ },
+ {
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/PowerSupplies/1"
+ }
+ ],
+ "RedundancySet@odata.count": 2,
+ "MaxNumSupported": 2,
+ "Mode": "N+m",
+ "MemberId": "0"
+ }
+ ],
+ "Description": "Power Consumption and Power Limiting",
+ "Name": "Power",
+ "PowerSupplies@odata.count": 2,
+ "Oem": {
+ "Lenovo": {
+ "@odata.type": "#LenovoPower.v1_0_0.Capabilities",
+ "LocalPowerControlEnabled": true,
+ "PowerOnPermissionEnabled": true,
+ "PowerRestorePolicy": "Restore",
+ "WakeOnLANEnabled": true
+ }
+ },
+ "@odata.type": "#Power.v1_5_1.Power",
+ "Id": "Power",
+ "@odata.etag": "\"ad85a1403e07a433386e9907d00565cc\"",
+ "PowerControl": [
+ {
+ "PowerAllocatedWatts": 1100,
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/PowerControl/0",
+ "Status": {
+ "HealthRollup": "Warning",
+ "State": "Enabled"
+ },
+ "PowerLimit": {
+ "LimitException": "NoAction",
+ "LimitInWatts": null
+ },
+ "Name": "Server Power Control",
+ "Oem": {
+ "Lenovo": {
+ "PowerUtilization": {
+ "MaxLimitInWatts": 1100,
+ "EnablePowerCapping": false,
+ "LimitMode": "AC",
+ "EnablePowerCapping@Redfish.Deprecated": "The property is deprecated. Please use LimitInWatts instead.",
+ "CapacityMinAC": 617,
+ "MinLimitInWatts": 0,
+ "GuaranteedInWatts": 617,
+ "CapacityMinDC": 578,
+ "CapacityMaxDC": 749,
+ "CapacityMaxAC": 802
+ },
+ "HistoryPowerMetric": {
+ "@odata.id": "/redfish/v1/Chassis/1/Power/PowerControl/0/Oem/Lenovo/HistoryPowerMetric"
+ },
+ "@odata.type": "#LenovoPower.v1_0_0.PowerControl"
+ }
+ },
+ "PowerAvailableWatts": 0,
+ "PowerMetrics": {
+ "IntervalInMin": 60,
+ "AverageConsumedWatts": 314.716675,
+ "MinConsumedWatts": 311,
+ "MaxConsumedWatts": 318
+ },
+ "RelatedItem@odata.count": 1,
+ "MemberId": "0",
+ "PowerRequestedWatts": 802,
+ "PowerConsumedWatts": 344,
+ "PowerCapacityWatts": 1100
+ }
+ ],
+ "PowerSupplies": [
+ {
+ "SerialNumber": "A4DB8BP11WJ",
+ "InputRanges": [
+ {
+ "InputType": null,
+ "OutputWattage": null,
+ "MinimumVoltage": null,
+ "MaximumVoltage": null
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/PowerSupplies/0",
+ "RelatedItem@odata.count": 1,
+ "MemberId": "0",
+ "PartNumber": "SP57A02023",
+ "FirmwareVersion": "4.52",
+ "Status": {
+ "State": "Enabled",
+ "Health": "Warning"
+ },
+ "LineInputVoltage": null,
+ "Name": "PSU1",
+ "PowerSupplyType": "Unknown",
+ "LastPowerOutputWatts": 316,
+ "Oem": {
+ "Lenovo": {
+ "Location": {
+ "InfoFormat": "Slot X",
+ "Info": "Slot 1"
+ },
+ "HistoryPowerSupplyMetric": {
+ "@odata.id": "/redfish/v1/Chassis/1/Power/PowerSupplies/0/Oem/Lenovo/HistoryPowerSupplyMetric"
+ },
+ "@odata.type": "#LenovoPower.v1_0_0.PowerSupply"
+ }
+ },
+ "PowerCapacityWatts": null,
+ "Manufacturer": "ACBE",
+ "LineInputVoltageType": "Unknown",
+ "Model": "LENOVO-SP57A02023",
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ]
+ },
+ {
+ "SerialNumber": "A4DB8BP12J7",
+ "InputRanges": [
+ {
+ "InputType": "AC",
+ "OutputWattage": 1100,
+ "MinimumVoltage": 200,
+ "MaximumVoltage": 240
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/PowerSupplies/1",
+ "RelatedItem@odata.count": 1,
+ "MemberId": "1",
+ "PartNumber": "SP57A02023",
+ "FirmwareVersion": "4.52",
+ "Status": {
+ "State": "Enabled",
+ "Health": "OK"
+ },
+ "LineInputVoltage": 220,
+ "Name": "PSU2",
+ "PowerSupplyType": "AC",
+ "LastPowerOutputWatts": 316,
+ "Oem": {
+ "Lenovo": {
+ "Location": {
+ "InfoFormat": "Slot X",
+ "Info": "Slot 2"
+ },
+ "HistoryPowerSupplyMetric": {
+ "@odata.id": "/redfish/v1/Chassis/1/Power/PowerSupplies/1/Oem/Lenovo/HistoryPowerSupplyMetric"
+ },
+ "@odata.type": "#LenovoPower.v1_0_0.PowerSupply"
+ }
+ },
+ "PowerCapacityWatts": 1100,
+ "Manufacturer": "ACBE",
+ "LineInputVoltageType": "ACMidLine",
+ "Model": "LENOVO-SP57A02023",
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ]
+ }
+ ]
+}
diff --git a/yardstick/tests/unit/benchmark/scenarios/energy/test_energy.py b/yardstick/tests/unit/benchmark/scenarios/energy/test_energy.py
new file mode 100644
index 000000000..98daefeb7
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/energy/test_energy.py
@@ -0,0 +1,182 @@
+##############################################################################
+# Copyright (c) 2019 Lenovo Group Limited Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.energy.energy.Energy
+
+from __future__ import absolute_import
+import unittest
+import mock
+import os
+from yardstick.benchmark.scenarios.energy import energy
+
+
+class EnergyTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'target': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'password': 'passw0rd',
+ 'redfish_ip': '10.229.17.105',
+ 'redfish_user': 'USERID',
+ 'redfish_pwd': "PASSW0RD",
+ }
+ }
+ self.result = {}
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_setup_response_success(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ p.setup()
+ self.assertTrue(p.get_response)
+ self.assertTrue(p.setup_done)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_setup_response_failed(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 404
+ p.setup()
+ self.assertFalse(p.get_response)
+ self.assertTrue(p.setup_done)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_load_chassis_list_success(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ expect_result = self._read_file("energy_sample_chassis_output.txt")
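+ # the fixture is read as plain text; coerce it to str and normalize quoting so the mocked response body is valid JSON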
+ expect_result = str(expect_result)
+ expect_result = expect_result.replace("'", '"')
+ mock_send_request.return_value.status_code = 200
+ mock_send_request.return_value.text = expect_result
+ self.result = p.load_chassis_list()
+ self.assertEqual(self.result, ["/redfish/v1/Chassis/1"])
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_load_chassis_response_fail(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 404
+ self.result = p.load_chassis_list()
+ self.assertEqual(self.result, [])
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_load_chassis_wrongtype_response(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ expect_result = {}
+ mock_send_request.return_value.text = expect_result
+ self.result = p.load_chassis_list()
+ self.assertEqual(self.result, [])
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_load_chassis_improper_key(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ expect_result = '{"some_key": "some_value"}'
+ mock_send_request.return_value.text = expect_result
+ self.result = p.load_chassis_list()
+ self.assertEqual(self.result, [])
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_energy_getpower_success(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ expect_result = self._read_file("energy_sample_power_metrics.txt")
+ expect_result = str(expect_result)
+ expect_result = expect_result.replace("'", '"')
+ mock_send_request.return_value.status_code = 200
+ mock_send_request.return_value.text = expect_result
+ self.result = p.get_power("/redfish/v1/Chassis/1")
+ self.assertEqual(self.result, 344)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_energy_getpower_response_fail(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 404
+ self.result = p.get_power("/redfish/v1/Chassis/1")
+ self.assertEqual(self.result, -1)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_energy_getpower_wrongtype_response(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ expect_result = {}
+ mock_send_request.return_value.text = expect_result
+ self.result = p.get_power("/redfish/v1/Chassis/1")
+ self.assertEqual(self.result, -1)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_energy_getpower_improper_key(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ expect_result = '{"some_key": "some_value"}'
+ mock_send_request.return_value.text = expect_result
+ self.result = p.get_power("/redfish/v1/Chassis/1")
+ self.assertEqual(self.result, -1)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_run_success(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
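+ # Stub out chassis discovery and power readout so run() only exercises result aggregation.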
+ chassis_list = mock.Mock(return_value=["/redfish/v1/Chassis/1"])
+ p.load_chassis_list = chassis_list
+ power = mock.Mock(return_value=344)
+ p.get_power = power
+ p.run(self.result)
+ self.assertEqual(self.result, {"power": 344})
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_run_no_response(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 404
+ chassis_list = mock.Mock(return_value=["/redfish/v1/Chassis/1"])
+ p.load_chassis_list = chassis_list
+ p.run(self.result)
+ self.assertEqual(self.result, {"power": -1})
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_run_wrong_chassis(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ chassis_list = mock.Mock(return_value=[])
+ p.load_chassis_list = chassis_list
+ p.run(self.result)
+ self.assertEqual(self.result, {"power": -1})
+
+ def _read_file(self, filename):
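+ # Helper: read a sample Redfish payload stored alongside this test module.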
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, filename)
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/__init__.py b/yardstick/tests/unit/benchmark/scenarios/lib/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_add_memory_load.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_add_memory_load.py
new file mode 100644
index 000000000..af4f0c8ab
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_add_memory_load.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib.add_memory_load import AddMemoryLoad
+
+
+class AddMemoryLoadTestCase(unittest.TestCase):
+
+ @mock.patch('yardstick.ssh.SSH.from_node')
+ def test_add_memory_load_with_load(self, mock_from_node):
+ scenario_cfg = {
+ 'options': {
+ 'memory_load': 0.5
+ }
+ }
+ context_cfg = {
+ 'host': {}
+ }
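+ # The SSH client returned by from_node() is mocked so execute() yields a (status, stdout, stderr) tuple.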
+ mock_from_node().execute.return_value = (0, '0 2048 512', '')
+ obj = AddMemoryLoad(scenario_cfg, context_cfg)
+ obj.run({})
+ mock_from_node.assert_called()
+
+ @mock.patch('yardstick.ssh.SSH.from_node')
+ def test_add_memory_load_without_load(self, mock_from_node):
+ scenario_cfg = {
+ 'options': {
+ 'memory_load': 0
+ }
+ }
+ context_cfg = {
+ 'host': {}
+ }
+ obj = AddMemoryLoad(scenario_cfg, context_cfg)
+ obj.run({})
+ mock_from_node.assert_called_once()
+
+ @mock.patch('yardstick.ssh.SSH.from_node')
+ def test_add_memory_load_without_args(self, mock_from_node):
+ scenario_cfg = {
+ 'options': {
+ }
+ }
+ context_cfg = {
+ 'host': {}
+ }
+ obj = AddMemoryLoad(scenario_cfg, context_cfg)
+ obj.run({})
+ mock_from_node.assert_called_once()
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_attach_volume.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_attach_volume.py
new file mode 100644
index 000000000..bb7fa4536
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_attach_volume.py
@@ -0,0 +1,56 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import attach_volume
+
+
+class AttachVolumeTestCase(unittest.TestCase):
+
+ def setUp(self):
+
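+ # Patch the openstack_utils helpers and the module logger; addCleanup() stops all patches after each test.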
+ self._mock_attach_volume_to_server = mock.patch.object(
+ openstack_utils, 'attach_volume_to_server')
+ self.mock_attach_volume_to_server = (
+ self._mock_attach_volume_to_server.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(attach_volume, 'LOG')
+ self.mock_log = self._mock_log.start()
+ _uuid = uuidutils.generate_uuid()
+ self.args = {'options': {'server_name_or_id': _uuid,
+ 'volume_name_or_id': _uuid}}
+ self.result = {}
+ self.addCleanup(self._stop_mock)
+ self.attachvol_obj = attach_volume.AttachVolume(self.args, mock.ANY)
+
+ def _stop_mock(self):
+ self._mock_attach_volume_to_server.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_attach_volume_to_server.return_value = True
+ self.assertIsNone(self.attachvol_obj.run(self.result))
+ self.assertEqual({'attach_volume': 1}, self.result)
+ self.mock_log.info.assert_called_once_with(
+ 'Attach volume to server successful!')
+
+ def test_run_fail(self):
+ self.mock_attach_volume_to_server.return_value = False
+ with self.assertRaises(exceptions.ScenarioAttachVolumeError):
+ self.attachvol_obj.run(self.result)
+ self.assertEqual({'attach_volume': 0}, self.result)
+ self.mock_log.error.assert_called_once_with(
+ 'Attach volume to server failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_check_connectivity.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_check_connectivity.py
new file mode 100644
index 000000000..a48353a4f
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_check_connectivity.py
@@ -0,0 +1,78 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.lib.check_connectivity.CheckConnectivity
+
+from __future__ import absolute_import
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.lib import check_connectivity
+
+
+class CheckConnectivityTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': 'mykey.key',
+ 'ssh_port': '22'
+ },
+ 'target': {
+ 'ipaddr': '172.16.0.138'
+ }
+ }
+
+ @mock.patch('yardstick.benchmark.scenarios.lib.check_connectivity.ssh')
+ def test_check_connectivity(self, mock_ssh):
+
+ args = {
+ 'options': {'src_ip_addr': '192.168.23.2',
+ 'dest_ip_addr': '192.168.23.10',
+ 'ssh_user': 'root',
+ 'ssh_passwd': 'root',
+ 'ssh_port': '22',
+ 'ssh_timeout': 600,
+ 'ping_parameter': "-s 2048"
+ },
+ 'sla': {'status': 'True',
+ 'action': 'assert'}
+ }
+
+ # TODO(elfoley): Properly check the outputs
+ result = {} # pylint: disable=unused-variable
+
+ obj = check_connectivity.CheckConnectivity(args, {})
+ obj.setup()
+ mock_ssh.SSH.execute.return_value = (0, '100', '')
+
+ @mock.patch('yardstick.benchmark.scenarios.lib.check_connectivity.ssh')
+ def test_check_connectivity_key(self, mock_ssh):
+
+ args = {
+ 'options': {'ssh_user': 'root',
+ 'ssh_key': '/root/.ssh/id_rsa',
+ 'ssh_port': '22',
+ 'ssh_timeout': 600,
+ 'ping_parameter': "-s 2048"
+ },
+ 'sla': {'status': 'True',
+ 'action': 'assert'}
+ }
+
+ # TODO(elfoley): Properly check the outputs
+ result = {} # pylint: disable=unused-variable
+
+ obj = check_connectivity.CheckConnectivity(args, self.ctx)
+ obj.setup()
+
+ mock_ssh.SSH.execute.return_value = (0, '100', '')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_check_numa_info.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_check_numa_info.py
new file mode 100644
index 000000000..270c9d3c9
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_check_numa_info.py
@@ -0,0 +1,76 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.lib.check_numa_info import CheckNumaInfo
+
+
+class CheckNumaInfoTestCase(unittest.TestCase):
+
+ @mock.patch.object(CheckNumaInfo, '_check_vm2_status')
+ def test_run(self, mock_check_vm2):
+ scenario_cfg = {'info1': {}, 'info2': {}}
+ obj = CheckNumaInfo(scenario_cfg, {})
+ obj.run({})
+ mock_check_vm2.assert_called_once()
+
+ def test_check_vm2_status_length_eq_1(self):
+ info1 = {
+ 'pinning': [0],
+ 'vcpupin': [{
+ 'cpuset': '1,2'
+ }]
+ }
+ info2 = {
+ 'pinning': [0],
+ 'vcpupin': [{
+ 'cpuset': '1,2'
+ }]
+ }
+ scenario_cfg = {'info1': info1, 'info2': info2}
+ obj = CheckNumaInfo(scenario_cfg, {})
+ status = obj._check_vm2_status(info1, info2)
+ self.assertTrue(status)
+
+ def test_check_vm2_status_length_gt_1(self):
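+ # Pinning spread across more than one NUMA node is expected to fail the check.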
+ info1 = {
+ 'pinning': [0, 1],
+ 'vcpupin': [{
+ 'cpuset': '1,2'
+ }]
+ }
+ info2 = {
+ 'pinning': [0, 1],
+ 'vcpupin': [{
+ 'cpuset': '1,2'
+ }]
+ }
+ scenario_cfg = {'info1': info1, 'info2': info2}
+ obj = CheckNumaInfo(scenario_cfg, {})
+ status = obj._check_vm2_status(info1, info2)
+ self.assertFalse(status)
+
+ def test_check_vm2_status_length_not_in_set(self):
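+ # cpuset values outside the expected set are likewise expected to fail the check.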
+ info1 = {
+ 'pinning': [0],
+ 'vcpupin': [{
+ 'cpuset': '1,7'
+ }]
+ }
+ info2 = {
+ 'pinning': [0],
+ 'vcpupin': [{
+ 'cpuset': '1,7'
+ }]
+ }
+ scenario_cfg = {'info1': info1, 'info2': info2}
+ obj = CheckNumaInfo(scenario_cfg, {})
+ status = obj._check_vm2_status(info1, info2)
+ self.assertFalse(status)
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_check_value.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_check_value.py
new file mode 100644
index 000000000..b0488bacd
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_check_value.py
@@ -0,0 +1,63 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+
+from yardstick.benchmark.scenarios.lib import check_value
+from yardstick.common import exceptions as y_exc
+
+
+class CheckValueTestCase(unittest.TestCase):
+
+ def test_eq_pass(self):
+ scenario_cfg = {'options': {'operator': 'eq',
+ 'value1': 1,
+ 'value2': 1}}
+ obj = check_value.CheckValue(scenario_cfg, {})
+ result = obj.run({})
+
+ self.assertEqual({}, result)
+
+ def test_ne_pass(self):
+ scenario_cfg = {'options': {'operator': 'ne',
+ 'value1': 1,
+ 'value2': 2}}
+ obj = check_value.CheckValue(scenario_cfg, {})
+ result = obj.run({})
+
+ self.assertEqual({}, result)
+
+ def test_result(self):
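+ # With an 'output' key configured, a passing comparison is reported as 'PASS' under that key.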
+ scenario_cfg = {'options': {'operator': 'eq',
+ 'value1': 1,
+ 'value2': 1},
+ 'output': 'foo'}
+ obj = check_value.CheckValue(scenario_cfg, {})
+ result = obj.run({})
+
+ self.assertDictEqual(result, {'foo': 'PASS'})
+
+ def test_eq(self):
+ scenario_cfg = {'options': {'operator': 'eq',
+ 'value1': 1,
+ 'value2': 2}}
+ obj = check_value.CheckValue(scenario_cfg, {})
+
+ with self.assertRaises(y_exc.ValueCheckError):
+ result = obj.run({})
+ self.assertEqual({}, result)
+
+ def test_ne(self):
+ scenario_cfg = {'options': {'operator': 'ne',
+ 'value1': 1,
+ 'value2': 1}}
+ obj = check_value.CheckValue(scenario_cfg, {})
+
+ with self.assertRaises(y_exc.ValueCheckError):
+ result = obj.run({})
+ self.assertEqual({}, result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_flavor.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_flavor.py
new file mode 100644
index 000000000..0b175fae8
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_flavor.py
@@ -0,0 +1,29 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib.create_flavor import CreateFlavor
+
+
+class CreateFlavorTestCase(unittest.TestCase):
+
+ @mock.patch('yardstick.common.openstack_utils.create_flavor')
+ def test_create_flavor(self, mock_create_flavor):
+ options = {
+ 'flavor_name': 'yardstick_test_flavor',
+ 'vcpus': '2',
+ 'ram': '1024',
+ 'disk': '100',
+ 'is_public': 'True'
+ }
+ args = {"options": options}
+ obj = CreateFlavor(args, {})
+ obj.run({})
+ mock_create_flavor.assert_called_once()
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_floating_ip.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_floating_ip.py
new file mode 100644
index 000000000..894cc1c2a
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_floating_ip.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib import create_floating_ip
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+
+
+class CreateFloatingIpTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_create_floating_ip = mock.patch.object(
+ openstack_utils, 'create_floating_ip')
+ self.mock_create_floating_ip = self._mock_create_floating_ip.start()
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_floating_ip, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'network_name_or_id': 'yardstick_net'}}
+ self.result = {}
+
+ self.fip_obj = create_floating_ip.CreateFloatingIp(self.args, mock.ANY)
+ self.fip_obj.scenario_cfg = {'output': 'key1\nkey2'}
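+ # The newline-separated output keys map, in order, onto the values returned by the scenario.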
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_floating_ip.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_create_floating_ip.return_value = {'fip_id': 'value1',
+ 'fip_addr': 'value2'}
+ output = self.fip_obj.run(self.result)
+ self.assertEqual({'floating_ip_create': 1}, self.result)
+ self.assertEqual({'key1': 'value1', 'key2': 'value2'}, output)
+ self.mock_log.info.assert_called_once_with(
+ 'Creating floating ip successful!')
+
+ def test_run_no_fip(self):
+ self.mock_create_floating_ip.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateFloatingIPError):
+ self.fip_obj.run(self.result)
+ self.assertEqual({'floating_ip_create': 0}, self.result)
+ self.mock_log.error.assert_called_once_with(
+ 'Creating floating ip failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_image.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_image.py
new file mode 100644
index 000000000..aebd1dfe8
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_image.py
@@ -0,0 +1,55 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+from oslo_utils import uuidutils
+import unittest
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_image
+
+
+class CreateImageTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_create_image = mock.patch.object(
+ openstack_utils, 'create_image')
+ self.mock_create_image = (
+ self._mock_create_image.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_image, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'image_name': 'yardstick_image'}}
+ self.result = {}
+ self.cimage_obj = create_image.CreateImage(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_image.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.cimage_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_image.return_value = _uuid
+ output = self.cimage_obj.run(self.result)
+ self.assertEqual({'image_create': 1}, self.result)
+ self.assertEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create image successful!')
+
+ def test_run_fail(self):
+ self.mock_create_image.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateImageError):
+ self.cimage_obj.run(self.result)
+ self.assertEqual({'image_create': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Create image failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_keypair.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_keypair.py
new file mode 100644
index 000000000..a7b683f47
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_keypair.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_keypair
+
+
+class CreateKeypairTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_create_keypair = mock.patch.object(
+ openstack_utils, 'create_keypair')
+ self.mock_create_keypair = (
+ self._mock_create_keypair.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_keypair, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'key_name': 'yardstick_key'}}
+ self.result = {}
+
+ self.ckeypair_obj = create_keypair.CreateKeypair(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_keypair.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.ckeypair_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_keypair.return_value = {
+ 'name': 'key-name', 'type': 'ssh', 'id': _uuid}
+ output = self.ckeypair_obj.run(self.result)
+ self.assertDictEqual({'keypair_create': 1}, self.result)
+ self.assertDictEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create keypair successful!')
+
+ def test_run_fail(self):
+ self.mock_create_keypair.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateKeypairError):
+ self.ckeypair_obj.run(self.result)
+ self.assertDictEqual({'keypair_create': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Create keypair failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_network.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_network.py
new file mode 100644
index 000000000..17a4ef2e1
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_network.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_network
+
+
+class CreateNetworkTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_create_neutron_net = mock.patch.object(
+ openstack_utils, 'create_neutron_net')
+ self.mock_create_neutron_net = self._mock_create_neutron_net.start()
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_network, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'network_name': 'yardstick_net'}}
+ self.result = {}
+
+ self._cnet_obj = create_network.CreateNetwork(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_neutron_net.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self._cnet_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_neutron_net.return_value = _uuid
+ output = self._cnet_obj.run(self.result)
+ self.assertEqual({"network_create": 1}, self.result)
+ self.assertEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create network successful!')
+
+ def test_run_fail_exception(self):
+ self.mock_create_neutron_net.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateNetworkError):
+ self._cnet_obj.run(self.result)
+ self.assertEqual({"network_create": 0}, self.result)
+ self.mock_log.error.assert_called_once_with(
+ 'Create network failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_port.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_port.py
new file mode 100644
index 000000000..bea02a630
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_port.py
@@ -0,0 +1,27 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib.create_port import CreatePort
+
+
+class CreatePortTestCase(unittest.TestCase):
+
+ @mock.patch('yardstick.common.openstack_utils.get_neutron_client')
+ def test_create_port(self, mock_get_neutron_client):
+ options = {
+ 'openstack_paras': {
+ 'name': 'yardstick_port'
+ }
+ }
+ args = {"options": options}
+ obj = CreatePort(args, {})
+ obj.run({})
+ mock_get_neutron_client.assert_called_once()
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_router.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_router.py
new file mode 100644
index 000000000..8d6f119ab
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_router.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_router
+
+
+class CreateRouterTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_create_neutron_router = mock.patch.object(
+ openstack_utils, 'create_neutron_router')
+ self.mock_create_neutron_router = (
+ self._mock_create_neutron_router.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_router, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'name': 'yardstick_net'}}
+ self.result = {}
+
+ self.crouter_obj = create_router.CreateRouter(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_neutron_router.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.crouter_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_neutron_router.return_value = _uuid
+ output = self.crouter_obj.run(self.result)
+ self.assertEqual({"router_create": 1}, self.result)
+ self.assertEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create router successful!')
+
+ def test_run_fail(self):
+ self.mock_create_neutron_router.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateRouterError):
+ self.crouter_obj.run(self.result)
+ self.assertEqual({"router_create": 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Create router failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_sec_group.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_sec_group.py
new file mode 100644
index 000000000..0477a49d4
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_sec_group.py
@@ -0,0 +1,59 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_sec_group
+
+
+class CreateSecurityGroupTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_create_security_group_full = mock.patch.object(
+ openstack_utils, 'create_security_group_full')
+ self.mock_create_security_group_full = (
+ self._mock_create_security_group_full.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_sec_group, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'sg_name': 'yardstick_sg'}}
+ self.result = {}
+
+ self.csecgp_obj = create_sec_group.CreateSecgroup(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_security_group_full.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.csecgp_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_security_group_full.return_value = _uuid
+ output = self.csecgp_obj.run(self.result)
+ self.assertEqual({'sg_create': 1}, self.result)
+ self.assertEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with(
+ 'Create security group successful!')
+
+ def test_run_fail(self):
+ self.mock_create_security_group_full.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateSecurityGroupError):
+ self.csecgp_obj.run(self.result)
+ self.assertEqual({'sg_create': 0}, self.result)
+ self.mock_log.error.assert_called_once_with(
+ 'Create security group failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_server.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_server.py
new file mode 100644
index 000000000..b58785112
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_server.py
@@ -0,0 +1,59 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_server
+
+
+class CreateServerTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_create_instance_and_wait_for_active = mock.patch.object(
+ openstack_utils, 'create_instance_and_wait_for_active')
+ self.mock_create_instance_and_wait_for_active = (
+ self._mock_create_instance_and_wait_for_active.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_server, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {
+ 'options': {'name': 'server-name', 'image': 'image-name',
+ 'flavor': 'flavor-name'}}
+ self.result = {}
+
+ self.addCleanup(self._stop_mock)
+ self.cserver_obj = create_server.CreateServer(self.args, mock.ANY)
+
+ def _stop_mock(self):
+ self._mock_create_instance_and_wait_for_active.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.cserver_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_instance_and_wait_for_active.return_value = (
+ {'name': 'server-name', 'flavor': 'flavor-name', 'id': _uuid})
+ output = self.cserver_obj.run(self.result)
+ self.assertEqual({'instance_create': 1}, self.result)
+ self.assertEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create server successful!')
+
+ def test_run_fail(self):
+ self.mock_create_instance_and_wait_for_active.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateServerError):
+ self.cserver_obj.run(self.result)
+ self.assertEqual({'instance_create': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Create server failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_subnet.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_subnet.py
new file mode 100644
index 000000000..856e985c4
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_subnet.py
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_subnet
+
+
+class CreateSubnetTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_create_neutron_subnet = mock.patch.object(
+ openstack_utils, 'create_neutron_subnet')
+ self.mock_create_neutron_subnet = (
+ self._mock_create_neutron_subnet.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_subnet, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'network_name_or_id': 'yardstick_net'}}
+ self.result = {"subnet_create": 0}
+
+ self._csubnet_obj = create_subnet.CreateSubnet(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_neutron_subnet.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self._csubnet_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_neutron_subnet.return_value = _uuid
+ output = self._csubnet_obj.run(self.result)
+ self.assertDictEqual({"subnet_create": 1}, self.result)
+ self.assertDictEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create subnet successful!')
+
+ def test_run_fail(self):
+ self._csubnet_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_neutron_subnet.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateSubnetError):
+ self._csubnet_obj.run(self.result)
+ self.assertDictEqual({"subnet_create": 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Create subnet failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_volume.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_volume.py
new file mode 100644
index 000000000..f91d2c3f4
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_volume.py
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_volume
+
+
+class CreateVolumeTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_create_volume = mock.patch.object(
+ openstack_utils, 'create_volume')
+ self.mock_create_volume = (
+ self._mock_create_volume.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_volume, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'size_gb': 1}}
+ self.result = {}
+
+ self.cvolume_obj = create_volume.CreateVolume(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_volume.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.cvolume_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_volume.return_value = {'name': 'yardstick_volume',
+ 'id': _uuid,
+ 'status': 'available'}
+ output = self.cvolume_obj.run(self.result)
+ self.assertDictEqual({'volume_create': 1}, self.result)
+ self.assertDictEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create volume successful!')
+
+ def test_run_fail(self):
+ self.mock_create_volume.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateVolumeError):
+ self.cvolume_obj.run(self.result)
+ self.assertDictEqual({'volume_create': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Create volume failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_flavor.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_flavor.py
new file mode 100644
index 000000000..24dbf8a16
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_flavor.py
@@ -0,0 +1,27 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib.delete_flavor import DeleteFlavor
+
+
+class DeleteFlavorTestCase(unittest.TestCase):
+
+ @mock.patch('yardstick.common.openstack_utils.delete_flavor')
+ @mock.patch('yardstick.common.openstack_utils.get_nova_client')
+ def test_delete_flavor(self, mock_get_nova_client, mock_delete_flavor):
+ options = {
+ 'flavor_name': 'yardstick_test_flavor'
+ }
+ args = {"options": options}
+ obj = DeleteFlavor(args, {})
+ obj.run({})
+ mock_get_nova_client.assert_called_once()
+ mock_delete_flavor.assert_called_once()
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_floating_ip.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_floating_ip.py
new file mode 100644
index 000000000..45a39eba2
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_floating_ip.py
@@ -0,0 +1,55 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_floating_ip
+
+
+class DeleteFloatingIpTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_delete_floating_ip = mock.patch.object(
+ openstack_utils, 'delete_floating_ip')
+ self.mock_delete_floating_ip = self._mock_delete_floating_ip.start()
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_floating_ip, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'floating_ip_id': uuidutils.generate_uuid()}}
+ self.result = {}
+
+ self.del_obj = delete_floating_ip.DeleteFloatingIp(
+ self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_floating_ip.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_floating_ip.return_value = True
+ self.assertIsNone(self.del_obj.run(self.result))
+ self.assertEqual({"delete_floating_ip": 1}, self.result)
+ self.mock_log.info.assert_called_once_with(
+ "Delete floating ip successful!")
+
+ def test_run_fail(self):
+ self.mock_delete_floating_ip.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteFloatingIPError):
+ self.del_obj.run(self.result)
+ self.assertEqual({"delete_floating_ip": 0}, self.result)
+ self.mock_log.error.assert_called_once_with(
+ "Delete floating ip failed!")
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_image.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_image.py
new file mode 100644
index 000000000..8a1d6d695
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_image.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_image
+
+
+class DeleteImageTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_delete_image = mock.patch.object(
+ openstack_utils, 'delete_image')
+ self.mock_delete_image = (
+ self._mock_delete_image.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_image, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'name_or_id': 'yardstick_image'}}
+ self.result = {}
+
+ self.delimg_obj = delete_image.DeleteImage(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_image.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_image.return_value = True
+ self.assertIsNone(self.delimg_obj.run(self.result))
+ self.assertEqual({'delete_image': 1}, self.result)
+ self.mock_log.info.assert_called_once_with('Delete image successful!')
+
+ def test_run_fail(self):
+ self.mock_delete_image.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteImageError):
+ self.delimg_obj.run(self.result)
+ self.assertEqual({'delete_image': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Delete image failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_keypair.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_keypair.py
new file mode 100644
index 000000000..c7940251e
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_keypair.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_keypair
+
+
+class DeleteKeypairTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_delete_keypair = mock.patch.object(
+ openstack_utils, 'delete_keypair')
+ self.mock_delete_keypair = self._mock_delete_keypair.start()
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_keypair, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'key_name': 'yardstick_key'}}
+ self.result = {}
+ self.delkey_obj = delete_keypair.DeleteKeypair(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_keypair.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_keypair.return_value = True
+ self.assertIsNone(self.delkey_obj.run(self.result))
+ self.assertEqual({'delete_keypair': 1}, self.result)
+ self.mock_log.info.assert_called_once_with(
+ 'Delete keypair successful!')
+
+ def test_run_fail(self):
+ self.mock_delete_keypair.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteKeypairError):
+ self.delkey_obj.run(self.result)
+ self.assertEqual({'delete_keypair': 0}, self.result)
+ self.mock_log.error.assert_called_once_with("Delete keypair failed!")
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_network.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_network.py
new file mode 100644
index 000000000..b6dbf4791
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_network.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_network
+
+
+class DeleteNetworkTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_delete_neutron_net = mock.patch.object(
+ openstack_utils, "delete_neutron_net")
+ self.mock_delete_neutron_net = self._mock_delete_neutron_net.start()
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, "get_shade_client")
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_network, "LOG")
+ self.mock_log = self._mock_log.start()
+ self.args = {"options": {"network_name_or_id": (
+ uuidutils.generate_uuid())}}
+ self.result = {}
+ self.del_obj = delete_network.DeleteNetwork(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_neutron_net.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_neutron_net.return_value = True
+ self.assertIsNone(self.del_obj.run(self.result))
+ self.assertEqual({"delete_network": 1}, self.result)
+ self.mock_log.info.assert_called_once_with(
+ "Delete network successful!")
+
+ def test_run_fail(self):
+ self.mock_delete_neutron_net.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteNetworkError):
+ self.del_obj.run(self.result)
+ self.assertEqual({"delete_network": 0}, self.result)
+ self.mock_log.error.assert_called_once_with("Delete network failed!")
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_port.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_port.py
new file mode 100644
index 000000000..9fd318580
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_port.py
@@ -0,0 +1,25 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib.delete_port import DeletePort
+
+
+class DeletePortTestCase(unittest.TestCase):
+
+ @mock.patch('yardstick.common.openstack_utils.get_neutron_client')
+ def test_delete_port(self, mock_get_neutron_client):
+ options = {
+ 'port_id': '123-123-123'
+ }
+ args = {"options": options}
+ obj = DeletePort(args, {})
+ obj.run({})
+ mock_get_neutron_client.assert_called_once()
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router.py
new file mode 100644
index 000000000..b76100f19
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_router
+
+
+class DeleteRouterTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_delete_neutron_router = mock.patch.object(
+ openstack_utils, 'delete_neutron_router')
+ self.mock_delete_neutron_router = (
+ self._mock_delete_neutron_router.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_router, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'router_id': uuidutils.generate_uuid()}}
+ self.result = {"delete_router": 0}
+
+ self._del_obj = delete_router.DeleteRouter(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_neutron_router.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_neutron_router.return_value = True
+ self.assertIsNone(self._del_obj.run(self.result))
+ self.assertEqual({"delete_router": 1}, self.result)
+ self.mock_log.info.assert_called_once_with("Delete router successful!")
+
+ def test_run_fail(self):
+ self.mock_delete_neutron_router.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteRouterError):
+ self._del_obj.run(self.result)
+ self.assertEqual({"delete_router": 0}, self.result)
+ self.mock_log.error.assert_called_once_with("Delete router failed!")
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_gateway.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_gateway.py
new file mode 100644
index 000000000..0c9cf7c17
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_gateway.py
@@ -0,0 +1,27 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib.delete_router_gateway import DeleteRouterGateway
+
+
+class DeleteRouterGatewayTestCase(unittest.TestCase):
+
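+ # Note: stacked mock.patch decorators inject mocks bottom-up, so the patch closest to the function is passed first.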
+ @mock.patch('yardstick.common.openstack_utils.get_neutron_client')
+ @mock.patch('yardstick.common.openstack_utils.remove_gateway_router')
+ def test_delete_router_gateway(self, mock_remove_gateway_router, mock_get_neutron_client):
+ options = {
+ 'router_id': '123-123-123'
+ }
+ args = {"options": options}
+ obj = DeleteRouterGateway(args, {})
+ obj.run({})
+ mock_get_neutron_client.assert_called_once()
+ mock_remove_gateway_router.assert_called_once()
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_interface.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_interface.py
new file mode 100644
index 000000000..823cb951a
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_interface.py
@@ -0,0 +1,56 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib import delete_router_interface
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+
+
+class DeleteRouterInterfaceTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_remove_router_interface = mock.patch.object(
+ openstack_utils, 'remove_router_interface')
+ self.mock_remove_router_interface = (
+ self._mock_remove_router_interface.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_router_interface, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'router': uuidutils.generate_uuid()}}
+ self.result = {}
+ self.delrout_obj = delete_router_interface.DeleteRouterInterface(
+ self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_remove_router_interface.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_remove_router_interface.return_value = True
+ self.assertIsNone(self.delrout_obj.run(self.result))
+ self.assertEqual({"delete_router_interface": 1}, self.result)
+ self.mock_log.info.assert_called_once_with(
+ "Delete router interface successful!")
+
+ def test_run_fail(self):
+ self.mock_remove_router_interface.return_value = False
+ with self.assertRaises(exceptions.ScenarioRemoveRouterIntError):
+ self.delrout_obj.run(self.result)
+ self.assertEqual({"delete_router_interface": 0}, self.result)
+ self.mock_log.error.assert_called_once_with(
+ "Delete router interface failed!")
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_server.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_server.py
new file mode 100644
index 000000000..55fe53df8
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_server.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_server
+
+
+class DeleteServerTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_delete_instance = mock.patch.object(
+ openstack_utils, 'delete_instance')
+ self.mock_delete_instance = (
+ self._mock_delete_instance.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_server, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'name_or_id': uuidutils.generate_uuid()}}
+ self.result = {}
+
+ self.delserver_obj = delete_server.DeleteServer(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_instance.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_instance.return_value = True
+ self.assertIsNone(self.delserver_obj.run(self.result))
+ self.assertEqual({'delete_server': 1}, self.result)
+ self.mock_log.info.assert_called_once_with('Delete server successful!')
+
+ def test_run_fail(self):
+ self.mock_delete_instance.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteServerError):
+ self.delserver_obj.run(self.result)
+ self.assertEqual({'delete_server': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Delete server failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_volume.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_volume.py
new file mode 100644
index 000000000..0db16f396
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_volume.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_volume
+
+
+class DeleteVolumeTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_delete_volume = mock.patch.object(
+ openstack_utils, 'delete_volume')
+ self.mock_delete_volume = (
+ self._mock_delete_volume.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_volume, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'name_or_id': 'yardstick_volume'}}
+ self.result = {}
+
+ self.delvol_obj = delete_volume.DeleteVolume(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_volume.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_volume.return_value = True
+ self.assertIsNone(self.delvol_obj.run(self.result))
+ self.assertEqual({'delete_volume': 1}, self.result)
+ self.mock_log.info.assert_called_once_with('Delete volume successful!')
+
+ def test_run_fail(self):
+ self.mock_delete_volume.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteVolumeError):
+ self.delvol_obj.run(self.result)
+ self.assertEqual({'delete_volume': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Delete volume failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_detach_volume.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_detach_volume.py
new file mode 100644
index 000000000..2bc57f495
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_detach_volume.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import detach_volume
+
+
+class DetachVolumeTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_detach_volume = mock.patch.object(
+ openstack_utils, 'detach_volume')
+ self.mock_detach_volume = (
+ self._mock_detach_volume.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(detach_volume, 'LOG')
+ self.mock_log = self._mock_log.start()
+ _uuid = uuidutils.generate_uuid()
+ self.args = {'options': {'server_name_or_id': _uuid,
+ 'volume_name_or_id': _uuid}}
+ self.result = {}
+
+ self.detachvol_obj = detach_volume.DetachVolume(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_detach_volume.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_detach_volume.return_value = True
+ self.assertIsNone(self.detachvol_obj.run(self.result))
+ self.assertEqual({'detach_volume': 1}, self.result)
+ self.mock_log.info.assert_called_once_with(
+ 'Detach volume from server successful!')
+
+ def test_run_fail(self):
+ self.mock_detach_volume.return_value = False
+ with self.assertRaises(exceptions.ScenarioDetachVolumeError):
+ self.detachvol_obj.run(self.result)
+ self.assertEqual({'detach_volume': 0}, self.result)
+ self.mock_log.error.assert_called_once_with(
+ 'Detach volume from server failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_get_flavor.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_flavor.py
new file mode 100644
index 000000000..1c1364348
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_flavor.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import get_flavor
+
+
+class GetFlavorTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_get_flavor = mock.patch.object(
+ openstack_utils, 'get_flavor')
+ self.mock_get_flavor = self._mock_get_flavor.start()
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(get_flavor, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'name_or_id': 'yardstick_flavor'}}
+ self.result = {}
+
+ self.getflavor_obj = get_flavor.GetFlavor(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_get_flavor.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.getflavor_obj.scenario_cfg = {'output': 'flavor'}
+ self.mock_get_flavor.return_value = (
+ {'name': 'flavor-name', 'id': _uuid})
+ output = self.getflavor_obj.run(self.result)
+ self.assertDictEqual({'get_flavor': 1}, self.result)
+ self.assertDictEqual({'flavor': {'name': 'flavor-name', 'id': _uuid}},
+ output)
+ self.mock_log.info.assert_called_once_with('Get flavor successful!')
+
+ def test_run_fail(self):
+ self.mock_get_flavor.return_value = None
+ with self.assertRaises(exceptions.ScenarioGetFlavorError):
+ self.getflavor_obj.run(self.result)
+ self.assertDictEqual({'get_flavor': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Get flavor failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_get_migrate_target_host.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_migrate_target_host.py
new file mode 100644
index 000000000..879b2b988
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_migrate_target_host.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib.get_migrate_target_host import GetMigrateTargetHost
+
+BASE = 'yardstick.benchmark.scenarios.lib.get_migrate_target_host'
+
+
+class GetMigrateTargetHostTestCase(unittest.TestCase):
+
+ @mock.patch('{}.openstack_utils.get_nova_client'.format(BASE))
+ @mock.patch('{}.GetMigrateTargetHost._get_migrate_host'.format(BASE))
+ @mock.patch('{}.GetMigrateTargetHost._get_current_host_name'.format(BASE))
+ def test_get_migrate_target_host(self,
+ mock_get_current_host_name,
+ mock_get_migrate_host,
+ mock_get_nova_client):
+ obj = GetMigrateTargetHost({}, {})
+ obj.run({})
+ mock_get_nova_client.assert_called_once()
+ mock_get_current_host_name.assert_called_once()
+ mock_get_migrate_host.assert_called_once()
+
+ @mock.patch('{}.openstack_utils.get_nova_client'.format(BASE))
+ def test_get_migrate_host(self, mock_get_nova_client):
+ class A(object):
+ def __init__(self, service):
+ self.service = service
+ self.host = 'host4'
+
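+ # Minimal stand-in for a nova host entry; only its 'service' and 'host' attributes are needed here.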
+ mock_get_nova_client().hosts.list_all.return_value = [A('compute')]
+ obj = GetMigrateTargetHost({}, {})
+ host = obj._get_migrate_host('host5')
+ mock_get_nova_client.assert_called()
+ self.assertEqual(host, 'host4')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_get_numa_info.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_numa_info.py
new file mode 100644
index 000000000..bea978b8a
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_numa_info.py
@@ -0,0 +1,103 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib.get_numa_info import GetNumaInfo
+
+
+# pylint: disable=unused-argument
+# Several of the mocked arguments below are intentionally unused: mock.patch
+# decorators are applied bottom-up, so every patched name still needs a
+# parameter slot in the test signature.
+
+
+BASE = 'yardstick.benchmark.scenarios.lib.get_numa_info'
+
+
+class GetNumaInfoTestCase(unittest.TestCase):
+
+ @mock.patch('{}.GetNumaInfo._check_numa_node'.format(BASE))
+ @mock.patch('{}.GetNumaInfo._get_current_host_name'.format(BASE))
+ @mock.patch('yardstick.benchmark.scenarios.lib.get_numa_info.yaml_load')
+ @mock.patch('yardstick.common.task_template.TaskTemplate.render')
+ def test_get_numa_info(self,
+ mock_render,
+ mock_safe_load,
+ mock_get_current_host_name,
+ mock_check_numa_node):
+ scenario_cfg = {
+ 'options': {
+ 'server': {
+ 'id': '1'
+ },
+ 'file': 'yardstick/ssh.py'
+ },
+ 'output': 'numa_info'
+ }
+ mock_safe_load.return_value = {
+ 'nodes': []
+ }
+ obj = GetNumaInfo(scenario_cfg, {})
+ obj.run({})
+ mock_get_current_host_name.assert_called_once()
+ mock_check_numa_node.assert_called_once()
+
+ @mock.patch('yardstick.ssh.SSH.from_node')
+ @mock.patch('{}.GetNumaInfo._get_current_host_name'.format(BASE))
+ @mock.patch('yardstick.benchmark.scenarios.lib.get_numa_info.yaml_load')
+ @mock.patch('yardstick.common.task_template.TaskTemplate.render')
+ def test_check_numa_node(self,
+ mock_render,
+ mock_safe_load,
+ mock_get_current_host_name,
+ mock_from_node):
+ scenario_cfg = {
+ 'options': {
+ 'server': {
+ 'id': '1'
+ },
+ 'file': 'yardstick/ssh.py'
+ },
+ 'output': 'numa_info'
+ }
+ mock_safe_load.return_value = {
+ 'nodes': []
+ }
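+ # Empty XML stands in for the domain dump the scenario reads over SSH; with no
+ # pinning elements present, _check_numa_node should report empty lists.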
+ data = """
+ <data>
+ </data>
+ """
+ mock_from_node().execute.return_value = (0, data, '')
+ obj = GetNumaInfo(scenario_cfg, {})
+ result = obj._check_numa_node('1', 'host4')
+ self.assertEqual(result, {'pinning': [], 'vcpupin': []})
+
+ @mock.patch('{}.change_obj_to_dict'.format(BASE))
+ @mock.patch('{}.get_nova_client'.format(BASE))
+ @mock.patch('yardstick.benchmark.scenarios.lib.get_numa_info.yaml_load')
+ @mock.patch('yardstick.common.task_template.TaskTemplate.render')
+ def test_get_current_host_name(self,
+ mock_render,
+ mock_safe_load,
+ mock_get_nova_client,
+ mock_change_obj_to_dict):
+ scenario_cfg = {
+ 'options': {
+ 'server': {
+ 'id': '1'
+ },
+ 'file': 'yardstick/ssh.py'
+ },
+ 'output': 'numa_info'
+ }
+ mock_get_nova_client().servers.get.return_value = ''
+ mock_change_obj_to_dict.return_value = {'OS-EXT-SRV-ATTR:host': 'host5'}
+
+ obj = GetNumaInfo(scenario_cfg, {})
+ result = obj._get_current_host_name('1')
+ self.assertEqual(result, 'host5')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_get_server.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_server.py
new file mode 100644
index 000000000..5b5329cb0
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_server.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import get_server
+
+
+class GetServerTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_get_server = mock.patch.object(
+ openstack_utils, 'get_server')
+ self.mock_get_server = self._mock_get_server.start()
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(get_server, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'name_or_id': 'yardstick_key'}}
+ self.result = {}
+
+ self.getserver_obj = get_server.GetServer(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_get_server.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.getserver_obj.scenario_cfg = {'output': 'server'}
+ self.mock_get_server.return_value = (
+ {'name': 'server-name', 'id': _uuid})
+ output = self.getserver_obj.run(self.result)
+ self.assertDictEqual({'get_server': 1}, self.result)
+ self.assertDictEqual({'server': {'name': 'server-name', 'id': _uuid}},
+ output)
+ self.mock_log.info.assert_called_once_with('Get Server successful!')
+
+ def test_run_fail(self):
+ self.mock_get_server.return_value = None
+ with self.assertRaises(exceptions.ScenarioGetServerError):
+ self.getserver_obj.run(self.result)
+ self.assertDictEqual({'get_server': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Get Server failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_get_server_ip.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_server_ip.py
new file mode 100644
index 000000000..04fca16aa
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_server_ip.py
@@ -0,0 +1,33 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+
+from yardstick.benchmark.scenarios.lib.get_server_ip import GetServerIp
+
+
+class GetServerIpTestCase(unittest.TestCase):
+ def test_get_server_ip(self):
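+ # A single 'floating' address is provided; the scenario is expected to return it
+ # under the configured 'ip' output key.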
+ scenario_cfg = {
+ 'options': {
+ 'server': {
+ 'addresses': {
+ 'net1': [
+ {
+ 'OS-EXT-IPS:type': 'floating',
+ 'addr': '127.0.0.1'
+ }
+ ]
+ }
+ }
+ },
+ 'output': 'ip'
+ }
+ obj = GetServerIp(scenario_cfg, {})
+ result = obj.run({})
+ self.assertEqual(result, {'ip': '127.0.0.1'})
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/__init__.py b/yardstick/tests/unit/benchmark/scenarios/networking/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/imix_voice.yaml b/yardstick/tests/unit/benchmark/scenarios/networking/imix_voice.yaml
new file mode 100644
index 000000000..b8f8e5358
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/imix_voice.yaml
@@ -0,0 +1,41 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# IMIX definition for voice traffic.
+#
+# This is a typical case for testing synthetic VNF performance.
+#
+# The packet-size percentages may add up to less than 100%.
+# The downstream and upstream directions may use different traffic mixes.
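+# For example, a 50/50 mix of small and 128B frames would be expressed as
+# imix_small: 50 and imix_128B: 50 in each direction.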
+
+schema: "nsb:imix:0.1"
+
+imix:
+ private:
+ imix_small: 100 #ipv4 case - 72B should be 0 ipv6 case - 84B
+ imix_128B: 0
+ imix_256B: 0
+ imix_373B: 0
+ imix_570B: 0
+ imix_1400B: 0
+ imix_1500B: 0
+
+ public:
+ imix_small: 100 #ipv4 case - 72B ipv6 - 84B
+ imix_128B: 0
+ imix_256B: 0
+ imix_373B: 0
+ imix_570B: 0
+ imix_1400B: 0
+ imix_1500B: 0
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/iperf3_sample_output.json b/yardstick/tests/unit/benchmark/scenarios/networking/iperf3_sample_output.json
new file mode 100644
index 000000000..b56009ba1
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/iperf3_sample_output.json
@@ -0,0 +1 @@
+{"start": {"connecting_to": {"host": "172.16.0.252", "port": 5201}, "timestamp": {"timesecs": 1436254758, "time": "Tue, 07 Jul 2015 07:39:18 GMT"}, "test_start": {"protocol": "TCP", "num_streams": 1, "omit": 0, "bytes": 0, "blksize": 131072, "duration": 10, "blocks": 0, "reverse": 0}, "system_info": "Linux client 3.13.0-55-generic #94-Ubuntu SMP Thu Jun 18 00:27:10 UTC 2015 x86_64 x86_64 x86_64 GNU/Linux\n", "version": "iperf 3.0.7", "connected": [{"local_host": "10.0.1.2", "local_port": 37633, "remote_host": "172.16.0.252", "socket": 4, "remote_port": 5201}], "cookie": "client.1436254758.606879.1fb328dc230", "tcp_mss_default": 1448}, "intervals": [{"sum": {"end": 1.00068, "seconds": 1.00068, "bytes": 16996624, "bits_per_second": 135881000.0, "start": 0, "retransmits": 0, "omitted": false}, "streams": [{"end": 1.00068, "socket": 4, "seconds": 1.00068, "bytes": 16996624, "bits_per_second": 135881000.0, "start": 0, "retransmits": 0, "omitted": false, "snd_cwnd": 451776}]}, {"sum": {"end": 2.00048, "seconds": 0.999804, "bytes": 20010192, "bits_per_second": 160113000.0, "start": 1.00068, "retransmits": 0, "omitted": false}, "streams": [{"end": 2.00048, "socket": 4, "seconds": 0.999804, "bytes": 20010192, "bits_per_second": 160113000.0, "start": 1.00068, "retransmits": 0, "omitted": false, "snd_cwnd": 713864}]}, {"sum": {"end": 3.00083, "seconds": 1.00035, "bytes": 18330464, "bits_per_second": 146592000.0, "start": 2.00048, "retransmits": 0, "omitted": false}, "streams": [{"end": 3.00083, "socket": 4, "seconds": 1.00035, "bytes": 18330464, "bits_per_second": 146592000.0, "start": 2.00048, "retransmits": 0, "omitted": false, "snd_cwnd": 768888}]}, {"sum": {"end": 4.00707, "seconds": 1.00624, "bytes": 19658376, "bits_per_second": 156292000.0, "start": 3.00083, "retransmits": 0, "omitted": false}, "streams": [{"end": 4.00707, "socket": 4, "seconds": 1.00624, "bytes": 19658376, "bits_per_second": 156292000.0, "start": 3.00083, "retransmits": 0, "omitted": false, "snd_cwnd": 812328}]}, {"sum": {"end": 5.00104, "seconds": 0.993972, "bytes": 15709072, "bits_per_second": 126435000.0, "start": 4.00707, "retransmits": 0, "omitted": false}, "streams": [{"end": 5.00104, "socket": 4, "seconds": 0.993972, "bytes": 15709072, "bits_per_second": 126435000.0, "start": 4.00707, "retransmits": 0, "omitted": false, "snd_cwnd": 849976}]}, {"sum": {"end": 6.00049, "seconds": 0.999443, "bytes": 19616288, "bits_per_second": 157018000.0, "start": 5.00104, "retransmits": 53, "omitted": false}, "streams": [{"end": 6.00049, "socket": 4, "seconds": 0.999443, "bytes": 19616288, "bits_per_second": 157018000.0, "start": 5.00104, "retransmits": 53, "omitted": false, "snd_cwnd": 641464}]}, {"sum": {"end": 7.00085, "seconds": 1.00036, "bytes": 22250480, "bits_per_second": 177939000.0, "start": 6.00049, "retransmits": 0, "omitted": false}, "streams": [{"end": 7.00085, "socket": 4, "seconds": 1.00036, "bytes": 22250480, "bits_per_second": 177939000.0, "start": 6.00049, "retransmits": 0, "omitted": false, "snd_cwnd": 706624}]}, {"sum": {"end": 8.00476, "seconds": 1.00391, "bytes": 22282240, "bits_per_second": 177564000.0, "start": 7.00085, "retransmits": 0, "omitted": false}, "streams": [{"end": 8.00476, "socket": 4, "seconds": 1.00391, "bytes": 22282240, "bits_per_second": 177564000.0, "start": 7.00085, "retransmits": 0, "omitted": false, "snd_cwnd": 761648}]}, {"sum": {"end": 9.0016, "seconds": 0.996847, "bytes": 19657680, "bits_per_second": 157759000.0, "start": 8.00476, "retransmits": 28, "omitted": false}, "streams": [{"end": 
9.0016, "socket": 4, "seconds": 0.996847, "bytes": 19657680, "bits_per_second": 157759000.0, "start": 8.00476, "retransmits": 28, "omitted": false, "snd_cwnd": 570512}]}, {"sum": {"end": 10.0112, "seconds": 1.00955, "bytes": 20932520, "bits_per_second": 165876000.0, "start": 9.0016, "retransmits": 0, "omitted": false}, "streams": [{"end": 10.0112, "socket": 4, "seconds": 1.00955, "bytes": 20932520, "bits_per_second": 165876000.0, "start": 9.0016, "retransmits": 0, "omitted": false, "snd_cwnd": 615400}]}], "end": {"sum_received": {"seconds": 10.0112, "start": 0, "end": 10.0112, "bytes": 193366712, "bits_per_second": 154521000.0}, "streams": [{"sender": {"end": 10.0112, "socket": 4, "seconds": 10.0112, "bytes": 195443936, "bits_per_second": 156181000.0, "start": 0, "retransmits": 81}, "receiver": {"end": 10.0112, "socket": 4, "seconds": 10.0112, "bytes": 193366712, "bits_per_second": 154521000.0, "start": 0}}], "sum_sent": {"end": 10.0112, "seconds": 10.0112, "bytes": 195443936, "bits_per_second": 156181000.0, "start": 0, "retransmits": 81}, "cpu_utilization_percent": {"remote_user": 1.10295, "remote_system": 40.0403, "host_user": 2.41785, "remote_total": 41.1438, "host_system": 5.09548, "host_total": 7.51411}}} \ No newline at end of file
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/iperf3_sample_output_udp.json b/yardstick/tests/unit/benchmark/scenarios/networking/iperf3_sample_output_udp.json
new file mode 100644
index 000000000..8173c8f64
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/iperf3_sample_output_udp.json
@@ -0,0 +1 @@
+{"start":{"connected":[{"socket":4, "local_host":"10.0.1.2", "local_port":46384, "remote_host":"172.16.9.195", "remote_port":5201}], "version":"iperf 3.0.7", "system_info":"Linux zeus 3.13.0-61-generic #100-Ubuntu SMP Wed Jul 29 11:21:34 UTC 2015 x86_64 x86_64 x86_64 GNU/Linux\n", "timestamp":{"time":"Tue, 29 Sep 2015 01:48:23 GMT", "timesecs":1443491303}, "connecting_to":{"host":"172.16.9.195", "port":5201}, "cookie":"zeus.1443491303.539703.3479129b58a5b", "test_start":{"protocol":"UDP", "num_streams":1, "blksize":8192, "omit":0, "duration":10, "bytes":0, "blocks":0, "reverse":0}}, "intervals":[{"streams":[{"socket":4, "start":0, "end":1.00022, "seconds":1.00022, "bytes":2252800, "bits_per_second":1.80184e+07, "packets":275, "omitted":false}], "sum":{"start":0, "end":1.00022, "seconds":1.00022, "bytes":2252800, "bits_per_second":1.80184e+07, "packets":275, "omitted":false}}, {"streams":[{"socket":4, "start":1.00022, "end":2.00022, "seconds":0.999993, "bytes":2498560, "bits_per_second":1.99886e+07, "packets":305, "omitted":false}], "sum":{"start":1.00022, "end":2.00022, "seconds":0.999993, "bytes":2498560, "bits_per_second":1.99886e+07, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":2.00022, "end":3.00022, "seconds":1, "bytes":2506752, "bits_per_second":2.0054e+07, "packets":306, "omitted":false}], "sum":{"start":2.00022, "end":3.00022, "seconds":1, "bytes":2506752, "bits_per_second":2.0054e+07, "packets":306, "omitted":false}}, {"streams":[{"socket":4, "start":3.00022, "end":4.00022, "seconds":1, "bytes":2498560, "bits_per_second":19988480, "packets":305, "omitted":false}], "sum":{"start":3.00022, "end":4.00022, "seconds":1, "bytes":2498560, "bits_per_second":19988480, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":4.00022, "end":5.0002, "seconds":0.999977, "bytes":2498560, "bits_per_second":1.99889e+07, "packets":305, "omitted":false}], "sum":{"start":4.00022, "end":5.0002, "seconds":0.999977, "bytes":2498560, "bits_per_second":1.99889e+07, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":5.0002, "end":6.00024, "seconds":1.00004, "bytes":2498560, "bits_per_second":1.99877e+07, "packets":305, "omitted":false}], "sum":{"start":5.0002, "end":6.00024, "seconds":1.00004, "bytes":2498560, "bits_per_second":1.99877e+07, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":6.00024, "end":7.00023, "seconds":0.999998, "bytes":2498560, "bits_per_second":1.99885e+07, "packets":305, "omitted":false}], "sum":{"start":6.00024, "end":7.00023, "seconds":0.999998, "bytes":2498560, "bits_per_second":1.99885e+07, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":7.00023, "end":8.00023, "seconds":0.999999, "bytes":2506752, "bits_per_second":2.0054e+07, "packets":306, "omitted":false}], "sum":{"start":7.00023, "end":8.00023, "seconds":0.999999, "bytes":2506752, "bits_per_second":2.0054e+07, "packets":306, "omitted":false}}, {"streams":[{"socket":4, "start":8.00023, "end":9.00018, "seconds":0.999945, "bytes":2498560, "bits_per_second":1.99896e+07, "packets":305, "omitted":false}], "sum":{"start":8.00023, "end":9.00018, "seconds":0.999945, "bytes":2498560, "bits_per_second":1.99896e+07, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":9.00018, "end":10.0002, "seconds":1.00004, "bytes":2498560, "bits_per_second":1.99876e+07, "packets":305, "omitted":false}], "sum":{"start":9.00018, "end":10.0002, "seconds":1.00004, "bytes":2498560, "bits_per_second":1.99876e+07, "packets":305, "omitted":false}}], 
"end":{"streams":[{"udp":{"socket":4, "start":0, "end":10.0002, "seconds":10.0002, "bytes":24756224, "bits_per_second":1.98045e+07, "jitter_ms":0.0113579, "lost_packets":0, "packets":3022, "lost_percent":0}}], "sum":{"start":0, "end":10.0002, "seconds":10.0002, "bytes":24756224, "bits_per_second":1.98045e+07, "jitter_ms":0.0113579, "lost_packets":0, "packets":3022, "lost_percent":0}, "cpu_utilization_percent":{"host_total":0.647561, "host_user":0.146468, "host_system":0.501083, "remote_total":0.31751, "remote_user":0, "remote_system":0.31751}}}
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/ipv4_1flow_Packets_vpe.yaml b/yardstick/tests/unit/benchmark/scenarios/networking/ipv4_1flow_Packets_vpe.yaml
new file mode 100644
index 000000000..f3046f463
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/ipv4_1flow_Packets_vpe.yaml
@@ -0,0 +1,18 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
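+# Single-flow definition used by the vPE traffic-profile fixtures in this directory
+# (e.g. ipv4_throughput_vpe.yaml fills its srcip4/dstip4 template fields from these ranges).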
+flow:
+ srcip4_range: '152.16.0.20'
+ dstip4_range: '152.40.0.20'
+ count: 1
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/ipv4_throughput_vpe.yaml b/yardstick/tests/unit/benchmark/scenarios/networking/ipv4_throughput_vpe.yaml
new file mode 100644
index 000000000..2123e4705
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/ipv4_throughput_vpe.yaml
@@ -0,0 +1,101 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Flow definition for ACL tests - 1K flows - IPv4 only.
+#
+# The number of flows defines the widest range of parameters.
+# For example, if srcip_range=1.0.0.1-1.0.0.255 and dst_ip_range=10.0.0.1-10.0.1.255,
+# it should define only 16 flows.
+#
+# The assumption is that the generated packets will carry a random sequence of
+# the following address pairs:
+# 1. src=1.x.x.x (x.x.x = random from 1..255) dst=10.x.x.x (random from 1..512)
+# 2. src=1.x.x.x (x.x.x = random from 1..255) dst=10.x.x.x (random from 1..512)
+# ...
+# 512. src=1.x.x.x (x.x.x = random from 1..255) dst=10.x.x.x (random from 1..512)
+#
+# Not every combination needs to be filled in.
+# Any other field with a random range will be added to the flow definition.
+#
+# The example.yaml provides all possibilities for traffic generation.
+#
+# The profile defines a public and a private side to provide limited traffic
+# correlation between the two sides, in the same way as the IXIA solution does.
+#
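+# For example, with the ipv4_1flow_Packets_vpe.yaml flow file in this directory,
+# the private side's srcip4 renders to '152.16.0.20' and dstip4 to '152.40.0.20'
+# (the public side swaps them), and count: 1 yields a single flow.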
+schema: "isb:traffic_profile:0.1"
+
+# This file is a template, it will be filled with values from tc.yaml before passing to the traffic generator
+
+name: rfc2544
+description: Traffic profile to run RFC2544 latency
+traffic_profile:
+ traffic_type : RFC2544Profile # defines traffic behavior - constant rate or search for the highest possible throughput
+ frame_rate : 100 # percentage of line rate
+ # that specifies a range (e.g. ipv4 address, port)
+
+
+private:
+ ipv4:
+ outer_l2:
+ framesize:
+ 64B: "{{ get(imix, 'imix.uplink.imix_small', '0') }}"
+ 128B: "{{ get(imix, 'imix.uplink.imix_128B', '0') }}"
+ 256B: "{{ get(imix, 'imix.uplink.imix_256B', '0') }}"
+ 373B: "{{ get(imix, 'imix.uplink.imix_373B', '0') }}"
+ 570B: "{{get(imix, 'imix.uplink.imix_570B', '0') }}"
+ 1400B: "{{get(imix, 'imix.uplink.imix_1400B', '0') }}"
+ 1518B: "{{get(imix, 'imix.uplink.imix_1500B', '0') }}"
+
+ QinQ:
+ S-VLAN:
+ id: 128
+ priority: 0
+ cfi: 0
+ C-VLAN:
+ id: 512
+ priority: 0
+ cfi: 0
+
+ outer_l3v4:
+ proto: "tcp"
+ srcip4: "{{get(flow, 'flow.srcip4_range', '192.168.0.0-192.168.255.255') }}"
+ dstip4: "{{get(flow, 'flow.dstip4_range', '192.16.0.0-192.16.0.31') }}"
+ ttl: 32
+ dscp: 32
+
+ outer_l4:
+ srcport: "{{get(flow, 'flow.srcport_range', '0') }}"
+ dstport: "{{get(flow, 'flow.dstport_range', '0') }}"
+public:
+ ipv4:
+ outer_l2:
+ framesize:
+ 64B: "{{ get(imix, 'imix.uplink.imix_small', '0') }}"
+ 128B: "{{ get(imix, 'imix.uplink.imix_128B', '0') }}"
+ 256B: "{{ get(imix, 'imix.uplink.imix_256B', '0') }}"
+ 373B: "{{ get(imix, 'imix.uplink.imix_373B', '0') }}"
+ 570B: "{{get(imix, 'imix.uplink.imix_570B', '0') }}"
+ 1400B: "{{get(imix, 'imix.uplink.imix_1400B', '0') }}"
+ 1518B: "{{get(imix, 'imix.uplink.imix_1500B', '0') }}"
+
+ outer_l3v4:
+ proto: "tcp"
+ srcip4: "{{get(flow, 'flow.dstip4_range', '192.16.0.0-192.16.0.31') }}"
+ dstip4: "{{get(flow, 'flow.srcip4_range', '192.168.0.0-192.168.255.255') }}"
+ ttl: 32
+ dscp: 32
+
+ outer_l4:
+ srcport: "{{get(flow, 'flow.dstport_range', '0') }}"
+ dstport: "{{get(flow, 'flow.srcport_range', '0') }}"
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/netperf_sample_output.json b/yardstick/tests/unit/benchmark/scenarios/networking/netperf_sample_output.json
new file mode 100755
index 000000000..bba76cfa5
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/netperf_sample_output.json
@@ -0,0 +1 @@
+{"mean_latency":"9.49","troughput":"823.77","troughput_unit":"10^6bits/s"} \ No newline at end of file
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/netutilization_sample_output1.txt b/yardstick/tests/unit/benchmark/scenarios/networking/netutilization_sample_output1.txt
new file mode 100644
index 000000000..f90457cb3
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/netutilization_sample_output1.txt
@@ -0,0 +1,9 @@
+Linux 3.19.0-25-generic (huawei-pod4) 07/19/2016 _x86_64_ (1 CPU)
+
+02:01:50 PM IFACE rxpck/s txpck/s rxkB/s txkB/s rxcmp/s txcmp/s rxmcst/s %ifutil
+02:01:51 PM eth0 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
+02:01:51 PM lo 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
+
+Average: IFACE rxpck/s txpck/s rxkB/s txkB/s rxcmp/s txcmp/s rxmcst/s %ifutil
+Average: eth0 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
+Average: lo 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/netutilization_sample_output2.txt b/yardstick/tests/unit/benchmark/scenarios/networking/netutilization_sample_output2.txt
new file mode 100644
index 000000000..417613ec1
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/netutilization_sample_output2.txt
@@ -0,0 +1,13 @@
+Linux 3.19.0-25-generic (huawei-pod4) 07/19/2016 _x86_64_ (1 CPU)
+
+02:01:50 PM IFACE rxpck/s txpck/s rxkB/s txkB/s rxcmp/s txcmp/s rxmcst/s %ifutil
+02:01:51 PM eth0 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
+02:01:51 PM lo 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
+
+02:01:52 PM IFACE rxpck/s txpck/s rxkB/s txkB/s rxcmp/s txcmp/s rxmcst/s %ifutil
+02:01:53 PM eth0 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
+02:01:53 PM lo 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
+
+Average: IFACE rxpck/s txpck/s rxkB/s txkB/s rxcmp/s txcmp/s rxmcst/s %ifutil
+Average: eth0 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
+Average: lo 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_iperf3.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_iperf3.py
new file mode 100644
index 000000000..5f342df7d
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_iperf3.py
@@ -0,0 +1,190 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import os
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.common import utils
+from yardstick.benchmark.scenarios.networking import iperf3
+from yardstick.common import exceptions as y_exc
+
+
+@mock.patch.object(iperf3, 'ssh')
+class IperfTestCase(unittest.TestCase):
+ output_name_tcp = 'iperf3_sample_output.json'
+ output_name_udp = 'iperf3_sample_output_udp.json'
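+ # Canned iperf3 JSON results stored alongside this module (see _read_sample_output below).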
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': 'mykey.key'
+ },
+ 'target': {
+ 'ip': '172.16.0.138',
+ 'user': 'root',
+ 'key_filename': 'mykey.key',
+ 'ipaddr': '172.16.0.138',
+ }
+ }
+ self._mock_log_info = mock.patch.object(iperf3.LOG, 'info')
+ self.mock_log_info = self._mock_log_info.start()
+ self.addCleanup(self._stop_mocks)
+
+ def _stop_mocks(self):
+ self._mock_log_info.stop()
+
+ def test_iperf_successful_setup(self, mock_ssh):
+ p = iperf3.Iperf({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ p.setup()
+ self.assertIsNotNone(p.target)
+ self.assertIsNotNone(p.host)
+ mock_ssh.SSH.from_node().execute.assert_called_with("iperf3 -s -D")
+
+ def test_iperf_unsuccessful_setup(self, mock_ssh):
+ p = iperf3.Iperf({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, p.setup)
+
+ def test_iperf_successful_teardown(self, mock_ssh):
+ p = iperf3.Iperf({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+ p.target = mock_ssh.SSH.from_node()
+
+ p.teardown()
+ mock_ssh.SSH.from_node().close.assert_called()
+ mock_ssh.SSH.from_node().execute.assert_called_with("pkill iperf3")
+
+ def test_iperf_successful_no_sla(self, mock_ssh):
+ options = {}
+ args = {'options': options}
+ result = {}
+
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.output_name_tcp)
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
+ p.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_iperf_successful_sla(self, mock_ssh):
+ options = {}
+ args = {
+ 'options': options,
+ 'sla': {'bytes_per_second': 15000000}
+ }
+ result = {}
+
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.output_name_tcp)
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
+ p.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_iperf_unsuccessful_sla(self, mock_ssh):
+ options = {}
+ args = {
+ 'options': options,
+ 'sla': {'bytes_per_second': 25000000}
+ }
+ result = {}
+
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.output_name_tcp)
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+ def test_iperf_successful_sla_jitter(self, mock_ssh):
+ options = {"protocol": "udp", "bandwidth": "20m"}
+ args = {
+ 'options': options,
+ 'sla': {'jitter': 10}
+ }
+ result = {}
+
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.output_name_udp)
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
+ p.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_iperf_unsuccessful_sla_jitter(self, mock_ssh):
+ options = {"protocol": "udp", "bandwidth": "20m"}
+ args = {
+ 'options': options,
+ 'sla': {'jitter': 0.0001}
+ }
+ result = {}
+
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.output_name_udp)
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+ def test_iperf_successful_tcp_protocol(self, mock_ssh):
+ options = {"protocol": "tcp", "nodelay": "yes"}
+ args = {
+ 'options': options,
+ 'sla': {'bytes_per_second': 15000000}
+ }
+ result = {}
+
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.output_name_tcp)
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
+ p.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_iperf_unsuccessful_script_error(self, mock_ssh):
+ options = {}
+ args = {'options': options}
+ result = {}
+
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, p.run, result)
+
+ @staticmethod
+ def _read_sample_output(filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, filename)
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_moongen_testpmd.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_moongen_testpmd.py
new file mode 100644
index 000000000..620155c7e
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_moongen_testpmd.py
@@ -0,0 +1,353 @@
+#!/usr/bin/env python
+
+# Copyright 2017 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Unittest for yardstick.benchmark.scenarios.networking.MoongenTestPMD
+
+from __future__ import absolute_import
+try:
+ from unittest import mock
+except ImportError:
+ import mock
+import unittest
+
+from yardstick.benchmark.scenarios.networking import moongen_testpmd
+
+
+@mock.patch('yardstick.benchmark.scenarios.networking.moongen_testpmd.subprocess')
+class MoongenTestPMDTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "ubuntu",
+ "password": "ubuntu",
+ },
+ }
+ self.TestPMDargs = {
+ 'task_id': "1234-5678",
+ 'options': {
+ 'multistream': 1,
+ 'frame_size': 1024,
+ 'testpmd_queue': 2,
+ 'trafficgen_port1': 'ens5',
+ 'trafficgen_port2': 'ens6',
+ 'moongen_host_user': 'root',
+ 'moongen_host_passwd': 'root',
+ 'moongen_host_ip': '10.5.201.151',
+ 'moongen_dir': '/home/lua-trafficgen',
+ 'moongen_runBidirec': 'true',
+ 'Package_Loss': 0,
+ 'SearchRuntime': 60,
+ 'moongen_port1_mac': '88:cf:98:2f:4d:ed',
+ 'moongen_port2_mac': '88:cf:98:2f:4d:ee',
+ 'forward_type': 'testpmd',
+ },
+ 'sla': {
+ 'metrics': 'throughput_rx_mpps',
+ 'throughput_rx_mpps': 0.5,
+ 'action': 'monitor',
+ }
+ }
+ self.L2fwdargs = {
+ 'task_id': "1234-5678",
+ 'options': {
+ 'multistream': 1,
+ 'frame_size': 1024,
+ 'testpmd_queue': 2,
+ 'trafficgen_port1': 'ens5',
+ 'trafficgen_port2': 'ens6',
+ 'moongen_host_user': 'root',
+ 'moongen_host_passwd': 'root',
+ 'moongen_host_ip': '10.5.201.151',
+ 'moongen_dir': '/home/lua-trafficgen',
+ 'moongen_runBidirec': 'true',
+ 'Package_Loss': 0,
+ 'SearchRuntime': 60,
+ 'moongen_port1_mac': '88:cf:98:2f:4d:ed',
+ 'moongen_port2_mac': '88:cf:98:2f:4d:ee',
+ 'forward_type': 'l2fwd',
+ },
+ 'sla': {
+ 'metrics': 'throughput_rx_mpps',
+ 'throughput_rx_mpps': 0.5,
+ 'action': 'monitor',
+ }
+ }
+
+ self._mock_ssh = mock.patch(
+ 'yardstick.benchmark.scenarios.networking.moongen_testpmd.ssh')
+ self.mock_ssh = self._mock_ssh.start()
+
+ self.addCleanup(self._cleanup)
+
+ def _cleanup(self):
+ self._mock_ssh.stop()
+
+ def test_MoongenTestPMD_setup(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ def test_MoongenTestPMD_teardown(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ p.teardown()
+ self.assertFalse(p.setup_done)
+
+ def test_MoongenTestPMD_l2fwd_is_forward_setup_no(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.L2fwdargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ # _is_forward_setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ result = p._is_forward_setup()
+ self.assertFalse(result)
+
+ def test_MoongenTestPMD_l2fwd_is_forward_setup_yes(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.L2fwdargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ # _is_forward_setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, 'dummy', '')
+
+ result = p._is_forward_setup()
+ self.assertTrue(result)
+
+ def test_MoongenTestPMD_testpmd_is_forward_setup_no(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ # _is_forward_setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, 'dummy', '')
+
+ result = p._is_forward_setup()
+ self.assertFalse(result)
+
+ def test_MoongenTestPMD_testpmd_is_forward_setup_yes(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ # _is_forward_setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ result = p._is_forward_setup()
+ self.assertTrue(result)
+
+ @mock.patch('time.sleep')
+ def test_MoongenTestPMD_testpmd_forward_setup_first(self, _, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ # _is_forward_setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, 'dummy', '')
+
+ p.forward_setup()
+ self.assertFalse(p._is_forward_setup())
+ self.assertTrue(p.forward_setup_done)
+
+ @mock.patch('time.sleep')
+ def test_MoongenTestPMD_testpmd_forward_setup_next(self, _, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ p.forward_setup()
+ self.assertTrue(p._is_forward_setup())
+ self.assertTrue(p.forward_setup_done)
+
+ @mock.patch('time.sleep')
+ def test_MoongenTestPMD_l2fwd_forward_setup_first(self, _, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.L2fwdargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ # _is_forward_setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ p.forward_setup()
+ self.assertFalse(p._is_forward_setup())
+ self.assertTrue(p.forward_setup_done)
+
+ @mock.patch('time.sleep')
+ def test_MoongenTestPMD_l2fwd_forward_setup_next(self, _, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.L2fwdargs, self.ctx)
+
+ # setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, 'dummy', '')
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ p.forward_setup()
+ self.assertTrue(p._is_forward_setup())
+ self.assertTrue(p.forward_setup_done)
+
+ def test_moongen_testpmd_generate_config_file(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.generate_config_file(frame_size=1, multistream=1,
+ runBidirec="True", tg_port1_vlan=1,
+ tg_port2_vlan=2, SearchRuntime=1,
+ Package_Loss=0)
+ self.assertTrue(p.CONFIG_FILE)
+
+ def test_moongen_testpmd_result_to_data_match(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ mock_subprocess.call().execute.return_value = None
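+ # Sample MoonGen report text; result_to_data() is expected to parse the per-device
+ # and total Tx/Rx frame counts and Mpps figures out of the [REPORT] lines.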
+ result = ("[REPORT]Device 1->0: Tx frames: 420161490 Rx Frames: 420161490"
+ " frame loss: 0, 0.000000% Rx Mpps: 7.002708\n[REPORT] "
+ "total: Tx frames: 840321216 Rx Frames: 840321216 frame loss: "
+ "0, 0.000000% Tx Mpps: 14.005388 Rx Mpps: 14.005388\n'")
+ p.result_to_data(result=result)
+ self.assertTrue(p.TO_DATA)
+
+ def test_moongen_testpmd_result_to_data_not_match(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ mock_subprocess.call().execute.return_value = None
+ result = ("")
+ p.result_to_data(result=result)
+ self.assertTrue(p.TO_DATA)
+
+ @mock.patch('time.sleep')
+ def test_moongen_testpmd_run_ok(self, _, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+ p.setup_done = True
+ p.forward_setup_done = True
+ p.setup()
+
+ # run() specific mocks
+ p.server = self.mock_ssh.SSH.from_node()
+ mock_subprocess.call().execute.return_value = None
+ result = ("[REPORT]Device 1->0: Tx frames: 420161490 Rx Frames: 420161490"
+ " frame loss: 0, 0.000000% Rx Mpps: 7.002708\n[REPORT] "
+ "total: Tx frames: 840321216 Rx Frames: 840321216 frame loss: "
+ "0, 0.000000% Tx Mpps: 14.005388 Rx Mpps: 14.005388\n'")
+ self.mock_ssh.SSH.from_node().execute.return_value = (
+ 0, result, '')
+
+ test_result = {}
+ p.run(test_result)
+
+ self.assertEqual(test_result['rx_mpps'], 14.005388)
+
+ def test_moongen_testpmd_run_failed_vsperf_execution(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ # run() specific mocks
+ mock_subprocess.call().execute.return_value = None
+ self.mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
+
+ result = {}
+ self.assertRaises(RuntimeError, p.run, result)
+
+ def test_moongen_testpmd_run_failed_csv_report(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ # run() specific mocks
+ mock_subprocess.call().execute.return_value = None
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ self.mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
+
+ result = {}
+ self.assertRaises(RuntimeError, p.run, result)
+
+def main():
+ unittest.main()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf.py
new file mode 100755
index 000000000..a7abcd98a
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf.py
@@ -0,0 +1,122 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.networking.netperf.Netperf
+
+from __future__ import absolute_import
+
+import os
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.networking import netperf
+from yardstick.common import exceptions as y_exc
+
+
+@mock.patch('yardstick.benchmark.scenarios.networking.netperf.ssh')
+class NetperfTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': 'mykey.key'
+ },
+ 'target': {
+ 'ip': '172.16.0.138',
+ 'user': 'cirros',
+ 'key_filename': 'mykey.key',
+ 'ipaddr': '172.16.0.138'
+ }
+ }
+
+ def test_netperf_successful_setup(self, mock_ssh):
+
+ p = netperf.Netperf({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ p.setup()
+ self.assertIsNotNone(p.server)
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ def test_netperf_successful_no_sla(self, mock_ssh):
+
+ options = {}
+ args = {'options': options}
+ result = {}
+
+ p = netperf.Netperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output()
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ expected_result = jsonutils.loads(sample_output)
+ p.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_netperf_successful_sla(self, mock_ssh):
+
+ options = {}
+ args = {
+ 'options': options,
+ 'sla': {'mean_latency': 100}
+ }
+ result = {}
+
+ p = netperf.Netperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output()
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ expected_result = jsonutils.loads(sample_output)
+ p.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_netperf_unsuccessful_sla(self, mock_ssh):
+
+ options = {}
+ args = {
+ 'options': options,
+ 'sla': {'mean_latency': 5}
+ }
+ result = {}
+
+ p = netperf.Netperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output()
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+ def test_netperf_unsuccessful_script_error(self, mock_ssh):
+
+ options = {}
+ args = {'options': options}
+ result = {}
+
+ p = netperf.Netperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, p.run, result)
+
+ def _read_sample_output(self):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, 'netperf_sample_output.json')
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf_node.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf_node.py
new file mode 100755
index 000000000..a577dba59
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf_node.py
@@ -0,0 +1,122 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for
+# yardstick.benchmark.scenarios.networking.netperf_node.NetperfNode
+
+from __future__ import absolute_import
+
+import os
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.networking import netperf_node
+from yardstick.common import exceptions as y_exc
+
+
+@mock.patch('yardstick.benchmark.scenarios.networking.netperf_node.ssh')
+class NetperfNodeTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '192.168.10.10',
+ 'user': 'root',
+ 'password': 'root'
+ },
+ 'target': {
+ 'ip': '192.168.10.11',
+ 'user': 'root',
+ 'password': 'root'
+ }
+ }
+
+ def test_netperf_node_successful_setup(self, mock_ssh):
+
+ p = netperf_node.NetperfNode({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ p.setup()
+ self.assertIsNotNone(p.server)
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ def test_netperf_node_successful_no_sla(self, mock_ssh):
+
+ options = {}
+ args = {'options': options}
+ result = {}
+
+ p = netperf_node.NetperfNode(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output()
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ expected_result = jsonutils.loads(sample_output)
+ p.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_netperf_node_successful_sla(self, mock_ssh):
+
+ options = {}
+ args = {
+ 'options': options,
+ 'sla': {'mean_latency': 100}
+ }
+ result = {}
+
+ p = netperf_node.NetperfNode(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output()
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ expected_result = jsonutils.loads(sample_output)
+ p.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_netperf_node_unsuccessful_sla(self, mock_ssh):
+
+ options = {}
+ args = {
+ 'options': options,
+ 'sla': {'mean_latency': 5}
+ }
+ result = {}
+
+ p = netperf_node.NetperfNode(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output()
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+ def test_netperf_node_unsuccessful_script_error(self, mock_ssh):
+
+ options = {}
+ args = {'options': options}
+ result = {}
+
+ p = netperf_node.NetperfNode(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, p.run, result)
+
+ def _read_sample_output(self):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, 'netperf_sample_output.json')
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_netutilization.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_netutilization.py
new file mode 100644
index 000000000..4cdfde6b1
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_netutilization.py
@@ -0,0 +1,225 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and other.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for
+# yardstick.benchmark.scenarios.networking.netutilization.NetUtilization
+
+from __future__ import absolute_import
+import mock
+import unittest
+import os
+
+from yardstick.benchmark.scenarios.networking import netutilization
+
+
+@mock.patch('yardstick.benchmark.scenarios.networking.netutilization.ssh')
+class NetUtilizationTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ def test_setup_success(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+ args = {'options': options}
+
+ n = netutilization.NetUtilization(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ n.setup()
+ self.assertIsNotNone(n.client)
+ self.assertTrue(n.setup_done)
+
+ def test_execute_command_success(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+ args = {'options': options}
+
+ n = netutilization.NetUtilization(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ n.setup()
+
+ expected_result = 'abcdefg'
+ mock_ssh.SSH.from_node().execute.return_value = (0, expected_result, '')
+ result = n._execute_command("foo")
+ self.assertEqual(result, expected_result)
+
+ def test_execute_command_failed(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+ args = {'options': options}
+
+ n = netutilization.NetUtilization(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ n.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = (127, '', 'abcdefg')
+ self.assertRaises(RuntimeError, n._execute_command,
+ "failed")
+
+ def test_get_network_utilization_success(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+ args = {'options': options}
+
+ n = netutilization.NetUtilization(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ n.setup()
+
+ mpstat_output = self._read_file("netutilization_sample_output1.txt")
+ mock_ssh.SSH.from_node().execute.return_value = (0, mpstat_output, '')
+ result = n._get_network_utilization()
+
+ expected_result = \
+ {"network_utilization_maximun": {
+ "lo": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"},
+ "eth0": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"}},
+ "network_utilization_average": {
+ "lo": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"},
+ "eth0": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"}},
+ "network_utilization_minimum": {
+ "lo": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"},
+ "eth0": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"}}}
+
+ self.assertDictEqual(result, expected_result)
+
+ def test_get_network_utilization_2_success(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 2
+ }
+ args = {'options': options}
+
+ n = netutilization.NetUtilization(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ n.setup()
+
+ mpstat_output = self._read_file("netutilization_sample_output2.txt")
+ mock_ssh.SSH.from_node().execute.return_value = (0, mpstat_output, '')
+ result = n._get_network_utilization()
+
+ expected_result = \
+ {"network_utilization_maximun": {
+ "lo": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"},
+ "eth0": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"}},
+ "network_utilization_average": {
+ "lo": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"},
+ "eth0": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"}},
+ "network_utilization_minimum": {
+ "lo": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"},
+ "eth0": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"}}}
+
+ self.assertDictEqual(result, expected_result)
+
+ def _read_file(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, filename)
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_networkcapacity.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_networkcapacity.py
new file mode 100644
index 000000000..36e8c8a77
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_networkcapacity.py
@@ -0,0 +1,59 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for
+# yardstick.benchmark.scenarios.networking.networkcapacity.NetworkCapacity
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.networking import networkcapacity
+
+SAMPLE_OUTPUT = \
+ '{"Number of connections":"308","Number of frames received": "166503"}'
+
+
+@mock.patch('yardstick.benchmark.scenarios.networking.networkcapacity.ssh')
+class NetworkCapacityTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'password': "root"
+ },
+ }
+
+ self.result = {}
+
+ def test_capacity_successful_setup(self, mock_ssh):
+ c = networkcapacity.NetworkCapacity({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+ self.assertIsNotNone(c.client)
+ self.assertTrue(c.setup_done)
+
+ def test_capacity_successful(self, mock_ssh):
+ c = networkcapacity.NetworkCapacity({}, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, SAMPLE_OUTPUT, '')
+ c.run(self.result)
+ expected_result = jsonutils.loads(SAMPLE_OUTPUT)
+ self.assertEqual(self.result, expected_result)
+
+ def test_capacity_unsuccessful_script_error(self, mock_ssh):
+ c = networkcapacity.NetworkCapacity({}, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, c.run, self.result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_nstat.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_nstat.py
new file mode 100644
index 000000000..b02d58437
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_nstat.py
@@ -0,0 +1,105 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.networking import nstat
+
+
+@mock.patch('yardstick.benchmark.scenarios.networking.nstat.ssh')
+class NstatTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ "host": {
+ "ip": "192.168.50.28",
+ "user": "root",
+ "key_filename": "mykey.key"
+ }
+ }
+
+ def test_nstat_successful_setup(self, mock_ssh):
+
+ n = nstat.Nstat({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ n.setup()
+
+ self.assertIsNotNone(n.client)
+ self.assertTrue(n.setup_done)
+
+ def test_nstat_successful_no_sla(self, mock_ssh):
+
+ options = {
+ "duration": 0
+ }
+ args = {
+ "options": options,
+ }
+ n = nstat.Nstat(args, self.ctx)
+ result = {}
+
+ sample_output = '#kernel\nIpInReceives 1837 0.0\nIpInHdrErrors 0 0.0\nIpInAddrErrors 2 0.0\nIcmpInMsgs 319 0.0\nIcmpInErrors 0 0.0\nTcpInSegs 36 0.0\nTcpInErrs 0 0.0\nUdpInDatagrams 1318 0.0\nUdpInErrors 0 0.0\n' # pylint: disable=line-too-long
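+ # The expected rates below are derived from these counters, e.g.
+ # IP_datagram_error_rate ~= IpErrors / IpInReceives = 2 / 1837 ~= 0.001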
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ n.run(result)
+ expected_result = {"TcpInErrs": 0, "UdpInDatagrams": 1318,
+ "Tcp_segment_error_rate": 0.0, "IpInAddrErrors": 2,
+ "IpInHdrErrors": 0, "IcmpInErrors": 0, "IpErrors": 2,
+ "TcpInSegs": 36, "IpInReceives": 1837, "IcmpInMsgs": 319,
+ "IP_datagram_error_rate": 0.001, "Udp_datagram_error_rate": 0.0,
+ "Icmp_message_error_rate": 0.0, "UdpInErrors": 0}
+ self.assertEqual(result, expected_result)
+
+ def test_nstat_successful_sla(self, mock_ssh):
+
+ options = {
+ "duration": 0
+ }
+ sla = {
+ "IP_datagram_error_rate": 0.1
+ }
+ args = {
+ "options": options,
+ "sla": sla
+ }
+ n = nstat.Nstat(args, self.ctx)
+ result = {}
+
+ sample_output = '#kernel\nIpInReceives 1837 0.0\nIpInHdrErrors 0 0.0\nIpInAddrErrors 2 0.0\nIcmpInMsgs 319 0.0\nIcmpInErrors 0 0.0\nTcpInSegs 36 0.0\nTcpInErrs 0 0.0\nUdpInDatagrams 1318 0.0\nUdpInErrors 0 0.0\n' # pylint: disable=line-too-long
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ n.run(result)
+ expected_result = {"TcpInErrs": 0, "UdpInDatagrams": 1318,
+ "Tcp_segment_error_rate": 0.0, "IpInAddrErrors": 2,
+ "IpInHdrErrors": 0, "IcmpInErrors": 0, "IpErrors": 2,
+ "TcpInSegs": 36, "IpInReceives": 1837, "IcmpInMsgs": 319,
+ "IP_datagram_error_rate": 0.001, "Udp_datagram_error_rate": 0.0,
+ "Icmp_message_error_rate": 0.0, "UdpInErrors": 0}
+ self.assertEqual(result, expected_result)
+
+ def test_nstat_unsuccessful_cmd_error(self, mock_ssh):
+
+ options = {
+ "duration": 0
+ }
+ sla = {
+ "IP_datagram_error_rate": 0.1
+ }
+ args = {
+ "options": options,
+ "sla": sla
+ }
+ n = nstat.Nstat(args, self.ctx)
+ result = {}
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, n.run, result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_ping.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_ping.py
new file mode 100644
index 000000000..944202658
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_ping.py
@@ -0,0 +1,107 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.networking.ping.Ping
+
+from __future__ import absolute_import
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.networking import ping
+from yardstick.common import exceptions as y_exc
+
+
+class PingTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key"
+ },
+ "target": {
+ "ipaddr": "10.229.17.105",
+ }
+ }
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
+ def test_ping_successful_no_sla(self, mock_ssh):
+
+ args = {
+ 'options': {'packetsize': 200},
+ 'target': 'ares.demo'
+ }
+ result = {}
+
+ p = ping.Ping(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '100', '')
+ p.run(result)
+ self.assertEqual(result, {'rtt.ares': 100.0})
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
+ def test_ping_successful_sla(self, mock_ssh):
+
+ args = {
+ 'options': {'packetsize': 200},
+ 'sla': {'max_rtt': 150},
+ 'target': 'ares.demo'
+ }
+ result = {}
+
+ p = ping.Ping(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '100', '')
+ p.run(result)
+ self.assertEqual(result, {'rtt.ares': 100.0})
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
+ def test_ping_unsuccessful_sla(self, mock_ssh):
+
+ args = {
+ 'options': {'packetsize': 200},
+ 'sla': {'max_rtt': 50},
+ 'target': 'ares.demo'
+ }
+ result = {}
+
+ p = ping.Ping(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '100', '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
+ def test_ping_unsuccessful_script_error(self, mock_ssh):
+
+ args = {
+ 'options': {'packetsize': 200},
+ 'sla': {'max_rtt': 50},
+ 'target': 'ares.demo'
+ }
+ result = {}
+
+ p = ping.Ping(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, p.run, result)
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
+ def test_ping_unsuccessful_no_sla(self, mock_ssh):
+
+ args = {
+ 'options': {'packetsize': 200},
+ 'target': 'ares.demo'
+ }
+ result = {}
+
+ p = ping.Ping(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_ping6.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_ping6.py
new file mode 100644
index 000000000..ad5217a14
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_ping6.py
@@ -0,0 +1,117 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.networking.ping.Ping
+
+from __future__ import absolute_import
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.networking import ping6
+from yardstick.common import exceptions as y_exc
+
+
+class PingTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'nodes': {
+ 'host1': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'role': "Controller",
+ 'key_filename': "mykey.key",
+ 'password': "root"
+ },
+ 'host2': {
+ "ip": "172.16.0.138",
+ "key_filename": "/root/.ssh/id_rsa",
+ "role": "Compute",
+ "name": "node3.IPV6",
+ "user": "root"
+ },
+ }
+ }
+
+ def test_get_controller_node(self):
+ args = {
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
+ 'sla': {'max_rtt': 50}
+ }
+ p = ping6.Ping6(args, self.ctx)
+ controller_node = p._get_controller_node(['host1', 'host2'])
+ self.assertEqual(controller_node, 'host1')
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
+ def test_ping_successful_setup(self, mock_ssh):
+ args = {
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
+ 'sla': {'max_rtt': 50}
+ }
+ p = ping6.Ping6(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '0', '')
+ p.setup()
+
+ self.assertTrue(p.setup_done)
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
+ def test_ping_successful_no_sla(self, mock_ssh):
+ args = {
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
+
+ }
+ result = {}
+
+ p = ping6.Ping6(args, self.ctx)
+ p.client = mock_ssh.SSH.from_node()
+ mock_ssh.SSH.from_node().execute.side_effect = [(0, 'host1', ''), (0, 100, '')]
+ p.run(result)
+ self.assertEqual(result, {'rtt': 100.0})
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
+ def test_ping_successful_sla(self, mock_ssh):
+ args = {
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
+ 'sla': {'max_rtt': 150}
+ }
+ result = {}
+
+ p = ping6.Ping6(args, self.ctx)
+ p.client = mock_ssh.SSH.from_node()
+ mock_ssh.SSH.from_node().execute.side_effect = [(0, 'host1', ''), (0, 100, '')]
+ p.run(result)
+ self.assertEqual(result, {'rtt': 100.0})
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
+ def test_ping_unsuccessful_sla(self, mock_ssh):
+ args = {
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
+ 'sla': {'max_rtt': 50}
+ }
+ result = {}
+
+ p = ping6.Ping6(args, self.ctx)
+ p.client = mock_ssh.SSH.from_node()
+ mock_ssh.SSH.from_node().execute.side_effect = [(0, 'host1', ''), (0, 100, '')]
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
+ def test_ping_unsuccessful_script_error(self, mock_ssh):
+
+ args = {
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
+ 'sla': {'max_rtt': 150}
+ }
+ result = {}
+
+ p = ping6.Ping6(args, self.ctx)
+ p.client = mock_ssh.SSH.from_node()
+ mock_ssh.SSH.from_node().execute.side_effect = [
+ (0, 'host1', ''), (1, '', 'FOOBAR')]
+ self.assertRaises(RuntimeError, p.run, result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen.py
new file mode 100644
index 000000000..5761e2403
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen.py
@@ -0,0 +1,453 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+import logging
+
+from oslo_serialization import jsonutils
+
+from yardstick import ssh
+from yardstick.benchmark.scenarios.networking import pktgen
+from yardstick.common import exceptions as y_exc
+
+
+logging.disable(logging.CRITICAL)
+
+
+class PktgenTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.context_cfg = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': 'mykey.key'
+ },
+ 'target': {
+ 'ip': '172.16.0.138',
+ 'user': 'root',
+ 'key_filename': 'mykey.key',
+ 'ipaddr': '172.16.0.138'
+ }
+ }
+ self.scenario_cfg = {
+ 'options': {'packetsize': 60}
+ }
+
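+ # Patch yardstick.ssh.SSH at the object level so every scenario built in
+ # these tests talks to a MagicMock instead of opening a real SSH session;
+ # addCleanup() below stops the patcher after each test.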
+ self._mock_SSH = mock.patch.object(ssh, 'SSH')
+ self.mock_SSH = self._mock_SSH.start()
+
+ self.mock_SSH.from_node().execute.return_value = (0, '', '')
+ self.mock_SSH.from_node().run.return_value = 0
+
+ self.addCleanup(self._stop_mock)
+
+ self.scenario = pktgen.Pktgen(self.scenario_cfg, self.context_cfg)
+ self.scenario.setup()
+
+ def _stop_mock(self):
+ self._mock_SSH.stop()
+
+ def test_setup_successful(self):
+ self.assertIsNotNone(self.scenario.server)
+ self.assertIsNotNone(self.scenario.client)
+ self.assertTrue(self.scenario.setup_done)
+
+ def test_iptables_setup_successful(self):
+ self.scenario.number_of_ports = 10
+ self.scenario._iptables_setup()
+
+ self.mock_SSH.from_node().run.assert_called_with(
+ "sudo iptables -F; "
+ "sudo iptables -A INPUT -p udp --dport 1000:%s -j DROP"
+ % 1010, timeout=60)
+
+ def test_iptables_setup_unsuccessful(self):
+ self.scenario.number_of_ports = 10
+ self.mock_SSH.from_node().run.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._iptables_setup()
+
+ def test_iptables_get_result_successful(self):
+ self.scenario.number_of_ports = 10
+ self.mock_SSH.from_node().execute.return_value = (0, '150000', '')
+
+ result = self.scenario._iptables_get_result()
+
+ self.assertEqual(result, 150000)
+ self.mock_SSH.from_node().execute.assert_called_with(
+ "sudo iptables -L INPUT -vnx |"
+ "awk '/dpts:1000:%s/ {{printf \"%%s\", $1}}'"
+ % 1010, raise_on_error=True)
+
+ def test_iptables_get_result_unsuccessful(self):
+ self.scenario.number_of_ports = 10
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._iptables_get_result()
+
+ def test_run_successful_no_sla(self):
+ self.scenario._iptables_get_result = mock.Mock(return_value=149300)
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149776,
+ "packetsize": 60,
+ "flows": 110,
+ "ppm": 3179})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ result = {}
+ self.scenario.run(result)
+
+ expected_result = jsonutils.loads(sample_output)
+ expected_result["packets_received"] = 149300
+ expected_result["packetsize"] = 60
+ self.assertEqual(result, expected_result)
+
+ def test_run_successful_sla(self):
+ self.scenario_cfg['sla'] = {'max_ppm': 10000}
+ scenario = pktgen.Pktgen(self.scenario_cfg, self.context_cfg)
+ scenario.setup()
+ scenario._iptables_get_result = mock.Mock(return_value=149300)
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149776,
+ "packetsize": 60,
+ "flows": 110,
+ "ppm": 3179})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ result = {}
+ scenario.run(result)
+
+ expected_result = jsonutils.loads(sample_output)
+ expected_result["packets_received"] = 149300
+ expected_result["packetsize"] = 60
+ self.assertEqual(result, expected_result)
+
+ def test_run_unsuccessful_sla(self):
+ self.scenario_cfg['sla'] = {'max_ppm': 1000}
+ scenario = pktgen.Pktgen(self.scenario_cfg, self.context_cfg)
+ scenario.setup()
+ scenario._iptables_get_result = mock.Mock(return_value=149300)
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149776,
+ "packetsize": 60,
+ "flows": 110})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ with self.assertRaises(y_exc.SLAValidationError):
+ scenario.run({})
+
+ def test_run_ssh_error_not_caught(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario.run({})
+
+ def test_get_vnic_driver_name(self):
+ self.mock_SSH.from_node().execute.return_value = (0, 'ixgbevf', '')
+ vnic_driver_name = self.scenario._get_vnic_driver_name()
+
+ self.assertEqual(vnic_driver_name, 'ixgbevf')
+
+ def test_get_vnic_driver_name_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._get_vnic_driver_name()
+
+ def test_get_sriov_queue_number(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '2', '')
+
+ self.scenario.queue_number = self.scenario._get_sriov_queue_number()
+ self.assertEqual(self.scenario.queue_number, 2)
+
+ def test_get_sriov_queue_number_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._get_sriov_queue_number()
+
+ def test_get_available_queue_number(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '4', '')
+
+ self.assertEqual(self.scenario._get_available_queue_number(), 4)
+ self.mock_SSH.from_node().execute.assert_called_with(
+ "sudo ethtool -l eth0 | grep Combined | head -1 |"
+ "awk '{printf $2}'", raise_on_error=True)
+
+ def test_get_available_queue_number_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._get_available_queue_number()
+
+ def test_get_usable_queue_number(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '1', '')
+
+ self.assertEqual(self.scenario._get_usable_queue_number(), 1)
+ self.mock_SSH.from_node().execute.assert_called_with(
+ "sudo ethtool -l eth0 | grep Combined | tail -1 |"
+ "awk '{printf $2}'", raise_on_error=True)
+
+ def test_get_usable_queue_number_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._get_usable_queue_number()
+
+ def test_enable_ovs_multiqueue(self):
+ self.scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ self.scenario._get_available_queue_number = mock.Mock(return_value=4)
+ self.scenario.queue_number = self.scenario._enable_ovs_multiqueue()
+
+ self.assertEqual(self.scenario.queue_number, 4)
+ self.mock_SSH.from_node().run.assert_has_calls(
+ (mock.call("sudo ethtool -L eth0 combined 4"),
+ mock.call("sudo ethtool -L eth0 combined 4")))
+
+ def test_enable_ovs_multiqueue_1q(self):
+ self.scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ self.scenario._get_available_queue_number = mock.Mock(return_value=1)
+ self.scenario.queue_number = self.scenario._enable_ovs_multiqueue()
+
+ self.assertEqual(self.scenario.queue_number, 1)
+ self.mock_SSH.from_node().run.assert_not_called()
+
+ def test_enable_ovs_multiqueue_unsuccessful(self):
+ self.mock_SSH.from_node().run.side_effect = y_exc.SSHError
+ self.scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ self.scenario._get_available_queue_number = mock.Mock(return_value=4)
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._enable_ovs_multiqueue()
+
+ def test_setup_irqmapping_ovs(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '10', '')
+ self.scenario._setup_irqmapping_ovs(4)
+
+ self.mock_SSH.from_node().run.assert_called_with(
+ "echo 8 | sudo tee /proc/irq/10/smp_affinity")
+
+ def test_setup_irqmapping_ovs_1q(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '10', '')
+ self.scenario._setup_irqmapping_ovs(1)
+
+ self.mock_SSH.from_node().run.assert_called_with(
+ "echo 1 | sudo tee /proc/irq/10/smp_affinity")
+
+ def test_setup_irqmapping_ovs_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._setup_irqmapping_ovs(4)
+
+ def test_setup_irqmapping_ovs_1q_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._setup_irqmapping_ovs(1)
+
+ def test_setup_irqmapping_sriov(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '10', '')
+ self.scenario._setup_irqmapping_sriov(2)
+
+ self.mock_SSH.from_node().run.assert_called_with(
+ "echo 2 | sudo tee /proc/irq/10/smp_affinity")
+
+ def test_setup_irqmapping_sriov_1q(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '10', '')
+ self.scenario._setup_irqmapping_sriov(1)
+
+ self.mock_SSH.from_node().run.assert_called_with(
+ "echo 1 | sudo tee /proc/irq/10/smp_affinity")
+
+ def test_setup_irqmapping_sriov_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._setup_irqmapping_sriov(2)
+
+ def test_setup_irqmapping_sriov_1q_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._setup_irqmapping_sriov(1)
+
+ def test_is_irqbalance_disabled(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '', '')
+
+ self.assertFalse(self.scenario._is_irqbalance_disabled())
+ self.mock_SSH.from_node().execute.assert_called_with(
+ "grep ENABLED /etc/default/irqbalance", raise_on_error=True)
+
+ def test_is_irqbalance_disabled_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._is_irqbalance_disabled()
+
+ def test_disable_irqbalance(self):
+ self.scenario._disable_irqbalance()
+
+ self.mock_SSH.from_node().run.assert_called_with(
+ "sudo service irqbalance disable")
+
+ def test_disable_irqbalance_unsuccessful(self):
+ self.mock_SSH.from_node().run.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._disable_irqbalance()
+
+ def test_multiqueue_setup_ovs(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '4', '')
+ self.scenario._is_irqbalance_disabled = mock.Mock(return_value=False)
+ self.scenario._get_vnic_driver_name = mock.Mock(
+ return_value="virtio_net")
+ self.scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ self.scenario._get_available_queue_number = mock.Mock(return_value=4)
+
+ self.scenario.multiqueue_setup()
+
+ self.assertEqual(self.scenario.queue_number, 4)
+ self.assertTrue(self.scenario.multiqueue_setup_done)
+
+ def test_multiqueue_setup_ovs_1q(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '1', '')
+ self.scenario._is_irqbalance_disabled = mock.Mock(return_value=False)
+ self.scenario._get_vnic_driver_name = mock.Mock(
+ return_value="virtio_net")
+ self.scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ self.scenario._get_available_queue_number = mock.Mock(return_value=1)
+
+ self.scenario.multiqueue_setup()
+
+ self.assertEqual(self.scenario.queue_number, 1)
+ self.assertTrue(self.scenario.multiqueue_setup_done)
+
+ def test_multiqueue_setup_sriov(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '2', '')
+ self.scenario._is_irqbalance_disabled = mock.Mock(return_value=False)
+ self.scenario._get_vnic_driver_name = mock.Mock(return_value="ixgbevf")
+
+ self.scenario.multiqueue_setup()
+
+ self.assertEqual(self.scenario.queue_number, 2)
+ self.assertTrue(self.scenario.multiqueue_setup_done)
+
+ def test_multiqueue_setup_sriov_1q(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '1', '')
+ self.scenario._is_irqbalance_disabled = mock.Mock(return_value=False)
+ self.scenario._get_vnic_driver_name = mock.Mock(return_value="ixgbevf")
+
+ self.scenario.multiqueue_setup()
+
+ self.assertEqual(self.scenario.queue_number, 1)
+ self.assertTrue(self.scenario.multiqueue_setup_done)
+
+ def test_run_with_setup_done(self):
+ scenario_cfg = {
+ 'options': {
+ 'packetsize': 60,
+ 'number_of_ports': 10,
+ 'duration': 20,
+ 'multiqueue': True},
+ 'sla': {
+ 'max_ppm': 1}
+ }
+ scenario = pktgen.Pktgen(scenario_cfg, self.context_cfg)
+ scenario.server = self.mock_SSH.from_node()
+ scenario.client = self.mock_SSH.from_node()
+ scenario.setup_done = True
+ scenario.multiqueue_setup_done = True
+ scenario._iptables_get_result = mock.Mock(return_value=149300)
+
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149300,
+ "flows": 110,
+ "ppm": 0})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ result = {}
+ scenario.run(result)
+
+ expected_result = jsonutils.loads(sample_output)
+ expected_result["packets_received"] = 149300
+ expected_result["packetsize"] = 60
+ self.assertEqual(result, expected_result)
+
+ def test_run_with_ovs_multiqueue(self):
+ scenario_cfg = {
+ 'options': {
+ 'packetsize': 60,
+ 'number_of_ports': 10,
+ 'duration': 20,
+ 'multiqueue': True},
+ 'sla': {'max_ppm': 1}
+ }
+ scenario = pktgen.Pktgen(scenario_cfg, self.context_cfg)
+ scenario.setup()
+ scenario._get_vnic_driver_name = mock.Mock(return_value="virtio_net")
+ scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ scenario._get_available_queue_number = mock.Mock(return_value=4)
+ scenario._enable_ovs_multiqueue = mock.Mock(return_value=4)
+ scenario._setup_irqmapping_ovs = mock.Mock()
+ scenario._iptables_get_result = mock.Mock(return_value=149300)
+
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149300,
+ "flows": 110,
+ "ppm": 0})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ result = {}
+ scenario.run(result)
+
+ expected_result = jsonutils.loads(sample_output)
+ expected_result["packets_received"] = 149300
+ expected_result["packetsize"] = 60
+ self.assertEqual(result, expected_result)
+
+ def test_run_with_sriov_multiqueue(self):
+ scenario_cfg = {
+ 'options': {
+ 'packetsize': 60,
+ 'number_of_ports': 10,
+ 'duration': 20,
+ 'multiqueue': True},
+ 'sla': {'max_ppm': 1}
+ }
+ scenario = pktgen.Pktgen(scenario_cfg, self.context_cfg)
+ scenario.setup()
+ scenario._get_vnic_driver_name = mock.Mock(return_value="ixgbevf")
+ scenario._get_sriov_queue_number = mock.Mock(return_value=2)
+ scenario._setup_irqmapping_sriov = mock.Mock()
+ scenario._iptables_get_result = mock.Mock(return_value=149300)
+
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149300,
+ "flows": 110,
+ "ppm": 0})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ result = {}
+ scenario.run(result)
+
+ expected_result = jsonutils.loads(sample_output)
+ expected_result["packets_received"] = 149300
+ expected_result["packetsize"] = 60
+ self.assertEqual(result, expected_result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py
new file mode 100644
index 000000000..70cd8ad40
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py
@@ -0,0 +1,136 @@
+##############################################################################
+# Copyright (c) 2015 ZTE and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+import time
+import logging
+
+import yardstick.common.utils as utils
+from yardstick import ssh
+from yardstick.benchmark.scenarios.networking import pktgen_dpdk
+from yardstick.common import exceptions as y_exc
+
+
+logging.disable(logging.CRITICAL)
+
+
+class PktgenDPDKLatencyTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.context_cfg = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': 'mykey.key'
+ },
+ 'target': {
+ 'ip': '172.16.0.138',
+ 'user': 'root',
+ 'key_filename': 'mykey.key',
+ 'ipaddr': '172.16.0.138'
+ }
+ }
+ self.scenario_cfg = {
+ 'options': {'packetsize': 60}
+ }
+
+ self._mock_SSH = mock.patch.object(ssh, 'SSH')
+ self.mock_SSH = self._mock_SSH.start()
+
+ self._mock_time_sleep = mock.patch.object(time, 'sleep')
+ self.mock_time_sleep = self._mock_time_sleep.start()
+
+ self._mock_utils_get_port_ip = mock.patch.object(utils, 'get_port_ip')
+ self.mock_utils_get_port_ip = self._mock_utils_get_port_ip.start()
+
+ self._mock_utils_get_port_mac = mock.patch.object(utils,
+ 'get_port_mac')
+ self.mock_utils_get_port_mac = self._mock_utils_get_port_mac.start()
+
+ self.mock_SSH.from_node().execute.return_value = (0, '', '')
+
+ self.addCleanup(self._stop_mock)
+
+ self.scenario = pktgen_dpdk.PktgenDPDKLatency(self.scenario_cfg,
+ self.context_cfg)
+ self.scenario.server = self.mock_SSH.from_node()
+ self.scenario.client = self.mock_SSH.from_node()
+
+ def _stop_mock(self):
+ self._mock_SSH.stop()
+ self._mock_time_sleep.stop()
+ self._mock_utils_get_port_ip.stop()
+ self._mock_utils_get_port_mac.stop()
+
+ def test_setup(self):
+ scenario = pktgen_dpdk.PktgenDPDKLatency(self.scenario_cfg,
+ self.context_cfg)
+ scenario.setup()
+
+ self.assertIsNotNone(scenario.server)
+ self.assertIsNotNone(scenario.client)
+ self.assertTrue(scenario.setup_done)
+
+ def test_run_get_port_ip_command(self):
+ self.scenario.run({})
+
+ self.mock_utils_get_port_ip.assert_has_calls(
+ [mock.call(self.scenario.server, 'ens4'),
+ mock.call(self.scenario.server, 'ens5')])
+
+ def test_get_port_mac_command(self):
+ self.scenario.run({})
+
+ self.mock_utils_get_port_mac.assert_has_calls(
+ [mock.call(self.scenario.server, 'ens5'),
+ mock.call(self.scenario.server, 'ens4'),
+ mock.call(self.scenario.server, 'ens5')])
+
+ def test_run_no_sla(self):
+ sample_output = '100\n110\n112\n130\n149\n150\n90\n150\n200\n162\n'
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ result = {}
+ self.scenario.run(result)
+ # Python 3 true division returns a float average, so a strict comparison
+ # would fail with:
+ # AssertionError: {'avg_latency': 132.33333333333334} != {'avg_latency': 132}
+ # Compare against the expected value with a tolerance of 1 instead.
+ delta = result['avg_latency'] - 132
+ self.assertLessEqual(delta, 1)
+
+ def test_run_sla(self):
+ self.scenario_cfg['sla'] = {'max_latency': 100}
+ scenario = pktgen_dpdk.PktgenDPDKLatency(self.scenario_cfg,
+ self.context_cfg)
+
+ sample_output = '100\n100\n100\n100\n100\n100\n100\n100\n100\n100\n'
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ result = {}
+ scenario.run(result)
+
+ self.assertEqual(result, {"avg_latency": 100})
+
+ def test_run_sla_error(self):
+ self.scenario_cfg['sla'] = {'max_latency': 100}
+ scenario = pktgen_dpdk.PktgenDPDKLatency(self.scenario_cfg,
+ self.context_cfg)
+
+ sample_output = '100\n110\n112\n130\n149\n150\n90\n150\n200\n162\n'
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ with self.assertRaises(y_exc.SLAValidationError):
+ scenario.run({})
+
+ def test_run_last_command_raise_on_error(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario.run({})
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk_throughput.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk_throughput.py
new file mode 100644
index 000000000..39392e4bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk_throughput.py
@@ -0,0 +1,194 @@
+##############################################################################
+# Copyright (c) 2017 Nokia and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.networking.pktgen.PktgenDPDK
+
+from __future__ import absolute_import
+import unittest
+
+from oslo_serialization import jsonutils
+import mock
+
+from yardstick.benchmark.scenarios.networking import pktgen_dpdk_throughput
+from yardstick.common import exceptions as y_exc
+
+
+# pylint: disable=unused-argument
+# disable this for now because I keep forgetting mock patch arg ordering
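+# (stacked @mock.patch decorators hand their mocks to the test method
+# bottom-up, so the innermost patch is the first extra positional argument)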
+
+
+@mock.patch('yardstick.benchmark.scenarios.networking.pktgen_dpdk_throughput.ssh')
+class PktgenDPDKTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': 'mykey.key'
+ },
+ 'target': {
+ 'ip': '172.16.0.138',
+ 'user': 'root',
+ 'key_filename': 'mykey.key',
+ }
+ }
+
+ self._mock_time = mock.patch(
+ 'yardstick.benchmark.scenarios.networking.pktgen_dpdk_throughput.time')
+ self.mock_time = self._mock_time.start()
+
+ self.addCleanup(self._cleanup)
+
+ def _cleanup(self):
+ self._mock_time.stop()
+
+ def test_pktgen_dpdk_throughput_successful_setup(self, mock_ssh):
+ args = {
+ 'options': {'packetsize': 60},
+ }
+ p = pktgen_dpdk_throughput.PktgenDPDK(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.setup()
+
+ self.assertIsNotNone(p.server)
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ def test_pktgen_dpdk_throughput_successful_no_sla(self, mock_ssh):
+ args = {
+ 'options': {'packetsize': 60, 'number_of_ports': 10},
+ }
+
+ result = {}
+
+ p = pktgen_dpdk_throughput.PktgenDPDK(args, self.ctx)
+
+ p.server = mock_ssh.SSH()
+ p.client = mock_ssh.SSH()
+
+ mock_dpdk_result = mock.Mock()
+ mock_dpdk_result.return_value = 149300
+ p._dpdk_get_result = mock_dpdk_result
+
+ sample_output = '{"packets_per_second": 9753, "errors": 0, \
+ "packets_sent": 149776, "flows": 110}'
+ mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+
+ p.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ expected_result["packets_received"] = 149300
+ expected_result["packetsize"] = 60
+ self.assertEqual(result, expected_result)
+
+ def test_pktgen_dpdk_throughput_successful_sla(self, mock_ssh):
+ args = {
+ 'options': {'packetsize': 60, 'number_of_ports': 10},
+ 'sla': {'max_ppm': 10000}
+ }
+ result = {}
+
+ p = pktgen_dpdk_throughput.PktgenDPDK(args, self.ctx)
+
+ p.server = mock_ssh.SSH()
+ p.client = mock_ssh.SSH()
+
+ mock_dpdk_result = mock.Mock()
+ mock_dpdk_result.return_value = 149300
+ p._dpdk_get_result = mock_dpdk_result
+
+ sample_output = '{"packets_per_second": 9753, "errors": 0, \
+ "packets_sent": 149776, "flows": 110}'
+ mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+
+ p.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ expected_result["packets_received"] = 149300
+ expected_result["packetsize"] = 60
+ self.assertEqual(result, expected_result)
+
+ def test_pktgen_dpdk_throughput_unsuccessful_sla(self, mock_ssh):
+ args = {
+ 'options': {'packetsize': 60, 'number_of_ports': 10},
+ 'sla': {'max_ppm': 1000}
+ }
+ result = {}
+
+ p = pktgen_dpdk_throughput.PktgenDPDK(args, self.ctx)
+
+ p.server = mock_ssh.SSH()
+ p.client = mock_ssh.SSH()
+
+ mock_dpdk_result = mock.Mock()
+ mock_dpdk_result.return_value = 149300
+ p._dpdk_get_result = mock_dpdk_result
+
+ sample_output = '{"packets_per_second": 9753, "errors": 0, \
+ "packets_sent": 149776, "flows": 110}'
+ mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+ def test_pktgen_dpdk_throughput_unsuccessful_script_error(
+ self, mock_ssh):
+ args = {
+ 'options': {'packetsize': 60, 'number_of_ports': 10},
+ 'sla': {'max_ppm': 1000}
+ }
+ result = {}
+
+ p = pktgen_dpdk_throughput.PktgenDPDK(args, self.ctx)
+
+ p.server = mock_ssh.SSH()
+ p.client = mock_ssh.SSH()
+
+ mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, p.run, result)
+
+ def test_pktgen_dpdk_throughput_is_dpdk_setup(self, mock_ssh):
+ args = {
+ 'options': {'packetsize': 60},
+ }
+ p = pktgen_dpdk_throughput.PktgenDPDK(args, self.ctx)
+ p.server = mock_ssh.SSH()
+
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+
+ p._is_dpdk_setup("server")
+
+ mock_ssh.SSH().execute.assert_called_with(
+ "ip a | grep eth1 2>/dev/null")
+
+ def test_pktgen_dpdk_throughput_dpdk_setup(self, mock_ssh):
+ args = {
+ 'options': {'packetsize': 60},
+ }
+ p = pktgen_dpdk_throughput.PktgenDPDK(args, self.ctx)
+ p.server = mock_ssh.SSH()
+ p.client = mock_ssh.SSH()
+
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+
+ p.dpdk_setup()
+
+ self.assertTrue(p.dpdk_setup_done)
+
+ def test_pktgen_dpdk_throughput_dpdk_get_result(self, mock_ssh):
+ args = {
+ 'options': {'packetsize': 60},
+ }
+ p = pktgen_dpdk_throughput.PktgenDPDK(args, self.ctx)
+ p.server = mock_ssh.SSH()
+ p.client = mock_ssh.SSH()
+
+ mock_ssh.SSH().execute.return_value = (0, '10000', '')
+
+ p._dpdk_get_result()
+
+ mock_ssh.SSH().execute.assert_called_with(
+ "sudo /dpdk/destdir/bin/dpdk-procinfo -- --stats-reset > /dev/null 2>&1")
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_sfc.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_sfc.py
new file mode 100644
index 000000000..a5e5e39dc
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_sfc.py
@@ -0,0 +1,68 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.networking.sfc
+
+from __future__ import absolute_import
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.networking import sfc
+
+
+class SfcTestCase(unittest.TestCase):
+
+ def setUp(self):
+ scenario_cfg = {}
+ context_cfg = {
+ # Used in Sfc.setup()
+ 'target': {
+ 'user': 'root',
+ 'password': 'opnfv',
+ 'ip': '127.0.0.1',
+ },
+
+ # Used in Sfc.run()
+ 'host': {
+ 'user': 'root',
+ 'password': 'opnfv',
+ 'ip': None,
+ }
+ }
+
+ self.sfc = sfc.Sfc(scenario_cfg=scenario_cfg, context_cfg=context_cfg)
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.sfc.ssh')
+ @mock.patch('yardstick.benchmark.scenarios.networking.sfc.sfc_openstack')
+ @mock.patch('yardstick.benchmark.scenarios.networking.sfc.subprocess')
+ def test_run_for_success(self, mock_subprocess, mock_openstack, mock_ssh):
+ # Mock a successful SSH in Sfc.setup() and Sfc.run()
+ mock_ssh.SSH.from_node().execute.return_value = (0, '100', '')
+ mock_openstack.get_an_IP.return_value = "127.0.0.1"
+ mock_subprocess.call.return_value = 'mocked!'
+
+ result = {}
+ self.sfc.setup()
+ self.sfc.run(result)
+ self.sfc.teardown()
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.sfc.ssh')
+ @mock.patch('yardstick.benchmark.scenarios.networking.sfc.sfc_openstack')
+ @mock.patch('yardstick.benchmark.scenarios.networking.sfc.subprocess')
+ def test2_run_for_success(self, mock_subprocess, mock_openstack, mock_ssh):
+ # Mock a successful SSH in Sfc.setup() and Sfc.run()
+ mock_ssh.SSH.from_node().execute.return_value = (
+ 0, 'vxlan_tool.py', 'succeeded timed out')
+ mock_openstack.get_an_IP.return_value = "127.0.0.1"
+ mock_subprocess.call.return_value = 'mocked!'
+
+ result = {}
+ self.sfc.setup()
+ self.sfc.run(result)
+ self.sfc.teardown()
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py
new file mode 100644
index 000000000..cf9a26a76
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py
@@ -0,0 +1,873 @@
+# Copyright (c) 2016-2019 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from copy import deepcopy
+import os
+import sys
+
+import mock
+import unittest
+
+from yardstick import tests
+from yardstick.common import exceptions
+from yardstick.common import utils
+from yardstick.network_services.collector.subscriber import Collector
+from yardstick.network_services.traffic_profile import base
+from yardstick.network_services.vnf_generic import vnfdgen
+from yardstick.network_services.vnf_generic.vnf.base import GenericTrafficGen
+from yardstick.network_services.vnf_generic.vnf.base import GenericVNF
+
+
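+# Patch sys.modules with the STL mock modules before importing vnf_generic,
+# so the import does not require the real TRex stateless client libraries.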
+stl_patch = mock.patch.dict(sys.modules, tests.STL_MOCKS)
+stl_patch.start()
+
+if stl_patch:
+ from yardstick.benchmark.scenarios.networking import vnf_generic
+
+# pylint: disable=unused-argument
+# disable this for now because I keep forgetting mock patch arg ordering
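+# (stacked @mock.patch decorators hand their mocks to the test method
+# bottom-up, so the innermost patch is the first extra positional argument)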
+
+
+COMPLETE_TREX_VNFD = {
+ 'vnfd:vnfd-catalog': {
+ 'vnfd': [
+ {
+ 'benchmark': {
+ 'kpi': [
+ 'rx_throughput_fps',
+ 'tx_throughput_fps',
+ 'tx_throughput_mbps',
+ 'rx_throughput_mbps',
+ 'tx_throughput_pc_linerate',
+ 'rx_throughput_pc_linerate',
+ 'min_latency',
+ 'max_latency',
+ 'avg_latency',
+ ],
+ },
+ 'connection-point': [
+ {
+ 'name': 'xe0',
+ 'type': 'VPORT',
+ },
+ {
+ 'name': 'xe1',
+ 'type': 'VPORT',
+ },
+ ],
+ 'description': 'TRex stateless traffic generator for RFC2544',
+ 'id': 'TrexTrafficGen',
+ 'mgmt-interface': {
+ 'ip': '1.1.1.1',
+ 'password': 'berta',
+ 'user': 'berta',
+ 'vdu-id': 'trexgen-baremetal',
+ },
+ 'name': 'trexgen',
+ 'short-name': 'trexgen',
+ 'class-name': 'TrexTrafficGen',
+ 'vdu': [
+ {
+ 'description': 'TRex stateless traffic generator for RFC2544',
+ 'external-interface': [
+ {
+ 'name': 'xe0',
+ 'virtual-interface': {
+ 'bandwidth': '10 Gbps',
+ 'dst_ip': '1.1.1.1',
+ 'dst_mac': '00:01:02:03:04:05',
+ 'local_ip': '1.1.1.2',
+ 'local_mac': '00:01:02:03:05:05',
+ 'type': 'PCI-PASSTHROUGH',
+ 'netmask': "255.255.255.0",
+ 'driver': 'i40',
+ 'vpci': '0000:00:10.2',
+ },
+ 'vnfd-connection-point-ref': 'xe0',
+ },
+ {
+ 'name': 'xe1',
+ 'virtual-interface': {
+ 'bandwidth': '10 Gbps',
+ 'dst_ip': '2.1.1.1',
+ 'dst_mac': '00:01:02:03:04:06',
+ 'local_ip': '2.1.1.2',
+ 'local_mac': '00:01:02:03:05:06',
+ 'type': 'PCI-PASSTHROUGH',
+ 'netmask': "255.255.255.0",
+ 'driver': 'i40',
+ 'vpci': '0000:00:10.1',
+ },
+ 'vnfd-connection-point-ref': 'xe1',
+ },
+ ],
+ 'id': 'trexgen-baremetal',
+ 'name': 'trexgen-baremetal',
+ },
+ ],
+ },
+ ],
+ },
+}
+
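+# Canned "ip addr show" output fed back through the mocked SSH client when
+# the scenario inspects node interfaces.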
+IP_ADDR_SHOW = """
+28: eth1: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP \
+group default qlen 1000
+ link/ether 90:e2:ba:a7:6a:c8 brd ff:ff:ff:ff:ff:ff
+ inet 1.1.1.1/8 brd 1.255.255.255 scope global eth1
+ inet6 fe80::92e2:baff:fea7:6ac8/64 scope link
+ valid_lft forever preferred_lft forever
+29: eth5: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP \
+group default qlen 1000
+ link/ether 90:e2:ba:a7:6a:c9 brd ff:ff:ff:ff:ff:ff
+ inet 2.1.1.1/8 brd 2.255.255.255 scope global eth5
+ inet6 fe80::92e2:baff:fea7:6ac9/64 scope link tentative
+ valid_lft forever preferred_lft forever
+"""
+
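+# Canned /sys/class/net listing, concatenated with IP_ADDR_SHOW as the
+# mocked SSH command output.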
+SYS_CLASS_NET = """
+lrwxrwxrwx 1 root root 0 sie 10 14:16 eth1 -> \
+../../devices/pci0000:80/0000:80:02.2/0000:84:00.1/net/eth1
+lrwxrwxrwx 1 root root 0 sie 3 10:37 eth2 -> \
+../../devices/pci0000:00/0000:00:01.1/0000:84:00.2/net/eth5
+"""
+
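+# Minimal fixed traffic profile fixture; returned by the mocked
+# _fill_traffic_profile() in the setup tests.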
+TRAFFIC_PROFILE = {
+ "schema": "isb:traffic_profile:0.1",
+ "name": "fixed",
+ "description": "Fixed traffic profile to run UDP traffic",
+ "traffic_profile": {
+ "traffic_type": "FixedTraffic",
+ "frame_rate": 100, # pps
+ "flow_number": 10,
+ "frame_size": 64,
+ },
+}
+
+
+class TestNetworkServiceTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.tg__0 = {
+ 'name': 'trafficgen_1.yardstick',
+ 'ip': '10.10.10.11',
+ 'role': 'TrafficGen',
+ 'user': 'root',
+ 'password': 'r00t',
+ 'interfaces': {
+ 'xe0': {
+ 'netmask': '255.255.255.0',
+ 'local_ip': '152.16.100.20',
+ 'local_mac': '00:00:00:00:00:01',
+ 'driver': 'i40e',
+ 'vpci': '0000:07:00.0',
+ 'dpdk_port_num': 0,
+ },
+ 'xe1': {
+ 'netmask': '255.255.255.0',
+ 'local_ip': '152.16.40.20',
+ 'local_mac': '00:00:00:00:00:02',
+ 'driver': 'i40e',
+ 'vpci': '0000:07:00.1',
+ 'dpdk_port_num': 1,
+ },
+ },
+ }
+
+ self.vnf__0 = {
+ 'name': 'vnf.yardstick',
+ 'ip': '10.10.10.12',
+ 'host': '10.223.197.164',
+ 'role': 'vnf',
+ 'user': 'root',
+ 'password': 'r00t',
+ 'interfaces': {
+ 'xe0': {
+ 'netmask': '255.255.255.0',
+ 'local_ip': '152.16.100.19',
+ 'local_mac': '00:00:00:00:00:03',
+ 'driver': 'i40e',
+ 'vpci': '0000:07:00.0',
+ 'dpdk_port_num': 0,
+ },
+ 'xe1': {
+ 'netmask': '255.255.255.0',
+ 'local_ip': '152.16.40.19',
+ 'local_mac': '00:00:00:00:00:04',
+ 'driver': 'i40e',
+ 'vpci': '0000:07:00.1',
+ 'dpdk_port_num': 1,
+ },
+ },
+ 'routing_table': [
+ {
+ 'netmask': '255.255.255.0',
+ 'gateway': '152.16.100.20',
+ 'network': '152.16.100.20',
+ 'if': 'xe0',
+ },
+ {
+ 'netmask': '255.255.255.0',
+ 'gateway': '152.16.40.20',
+ 'network': '152.16.40.20',
+ 'if': 'xe1',
+ },
+ ],
+ 'nd_route_tbl': [
+ {
+ 'netmask': '112',
+ 'gateway': '0064:ff9b:0:0:0:0:9810:6414',
+ 'network': '0064:ff9b:0:0:0:0:9810:6414',
+ 'if': 'xe0',
+ },
+ {
+ 'netmask': '112',
+ 'gateway': '0064:ff9b:0:0:0:0:9810:2814',
+ 'network': '0064:ff9b:0:0:0:0:9810:2814',
+ 'if': 'xe1',
+ },
+ ],
+ }
+
+ self.context_cfg = {
+ 'nodes': {
+ 'tg__0': self.tg__0,
+ 'vnf__0': self.vnf__0,
+ },
+ 'networks': {
+ GenericVNF.UPLINK: {
+ 'vld_id': GenericVNF.UPLINK,
+ },
+ GenericVNF.DOWNLINK: {
+ 'vld_id': GenericVNF.DOWNLINK,
+ },
+ },
+ }
+
+ self.vld0 = {
+ 'vnfd-connection-point-ref': [
+ {
+ 'vnfd-connection-point-ref': 'xe0',
+ 'member-vnf-index-ref': '1',
+ 'vnfd-id-ref': 'trexgen'
+ },
+ {
+ 'vnfd-connection-point-ref': 'xe0',
+ 'member-vnf-index-ref': '2',
+ 'vnfd-id-ref': 'trexgen'
+ }
+ ],
+ 'type': 'ELAN',
+ 'id': GenericVNF.UPLINK,
+ 'name': 'tg__0 to vnf__0 link 1'
+ }
+
+ self.vld1 = {
+ 'vnfd-connection-point-ref': [
+ {
+ 'vnfd-connection-point-ref': 'xe1',
+ 'member-vnf-index-ref': '1',
+ 'vnfd-id-ref': 'trexgen'
+ },
+ {
+ 'vnfd-connection-point-ref': 'xe1',
+ 'member-vnf-index-ref': '2',
+ 'vnfd-id-ref': 'trexgen'
+ }
+ ],
+ 'type': 'ELAN',
+ 'id': GenericVNF.DOWNLINK,
+ 'name': 'vnf__0 to tg__0 link 2'
+ }
+
+ self.topology = {
+ 'id': 'trex-tg-topology',
+ 'short-name': 'trex-tg-topology',
+ 'name': 'trex-tg-topology',
+ 'description': 'trex-tg-topology',
+ 'constituent-vnfd': [
+ {
+ 'member-vnf-index': '1',
+ 'VNF model': 'tg_trex_tpl.yaml',
+ 'vnfd-id-ref': 'tg__0',
+ },
+ {
+ 'member-vnf-index': '2',
+ 'VNF model': 'tg_trex_tpl.yaml',
+ 'vnfd-id-ref': 'vnf__0',
+ },
+ ],
+ 'vld': [self.vld0, self.vld1],
+ }
+
+ self.scenario_cfg = {
+ 'task_path': "",
+ "topology": self._get_file_abspath("vpe_vnf_topology.yaml"),
+ 'task_id': 'a70bdf4a-8e67-47a3-9dc1-273c14506eb7',
+ 'tc': 'tc_ipv4_1Mflow_64B_packetsize',
+ 'traffic_profile': 'ipv4_throughput_vpe.yaml',
+ 'extra_args': {'arg1': 'value1', 'arg2': 'value2'},
+ 'type': 'ISB',
+ 'tc_options': {
+ 'rfc2544': {
+ 'allowed_drop_rate': '0.8 - 1',
+ },
+ },
+ 'options': {
+ 'simulated_users': {'uplink': [1, 2]},
+ 'page_object': {'uplink': [1, 2]},
+ 'framesize': {'64B': 100}
+ },
+ 'runner': {
+ 'object': 'NetworkServiceTestCase',
+ 'interval': 35,
+ 'output_filename': 'yardstick.out',
+ 'runner_id': 74476,
+ 'duration': 400,
+ 'type': 'Duration',
+ },
+ 'traffic_options': {
+ 'flow': 'ipv4_1flow_Packets_vpe.yaml',
+ 'imix': 'imix_voice.yaml'
+ },
+ 'nodes': {
+ 'tg__2': 'trafficgen_2.yardstick',
+ 'tg__0': 'trafficgen_1.yardstick',
+ 'vnf__0': 'vnf.yardstick',
+ },
+ }
+
+ self.s = vnf_generic.NetworkServiceTestCase(self.scenario_cfg,
+ self.context_cfg)
+
+ def _get_file_abspath(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ file_path = os.path.join(curr_path, filename)
+ return file_path
+
+ def test___init__(self):
+ self.assertIsNotNone(self.topology)
+
+ def test__get_ip_flow_range_string(self):
+ result = '152.16.100.2-152.16.100.254'
+ self.assertEqual(result, self.s._get_ip_flow_range(
+ '152.16.100.2-152.16.100.254'))
+
+ def test__get_ip_flow_range_no_nodes(self):
+ self.assertEqual('0.0.0.0', self.s._get_ip_flow_range({}))
+
+ def test__get_ip_flow_range_no_node_data(self):
+ node_data = {'tg__0': 'xe0'}
+ self.s.context_cfg['nodes']['tg__0'] = {}
+ result = self.s._get_ip_flow_range(node_data)
+ self.assertEqual('0.0.0.2-0.0.0.254', result)
+
+ def test__get_ip_flow_range_ipv4(self):
+ node_data = {'tg__0': 'xe0'}
+ self.s.context_cfg['nodes']['tg__0'] = {
+ 'interfaces': {
+ 'xe0': {'local_ip': '192.168.1.15',
+ 'netmask': '255.255.255.128'}
+ }
+ }
+ result = self.s._get_ip_flow_range(node_data)
+ self.assertEqual('192.168.1.2-192.168.1.126', result)
+
+ def test__get_ip_flow_range_ipv4_mask_30(self):
+ node_data = {'tg__0': 'xe0'}
+ self.s.context_cfg['nodes']['tg__0'] = {
+ 'interfaces': {
+ 'xe0': {'local_ip': '192.168.1.15', 'netmask': 30}
+ }
+ }
+ result = self.s._get_ip_flow_range(node_data)
+ self.assertEqual('192.168.1.15', result)
+
+ def test__get_ip_flow_range_ipv6(self):
+ node_data = {'tg__0': 'xe0'}
+ self.s.context_cfg['nodes']['tg__0'] = {
+ 'interfaces': {
+ 'xe0': {'local_ip': '2001::11', 'netmask': 64}
+ }
+ }
+ result = self.s._get_ip_flow_range(node_data)
+ self.assertEqual('2001::2-2001::ffff:ffff:ffff:fffe', result)
+
+ def test___get_traffic_flow(self):
+ self.scenario_cfg["traffic_options"]["flow"] = \
+ self._get_file_abspath("ipv4_1flow_Packets_vpe.yaml")
+ self.scenario_cfg['options'] = {
+ 'flow': {
+ 'src_ip': [
+ {
+ 'tg__0': 'xe0',
+ },
+ ],
+ 'dst_ip': [
+ {
+ 'tg__0': 'xe1',
+ },
+ ],
+ 'public_ip': ['1.1.1.1'],
+ },
+ }
+ expected_flow = {'flow': {'dst_ip_0': '152.16.40.2-152.16.40.254',
+ 'public_ip_0': '1.1.1.1',
+ 'src_ip_0': '152.16.100.2-152.16.100.254'}}
+ self.assertEqual(expected_flow, self.s._get_traffic_flow())
+
+ def test___get_traffic_flow_error(self):
+ self.scenario_cfg["traffic_options"]["flow"] = \
+ "ipv4_1flow_Packets_vpe.yaml1"
+ self.assertEqual({'flow': {}}, self.s._get_traffic_flow())
+
+ def test_get_vnf_imp(self):
+ vnfd = COMPLETE_TREX_VNFD['vnfd:vnfd-catalog']['vnfd'][0]['class-name']
+ with mock.patch.dict(sys.modules, tests.STL_MOCKS):
+ self.assertIsNotNone(self.s.get_vnf_impl(vnfd))
+
+ with self.assertRaises(exceptions.IncorrectConfig) as raised:
+ self.s.get_vnf_impl('NonExistentClass')
+
+ exc_str = str(raised.exception)
+ self.assertIn('No implementation', exc_str)
+ self.assertIn('found in', exc_str)
+
+ def test_load_vnf_models_invalid(self):
+ self.context_cfg["nodes"]['tg__0']['VNF model'] = \
+ self._get_file_abspath("tg_trex_tpl.yaml")
+ self.context_cfg["nodes"]['vnf__0']['VNF model'] = \
+ self._get_file_abspath("tg_trex_tpl.yaml")
+
+ vnf = mock.Mock(autospec=GenericVNF)
+ self.s.get_vnf_impl = mock.Mock(return_value=vnf)
+
+ self.assertIsNotNone(
+ self.s.load_vnf_models(self.scenario_cfg, self.context_cfg))
+
+ def test_load_vnf_models_no_model(self):
+ vnf = mock.Mock(autospec=GenericVNF)
+ self.s.get_vnf_impl = mock.Mock(return_value=vnf)
+
+ self.assertIsNotNone(
+ self.s.load_vnf_models(self.scenario_cfg, self.context_cfg))
+
+ def test_map_topology_to_infrastructure(self):
+ self.s.map_topology_to_infrastructure()
+
+ nodes = self.context_cfg["nodes"]
+ self.assertEqual('../../vnf_descriptors/tg_rfc2544_tpl.yaml',
+ nodes['tg__0']['VNF model'])
+ self.assertEqual('../../vnf_descriptors/vpe_vnf.yaml',
+ nodes['vnf__0']['VNF model'])
+
+ def test_map_topology_to_infrastructure_insufficient_nodes(self):
+ cfg = deepcopy(self.context_cfg)
+ del cfg['nodes']['vnf__0']
+
+ cfg_patch = mock.patch.object(self.s, 'context_cfg', cfg)
+ with cfg_patch:
+ with self.assertRaises(exceptions.IncorrectConfig):
+ self.s.map_topology_to_infrastructure()
+
+ def test_map_topology_to_infrastructure_config_invalid(self):
+ ssh_mock = mock.Mock()
+ ssh_mock.execute.return_value = 0, SYS_CLASS_NET + IP_ADDR_SHOW, ""
+
+ cfg = deepcopy(self.s.context_cfg)
+
+ # delete all, we don't know which will come first
+ del cfg['nodes']['vnf__0']['interfaces']['xe0']['local_mac']
+ del cfg['nodes']['vnf__0']['interfaces']['xe1']['local_mac']
+ del cfg['nodes']['tg__0']['interfaces']['xe0']['local_mac']
+ del cfg['nodes']['tg__0']['interfaces']['xe1']['local_mac']
+
+ config_patch = mock.patch.object(self.s, 'context_cfg', cfg)
+ with config_patch:
+ with self.assertRaises(exceptions.IncorrectConfig):
+ self.s.map_topology_to_infrastructure()
+
+ def test__resolve_topology_invalid_config(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
+ ssh.from_node.return_value = ssh_mock
+
+ # purge an important key from the data structure
+ for interface in self.tg__0['interfaces'].values():
+ del interface['local_mac']
+
+ with self.assertRaises(exceptions.IncorrectConfig) as raised:
+ self.s._resolve_topology()
+
+ self.assertIn('not found', str(raised.exception))
+
+ # restore local_mac
+ for index, interface in enumerate(self.tg__0['interfaces'].values()):
+ interface['local_mac'] = '00:00:00:00:00:{:02x}'.format(index)
+
+ # make a connection point ref with 3 points
+ self.s.topology["vld"][0]['vnfd-connection-point-ref'].append(
+ self.s.topology["vld"][0]['vnfd-connection-point-ref'][0])
+
+ with self.assertRaises(exceptions.IncorrectConfig) as raised:
+ self.s._resolve_topology()
+
+ self.assertIn('wrong endpoint count', str(raised.exception))
+
+ # make a connection point ref with 1 point
+ self.s.topology["vld"][0]['vnfd-connection-point-ref'] = \
+ self.s.topology["vld"][0]['vnfd-connection-point-ref'][:1]
+
+ with self.assertRaises(exceptions.IncorrectConfig) as raised:
+ self.s._resolve_topology()
+
+ self.assertIn('wrong endpoint count', str(raised.exception))
+
+ def test_run(self):
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.name = "tgen__1"
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = \
+ mock.Mock(return_value={tgen.name: verified_dict})
+ result = {}
+ self.s.run(result)
+ self.assertDictEqual(result, {tgen.name: verified_dict})
+
+ def test_setup(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
+ ssh.from_node.return_value = ssh_mock
+
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.terminate = mock.Mock(return_value=True)
+ tgen.name = "tgen__1"
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ vnf.terminate = mock.Mock(return_value=True)
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = \
+ mock.Mock(return_value={tgen.name: verified_dict})
+ self.s.map_topology_to_infrastructure = mock.Mock(return_value=0)
+ self.s.load_vnf_models = mock.Mock(return_value=self.s.vnfs)
+ self.s._fill_traffic_profile = \
+ mock.Mock(return_value=TRAFFIC_PROFILE)
+ self.assertIsNone(self.s.setup())
+
+ def test_setup_exception(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
+ ssh.from_node.return_value = ssh_mock
+
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.terminate = mock.Mock(return_value=True)
+ tgen.name = "tgen__1"
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ vnf.instantiate.side_effect = RuntimeError(
+ "error during instantiate")
+ vnf.terminate = mock.Mock(return_value=True)
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = \
+ mock.Mock(return_value={tgen.name: verified_dict})
+ self.s.map_topology_to_infrastructure = mock.Mock(return_value=0)
+ self.s.load_vnf_models = mock.Mock(return_value=self.s.vnfs)
+ self.s._fill_traffic_profile = \
+ mock.Mock(return_value=TRAFFIC_PROFILE)
+ with self.assertRaises(RuntimeError):
+ self.s.setup()
+
+ def test__get_traffic_profile(self):
+ self.scenario_cfg["traffic_profile"] = \
+ self._get_file_abspath("ipv4_throughput_vpe.yaml")
+ self.assertIsNotNone(self.s._get_traffic_profile())
+
+ def test__get_traffic_profile_exception(self):
+ with mock.patch.dict(self.scenario_cfg, {'traffic_profile': ''}):
+ with self.assertRaises(IOError):
+ self.s._get_traffic_profile()
+
+ def test__key_list_to_dict(self):
+ result = self.s._key_list_to_dict("uplink", {"uplink": [1, 2]})
+ self.assertEqual({"uplink_0": 1, "uplink_1": 2}, result)
+
+ def test__get_simulated_users(self):
+ result = self.s._get_simulated_users()
+ self.assertEqual({'simulated_users': {'uplink_0': 1, 'uplink_1': 2}},
+ result)
+
+ def test__get_page_object(self):
+ result = self.s._get_page_object()
+ self.assertEqual({'page_object': {'uplink_0': 1, 'uplink_1': 2}},
+ result)
+
+ def test___get_traffic_imix_exception(self):
+ with mock.patch.dict(self.scenario_cfg["traffic_options"], {'imix': ''}):
+ self.assertEqual({'imix': {'64B': 100}},
+ self.s._get_traffic_imix())
+
+ def test__get_ip_priority(self):
+ with mock.patch.dict(self.scenario_cfg["options"],
+ {'priority': {'raw': '0x01'}}):
+ self.assertEqual({'raw': '0x01'}, self.s._get_ip_priority())
+
+ def test__get_ip_priority_exception(self):
+ self.assertEqual({}, self.s._get_ip_priority())
+
+ @mock.patch.object(base.TrafficProfile, 'get')
+ @mock.patch.object(vnfdgen, 'generate_vnfd')
+ def test__fill_traffic_profile(self, mock_generate, mock_tprofile_get):
+ fake_tprofile = mock.Mock()
+ fake_vnfd = mock.MagicMock()
+ with mock.patch.object(self.s, '_get_traffic_profile',
+ return_value=fake_tprofile) as mock_get_tp:
+ mock_generate.return_value = fake_vnfd
+ self.s._fill_traffic_profile()
+ mock_get_tp.assert_called_once()
+ mock_generate.assert_called_once_with(
+ fake_tprofile,
+ {'downlink': {},
+ 'extra_args': {'arg1': 'value1', 'arg2': 'value2'},
+ 'flow': {'flow': {}},
+ 'imix': {'imix': {'64B': 100}},
+ 'priority': {},
+ 'uplink': {},
+ 'duration': 30,
+ 'simulated_users': {
+ 'simulated_users': {'uplink_0': 1, 'uplink_1': 2}},
+ 'page_object': {
+ 'page_object': {'uplink_0': 1, 'uplink_1': 2}},}
+ )
+ mock_tprofile_get.assert_called_once_with(fake_vnfd)
+
+ @mock.patch.object(base.TrafficProfile, 'get')
+ @mock.patch.object(vnfdgen, 'generate_vnfd')
+ def test__fill_traffic_profile2(self, mock_generate, mock_tprofile_get):
+ fake_tprofile = mock.Mock()
+ fake_vnfd = {}
+ with mock.patch.object(self.s, '_get_traffic_profile',
+ return_value=fake_tprofile) as mock_get_tp:
+ mock_generate.return_value = fake_vnfd
+
+ self.s.scenario_cfg["options"] = {"traffic_config": {"duration": 99899}}
+ self.s._fill_traffic_profile()
+ mock_get_tp.assert_called_once()
+ self.assertIn("traffic_profile", fake_vnfd)
+ self.assertIn("duration", fake_vnfd["traffic_profile"])
+ self.assertEqual(99899, fake_vnfd["traffic_profile"]["duration"])
+
+ @mock.patch.object(utils, 'open_relative_file')
+ def test__get_topology(self, mock_open_path):
+ self.s.scenario_cfg['topology'] = 'fake_topology'
+ self.s.scenario_cfg['task_path'] = 'fake_path'
+ mock_open_path.side_effect = mock.mock_open(read_data='fake_data')
+ self.assertEqual('fake_data', self.s._get_topology())
+ mock_open_path.assert_called_once_with('fake_topology', 'fake_path')
+
+ @mock.patch.object(vnfdgen, 'generate_vnfd')
+ def test__render_topology(self, mock_generate):
+ fake_topology = 'fake_topology'
+ mock_generate.return_value = {'nsd:nsd-catalog': {'nsd': ['fake_nsd']}}
+ with mock.patch.object(self.s, '_get_topology',
+ return_value=fake_topology) as mock_get_topology:
+ self.s._render_topology()
+ mock_get_topology.assert_called_once()
+
+ mock_generate.assert_called_once_with(
+ fake_topology,
+ {'extra_args': {'arg1': 'value1', 'arg2': 'value2'}}
+ )
+ self.assertEqual(self.s.topology, 'fake_nsd')
+
+ def test_teardown(self):
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.terminate = mock.Mock(return_value=True)
+ vnf.name = str(vnf)
+ self.s.vnfs = [vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.stop = \
+ mock.Mock(return_value=True)
+ self.assertIsNone(self.s.teardown())
+
+ def test_teardown_exception(self):
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.terminate = mock.Mock(
+ side_effect=RuntimeError("error during terminate"))
+ vnf.name = str(vnf)
+ self.s.vnfs = [vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.stop = \
+ mock.Mock(return_value=True)
+ with self.assertRaises(RuntimeError):
+ self.s.teardown()
+
+
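+# Reuses the fixtures from TestNetworkServiceTestCase via inheritance; only
+# the scenario class under test is swapped in setUp().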
+class TestNetworkServiceRFC2544TestCase(TestNetworkServiceTestCase):
+
+ def setUp(self):
+ super(TestNetworkServiceRFC2544TestCase, self).setUp()
+ self.s = vnf_generic.NetworkServiceRFC2544(self.scenario_cfg,
+ self.context_cfg)
+
+ def test_run(self):
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.name = "tgen__1"
+ tgen.wait_on_trafic.return_value = 'COMPLETE'
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s._fill_traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = mock.Mock(
+ return_value={tgen.name: verified_dict})
+ result = mock.Mock()
+ self.s.run(result)
+ self.s._fill_traffic_profile.assert_called_once()
+ result.push.assert_called_once()
+
+ def test_setup(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
+ ssh.from_node.return_value = ssh_mock
+
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.terminate = mock.Mock(return_value=True)
+ tgen.name = "tgen__1"
+ tgen.run_traffic.return_value = 'tg_id'
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ vnf.terminate = mock.Mock(return_value=True)
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = \
+ mock.Mock(return_value={tgen.name: verified_dict})
+ self.s.map_topology_to_infrastructure = mock.Mock(return_value=0)
+ self.s.load_vnf_models = mock.Mock(return_value=self.s.vnfs)
+ self.s.setup()
+
+ def test_setup_exception(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
+ ssh.from_node.return_value = ssh_mock
+
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.terminate = mock.Mock(return_value=True)
+ tgen.name = "tgen__1"
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ vnf.instantiate.side_effect = RuntimeError(
+ "error during instantiate")
+ vnf.terminate = mock.Mock(return_value=True)
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = \
+ mock.Mock(return_value={tgen.name: verified_dict})
+ self.s.map_topology_to_infrastructure = mock.Mock(return_value=0)
+ self.s.load_vnf_models = mock.Mock(return_value=self.s.vnfs)
+ self.s._fill_traffic_profile = \
+ mock.Mock(return_value=TRAFFIC_PROFILE)
+ with self.assertRaises(RuntimeError):
+ self.s.setup()
+
+
+class TestNetworkServiceRFC3511TestCase(TestNetworkServiceTestCase):
+
+ def setUp(self):
+ super(TestNetworkServiceRFC3511TestCase, self).setUp()
+ self.s = vnf_generic.NetworkServiceRFC3511(self.scenario_cfg,
+ self.context_cfg)
+
+ def test_run(self):
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.name = "tgen__1"
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s._fill_traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = mock.Mock()
+ result = mock.Mock()
+ self.s.run(result)
+ self.s._fill_traffic_profile.assert_called_once()
+ result.push.assert_called_once()
+
+ def test_setup(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
+ ssh.from_node.return_value = ssh_mock
+
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.terminate = mock.Mock(return_value=True)
+ tgen.name = "tgen__1"
+ tgen.run_traffic.return_value = 'tg_id'
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ vnf.terminate = mock.Mock(return_value=True)
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = \
+ mock.Mock(return_value={tgen.name: verified_dict})
+ self.s.map_topology_to_infrastructure = mock.Mock(return_value=0)
+ self.s.load_vnf_models = mock.Mock(return_value=self.s.vnfs)
+ self.s.setup()
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf.py
new file mode 100644
index 000000000..a1c27f5fb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf.py
@@ -0,0 +1,196 @@
+# Copyright 2016 Intel Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+import unittest
+import subprocess
+import yardstick.ssh as ssh
+
+from yardstick.benchmark.scenarios.networking import vsperf
+from yardstick import exceptions as y_exc
+
+
+class VsperfTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.context_cfg = {
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "ubuntu",
+ "password": "ubuntu",
+ },
+ }
+ self.scenario_cfg = {
+ 'options': {
+ 'testname': 'p2p_rfc2544_continuous',
+ 'traffic_type': 'continuous',
+ 'frame_size': '64',
+ 'bidirectional': 'True',
+ 'iload': 100,
+ 'trafficgen_port1': 'eth1',
+ 'trafficgen_port2': 'eth3',
+ 'external_bridge': 'br-ex',
+ 'conf_file': 'vsperf-yardstick.conf',
+ 'setup_script': 'setup_yardstick.sh',
+ 'test_params': 'TRAFFICGEN_DURATION=30;',
+ },
+ 'sla': {
+ 'metrics': 'throughput_rx_fps',
+ 'throughput_rx_fps': 500000,
+ 'action': 'monitor',
+ }
+ }
+
+ self._mock_SSH = mock.patch.object(ssh, 'SSH')
+ self.mock_SSH = self._mock_SSH.start()
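+ # Default mocked VSPERF output: a two-line CSV with the metric name
+ # and its measured value.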
+ self.mock_SSH.from_node().execute.return_value = (
+ 0, 'throughput_rx_fps\r\n14797660.000\r\n', '')
+
+ self._mock_subprocess_call = mock.patch.object(subprocess, 'call')
+ self.mock_subprocess_call = self._mock_subprocess_call.start()
+ self.mock_subprocess_call.return_value = None
+
+ self.addCleanup(self._stop_mock)
+
+ self.scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+
+ def _stop_mock(self):
+ self._mock_SSH.stop()
+ self._mock_subprocess_call.stop()
+
+ def test_setup(self):
+ self.scenario.setup()
+ self.assertIsNotNone(self.scenario.client)
+ self.assertTrue(self.scenario.setup_done)
+
+ def test_setup_tg_port_not_set(self):
+ del self.scenario_cfg['options']['trafficgen_port1']
+ del self.scenario_cfg['options']['trafficgen_port2']
+ scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+ scenario.setup()
+
+ self.mock_subprocess_call.assert_called_once_with(
+ 'setup_yardstick.sh setup', shell=True)
+ self.assertIsNone(scenario.tg_port1)
+ self.assertIsNone(scenario.tg_port2)
+ self.assertIsNotNone(scenario.client)
+ self.assertTrue(scenario.setup_done)
+
+ def test_setup_no_setup_script(self):
+ del self.scenario_cfg['options']['setup_script']
+ scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+ scenario.setup()
+
+ self.mock_subprocess_call.assert_has_calls(
+ (mock.call('sudo bash -c "ovs-vsctl add-port br-ex eth1"',
+ shell=True),
+ mock.call('sudo bash -c "ovs-vsctl add-port br-ex eth3"',
+ shell=True)))
+ self.assertEqual(2, self.mock_subprocess_call.call_count)
+ self.assertIsNone(scenario.setup_script)
+ self.assertIsNotNone(scenario.client)
+ self.assertTrue(scenario.setup_done)
+
+ def test_run_ok(self):
+ self.scenario.setup()
+
+ result = {}
+ self.scenario.run(result)
+
+ self.assertEqual(result['throughput_rx_fps'], '14797660.000')
+
+ def test_run_ok_setup_not_done(self):
+ result = {}
+ self.scenario.run(result)
+
+ self.assertTrue(self.scenario.setup_done)
+ self.assertEqual(result['throughput_rx_fps'], '14797660.000')
+
+ def test_run_ssh_command_call_counts(self):
+ self.scenario.run({})
+
+ self.assertEqual(self.mock_SSH.from_node().execute.call_count, 2)
+ self.mock_SSH.from_node().run.assert_called_once()
+
+ def test_run_sla_fail(self):
+ self.mock_SSH.from_node().execute.return_value = (
+ 0, 'throughput_rx_fps\r\n123456.000\r\n', '')
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
+
+ self.assertIn('VSPERF_throughput_rx_fps(123456.000000) < '
+ 'SLA_throughput_rx_fps(500000.000000)',
+ str(raised.exception))
+
+ def test_run_sla_fail_metric_not_collected(self):
+ self.mock_SSH.from_node().execute.return_value = (
+ 0, 'nonexisting_metric\r\n14797660.000\r\n', '')
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
+
+ self.assertIn('throughput_rx_fps was not collected by VSPERF',
+ str(raised.exception))
+
+ def test_run_faulty_result_csv(self):
+ self.mock_SSH.from_node().execute.return_value = (
+ 0, 'faulty output not csv', '')
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
+
+ self.assertIn('throughput_rx_fps was not collected by VSPERF',
+ str(raised.exception))
+
+ def test_run_sla_fail_metric_not_defined_in_sla(self):
+ del self.scenario_cfg['sla']['throughput_rx_fps']
+ scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+ scenario.setup()
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ scenario.run({})
+
+ self.assertIn('throughput_rx_fps is not defined in SLA',
+ str(raised.exception))
+
+ def test_teardown(self):
+ self.scenario.setup()
+ self.assertIsNotNone(self.scenario.client)
+ self.assertTrue(self.scenario.setup_done)
+
+ self.scenario.teardown()
+ self.assertFalse(self.scenario.setup_done)
+
+ def test_teardown_tg_port_not_set(self):
+ del self.scenario_cfg['options']['trafficgen_port1']
+ del self.scenario_cfg['options']['trafficgen_port2']
+ scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+ scenario.teardown()
+
+ self.mock_subprocess_call.assert_called_once_with(
+ 'setup_yardstick.sh teardown', shell=True)
+ self.assertFalse(scenario.setup_done)
+
+ def test_teardown_no_setup_script(self):
+ del self.scenario_cfg['options']['setup_script']
+ scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+ scenario.teardown()
+
+ self.mock_subprocess_call.assert_has_calls(
+ (mock.call('sudo bash -c "ovs-vsctl del-port br-ex eth1"',
+ shell=True),
+ mock.call('sudo bash -c "ovs-vsctl del-port br-ex eth3"',
+ shell=True)))
+ self.assertEqual(2, self.mock_subprocess_call.call_count)
+ self.assertFalse(scenario.setup_done)
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf_dpdk.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf_dpdk.py
new file mode 100644
index 000000000..8bbe6911e
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf_dpdk.py
@@ -0,0 +1,181 @@
+# Copyright 2017 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+import time
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.networking import vsperf_dpdk
+from yardstick.common import exceptions as y_exc
+from yardstick import ssh
+
+
+class VsperfDPDKTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "ubuntu",
+ "password": "ubuntu",
+ },
+ }
+ self.args = {
+ 'task_id': "1234-5678",
+ 'options': {
+ 'testname': 'pvp_tput',
+ 'traffic_type': 'rfc2544_throughput',
+ 'frame_size': '64',
+ 'test_params': 'TRAFFICGEN_DURATION=30;',
+ 'trafficgen_port1': 'ens4',
+ 'trafficgen_port2': 'ens5',
+ 'conf_file': 'vsperf-yardstick.conf',
+ 'setup_script': 'setup_yardstick.sh',
+ 'moongen_helper_file': '~/moongen.py',
+ 'moongen_host_ip': '10.5.201.151',
+ 'moongen_port1_mac': '8c:dc:d4:ae:7c:5c',
+ 'moongen_port2_mac': '8c:dc:d4:ae:7c:5d',
+ 'trafficgen_port1_nw': 'test2',
+ 'trafficgen_port2_nw': 'test3',
+ },
+ 'sla': {
+ 'metrics': 'throughput_rx_fps',
+ 'throughput_rx_fps': 500000,
+ 'action': 'monitor',
+ }
+ }
+ self._mock_ssh = mock.patch.object(ssh, 'SSH')
+ self.mock_ssh = self._mock_ssh.start()
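+ # Patch subprocess.call so no external setup commands are actually run.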
+ self._mock_subprocess_call = mock.patch.object(subprocess, 'call')
+ self.mock_subprocess_call = self._mock_subprocess_call.start()
+ mock_call_obj = mock.Mock()
+ mock_call_obj.execute.return_value = None
+ self.mock_subprocess_call.return_value = mock_call_obj
+
+ self._mock_log_info = mock.patch.object(vsperf_dpdk.LOG, 'info')
+ self.mock_log_info = self._mock_log_info.start()
+
+ self.addCleanup(self._cleanup)
+
+ self.scenario = vsperf_dpdk.VsperfDPDK(self.args, self.ctx)
+ self.scenario.setup()
+
+ def _cleanup(self):
+ self._mock_ssh.stop()
+ self._mock_subprocess_call.stop()
+ self._mock_log_info.stop()
+
+ def test_setup(self):
+ self.assertIsNotNone(self.scenario.client)
+ self.assertTrue(self.scenario.setup_done)
+
+ def test_teardown(self):
+ self.scenario.teardown()
+ self.assertFalse(self.scenario.setup_done)
+
+ def test_is_dpdk_setup_no(self):
+ # is_dpdk_setup() specific mocks
+ self.mock_ssh.from_node().execute.return_value = (0, 'dummy', '')
+
+ self.assertFalse(self.scenario._is_dpdk_setup())
+
+ def test_is_dpdk_setup_yes(self):
+ # is_dpdk_setup() specific mocks
+ self.mock_ssh.from_node().execute.return_value = (0, '', '')
+
+ self.assertTrue(self.scenario._is_dpdk_setup())
+
+ @mock.patch.object(time, 'sleep')
+ def test_dpdk_setup_first(self, *args):
+ # is_dpdk_setup() specific mocks
+ self.mock_ssh.from_node().execute.return_value = (0, 'dummy', '')
+
+ self.scenario.dpdk_setup()
+ self.assertFalse(self.scenario._is_dpdk_setup())
+ self.assertTrue(self.scenario.dpdk_setup_done)
+
+ @mock.patch.object(time, 'sleep')
+ def test_dpdk_setup_next(self, *args):
+ self.mock_ssh.from_node().execute.return_value = (0, '', '')
+
+ self.scenario.dpdk_setup()
+ self.assertTrue(self.scenario._is_dpdk_setup())
+ self.assertTrue(self.scenario.dpdk_setup_done)
+
+ @mock.patch.object(subprocess, 'check_output')
+ def test_run_ok(self, *args):
+ # run() specific mocks
+ self.mock_ssh.from_node().execute.return_value = (
+ 0, 'throughput_rx_fps\r\n14797660.000\r\n', '')
+
+ result = {}
+ self.scenario.run(result)
+ self.assertEqual(result['throughput_rx_fps'], '14797660.000')
+
+ @mock.patch.object(time, 'sleep')
+ @mock.patch.object(subprocess, 'check_output')
+ def test_vsperf_run_sla_fail(self, *args):
+ self.mock_ssh.from_node().execute.return_value = (
+ 0, 'throughput_rx_fps\r\n123456.000\r\n', '')
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
+
+ self.assertIn('VSPERF_throughput_rx_fps(123456.000000) < '
+ 'SLA_throughput_rx_fps(500000.000000)',
+ str(raised.exception))
+
+ @mock.patch.object(time, 'sleep')
+ @mock.patch.object(subprocess, 'check_output')
+ def test_vsperf_run_sla_fail_metric_not_collected(self, *args):
+ self.mock_ssh.from_node().execute.return_value = (
+ 0, 'nonexisting_metric\r\n123456.000\r\n', '')
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
+
+ self.assertIn('throughput_rx_fps was not collected by VSPERF',
+ str(raised.exception))
+
+ @mock.patch.object(time, 'sleep')
+ @mock.patch.object(subprocess, 'check_output')
+ def test_vsperf_run_sla_fail_metric_not_collected_faulty_csv(self, *args):
+ self.scenario.setup()
+
+ self.mock_ssh.from_node().execute.return_value = (
+ 0, 'faulty output not csv', '')
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
+
+ self.assertIn('throughput_rx_fps was not collected by VSPERF',
+ str(raised.exception))
+
+ @mock.patch.object(time, 'sleep')
+ @mock.patch.object(subprocess, 'check_output')
+ def test_vsperf_run_sla_fail_sla_not_defined(self, *args):
+ del self.scenario.scenario_cfg['sla']['throughput_rx_fps']
+ self.scenario.setup()
+
+ self.mock_ssh.from_node().execute.return_value = (
+ 0, 'throughput_rx_fps\r\n14797660.000\r\n', '')
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
+
+ self.assertIn('throughput_rx_fps is not defined in SLA',
+ str(raised.exception))
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/tg_trex_tpl.yaml b/yardstick/tests/unit/benchmark/scenarios/networking/tg_trex_tpl.yaml
new file mode 100644
index 000000000..b1641836b
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/tg_trex_tpl.yaml
@@ -0,0 +1,75 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+vnfd:vnfd-catalog:
+ vnfd:
+ - id: TrexTrafficGen # NSPerf class mapping
+ name: trexgen
+ short-name: trexgen
+ description: TRex stateless traffic generator for tests
+ vm-flavor:
+ vcpu-count: '4'
+ memory-mb: '4096'
+ mgmt-interface:
+ vdu-id: trexgen-baremetal
+ user: '{{user}}' # Value filled by vnfdgen
+ password: '{{password}}' # Value filled by vnfdgen
+ ip: '{{ip}}' # Value filled by vnfdgen
+ connection-point:
+ - name: xe0
+ type: VPORT
+ - name: xe1
+ type: VPORT
+ vdu:
+ - id: trexgen-baremetal
+ name: trexgen-baremetal
+ description: TRex stateless traffic generator for tests
+ external-interface:
+ - name: xe0
+ virtual-interface:
+ type: PCI-PASSTHROUGH
+ # Substitution variables MUST be quoted. Otherwise Python can misinterpret them.
+ vpci: '{{ interfaces.xe0.vpci }}' # Value filled by vnfdgen
+ local_ip: '{{ interfaces.xe0.local_ip }}' # Value filled by vnfdgen
+ driver: '{{ interfaces.xe0.driver}}' # Value filled by vnfdgen
+ dst_ip: '{{ interfaces.xe0.dst_ip }}' # Value filled by vnfdgen
+ local_mac: '{{ interfaces.xe0.local_mac }}' # Value filled by vnfdgen
+ dst_mac: '{{ interfaces.xe0.dst_mac }}' # Value filled by vnfdgen
+ vld_id: '{{ interfaces.xe0.vld_id }}' # Value filled by vnfdgen
+ bandwidth: 10 Gbps
+ vnfd-connection-point-ref: xe0
+ - name: xe1
+ virtual-interface:
+ type: PCI-PASSTHROUGH
+ vpci: '{{ interfaces.xe1.vpci }}' # Value filled by vnfdgen
+ local_ip: '{{ interfaces.xe1.local_ip }}' # Value filled by vnfdgen
+ driver: '{{ interfaces.xe1.driver}}' # Value filled by vnfdgen
+ dst_ip: '{{ interfaces.xe1.dst_ip }}' # Value filled by vnfdgen
+ local_mac: '{{ interfaces.xe1.local_mac }}' # Value filled by vnfdgen
+ dst_mac: '{{ interfaces.xe1.dst_mac }}' # Value filled by vnfdgen
+ vld_id: '{{ interfaces.xe1.vld_id }}' # Value filled by vnfdgen
+ bandwidth: 10 Gbps
+ vnfd-connection-point-ref: xe1
+
+ benchmark:
+ kpi:
+ - rx_throughput_fps
+ - tx_throughput_fps
+ - tx_throughput_mbps
+ - rx_throughput_mbps
+ - tx_throughput_pc_linerate
+ - rx_throughput_pc_linerate
+ - min_latency
+ - max_latency
+ - avg_latency
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/vpe_vnf_topology.yaml b/yardstick/tests/unit/benchmark/scenarios/networking/vpe_vnf_topology.yaml
new file mode 100644
index 000000000..aaf84bb5e
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/vpe_vnf_topology.yaml
@@ -0,0 +1,50 @@
+# Copyright (c) 2016-2019 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+nsd:nsd-catalog:
+ nsd:
+ - id: VPE
+ name: VPE
+ short-name: VPE
+ description: scenario with VPE, L3fwd and VNF
+ constituent-vnfd:
+ - member-vnf-index: '1'
+ vnfd-id-ref: tg__0
+ VNF model: ../../vnf_descriptors/tg_rfc2544_tpl.yaml #tg_trex_tpl.yaml #TREX
+ - member-vnf-index: '2'
+ vnfd-id-ref: vnf__0
+ VNF model: ../../vnf_descriptors/vpe_vnf.yaml #VPE VNF
+
+ vld:
+ - id: uplink
+ name: tg__0 to vnf__0 link 1
+ type: ELAN
+ vnfd-connection-point-ref:
+ - member-vnf-index-ref: '1'
+ vnfd-connection-point-ref: xe0
+ vnfd-id-ref: tg__0
+ - member-vnf-index-ref: '2'
+ vnfd-connection-point-ref: xe0
+ vnfd-id-ref: vnf__0
+
+ - id: downlink
+ name: vnf__0 to tg__0 link 2
+ type: ELAN
+ vnfd-connection-point-ref:
+ - member-vnf-index-ref: '2'
+ vnfd-connection-point-ref: xe1
+ vnfd-id-ref: vnf__0
+ - member-vnf-index-ref: '1'
+ vnfd-connection-point-ref: xe1
+ vnfd-id-ref: tg__0
diff --git a/yardstick/tests/unit/benchmark/scenarios/parser/__init__.py b/yardstick/tests/unit/benchmark/scenarios/parser/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/parser/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/parser/test_parser.py b/yardstick/tests/unit/benchmark/scenarios/parser/test_parser.py
new file mode 100644
index 000000000..9fd5cce38
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/parser/test_parser.py
@@ -0,0 +1,70 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and other.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import subprocess
+
+import unittest
+import mock
+
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.parser import parser
+
+
+class ParserTestCase(unittest.TestCase):
+
+ def setUp(self):
+ args = {
+ 'options': {'yangfile': '/root/yardstick/samples/yang.yaml',
+ 'toscafile': '/root/yardstick/samples/tosca.yaml'},
+ }
+ self.scenario = parser.Parser(scenario_cfg=args, context_cfg={})
+
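+ # Patch subprocess so the yang-to-tosca conversion command is never
+ # actually executed.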
+ self._mock_popen = mock.patch.object(subprocess, 'Popen')
+ self.mock_popen = self._mock_popen.start()
+ self._mock_call = mock.patch.object(subprocess, 'call')
+ self.mock_call = self._mock_call.start()
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_popen.stop()
+ self._mock_call.stop()
+
+ def test_setup_successful(self):
+
+ self.mock_call.return_value = 0
+ self.scenario.setup()
+ self.assertTrue(self.scenario.setup_done)
+
+ def test_run_successful(self):
+
+ result = {}
+
+ self.mock_popen().returncode = 0
+
+ expected_result = jsonutils.loads('{"yangtotosca": "success"}')
+
+ self.scenario.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_run_fail(self):
+ result = {}
+
+ self.mock_popen().returncode = 1
+ expected_result = jsonutils.loads('{"yangtotosca": "fail"}')
+
+ self.scenario.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_teardown_successful(self):
+
+ self.mock_call.return_value = 0
+ self.scenario.teardown()
+ self.assertTrue(self.scenario.teardown_done)
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/__init__.py b/yardstick/tests/unit/benchmark/scenarios/storage/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/fio_read_sample_output.json b/yardstick/tests/unit/benchmark/scenarios/storage/fio_read_sample_output.json
new file mode 100644
index 000000000..e9f642aba
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/fio_read_sample_output.json
@@ -0,0 +1 @@
+{"fioversion": "fio-2.1.3","jobs": [{"jobname": "yardstick-fio","groupid": 0,"error": 0,"read": {"io_bytes": 2166860,"bw": 36113,"iops": 9028,"runtime": 60001,"slat": {"min": 7,"max": 1807,"mean": 10.49,"stddev": 3.00},"clat": {"min": 1,"max": 16902,"mean": 97.84,"stddev": 78.16,"percentile": {"1.000000": 84,"5.000000": 86,"10.000000": 87,"20.000000": 88,"30.000000": 89,"40.000000": 90,"50.000000": 91,"60.000000": 93,"70.000000": 98,"80.000000": 103,"90.000000": 111,"95.000000": 127,"99.000000": 161,"99.500000": 177,"99.900000": 215,"99.950000": 266,"99.990000": 4128,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 86,"max": 16912,"mean": 108.70,"stddev": 78.29},"bw_min": 0,"bw_max": 38128,"bw_agg": 35816.54,"bw_mean": 35816.54,"bw_dev": 3579.16},"write": {"io_bytes": 0,"bw": 0,"iops": 0,"runtime": 0,"slat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"clat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00,"percentile": {"1.000000": 0,"5.000000": 0,"10.000000": 0,"20.000000": 0,"30.000000": 0,"40.000000": 0,"50.000000": 0,"60.000000": 0,"70.000000": 0,"80.000000": 0,"90.000000": 0,"95.000000": 0,"99.000000": 0,"99.500000": 0,"99.900000": 0,"99.950000": 0,"99.990000": 0,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"bw_min": 0,"bw_max": 0,"bw_agg": 0.00,"bw_mean": 0.00,"bw_dev": 0.00},"trim": {"io_bytes": 0,"bw": 0,"iops": 0,"runtime": 0,"slat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"clat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00,"percentile": {"1.000000": 0,"5.000000": 0,"10.000000": 0,"20.000000": 0,"30.000000": 0,"40.000000": 0,"50.000000": 0,"60.000000": 0,"70.000000": 0,"80.000000": 0,"90.000000": 0,"95.000000": 0,"99.000000": 0,"99.500000": 0,"99.900000": 0,"99.950000": 0,"99.990000": 0,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"bw_min": 0,"bw_max": 0,"bw_agg": 0.00,"bw_mean": 0.00,"bw_dev": 0.00},"usr_cpu": 4.86,"sys_cpu": 19.38,"ctx": 632024,"majf": 0,"minf": 30,"iodepth_level": {"1": 116.58,"2": 0.00,"4": 0.00,"8": 0.00,"16": 0.00,"32": 0.00,">=64": 0.00},"latency_us": {"2": 0.01,"4": 0.01,"10": 0.00,"20": 0.00,"50": 0.01,"100": 72.60,"250": 27.34,"500": 0.04,"750": 0.01,"1000": 0.01},"latency_ms": {"2": 0.01,"4": 0.01,"10": 0.01,"20": 0.01,"50": 0.00,"100": 0.00,"250": 0.00,"500": 0.00,"750": 0.00,"1000": 0.00,"2000": 0.00,">=2000": 0.00}}],"disk_util": [{"name": "vda","read_ios": 631084,"write_ios": 212,"read_merges": 0,"write_merges": 232,"read_ticks": 57300,"write_ticks": 324,"in_queue": 57400,"util": 81.55}]}
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/fio_rw_sample_output.json b/yardstick/tests/unit/benchmark/scenarios/storage/fio_rw_sample_output.json
new file mode 100644
index 000000000..4c7501818
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/fio_rw_sample_output.json
@@ -0,0 +1 @@
+{"jobs": [{"trim": {"io_bytes": 0, "slat": {"max": 0, "mean": 0.0, "stddev": 0.0, "min": 0}, "bw_max": 0, "bw_mean": 0.0, "iops": 0, "bw": 0, "lat": {"max": 0, "mean": 0.0, "stddev": 0.0, "min": 0}, "bw_agg": 0.0, "clat": {"max": 0, "mean": 0.0, "percentile": {"70.000000": 0, "5.000000": 0, "50.000000": 0, "99.990000": 0, "30.000000": 0, "10.000000": 0, "99.000000": 0, "0.00": 0, "90.000000": 0, "95.000000": 0, "60.000000": 0, "40.000000": 0, "20.000000": 0, "99.900000": 0, "99.950000": 0, "1.000000": 0, "99.500000": 0, "80.000000": 0}, "stddev": 0.0, "min": 0}, "runtime": 0, "bw_min": 0, "bw_dev": 0.0}, "latency_us": {"10": 0.01, "750": 0.03, "20": 0.0, "50": 0.02, "2": 0.01, "4": 0.01, "100": 0.75, "250": 88.37, "500": 10.72, "1000": 0.01}, "latency_ms": {"10": 0.01, "750": 0.0, "20": 0.01, ">=2000": 0.0, "50": 0.01, "2000": 0.0, "2": 0.07, "4": 0.01, "100": 0.0, "250": 0.01, "500": 0.0, "1000": 0.01}, "read": {"io_bytes": 839056, "slat": {"max": 1990, "mean": 18.14, "stddev": 15.4, "min": 0}, "bw_max": 10328, "bw_mean": 8721.27, "iops": 20972, "bw": 83888, "lat": {"max": 776676, "mean": 236.8, "stddev": 4668.12, "min": 45}, "bw_agg": 8721.27, "clat": {"max": 776663, "mean": 217.79, "percentile": {"70.000000": 199, "5.000000": 119, "50.000000": 175, "99.990000": 15168, "30.000000": 155, "10.000000": 131, "99.000000": 342, "0.00": 0, "90.000000": 247, "95.000000": 278, "60.000000": 185, "40.000000": 165, "20.000000": 145, "99.900000": 820, "99.950000": 1272, "1.000000": 96, "99.500000": 370, "80.000000": 217}, "stddev": 4667.79, "min": 0}, "runtime": 10002, "bw_min": 4, "bw_dev": 2178.08}, "majf": 0, "ctx": 490590, "minf": 87, "jobname": "yardstick-fio", "write": {"io_bytes": 841992, "slat": {"max": 2594, "mean": 19.78, "stddev": 16.25, "min": 0}, "bw_max": 10472, "bw_mean": 8464.0, "iops": 21045, "bw": 84182, "lat": {"max": 776709, "mean": 233.55, "stddev": 3115.46, "min": 64}, "bw_agg": 8464.0, "clat": {"max": 776685, "mean": 212.87, "percentile": {"70.000000": 211, "5.000000": 135, "50.000000": 187, "99.990000": 3536, "30.000000": 169, "10.000000": 145, "99.000000": 358, "0.00": 0, "90.000000": 258, "95.000000": 290, "60.000000": 197, "40.000000": 177, "20.000000": 159, "99.900000": 756, "99.950000": 1288, "1.000000": 114, "99.500000": 382, "80.000000": 229}, "stddev": 3115.23, "min": 0}, "runtime": 10002, "bw_min": 4, "bw_dev": 2584.23}, "iodepth_level": {"16": 0.0, "32": 0.0, "1": 111.92, "2": 0.0, "4": 0.0, ">=64": 0.0, "8": 0.0}, "usr_cp": 2.87, "error": 0, "sys_cp": 12.37, "groupid": 0}], "fio version": "fio-2.1.3", "disk_util": [{"aggr_write_ticks": 42020, "read_merges": 0, "name": "dm-0", "write_ios": 233547, "aggr_write_ios": 235129, "aggr_read_ticks": 42576, "read_ios": 233492, "util": 97.22, "read_ticks": 42096, "aggr_write_merge": 0, "write_merges": 0, "aggr_in_queue": 84524, "aggr_read_ios": 235224, "aggr_util": 96.96, "aggr_read_merges": 0, "in_queue": 83732, "write_ticks": 41468}, {"read_merges": 0, "name": "vda", "write_ios": 235129, "read_ios": 235224, "util": 96.96, "read_ticks": 42576, "write_merges": 0, "in_queue": 84524, "write_ticks": 42020}]}
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/fio_write_sample_output.json b/yardstick/tests/unit/benchmark/scenarios/storage/fio_write_sample_output.json
new file mode 100644
index 000000000..7c760e8bc
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/fio_write_sample_output.json
@@ -0,0 +1 @@
+{"fioversion": "fio-2.1.3","jobs": [{"jobname": "yardstick-fio","groupid": 0,"error": 0,"read": {"io_bytes": 0,"bw": 0,"iops": 0,"runtime": 0,"slat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"clat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00,"percentile": {"1.000000": 0,"5.000000": 0,"10.000000": 0,"20.000000": 0,"30.000000": 0,"40.000000": 0,"50.000000": 0,"60.000000": 0,"70.000000": 0,"80.000000": 0,"90.000000": 0,"95.000000": 0,"99.000000": 0,"99.500000": 0,"99.900000": 0,"99.950000": 0,"99.990000": 0,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"bw_min": 0,"bw_max": 0,"bw_agg": 0.00,"bw_mean": 0.00,"bw_dev": 0.00},"write": {"io_bytes": 2106508,"bw": 35107,"iops": 8776,"runtime": 60001,"slat": {"min": 8,"max": 5166,"mean": 11.83,"stddev": 7.05},"clat": {"min": 1,"max": 23472,"mean": 99.54,"stddev": 44.23,"percentile": {"1.000000": 85,"5.000000": 87,"10.000000": 88,"20.000000": 89,"30.000000": 90,"40.000000": 91,"50.000000": 93,"60.000000": 99,"70.000000": 104,"80.000000": 107,"90.000000": 113,"95.000000": 127,"99.000000": 161,"99.500000": 179,"99.900000": 231,"99.950000": 286,"99.990000": 628,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 87,"max": 23486,"mean": 111.74,"stddev": 45.61},"bw_min": 0,"bw_max": 37288,"bw_agg": 34839.53,"bw_mean": 34839.53,"bw_dev": 3387.37},"trim": {"io_bytes": 0,"bw": 0,"iops": 0,"runtime": 0,"slat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"clat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00,"percentile": {"1.000000": 0,"5.000000": 0,"10.000000": 0,"20.000000": 0,"30.000000": 0,"40.000000": 0,"50.000000": 0,"60.000000": 0,"70.000000": 0,"80.000000": 0,"90.000000": 0,"95.000000": 0,"99.000000": 0,"99.500000": 0,"99.900000": 0,"99.950000": 0,"99.990000": 0,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"bw_min": 0,"bw_max": 0,"bw_agg": 0.00,"bw_mean": 0.00,"bw_dev": 0.00},"usr_cpu": 5.25,"sys_cpu": 19.72,"ctx": 616160,"majf": 0,"minf": 27,"iodepth_level": {"1": 116.90,"2": 0.00,"4": 0.00,"8": 0.00,"16": 0.00,"32": 0.00,">=64": 0.00},"latency_us": {"2": 0.01,"4": 0.01,"10": 0.00,"20": 0.00,"50": 0.01,"100": 60.74,"250": 39.18,"500": 0.06,"750": 0.01,"1000": 0.01},"latency_ms": {"2": 0.01,"4": 0.01,"10": 0.01,"20": 0.00,"50": 0.01,"100": 0.00,"250": 0.00,"500": 0.00,"750": 0.00,"1000": 0.00,"2000": 0.00,">=2000": 0.00}}],"disk_util": [{"name": "vda","read_ios": 0,"write_ios": 615418,"read_merges": 0,"write_merges": 231,"read_ticks": 0,"write_ticks": 58284,"in_queue": 58024,"util": 82.45}]}
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/test_bonnie.py b/yardstick/tests/unit/benchmark/scenarios/storage/test_bonnie.py
new file mode 100644
index 000000000..d78506584
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/test_bonnie.py
@@ -0,0 +1,63 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+
+from yardstick.benchmark.scenarios.storage import bonnie
+
+
+class BonnieTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ @mock.patch('yardstick.benchmark.scenarios.storage.bonnie.ssh')
+ def test_bonnie_successful_setup(self, mock_ssh):
+
+ options = {
+ "file_size": "1024",
+ "ram_size": "512",
+ "test_dir": "/tmp",
+ "concurrency": "1",
+ "test_user": "root"
+ }
+ args = {"options": options}
+ b = bonnie.Bonnie(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ b.setup()
+ self.assertIsNotNone(b.client)
+ self.assertTrue(b.setup_done)
+
+ @mock.patch('yardstick.benchmark.scenarios.storage.bonnie.ssh')
+ def test_bonnie_unsuccessful_script_error(self, mock_ssh):
+ options = {
+ "file_size": "1024",
+ "ram_size": "512",
+ "test_dir": "/tmp",
+ "concurrency": "1",
+ "test_user": "root"
+ }
+ args = {"options": options}
+ b = bonnie.Bonnie(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, b.run, self.result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/test_fio.py b/yardstick/tests/unit/benchmark/scenarios/storage/test_fio.py
new file mode 100644
index 000000000..6e69ddc6d
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/test_fio.py
@@ -0,0 +1,280 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.storage.fio.Fio
+
+from __future__ import absolute_import
+
+import os
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.storage import fio
+from yardstick.common import exceptions as y_exc
+
+
+@mock.patch('yardstick.benchmark.scenarios.storage.fio.ssh')
+class FioTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': 'mykey.key'
+ }
+ }
+ self.sample_output = {
+ 'read': 'fio_read_sample_output.json',
+ 'write': 'fio_write_sample_output.json',
+ 'rw': 'fio_rw_sample_output.json'
+ }
+
+ def test_fio_successful_setup(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {'options': options}
+ p = fio.Fio(args, self.ctx)
+        mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+        p.setup()
+
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ def test_fio_job_file_successful_setup(self, mock_ssh):
+
+ options = {
+ 'job_file': 'job_file.ini',
+ 'directory': '/FIO_Test'
+ }
+ args = {'options': options}
+ p = fio.Fio(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '/dev/vdb', '')
+ p.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+    def test_fio_job_file_no_disk_setup(self, mock_ssh):
+
+ options = {
+ 'job_file': 'job_file.ini',
+ 'directory': '/FIO_Test'
+ }
+ args = {'options': options}
+ p = fio.Fio(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ def test_fio_successful_no_sla(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {'options': options}
+ p = fio.Fio(args, self.ctx)
+ result = {}
+
+ p.client = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.sample_output['rw'])
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ p.run(result)
+
+ expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
+ '"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
+ '"write_lat": 233.55}'
+ expected_result = jsonutils.loads(expected_result)
+ self.assertEqual(result, expected_result)
+
+ def test_fio_successful_read_no_sla(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': "read",
+ 'ramp_time': 10
+ }
+ args = {'options': options}
+ p = fio.Fio(args, self.ctx)
+ result = {}
+
+ p.client = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.sample_output['read'])
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ p.run(result)
+
+ expected_result = '{"read_bw": 36113, "read_iops": 9028,' \
+ '"read_lat": 108.7}'
+ expected_result = jsonutils.loads(expected_result)
+ self.assertEqual(result, expected_result)
+
+ def test_fio_successful_write_no_sla(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': 'write',
+ 'ramp_time': 10
+ }
+ args = {'options': options}
+ p = fio.Fio(args, self.ctx)
+ result = {}
+
+ p.client = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.sample_output['write'])
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ p.run(result)
+
+ expected_result = '{"write_bw": 35107, "write_iops": 8776,'\
+ '"write_lat": 111.74}'
+ expected_result = jsonutils.loads(expected_result)
+ self.assertEqual(result, expected_result)
+
+ def test_fio_successful_lat_sla(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {
+ 'options': options,
+ 'sla': {'write_lat': 300.1}
+ }
+ p = fio.Fio(args, self.ctx)
+ result = {}
+
+ p.client = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.sample_output['rw'])
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ p.run(result)
+
+ expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
+ '"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
+ '"write_lat": 233.55}'
+ expected_result = jsonutils.loads(expected_result)
+ self.assertEqual(result, expected_result)
+
+ def test_fio_unsuccessful_lat_sla(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {
+ 'options': options,
+ 'sla': {'write_lat': 200.1}
+ }
+ p = fio.Fio(args, self.ctx)
+ result = {}
+
+ p.client = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.sample_output['rw'])
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+ def test_fio_successful_bw_iops_sla(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {
+ 'options': options,
+ 'sla': {'read_iops': 20000}
+ }
+ p = fio.Fio(args, self.ctx)
+ result = {}
+
+ p.client = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.sample_output['rw'])
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ p.run(result)
+
+ expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
+ '"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
+ '"write_lat": 233.55}'
+ expected_result = jsonutils.loads(expected_result)
+ self.assertEqual(result, expected_result)
+
+ def test_fio_unsuccessful_bw_iops_sla(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {
+ 'options': options,
+ 'sla': {'read_iops': 30000}
+ }
+ p = fio.Fio(args, self.ctx)
+ result = {}
+
+ p.client = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.sample_output['rw'])
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+ def test_fio_unsuccessful_script_error(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {'options': options}
+ p = fio.Fio(args, self.ctx)
+ result = {}
+
+ p.client = mock_ssh.SSH.from_node()
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, p.run, result)
+
+ def _read_sample_output(self, file_name):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, file_name)
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
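
The expected_result values in the fio tests above trace back to sample output
fixtures such as the fio write sample at the top of this patch: write_bw 35107,
write_iops 8776 and write_lat 111.74 correspond to jobs[0].write.bw,
jobs[0].write.iops and jobs[0].write.lat.mean in that JSON. The real parsing
lives in yardstick.benchmark.scenarios.storage.fio; the sketch below is only a
plausible reconstruction of that field mapping.

    import json


    def extract_write_metrics(raw_output):
        # raw_output is fio's JSON output, e.g. the fio_write_sample_output.json
        # fixture added in this patch.
        job = json.loads(raw_output)["jobs"][0]
        return {
            "write_bw": job["write"]["bw"],             # 35107 in the sample
            "write_iops": job["write"]["iops"],         # 8776 in the sample
            "write_lat": job["write"]["lat"]["mean"],   # 111.74 in the sample
        }
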
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/test_storagecapacity.py b/yardstick/tests/unit/benchmark/scenarios/storage/test_storagecapacity.py
new file mode 100644
index 000000000..c1c731b0a
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/test_storagecapacity.py
@@ -0,0 +1,99 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for
+# yardstick.benchmark.scenarios.storage.storagecapacity.StorageCapacity
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.storage import storagecapacity
+
+DISK_SIZE_SAMPLE_OUTPUT = \
+ '{"Numberf of devides": "2", "Total disk size in bytes": "1024000000"}'
+BLOCK_SIZE_SAMPLE_OUTPUT = '{"/dev/sda": 1024, "/dev/sdb": 4096}'
+DISK_UTIL_RAW_OUTPUT = "vda 10.00\nvda 0.00"
+DISK_UTIL_SAMPLE_OUTPUT = \
+ '{"vda": {"avg_util": 5.0, "max_util": 10.0, "min_util": 0.0}}'
+
+
+@mock.patch('yardstick.benchmark.scenarios.storage.storagecapacity.ssh')
+class StorageCapacityTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.scn = {
+ "options": {
+ 'test_type': 'disk_size'
+ }
+ }
+ self.ctx = {
+ "host": {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'password': "root"
+ }
+ }
+ self.result = {}
+
+ def test_capacity_successful_setup(self, mock_ssh):
+ c = storagecapacity.StorageCapacity(self.scn, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+ self.assertIsNotNone(c.client)
+ self.assertTrue(c.setup_done)
+
+ def test_capacity_disk_size_successful(self, mock_ssh):
+ c = storagecapacity.StorageCapacity(self.scn, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, DISK_SIZE_SAMPLE_OUTPUT, '')
+ c.run(self.result)
+ expected_result = jsonutils.loads(
+ DISK_SIZE_SAMPLE_OUTPUT)
+ self.assertEqual(self.result, expected_result)
+
+ def test_capacity_block_size_successful(self, mock_ssh):
+ args = {
+ "options": {
+ 'test_type': 'block_size'
+ }
+ }
+ c = storagecapacity.StorageCapacity(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, BLOCK_SIZE_SAMPLE_OUTPUT, '')
+ c.run(self.result)
+ expected_result = jsonutils.loads(
+ BLOCK_SIZE_SAMPLE_OUTPUT)
+ self.assertEqual(self.result, expected_result)
+
+ def test_capacity_disk_utilization_successful(self, mock_ssh):
+ args = {
+ "options": {
+ 'test_type': 'disk_utilization',
+ 'interval': 1,
+ 'count': 2
+ }
+ }
+ c = storagecapacity.StorageCapacity(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, DISK_UTIL_RAW_OUTPUT, '')
+ c.run(self.result)
+ expected_result = jsonutils.loads(
+ DISK_UTIL_SAMPLE_OUTPUT)
+ self.assertEqual(self.result, expected_result)
+
+ def test_capacity_unsuccessful_script_error(self, mock_ssh):
+ c = storagecapacity.StorageCapacity(self.scn, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, c.run, self.result)
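
For the disk_utilization case, DISK_UTIL_SAMPLE_OUTPUT follows directly from
the two raw samples in DISK_UTIL_RAW_OUTPUT (vda at 10.00 and 0.00): average
5.0, maximum 10.0, minimum 0.0. The actual aggregation is implemented in
yardstick.benchmark.scenarios.storage.storagecapacity; the sketch below only
reconstructs the arithmetic the expected result relies on.

    def aggregate_disk_util(raw):
        # raw looks like "vda 10.00\nvda 0.00": one "<device> <util>" pair per line.
        samples = {}
        for line in raw.splitlines():
            device, util = line.split()
            samples.setdefault(device, []).append(float(util))
        return {dev: {"avg_util": sum(vals) / len(vals),
                      "max_util": max(vals),
                      "min_util": min(vals)}
                for dev, vals in samples.items()}


    # aggregate_disk_util("vda 10.00\nvda 0.00")
    # -> {"vda": {"avg_util": 5.0, "max_util": 10.0, "min_util": 0.0}}
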
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/test_storperf.py b/yardstick/tests/unit/benchmark/scenarios/storage/test_storperf.py
new file mode 100644
index 000000000..2ba53cb93
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/test_storperf.py
@@ -0,0 +1,513 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.storage.storperf.StorPerf
+
+from __future__ import absolute_import
+
+import json
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+import requests
+
+from yardstick.benchmark.scenarios.storage import storperf
+
+
+# pylint: disable=unused-argument
+# mock.patch injects arguments in reverse decorator order; some go unused.
+def mocked_requests_config_post(*args, **kwargs):
+ class MockResponseConfigPost(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseConfigPost(
+ '{"stack_id": "dac27db1-3502-4300-b301-91c64e6a1622",'
+ '"stack_created": false}',
+ 200)
+
+
+def mocked_requests_config_post_fail(*args, **kwargs):
+ class MockResponseConfigPost(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseConfigPost(
+ '{"message": "ERROR: Parameter \'public_network\' is invalid: ' +
+ 'Error validating value \'foo\': Unable to find network with ' +
+ 'name or id \'foo\'"}',
+ 400)
+
+
+def mocked_requests_config_get(*args, **kwargs):
+ class MockResponseConfigGet(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseConfigGet(
+ '{"stack_id": "dac27db1-3502-4300-b301-91c64e6a1622",'
+ '"stack_created": true}',
+ 200)
+
+
+def mocked_requests_config_get_not_created(*args, **kwargs):
+ class MockResponseConfigGet(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseConfigGet(
+ '{"stack_id": "",'
+ '"stack_created": false}',
+ 200)
+
+
+def mocked_requests_config_get_no_payload(*args, **kwargs):
+ class MockResponseConfigGet(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseConfigGet(
+ '{}',
+ 200)
+
+
+def mocked_requests_initialize_post_fail(*args, **kwargs):
+ class MockResponseJobPost(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseJobPost(
+ '{"message": "ERROR: Stack StorPerfAgentGroup does not exist"}',
+ 400)
+
+
+def mocked_requests_job_get(*args, **kwargs):
+ class MockResponseJobGet(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseJobGet(
+ '{"Status": "Completed",\
+ "_ssd_preconditioning.queue-depth.8.block-size.16384.duration": 6}',
+ 200)
+
+
+def mocked_requests_job_post(*args, **kwargs):
+ class MockResponseJobPost(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseJobPost('{"job_id": \
+ "d46bfb8c-36f4-4a40-813b-c4b4a437f728"}', 200)
+
+
+def mocked_requests_job_post_fail(*args, **kwargs):
+ class MockResponseJobPost(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseJobPost(
+ '{"message": "ERROR: Stack StorPerfAgentGroup does not exist"}',
+ 400)
+
+
+def mocked_requests_job_delete(*args, **kwargs):
+ class MockResponseJobDelete(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseJobDelete('{}', 200)
+
+
+def mocked_requests_delete(*args, **kwargs):
+ class MockResponseDelete(object):
+
+ def __init__(self, json_data, status_code):
+ self.json_data = json_data
+ self.status_code = status_code
+
+ return MockResponseDelete('{}', 200)
+
+
+def mocked_requests_delete_failed(*args, **kwargs):
+ class MockResponseDeleteFailed(object):
+
+ def __init__(self, json_data, status_code):
+ self.json_data = json_data
+ self.status_code = status_code
+
+ return MockResponseDeleteFailed('{"message": "Teardown failed"}', 400)
+
+
+class StorPerfTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ @mock.patch.object(requests, 'post')
+ @mock.patch.object(requests, 'get')
+ def test_setup(self, mock_get, mock_post):
+ mock_post.side_effect = [mocked_requests_config_post(),
+ mocked_requests_job_post()]
+ mock_get.side_effect = [mocked_requests_config_get(),
+ mocked_requests_job_get()]
+
+ options = {
+ "agent_count": 8,
+ "public_network": 'ext-net',
+ "volume_size": 10,
+ "block_sizes": 4096,
+ "queue_depths": 4,
+ "workload": "rs",
+ "StorPerf_ip": "192.168.23.2",
+ "query_interval": 0,
+ "timeout": 60
+ }
+
+ args = {
+ "options": options
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ s.setup()
+
+ self.assertTrue(s.setup_done)
+
+ @mock.patch.object(requests, 'get')
+ def test_query_setup_state_unsuccessful(self, mock_get):
+ mock_get.side_effect = mocked_requests_config_get_not_created
+ args = {
+ "options": {}
+ }
+ s = storperf.StorPerf(args, self.ctx)
+ result = s._query_setup_state()
+ self.assertFalse(result)
+
+ @mock.patch.object(requests, 'get')
+ def test_query_setup_state_no_payload(self, mock_get):
+ mock_get.side_effect = mocked_requests_config_get_no_payload
+ args = {
+ "options": {}
+ }
+ s = storperf.StorPerf(args, self.ctx)
+ result = s._query_setup_state()
+ self.assertFalse(result)
+
+ @mock.patch.object(requests, 'post')
+ @mock.patch.object(requests, 'get')
+ def test_setup_config_post_failed(self, mock_get, mock_post):
+ mock_post.side_effect = mocked_requests_config_post_fail
+
+ args = {
+ "options": {
+ "public_network": "foo"
+ }
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ self.assertRaises(RuntimeError, s.setup)
+
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ def test_run_v1_successful(self, mock_post, mock_get):
+ mock_post.side_effect = mocked_requests_job_post
+ mock_get.side_effect = mocked_requests_job_get
+
+ options = {
+ "agent_count": 8,
+ "public_network": 'ext-net',
+ "volume_size": 10,
+ "block_sizes": 4096,
+ "queue_depths": 4,
+ "workload": "rs",
+ "StorPerf_ip": "192.168.23.2",
+ "query_interval": 0,
+ "timeout": 60
+ }
+ expected_post = {
+ 'metadata': {
+ 'build_tag': 'latest',
+ 'test_case': 'opnfv_yardstick_tc074'
+ },
+ 'deadline': 60,
+ 'block_sizes': 4096,
+ 'queue_depths': 4,
+ "workload": "rs",
+ 'agent_count': 8
+ }
+
+ args = {
+ "options": options
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+ s.setup_done = True
+
+ sample_output = '{"Status": "Completed",\
+ "_ssd_preconditioning.queue-depth.8.block-size.16384.duration": 6}'
+
+ expected_result = jsonutils.loads(sample_output)
+
+ s.run(self.result)
+
+ mock_post.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v1.0/jobs',
+ json=jsonutils.loads(json.dumps(expected_post)))
+
+ self.assertEqual(self.result, expected_result)
+
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ def test_run_v2_successful(self, mock_post, mock_get):
+ mock_post.side_effect = mocked_requests_job_post
+ mock_get.side_effect = mocked_requests_job_get
+
+ options = {
+ "agent_count": 8,
+ "public_network": 'ext-net',
+ "volume_size": 10,
+ "block_sizes": 4096,
+ "queue_depths": 4,
+ "workloads": {
+ "read_sequential": {
+ "rw": "rs"
+ }
+ },
+ "StorPerf_ip": "192.168.23.2",
+ "query_interval": 0,
+ "timeout": 60
+ }
+ expected_post = {
+ 'metadata': {
+ 'build_tag': 'latest',
+ 'test_case': 'opnfv_yardstick_tc074'
+ },
+ 'deadline': 60,
+ 'block_sizes': 4096,
+ 'queue_depths': 4,
+ 'workloads': {
+ 'read_sequential': {
+ 'rw': 'rs'
+ }
+ },
+ 'agent_count': 8
+ }
+
+ args = {
+ "options": options
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+ s.setup_done = True
+
+ sample_output = '{"Status": "Completed",\
+ "_ssd_preconditioning.queue-depth.8.block-size.16384.duration": 6}'
+
+ expected_result = jsonutils.loads(sample_output)
+
+ s.run(self.result)
+ mock_post.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v2.0/jobs',
+ json=expected_post)
+
+ self.assertEqual(self.result, expected_result)
+
+ @mock.patch('time.sleep')
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ def test_run_failed(self, mock_post, mock_get, _):
+ mock_post.side_effect = mocked_requests_job_post_fail
+ mock_get.side_effect = mocked_requests_job_get
+
+ options = {
+ "agent_count": 8,
+ "public_network": 'ext-net',
+ "volume_size": 10,
+ "block_sizes": 4096,
+ "queue_depths": 4,
+ "workloads": {
+ "read_sequential": {
+ "rw": "rs"
+ }
+ },
+ "StorPerf_ip": "192.168.23.2",
+ "query_interval": 0,
+ "timeout": 60
+ }
+ expected_post = {
+ 'metadata': {
+ 'build_tag': 'latest',
+ 'test_case': 'opnfv_yardstick_tc074'
+ },
+ 'deadline': 60,
+ 'block_sizes': 4096,
+ 'queue_depths': 4,
+ 'workloads': {
+ 'read_sequential': {
+ 'rw': 'rs'
+ }
+ },
+ 'agent_count': 8
+ }
+
+ args = {
+ "options": options
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+ s.setup_done = True
+
+        self.assertRaises(RuntimeError, s.run, self.result)
+ mock_post.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v2.0/jobs',
+ json=expected_post)
+
+ @mock.patch('time.sleep')
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ @mock.patch.object(storperf.StorPerf, 'setup')
+ def test_run_calls_setup(self, mock_setup, mock_post, mock_get, _):
+ mock_post.side_effect = mocked_requests_job_post
+ mock_get.side_effect = mocked_requests_job_get
+
+ args = {
+ "options": {
+ 'timeout': 60,
+ }
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ s.run(self.result)
+
+ mock_setup.assert_called_once()
+
+ @mock.patch('time.sleep')
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ def test_initialize_disks(self, mock_post, mock_get, _):
+ mock_post.side_effect = mocked_requests_job_post
+ mock_get.side_effect = mocked_requests_job_get
+
+ args = {
+ "options": {
+ "StorPerf_ip": "192.168.23.2"
+ }
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ s.initialize_disks()
+
+ mock_post.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v1.0/initializations',
+ json={})
+
+ @mock.patch('time.sleep')
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ def test_initialize_disks_post_failed(self, mock_post, mock_get, _):
+ mock_post.side_effect = mocked_requests_initialize_post_fail
+ mock_get.side_effect = mocked_requests_job_get
+
+ args = {
+ "options": {
+ "StorPerf_ip": "192.168.23.2"
+ }
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ self.assertRaises(RuntimeError, s.initialize_disks)
+ mock_post.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v1.0/initializations',
+ json={})
+
+ @mock.patch.object(requests, 'delete')
+ def test_teardown(self, mock_delete):
+ mock_delete.side_effect = mocked_requests_job_delete
+ options = {
+ "agent_count": 8,
+ "public_network": 'ext-net',
+ "volume_size": 10,
+ "block_sizes": 4096,
+ "queue_depths": 4,
+ "workload": "rs",
+ "StorPerf_ip": "192.168.23.2",
+ "query_interval": 10,
+ "timeout": 60
+ }
+
+ args = {
+ "options": options
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ s.teardown()
+
+ self.assertFalse(s.setup_done)
+ mock_delete.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v1.0/configurations')
+
+ @mock.patch.object(requests, 'delete')
+ def test_teardown_request_delete_failed(self, mock_delete):
+ mock_delete.side_effect = mocked_requests_delete_failed
+ options = {
+ "agent_count": 8,
+ "public_network": 'ext-net',
+ "volume_size": 10,
+ "block_sizes": 4096,
+ "queue_depths": 4,
+ "workload": "rs",
+ "StorPerf_ip": "192.168.23.2",
+ "query_interval": 10,
+ "timeout": 60
+ }
+
+ args = {
+ "options": options
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ self.assertRaises(RuntimeError, s.teardown)
+ mock_delete.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v1.0/configurations')
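
Two forms of side_effect appear in the StorPerf tests above: a callable (most
tests), which is invoked on every request, and a list (test_setup), where each
successive call consumes the next pre-built response. A minimal illustration of
the difference; the make_response helper and the URLs are illustrative and not
part of yardstick.

    import mock


    def make_response(content, status_code):
        # Shaped like the MockResponse* classes above.
        resp = mock.Mock()
        resp.content = content
        resp.status_code = status_code
        return resp


    post = mock.Mock()

    # Callable side_effect: the factory runs on every call.
    post.side_effect = lambda *args, **kwargs: make_response('{"job_id": "x"}', 200)
    assert post('http://storperf/api/v1.0/jobs').status_code == 200
    assert post('http://storperf/api/v1.0/jobs').status_code == 200

    # List side_effect: each call returns the next item, as in test_setup above.
    post.side_effect = [make_response('{"stack_created": false}', 200),
                        make_response('{"job_id": "x"}', 200)]
    assert post('first call').status_code == 200
    assert post('second call').status_code == 200
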
diff --git a/yardstick/tests/unit/benchmark/scenarios/test_base.py b/yardstick/tests/unit/benchmark/scenarios/test_base.py
new file mode 100644
index 000000000..284a71cc8
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/test_base.py
@@ -0,0 +1,135 @@
+# Copyright 2017: Intel Ltd.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import time
+
+import mock
+
+from yardstick.benchmark.scenarios import base
+from yardstick.tests.unit import base as ut_base
+
+
+class _TestScenario(base.Scenario):
+ __scenario_type__ = 'Test Scenario'
+
+ def run(self):
+ pass
+
+
+class ScenarioTestCase(ut_base.BaseUnitTestCase):
+
+ def test_get_scenario_type(self):
+ scenario_type = 'dummy scenario'
+
+ class DummyScenario(base.Scenario):
+ __scenario_type__ = scenario_type
+
+ self.assertEqual(scenario_type, DummyScenario.get_scenario_type())
+
+ def test_get_scenario_type_not_defined(self):
+ class DummyScenario(base.Scenario):
+ pass
+
+ self.assertEqual(str(None), DummyScenario.get_scenario_type())
+
+ def test_get_description(self):
+ docstring = """First line
+ Second line
+ Third line
+ """
+
+ class DummyScenario(base.Scenario):
+ __doc__ = docstring
+
+ self.assertEqual(docstring.splitlines()[0],
+ DummyScenario.get_description())
+
+ def test_get_description_empty(self):
+ class DummyScenario(base.Scenario):
+ pass
+
+ self.assertEqual(str(None), DummyScenario.get_description())
+
+ def test_get_types(self):
+ scenario_names = set(
+ scenario.__scenario_type__ for scenario in
+ base.Scenario.get_types() if hasattr(scenario,
+ '__scenario_type__'))
+ existing_scenario_class_names = {
+ 'Iperf3', 'CACHEstat', 'SpecCPU2006', 'Dummy', 'NSPerf', 'Parser'}
+ self.assertTrue(existing_scenario_class_names.issubset(scenario_names))
+
+ def test_get_cls_existing_scenario(self):
+ scenario_name = 'NSPerf'
+ scenario = base.Scenario.get_cls(scenario_name)
+ self.assertEqual(scenario_name, scenario.__scenario_type__)
+
+ def test_get_cls_non_existing_scenario(self):
+ wrong_scenario_name = 'Non-existing-scenario'
+ with self.assertRaises(RuntimeError) as exc:
+ base.Scenario.get_cls(wrong_scenario_name)
+ self.assertEqual('No such scenario type %s' % wrong_scenario_name,
+ str(exc.exception))
+
+ def test_get_existing_scenario(self):
+ scenario_name = 'NSPerf'
+ scenario_module = ('yardstick.benchmark.scenarios.networking.'
+ 'vnf_generic.NetworkServiceTestCase')
+ self.assertEqual(scenario_module, base.Scenario.get(scenario_name))
+
+ def test_get_non_existing_scenario(self):
+ wrong_scenario_name = 'Non-existing-scenario'
+ with self.assertRaises(RuntimeError) as exc:
+ base.Scenario.get(wrong_scenario_name)
+ self.assertEqual('No such scenario type %s' % wrong_scenario_name,
+ str(exc.exception))
+
+ def test_scenario_abstract_class(self):
+ # pylint: disable=abstract-class-instantiated
+ with self.assertRaises(TypeError):
+ base.Scenario()
+
+ @mock.patch.object(time, 'sleep')
+ def test_pre_run_wait_time(self, mock_sleep):
+ """Ensure default behaviour (backwards compatibility): no wait time"""
+ test_scenario = _TestScenario()
+ test_scenario.pre_run_wait_time(mock.ANY)
+ mock_sleep.assert_not_called()
+
+ @mock.patch.object(time, 'sleep')
+ def test_post_run_wait_time(self, mock_sleep):
+ """Ensure default behaviour (backwards compatibility): wait time"""
+ test_scenario = _TestScenario()
+ test_scenario.post_run_wait_time(100)
+ mock_sleep.assert_called_once_with(100)
+
+
+class IterScenarioClassesTestCase(ut_base.BaseUnitTestCase):
+
+ def test_no_scenario_type_defined(self):
+ some_existing_scenario_class_names = [
+ 'Iperf3', 'CACHEstat', 'SpecCPU2006', 'Dummy', 'NSPerf', 'Parser']
+ scenario_types = [scenario.__scenario_type__ for scenario
+ in base._iter_scenario_classes()]
+ for class_name in some_existing_scenario_class_names:
+ self.assertIn(class_name, scenario_types)
+
+ def test_scenario_type_defined(self):
+ some_existing_scenario_class_names = [
+ 'Iperf3', 'CACHEstat', 'SpecCPU2006', 'Dummy', 'NSPerf', 'Parser']
+ for class_name in some_existing_scenario_class_names:
+ scenario_class = next(base._iter_scenario_classes(
+ scenario_type=class_name))
+ self.assertEqual(class_name, scenario_class.__scenario_type__)