Diffstat (limited to 'tests')
9 files changed, 709 insertions, 0 deletions
diff --git a/tests/opnfv/test_cases/opnfv_yardstick_tc004.yaml b/tests/opnfv/test_cases/opnfv_yardstick_tc004.yaml
new file mode 100644
index 000000000..2d10e4073
--- /dev/null
+++ b/tests/opnfv/test_cases/opnfv_yardstick_tc004.yaml
@@ -0,0 +1,85 @@
+---
+# Yardstick TC004 config file
+# Measure cache hit/miss ratio and usage, network throughput and latency.
+# Different numbers of flows are tested, from 2 up to 1001000.
+# All tests are run 2 times each: first 2 times with the smallest
+# number of ports, then 2 times with the next number of ports,
+# and so on until all port counts have been run.
+#
+# During the measurements the cache hit/miss ratio, cache usage statistics
+# and network latency are recorded/measured using cachestat and ping, respectively.
+
+schema: "yardstick:task:0.1"
+
+scenarios:
+-
+  type: CACHEstat
+  run_in_background: true
+
+  options:
+    interval: 1
+
+  host: demeter.yardstick
+-
+  type: CACHEstat
+  run_in_background: true
+
+  options:
+    interval: 1
+
+  host: poseidon.yardstick
+-
+  type: Ping
+  run_in_background: true
+
+  options:
+    packetsize: 100
+
+  host: demeter.yardstick
+  target: poseidon.yardstick
+
+  sla:
+    max_rtt: 10
+    action: monitor
+{% for num_ports in [1, 10, 50, 100, 300, 500, 750, 1000] %}
+-
+  type: Pktgen
+  options:
+    packetsize: 64
+    number_of_ports: {{num_ports}}
+    duration: 20
+
+  host: demeter.yardstick
+  target: poseidon.yardstick
+
+  runner:
+    type: Iteration
+    iterations: 2
+    interval: 1
+
+  sla:
+    max_ppm: 1000
+    action: monitor
+{% endfor %}
+
+context:
+  name: yardstick
+  image: yardstick-trusty-server
+  flavor: yardstick-flavor
+  user: ubuntu
+
+  placement_groups:
+    pgrp1:
+      policy: "availability"
+
+  servers:
+    demeter:
+      floating_ip: true
+      placement: "pgrp1"
+    poseidon:
+      floating_ip: true
+      placement: "pgrp1"
+
+  networks:
+    test:
+      cidr: '10.0.1.0/24'
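The {% for %} block above is a Jinja2 directive: Yardstick renders the task file as a template before parsing it as YAML, so the eight num_ports values expand into eight Pktgen scenarios on top of the three run_in_background scenarios. A minimal sketch of that expansion, assuming only that jinja2 and PyYAML are installed and that the file path below exists in the working tree:

    import yaml
    from jinja2 import Template

    # Render the task template exactly as written, then parse the YAML.
    with open("tests/opnfv/test_cases/opnfv_yardstick_tc004.yaml") as f:
        task = yaml.safe_load(Template(f.read()).render())

    scenarios = task["scenarios"]
    print(len(scenarios))                  # 3 background + 8 Pktgen = 11
    print([s["type"] for s in scenarios])  # CACHEstat, CACHEstat, Ping, then Pktgen x8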
diff --git a/tests/opnfv/test_cases/opnfv_yardstick_tc044.yaml b/tests/opnfv/test_cases/opnfv_yardstick_tc044.yaml
new file mode 100644
index 000000000..d7406832d
--- /dev/null
+++ b/tests/opnfv/test_cases/opnfv_yardstick_tc044.yaml
@@ -0,0 +1,87 @@
+---
+# Yardstick TC044 config file
+# Measure memory usage statistics, network throughput, latency and packet loss.
+# Different numbers of flows are tested, from 2 up to 1001000.
+# All tests are run 2 times each: first 2 times with the smallest
+# number of ports, then 2 times with the next number of ports,
+# and so on until all port counts have been run.
+#
+# During the measurements memory usage statistics and network latency are
+# recorded/measured using sar and ping, respectively.
+
+schema: "yardstick:task:0.1"
+
+scenarios:
+-
+  type: MEMORYload
+  run_in_background: true
+
+  options:
+    interval: 1
+    count: 1
+
+  host: demeter.yardstick-TC044
+-
+  type: MEMORYload
+  run_in_background: true
+
+  options:
+    interval: 1
+    count: 1
+
+  host: poseidon.yardstick-TC044
+-
+  type: Ping
+  run_in_background: true
+
+  options:
+    packetsize: 100
+
+  host: demeter.yardstick-TC044
+  target: poseidon.yardstick-TC044
+
+  sla:
+    max_rtt: 10
+    action: monitor
+{% for num_ports in [1, 10, 50, 100, 300, 500, 750, 1000] %}
+-
+  type: Pktgen
+  options:
+    packetsize: 64
+    number_of_ports: {{num_ports}}
+    duration: 20
+
+  host: demeter.yardstick-TC044
+  target: poseidon.yardstick-TC044
+
+  runner:
+    type: Iteration
+    iterations: 2
+    interval: 1
+
+  sla:
+    max_ppm: 1000
+    action: monitor
+{% endfor %}
+
+context:
+  name: yardstick-TC044
+  image: yardstick-trusty-server
+  flavor: yardstick-flavor
+  user: ubuntu
+
+  placement_groups:
+    pgrp1:
+      policy: "availability"
+
+  servers:
+    demeter:
+      floating_ip: true
+      placement: "pgrp1"
+    poseidon:
+      floating_ip: true
+      placement: "pgrp1"
+
+  networks:
+    test:
+      cidr: '10.0.1.0/24'
diff --git a/tests/opnfv/test_cases/opnfv_yardstick_tc045.yaml b/tests/opnfv/test_cases/opnfv_yardstick_tc045.yaml
new file mode 100644
index 000000000..812d53dd8
--- /dev/null
+++ b/tests/opnfv/test_cases/opnfv_yardstick_tc045.yaml
@@ -0,0 +1,43 @@
+---
+# Test case for TC045: control node OpenStack service down - neutron-server
+
+schema: "yardstick:task:0.1"
+
+scenarios:
+-
+  type: ServiceHA
+  options:
+    attackers:
+    - fault_type: "kill-process"
+      process_name: "neutron-server"
+      host: node1
+
+    monitors:
+    - monitor_type: "openstack-cmd"
+      command_name: "neutron agent-list"
+      monitor_time: 10
+      sla:
+        max_outage_time: 5
+    - monitor_type: "process"
+      process_name: "neutron-server"
+      host: node1
+      monitor_time: 10
+      sla:
+        max_recover_time: 5
+
+  nodes:
+    node1: node1.LF
+
+  runner:
+    type: Duration
+    duration: 1
+  sla:
+    outage_time: 5
+    action: monitor
+
+
+context:
+  type: Node
+  name: LF
+  file: /root/yardstick/etc/yardstick/nodes/fuel_virtual/pod.yaml
+
diff --git a/tests/opnfv/test_suites/fuel_test_suite.yaml b/tests/opnfv/test_suites/fuel_test_suite.yaml
new file mode 100644
index 000000000..016bf0953
--- /dev/null
+++ b/tests/opnfv/test_suites/fuel_test_suite.yaml
@@ -0,0 +1,12 @@
+---
+# Fuel integration test task suite
+
+schema: "yardstick:suite:0.1"
+
+name: "fuel_test_suite"
+test_cases_dir: "samples/"
+test_cases:
+-
+  file_name: ping.yaml
+-
+  file_name: iperf3.yaml
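The suite schema above implies that each test_cases entry resolves to a task file under test_cases_dir. A sketch of that resolution, assuming only PyYAML; the actual Yardstick suite loader is not shown here:

    import os
    import yaml

    with open("tests/opnfv/test_suites/fuel_test_suite.yaml") as f:
        suite = yaml.safe_load(f)

    assert suite["schema"] == "yardstick:suite:0.1"
    # Each entry names a task file relative to test_cases_dir.
    tasks = [os.path.join(suite["test_cases_dir"], tc["file_name"])
             for tc in suite["test_cases"]]
    print(tasks)  # ['samples/ping.yaml', 'samples/iperf3.yaml']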
diff --git a/tests/unit/benchmark/scenarios/availability/test_baseoperation.py b/tests/unit/benchmark/scenarios/availability/test_baseoperation.py
new file mode 100644
index 000000000..8c341913f
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/availability/test_baseoperation.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.operation.baseoperation
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.operation import baseoperation
+
+@mock.patch('yardstick.benchmark.scenarios.availability.operation.baseoperation.BaseOperation')
+class OperationMgrTestCase(unittest.TestCase):
+
+    def setUp(self):
+        config = {
+            'operation_type': 'general-operation',
+            'key': 'service_status'
+        }
+
+        self.operation_configs = []
+        self.operation_configs.append(config)
+
+    def test_all_successful(self, mock_operation):
+        mgr_ins = baseoperation.OperationMgr()
+        mgr_ins.init_operations(self.operation_configs, None)
+        operation_ins = mgr_ins["service_status"]
+        mgr_ins.rollback()
+
+    def test_getitem_fail(self, mock_operation):
+        mgr_ins = baseoperation.OperationMgr()
+        mgr_ins.init_operations(self.operation_configs, None)
+        with self.assertRaises(KeyError):
+            operation_ins = mgr_ins["operation-not-exist"]
+
+
+class TestOperation(baseoperation.BaseOperation):
+    __operation__type__ = "test-operation"
+
+    def setup(self):
+        pass
+
+    def run(self):
+        pass
+
+    def rollback(self):
+        pass
+
+
+class BaseOperationTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.config = {
+            'operation_type': 'general-operation',
+            'key': 'service_status'
+        }
+
+    def test_all_successful(self):
+        base_ins = baseoperation.BaseOperation(self.config, None)
+        base_ins.setup()
+        base_ins.run()
+        base_ins.rollback()
+
+    def test_get_script_fullpath(self):
+        base_ins = baseoperation.BaseOperation(self.config, None)
+        base_ins.get_script_fullpath("ha_tools/test.bash")
+
+    def test_get_operation_cls_successful(self):
+        base_ins = baseoperation.BaseOperation(self.config, None)
+        operation_ins = base_ins.get_operation_cls("test-operation")
+
+    def test_get_operation_cls_fail(self):
+        base_ins = baseoperation.BaseOperation(self.config, None)
+        with self.assertRaises(RuntimeError):
+            operation_ins = base_ins.get_operation_cls("operation-not-exist")
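The TestOperation subclass above exists only to register the "test-operation" type so that get_operation_cls() can find it, and the companion failure test expects a RuntimeError for an unknown type. A self-contained sketch of that subclass-lookup pattern (an illustration of what the tests exercise, not the actual baseoperation implementation):

    class BaseOperation(object):
        """Minimal stand-in for the registry pattern the tests rely on."""

        @classmethod
        def get_operation_cls(cls, operation_type):
            # Match a subclass by its declared type attribute.
            for subclass in cls.__subclasses__():
                if getattr(subclass, "__operation__type__", None) == operation_type:
                    return subclass
            raise RuntimeError("No operation found: %s" % operation_type)

    class TestOperation(BaseOperation):
        __operation__type__ = "test-operation"

    assert BaseOperation.get_operation_cls("test-operation") is TestOperation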
diff --git a/tests/unit/benchmark/scenarios/availability/test_baseresultchecker.py b/tests/unit/benchmark/scenarios/availability/test_baseresultchecker.py
new file mode 100644
index 000000000..9972d6b1b
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/availability/test_baseresultchecker.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.result_checker
+# .baseresultchecker
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.result_checker import baseresultchecker
+
+
+@mock.patch('yardstick.benchmark.scenarios.availability.result_checker'
+            '.baseresultchecker.BaseResultChecker')
+class ResultCheckerMgrTestCase(unittest.TestCase):
+
+    def setUp(self):
+        config = {
+            'checker_type': 'general-result-checker',
+            'key': 'process-checker'
+        }
+
+        self.checker_configs = []
+        self.checker_configs.append(config)
+
+    def test_ResultCheckerMgr_setup_successful(self, mock_basechecker):
+        mgr_ins = baseresultchecker.ResultCheckerMgr()
+        mgr_ins.init_ResultChecker(self.checker_configs, None)
+        mgr_ins.verify()
+
+    def test_getitem_successful(self, mock_basechecker):
+        mgr_ins = baseresultchecker.ResultCheckerMgr()
+        mgr_ins.init_ResultChecker(self.checker_configs, None)
+        checker_ins = mgr_ins["process-checker"]
+
+    def test_getitem_fail(self, mock_basechecker):
+        mgr_ins = baseresultchecker.ResultCheckerMgr()
+        mgr_ins.init_ResultChecker(self.checker_configs, None)
+        with self.assertRaises(KeyError):
+            checker_ins = mgr_ins["checker-not-exist"]
+
+
+class BaseResultCheckerTestCase(unittest.TestCase):
+
+    class ResultCheckerSimple(baseresultchecker.BaseResultChecker):
+        __result_checker__type__ = "ResultCheckerForTest"
+
+        def setup(self):
+            self.success = False
+
+        def verify(self):
+            return self.success
+
+    def setUp(self):
+        self.checker_cfg = {
+            'checker_type': 'general-result-checker',
+            'key': 'process-checker'
+        }
+
+    def test_baseresultchecker_setup_verify_successful(self):
+        ins = baseresultchecker.BaseResultChecker(self.checker_cfg, None)
+        ins.setup()
+        ins.verify()
+
+    def test_baseresultchecker_verify_pass(self):
+        ins = baseresultchecker.BaseResultChecker(self.checker_cfg, None)
+        ins.setup()
+        ins.actualResult = True
+        ins.expectedResult = True
+        ins.verify()
+
+    def test_get_script_fullpath(self):
+        ins = baseresultchecker.BaseResultChecker(self.checker_cfg, None)
+        path = ins.get_script_fullpath("test.bash")
+
+    def test_get_resultchecker_cls_successful(self):
+        baseresultchecker.BaseResultChecker.get_resultchecker_cls("ResultCheckerForTest")
+
+    def test_get_resultchecker_cls_fail(self):
+        with self.assertRaises(RuntimeError):
+            baseresultchecker.BaseResultChecker.get_resultchecker_cls("ResultCheckerNotExist")
diff --git a/tests/unit/benchmark/scenarios/availability/test_operation_general.py b/tests/unit/benchmark/scenarios/availability/test_operation_general.py
new file mode 100644
index 000000000..6713733a8
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/availability/test_operation_general.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.operation
+# .operation_general
+# (note: GeneralOperaion below matches the spelling of the class in the
+# module under test)
+
+import mock
+import unittest
+from yardstick.benchmark.scenarios.availability.operation import operation_general
+
+@mock.patch('yardstick.benchmark.scenarios.availability.operation.'
+            'operation_general.ssh')
+@mock.patch('yardstick.benchmark.scenarios.availability.operation.'
+            'operation_general.open')
+class GeneralOperaionTestCase(unittest.TestCase):
+
+    def setUp(self):
+        host = {
+            "ip": "10.20.0.5",
+            "user": "root",
+            "key_filename": "/root/.ssh/id_rsa"
+        }
+        self.context = {"node1": host}
+        self.operation_cfg = {
+            'operation_type': 'general-operation',
+            'action_parameter': {'ins_cup': 2},
+            'rollback_parameter': {'ins_id': 'id123456'},
+            'key': 'nova-create-instance',
+            'host': 'node1',
+        }
+        self.operation_cfg_noparam = {
+            'operation_type': 'general-operation',
+            'key': 'nova-create-instance',
+            'host': 'node1',
+        }
+
+    def test__operation_successful(self, mock_open, mock_ssh):
+        ins = operation_general.GeneralOperaion(self.operation_cfg,
+                                                self.context)
+        mock_ssh.SSH().execute.return_value = (0, "success", '')
+        ins.setup()
+        ins.run()
+        ins.rollback()
+
+    def test__operation_successful_noparam(self, mock_open, mock_ssh):
+        ins = operation_general.GeneralOperaion(self.operation_cfg_noparam,
+                                                self.context)
+        mock_ssh.SSH().execute.return_value = (0, "success", '')
+        ins.setup()
+        ins.run()
+        ins.rollback()
+
+    def test__operation_fail(self, mock_open, mock_ssh):
+        ins = operation_general.GeneralOperaion(self.operation_cfg,
+                                                self.context)
+        mock_ssh.SSH().execute.return_value = (1, "failed", '')
+        ins.setup()
+        ins.run()
+        ins.rollback()
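A detail these SSH-mocking tests depend on, worth making explicit: mock.patch replaces the module-level ssh, and a MagicMock returns the same child mock on every call regardless of arguments, so mock_ssh.SSH() in the test is the very object the code under test obtains from ssh.SSH(...). A standalone sketch of that property:

    import mock

    m = mock.MagicMock()
    # Repeated calls with any arguments return the same child mock...
    assert m.SSH() is m.SSH("host", user="root")
    # ...so configuring execute here affects whatever instance the
    # scenario under test creates for itself.
    m.SSH().execute.return_value = (0, "success", '')
    assert m.SSH("other").execute() == (0, "success", '')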
diff --git a/tests/unit/benchmark/scenarios/availability/test_result_checker_general.py b/tests/unit/benchmark/scenarios/availability/test_result_checker_general.py
new file mode 100644
index 000000000..88a9b9d20
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/availability/test_result_checker_general.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.result_checker
+# .result_checker_general
+
+import mock
+import unittest
+import copy
+
+from yardstick.benchmark.scenarios.availability.result_checker import result_checker_general
+
+
+@mock.patch('yardstick.benchmark.scenarios.availability.result_checker.'
+            'result_checker_general.ssh')
+@mock.patch('yardstick.benchmark.scenarios.availability.result_checker.'
+            'result_checker_general.open')
+class GeneralResultCheckerTestCase(unittest.TestCase):
+
+    def setUp(self):
+        host = {
+            "ip": "10.20.0.5",
+            "user": "root",
+            "key_filename": "/root/.ssh/id_rsa"
+        }
+        self.context = {"node1": host}
+        self.checker_cfg = {
+            'parameter': {'processname': 'process'},
+            'checker_type': 'general-result-checker',
+            'condition': 'eq',
+            'expectedValue': 1,
+            'key': 'process-checker',
+            'host': 'node1'
+        }
+
+    def test__result_checker_eq(self, mock_open, mock_ssh):
+        ins = result_checker_general.GeneralResultChecker(self.checker_cfg,
+                                                          self.context)
+        mock_ssh.SSH().execute.return_value = (0, "1", '')
+        ins.setup()
+        self.assertTrue(ins.verify())
+
+    def test__result_checker_gt(self, mock_open, mock_ssh):
+        config = copy.deepcopy(self.checker_cfg)
+        config['condition'] = 'gt'
+        ins = result_checker_general.GeneralResultChecker(config,
+                                                          self.context)
+        mock_ssh.SSH().execute.return_value = (0, "2", '')
+        ins.setup()
+        self.assertTrue(ins.verify())
+
+    def test__result_checker_gt_eq(self, mock_open, mock_ssh):
+        config = copy.deepcopy(self.checker_cfg)
+        config['condition'] = 'gt_eq'
+        ins = result_checker_general.GeneralResultChecker(config,
+                                                          self.context)
+        mock_ssh.SSH().execute.return_value = (0, "1", '')
+        ins.setup()
+        self.assertTrue(ins.verify())
+
+    def test__result_checker_lt(self, mock_open, mock_ssh):
+        config = copy.deepcopy(self.checker_cfg)
+        config['condition'] = 'lt'
+        ins = result_checker_general.GeneralResultChecker(config,
+                                                          self.context)
+        mock_ssh.SSH().execute.return_value = (0, "0", '')
+        ins.setup()
+        self.assertTrue(ins.verify())
+
+    def test__result_checker_lt_eq(self, mock_open, mock_ssh):
+        config = copy.deepcopy(self.checker_cfg)
+        config['condition'] = 'lt_eq'
+        ins = result_checker_general.GeneralResultChecker(config,
+                                                          self.context)
+        mock_ssh.SSH().execute.return_value = (0, "1", '')
+        ins.setup()
+        self.assertTrue(ins.verify())
+
+    def test__result_checker_in(self, mock_open, mock_ssh):
+        config = copy.deepcopy(self.checker_cfg)
+        config['condition'] = 'in'
+        config['expectedValue'] = "value"
+        ins = result_checker_general.GeneralResultChecker(config,
+                                                          self.context)
+        mock_ssh.SSH().execute.return_value = (0, "value return", '')
+        ins.setup()
+        self.assertTrue(ins.verify())
+
+    def test__result_checker_wrong(self, mock_open, mock_ssh):
+        config = copy.deepcopy(self.checker_cfg)
+        config['condition'] = 'wrong'
+        ins = result_checker_general.GeneralResultChecker(config,
+                                                          self.context)
+        mock_ssh.SSH().execute.return_value = (0, "1", '')
+        ins.setup()
+        self.assertFalse(ins.verify())
+
+    def test__result_checker_fail(self, mock_open, mock_ssh):
+        config = copy.deepcopy(self.checker_cfg)
+        config.pop('parameter')
+        ins = result_checker_general.GeneralResultChecker(config,
+                                                          self.context)
+        mock_ssh.SSH().execute.return_value = (1, "fail", '')
+        ins.setup()
+        ins.verify()
\ No newline at end of file
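The eight tests above pin down the semantics of the checker's 'condition' key. The comparison table they imply looks like the following (a sketch of the behavior the tests assert; the real GeneralResultChecker may implement it differently):

    import operator

    CONDITIONS = {
        "eq": operator.eq,
        "gt": operator.gt,
        "gt_eq": operator.ge,
        "lt": operator.lt,
        "lt_eq": operator.le,
        # 'in' checks that the expected value occurs in the actual output
        "in": lambda actual, expected: expected in actual,
    }

    assert CONDITIONS["gt_eq"](1, 1)
    assert CONDITIONS["in"]("value return", "value")
    assert "wrong" not in CONDITIONS  # an unknown condition cannot verify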
diff --git a/tests/unit/benchmark/scenarios/networking/test_vsperf.py b/tests/unit/benchmark/scenarios/networking/test_vsperf.py
new file mode 100644
index 000000000..cb5c09ab3
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/networking/test_vsperf.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+
+# Copyright 2016 Intel Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Unittest for yardstick.benchmark.scenarios.networking.vsperf.Vsperf
+
+import mock
+import unittest
+import os
+import subprocess
+
+from yardstick.benchmark.scenarios.networking import vsperf
+
+
+@mock.patch('yardstick.benchmark.scenarios.networking.vsperf.subprocess')
+@mock.patch('yardstick.benchmark.scenarios.networking.vsperf.ssh')
+@mock.patch("__builtin__.open", return_value=None)
+class VsperfTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.ctx = {
+            "host": {
+                "ip": "10.229.47.137",
+                "user": "ubuntu",
+                "password": "ubuntu",
+            },
+        }
+        self.args = {
+            'options': {
+                'testname': 'rfc2544_p2p_continuous',
+                'traffic_type': 'continuous',
+                'pkt_sizes': '64',
+                'bidirectional': 'True',
+                'iload': 100,
+                'duration': 29,
+                'trafficgen_port1': 'eth1',
+                'trafficgen_port2': 'eth3',
+                'external_bridge': 'br-ex',
+                'conf-file': 'vsperf-yardstick.conf',
+                'setup-script': 'setup_yardstick.sh',
+            },
+            'sla': {
+                'metrics': 'throughput_rx_fps',
+                'throughput_rx_fps': 500000,
+                'action': 'monitor',
+            }
+        }
+
+    def test_vsperf_setup(self, mock_open, mock_ssh, mock_subprocess):
+        p = vsperf.Vsperf(self.args, self.ctx)
+        mock_ssh.SSH().execute.return_value = (0, '', '')
+        mock_subprocess.call().execute.return_value = None
+
+        p.setup()
+        self.assertIsNotNone(p.client)
+        self.assertEqual(p.setup_done, True)
+
+    def test_vsperf_teardown(self, mock_open, mock_ssh, mock_subprocess):
+        p = vsperf.Vsperf(self.args, self.ctx)
+
+        # setup() specific mocks
+        mock_ssh.SSH().execute.return_value = (0, '', '')
+        mock_subprocess.call().execute.return_value = None
+
+        p.setup()
+        self.assertIsNotNone(p.client)
+        self.assertEqual(p.setup_done, True)
+
+        p.teardown()
+        self.assertEqual(p.setup_done, False)
+
+    def test_vsperf_run_ok(self, mock_open, mock_ssh, mock_subprocess):
+        p = vsperf.Vsperf(self.args, self.ctx)
+
+        # setup() specific mocks
+        mock_subprocess.call().execute.return_value = None
+
+        # run() specific mocks; only the last return_value assignment is in
+        # effect, so every execute() call yields the CSV report below
+        mock_ssh.SSH().execute.return_value = (
+            0, 'throughput_rx_fps\r\n14797660.000\r\n', '')
+
+        result = {}
+        p.run(result)
+
+        self.assertEqual(result['throughput_rx_fps'], '14797660.000')
+
+    def test_vsperf_run_failed_vsperf_execution(self, mock_open, mock_ssh,
+                                                mock_subprocess):
+        p = vsperf.Vsperf(self.args, self.ctx)
+
+        # setup() specific mocks
+        mock_subprocess.call().execute.return_value = None
+
+        # run() specific mocks: the vsperf invocation itself fails
+        mock_ssh.SSH().execute.return_value = (1, '', '')
+
+        result = {}
+        self.assertRaises(RuntimeError, p.run, result)
+
+    def test_vsperf_run_failed_csv_report(self, mock_open, mock_ssh,
+                                          mock_subprocess):
+        p = vsperf.Vsperf(self.args, self.ctx)
+
+        # setup() specific mocks
+        mock_subprocess.call().execute.return_value = None
+
+        # run() specific mocks; a plain return_value cannot model "vsperf
+        # succeeds but the CSV fetch fails", so every execute() call fails
+        # here, which is still enough to trigger the expected RuntimeError
+        mock_ssh.SSH().execute.return_value = (1, '', '')
+
+        result = {}
+        self.assertRaises(RuntimeError, p.run, result)
+
+
+def main():
+    unittest.main()
+
+if __name__ == '__main__':
+    main()
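All of the unit tests added in this change use only unittest and mock, so besides each module's main() hook they can be driven by stdlib test discovery; a sketch, assuming the repository root is on PYTHONPATH and the mocked yardstick modules are importable:

    import unittest

    # Discover and run every test_*.py module added under tests/unit.
    suite = unittest.defaultTestLoader.discover("tests/unit", pattern="test_*.py")
    unittest.TextTestRunner(verbosity=2).run(suite)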