Diffstat (limited to 'tests')
-rw-r--r--  tests/opnfv/test_cases/opnfv_yardstick_tc004.yaml                       85
-rw-r--r--  tests/opnfv/test_cases/opnfv_yardstick_tc045.yaml                       43
-rw-r--r--  tests/opnfv/test_suites/fuel_test_suite.yaml                            12
-rw-r--r--  tests/sfc/sfc_TC02.yaml                                                 36
-rw-r--r--  tests/unit/benchmark/scenarios/availability/test_baseoperation.py       82
-rw-r--r--  tests/unit/benchmark/scenarios/availability/test_operation_general.py   67
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_sfc.py                   18
7 files changed, 301 insertions, 42 deletions
diff --git a/tests/opnfv/test_cases/opnfv_yardstick_tc004.yaml b/tests/opnfv/test_cases/opnfv_yardstick_tc004.yaml
new file mode 100644
index 000000000..2d10e4073
--- /dev/null
+++ b/tests/opnfv/test_cases/opnfv_yardstick_tc004.yaml
@@ -0,0 +1,85 @@
+---
+# Yardstick TC004 config file
+# Measure cache hit/miss ratio and usage, network throughput and latency.
+# Different amounts of flows are tested, from 2 up to 1001000.
+# All tests are run 2 times each: first 2 times with the smallest
+# amount of ports, then 2 times with the next amount of ports,
+# and so on until all port amounts have been tested.
+#
+# During the measurements, cache hit/miss ratio and cache usage statistics are
+# recorded with cachestat, while network latency is measured with ping.
+
+schema: "yardstick:task:0.1"
+
+scenarios:
+-
+ type: CACHEstat
+ run_in_background: true
+
+ options:
+ interval: 1
+
+ host: demeter.yardstick
+-
+ type: CACHEstat
+ run_in_background: true
+
+ options:
+ interval: 1
+
+ host: poseidon.yardstick
+-
+ type: Ping
+ run_in_background: true
+
+ options:
+ packetsize: 100
+
+ host: demeter.yardstick
+ target: poseidon.yardstick
+
+ sla:
+ max_rtt: 10
+ action: monitor
+{% for num_ports in [1, 10, 50, 100, 300, 500, 750, 1000] %}
+-
+ type: Pktgen
+ options:
+ packetsize: 64
+ number_of_ports: {{num_ports}}
+ duration: 20
+
+ host: demeter.yardstick
+ target: poseidon.yardstick
+
+ runner:
+ type: Iteration
+ iterations: 2
+ interval: 1
+
+ sla:
+ max_ppm: 1000
+ action: monitor
+{% endfor %}
+
+context:
+ name: yardstick
+ image: yardstick-trusty-server
+ flavor: yardstick-flavor
+ user: ubuntu
+
+ placement_groups:
+ pgrp1:
+ policy: "availability"
+
+ servers:
+ demeter:
+ floating_ip: true
+ placement: "pgrp1"
+ poseidon:
+ floating_ip: true
+ placement: "pgrp1"
+
+ networks:
+ test:
+ cidr: '10.0.1.0/24'
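The {% for %} / {% endfor %} block in the task file above is plain Jinja2 that is rendered before the YAML is parsed. As a minimal standalone sketch (not part of this commit; assumes the jinja2 and PyYAML packages and trims the task down to the Pktgen part), the loop expands into one Pktgen scenario per port count:

# Sketch only: render the Jinja2 loop the same way the task file above does,
# then parse the result as YAML and inspect the generated scenarios.
from jinja2 import Template
import yaml

SNIPPET = """
scenarios:
{% for num_ports in [1, 10, 50, 100, 300, 500, 750, 1000] %}
-
  type: Pktgen
  options:
    packetsize: 64
    number_of_ports: {{num_ports}}
    duration: 20
{% endfor %}
"""

task = yaml.safe_load(Template(SNIPPET).render())
print(len(task["scenarios"]))                               # 8 Pktgen scenarios
print(task["scenarios"][-1]["options"]["number_of_ports"])  # 1000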
diff --git a/tests/opnfv/test_cases/opnfv_yardstick_tc045.yaml b/tests/opnfv/test_cases/opnfv_yardstick_tc045.yaml
new file mode 100644
index 000000000..812d53dd8
--- /dev/null
+++ b/tests/opnfv/test_cases/opnfv_yardstick_tc045.yaml
@@ -0,0 +1,43 @@
+---
+# Test case for TC045: Control node OpenStack service down - neutron server
+
+schema: "yardstick:task:0.1"
+
+scenarios:
+-
+ type: ServiceHA
+ options:
+ attackers:
+ - fault_type: "kill-process"
+ process_name: "neutron-server"
+ host: node1
+
+ monitors:
+ - monitor_type: "openstack-cmd"
+ command_name: "neutron agent-list"
+ monitor_time: 10
+ sla:
+ max_outage_time: 5
+ - monitor_type: "process"
+ process_name: "neutron-server"
+ host: node1
+ monitor_time: 10
+ sla:
+ max_recover_time: 5
+
+ nodes:
+ node1: node1.LF
+
+ runner:
+ type: Duration
+ duration: 1
+ sla:
+ outage_time: 5
+ action: monitor
+
+
+context:
+ type: Node
+ name: LF
+ file: /root/yardstick/etc/yardstick/nodes/fuel_virtual/pod.yaml
+
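The ServiceHA scenario above pairs a kill-process attacker with an openstack-cmd monitor and a process monitor, each with its own SLA. As a rough standalone illustration of what the process monitor's max_recover_time check amounts to (a sketch under assumed behaviour, run locally with pgrep rather than over SSH; not yardstick's implementation):

# Illustration only: poll for a process by name and measure how long it takes
# to reappear; the SLA in the task above allows at most 5 seconds.
import subprocess
import time

def seconds_until_running(process_name, timeout=10):
    """Return seconds until process_name is seen by pgrep, or None on timeout."""
    start = time.time()
    while time.time() - start < timeout:
        if subprocess.call(["pgrep", "-f", process_name],
                           stdout=subprocess.DEVNULL) == 0:
            return time.time() - start
        time.sleep(1)
    return None

recover_time = seconds_until_running("neutron-server")
print("recover time:", recover_time,
      "SLA met:", recover_time is not None and recover_time <= 5)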
diff --git a/tests/opnfv/test_suites/fuel_test_suite.yaml b/tests/opnfv/test_suites/fuel_test_suite.yaml
new file mode 100644
index 000000000..016bf0953
--- /dev/null
+++ b/tests/opnfv/test_suites/fuel_test_suite.yaml
@@ -0,0 +1,12 @@
+---
+# Fuel integration test task suite
+
+schema: "yardstick:suite:0.1"
+
+name: "fuel_test_suite"
+test_cases_dir: "samples/"
+test_cases:
+-
+ file_name: ping.yaml
+-
+ file_name: iperf3.yaml
diff --git a/tests/sfc/sfc_TC02.yaml b/tests/sfc/sfc_TC02.yaml
deleted file mode 100644
index 85e6eeb52..000000000
--- a/tests/sfc/sfc_TC02.yaml
+++ /dev/null
@@ -1,36 +0,0 @@
-#SFC test case using Tacker as Orchestrator and Netvirt as classifier
-
-schema: "yardstick:task:0.1"
-
-scenarios:
--
- type: sfc
-
- host: http_client.sfc
- target: http_server.sfc
-
- runner:
- type: Iteration
- iterations: 1
- interval: 1
-
-contexts:
--
- name: sfc
- placement_groups:
- pgrp1:
- policy: "availability"
- servers:
- http_client:
- flavor: m1.tiny
- image: cirros-0.3.3
- floating_ip: true
- placement: "pgrp1"
- http_server:
- flavor: sfc_custom
- image: sfc
- floating_ip: true
- placement: "pgrp1"
- networks:
- net_mgmt:
- cidr: '11.0.0.0/24'
diff --git a/tests/unit/benchmark/scenarios/availability/test_baseoperation.py b/tests/unit/benchmark/scenarios/availability/test_baseoperation.py
new file mode 100644
index 000000000..8c341913f
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/availability/test_baseoperation.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.operation.baseoperation
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.operation import baseoperation
+
+@mock.patch('yardstick.benchmark.scenarios.availability.operation.baseoperation.BaseOperation')
+class OperationMgrTestCase(unittest.TestCase):
+
+ def setUp(self):
+ config = {
+ 'operation_type': 'general-operation',
+ 'key' : 'service_status'
+ }
+
+ self.operation_configs = []
+ self.operation_configs.append(config)
+
+ def test_all_successful(self, mock_operation):
+ mgr_ins = baseoperation.OperationMgr()
+ mgr_ins.init_operations(self.operation_configs, None)
+ operation_ins = mgr_ins["service_status"]
+ mgr_ins.rollback()
+
+ def test_getitem_fail(self, mock_operation):
+ mgr_ins = baseoperation.OperationMgr()
+ mgr_ins.init_operations(self.operation_configs, None)
+ with self.assertRaises(KeyError):
+ operation_ins = mgr_ins["operation-not-exist"]
+
+
+class TestOperation(baseoperation.BaseOperation):
+ __operation__type__ = "test-operation"
+
+ def setup(self):
+ pass
+
+ def run(self):
+ pass
+
+ def rollback(self):
+ pass
+
+
+class BaseOperationTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.config = {
+ 'operation_type': 'general-operation',
+ 'key' : 'service_status'
+ }
+
+ def test_all_successful(self):
+ base_ins = baseoperation.BaseOperation(self.config, None)
+ base_ins.setup()
+ base_ins.run()
+ base_ins.rollback()
+
+ def test_get_script_fullpath(self):
+ base_ins = baseoperation.BaseOperation(self.config, None)
+        base_ins.get_script_fullpath("ha_tools/test.bash")
+
+ def test_get_operation_cls_successful(self):
+ base_ins = baseoperation.BaseOperation(self.config, None)
+ operation_ins = base_ins.get_operation_cls("test-operation")
+
+ def test_get_operation_cls_fail(self):
+ base_ins = baseoperation.BaseOperation(self.config, None)
+ with self.assertRaises(RuntimeError):
+ operation_ins = base_ins.get_operation_cls("operation-not-exist")
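The TestOperation helper above exists so that test_get_operation_cls_successful can look a class up by its __operation__type__ string. The pattern under test is a plain subclass scan; a self-contained sketch of the same idea (illustrative only, not the actual baseoperation code):

# Sketch of the lookup pattern the tests exercise: subclasses register by
# setting __operation__type__ and are found by scanning BaseOperation's
# subclasses; an unknown type raises RuntimeError, as
# test_get_operation_cls_fail expects.
class BaseOperation(object):
    __operation__type__ = None

    @staticmethod
    def get_operation_cls(operation_type):
        for cls in BaseOperation.__subclasses__():
            if cls.__operation__type__ == operation_type:
                return cls
        raise RuntimeError("No operation of type: %s" % operation_type)

class MyOperation(BaseOperation):
    __operation__type__ = "test-operation"

assert BaseOperation.get_operation_cls("test-operation") is MyOperation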
diff --git a/tests/unit/benchmark/scenarios/availability/test_operation_general.py b/tests/unit/benchmark/scenarios/availability/test_operation_general.py
new file mode 100644
index 000000000..6713733a8
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/availability/test_operation_general.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.operation
+# .operation_general
+
+import mock
+import unittest
+from yardstick.benchmark.scenarios.availability.operation import operation_general
+
+@mock.patch('yardstick.benchmark.scenarios.availability.operation.'
+ 'operation_general.ssh')
+@mock.patch('yardstick.benchmark.scenarios.availability.operation.'
+ 'operation_general.open')
+class GeneralOperaionTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.operation_cfg = {
+ 'operation_type': 'general-operation',
+ 'action_parameter': {'ins_cup': 2},
+ 'rollback_parameter': {'ins_id': 'id123456'},
+ 'key': 'nova-create-instance',
+ 'host': 'node1',
+ }
+ self.operation_cfg_noparam = {
+ 'operation_type': 'general-operation',
+ 'key': 'nova-create-instance',
+ 'host': 'node1',
+ }
+
+ def test__operation_successful(self, mock_open, mock_ssh):
+ ins = operation_general.GeneralOperaion(self.operation_cfg,
+                                                self.context)
+ mock_ssh.SSH().execute.return_value = (0, "success", '')
+ ins.setup()
+ ins.run()
+ ins.rollback()
+
+ def test__operation_successful_noparam(self, mock_open, mock_ssh):
+ ins = operation_general.GeneralOperaion(self.operation_cfg_noparam,
+                                                self.context)
+ mock_ssh.SSH().execute.return_value = (0, "success", '')
+ ins.setup()
+ ins.run()
+ ins.rollback()
+
+ def test__operation_fail(self, mock_open, mock_ssh):
+ ins = operation_general.GeneralOperaion(self.operation_cfg,
+                                                self.context)
+ mock_ssh.SSH().execute.return_value = (1, "failed", '')
+ ins.setup()
+ ins.run()
+ ins.rollback()
diff --git a/tests/unit/benchmark/scenarios/networking/test_sfc.py b/tests/unit/benchmark/scenarios/networking/test_sfc.py
index adce0824a..2d7990e59 100644
--- a/tests/unit/benchmark/scenarios/networking/test_sfc.py
+++ b/tests/unit/benchmark/scenarios/networking/test_sfc.py
@@ -26,26 +26,32 @@ class SfcTestCase(unittest.TestCase):
# Used in Sfc.setup()
context_cfg['target'] = dict()
context_cfg['target']['user'] = 'root'
- context_cfg['target']['password'] = 'octopus'
- context_cfg['target']['ip'] = None
+ context_cfg['target']['password'] = 'opnfv'
+ context_cfg['target']['ip'] = '127.0.0.1'
# Used in Sfc.run()
context_cfg['host'] = dict()
- context_cfg['host']['user'] = 'cirros'
- context_cfg['host']['password'] = 'cubslose:)'
+ context_cfg['host']['user'] = 'root'
+ context_cfg['host']['password'] = 'opnfv'
context_cfg['host']['ip'] = None
context_cfg['target'] = dict()
- context_cfg['target']['ip'] = None
+ context_cfg['target']['ip'] = '127.0.0.1'
self.sfc = sfc.Sfc(scenario_cfg=scenario_cfg, context_cfg=context_cfg)
@mock.patch('yardstick.benchmark.scenarios.networking.sfc.ssh')
- def test_run_for_success(self, mock_ssh):
+ @mock.patch('yardstick.benchmark.scenarios.networking.sfc.sfc_openstack')
+ @mock.patch('yardstick.benchmark.scenarios.networking.sfc.subprocess')
+ def test_run_for_success(self, mock_subprocess, mock_openstack, mock_ssh):
# Mock a successfull SSH in Sfc.setup() and Sfc.run()
mock_ssh.SSH().execute.return_value = (0, '100', '')
+ mock_openstack.return_value = "127.0.0.1"
+ mock_subprocess.return_value = 'mocked!'
result = {}
+ self.sfc.setup()
self.sfc.run(result)
+ self.sfc.teardown()
def main():
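A note on the stacked decorators added to test_run_for_success above: with mock.patch, the decorator closest to the function supplies the first mock argument, which is why the new signature reads (self, mock_subprocess, mock_openstack, mock_ssh). A minimal standalone example of that ordering (illustrative only, patching arbitrary os functions):

# Illustration only: the innermost @mock.patch becomes the first argument.
import os
import mock

@mock.patch('os.remove')   # outermost  -> last mock argument
@mock.patch('os.rename')   # innermost  -> first mock argument
def demo(mock_rename, mock_remove):
    # Inside the function the patched names point at the mocks.
    assert os.rename is mock_rename
    assert os.remove is mock_remove

demo()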