author     Morgan Richomme <morgan.richomme@orange.com>   2016-11-10 14:31:07 +0100
committer  jose.lausuch <jose.lausuch@ericsson.com>        2016-11-10 16:47:02 +0100
commit     0157be0431600dd56aaaa3260b72cc40011f91ac (patch)
tree       7af4c4a508f6c4845dedc96491abaa7bf3e8d2a4 /functest/ci
parent     107e61635c2ab1feb5263380ea63e21cf2e6e65b (diff)
change path for python ci files
JIRA: FUNCTEST-525

Change-Id: I6b09eaec55e42a3ee1474b21bc7ed87a71118e60
Signed-off-by: Morgan Richomme <morgan.richomme@orange.com>
Diffstat (limited to 'functest/ci')
-rw-r--r--  functest/ci/__init__.py            0
-rw-r--r--  functest/ci/check_os.sh           90
-rw-r--r--  functest/ci/config_functest.yaml 198
-rw-r--r--  functest/ci/config_patch.yaml     24
-rw-r--r--  functest/ci/exec_test.sh         222
-rw-r--r--  functest/ci/generate_report.py   152
-rw-r--r--  functest/ci/prepare_env.py       299
-rw-r--r--  functest/ci/run_tests.py         249
-rw-r--r--  functest/ci/testcases.yaml       269
-rw-r--r--  functest/ci/tier_builder.py       90
-rw-r--r--  functest/ci/tier_handler.py      178
11 files changed, 1771 insertions, 0 deletions
diff --git a/functest/ci/__init__.py b/functest/ci/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/functest/ci/__init__.py
diff --git a/functest/ci/check_os.sh b/functest/ci/check_os.sh
new file mode 100644
index 00000000..38fe32f5
--- /dev/null
+++ b/functest/ci/check_os.sh
@@ -0,0 +1,90 @@
+#!/bin/bash
+#
+# Simple script to check the basic OpenStack clients
+#
+# Author:
+# jose.lausuch@ericsson.com
+#
+
+verify_connectivity() {
+ for i in $(seq 0 9); do
+ if echo "test" | nc -v -w 10 $1 $2 &>/dev/null; then
+ return 0
+ fi
+ sleep 1
+ done
+ return 1
+}
+
+
+if [ -z $OS_AUTH_URL ];then
+ echo "ERROR: OS_AUTH_URL environment variable missing... Have you sourced the OpenStack credentials?"
+ exit 1
+fi
+
+
+echo "Checking OpenStack endpoints:"
+publicURL=$OS_AUTH_URL
+publicIP=$(echo $publicURL|sed 's/^.*http\:\/\///'|sed 's/.[^:]*$//')
+publicPort=$(echo $publicURL|sed 's/^.*://'|sed 's/\/.*$//')
+echo ">>Verifying connectivity to the public endpoint $publicIP:$publicPort..."
+verify_connectivity $publicIP $publicPort
+RETVAL=$?
+if [ $RETVAL -ne 0 ]; then
+ echo "ERROR: Cannot talk to the public endpoint $publicIP:$publicPort ."
+ echo "OS_AUTH_URL=$OS_AUTH_URL"
+ exit 1
+fi
+echo " ...OK"
+
+adminURL=$(openstack catalog show identity |grep adminURL|awk '{print $4}')
+adminIP=$(echo $adminURL|sed 's/^.*http\:\/\///'|sed 's/.[^:]*$//')
+adminPort=$(echo $adminURL|sed 's/^.*://'|sed 's/.[^\/]*$//')
+echo ">>Verifying connectivity to the admin endpoint $adminIP:$adminPort..."
+verify_connectivity $adminIP $adminPort
+RETVAL=$?
+if [ $RETVAL -ne 0 ]; then
+ echo "ERROR: Cannot talk to the admin endpoint $adminIP:$adminPort ."
+ echo "$adminURL"
+ exit 1
+fi
+echo " ...OK"
+
+
+echo "Checking OpenStack basic services:"
+commands=('openstack endpoint list' 'nova list' 'neutron net-list' \
+ 'glance image-list' 'cinder list')
+for cmd in "${commands[@]}"
+do
+ service=$(echo $cmd | awk '{print $1}')
+ echo ">>Checking $service service..."
+ $cmd &>/dev/null
+ result=$?
+ if [ $result -ne 0 ];
+ then
+ echo "ERROR: Failed to execute '$cmd'. The $service service does not seem to be working."
+ exit 1
+ else
+ echo " ...OK"
+ fi
+done
+
+echo "OpenStack services are OK."
+
+echo "Checking External network..."
+networks=($(neutron net-list -F id | tail -n +4 | head -n -1 | awk '{print $2}'))
+is_external=False
+for net in "${networks[@]}"
+do
+ is_external=$(neutron net-show $net|grep "router:external"|awk '{print $4}')
+ if [ $is_external == "True" ]; then
+ echo "External network found: $net"
+ break
+ fi
+done
+if [ $is_external == "False" ]; then
+ echo "ERROR: There are no external networks in the deployment."
+ exit 1
+fi
+
+exit 0
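
The sed pipelines above carve the host and port out of OS_AUTH_URL before probing them with nc. Purely as an illustration (not part of this change), the same split can be expressed with the standard library URL parser; the fallback URL below is invented.

    # Illustrative sketch only: derive host/port from OS_AUTH_URL the way
    # check_os.sh does with sed, using the standard URL parser instead.
    import os
    try:
        from urllib.parse import urlparse   # Python 3
    except ImportError:
        from urlparse import urlparse       # Python 2

    auth_url = os.environ.get("OS_AUTH_URL", "http://192.168.0.2:5000/v2.0")
    parsed = urlparse(auth_url)
    public_ip = parsed.hostname
    public_port = parsed.port or (443 if parsed.scheme == "https" else 80)
    print("public endpoint: %s:%s" % (public_ip, public_port))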
diff --git a/functest/ci/config_functest.yaml b/functest/ci/config_functest.yaml
new file mode 100644
index 00000000..cca4959a
--- /dev/null
+++ b/functest/ci/config_functest.yaml
@@ -0,0 +1,198 @@
+general:
+ directories:
+ # Relative to the path where the repo is cloned:
+ dir_vping: functest/opnfv_tests/OpenStack/vPing/
+ dir_odl: functest/opnfv_tests/Controllers/ODL/
+ dir_rally: functest/opnfv_tests/OpenStack/rally/
+ dir_tempest_cases: functest/opnfv_tests/OpenStack/tempest/custom_tests/
+ dir_vIMS: functest/opnfv_tests/vnf/vIMS/
+ dir_onos: functest/opnfv_tests/Controllers/ONOS/Teston/
+ dir_onos_sfc: functest/opnfv_tests/Controllers/ONOS/Sfc/
+
+ # Absolute path
+ dir_repos: /home/opnfv/repos
+ dir_repo_functest: /home/opnfv/repos/functest
+ dir_repo_rally: /home/opnfv/repos/rally
+ dir_repo_tempest: /home/opnfv/repos/tempest
+ dir_repo_releng: /home/opnfv/repos/releng
+ dir_repo_vims_test: /home/opnfv/repos/vims-test
+ dir_repo_bgpvpn: /home/opnfv/repos/bgpvpn
+ dir_repo_onos: /home/opnfv/repos/onos
+ dir_repo_promise: /home/opnfv/repos/promise
+ dir_repo_doctor: /home/opnfv/repos/doctor
+ dir_repo_copper: /home/opnfv/repos/copper
+ dir_repo_ovno: /home/opnfv/repos/ovno
+ dir_repo_parser: /home/opnfv/repos/parser
+ dir_repo_domino: /home/opnfv/repos/domino
+ dir_functest: /home/opnfv/functest
+ dir_results: /home/opnfv/functest/results
+ dir_functest_conf: /home/opnfv/functest/conf
+ dir_rally_res: /home/opnfv/functest/results/rally/
+ dir_functest_data: /home/opnfv/functest/data
+ dir_vIMS_data: /home/opnfv/functest/data/vIMS
+ dir_rally_inst: /home/opnfv/.rally
+
+ openstack:
+ snapshot_file: /home/opnfv/functest/conf/openstack_snapshot.yaml
+
+ image_name: Cirros-0.3.4
+ image_file_name: cirros-0.3.4-x86_64-disk.img
+ image_disk_format: qcow2
+
+ flavor_name: opnfv_flavor
+ flavor_ram: 512
+ flavor_disk: 1
+ flavor_vcpus: 1
+
+ # Private network for functest. Will be created by config_functest.py
+ neutron_private_net_name: functest-net
+ neutron_private_subnet_name: functest-subnet
+ neutron_private_subnet_cidr: 192.168.120.0/24
+ neutron_private_subnet_start: 192.168.120.2
+ neutron_private_subnet_end: 192.168.120.254
+ neutron_private_subnet_gateway: 192.168.120.254
+ neutron_router_name: functest-router
+
+healthcheck:
+ disk_image: /home/opnfv/functest/data/cirros-0.3.4-x86_64-disk.img
+ disk_format: qcow2
+
+vping:
+ ping_timeout: 200
+ vm_flavor: m1.tiny # adapt to your environment
+ vm_name_1: opnfv-vping-1
+ vm_name_2: opnfv-vping-2
+ image_name: functest-vping
+ vping_private_net_name: vping-net
+ vping_private_subnet_name: vping-subnet
+ vping_private_subnet_cidr: 192.168.130.0/24
+ vping_router_name: vping-router
+ vping_sg_name: vPing-sg
+ vping_sg_descr: Security group for vPing test case
+
+onos_sfc:
+ image_name: TestSfcVm
+ image_file_name: firewall_block_image.img
+
+tempest:
+ identity:
+ tenant_name: tempest
+ tenant_description: Tenant for Tempest test suite
+ user_name: tempest
+ user_password: tempest
+ validation:
+ ssh_timeout: 130
+ private_net_name: tempest-net
+ private_subnet_name: tempest-subnet
+ private_subnet_cidr: 192.168.150.0/24
+ router_name: tempest-router
+ use_custom_images: False
+ use_custom_flavors: False
+
+rally:
+ deployment_name: opnfv-rally
+ network_name: rally-net
+ subnet_name: rally-subnet
+ subnet_cidr: 192.168.140.0/24
+ router_name: rally-router
+
+vIMS:
+ general:
+ tenant_name: vIMS
+ tenant_description: vIMS Functionality Testing
+ images:
+ ubuntu:
+ image_url: 'http://cloud-images.ubuntu.com/trusty/current/trusty-server-cloudimg-amd64-disk1.img'
+ image_name: ubuntu_14.04
+ centos:
+ image_url: 'http://cloud.centos.org/centos/7/images/CentOS-7-x86_64-GenericCloud-1510.qcow2'
+ image_name: centos_7
+ cloudify:
+ blueprint:
+ url: https://github.com/boucherv-orange/cloudify-manager-blueprints.git
+ branch: "3.3.1-build"
+ requierments:
+ ram_min: 3000
+ os_image: centos_7
+ inputs:
+ keystone_username: ""
+ keystone_password: ""
+ keystone_tenant_name: ""
+ keystone_url: ""
+ manager_public_key_name: 'manager-kp'
+ agent_public_key_name: 'agent-kp'
+ image_id: ""
+ flavor_id: "3"
+ external_network_name: ""
+ ssh_user: centos
+ agents_user: ubuntu
+ clearwater:
+ blueprint:
+ file_name: 'openstack-blueprint.yaml'
+ name: "clearwater-opnfv"
+ destination_folder: "opnfv-cloudify-clearwater"
+ url: 'https://github.com/Orange-OpenSource/opnfv-cloudify-clearwater.git'
+ branch: "stable"
+ deployment-name: 'clearwater-opnfv'
+ requierments:
+ ram_min: 1700
+ os_image: ubuntu_14.04
+ inputs:
+ image_id: ''
+ flavor_id: ''
+ agent_user: 'ubuntu'
+ external_network_name: ''
+ public_domain: clearwater.opnfv
+ONOS:
+ general:
+ onosbench_username: 'root'
+ onosbench_password: 'root'
+ onoscli_username: 'root'
+ onoscli_password: 'root'
+ runtimeout: 300
+ environment:
+ OCT: '10.20.0.1'
+ OC1: '10.20.0.7'
+ OC2: '10.20.0.7'
+ OC3: '10.20.0.7'
+ OCN: '10.20.0.4'
+ OCN2: '10.20.0.5'
+ installer_master: '10.20.0.2'
+ installer_master_username: 'root'
+ installer_master_password: 'r00tme'
+multisite:
+ fuel_environment:
+ installer_username: 'root'
+ installer_password: 'r00tme'
+ compass_environment:
+ installer_username: 'root'
+ installer_password: 'root'
+ multisite_controller_ip: '10.1.0.50'
+promise:
+ tenant_name: promise
+ tenant_description: promise Functionality Testing
+ user_name: promiser
+ user_pwd: test
+ image_name: promise-img
+ flavor_name: promise-flavor
+ flavor_vcpus: 1
+ flavor_ram: 128
+ flavor_disk: 0
+ network_name: promise-net
+ subnet_name: promise-subnet
+ subnet_cidr: 192.168.121.0/24
+ router_name: promise-router
+
+example:
+ example_vm_name: example-vm
+ example_flavor: m1.small
+ example_image_name: functest-example-vm
+ example_private_net_name: example-net
+ example_private_subnet_name: example-subnet
+ example_private_subnet_cidr: 192.168.170.0/24
+ example_router_name: example-router
+ example_sg_name: example-sg
+ example_sg_descr: Example Security group
+
+results:
+ test_db_url: http://testresults.opnfv.org/test/api/v1
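
Scripts elsewhere in this change read these settings through helpers such as ft_utils.get_functest_config('general.directories.dir_functest_conf'). That helper is not part of this diff; the snippet below is only a rough sketch of such a dotted-path lookup, assuming the file is reachable through the CONFIG_FUNCTEST_YAML environment variable that prepare_env.py relies on.

    import os
    import yaml

    # Illustrative stand-in for ft_utils.get_functest_config(): walk a dotted
    # key through the parsed YAML. Not the real helper, just the idea.
    def get_config(dotted_key):
        with open(os.environ["CONFIG_FUNCTEST_YAML"]) as f:
            node = yaml.safe_load(f)
        for part in dotted_key.split("."):
            node = node[part]   # KeyError if the key does not exist
        return node

    # e.g. get_config("general.openstack.image_name") == "Cirros-0.3.4"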
diff --git a/functest/ci/config_patch.yaml b/functest/ci/config_patch.yaml
new file mode 100644
index 00000000..46064a07
--- /dev/null
+++ b/functest/ci/config_patch.yaml
@@ -0,0 +1,24 @@
+lxd:
+ general:
+ openstack:
+ image_name: Cirros-0.3.4
+ image_file_name: cirros-0.3.4-x86_64-lxc.tar.gz
+ image_disk_format: raw
+
+ healthcheck:
+ disk_image: /home/opnfv/functest/data/cirros-0.3.4-x86_64-lxc.tar.gz
+ disk_format: raw
+fdio:
+ general:
+ flavor_extra_specs: {'hw:mem_page_size':'large'}
+ image_properties: {'hw_mem_page_size':'large'}
+ tempest:
+ use_custom_images: True
+ use_custom_flavors: True
+ovs:
+ general:
+ flavor_extra_specs: {'hw:mem_page_size':'large'}
+ image_properties: {'hw_mem_page_size':'large'}
+ tempest:
+ use_custom_images: True
+ use_custom_flavors: True
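
prepare_env.py (later in this diff) overlays one of these blocks on config_functest.yaml whenever its key (lxd, fdio, ovs) appears in DEPLOY_SCENARIO, using ft_utils.merge_dicts. That helper is defined elsewhere in the repo; the following is only a sketch of the kind of recursive merge it performs.

    # Sketch of a recursive dict merge: values from 'patch' override 'base',
    # nested dicts are merged rather than replaced (stand-in for ft_utils.merge_dicts).
    def merge_dicts(base, patch):
        merged = dict(base)
        for key, value in patch.items():
            if isinstance(value, dict) and isinstance(merged.get(key), dict):
                merged[key] = merge_dicts(merged[key], value)
            else:
                merged[key] = value
        return merged

    # e.g. a scenario such as "os-nosdn-fdio-noha" matches the 'fdio' key, so
    # tempest.use_custom_images flips from False to True in the merged config.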
diff --git a/functest/ci/exec_test.sh b/functest/ci/exec_test.sh
new file mode 100644
index 00000000..64018005
--- /dev/null
+++ b/functest/ci/exec_test.sh
@@ -0,0 +1,222 @@
+#!/bin/bash
+
+#
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# Morgan Richomme (morgan.richomme@orange.com)
+# Installs the Functest framework within the Docker container
+# and runs the tests automatically
+#
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+usage="Script to trigger the tests automatically.
+
+usage:
+ bash $(basename "$0") [-h|--help] [-t <test_name>]
+
+where:
+ -h|--help show this help text
+ -r|--report push results to database (false by default)
+ -s|--serial run Tempest tests in one thread
+ -t|--test run specific test case
+ <test_name>"
+
+
+report=""
+serial=false
+
+# Get the list of runnable tests
+# Check if we are in CI mode
+debug=""
+if [[ "${CI_DEBUG,,}" == "true" ]];then
+ debug="--debug"
+fi
+
+FUNCTEST_REPO_DIR=${repos_dir}/functest
+FUNCTEST_TEST_DIR=${repos_dir}/functest/functest/opnfv_tests
+FUNCTEST_CONF_DIR=/home/opnfv/functest/conf
+
+export PYTHONUNBUFFERED=1
+
+function odl_tests(){
+ keystone_ip=$(openstack catalog show identity |grep publicURL| cut -f3 -d"/" | cut -f1 -d":")
+ neutron_ip=$(openstack catalog show network | grep publicURL | cut -f3 -d"/" | cut -f1 -d":")
+ odl_ip=${neutron_ip}
+ odl_port=8080
+ if [ "$INSTALLER_TYPE" == "fuel" ]; then
+ odl_port=8282
+ elif [ "$INSTALLER_TYPE" == "apex" ]; then
+ odl_ip=$SDN_CONTROLLER_IP
+ odl_port=8181
+ elif [ "$INSTALLER_TYPE" == "joid" ]; then
+ odl_ip=$SDN_CONTROLLER
+ elif [ "$INSTALLER_TYPE" == "compass" ]; then
+ odl_port=8181
+ else
+ odl_ip=$SDN_CONTROLLER_IP
+ fi
+}
+
+function sfc_prepare(){
+ ids=($(neutron security-group-list|grep default|awk '{print $2}'))
+ for id in ${ids[@]}; do
+ if ! neutron security-group-show $id|grep "22/tcp" &>/dev/null; then
+ neutron security-group-rule-create --protocol tcp \
+ --port-range-min 22 --port-range-max 22 --direction ingress $id
+ neutron security-group-rule-create --protocol tcp \
+ --port-range-min 22 --port-range-max 22 --direction egress $id
+ fi
+ done
+}
+
+function run_test(){
+ test_name=$1
+ serial_flag=""
+ if [ $serial == "true" ]; then
+ serial_flag="-s"
+ fi
+
+ case $test_name in
+ "healthcheck")
+ ${FUNCTEST_TEST_DIR}/OpenStack/healthcheck/healthcheck.sh
+ ;;
+ "vping_ssh")
+ python ${FUNCTEST_TEST_DIR}/OpenStack/vPing/vping.py -m ssh $report
+ ;;
+ "vping_userdata")
+ python ${FUNCTEST_TEST_DIR}/OpenStack/vPing/vping.py -m userdata $report
+ ;;
+ "odl")
+ odl_tests
+ [[ "$report" == "-r" ]] && args=-p
+ ${FUNCTEST_TEST_DIR}/Controllers/ODL/OpenDaylightTesting.py \
+ --keystoneip $keystone_ip --neutronip $neutron_ip \
+ --osusername ${OS_USERNAME} --ostenantname ${OS_TENANT_NAME} \
+ --ospassword ${OS_PASSWORD} \
+ --odlip $odl_ip --odlwebport $odl_port ${args}
+ ;;
+ "tempest_smoke_serial")
+ python ${FUNCTEST_TEST_DIR}/OpenStack/tempest/run_tempest.py \
+ $clean_flag -s -m smoke $report
+ ;;
+ "tempest_full_parallel")
+ python ${FUNCTEST_TEST_DIR}/OpenStack/tempest/run_tempest.py \
+ $serial_flag $clean_flag -m full $report
+ ;;
+ "vims")
+ python ${FUNCTEST_TEST_DIR}/vnf/vIMS/vIMS.py $clean_flag $report
+ ;;
+ "rally_full")
+ python ${FUNCTEST_TEST_DIR}/OpenStack/rally/run_rally-cert.py $clean_flag all $report
+ ;;
+ "rally_sanity")
+ python ${FUNCTEST_TEST_DIR}/OpenStack/rally/run_rally-cert.py \
+ $clean_flag --sanity all $report
+ ;;
+ "bgpvpn")
+ sdnvpn_repo_dir=${repos_dir}/sdnvpn/test/functest/
+ python ${sdnvpn_repo_dir}/run_tests.py $report
+ ;;
+ "onos")
+ python ${FUNCTEST_TEST_DIR}/Controllers/ONOS/Teston/onosfunctest.py
+ ;;
+ "onos_sfc")
+ python ${FUNCTEST_TEST_DIR}/Controllers/ONOS/Teston/onosfunctest.py -t sfc
+ ;;
+ "promise")
+ python ${FUNCTEST_TEST_DIR}/features/promise.py $report
+ sleep 10 # to let the instances terminate
+ ;;
+ "doctor")
+ python ${FUNCTEST_TEST_DIR}/features/doctor.py $report
+ ;;
+ "ovno")
+ # suite under rewriting for Colorado
+ # no need to run anything until refactoring done
+ # ${repos_dir}/ovno/Testcases/RunTests.sh
+ ;;
+ "security_scan")
+ echo "Sourcing Credentials ${FUNCTEST_CONF_DIR}/stackrc for undercloud .."
+ source ${FUNCTEST_CONF_DIR}/stackrc
+ python ${repos_dir}/securityscanning/security_scan.py --config ${repos_dir}/securityscanning/config.ini
+ ;;
+ "copper")
+ python ${FUNCTEST_TEST_DIR}/features/copper.py $report
+ ;;
+ "moon")
+ python ${repos_dir}/moon/tests/run_tests.py $report
+ ;;
+ "multisite")
+ python ${FUNCTEST_TEST_DIR}/OpenStack/tempest/gen_tempest_conf.py
+ python ${FUNCTEST_TEST_DIR}/OpenStack/tempest/run_tempest.py \
+ $clean_flag -s -m feature_multisite $report \
+ -c ${FUNCTEST_TEST_DIR}/OpenStack/tempest/tempest_multisite.conf
+ ;;
+ "domino")
+ python ${FUNCTEST_TEST_DIR}/features/domino.py $report
+ ;;
+ "odl-sfc")
+ ODL_SFC_DIR=${FUNCTEST_TEST_DIR}/features/sfc
+ # pass FUNCTEST_REPO_DIR inside prepare_odl_sfc.bash
+ FUNCTEST_REPO_DIR=${FUNCTEST_REPO_DIR} python ${ODL_SFC_DIR}/prepare_odl_sfc.py || exit $?
+ source ${ODL_SFC_DIR}/tackerc
+ python ${ODL_SFC_DIR}/sfc_colorado1.py $report
+ ;;
+ "parser")
+ python ${FUNCTEST_TEST_DIR}/vnf/vRNC/parser.py $report
+ ;;
+ *)
+ echo "The test case '${test_name}' does not exist."
+ exit 1
+ esac
+
+ if [[ $? != 0 ]]; then exit 1
+ else exit 0
+ fi
+}
+
+
+# Parse parameters
+while [[ $# > 0 ]]
+ do
+ key="$1"
+ case $key in
+ -h|--help)
+ echo "$usage"
+ exit 0
+ shift
+ ;;
+ -r|--report)
+ report="-r"
+ ;;
+ -s|--serial)
+ serial=true
+ ;;
+ -t|--test|--tests)
+ TEST="$2"
+ shift
+ ;;
+ *)
+ echo "unknown option $1 $2"
+ exit 1
+ ;;
+ esac
+ shift # past argument or value
+done
+
+
+# Source credentials
+echo "Sourcing Credentials ${FUNCTEST_CONF_DIR}/openstack.creds to run the test.."
+source ${FUNCTEST_CONF_DIR}/openstack.creds
+
+# ODL Boron workaround to create additional flow rules to allow port 22 TCP
+if [[ $DEPLOY_SCENARIO == *"odl_l2-sfc"* ]]; then
+ sfc_prepare
+fi
+
+# Run test
+run_test $TEST
diff --git a/functest/ci/generate_report.py b/functest/ci/generate_report.py
new file mode 100644
index 00000000..c9343729
--- /dev/null
+++ b/functest/ci/generate_report.py
@@ -0,0 +1,152 @@
+import json
+import os
+import re
+import urllib2
+
+import functest.utils.functest_logger as ft_logger
+import functest.utils.functest_utils as ft_utils
+
+
+COL_1_LEN = 25
+COL_2_LEN = 15
+COL_3_LEN = 12
+COL_4_LEN = 15
+COL_5_LEN = 75
+
+# If we run from CI (Jenkins) we will push the results to the DB
+# and then we can print the url to the specific test result
+IS_CI_RUN = False
+BUILD_TAG = None
+
+logger = ft_logger.Logger("generate_report").getLogger()
+
+
+def init(tiers_to_run):
+ test_cases_arr = []
+ for tier in tiers_to_run:
+ for test in tier.get_tests():
+ test_cases_arr.append({'test_name': test.get_name(),
+ 'tier_name': tier.get_name(),
+ 'result': 'Not executed',
+ 'duration': '0',
+ 'url': ''})
+ return test_cases_arr
+
+
+def get_results_from_db():
+ url = ft_utils.get_db_url() + '/results?build_tag=' + BUILD_TAG
+ logger.debug("Query to rest api: %s" % url)
+ try:
+ data = json.load(urllib2.urlopen(url))
+ return data['results']
+ except:
+ logger.error("Cannot read content from the url: %s" % url)
+ return None
+
+
+def get_data(test, results):
+ test_result = test['result']
+ url = ''
+ for test_db in results:
+ if test['test_name'] in test_db['case_name']:
+ id = test_db['_id']
+ url = ft_utils.get_db_url() + '/results/' + id
+ test_result = test_db['criteria']
+
+ return {"url": url, "result": test_result}
+
+
+def print_line(w1, w2='', w3='', w4='', w5=''):
+ str = ('| ' + w1.ljust(COL_1_LEN - 1) +
+ '| ' + w2.ljust(COL_2_LEN - 1) +
+ '| ' + w3.ljust(COL_3_LEN - 1) +
+ '| ' + w4.ljust(COL_4_LEN - 1))
+ if IS_CI_RUN:
+ str += ('| ' + w5.ljust(COL_5_LEN - 1))
+ str += '|\n'
+ return str
+
+
+def print_line_no_columns(str):
+ TOTAL_LEN = COL_1_LEN + COL_2_LEN + COL_3_LEN + COL_4_LEN + 2
+ if IS_CI_RUN:
+ TOTAL_LEN += COL_5_LEN + 1
+ return ('| ' + str.ljust(TOTAL_LEN) + "|\n")
+
+
+def print_separator(char="=", delimiter="+"):
+ str = ("+" + char * COL_1_LEN +
+ delimiter + char * COL_2_LEN +
+ delimiter + char * COL_3_LEN +
+ delimiter + char * COL_4_LEN)
+ if IS_CI_RUN:
+ str += (delimiter + char * COL_5_LEN)
+ str += '+\n'
+ return str
+
+
+def main(args):
+ global BUILD_TAG, IS_CI_RUN
+ executed_test_cases = args
+
+ BUILD_TAG = os.getenv("BUILD_TAG")
+ if BUILD_TAG is not None:
+ IS_CI_RUN = True
+
+ if IS_CI_RUN:
+ results = get_results_from_db()
+ if results is not None:
+ for test in executed_test_cases:
+ data = get_data(test, results)
+ test.update({"url": data['url'],
+ "result": data['result']})
+
+ TOTAL_LEN = COL_1_LEN + COL_2_LEN + COL_3_LEN + COL_4_LEN
+ if IS_CI_RUN:
+ TOTAL_LEN += COL_5_LEN
+ MID = TOTAL_LEN / 2
+
+ INSTALLER = os.getenv('INSTALLER_TYPE', 'unknown')
+ CI_LOOP = os.getenv('CI_LOOP')
+ SCENARIO = os.getenv('DEPLOY_SCENARIO')
+ CI_LOOP = None
+ if BUILD_TAG is not None:
+ if re.search("daily", BUILD_TAG) is not None:
+ CI_LOOP = "daily"
+ else:
+ CI_LOOP = "weekly"
+
+ str = ''
+ str += print_separator('=', delimiter="=")
+ str += print_line_no_columns(' ' * (MID - 8) + 'FUNCTEST REPORT')
+ str += print_separator('=', delimiter="=")
+ str += print_line_no_columns(' ')
+ str += print_line_no_columns(" Deployment description:")
+ str += print_line_no_columns(" INSTALLER: %s" % INSTALLER)
+ if SCENARIO is not None:
+ str += print_line_no_columns(" SCENARIO: %s" % SCENARIO)
+ if BUILD_TAG is not None:
+ str += print_line_no_columns(" BUILD TAG: %s" % BUILD_TAG)
+ if CI_LOOP is not None:
+ str += print_line_no_columns(" CI LOOP: %s" % CI_LOOP)
+ str += print_line_no_columns(' ')
+ str += print_separator('=')
+ if IS_CI_RUN:
+ str += print_line('TEST CASE', 'TIER', 'DURATION', 'RESULT', 'URL')
+ else:
+ str += print_line('TEST CASE', 'TIER', 'DURATION', 'RESULT')
+ str += print_separator('=')
+ for test in executed_test_cases:
+ str += print_line(test['test_name'],
+ test['tier_name'],
+ test['duration'],
+ test['result'],
+ test['url'])
+ str += print_separator('-')
+
+ logger.info("\n\n\n%s" % str)
+
+
+if __name__ == '__main__':
+ import sys
+ main(sys.argv[1:])
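
The report is driven by a list of dicts shaped like the ones init() builds; in CI runs, get_data() matches each entry against the rows fetched from the test DB by case_name. A small self-contained illustration of those structures (all values invented):

    # Invented sample data showing the shapes generate_report works with.
    executed = [{'test_name': 'vping_ssh', 'tier_name': 'smoke',
                 'result': 'Not executed', 'duration': '0', 'url': ''}]
    db_results = [{'case_name': 'vping_ssh', '_id': 'abc123', 'criteria': 'PASS'}]

    for test in executed:
        for row in db_results:
            if test['test_name'] in row['case_name']:
                test['result'] = row['criteria']
                test['url'] = ('http://testresults.opnfv.org/test/api/v1'
                               '/results/' + row['_id'])
    print(executed)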
diff --git a/functest/ci/prepare_env.py b/functest/ci/prepare_env.py
new file mode 100644
index 00000000..e5c24cc3
--- /dev/null
+++ b/functest/ci/prepare_env.py
@@ -0,0 +1,299 @@
+#!/usr/bin/env python
+#
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+#
+# Installs the Functest framework within the Docker container
+# and runs the tests automatically
+#
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+
+import json
+import os
+import re
+import subprocess
+import sys
+
+import argparse
+import yaml
+
+import functest.utils.functest_logger as ft_logger
+import functest.utils.functest_utils as ft_utils
+import functest.utils.openstack_utils as os_utils
+
+actions = ['start', 'check']
+parser = argparse.ArgumentParser()
+parser.add_argument("action", help="Possible actions are: "
+ "'{d[0]}|{d[1]}' ".format(d=actions))
+parser.add_argument("-d", "--debug", help="Debug mode", action="store_true")
+args = parser.parse_args()
+
+
+""" logging configuration """
+logger = ft_logger.Logger("prepare_env").getLogger()
+
+
+""" global variables """
+INSTALLERS = ['fuel', 'compass', 'apex', 'joid']
+CI_INSTALLER_TYPE = ""
+CI_INSTALLER_IP = ""
+CI_SCENARIO = ""
+CI_DEBUG = False
+CONFIG_FUNCTEST_PATH = os.environ["CONFIG_FUNCTEST_YAML"]
+CONFIG_PATCH_PATH = os.path.join(os.path.dirname(
+ CONFIG_FUNCTEST_PATH), "config_patch.yaml")
+
+with open(CONFIG_PATCH_PATH) as f:
+ functest_patch_yaml = yaml.safe_load(f)
+
+FUNCTEST_CONF_DIR = \
+ ft_utils.get_functest_config('general.directories.dir_functest_conf')
+
+
+FUNCTEST_DATA_DIR = \
+ ft_utils.get_functest_config('general.directories.dir_functest_data')
+FUNCTEST_RESULTS_DIR = \
+ ft_utils.get_functest_config('general.directories.dir_results')
+DEPLOYMENT_MAME = \
+ ft_utils.get_functest_config('rally.deployment_name')
+TEMPEST_REPO_DIR = \
+ ft_utils.get_functest_config('general.directories.dir_repo_tempest')
+
+ENV_FILE = FUNCTEST_CONF_DIR + "/env_active"
+
+
+def print_separator():
+ logger.info("==============================================")
+
+
+def check_env_variables():
+ print_separator()
+ logger.info("Checking environment variables...")
+ global CI_INSTALLER_TYPE
+ global CI_INSTALLER_IP
+ global CI_DEBUG
+ global CI_SCENARIO
+ CI_INSTALLER_TYPE = os.getenv('INSTALLER_TYPE')
+ CI_INSTALLER_IP = os.getenv('INSTALLER_IP')
+ CI_SCENARIO = os.getenv('DEPLOY_SCENARIO')
+ CI_NODE = os.getenv('NODE_NAME')
+ CI_BUILD_TAG = os.getenv('BUILD_TAG')
+ CI_DEBUG = os.getenv('CI_DEBUG')
+
+ if CI_INSTALLER_TYPE is None:
+ logger.warning("The env variable 'INSTALLER_TYPE' is not defined.")
+ CI_INSTALLER_TYPE = "undefined"
+ else:
+ if CI_INSTALLER_TYPE not in INSTALLERS:
+ logger.warning("INSTALLER_TYPE=%s is not a valid OPNFV installer. "
+ "Available OPNFV Installers are : %s. "
+ "Setting INSTALLER_TYPE=undefined."
+ % (CI_INSTALLER_TYPE, INSTALLERS))
+ CI_INSTALLER_TYPE = "undefined"
+ else:
+ logger.info(" INSTALLER_TYPE=%s" % CI_INSTALLER_TYPE)
+
+ if CI_INSTALLER_IP is None:
+ logger.warning("The env variable 'INSTALLER_IP' is not defined. "
+ "It is needed to fetch the OpenStack credentials. "
+ "If the credentials are not provided to the "
+ "container as a volume, please add this env variable "
+ "to the 'docker run' command.")
+ else:
+ logger.info(" INSTALLER_IP=%s" % CI_INSTALLER_IP)
+
+ if CI_SCENARIO is None:
+ logger.warning("The env variable 'DEPLOY_SCENARIO' is not defined. "
+ "Setting CI_SCENARIO=undefined.")
+ CI_SCENARIO = "undefined"
+ else:
+ logger.info(" DEPLOY_SCENARIO=%s" % CI_SCENARIO)
+ if CI_DEBUG:
+ logger.info(" CI_DEBUG=%s" % CI_DEBUG)
+
+ if CI_NODE:
+ logger.info(" NODE_NAME=%s" % CI_NODE)
+
+ if CI_BUILD_TAG:
+ logger.info(" BUILD_TAG=%s" % CI_BUILD_TAG)
+
+
+def create_directories():
+ print_separator()
+ logger.info("Creating needed directories...")
+ if not os.path.exists(FUNCTEST_CONF_DIR):
+ os.makedirs(FUNCTEST_CONF_DIR)
+ logger.info(" %s created." % FUNCTEST_CONF_DIR)
+ else:
+ logger.debug(" %s already exists." % FUNCTEST_CONF_DIR)
+
+ if not os.path.exists(FUNCTEST_DATA_DIR):
+ os.makedirs(FUNCTEST_DATA_DIR)
+ logger.info(" %s created." % FUNCTEST_DATA_DIR)
+ else:
+ logger.debug(" %s already exists." % FUNCTEST_DATA_DIR)
+
+
+def source_rc_file():
+ print_separator()
+ logger.info("Fetching RC file...")
+ rc_file = os.getenv('creds')
+ if rc_file is None:
+ logger.warning("The environment variable 'creds' must be set and "
+ "pointing to the local RC file. Using default: "
+ "/home/opnfv/functest/conf/openstack.creds ...")
+ rc_file = "/home/opnfv/functest/conf/openstack.creds"
+
+ if not os.path.isfile(rc_file):
+ logger.info("RC file not provided. "
+ "Fetching it from the installer...")
+ if CI_INSTALLER_IP is None:
+ logger.error("The env variable 'INSTALLER_IP' must be provided in"
+ " order to fetch the credentials from the installer.")
+ sys.exit("Missing CI_INSTALLER_IP.")
+ if CI_INSTALLER_TYPE not in INSTALLERS:
+ logger.error("Cannot fetch credentials. INSTALLER_TYPE=%s is "
+ "not a valid OPNFV installer. Available "
+ "installers are: %s." % (CI_INSTALLER_TYPE, INSTALLERS))
+ sys.exit("Wrong INSTALLER_TYPE.")
+
+ cmd = ("/home/opnfv/repos/releng/utils/fetch_os_creds.sh "
+ "-d %s -i %s -a %s"
+ % (rc_file, CI_INSTALLER_TYPE, CI_INSTALLER_IP))
+ logger.debug("Executing command: %s" % cmd)
+ p = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE)
+ output = p.communicate()[0]
+ logger.debug("\n%s" % output)
+ if p.returncode != 0:
+ logger.error("Failed to fetch credentials from installer.")
+ sys.exit(1)
+ else:
+ logger.info("RC file provided in %s." % rc_file)
+ if os.path.getsize(rc_file) == 0:
+ logger.error("The file %s is empty." % rc_file)
+ sys.exit(1)
+
+ logger.info("Sourcing the OpenStack RC file...")
+ creds = os_utils.source_credentials(rc_file)
+ str = ""
+ for key, value in creds.iteritems():
+ if re.search("OS_", key):
+ str += "\n\t\t\t\t\t\t " + key + "=" + value
+ logger.debug("Used credentials: %s" % str)
+
+
+def patch_config_file():
+ updated = False
+ for key in functest_patch_yaml:
+ if key in CI_SCENARIO:
+ new_functest_yaml = dict(ft_utils.merge_dicts(
+ ft_utils.get_functest_yaml(), functest_patch_yaml[key]))
+ updated = True
+
+ if updated:
+ os.remove(CONFIG_FUNCTEST_PATH)
+ with open(CONFIG_FUNCTEST_PATH, "w") as f:
+ f.write(yaml.dump(new_functest_yaml, default_style='"'))
+ f.close()
+
+
+def verify_deployment():
+ print_separator()
+ logger.info("Verifying OpenStack services...")
+ cmd = ("%s/functest/ci/check_os.sh" % ft_utils.FUNCTEST_REPO)
+
+ logger.debug("Executing command: %s" % cmd)
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
+
+ while p.poll() is None:
+ line = p.stdout.readline().rstrip()
+ if "ERROR" in line:
+ logger.error(line)
+ sys.exit("Problem while running 'check_os.sh'.")
+ logger.info(line)
+
+
+def install_rally():
+ print_separator()
+ logger.info("Creating Rally environment...")
+
+ cmd = "rally deployment destroy opnfv-rally"
+ ft_utils.execute_command(cmd,
+ error_msg=("Deployment %s does not exist."
+ % DEPLOYMENT_MAME), verbose=False)
+ rally_conf = os_utils.get_credentials_for_rally()
+ with open('rally_conf.json', 'w') as fp:
+ json.dump(rally_conf, fp)
+ cmd = "rally deployment create --file=rally_conf.json --name="
+ cmd += DEPLOYMENT_MAME
+ ft_utils.execute_command(cmd,
+ error_msg="Problem creating Rally deployment")
+
+ logger.info("Installing tempest from existing repo...")
+ cmd = ("rally verify install --source " + TEMPEST_REPO_DIR +
+ " --system-wide")
+ ft_utils.execute_command(cmd,
+ error_msg="Problem installing Tempest.")
+
+ cmd = "rally deployment check"
+ ft_utils.execute_command(cmd,
+ error_msg=("OpenStack not responding or "
+ "faulty Rally deployment."))
+
+ cmd = "rally show images"
+ ft_utils.execute_command(cmd,
+ error_msg=("Problem while listing "
+ "OpenStack images."))
+
+ cmd = "rally show flavors"
+ ft_utils.execute_command(cmd,
+ error_msg=("Problem while showing "
+ "OpenStack flavors."))
+
+
+def check_environment():
+ msg_not_active = "The Functest environment is not installed."
+ if not os.path.isfile(ENV_FILE):
+ logger.error(msg_not_active)
+ sys.exit(1)
+
+ with open(ENV_FILE, "r") as env_file:
+ s = env_file.read()
+ if not re.search("1", s):
+ logger.error(msg_not_active)
+ sys.exit(1)
+
+ logger.info("Functest environment installed.")
+
+
+def main():
+ if not (args.action in actions):
+ logger.error('Argument not valid.')
+ sys.exit()
+
+ if args.action == "start":
+ logger.info("######### Preparing Functest environment #########\n")
+ check_env_variables()
+ create_directories()
+ source_rc_file()
+ patch_config_file()
+ verify_deployment()
+ install_rally()
+
+ with open(ENV_FILE, "w") as env_file:
+ env_file.write("1")
+
+ check_environment()
+
+ if args.action == "check":
+ check_environment()
+
+ exit(0)
+
+if __name__ == '__main__':
+ main()
diff --git a/functest/ci/run_tests.py b/functest/ci/run_tests.py
new file mode 100644
index 00000000..f30062f7
--- /dev/null
+++ b/functest/ci/run_tests.py
@@ -0,0 +1,249 @@
+#!/usr/bin/python -u
+#
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+import datetime
+import importlib
+import os
+import re
+import sys
+
+import argparse
+
+import functest.ci.generate_report as generate_report
+import functest.ci.tier_builder as tb
+import functest.core.TestCasesBase as TestCasesBase
+import functest.utils.functest_logger as ft_logger
+import functest.utils.functest_utils as ft_utils
+import functest.utils.openstack_clean as os_clean
+import functest.utils.openstack_snapshot as os_snapshot
+import functest.utils.openstack_utils as os_utils
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument("-t", "--test", dest="test", action='store',
+ help="Test case or tier (group of tests) to be executed. "
+ "It will run all the tests if not specified.")
+parser.add_argument("-n", "--noclean", help="Do not clean OpenStack resources"
+ " after running each test (default=false).",
+ action="store_true")
+parser.add_argument("-r", "--report", help="Push results to database "
+ "(default=false).", action="store_true")
+args = parser.parse_args()
+
+
+""" logging configuration """
+logger = ft_logger.Logger("run_tests").getLogger()
+
+
+""" global variables """
+EXEC_SCRIPT = ("%s/ci/exec_test.sh" % ft_utils.FUNCTEST_REPO)
+CLEAN_FLAG = True
+REPORT_FLAG = False
+EXECUTED_TEST_CASES = []
+
+# This will be the return code of this script. If any of the tests fails,
+# this variable will change to -1
+OVERALL_RESULT = 0
+
+
+def print_separator(str, count=45):
+ line = ""
+ for i in range(0, count - 1):
+ line += str
+ logger.info("%s" % line)
+
+
+def source_rc_file():
+ rc_file = os.getenv('creds')
+ if not os.path.isfile(rc_file):
+ logger.error("RC file %s does not exist..." % rc_file)
+ sys.exit(1)
+ logger.debug("Sourcing the OpenStack RC file...")
+ os_utils.source_credentials(rc_file)
+
+
+def generate_os_snapshot():
+ os_snapshot.main()
+
+
+def cleanup():
+ os_clean.main()
+
+
+def update_test_info(test_name, result, duration):
+ for test in EXECUTED_TEST_CASES:
+ if test['test_name'] == test_name:
+ test.update({"result": result,
+ "duration": duration})
+
+
+def get_run_dict_if_defined(testname):
+ try:
+ dict = ft_utils.get_dict_by_test(testname)
+ if not dict:
+ logger.error("Cannot get {}'s config options".format(testname))
+ elif 'run' in dict:
+ return dict['run']
+ return None
+ except Exception:
+ logger.exception("Cannot get {}'s config options".format(testname))
+ return None
+
+
+def run_test(test, tier_name):
+ global OVERALL_RESULT, EXECUTED_TEST_CASES
+ result_str = "PASS"
+ start = datetime.datetime.now()
+ test_name = test.get_name()
+ logger.info("\n") # blank line
+ print_separator("=")
+ logger.info("Running test case '%s'..." % test_name)
+ print_separator("=")
+ logger.debug("\n%s" % test)
+
+ if CLEAN_FLAG:
+ generate_os_snapshot()
+
+ flags = (" -t %s" % (test_name))
+ if REPORT_FLAG:
+ flags += " -r"
+
+ result = TestCasesBase.TestCasesBase.EX_RUN_ERROR
+ run_dict = get_run_dict_if_defined(test_name)
+ if run_dict:
+ try:
+ module = importlib.import_module(run_dict['module'])
+ cls = getattr(module, run_dict['class'])
+ test_case = cls()
+ result = test_case.run()
+ if result == TestCasesBase.TestCasesBase.EX_OK and REPORT_FLAG:
+ result = test_case.push_to_db()
+ except ImportError:
+ logger.exception("Cannot import module {}".format(
+ run_dict['module']))
+ except AttributeError:
+ logger.exception("Cannot get class {}".format(
+ run_dict['class']))
+ else:
+ cmd = ("%s%s" % (EXEC_SCRIPT, flags))
+ logger.info("Executing command {} because {} "
+ "doesn't implement the new framework".format(
+ cmd, test_name))
+ result = ft_utils.execute_command(cmd)
+
+ if CLEAN_FLAG:
+ cleanup()
+ end = datetime.datetime.now()
+ duration = (end - start).seconds
+ duration_str = ("%02d:%02d" % divmod(duration, 60))
+ logger.info("Test execution time: %s" % duration_str)
+
+ if result != 0:
+ logger.error("The test case '%s' failed. " % test_name)
+ OVERALL_RESULT = -1
+ result_str = "FAIL"
+
+ if test.is_blocking():
+ if not args.test or args.test == "all":
+ logger.info("This test case is blocking. Aborting overall "
+ "execution.")
+ # if it is a single test we don't print the whole results table
+ update_test_info(test_name, result_str, duration_str)
+ generate_report.main(EXECUTED_TEST_CASES)
+ logger.info("Execution exit value: %s" % OVERALL_RESULT)
+ sys.exit(OVERALL_RESULT)
+
+ update_test_info(test_name, result_str, duration_str)
+
+
+def run_tier(tier):
+ tier_name = tier.get_name()
+ tests = tier.get_tests()
+ if tests is None or len(tests) == 0:
+ logger.info("There are no supported test cases in this tier "
+ "for the given scenario")
+ return 0
+ logger.info("\n\n") # blank line
+ print_separator("#")
+ logger.info("Running tier '%s'" % tier_name)
+ print_separator("#")
+ logger.debug("\n%s" % tier)
+ for test in tests:
+ run_test(test, tier_name)
+
+
+def run_all(tiers):
+ global EXECUTED_TEST_CASES
+ summary = ""
+ BUILD_TAG = os.getenv('BUILD_TAG')
+ if BUILD_TAG is not None and re.search("daily", BUILD_TAG) is not None:
+ CI_LOOP = "daily"
+ else:
+ CI_LOOP = "weekly"
+
+ tiers_to_run = []
+
+ for tier in tiers.get_tiers():
+ if (len(tier.get_tests()) != 0 and
+ re.search(CI_LOOP, tier.get_ci_loop()) is not None):
+ tiers_to_run.append(tier)
+ summary += ("\n - %s:\n\t %s"
+ % (tier.get_name(),
+ tier.get_test_names()))
+
+ logger.info("Tests to be executed:%s" % summary)
+ EXECUTED_TEST_CASES = generate_report.init(tiers_to_run)
+ for tier in tiers_to_run:
+ run_tier(tier)
+
+ generate_report.main(EXECUTED_TEST_CASES)
+
+
+def main():
+ global CLEAN_FLAG
+ global REPORT_FLAG
+
+ CI_INSTALLER_TYPE = os.getenv('INSTALLER_TYPE')
+ CI_SCENARIO = os.getenv('DEPLOY_SCENARIO')
+
+ file = ft_utils.get_testcases_file()
+ _tiers = tb.TierBuilder(CI_INSTALLER_TYPE, CI_SCENARIO, file)
+
+ if args.noclean:
+ CLEAN_FLAG = False
+
+ if args.report:
+ REPORT_FLAG = True
+
+ if args.test:
+ source_rc_file()
+ if _tiers.get_tier(args.test):
+ run_tier(_tiers.get_tier(args.test))
+
+ elif _tiers.get_test(args.test):
+ run_test(_tiers.get_test(args.test), _tiers.get_tier(args.test))
+
+ elif args.test == "all":
+ run_all(_tiers)
+
+ else:
+ logger.error("Unknown test case or tier '%s', or not supported by "
+ "the given scenario '%s'."
+ % (args.test, CI_SCENARIO))
+ logger.debug("Available tiers are:\n\n%s"
+ % _tiers)
+ else:
+ run_all(_tiers)
+
+ logger.info("Execution exit value: %s" % OVERALL_RESULT)
+ sys.exit(OVERALL_RESULT)
+
+if __name__ == '__main__':
+ main()
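
run_test() only takes the importlib path when a test's entry in testcases.yaml (the next file in this diff) carries a 'run' block with 'module' and 'class'; everything else falls back to exec_test.sh. The sketch below shows the minimal shape such a class might have; the module path, class name and YAML snippet are invented, and the exact TestCasesBase contract is not shown in this diff.

    # Hypothetical functest/opnfv_tests/example/example_case.py, assuming the
    # functest package (and functest.core.TestCasesBase) is importable.
    import functest.core.TestCasesBase as base

    class ExampleCase(base.TestCasesBase):
        def run(self):
            # ...perform the actual checks here...
            return base.TestCasesBase.EX_OK   # anything else marks the case as failed

    # Matching (hypothetical) testcases.yaml entry:
    # run:
    #     module: 'functest.opnfv_tests.example.example_case'
    #     class: 'ExampleCase'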
diff --git a/functest/ci/testcases.yaml b/functest/ci/testcases.yaml
new file mode 100644
index 00000000..afd32986
--- /dev/null
+++ b/functest/ci/testcases.yaml
@@ -0,0 +1,269 @@
+tiers:
+ -
+ name: healthcheck
+ order: 0
+ ci_loop: '(daily)|(weekly)'
+ description : >-
+ First tier to be executed to verify the basic
+ operations in the VIM.
+ testcases:
+ -
+ name: healthcheck
+ criteria: 'status == "PASS"'
+ blocking: true
+ description: >-
+ This test case verifies the basic OpenStack services like
+ Keystone, Glance, Cinder, Neutron and Nova.
+
+ dependencies:
+ installer: ''
+ scenario: '^((?!lxd).)*$'
+
+ -
+ name: smoke
+ order: 1
+ ci_loop: '(daily)|(weekly)'
+ description : >-
+ Set of basic Functional tests to validate the OpenStack deployment.
+ testcases:
+ -
+ name: vping_ssh
+ criteria: 'status == "PASS"'
+ blocking: true
+ description: >-
+ This test case verifies: 1) SSH to an instance using floating
+ IPs over the public network. 2) Connectivity between 2 instances
+ over a private network.
+ dependencies:
+ installer: ''
+ scenario: '^((?!bgpvpn|odl_l3).)*$'
+
+ -
+ name: vping_userdata
+ criteria: 'status == "PASS"'
+ blocking: true
+ description: >-
+ This test case verifies: 1) Boot a VM with given userdata.
+ 2) Connectivity between 2 instances over a private network.
+ dependencies:
+ installer: ''
+ scenario: '^((?!lxd).)*$'
+
+ -
+ name: tempest_smoke_serial
+ criteria: 'success_rate == 100%'
+ blocking: false
+ description: >-
+ This test case runs the smoke subset of the OpenStack
+ Tempest suite. The list of test cases is generated by
+ Tempest automatically and depends on the parameters of
+ the OpenStack deployment.
+ dependencies:
+ installer: ''
+ scenario: ''
+
+ -
+ name: rally_sanity
+ criteria: 'success_rate == 100%'
+ blocking: false
+ description: >-
+ This test case runs a sub group of tests of the OpenStack
+ Rally suite in smoke mode.
+ dependencies:
+ installer: ''
+ scenario: '^((?!bgpvpn).)*$'
+
+ -
+ name: sdn_suites
+ order: 2
+ ci_loop: '(daily)|(weekly)'
+ description : >-
+ Test suites corresponding to the different
+ SDN Controllers existing in OPNFV.
+ testcases:
+ -
+ name: odl
+ criteria: 'success_rate == 100%'
+ blocking: true
+ description: >-
+ Test Suite for the OpenDaylight SDN Controller. It integrates
+ some test suites from upstream using Robot as the test
+ framework.
+ dependencies:
+ installer: ''
+ scenario: 'odl'
+ run:
+ module: 'functest.opnfv_tests.Controllers.ODL.OpenDaylightTesting'
+ class: 'ODLTestCases'
+
+ -
+ name: onos
+ criteria: 'status == "PASS"'
+ blocking: true
+ description: >-
+ Test Suite for the ONOS SDN Controller. It integrates
+ some test suites from upstream using TestON as the test
+ framework.
+ dependencies:
+ installer: ''
+ scenario: 'onos'
+
+ -
+ name: features
+ order: 3
+ ci_loop: '(daily)|(weekly)'
+ description : >-
+ Test suites from feature projects
+ integrated in functest
+ testcases:
+ -
+ name: promise
+ criteria: 'success_rate == 100%'
+ blocking: false
+ description: >-
+ Test suite from Promise project.
+ dependencies:
+ installer: '(fuel)|(joid)'
+ scenario: ''
+
+ -
+ name: doctor
+ criteria: 'status == "PASS"'
+ blocking: false
+ description: >-
+ Test suite from Doctor project.
+ dependencies:
+ installer: 'apex'
+ scenario: '^((?!fdio).)*$'
+
+ -
+ name: bgpvpn
+ criteria: 'status == "PASS"'
+ blocking: false
+ description: >-
+ Test suite from SDNVPN project.
+ dependencies:
+ installer: '(fuel)|(apex)'
+ scenario: 'bgpvpn'
+
+ -
+ name: security_scan
+ criteria: 'status == "PASS"'
+ blocking: false
+ description: >-
+ Simple security Scan
+ dependencies:
+ installer: 'apex'
+ scenario: '^((?!fdio).)*$'
+
+ -
+ name: copper
+ criteria: 'status == "PASS"'
+ blocking: false
+ description: >-
+ Test suite for policy management based on OpenStack Congress
+ dependencies:
+ installer: '(apex)|(joid)'
+ scenario: '^((?!fdio|lxd).)*$'
+ -
+ name: moon
+ criteria: 'status == "PASS"'
+ blocking: false
+ description: >-
+ Security management system for OPNFV
+ dependencies:
+ installer: 'compass'
+ scenario: '(odl)*(moon)'
+ -
+ name: multisite
+ criteria: 'success_rate == 100%'
+ blocking: false
+ description: >-
+ Test suite from kingbird
+ dependencies:
+ installer: '(fuel)|(compass)'
+ scenario: 'multisite'
+ -
+ name: domino
+ criteria: 'status == "PASS"'
+ blocking: false
+ description: >-
+ Test suite for template distribution based on Domino
+ dependencies:
+ installer: 'joid'
+ scenario: ''
+ -
+ name: odl-sfc
+ criteria: 'status == "PASS"'
+ blocking: false
+ description: >-
+ Test suite for odl-sfc to test two chains and two SFs
+ dependencies:
+ installer: '(apex)|(fuel)'
+ scenario: 'odl_l2-sfc'
+ -
+ name: onos_sfc
+ criteria: 'status == "PASS"'
+ blocking: true
+ description: >-
+ Test Suite for onos-sfc to test sfc function.
+ dependencies:
+ installer: ''
+ scenario: 'onos-sfc'
+ -
+ name: parser
+ criteria: 'ret == 0'
+ blocking: false
+ description: >-
+ Test suite from Parser project.
+ dependencies:
+ installer: 'fuel'
+ scenario: '^((?!bgpvpn|noha).)*$'
+
+ -
+ name: openstack
+ order: 4
+ ci_loop: 'weekly'
+ description : >-
+ Extensive testing of OpenStack API.
+ testcases:
+ -
+ name: tempest_full_parallel
+ criteria: 'success_rate >= 80%'
+ blocking: false
+ description: >-
+ The list of test cases is generated by
+ Tempest automatically and depends on the parameters of
+ the OpenStack deployment.
+ dependencies:
+ installer: ''
+ scenario: ''
+
+ -
+ name: rally_full
+ criteria: 'success_rate >= 90%'
+ blocking: false
+ description: >-
+ This test case runs the full suite of scenarios of the OpenStack
+ Rally suite using several threads and iterations.
+ dependencies:
+ installer: ''
+ scenario: ''
+
+ -
+ name: vnf
+ order: 5
+ ci_loop: 'weekly'
+ description : >-
+ Collection of VNF test cases.
+ testcases:
+ -
+ name: vims
+ criteria: 'status == "PASS"'
+ blocking: false
+ description: >-
+ This test case deploys an OpenSource vIMS solution from Clearwater
+ using the Cloudify orchestrator. It also runs some signaling traffic.
+ dependencies:
+ installer: ''
+ scenario: '(ocl)|(nosdn)|^(os-odl)((?!bgpvpn).)*$'
diff --git a/functest/ci/tier_builder.py b/functest/ci/tier_builder.py
new file mode 100644
index 00000000..e1c3e49e
--- /dev/null
+++ b/functest/ci/tier_builder.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+#
+# jose.lausuch@ericsson.com
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+import tier_handler as th
+import yaml
+
+
+class TierBuilder:
+
+ def __init__(self, ci_installer, ci_scenario, testcases_file):
+ self.ci_installer = ci_installer
+ self.ci_scenario = ci_scenario
+ self.testcases_file = testcases_file
+ self.dic_tier_array = None
+ self.tier_objects = []
+ self.testcases_yaml = None
+ self.generate_tiers()
+
+ def read_test_yaml(self):
+ with open(self.testcases_file) as f:
+ self.testcases_yaml = yaml.safe_load(f)
+
+ self.dic_tier_array = []
+ for tier in self.testcases_yaml.get("tiers"):
+ self.dic_tier_array.append(tier)
+
+ def generate_tiers(self):
+ if self.dic_tier_array is None:
+ self.read_test_yaml()
+
+ del self.tier_objects[:]
+ for dic_tier in self.dic_tier_array:
+ tier = th.Tier(name=dic_tier['name'],
+ order=dic_tier['order'],
+ ci_loop=dic_tier['ci_loop'],
+ description=dic_tier['description'])
+
+ for dic_testcase in dic_tier['testcases']:
+ installer = dic_testcase['dependencies']['installer']
+ scenario = dic_testcase['dependencies']['scenario']
+ dep = th.Dependency(installer, scenario)
+
+ testcase = th.TestCase(name=dic_testcase['name'],
+ dependency=dep,
+ criteria=dic_testcase['criteria'],
+ blocking=dic_testcase['blocking'],
+ description=dic_testcase['description'])
+ if testcase.is_compatible(self.ci_installer, self.ci_scenario):
+ tier.add_test(testcase)
+
+ self.tier_objects.append(tier)
+
+ def get_tiers(self):
+ return self.tier_objects
+
+ def get_tier_names(self):
+ tier_names = []
+ for tier in self.tier_objects:
+ tier_names.append(tier.get_name())
+ return tier_names
+
+ def get_tier(self, tier_name):
+ for i in range(0, len(self.tier_objects)):
+ if self.tier_objects[i].get_name() == tier_name:
+ return self.tier_objects[i]
+ return None
+
+ def get_test(self, test_name):
+ for i in range(0, len(self.tier_objects)):
+ if self.tier_objects[i].is_test(test_name):
+ return self.tier_objects[i].get_test(test_name)
+ return None
+
+ def get_tests(self, tier_name):
+ for i in range(0, len(self.tier_objects)):
+ if self.tier_objects[i].get_name() == tier_name:
+ return self.tier_objects[i].get_tests()
+ return None
+
+ def __str__(self):
+ output = ""
+ for i in range(0, len(self.tier_objects)):
+ output += str(self.tier_objects[i]) + "\n"
+ return output
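
run_tests.py builds its tier list through this class: TierBuilder(INSTALLER_TYPE, DEPLOY_SCENARIO, testcases_file). A short usage sketch, assuming the functest package is installed and the testcases.yaml from this change is at the path shown:

    # Usage sketch for TierBuilder; installer, scenario and path are examples only.
    import functest.ci.tier_builder as tb

    tiers = tb.TierBuilder("fuel", "os-odl_l2-nofeature-ha",
                           "/home/opnfv/repos/functest/functest/ci/testcases.yaml")
    for tier in tiers.get_tiers():
        print("%s: %s" % (tier.get_name(), tier.get_test_names()))
    print(tiers.get_test("vping_ssh"))   # None if the scenario filtered it out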
diff --git a/functest/ci/tier_handler.py b/functest/ci/tier_handler.py
new file mode 100644
index 00000000..1eadfba5
--- /dev/null
+++ b/functest/ci/tier_handler.py
@@ -0,0 +1,178 @@
+#!/usr/bin/env python
+#
+# jose.lausuch@ericsson.com
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+
+import re
+
+LINE_LENGTH = 72
+
+
+def split_text(text, max_len):
+ words = text.split()
+ lines = []
+ line = ""
+ for word in words:
+ if len(line) + len(word) < max_len - 1:
+ line += word + " "
+ else:
+ lines.append(line)
+ line = word + " "
+ if line != "":
+ lines.append(line)
+ return lines
+
+
+class Tier:
+
+ def __init__(self, name, order, ci_loop, description=""):
+ self.tests_array = []
+ self.name = name
+ self.order = order
+ self.ci_loop = ci_loop
+ self.description = description
+
+ def add_test(self, testcase):
+ self.tests_array.append(testcase)
+
+ def get_tests(self):
+ array_tests = []
+ for test in self.tests_array:
+ array_tests.append(test)
+ return array_tests
+
+ def get_test_names(self):
+ array_tests = []
+ for test in self.tests_array:
+ array_tests.append(test.get_name())
+ return array_tests
+
+ def get_test(self, test_name):
+ if self.is_test(test_name):
+ for test in self.tests_array:
+ if test.get_name() == test_name:
+ return test
+ return None
+
+ def is_test(self, test_name):
+ for test in self.tests_array:
+ if test.get_name() == test_name:
+ return True
+ return False
+
+ def get_name(self):
+ return self.name
+
+ def get_order(self):
+ return self.order
+
+ def get_ci_loop(self):
+ return self.ci_loop
+
+ def __str__(self):
+ lines = split_text(self.description, LINE_LENGTH - 6)
+
+ out = ""
+ out += ("+%s+\n" % ("=" * (LINE_LENGTH - 2)))
+ out += ("| Tier: " + self.name.ljust(LINE_LENGTH - 10) + "|\n")
+ out += ("+%s+\n" % ("=" * (LINE_LENGTH - 2)))
+ out += ("| Order: " + str(self.order).ljust(LINE_LENGTH - 10) + "|\n")
+ out += ("| CI Loop: " + str(self.ci_loop).ljust(LINE_LENGTH - 12) +
+ "|\n")
+ out += ("| Description:".ljust(LINE_LENGTH - 1) + "|\n")
+ for line in lines:
+ out += ("| " + line.ljust(LINE_LENGTH - 7) + " |\n")
+ out += ("| Test cases:".ljust(LINE_LENGTH - 1) + "|\n")
+ tests = self.get_test_names()
+ if len(tests) > 0:
+ for i in range(len(tests)):
+ out += ("| - %s |\n" % tests[i].ljust(LINE_LENGTH - 9))
+ else:
+ out += ("| (There are no supported test cases "
+ .ljust(LINE_LENGTH - 1) + "|\n")
+ out += ("| in this tier for the given scenario) "
+ .ljust(LINE_LENGTH - 1) + "|\n")
+ out += ("|".ljust(LINE_LENGTH - 1) + "|\n")
+ out += ("+%s+\n" % ("-" * (LINE_LENGTH - 2)))
+ return out
+
+
+class TestCase:
+
+ def __init__(self, name, dependency, criteria, blocking, description=""):
+ self.name = name
+ self.dependency = dependency
+ self.description = description
+ self.criteria = criteria
+ self.blocking = blocking
+
+ @staticmethod
+ def is_none(item):
+ return item is None or item == ""
+
+ def is_compatible(self, ci_installer, ci_scenario):
+ try:
+ if not self.is_none(ci_installer):
+ if re.search(self.dependency.get_installer(),
+ ci_installer) is None:
+ return False
+ if not self.is_none(ci_scenario):
+ if re.search(self.dependency.get_scenario(),
+ ci_scenario) is None:
+ return False
+ return True
+ except TypeError:
+ return False
+
+ def get_name(self):
+ return self.name
+
+ def get_criteria(self):
+ return self.criteria
+
+ def is_blocking(self):
+ return self.blocking
+
+ def __str__(self):
+ lines = split_text(self.description, LINE_LENGTH - 6)
+
+ out = ""
+ out += ("+%s+\n" % ("=" * (LINE_LENGTH - 2)))
+ out += ("| Testcase: " + self.name.ljust(LINE_LENGTH - 14) + "|\n")
+ out += ("+%s+\n" % ("=" * (LINE_LENGTH - 2)))
+ out += ("| Description:".ljust(LINE_LENGTH - 1) + "|\n")
+ for line in lines:
+ out += ("| " + line.ljust(LINE_LENGTH - 7) + " |\n")
+ out += ("| Criteria: " +
+ self.criteria.ljust(LINE_LENGTH - 14) + "|\n")
+ out += ("| Dependencies:".ljust(LINE_LENGTH - 1) + "|\n")
+ installer = self.dependency.get_installer()
+ scenario = self.dependency.get_scenario()
+ out += ("| - Installer:" + installer.ljust(LINE_LENGTH - 17) + "|\n")
+ out += ("| - Scenario :" + scenario.ljust(LINE_LENGTH - 17) + "|\n")
+ out += ("|".ljust(LINE_LENGTH - 1) + "|\n")
+ out += ("+%s+\n" % ("-" * (LINE_LENGTH - 2)))
+ return out
+
+
+class Dependency:
+
+ def __init__(self, installer, scenario):
+ self.installer = installer
+ self.scenario = scenario
+
+ def get_installer(self):
+ return self.installer
+
+ def get_scenario(self):
+ return self.scenario
+
+ def __str__(self):
+ return ("Dependency info:\n"
+ " installer: " + self.installer + "\n"
+ " scenario: " + self.scenario + "\n")
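
The installer/scenario dependencies declared in testcases.yaml are plain regular expressions; TestCase.is_compatible() above applies them with re.search to INSTALLER_TYPE and DEPLOY_SCENARIO. A standalone illustration with made-up values:

    # Standalone illustration of how testcases.yaml dependencies select tests.
    import re

    def compatible(dep_installer, dep_scenario, installer, scenario):
        # An empty dependency means "any"; otherwise it is used as a regex.
        if dep_installer and re.search(dep_installer, installer) is None:
            return False
        if dep_scenario and re.search(dep_scenario, scenario) is None:
            return False
        return True

    # vping_ssh declares scenario: '^((?!bgpvpn|odl_l3).)*$'
    print(compatible('', '^((?!bgpvpn|odl_l3).)*$',
                     'fuel', 'os-odl_l2-nofeature-ha'))   # True
    print(compatible('', '^((?!bgpvpn|odl_l3).)*$',
                     'fuel', 'os-odl_l2-bgpvpn-ha'))      # False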