Diffstat (limited to 'functest')
-rw-r--r--  functest/ci/check_deployment.py                                     163
-rw-r--r--  functest/ci/check_os.sh                                             123
-rw-r--r--  functest/ci/config_functest.yaml                                      4
-rw-r--r--  functest/ci/download_images.sh                                       44
-rw-r--r--  functest/ci/prepare_env.py                                           38
-rw-r--r--  functest/cli/commands/cli_os.py                                       5
-rw-r--r--  functest/energy/energy.py                                            31
-rw-r--r--  functest/opnfv_tests/openstack/rally/blacklist.txt                    3
-rw-r--r--  functest/opnfv_tests/openstack/rally/rally.py                       208
-rw-r--r--  functest/opnfv_tests/openstack/refstack_client/refstack_client.py  164
-rw-r--r--  functest/opnfv_tests/openstack/tempest/conf_utils.py                 13
-rw-r--r--  functest/opnfv_tests/openstack/vping/vping_ssh.py                    78
-rw-r--r--  functest/opnfv_tests/sdn/odl/odl.py                                   4
-rw-r--r--  functest/opnfv_tests/vnf/ims/clearwater_ims_base.py                  14
-rw-r--r--  functest/opnfv_tests/vnf/ims/cloudify_ims.py                        135
-rw-r--r--  functest/opnfv_tests/vnf/ims/cloudify_ims.yaml                        2
-rw-r--r--  functest/tests/unit/ci/test_check_deployment.py                     176
-rw-r--r--  functest/tests/unit/ci/test_prepare_env.py                           24
-rw-r--r--  functest/tests/unit/cli/commands/test_cli_os.py                      10
-rw-r--r--  functest/tests/unit/energy/test_functest_energy.py                    5
-rw-r--r--  functest/tests/unit/odl/test_odl.py                                  17
-rw-r--r--  functest/tests/unit/openstack/rally/test_rally.py                   532
-rw-r--r--  functest/utils/openstack_utils.py                                     4
23 files changed, 1082 insertions, 715 deletions
diff --git a/functest/ci/check_deployment.py b/functest/ci/check_deployment.py
new file mode 100644
index 00000000..fe20dc8f
--- /dev/null
+++ b/functest/ci/check_deployment.py
@@ -0,0 +1,163 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2017 Ericsson and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+"""
+OpenStack deployment checker
+
+Verifies that:
+ - Credentials file is given and contains the right information
+ - OpenStack endpoints are reachable
+"""
+
+import logging
+import logging.config
+import os
+import pkg_resources
+import socket
+import time
+from urlparse import urlparse
+
+from snaps.openstack.utils import glance_utils
+from snaps.openstack.utils import keystone_utils
+from snaps.openstack.utils import neutron_utils
+from snaps.openstack.utils import nova_utils
+from snaps.openstack.tests import openstack_tests
+
+__author__ = "Jose Lausuch <jose.lausuch@ericsson.com>"
+
+LOGGER = logging.getLogger(__name__)
+
+
+def verify_connectivity(address, port, timeout=10):
+    """ Return True if an IP/port is reachable."""
+    connection = socket.socket()
+    count = 0
+    while count < timeout:
+        try:
+            connection.connect((address, port))
+            LOGGER.debug('%s:%s is reachable!', address, port)
+            return True
+        except socket.error:
+            count += 1
+            time.sleep(1)
+            continue
+    LOGGER.error('%s:%s is not reachable.', address, port)
+    return False
+
+
+class CheckDeployment(object):
+ """ Check deployment class."""
+
+ def __init__(self, rc_file='/home/opnfv/functest/conf/openstack.creds'):
+ self.rc_file = rc_file
+ self.services = ('compute', 'network', 'image')
+ self.os_creds = None
+
+ def check_rc(self):
+ """ Check if RC file exists and contains OS_AUTH_URL """
+ if not os.path.isfile(self.rc_file):
+ raise IOError('RC file {} does not exist!'.format(self.rc_file))
+ if 'OS_AUTH_URL' not in open(self.rc_file).read():
+ raise SyntaxError('OS_AUTH_URL not defined in {}.'.
+ format(self.rc_file))
+
+ def check_auth_endpoint(self):
+ """ Verifies connectivity to the OS_AUTH_URL given in the RC file """
+ rc_endpoint = self.os_creds.auth_url
+ if not (verify_connectivity(urlparse(rc_endpoint).hostname,
+ urlparse(rc_endpoint).port)):
+ raise Exception("OS_AUTH_URL {} is not reachable.".
+ format(rc_endpoint))
+ LOGGER.info("Connectivity to OS_AUTH_URL %s ...OK", rc_endpoint)
+
+ def check_public_endpoint(self):
+ """ Gets the public endpoint and verifies connectivity to it """
+ public_endpoint = keystone_utils.get_endpoint(self.os_creds,
+ 'identity',
+ interface='public')
+ if not (verify_connectivity(urlparse(public_endpoint).hostname,
+ urlparse(public_endpoint).port)):
+ raise Exception("Public endpoint {} is not reachable.".
+ format(public_endpoint))
+ LOGGER.info("Connectivity to the public endpoint %s ...OK",
+ public_endpoint)
+
+ def check_service_endpoint(self, service):
+ """ Verifies connectivity to a given openstack service """
+ endpoint = keystone_utils.get_endpoint(self.os_creds,
+ service,
+ interface='public')
+ if not (verify_connectivity(urlparse(endpoint).hostname,
+ urlparse(endpoint).port)):
+ raise Exception("{} endpoint {} is not reachable.".
+ format(service, endpoint))
+ LOGGER.info("Connectivity to endpoint '%s' %s ...OK",
+ service, endpoint)
+
+ def check_nova(self):
+ """ checks that a simple nova operation works """
+ try:
+ client = nova_utils.nova_client(self.os_creds)
+ client.servers.list()
+ LOGGER.info("Nova service ...OK")
+ except Exception as error:
+ LOGGER.error("Nova service ...FAILED")
+ raise error
+
+ def check_neutron(self):
+ """ checks that a simple neutron operation works """
+ try:
+ client = neutron_utils.neutron_client(self.os_creds)
+ client.list_networks()
+ LOGGER.info("Neutron service ...OK")
+ except Exception as error:
+ LOGGER.error("Neutron service ...FAILED")
+ raise error
+
+ def check_glance(self):
+ """ checks that a simple glance operation works """
+ try:
+ client = glance_utils.glance_client(self.os_creds)
+ client.images.list()
+ LOGGER.info("Glance service ...OK")
+ except Exception as error:
+ LOGGER.error("Glance service ...FAILED")
+ raise error
+
+ def check_all(self):
+ """
+ Calls all the class functions and returns 0 if all of them succeed.
+ This is the method called by prepare_env or CLI
+ """
+ self.check_rc()
+ try:
+ self.os_creds = openstack_tests.get_credentials(
+ os_env_file=self.rc_file,
+ proxy_settings_str=None,
+ ssh_proxy_cmd=None)
+        except Exception:
+ raise Exception("Problem while getting credentials object.")
+ if self.os_creds is None:
+ raise Exception("Credentials is None.")
+ self.check_auth_endpoint()
+ self.check_public_endpoint()
+ for service in self.services:
+ self.check_service_endpoint(service)
+ self.check_nova()
+ self.check_neutron()
+ self.check_glance()
+ return 0
+
+
+def main():
+ """Entry point"""
+ logging.config.fileConfig(pkg_resources.resource_filename(
+ 'functest', 'ci/logging.ini'))
+ deployment = CheckDeployment()
+ return deployment.check_all()
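
For reference, the new checker can also be exercised on its own; below is a minimal usage sketch, assuming functest is installed and an RC file is available (the path shown is just the module default, not a requirement):

    # Minimal usage sketch for CheckDeployment (the RC path is only the module default).
    from functest.ci.check_deployment import CheckDeployment

    checker = CheckDeployment(rc_file='/home/opnfv/functest/conf/openstack.creds')
    try:
        # check_all() raises on the first failing check and returns 0 when everything passes.
        checker.check_all()
        print("OpenStack deployment looks healthy")
    except Exception as exc:
        print("Deployment check failed: {}".format(exc))
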
diff --git a/functest/ci/check_os.sh b/functest/ci/check_os.sh
deleted file mode 100644
index 7b66f3da..00000000
--- a/functest/ci/check_os.sh
+++ /dev/null
@@ -1,123 +0,0 @@
-#!/bin/bash
-#
-# Simple script to check the basic OpenStack clients
-#
-# Author:
-# jose.lausuch@ericsson.com
-#
-
-if [[ ${OS_INSECURE,,} == "true" ]]; then
- options='--insecure'
-else
- options=''
-fi
-
-declare -A service_cmd_array
-service_cmd_array['nova']="openstack $options server list"
-service_cmd_array['neutron']="openstack $options network list"
-service_cmd_array['keystone']="openstack $options endpoint list"
-service_cmd_array['cinder']="openstack $options volume list"
-service_cmd_array['glance']="openstack $options image list"
-
-MANDATORY_SERVICES='nova neutron keystone glance'
-OPTIONAL_SERVICES='cinder'
-
-verify_connectivity() {
- for i in $(seq 0 9); do
- if echo "test" | nc -v -w 10 $1 $2 &>/dev/null; then
- return 0
- fi
- sleep 1
- done
- return 1
-}
-
-verify_SSL_connectivity() {
- openssl s_client -connect $1:$2 &>/dev/null
- return $?
-}
-
-check_service() {
- local service cmd
- service=$1
- cmd=${service_cmd_array[$service]}
- if [ -z "$2" ]; then
- required='false'
- else
- required=$2
- fi
- echo ">>Checking ${service} service..."
- if ! openstack $options service list | grep -i ${service} > /dev/null; then
- if [ "$required" == 'false' ]; then
- echo "WARN: Optional Service ${service} is not enabled!"
- return
- else
- echo "ERROR: Required Service ${service} is not enabled!"
- exit 1
- fi
- fi
- $cmd &>/dev/null
- result=$?
- if [ $result -ne 0 ]; then
- echo "ERROR: Failed execution $cmd. The $service does not seem to be working."
- exit 1
- else
- echo " ...OK"
- fi
-}
-
-if [ -z $OS_AUTH_URL ];then
- echo "ERROR: OS_AUTH_URL environment variable missing... Have you sourced the OpenStack credentials?"
- exit 1
-fi
-
-
-echo "Checking OpenStack endpoints:"
-publicURL=$(openstack $options catalog show identity |awk '/public/ {print $4}')
-publicIP=$(echo $publicURL|sed 's/^.*http.*\:\/\///'|sed 's/.[^:]*$//')
-publicPort=$(echo $publicURL|grep -Po '(?<=:)\d+')
-https_enabled=$(echo $publicURL | grep 'https')
-if [[ -n $https_enabled ]]; then
- echo ">>Verifying SSL connectivity to the public endpoint $publicIP:$publicPort..."
- verify_SSL_connectivity $publicIP $publicPort
-else
- echo ">>Verifying connectivity to the public endpoint $publicIP:$publicPort..."
- verify_connectivity $publicIP $publicPort
-fi
-RETVAL=$?
-if [ $RETVAL -ne 0 ]; then
- echo "ERROR: Cannot talk to the public endpoint $publicIP:$publicPort ."
- echo "OS_AUTH_URL=$OS_AUTH_URL"
- exit 1
-fi
-echo " ...OK"
-
-
-echo "Checking Required OpenStack services:"
-for service in $MANDATORY_SERVICES; do
- check_service $service "true"
-done
-echo "Required OpenStack services are OK."
-
-echo "Checking Optional OpenStack services:"
-for service in $OPTIONAL_SERVICES; do
- check_service $service
-done
-
-echo "Checking External network..."
-networks=($(neutron $options net-list -F id | tail -n +4 | head -n -1 | awk '{print $2}'))
-is_external=False
-for net in "${networks[@]}"
-do
- is_external=$(neutron $options net-show $net|grep "router:external"|awk '{print $4}')
- if [ $is_external == "True" ]; then
- echo "External network found: $net"
- break
- fi
-done
-if [ $is_external == "False" ]; then
- echo "ERROR: There are no external networks in the deployment."
- exit 1
-fi
-
-exit 0
diff --git a/functest/ci/config_functest.yaml b/functest/ci/config_functest.yaml
index 77a52c65..e26b3139 100644
--- a/functest/ci/config_functest.yaml
+++ b/functest/ci/config_functest.yaml
@@ -21,7 +21,7 @@ general:
functest_data: /home/opnfv/functest/data
ims_data: /home/opnfv/functest/data/ims/
functest_images: /home/opnfv/functest/images
- rally_inst: /home/opnfv/.rally
+ rally_inst: /root/.rally
openstack:
creds: /home/opnfv/functest/conf/openstack.creds
@@ -200,6 +200,6 @@ results:
test_db_url: http://testresults.opnfv.org/test/api/v1/results
energy_recorder:
- api_url: http://opnfv.fr:8888/resources
+ api_url: http://energy.opnfv.fr/resources
api_user: ""
api_password: ""
diff --git a/functest/ci/download_images.sh b/functest/ci/download_images.sh
index 23e09c10..dd3e3789 100644
--- a/functest/ci/download_images.sh
+++ b/functest/ci/download_images.sh
@@ -8,31 +8,55 @@ RED='\033[1;31m'
NC='\033[0m' # No Color
function usage(){
- echo -e "${RED}USAGE: $script <destination_folder>${NC}"
+ echo -e "${RED}USAGE: $script <destination_folder> <scenario_name> [arch]${NC}"
exit 0
}
script=`basename "$0"`
IMAGES_FOLDER_DIR=$1
+SCENARIO=$2
+ARCH=$3
if [[ -z $IMAGES_FOLDER_DIR ]]; then usage; fi;
set -ex
mkdir -p ${IMAGES_FOLDER_DIR}
+
+####################
+# MANDATORY IMAGES #
+####################
+# These images should be present in Functest for the tests to work
+
+# Functest:
wget -nc ${CIRROS_REPO_URL}/${CIRROS_X86_64_TAG}/cirros-${CIRROS_X86_64_TAG}-x86_64-disk.img -P ${IMAGES_FOLDER_DIR}
wget -nc ${CIRROS_REPO_URL}/${CIRROS_X86_64_TAG}/cirros-${CIRROS_X86_64_TAG}-x86_64-lxc.tar.gz -P ${IMAGES_FOLDER_DIR}
-wget -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${IMAGES_FOLDER_DIR}
-
-# Add 3rd-party images for aarch64, since Functest can be run on an x86 machine to test an aarch64 POD
-wget -nc ${CIRROS_REPO_URL}/daily/20${CIRROS_AARCH64_TAG}/cirros-d${CIRROS_AARCH64_TAG}-aarch64-disk.img -P ${IMAGES_FOLDER_DIR}
-wget -nc ${CIRROS_REPO_URL}/daily/20${CIRROS_AARCH64_TAG}/cirros-d${CIRROS_AARCH64_TAG}-aarch64-initramfs -P ${IMAGES_FOLDER_DIR}
-wget -nc ${CIRROS_REPO_URL}/daily/20${CIRROS_AARCH64_TAG}/cirros-d${CIRROS_AARCH64_TAG}-aarch64-kernel -P ${IMAGES_FOLDER_DIR}
-# Add Ubuntu 14 qcow2 image
+# SNAPS:
wget -nc http://uec-images.ubuntu.com/releases/trusty/14.04/ubuntu-14.04-server-cloudimg-amd64-disk1.img -P ${IMAGES_FOLDER_DIR}
-
-# Add Centos 7 qcow2 image
wget -nc http://cloud.centos.org/centos/7/images/CentOS-7-x86_64-GenericCloud.qcow2 -P ${IMAGES_FOLDER_DIR}
+
+###################
+# OPTIONAL IMAGES #
+###################
+# Optional images can be commented if they are not going to be used by the tests
+
+# SDNVPN (odl-bgpvpn scenarios):
+if [[ ${SCENARIO} == *"bgpvpn"* ]]; then
+ wget -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${IMAGES_FOLDER_DIR}
+fi
+
+# ONOS (onos-sfc scenarios):
+if [[ ${SCENARIO} == *"onos-sfc"* ]]; then
+ wget -nc http://artifacts.opnfv.org/onosfw/images/firewall_block_image.img -P ${IMAGES_FOLDER_DIR}
+fi
+
+if [[ ${ARCH} == "arm" ]] || [[ ${ARCH} == "aarch64" ]]; then
+ # ARM (aarch64 cirros images):
+ wget -nc ${CIRROS_REPO_URL}/daily/20${CIRROS_AARCH64_TAG}/cirros-d${CIRROS_AARCH64_TAG}-aarch64-disk.img -P ${IMAGES_FOLDER_DIR}
+ wget -nc ${CIRROS_REPO_URL}/daily/20${CIRROS_AARCH64_TAG}/cirros-d${CIRROS_AARCH64_TAG}-aarch64-initramfs -P ${IMAGES_FOLDER_DIR}
+ wget -nc ${CIRROS_REPO_URL}/daily/20${CIRROS_AARCH64_TAG}/cirros-d${CIRROS_AARCH64_TAG}-aarch64-kernel -P ${IMAGES_FOLDER_DIR}
+fi
+
set +ex
\ No newline at end of file
diff --git a/functest/ci/prepare_env.py b/functest/ci/prepare_env.py
index da3e6245..c40e3266 100644
--- a/functest/ci/prepare_env.py
+++ b/functest/ci/prepare_env.py
@@ -19,6 +19,7 @@ import fileinput
import yaml
+from functest.ci import check_deployment
import functest.utils.functest_utils as ft_utils
import functest.utils.openstack_utils as os_utils
from functest.utils.constants import CONST
@@ -177,26 +178,6 @@ def create_directories():
def source_rc_file():
print_separator()
- if CONST.__getattribute__('openstack_creds') is None:
- logger.warning("The environment variable 'creds' must be set and"
- "pointing to the local RC file. Using default: "
- "/home/opnfv/functest/conf/openstack.creds ...")
- os.path.join(
- CONST.__getattribute__('dir_functest_conf'), 'openstack.creds')
-
- if not os.path.isfile(CONST.__getattribute__('openstack_creds')):
- raise Exception(
- "OpenStack credentials file not provided. "
- "The OpenStack credentials must be in {}"
- .format(CONST.__getattribute__('openstack_creds')))
- else:
- logger.info("RC file provided in %s."
- % CONST.__getattribute__('openstack_creds'))
- if os.path.getsize(CONST.__getattribute__('openstack_creds')) == 0:
- raise Exception(
- "The OpenStack RC file {} is empty."
- .format(CONST.__getattribute__('openstack_creds')))
-
logger.info("Sourcing the OpenStack RC file...")
os_utils.source_credentials(CONST.__getattribute__('openstack_creds'))
for key, value in os.environ.iteritems():
@@ -250,18 +231,9 @@ def update_db_url():
def verify_deployment():
print_separator()
- logger.info("Verifying OpenStack services...")
- cmd = "check_os.sh"
-
- logger.debug("Executing command: %s" % cmd)
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
-
- while p.poll() is None:
- line = p.stdout.readline().rstrip()
- if "ERROR" in line:
- logger.error(line)
- raise Exception("Problem while running '{}'.".format(cmd))
- logger.info(line)
+ logger.info("Verifying OpenStack deployment...")
+ deployment = check_deployment.CheckDeployment()
+ deployment.check_all()
def install_rally():
@@ -364,11 +336,11 @@ def prepare_env(**kwargs):
return -1
elif kwargs['action'] == "start":
logger.info("######### Preparing Functest environment #########\n")
+ verify_deployment()
check_env_variables()
create_directories()
source_rc_file()
update_config_file()
- verify_deployment()
install_rally()
install_tempest()
create_flavor()
diff --git a/functest/cli/commands/cli_os.py b/functest/cli/commands/cli_os.py
index 44181d4f..f4ec1661 100644
--- a/functest/cli/commands/cli_os.py
+++ b/functest/cli/commands/cli_os.py
@@ -12,8 +12,8 @@ import os
import click
+from functest.ci import check_deployment
from functest.utils.constants import CONST
-import functest.utils.functest_utils as ft_utils
import functest.utils.openstack_clean as os_clean
import functest.utils.openstack_snapshot as os_snapshot
@@ -49,7 +49,8 @@ class CliOpenStack(object):
def check(self):
self.ping_endpoint()
- ft_utils.execute_command("check_os.sh", verbose=False)
+ deployment = check_deployment.CheckDeployment()
+ deployment.check_all()
def snapshot_create(self):
self.ping_endpoint()
diff --git a/functest/energy/energy.py b/functest/energy/energy.py
index 71b71239..372c1d32 100644
--- a/functest/energy/energy.py
+++ b/functest/energy/energy.py
@@ -13,11 +13,24 @@
import json
import logging
import urllib
+
+from functools import wraps
import requests
import functest.utils.functest_utils as ft_utils
+def finish_session(current_scenario):
+ """Finish a recording session."""
+ if current_scenario is None:
+ EnergyRecorder.stop()
+ else:
+ EnergyRecorder.submit_scenario(
+ current_scenario["scenario"],
+ current_scenario["step"]
+ )
+
+
def enable_recording(method):
"""
Record energy during method execution.
@@ -30,6 +43,7 @@ def enable_recording(method):
.. note:: "method" should belong to a class having a "case_name"
attribute
"""
+ @wraps(method)
def wrapper(*args):
"""
Record energy during method execution (implementation).
@@ -38,14 +52,12 @@ def enable_recording(method):
"""
current_scenario = EnergyRecorder.get_current_scenario()
EnergyRecorder.start(args[0].case_name)
- return_value = method(*args)
- if current_scenario is None:
- EnergyRecorder.stop()
- else:
- EnergyRecorder.submit_scenario(
- current_scenario["scenario"],
- current_scenario["step"]
- )
+ try:
+ return_value = method(*args)
+ finish_session(current_scenario)
+ except Exception: # pylint: disable=broad-except
+ finish_session(current_scenario)
+ raise
return return_value
return wrapper
@@ -246,7 +258,6 @@ class EnergyRecorder(object):
"""Get current running scenario (if any, None else)."""
EnergyRecorder.logger.debug("Getting current scenario")
return_value = None
- print "In get current"
try:
# Ensure that connectyvity settings are loaded
EnergyRecorder.load_config()
@@ -263,13 +274,11 @@ class EnergyRecorder(object):
log_msg = log_msg.format(
EnergyRecorder.energy_recorder_api["uri"])
EnergyRecorder.logger.error(log_msg)
- print log_msg
return_value = None
else:
log_msg = "Error while getting current scenario\n{}"
log_msg = log_msg.format(response.text)
EnergyRecorder.logger.error(log_msg)
- print log_msg
return_value = None
except Exception: # pylint: disable=broad-except
# Default exception handler to ensure that method
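
The reworked decorator now restores the previous recording session whether or not the wrapped method raises. A standalone sketch of that pattern follows; the names are illustrative and not part of the functest API:

    # Generic sketch of the decorator pattern above: finish the session on both the
    # success and failure paths, then re-raise (names are illustrative only).
    from functools import wraps

    def recorded(method):
        @wraps(method)
        def wrapper(*args, **kwargs):
            previous = "previous-session"   # stand-in for EnergyRecorder.get_current_scenario()
            try:
                result = method(*args, **kwargs)
                finish(previous)            # same clean-up as on the failure path
            except Exception:
                finish(previous)
                raise
            return result
        return wrapper

    def finish(previous):
        print("restoring session: {}".format(previous))

    @recorded
    def sample_run():
        return "done"

    print(sample_run())
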
diff --git a/functest/opnfv_tests/openstack/rally/blacklist.txt b/functest/opnfv_tests/openstack/rally/blacklist.txt
index 3a17fa61..95bea2b7 100644
--- a/functest/opnfv_tests/openstack/rally/blacklist.txt
+++ b/functest/opnfv_tests/openstack/rally/blacklist.txt
@@ -1,8 +1,7 @@
scenario:
-
scenarios:
- - os-nosdn-lxd-ha
- - os-nosdn-lxd-noha
+ - '^os-nosdn-lxd-(no)?ha$'
installers:
- joid
tests:
diff --git a/functest/opnfv_tests/openstack/rally/rally.py b/functest/opnfv_tests/openstack/rally/rally.py
index 24c9147c..6b7c49ca 100644
--- a/functest/opnfv_tests/openstack/rally/rally.py
+++ b/functest/opnfv_tests/openstack/rally/rally.py
@@ -8,27 +8,32 @@
# http://www.apache.org/licenses/LICENSE-2.0
#
+"""Rally testcases implementation."""
+
from __future__ import division
import json
import logging
import os
-import pkg_resources
import re
import subprocess
import time
import iniparse
+import pkg_resources
import yaml
from functest.core import testcase
+from functest.energy import energy
from functest.utils.constants import CONST
import functest.utils.openstack_utils as os_utils
-logger = logging.getLogger(__name__)
+LOGGER = logging.getLogger(__name__)
class RallyBase(testcase.OSGCTestCase):
+ """Base class form Rally testcases implementation."""
+
TESTS = ['authenticate', 'glance', 'cinder', 'heat', 'keystone',
'neutron', 'nova', 'quotas', 'vm', 'all']
GLANCE_IMAGE_NAME = CONST.__getattribute__('openstack_image_name')
@@ -64,6 +69,7 @@ class RallyBase(testcase.OSGCTestCase):
RALLY_ROUTER_NAME = CONST.__getattribute__('rally_router_name')
def __init__(self, **kwargs):
+ """Initialize RallyBase object."""
super(RallyBase, self).__init__(**kwargs)
self.mode = ''
self.summary = []
@@ -74,6 +80,12 @@ class RallyBase(testcase.OSGCTestCase):
self.network_dict = {}
self.volume_type = None
self.smoke = None
+ self.test_name = None
+ self.image_exists = None
+ self.image_id = None
+ self.start_time = None
+ self.result = None
+ self.details = None
def _build_task_args(self, test_file_name):
task_args = {'service_list': [test_file_name]}
@@ -117,7 +129,7 @@ class RallyBase(testcase.OSGCTestCase):
raise Exception("The scenario '%s' does not exist."
% scenario_file_name)
- logger.debug('Scenario fetched from : {}'.format(scenario_file_name))
+ LOGGER.debug('Scenario fetched from : %s', scenario_file_name)
test_file_name = os.path.join(self.TEMP_DIR, test_yaml_file_name)
if not os.path.exists(self.TEMP_DIR):
@@ -129,7 +141,8 @@ class RallyBase(testcase.OSGCTestCase):
@staticmethod
def get_task_id(cmd_raw):
"""
- get task id from command rally result
+ Get task id from command rally result.
+
:param cmd_raw:
:return: task_id as string
"""
@@ -144,7 +157,8 @@ class RallyBase(testcase.OSGCTestCase):
@staticmethod
def task_succeed(json_raw):
"""
- Parse JSON from rally JSON results
+ Parse JSON from rally JSON results.
+
:param json_raw:
:return: Bool
"""
@@ -161,6 +175,7 @@ class RallyBase(testcase.OSGCTestCase):
@staticmethod
def live_migration_supported():
+ """Determine is live migration is supported."""
config = iniparse.ConfigParser()
if (config.read(RallyBase.TEMPEST_CONF_FILE) and
config.has_section('compute-feature-enabled') and
@@ -173,6 +188,7 @@ class RallyBase(testcase.OSGCTestCase):
@staticmethod
def get_cmd_output(proc):
+ """Get command stdout."""
result = ""
while proc.poll() is None:
line = proc.stdout.readline()
@@ -181,6 +197,7 @@ class RallyBase(testcase.OSGCTestCase):
@staticmethod
def excl_scenario():
+ """Exclude scenario."""
black_tests = []
try:
with open(RallyBase.BLACKLIST_FILE, 'r') as black_list_file:
@@ -188,22 +205,45 @@ class RallyBase(testcase.OSGCTestCase):
installer_type = CONST.__getattribute__('INSTALLER_TYPE')
deploy_scenario = CONST.__getattribute__('DEPLOY_SCENARIO')
- if (bool(installer_type) * bool(deploy_scenario)):
- if 'scenario' in black_list_yaml.keys():
- for item in black_list_yaml['scenario']:
- scenarios = item['scenarios']
- installers = item['installers']
- if (deploy_scenario in scenarios and
- installer_type in installers):
- tests = item['tests']
- black_tests.extend(tests)
+ if (bool(installer_type) and bool(deploy_scenario) and
+ 'scenario' in black_list_yaml.keys()):
+ for item in black_list_yaml['scenario']:
+ scenarios = item['scenarios']
+ installers = item['installers']
+ in_it = RallyBase.in_iterable_re
+ if (in_it(deploy_scenario, scenarios) and
+ in_it(installer_type, installers)):
+ tests = item['tests']
+ black_tests.extend(tests)
except Exception:
- logger.debug("Scenario exclusion not applied.")
+ LOGGER.debug("Scenario exclusion not applied.")
return black_tests
@staticmethod
+ def in_iterable_re(needle, haystack):
+ """
+ Check if given needle is in the iterable haystack, using regex.
+
+ :param needle: string to be matched
+ :param haystack: iterable of strings (optionally regex patterns)
+    :return: True if needle is equal to any of the elements in haystack,
+ or if a nonempty regex pattern in haystack is found in needle.
+ """
+ # match without regex
+ if needle in haystack:
+ return True
+
+ for pattern in haystack:
+ # match if regex pattern is set and found in the needle
+ if pattern and re.search(pattern, needle) is not None:
+ return True
+ else:
+ return False
+
+ @staticmethod
def excl_func():
+ """Exclude functionalities."""
black_tests = []
func_list = []
@@ -221,19 +261,23 @@ class RallyBase(testcase.OSGCTestCase):
if func in functions:
tests = item['tests']
black_tests.extend(tests)
- except Exception:
- logger.debug("Functionality exclusion not applied.")
+ except Exception: # pylint: disable=broad-except
+ LOGGER.debug("Functionality exclusion not applied.")
return black_tests
@staticmethod
def apply_blacklist(case_file_name, result_file_name):
- logger.debug("Applying blacklist...")
+ """Apply blacklist."""
+ LOGGER.debug("Applying blacklist...")
cases_file = open(case_file_name, 'r')
result_file = open(result_file_name, 'w')
black_tests = list(set(RallyBase.excl_func() +
- RallyBase.excl_scenario()))
+ RallyBase.excl_scenario()))
+
+ if black_tests:
+ LOGGER.debug("Blacklisted tests: " + str(black_tests))
include = True
for cases_line in cases_file:
@@ -254,56 +298,58 @@ class RallyBase(testcase.OSGCTestCase):
@staticmethod
def file_is_empty(file_name):
+ """Determine is a file is empty."""
try:
if os.stat(file_name).st_size > 0:
return False
- except:
+ except Exception: # pylint: disable=broad-except
pass
return True
def _run_task(self, test_name):
- logger.info('Starting test scenario "{}" ...'.format(test_name))
+ """Run a task."""
+ LOGGER.info('Starting test scenario "%s" ...', test_name)
task_file = os.path.join(self.RALLY_DIR, 'task.yaml')
if not os.path.exists(task_file):
- logger.error("Task file '%s' does not exist." % task_file)
- raise Exception("Task file '%s' does not exist." % task_file)
+ LOGGER.error("Task file '%s' does not exist.", task_file)
+ raise Exception("Task file '%s' does not exist.", task_file)
file_name = self._prepare_test_list(test_name)
if self.file_is_empty(file_name):
- logger.info('No tests for scenario "{}"'.format(test_name))
+ LOGGER.info('No tests for scenario "%s"', test_name)
return
cmd_line = ("rally task start --abort-on-sla-failure "
"--task {0} "
"--task-args \"{1}\""
.format(task_file, self._build_task_args(test_name)))
- logger.debug('running command line: {}'.format(cmd_line))
+ LOGGER.debug('running command line: %s', cmd_line)
- p = subprocess.Popen(cmd_line, stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT, shell=True)
- output = self._get_output(p, test_name)
+ proc = subprocess.Popen(cmd_line, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT, shell=True)
+ output = self._get_output(proc, test_name)
task_id = self.get_task_id(output)
- logger.debug('task_id : {}'.format(task_id))
+ LOGGER.debug('task_id : %s', task_id)
if task_id is None:
- logger.error('Failed to retrieve task_id, validating task...')
+ LOGGER.error('Failed to retrieve task_id, validating task...')
cmd_line = ("rally task validate "
"--task {0} "
"--task-args \"{1}\""
.format(task_file, self._build_task_args(test_name)))
- logger.debug('running command line: {}'.format(cmd_line))
- p = subprocess.Popen(cmd_line, stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT, shell=True)
- output = self.get_cmd_output(p)
- logger.error("Task validation result:" + "\n" + output)
+ LOGGER.debug('running command line: %s', cmd_line)
+ proc = subprocess.Popen(cmd_line, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT, shell=True)
+ output = self.get_cmd_output(proc)
+ LOGGER.error("Task validation result:" + "\n" + output)
return
# check for result directory and create it otherwise
if not os.path.exists(self.RESULTS_DIR):
- logger.debug('{} does not exist, we create it.'
- .format(self.RESULTS_DIR))
+ LOGGER.debug('%s does not exist, we create it.',
+ self.RESULTS_DIR)
os.makedirs(self.RESULTS_DIR)
# write html report file
@@ -312,25 +358,25 @@ class RallyBase(testcase.OSGCTestCase):
cmd_line = "rally task report {} --out {}".format(task_id,
report_html_dir)
- logger.debug('running command line: {}'.format(cmd_line))
+ LOGGER.debug('running command line: %s', cmd_line)
os.popen(cmd_line)
# get and save rally operation JSON result
cmd_line = "rally task results %s" % task_id
- logger.debug('running command line: {}'.format(cmd_line))
+ LOGGER.debug('running command line: %s', cmd_line)
cmd = os.popen(cmd_line)
json_results = cmd.read()
report_json_name = 'opnfv-{}.json'.format(test_name)
report_json_dir = os.path.join(self.RESULTS_DIR, report_json_name)
- with open(report_json_dir, 'w') as f:
- logger.debug('saving json file')
- f.write(json_results)
+ with open(report_json_dir, 'w') as r_file:
+ LOGGER.debug('saving json file')
+ r_file.write(json_results)
- """ parse JSON operation result """
+ # parse JSON operation result
if self.task_succeed(json_results):
- logger.info('Test scenario: "{}" OK.'.format(test_name) + "\n")
+ LOGGER.info('Test scenario: "{}" OK.'.format(test_name) + "\n")
else:
- logger.info('Test scenario: "{}" Failed.'.format(test_name) + "\n")
+ LOGGER.info('Test scenario: "{}" Failed.'.format(test_name) + "\n")
def _get_output(self, proc, test_name):
result = ""
@@ -367,15 +413,15 @@ class RallyBase(testcase.OSGCTestCase):
try:
success += float(percentage)
except ValueError:
- logger.info('Percentage error: %s, %s' %
- (percentage, line))
+ LOGGER.info('Percentage error: %s, %s',
+ percentage, line)
nb_totals += 1
elif "Full duration" in line:
duration = line.split(': ')[1]
try:
overall_duration += float(duration)
except ValueError:
- logger.info('Duration error: %s, %s' % (duration, line))
+ LOGGER.info('Duration error: %s, %s', duration, line)
overall_duration = "{:10.2f}".format(overall_duration)
if nb_totals == 0:
@@ -389,30 +435,30 @@ class RallyBase(testcase.OSGCTestCase):
'success': success_avg}
self.summary.append(scenario_summary)
- logger.debug("\n" + result)
+ LOGGER.debug("\n" + result)
return result
def _prepare_env(self):
- logger.debug('Validating the test name...')
- if not (self.test_name in self.TESTS):
+ LOGGER.debug('Validating the test name...')
+ if self.test_name not in self.TESTS:
raise Exception("Test name '%s' is invalid" % self.test_name)
volume_types = os_utils.list_volume_types(self.cinder_client,
private=False)
if volume_types:
- logger.debug("Using existing volume type(s)...")
+ LOGGER.debug("Using existing volume type(s)...")
else:
- logger.debug('Creating volume type...')
+ LOGGER.debug('Creating volume type...')
self.volume_type = os_utils.create_volume_type(
self.cinder_client, self.CINDER_VOLUME_TYPE_NAME)
if self.volume_type is None:
raise Exception("Failed to create volume type '%s'" %
self.CINDER_VOLUME_TYPE_NAME)
- logger.debug("Volume type '%s' is created succesfully." %
+ LOGGER.debug("Volume type '%s' is created succesfully.",
self.CINDER_VOLUME_TYPE_NAME)
- logger.debug('Getting or creating image...')
+ LOGGER.debug('Getting or creating image...')
self.image_exists, self.image_id = os_utils.get_or_create_image(
self.GLANCE_IMAGE_NAME,
self.GLANCE_IMAGE_PATH,
@@ -421,7 +467,7 @@ class RallyBase(testcase.OSGCTestCase):
raise Exception("Failed to get or create image '%s'" %
self.GLANCE_IMAGE_NAME)
- logger.debug("Creating network '%s'..." % self.RALLY_PRIVATE_NET_NAME)
+ LOGGER.debug("Creating network '%s'...", self.RALLY_PRIVATE_NET_NAME)
self.network_dict = os_utils.create_shared_network_full(
self.RALLY_PRIVATE_NET_NAME,
self.RALLY_PRIVATE_SUBNET_NAME,
@@ -434,7 +480,7 @@ class RallyBase(testcase.OSGCTestCase):
def _run_tests(self):
if self.test_name == 'all':
for test in self.TESTS:
- if (test == 'all' or test == 'vm'):
+ if test == 'all' or test == 'vm':
continue
self._run_task(test)
else:
@@ -459,25 +505,25 @@ class RallyBase(testcase.OSGCTestCase):
total_duration = 0.0
total_nb_tests = 0
total_success = 0.0
- for s in self.summary:
- name = "{0:<17}".format(s['test_name'])
- duration = float(s['overall_duration'])
+ for item in self.summary:
+ name = "{0:<17}".format(item['test_name'])
+ duration = float(item['overall_duration'])
total_duration += duration
duration = time.strftime("%M:%S", time.gmtime(duration))
duration = "{0:<10}".format(duration)
- nb_tests = "{0:<13}".format(s['nb_tests'])
- total_nb_tests += int(s['nb_tests'])
- success = "{0:<10}".format(str(s['success']) + '%')
- total_success += float(s['success'])
+ nb_tests = "{0:<13}".format(item['nb_tests'])
+ total_nb_tests += int(item['nb_tests'])
+ success = "{0:<10}".format(str(item['success']) + '%')
+ total_success += float(item['success'])
report += ("" +
"| " + name + " | " + duration + " | " +
nb_tests + " | " + success + "|\n" +
"+-------------------+------------"
"+---------------+-----------+\n")
payload.append({'module': name,
- 'details': {'duration': s['overall_duration'],
- 'nb tests': s['nb_tests'],
- 'success': s['success']}})
+ 'details': {'duration': item['overall_duration'],
+ 'nb tests': item['nb_tests'],
+ 'success': item['success']}})
total_duration_str = time.strftime("%H:%M:%S",
time.gmtime(total_duration))
@@ -500,29 +546,31 @@ class RallyBase(testcase.OSGCTestCase):
"+===============+===========+")
report += "\n"
- logger.info("\n" + report)
+ LOGGER.info("\n" + report)
payload.append({'summary': {'duration': total_duration,
'nb tests': total_nb_tests,
'nb success': success_rate}})
self.details = payload
- logger.info("Rally '%s' success_rate is %s%%"
- % (self.case_name, success_rate))
+ LOGGER.info("Rally '%s' success_rate is %s%%",
+ self.case_name, success_rate)
def _clean_up(self):
if self.volume_type:
- logger.debug("Deleting volume type '%s'..." % self.volume_type)
+ LOGGER.debug("Deleting volume type '%s'...", self.volume_type)
os_utils.delete_volume_type(self.cinder_client, self.volume_type)
if not self.image_exists:
- logger.debug("Deleting image '%s' with ID '%s'..."
- % (self.GLANCE_IMAGE_NAME, self.image_id))
+ LOGGER.debug("Deleting image '%s' with ID '%s'...",
+ self.GLANCE_IMAGE_NAME, self.image_id)
if not os_utils.delete_glance_image(self.nova_client,
self.image_id):
- logger.error("Error deleting the glance image")
+ LOGGER.error("Error deleting the glance image")
- def run(self):
+ @energy.enable_recording
+ def run(self, **kwargs):
+ """Run testcase."""
self.start_time = time.time()
try:
self._prepare_env()
@@ -530,8 +578,8 @@ class RallyBase(testcase.OSGCTestCase):
self._generate_report()
self._clean_up()
res = testcase.TestCase.EX_OK
- except Exception as e:
- logger.error('Error with run: %s' % e)
+ except Exception as exc: # pylint: disable=broad-except
+ LOGGER.error('Error with run: %s', exc)
res = testcase.TestCase.EX_RUN_ERROR
self.stop_time = time.time()
@@ -539,7 +587,10 @@ class RallyBase(testcase.OSGCTestCase):
class RallySanity(RallyBase):
+ """Rally sanity testcase implementation."""
+
def __init__(self, **kwargs):
+ """Initialize RallySanity object."""
if "case_name" not in kwargs:
kwargs["case_name"] = "rally_sanity"
super(RallySanity, self).__init__(**kwargs)
@@ -550,7 +601,10 @@ class RallySanity(RallyBase):
class RallyFull(RallyBase):
+ """Rally full testcase implementation."""
+
def __init__(self, **kwargs):
+ """Initialize RallyFull object."""
if "case_name" not in kwargs:
kwargs["case_name"] = "rally_full"
super(RallyFull, self).__init__(**kwargs)
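
Blacklist scenario entries may now be plain strings or regular expressions, which is what the new in_iterable_re() helper above is for. A standalone sketch of the intended matching behaviour (it mirrors the helper rather than importing it):

    # Standalone illustration of regex-aware blacklist matching (mirrors in_iterable_re()).
    import re

    def matches(needle, haystack):
        if needle in haystack:                                  # exact match first
            return True
        for pattern in haystack:
            if pattern and re.search(pattern, needle) is not None:
                return True                                     # non-empty pattern found in needle
        return False

    blacklist = ['^os-nosdn-lxd-(no)?ha$']
    print(matches('os-nosdn-lxd-ha', blacklist))        # True
    print(matches('os-nosdn-lxd-noha', blacklist))      # True
    print(matches('os-odl_l2-bgpvpn-ha', blacklist))    # False
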
diff --git a/functest/opnfv_tests/openstack/refstack_client/refstack_client.py b/functest/opnfv_tests/openstack/refstack_client/refstack_client.py
index c2a05379..76bee19d 100644
--- a/functest/opnfv_tests/openstack/refstack_client/refstack_client.py
+++ b/functest/opnfv_tests/openstack/refstack_client/refstack_client.py
@@ -6,108 +6,106 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
+"""Refstack client testcase implemenation."""
+
from __future__ import division
import argparse
import logging
import os
-import pkg_resources
import re
import sys
import subprocess
import time
+import pkg_resources
+
from functest.core import testcase
+from functest.energy import energy
+from functest.opnfv_tests.openstack.refstack_client.tempest_conf \
+ import TempestConf
from functest.opnfv_tests.openstack.tempest import conf_utils
from functest.utils.constants import CONST
import functest.utils.functest_utils as ft_utils
-from tempest_conf import TempestConf
-""" logging configuration """
-logger = logging.getLogger(__name__)
+# logging configuration
+LOGGER = logging.getLogger(__name__)
class RefstackClient(testcase.OSGCTestCase):
+ """RefstackClient testcase implementation class."""
def __init__(self, **kwargs):
+ """Initialize RefstackClient testcase object."""
if "case_name" not in kwargs:
kwargs["case_name"] = "refstack_defcore"
super(RefstackClient, self).__init__(**kwargs)
- self.CONF_PATH = pkg_resources.resource_filename(
+ self.conf_path = pkg_resources.resource_filename(
'functest',
'opnfv_tests/openstack/refstack_client/refstack_tempest.conf')
- self.FUNCTEST_TEST = pkg_resources.resource_filename(
+ self.functest_test = pkg_resources.resource_filename(
'functest', 'opnfv_tests')
- self.DEFCORE_LIST = 'openstack/refstack_client/defcore.txt'
- self.confpath = os.path.join(self.FUNCTEST_TEST,
- self.CONF_PATH)
+ self.defcore_list = 'openstack/refstack_client/defcore.txt'
+ self.confpath = os.path.join(self.functest_test,
+ self.conf_path)
self.defcorelist = pkg_resources.resource_filename(
'functest', 'opnfv_tests/openstack/refstack_client/defcore.txt')
+ self.testlist = None
self.insecure = ''
if ('https' in CONST.__getattribute__('OS_AUTH_URL') and
CONST.__getattribute__('OS_INSECURE').lower() == 'true'):
self.insecure = '-k'
def run_defcore(self, conf, testlist):
+ """Run defcore sys command."""
cmd = ("refstack-client test {0} -c {1} -v --test-list {2}"
.format(self.insecure, conf, testlist))
- logger.info("Starting Refstack_defcore test case: '%s'." % cmd)
+ LOGGER.info("Starting Refstack_defcore test case: '%s'.", cmd)
ft_utils.execute_command(cmd)
def run_defcore_default(self):
- cmd = ("refstack-client test {0} -c {1} -v --test-list {2}"
- .format(self.insecure, self.confpath, self.defcorelist))
- logger.info("Starting Refstack_defcore test case: '%s'." % cmd)
-
- header = ("Refstack environment:\n"
- " SUT: %s\n Scenario: %s\n Node: %s\n Date: %s\n" %
- (CONST.__getattribute__('INSTALLER_TYPE'),
- CONST.__getattribute__('DEPLOY_SCENARIO'),
- CONST.__getattribute__('NODE_NAME'),
- time.strftime("%a %b %d %H:%M:%S %Z %Y")))
-
- f_stdout = open(
- os.path.join(conf_utils.REFSTACK_RESULTS_DIR,
- "refstack.log"), 'w+')
- f_env = open(os.path.join(conf_utils.REFSTACK_RESULTS_DIR,
- "environment.log"), 'w+')
- f_env.write(header)
-
- p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT, bufsize=1)
-
- with p.stdout:
- for line in iter(p.stdout.readline, b''):
- if 'Tests' in line:
- break
- if re.search("\} tempest\.", line):
- logger.info(line.replace('\n', ''))
- f_stdout.write(line)
- p.wait()
-
- f_stdout.close()
- f_env.close()
+ """Run default defcore sys command."""
+ options = ["-v"] if not self.insecure else ["-v", self.insecure]
+ cmd = (["refstack-client", "test", "-c", self.confpath] +
+ options + ["--test-list", self.defcorelist])
+ LOGGER.info("Starting Refstack_defcore test case: '%s'.", cmd)
+
+ with open(os.path.join(conf_utils.REFSTACK_RESULTS_DIR,
+ "environment.log"), 'w+') as f_env:
+ f_env.write(
+ ("Refstack environment:\n"
+ " SUT: {}\n Scenario: {}\n Node: {}\n Date: {}\n").format(
+ CONST.__getattribute__('INSTALLER_TYPE'),
+ CONST.__getattribute__('DEPLOY_SCENARIO'),
+ CONST.__getattribute__('NODE_NAME'),
+ time.strftime("%a %b %d %H:%M:%S %Z %Y")))
+
+ with open(os.path.join(conf_utils.REFSTACK_RESULTS_DIR,
+ "refstack.log"), 'w+') as f_stdout:
+ subprocess.call(cmd, shell=False, stdout=f_stdout,
+ stderr=subprocess.STDOUT)
def parse_refstack_result(self):
+ """Parse Refstact results."""
try:
with open(os.path.join(conf_utils.REFSTACK_RESULTS_DIR,
"refstack.log"), 'r') as logfile:
output = logfile.read()
- for match in re.findall("Ran: (\d+) tests in (\d+\.\d{4}) sec.",
+ for match in re.findall(r"Ran: (\d+) tests in (\d+\.\d{4}) sec.",
output):
num_tests = match[0]
- logger.info("Ran: %s tests in %s sec." % (num_tests, match[1]))
- for match in re.findall("(- Passed: )(\d+)", output):
+ LOGGER.info("Ran: %s tests in %s sec.", num_tests, match[1])
+ for match in re.findall(r"(- Passed: )(\d+)", output):
num_success = match[1]
- logger.info("".join(match))
- for match in re.findall("(- Skipped: )(\d+)", output):
+ LOGGER.info("".join(match))
+ for match in re.findall(r"(- Skipped: )(\d+)", output):
num_skipped = match[1]
- logger.info("".join(match))
- for match in re.findall("(- Failed: )(\d+)", output):
+ LOGGER.info("".join(match))
+ for match in re.findall(r"(- Failed: )(\d+)", output):
num_failures = match[1]
- logger.info("".join(match))
+ LOGGER.info("".join(match))
success_testcases = ""
for match in re.findall(r"\{0\}(.*?)[. ]*ok", output):
success_testcases += match + ", "
@@ -123,7 +121,7 @@ class RefstackClient(testcase.OSGCTestCase):
try:
self.result = 100 * int(num_success) / int(num_executed)
except ZeroDivisionError:
- logger.error("No test has been executed")
+ LOGGER.error("No test has been executed")
self.details = {"tests": int(num_tests),
"failures": int(num_failures),
@@ -133,12 +131,17 @@ class RefstackClient(testcase.OSGCTestCase):
except Exception:
self.result = 0
- logger.info("Testcase %s success_rate is %s%%"
- % (self.case_name, self.result))
+ LOGGER.info("Testcase %s success_rate is %s%%",
+ self.case_name, self.result)
+
+ @energy.enable_recording
+ def run(self, **kwargs):
+ """
+ Start RefstackClient testcase.
- def run(self):
- '''used for functest command line,
- functest testcase run refstack_defcore'''
+ used for functest command line,
+ functest testcase run refstack_defcore
+ """
self.start_time = time.time()
if not os.path.exists(conf_utils.REFSTACK_RESULTS_DIR):
@@ -150,59 +153,64 @@ class RefstackClient(testcase.OSGCTestCase):
self.run_defcore_default()
self.parse_refstack_result()
res = testcase.TestCase.EX_OK
- except Exception as e:
- logger.error('Error with run: %s', e)
+ except Exception:
+ LOGGER.exception("Error with run")
res = testcase.TestCase.EX_RUN_ERROR
self.stop_time = time.time()
return res
def _prep_test(self):
- '''Check that the config file exists.'''
+ """Check that the config file exists."""
if not os.path.isfile(self.confpath):
- logger.error("Conf file not valid: %s" % self.confpath)
+ LOGGER.error("Conf file not valid: %s", self.confpath)
if not os.path.isfile(self.testlist):
- logger.error("testlist file not valid: %s" % self.testlist)
+ LOGGER.error("testlist file not valid: %s", self.testlist)
def main(self, **kwargs):
- '''used for manually running,
+ """
+ Execute RefstackClient testcase manually.
+
+ used for manually running,
python refstack_client.py -c <tempest_conf_path>
--testlist <testlist_path>
can generate a reference refstack_tempest.conf by
python tempest_conf.py
- '''
+ """
try:
self.confpath = kwargs['config']
self.testlist = kwargs['testlist']
- except KeyError as e:
- logger.error("Cannot run refstack client. Please check "
- "%s", e)
+ except KeyError as exc:
+ LOGGER.error("Cannot run refstack client. Please check "
+ "%s", exc)
return self.EX_RUN_ERROR
try:
self._prep_test()
self.run_defcore(self.confpath, self.testlist)
res = testcase.TestCase.EX_OK
- except Exception as e:
- logger.error('Error with run: %s', e)
+ except Exception as exc:
+ LOGGER.error('Error with run: %s', exc)
res = testcase.TestCase.EX_RUN_ERROR
return res
-class RefstackClientParser(object):
+class RefstackClientParser(object): # pylint: disable=too-few-public-methods
+ """Command line argument parser helper."""
def __init__(self):
- self.FUNCTEST_TEST = pkg_resources.resource_filename(
+ """Initialize helper object."""
+ self.functest_test = pkg_resources.resource_filename(
'functest', 'opnfv_tests')
- self.CONF_PATH = pkg_resources.resource_filename(
+ self.conf_path = pkg_resources.resource_filename(
'functest',
'opnfv_tests/openstack/refstack_client/refstack_tempest.conf')
- self.DEFCORE_LIST = pkg_resources.resource_filename(
+ self.defcore_list = pkg_resources.resource_filename(
'functest', 'opnfv_tests/openstack/refstack_client/defcore.txt')
- self.confpath = os.path.join(self.FUNCTEST_TEST,
- self.CONF_PATH)
- self.defcorelist = os.path.join(self.FUNCTEST_TEST,
- self.DEFCORE_LIST)
+ self.confpath = os.path.join(self.functest_test,
+ self.conf_path)
+ self.defcorelist = os.path.join(self.functest_test,
+ self.defcore_list)
self.parser = argparse.ArgumentParser()
self.parser.add_argument(
'-c', '--config',
@@ -215,11 +223,13 @@ class RefstackClientParser(object):
'should be tested.',
default=self.defcorelist)
- def parse_args(self, argv=[]):
+ def parse_args(self, argv=None):
+ """Parse command line arguments."""
return vars(self.parser.parse_args(argv))
def main():
+ """Run RefstackClient testcase with CLI."""
logging.basicConfig()
refstackclient = RefstackClient()
parser = RefstackClientParser()
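
run_defcore_default() now hands its log file directly to the subprocess instead of filtering stdout line by line. A minimal sketch of that redirection, with a harmless placeholder command standing in for refstack-client:

    # Sketch of redirecting subprocess output straight into a log file
    # ('echo' is a placeholder for the real refstack-client command line).
    import subprocess

    with open('/tmp/refstack-example.log', 'w+') as f_stdout:
        subprocess.call(['echo', 'refstack-client would run here'],
                        shell=False, stdout=f_stdout, stderr=subprocess.STDOUT)
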
diff --git a/functest/opnfv_tests/openstack/tempest/conf_utils.py b/functest/opnfv_tests/openstack/tempest/conf_utils.py
index fa8f00fc..d494db5e 100644
--- a/functest/opnfv_tests/openstack/tempest/conf_utils.py
+++ b/functest/opnfv_tests/openstack/tempest/conf_utils.py
@@ -28,12 +28,12 @@ GLANCE_IMAGE_PATH = os.path.join(
TEMPEST_RESULTS_DIR = os.path.join(CONST.__getattribute__('dir_results'),
'tempest')
TEMPEST_CUSTOM = pkg_resources.resource_filename(
- 'functest', 'opnfv_tests/openstack/tempest/custom_tests/test_list.txt')
+ 'functest', 'opnfv_tests/openstack/tempest/custom_tests/test_list.txt')
TEMPEST_BLACKLIST = pkg_resources.resource_filename(
- 'functest', 'opnfv_tests/openstack/tempest/custom_tests/blacklist.txt')
+ 'functest', 'opnfv_tests/openstack/tempest/custom_tests/blacklist.txt')
TEMPEST_DEFCORE = pkg_resources.resource_filename(
- 'functest',
- 'opnfv_tests/openstack/tempest/custom_tests/defcore_req.txt')
+ 'functest',
+ 'opnfv_tests/openstack/tempest/custom_tests/defcore_req.txt')
TEMPEST_RAW_LIST = os.path.join(TEMPEST_RESULTS_DIR, 'test_raw_list.txt')
TEMPEST_LIST = os.path.join(TEMPEST_RESULTS_DIR, 'test_list.txt')
REFSTACK_RESULTS_DIR = os.path.join(CONST.__getattribute__('dir_results'),
@@ -243,6 +243,11 @@ def configure_tempest_defcore(deployment_dir, img_flavor_dict):
logger.debug("Updating selected tempest.conf parameters for defcore...")
config = ConfigParser.RawConfigParser()
config.read(conf_file)
+ config.set('DEFAULT', 'log_file', '{}/tempest.log'.format(deployment_dir))
+ config.set('oslo_concurrency', 'lock_path',
+ '{}/lock_files'.format(deployment_dir))
+ config.set('scenario', 'img_dir', '{}'.format(deployment_dir))
+ config.set('scenario', 'img_file', 'tempest-image')
config.set('compute', 'image_ref', img_flavor_dict.get("image_id"))
config.set('compute', 'image_ref_alt',
img_flavor_dict['image_id_alt'])
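
The added conf_utils lines pin log_file, lock_path and the scenario image settings for the defcore run. The same ConfigParser calls in isolation look as follows (Python 2 style to match the module; the paths are only examples):

    # Isolated illustration of the tempest.conf settings added above
    # (Python 2 ConfigParser, example paths only).
    import ConfigParser

    deployment_dir = '/tmp/tempest-deployment'
    config = ConfigParser.RawConfigParser()
    config.add_section('oslo_concurrency')
    config.add_section('scenario')
    config.set('DEFAULT', 'log_file', '{}/tempest.log'.format(deployment_dir))
    config.set('oslo_concurrency', 'lock_path', '{}/lock_files'.format(deployment_dir))
    config.set('scenario', 'img_dir', deployment_dir)
    config.set('scenario', 'img_file', 'tempest-image')
    with open('/tmp/tempest-example.conf', 'w') as conf_file:
        config.write(conf_file)
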
diff --git a/functest/opnfv_tests/openstack/vping/vping_ssh.py b/functest/opnfv_tests/openstack/vping/vping_ssh.py
index d4c39ad2..5cacddb5 100644
--- a/functest/opnfv_tests/openstack/vping/vping_ssh.py
+++ b/functest/opnfv_tests/openstack/vping/vping_ssh.py
@@ -7,13 +7,25 @@
#
# http://www.apache.org/licenses/LICENSE-2.0
-import os
-import pkg_resources
-from scp import SCPClient
+
+"""vPingSSH testcase."""
+
+# This 1st import is here simply for pep8 as the 'os' package import appears
+# to be required for mock and the unit tests will fail without it
+import os # noqa # pylint: disable=unused-import
import time
+from scp import SCPClient
+import pkg_resources
+
+from functest.core.testcase import TestCase
+from functest.energy import energy
+from functest.opnfv_tests.openstack.snaps import snaps_utils
+from functest.opnfv_tests.openstack.vping import vping_base
+from functest.utils.constants import CONST
from snaps.openstack.create_instance import FloatingIpSettings, \
VmInstanceSettings
+
from snaps.openstack.create_keypairs import KeypairSettings
from snaps.openstack.create_network import PortSettings
from snaps.openstack.create_router import RouterSettings
@@ -21,24 +33,17 @@ from snaps.openstack.create_security_group import Direction, Protocol, \
SecurityGroupSettings, SecurityGroupRuleSettings
from snaps.openstack.utils import deploy_utils
-from functest.core.testcase import TestCase
-from functest.opnfv_tests.openstack.snaps import snaps_utils
-from functest.opnfv_tests.openstack.vping import vping_base
-from functest.utils.constants import CONST
-
class VPingSSH(vping_base.VPingBase):
"""
+ VPingSSH testcase implementation.
+
Class to execute the vPing test using a Floating IP to connect to one VM
to issue the ping command to the second
"""
def __init__(self, **kwargs):
-
- # This line is here simply for pep8 as the 'os' package import appears
- # to be required for mock and the unit tests will fail without it
- os.environ
-
+ """Initialize testcase."""
if "case_name" not in kwargs:
kwargs["case_name"] = "vping_ssh"
super(VPingSSH, self).__init__(**kwargs)
@@ -51,8 +56,11 @@ class VPingSSH(vping_base.VPingBase):
self.sg_name = CONST.__getattribute__('vping_sg_name') + self.guid
self.sg_desc = CONST.__getattribute__('vping_sg_desc')
+ @energy.enable_recording
def run(self):
"""
+        Execute VPingSSH testcase.
+
Sets up the OpenStack keypair, router, security group, and VM instance
objects then validates the ping.
:return: the exit code from the super.execute() method
@@ -60,7 +68,8 @@ class VPingSSH(vping_base.VPingBase):
try:
super(VPingSSH, self).run()
- self.logger.info("Creating keypair with name: '%s'" % self.kp_name)
+ log = "Creating keypair with name: '%s'" % self.kp_name
+ self.logger.info(log)
kp_creator = deploy_utils.create_keypair(
self.os_creds,
KeypairSettings(name=self.kp_name,
@@ -69,8 +78,8 @@ class VPingSSH(vping_base.VPingBase):
self.creators.append(kp_creator)
# Creating router to external network
- self.logger.info("Creating router with name: '%s'"
- % self.router_name)
+ log = "Creating router with name: '%s'" % self.router_name
+ self.logger.info(log)
net_set = self.network_creator.network_settings
sub_set = [net_set.subnet_settings[0].name]
ext_net_name = snaps_utils.get_ext_net_name(self.os_creds)
@@ -93,9 +102,9 @@ class VPingSSH(vping_base.VPingBase):
ssh_connect_timeout=self.vm_ssh_connect_timeout,
port_settings=[port1_settings])
- self.logger.info(
- "Creating VM 1 instance with name: '%s'"
- % instance1_settings.name)
+ log = ("Creating VM 1 instance with name: '%s'"
+ % instance1_settings.name)
+ self.logger.info(log)
self.vm1_creator = deploy_utils.create_vm_instance(
self.os_creds,
instance1_settings,
@@ -122,9 +131,9 @@ class VPingSSH(vping_base.VPingBase):
port_name=port2_settings.name,
router_name=router_creator.router_settings.name)])
- self.logger.info(
- "Creating VM 2 instance with name: '%s'"
- % instance2_settings.name)
+ log = ("Creating VM 2 instance with name: '%s'"
+ % instance2_settings.name)
+ self.logger.info(log)
self.vm2_creator = deploy_utils.create_vm_instance(
self.os_creds,
instance2_settings,
@@ -133,14 +142,16 @@ class VPingSSH(vping_base.VPingBase):
self.creators.append(self.vm2_creator)
return self._execute()
- except Exception as e:
- self.logger.error('Unexpected error running test - ' + e.message)
+ except Exception as exc: # pylint: disable=broad-except
+ self.logger.error('Unexpected error running test - ' + exc.message)
return TestCase.EX_RUN_ERROR
finally:
self._cleanup()
def _do_vping(self, vm_creator, test_ip):
"""
+ Execute ping command.
+
Override from super
"""
if vm_creator.vm_ssh_active(block=True):
@@ -153,6 +164,8 @@ class VPingSSH(vping_base.VPingBase):
def _transfer_ping_script(self, ssh):
"""
+        Transfer the vping script to the VM.
+
Uses SCP to copy the ping script via the SSH client
:param ssh: the SSH client
:return:
@@ -163,11 +176,12 @@ class VPingSSH(vping_base.VPingBase):
'functest.opnfv_tests.openstack.vping', 'ping.sh')
try:
scp.put(ping_script, "~/")
- except:
- self.logger.error("Cannot SCP the file '%s'" % ping_script)
+ except Exception:
+ self.logger.error("Cannot SCP the file '%s'", ping_script)
return False
cmd = 'chmod 755 ~/ping.sh'
+ # pylint: disable=unused-variable
(stdin, stdout, stderr) = ssh.exec_command(cmd)
for line in stdout.readlines():
print line
@@ -176,6 +190,8 @@ class VPingSSH(vping_base.VPingBase):
def _do_vping_ssh(self, ssh, test_ip):
"""
+ Execute ping command via SSH.
+
Pings the test_ip via the SSH client
:param ssh: the SSH client used to issue the ping command
:param test_ip: the IP for the ping command to use
@@ -190,7 +206,7 @@ class VPingSSH(vping_base.VPingBase):
while True:
time.sleep(1)
- (stdin, stdout, stderr) = ssh.exec_command(cmd)
+ (_, stdout, _) = ssh.exec_command(cmd)
output = stdout.readlines()
for line in output:
@@ -206,12 +222,15 @@ class VPingSSH(vping_base.VPingBase):
break
if flag:
break
- self.logger.debug("Pinging %s. Waiting for response..." % test_ip)
+ log = "Pinging %s. Waiting for response..." % test_ip
+ self.logger.debug(log)
sec += 1
return exit_code
def __create_security_group(self):
"""
+ Configure OpenStack security groups.
+
Configures and deploys an OpenStack security group object
:return: the creator object
"""
@@ -231,7 +250,8 @@ class VPingSSH(vping_base.VPingBase):
protocol=Protocol.tcp, port_range_min=22,
port_range_max=22))
- self.logger.info("Security group with name: '%s'" % self.sg_name)
+ log = "Security group with name: '%s'" % self.sg_name
+ self.logger.info(log)
return deploy_utils.create_security_group(self.os_creds,
SecurityGroupSettings(
name=self.sg_name,
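
The ping loop above only needs the stdout stream from exec_command(), hence the (_, stdout, _) unpacking. A stripped-down paramiko sketch of that pattern, with placeholder host, user and key path:

    # Stripped-down paramiko sketch of the exec_command()/stdout pattern used above
    # (host, username and key path are placeholders).
    import paramiko

    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect('192.0.2.10', username='cirros', key_filename='/tmp/example_key')
    (_, stdout, _) = ssh.exec_command('ping -c 1 -W 1 192.0.2.11')
    for line in stdout.readlines():
        if 'bytes from' in line:
            print(line.strip())
    ssh.close()
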
diff --git a/functest/opnfv_tests/sdn/odl/odl.py b/functest/opnfv_tests/sdn/odl/odl.py
index 67bf66e3..5724012c 100644
--- a/functest/opnfv_tests/sdn/odl/odl.py
+++ b/functest/opnfv_tests/sdn/odl/odl.py
@@ -234,7 +234,11 @@ class ODLTests(testcase.TestCase):
elif installer_type == 'joid':
kwargs['odlip'] = os.environ['SDN_CONTROLLER']
elif installer_type == 'compass':
+ kwargs['odlrestconfport'] = '8080'
+ elif installer_type == 'daisy':
+ kwargs['odlip'] = os.environ['SDN_CONTROLLER_IP']
kwargs['odlwebport'] = '8181'
+ kwargs['odlrestconfport'] = '8087'
else:
kwargs['odlip'] = os.environ['SDN_CONTROLLER_IP']
except KeyError as ex:
diff --git a/functest/opnfv_tests/vnf/ims/clearwater_ims_base.py b/functest/opnfv_tests/vnf/ims/clearwater_ims_base.py
index 25ddca21..5a5c12be 100644
--- a/functest/opnfv_tests/vnf/ims/clearwater_ims_base.py
+++ b/functest/opnfv_tests/vnf/ims/clearwater_ims_base.py
@@ -43,7 +43,7 @@ class ClearwaterOnBoardingBase(vnf.VnfOnBoarding):
def config_ellis(self, ellis_ip, signup_code='secret', two_numbers=False):
output_dict = {}
- self.logger.info('Configure Ellis: %s', ellis_ip)
+ self.logger.debug('Configure Ellis: %s', ellis_ip)
output_dict['ellis_ip'] = ellis_ip
account_url = 'http://{0}/accounts'.format(ellis_ip)
params = {"password": "functest",
@@ -54,7 +54,7 @@ class ClearwaterOnBoardingBase(vnf.VnfOnBoarding):
output_dict['login'] = params
if rq.status_code != 201 and rq.status_code != 409:
raise Exception("Unable to create an account for number provision")
- self.logger.info('Account is created on Ellis: %s', params)
+ self.logger.debug('Account is created on Ellis: %s', params)
session_url = 'http://{0}/session'.format(ellis_ip)
session_data = {
@@ -66,13 +66,13 @@ class ClearwaterOnBoardingBase(vnf.VnfOnBoarding):
if rq.status_code != 201:
raise Exception('Failed to get cookie for Ellis')
cookies = rq.cookies
- self.logger.info('Cookies: %s', cookies)
+ self.logger.debug('Cookies: %s', cookies)
number_url = 'http://{0}/accounts/{1}/numbers'.format(
ellis_ip,
params['email'])
- self.logger.info('Create 1st calling number on Ellis')
- i = 24
+ self.logger.debug('Create 1st calling number on Ellis')
+ i = 30
while rq.status_code != 200 and i > 0:
try:
number_res = self.create_ellis_number(number_url, cookies)
@@ -86,7 +86,7 @@ class ClearwaterOnBoardingBase(vnf.VnfOnBoarding):
output_dict['number'] = number_res
if two_numbers:
- self.logger.info('Create 2nd calling number on Ellis')
+ self.logger.debug('Create 2nd calling number on Ellis')
number_res = self.create_ellis_number(number_url, cookies)
output_dict['number2'] = number_res
@@ -131,7 +131,7 @@ class ClearwaterOnBoardingBase(vnf.VnfOnBoarding):
script = '{0}{1}'.format(script, subscript)
script = ('{0}{1}'.format(script, ' --trace'))
cmd = "/bin/bash -c '{0}'".format(script)
- self.logger.info('Live test cmd: %s', cmd)
+ self.logger.debug('Live test cmd: %s', cmd)
output_file = os.path.join(self.result_dir, "ims_test_output.txt")
ft_utils.execute_command(cmd,
error_msg='Clearwater live test failed',
diff --git a/functest/opnfv_tests/vnf/ims/cloudify_ims.py b/functest/opnfv_tests/vnf/ims/cloudify_ims.py
index 2dcce408..8f6fcec8 100644
--- a/functest/opnfv_tests/vnf/ims/cloudify_ims.py
+++ b/functest/opnfv_tests/vnf/ims/cloudify_ims.py
@@ -1,53 +1,56 @@
#!/usr/bin/env python
-# Copyright (c) 2016 Orange and others.
+# Copyright (c) 2017 Orange and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
+"""CloudifyIms testcase implementation."""
+
import logging
import os
import time
-import yaml
-from scp import SCPClient
from cloudify_rest_client import CloudifyClient
from cloudify_rest_client.executions import Execution
+from scp import SCPClient
+import yaml
+from functest.energy import energy
+from functest.opnfv_tests.openstack.snaps import snaps_utils
import functest.opnfv_tests.vnf.ims.clearwater_ims_base as clearwater_ims_base
from functest.utils.constants import CONST
import functest.utils.openstack_utils as os_utils
from snaps.openstack.os_credentials import OSCreds
-from snaps.openstack.create_network import NetworkSettings, SubnetSettings, \
- OpenStackNetwork
-from snaps.openstack.create_security_group import SecurityGroupSettings, \
- SecurityGroupRuleSettings,\
- Direction, Protocol, \
- OpenStackSecurityGroup
+from snaps.openstack.create_network import (NetworkSettings, SubnetSettings,
+ OpenStackNetwork)
+from snaps.openstack.create_security_group import (SecurityGroupSettings,
+ SecurityGroupRuleSettings,
+ Direction, Protocol,
+ OpenStackSecurityGroup)
from snaps.openstack.create_router import RouterSettings, OpenStackRouter
-from snaps.openstack.create_instance import VmInstanceSettings, \
- FloatingIpSettings, \
- OpenStackVmInstance
+from snaps.openstack.create_instance import (VmInstanceSettings,
+ FloatingIpSettings,
+ OpenStackVmInstance)
from snaps.openstack.create_flavor import FlavorSettings, OpenStackFlavor
from snaps.openstack.create_image import ImageSettings, OpenStackImage
from snaps.openstack.create_keypairs import KeypairSettings, OpenStackKeypair
from snaps.openstack.create_network import PortSettings
-from functest.opnfv_tests.openstack.snaps import snaps_utils
-
__author__ = "Valentin Boucher <valentin.boucher@orange.com>"
class CloudifyIms(clearwater_ims_base.ClearwaterOnBoardingBase):
- """Clearwater vIMS deployed with Cloudify Orchestrator Case"""
+ """Clearwater vIMS deployed with Cloudify Orchestrator Case."""
__logger = logging.getLogger(__name__)
def __init__(self, **kwargs):
+ """Initialize CloudifyIms testcase object."""
if "case_name" not in kwargs:
kwargs["case_name"] = "cloudify_ims"
super(CloudifyIms, self).__init__(**kwargs)
@@ -93,6 +96,7 @@ class CloudifyIms(clearwater_ims_base.ClearwaterOnBoardingBase):
self.__logger.info("Images needed for vIMS: %s", self.images)
def prepare(self):
+        """Prepare testcase (additional pre-configuration steps)."""
super(CloudifyIms, self).prepare()
self.__logger.info("Additional pre-configuration steps")
@@ -120,7 +124,7 @@ class CloudifyIms(clearwater_ims_base.ClearwaterOnBoardingBase):
def deploy_orchestrator(self):
"""
- Deploy Cloudify Manager
+ Deploy Cloudify Manager.
network, security group, fip, VM creation
"""
@@ -235,6 +239,8 @@ class CloudifyIms(clearwater_ims_base.ClearwaterOnBoardingBase):
while str(cfy_status) != 'running' and retry:
try:
cfy_status = cfy_client.manager.get_status()['status']
+ self.__logger.debug("The current manager status is %s",
+ cfy_status)
except Exception: # pylint: disable=broad-except
self.__logger.warning("Cloudify Manager isn't " +
"up and running. Retrying ...")
@@ -259,14 +265,15 @@ class CloudifyIms(clearwater_ims_base.ClearwaterOnBoardingBase):
self.__logger.info("Put private keypair in manager")
if manager_creator.vm_ssh_active(block=True):
ssh = manager_creator.ssh_client()
- scp = SCPClient(ssh.get_transport())
+ scp = SCPClient(ssh.get_transport(), socket_timeout=15.0)
scp.put(kp_file, '~/')
cmd = "sudo cp ~/cloudify_ims.pem /etc/cloudify/"
- ssh.exec_command(cmd)
+ run_blocking_ssh_command(ssh, cmd)
cmd = "sudo chmod 444 /etc/cloudify/cloudify_ims.pem"
- ssh.exec_command(cmd)
+ run_blocking_ssh_command(ssh, cmd)
cmd = "sudo yum install -y gcc python-devel"
- ssh.exec_command(cmd)
+            run_blocking_ssh_command(
+                ssh, cmd, "Unable to install packages on manager")
self.details['orchestrator'].update(status='PASS', duration=duration)
@@ -277,9 +284,7 @@ class CloudifyIms(clearwater_ims_base.ClearwaterOnBoardingBase):
return True
def deploy_vnf(self):
- """
- Deploy Clearwater IMS
- """
+ """Deploy Clearwater IMS."""
start_time = time.time()
self.__logger.info("Upload VNFD")
@@ -290,15 +295,17 @@ class CloudifyIms(clearwater_ims_base.ClearwaterOnBoardingBase):
descriptor.get('file_name'))
self.__logger.info("Get or create flavor for all clearwater vm")
- self.exist_obj['flavor2'], flavor_id = os_utils.get_or_create_flavor(
- self.vnf['requirements']['flavor']['name'],
- self.vnf['requirements']['flavor']['ram_min'],
- '30',
- '1',
- public=True)
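+        # create the flavor via SNAPS and register it for cleanup in clean()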
+ flavor_settings = FlavorSettings(
+ name=self.vnf['requirements']['flavor']['name'],
+ ram=self.vnf['requirements']['flavor']['ram_min'],
+ disk=25,
+ vcpus=1)
+ flavor_creator = OpenStackFlavor(self.snaps_creds, flavor_settings)
+ flavor_creator.create()
+ self.created_object.append(flavor_creator)
self.vnf['inputs'].update(dict(
- flavor_id=flavor_id,
+ flavor_id=self.vnf['requirements']['flavor']['name'],
))
self.__logger.info("Create VNF Instance")
@@ -323,15 +330,14 @@ class CloudifyIms(clearwater_ims_base.ClearwaterOnBoardingBase):
self.__logger.info(execution)
if execution.status == 'terminated':
self.details['vnf'].update(status='PASS', duration=duration)
- return True
+ result = True
else:
self.details['vnf'].update(status='FAIL', duration=duration)
- return False
+ result = False
+ return result
def test_vnf(self):
- """
- Run test on clearwater ims instance
- """
+ """Run test on clearwater ims instance."""
start_time = time.time()
cfy_client = self.orchestrator['object']
@@ -342,22 +348,23 @@ class CloudifyIms(clearwater_ims_base.ClearwaterOnBoardingBase):
ellis_ip = outputs['ellis_ip']
self.config_ellis(ellis_ip)
- if dns_ip != "":
- vims_test_result = self.run_clearwater_live_test(
- dns_ip=dns_ip,
- public_domain=self.vnf['inputs']["public_domain"])
- duration = time.time() - start_time
- short_result = sig_test_format(vims_test_result)
- self.__logger.info(short_result)
- self.details['test_vnf'].update(status='PASS',
- result=short_result,
- full_result=vims_test_result,
- duration=duration)
- return True
- else:
+ if not dns_ip:
return False
+ vims_test_result = self.run_clearwater_live_test(
+ dns_ip=dns_ip,
+ public_domain=self.vnf['inputs']["public_domain"])
+ duration = time.time() - start_time
+ short_result = sig_test_format(vims_test_result)
+ self.__logger.info(short_result)
+ self.details['test_vnf'].update(status='PASS',
+ result=short_result,
+ full_result=vims_test_result,
+ duration=duration)
+ return True
+
def clean(self):
+ """Clean created objects/functions."""
try:
cfy_client = self.orchestrator['object']
dep_name = self.vnf['descriptor'].get('name')
@@ -369,7 +376,7 @@ class CloudifyIms(clearwater_ims_base.ClearwaterOnBoardingBase):
try:
cfy_client.executions.cancel(execution['id'],
force=True)
- except:
+                    except:  # pylint: disable=bare-except
self.__logger.warn("Can't cancel the current exec")
execution = cfy_client.executions.start(
@@ -381,7 +388,7 @@ class CloudifyIms(clearwater_ims_base.ClearwaterOnBoardingBase):
wait_for_execution(cfy_client, execution, self.__logger)
cfy_client.deployments.delete(self.vnf['descriptor'].get('name'))
cfy_client.blueprints.delete(self.vnf['descriptor'].get('name'))
- except:
+        except:  # pylint: disable=bare-except
self.__logger.warn("Some issue during the undeployment ..")
self.__logger.warn("Tenant clean continue ..")
@@ -389,10 +396,15 @@ class CloudifyIms(clearwater_ims_base.ClearwaterOnBoardingBase):
for creator in reversed(self.created_object):
try:
creator.clean()
- except Exception as e:
- self.logger.error('Unexpected error cleaning - %s', e)
+ except Exception as exc:
+ self.logger.error('Unexpected error cleaning - %s', exc)
super(CloudifyIms, self).clean()
+ @energy.enable_recording
+ def run(self, **kwargs):
+ """Execute CloudifyIms test case."""
+ super(CloudifyIms, self).run(**kwargs)
+
# ----------------------------------------------------------
#
@@ -401,6 +413,8 @@ class CloudifyIms(clearwater_ims_base.ClearwaterOnBoardingBase):
# -----------------------------------------------------------
def get_config(parameter, file_path):
"""
+ Get config parameter.
+
Returns the value of a given parameter in file.yaml
parameter must be given in string format with dots
Example: general.openstack.image_name
@@ -418,9 +432,7 @@ def get_config(parameter, file_path):
def wait_for_execution(client, execution, logger, timeout=2400, ):
- """
- Wait for a workflow execution on Cloudify Manager
- """
+ """Wait for a workflow execution on Cloudify Manager."""
# if execution already ended - return without waiting
if execution.status in Execution.END_STATES:
return execution
@@ -470,7 +482,7 @@ def wait_for_execution(client, execution, logger, timeout=2400, ):
def _get_deployment_environment_creation_execution(client, deployment_id):
"""
- Get the execution id of a env preparation
+    Get the execution id of an env preparation.
network, security group, fip, VM creation
"""
@@ -484,9 +496,7 @@ def _get_deployment_environment_creation_execution(client, deployment_id):
def sig_test_format(sig_test):
- """
- Process the signaling result to have a short result
- """
+ """Process the signaling result to have a short result."""
nb_passed = 0
nb_failures = 0
nb_skipped = 0
@@ -502,3 +512,10 @@ def sig_test_format(sig_test):
total_sig_test_result['failures'] = nb_failures
total_sig_test_result['skipped'] = nb_skipped
return total_sig_test_result
+
+
+def run_blocking_ssh_command(ssh, cmd, error_msg="Unable to run this command"):
+    """Run an SSH command and raise if its exit status is non-zero."""
+ stdin, stdout, stderr = ssh.exec_command(cmd)
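+    # recv_exit_status() blocks until the remote command has finished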
+ if stdout.channel.recv_exit_status() != 0:
+ raise Exception(error_msg)
diff --git a/functest/opnfv_tests/vnf/ims/cloudify_ims.yaml b/functest/opnfv_tests/vnf/ims/cloudify_ims.yaml
index f1028ce7..743c6ddd 100644
--- a/functest/opnfv_tests/vnf/ims/cloudify_ims.yaml
+++ b/functest/opnfv_tests/vnf/ims/cloudify_ims.yaml
@@ -19,7 +19,7 @@ vnf:
version: '122'
requirements:
flavor:
- name: m1.medium
+ name: m1.small
ram_min: 2048
inputs:
image_id: 'ubuntu_14.04'
diff --git a/functest/tests/unit/ci/test_check_deployment.py b/functest/tests/unit/ci/test_check_deployment.py
new file mode 100644
index 00000000..1f44d078
--- /dev/null
+++ b/functest/tests/unit/ci/test_check_deployment.py
@@ -0,0 +1,176 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2017 Ericsson and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+import logging
+import mock
+import unittest
+
+from functest.ci import check_deployment
+
+__author__ = "Jose Lausuch <jose.lausuch@ericsson.com>"
+
+
+class CheckDeploymentTesting(unittest.TestCase):
+    """Unit tests for the CheckDeployment class."""
+ # pylint: disable=missing-docstring
+
+ logging.disable(logging.CRITICAL)
+
+ def setUp(self):
+ self.client_test = mock.Mock()
+ self.deployment = check_deployment.CheckDeployment()
+ self.service_test = 'compute'
+ self.rc_file = self.deployment.rc_file
+ self.endpoint_test = 'http://192.168.0.6:5000/v3'
+ creds_attr = {'auth_url': self.endpoint_test,
+ 'proxy_settings': ''}
+ proxy_attr = {'host': '192.168.0.1', 'port': '5000'}
+ proxy_settings = mock.Mock()
+ proxy_settings.configure_mock(**proxy_attr)
+ self.os_creds = mock.Mock()
+ self.os_creds.configure_mock(**creds_attr)
+ self.os_creds.proxy_settings = proxy_settings
+ self.deployment.os_creds = self.os_creds
+
+ def test_check_rc(self):
+ with mock.patch('functest.ci.check_deployment.os.path.isfile',
+                        return_value=True) as m, \
+ mock.patch('__builtin__.open',
+ mock.mock_open(read_data='OS_AUTH_URL')):
+ self.deployment.check_rc()
+ self.assertTrue(m.called)
+
+ def test_check_rc_missing_file(self):
+ with mock.patch('functest.ci.check_deployment.os.path.isfile',
+ return_value=False), \
+ self.assertRaises(Exception) as context:
+ msg = 'RC file {} does not exist!'.format(self.rc_file)
+            self.deployment.check_rc()
+ self.assertTrue(msg in context)
+
+    def test_check_rc_missing_os_auth(self):
+        with mock.patch('functest.ci.check_deployment.os.path.isfile',
+                        return_value=True), \
+                mock.patch('__builtin__.open',
+                           mock.mock_open(read_data='test')), \
+                self.assertRaises(Exception) as context:
+            msg = 'OS_AUTH_URL not defined in {}.'.format(self.rc_file)
+            self.deployment.check_rc()
+            self.assertTrue(msg in context)
+
+ def test_check_auth_endpoint(self):
+ with mock.patch('functest.ci.check_deployment.verify_connectivity',
+ return_value=True) as m:
+ self.deployment.check_auth_endpoint()
+ self.assertTrue(m.called)
+
+ def test_check_auth_endpoint_not_reachable(self):
+ with mock.patch('functest.ci.check_deployment.verify_connectivity',
+ return_value=False) as m, \
+ self.assertRaises(Exception) as context:
+ endpoint = self.os_creds.auth_url
+ self.deployment.check_auth_endpoint()
+ msg = "OS_AUTH_URL {} is not reachable.".format(endpoint)
+ self.assertTrue(m.called)
+ self.assertTrue(msg in context)
+
+ def test_check_public_endpoint(self):
+ with mock.patch('functest.ci.check_deployment.verify_connectivity',
+ return_value=True) as m, \
+ mock.patch('functest.ci.check_deployment.keystone_utils.'
+ 'get_endpoint') as n:
+ self.deployment.check_public_endpoint()
+ self.assertTrue(m.called)
+ self.assertTrue(n.called)
+
+ def test_check_public_endpoint_not_reachable(self):
+ with mock.patch('functest.ci.check_deployment.verify_connectivity',
+ return_value=False) as m, \
+ mock.patch('functest.ci.check_deployment.keystone_utils.'
+ 'get_endpoint',
+ return_value=self.endpoint_test) as n, \
+ self.assertRaises(Exception) as context:
+ self.deployment.check_public_endpoint()
+ msg = ("Public endpoint {} is not reachable."
+                   .format(self.endpoint_test))
+ self.assertTrue(m.called)
+ self.assertTrue(n.called)
+ self.assertTrue(msg in context)
+
+ def test_check_service_endpoint(self):
+ with mock.patch('functest.ci.check_deployment.verify_connectivity',
+ return_value=True) as m, \
+ mock.patch('functest.ci.check_deployment.keystone_utils.'
+ 'get_endpoint') as n:
+ self.deployment.check_service_endpoint(self.service_test)
+ self.assertTrue(m.called)
+ self.assertTrue(n.called)
+
+ def test_check_service_endpoint_not_reachable(self):
+ with mock.patch('functest.ci.check_deployment.verify_connectivity',
+ return_value=False) as m, \
+ mock.patch('functest.ci.check_deployment.keystone_utils.'
+ 'get_endpoint',
+ return_value=self.endpoint_test) as n, \
+ self.assertRaises(Exception) as context:
+ self.deployment.check_service_endpoint(self.service_test)
+ msg = "{} endpoint {} is not reachable.".format(self.service_test,
+ self.endpoint_test)
+ self.assertTrue(m.called)
+ self.assertTrue(n.called)
+ self.assertTrue(msg in context)
+
+ def test_check_nova(self):
+ with mock.patch('functest.ci.check_deployment.nova_utils.nova_client',
+ return_value=self.client_test) as m:
+ self.deployment.check_nova()
+ self.assertTrue(m.called)
+
+ def test_check_nova_fail(self):
+ with mock.patch('functest.ci.check_deployment.nova_utils.nova_client',
+ return_value=self.client_test) as m, \
+ mock.patch.object(self.client_test, 'servers.list',
+ side_effect=Exception):
+ self.deployment.check_nova()
+ self.assertTrue(m.called)
+ self.assertRaises(Exception)
+
+ def test_check_neutron(self):
+ with mock.patch('functest.ci.check_deployment.neutron_utils.'
+ 'neutron_client', return_value=self.client_test) as m:
+ self.deployment.check_neutron()
+ self.assertTrue(m.called)
+
+ def test_check_neutron_fail(self):
+ with mock.patch('functest.ci.check_deployment.neutron_utils.'
+ 'neutron_client',
+ return_value=self.client_test) as m, \
+ mock.patch.object(self.client_test, 'list_networks',
+ side_effect=Exception), \
+ self.assertRaises(Exception):
+ self.deployment.check_neutron()
+ self.assertRaises(Exception)
+ self.assertTrue(m.called)
+
+ def test_check_glance(self):
+ with mock.patch('functest.ci.check_deployment.glance_utils.'
+ 'glance_client', return_value=self.client_test) as m:
+ self.deployment.check_glance()
+ self.assertTrue(m.called)
+
+ def test_check_glance_fail(self):
+ with mock.patch('functest.ci.check_deployment.glance_utils.'
+ 'glance_client', return_value=self.client_test) as m, \
+ mock.patch.object(self.client_test, 'images.list',
+ side_effect=Exception):
+ self.deployment.check_glance()
+ self.assertRaises(Exception)
+ self.assertTrue(m.called)
+
+
+if __name__ == "__main__":
+ logging.disable(logging.CRITICAL)
+ unittest.main(verbosity=2)
diff --git a/functest/tests/unit/ci/test_prepare_env.py b/functest/tests/unit/ci/test_prepare_env.py
index 7d4b5fb2..7d5fa564 100644
--- a/functest/tests/unit/ci/test_prepare_env.py
+++ b/functest/tests/unit/ci/test_prepare_env.py
@@ -309,22 +309,18 @@ class PrepareEnvTesting(unittest.TestCase):
prepare_env.update_config_file()
self.assertTrue(mock_db_url.called)
- @mock.patch('functest.ci.prepare_env.logger.info')
- def test_verify_deployment_error(self, mock_logger_error):
- mock_popen = mock.Mock()
- attrs = {'poll.return_value': None,
- 'stdout.readline.return_value': 'ERROR'}
- mock_popen.configure_mock(**attrs)
+ def test_verify_deployment(self):
+ with mock.patch('functest.ci.check_deployment.CheckDeployment') \
+ as mock_check_deployment:
+ prepare_env.verify_deployment()
+ self.assertTrue(mock_check_deployment.called)
- with mock.patch('functest.ci.prepare_env.print_separator') as m, \
- mock.patch('functest.ci.prepare_env.subprocess.Popen',
- return_value=mock_popen), \
- self.assertRaises(Exception) as context:
+ def test_verify_deployment_error(self):
+ with mock.patch('functest.ci.prepare_env.'
+ 'check_deployment.CheckDeployment',
+ return_value=('test_', None)), \
+ self.assertRaises(Exception):
prepare_env.verify_deployment()
- self.assertTrue(m.called)
- msg = "Problem while running 'check_os.sh'."
- mock_logger_error.assert_called_once_with('ERROR')
- self.assertTrue(msg in context)
def _get_rally_creds(self):
return {"type": "ExistingCloud",
diff --git a/functest/tests/unit/cli/commands/test_cli_os.py b/functest/tests/unit/cli/commands/test_cli_os.py
index a3d930de..806bc931 100644
--- a/functest/tests/unit/cli/commands/test_cli_os.py
+++ b/functest/tests/unit/cli/commands/test_cli_os.py
@@ -59,12 +59,12 @@ class CliOpenStackTesting(unittest.TestCase):
self.endpoint_ip)
mock_exit.assert_called_once_with(0)
- @mock.patch('functest.cli.commands.cli_os.ft_utils.execute_command')
- def test_check(self, mock_ftutils_execute):
- with mock.patch.object(self.cli_os, 'ping_endpoint'):
+ def test_check(self):
+ with mock.patch.object(self.cli_os, 'ping_endpoint'), \
+ mock.patch('functest.cli.commands.cli_os.check_deployment.'
+ 'CheckDeployment') as mock_check_deployment:
self.cli_os.check()
- mock_ftutils_execute.assert_called_once_with(
- "check_os.sh", verbose=False)
+ self.assertTrue(mock_check_deployment.called)
@mock.patch('functest.cli.commands.cli_os.os.path.isfile',
return_value=False)
diff --git a/functest/tests/unit/energy/test_functest_energy.py b/functest/tests/unit/energy/test_functest_energy.py
index 177788bc..f8bb13c9 100644
--- a/functest/tests/unit/energy/test_functest_energy.py
+++ b/functest/tests/unit/energy/test_functest_energy.py
@@ -248,7 +248,9 @@ class EnergyRecorderTest(unittest.TestCase):
self.__decorated_method() == self.returned_value_to_preserve
)
- def test_decorator_preserve_ex(self):
+ @mock.patch(
+ "functest.energy.energy.finish_session")
+ def test_decorator_preserve_ex(self, finish_mock=None):
"""Test that decorator preserve method exceptions."""
self.test_load_config()
with self.assertRaises(Exception) as context:
@@ -256,6 +258,7 @@ class EnergyRecorderTest(unittest.TestCase):
self.assertTrue(
self.exception_message_to_preserve in context.exception
)
+ self.assertTrue(finish_mock.called)
@mock.patch("functest.utils.functest_utils.get_functest_config",
side_effect=config_loader_mock)
diff --git a/functest/tests/unit/odl/test_odl.py b/functest/tests/unit/odl/test_odl.py
index 070a8d2e..8aeea41d 100644
--- a/functest/tests/unit/odl/test_odl.py
+++ b/functest/tests/unit/odl/test_odl.py
@@ -511,7 +511,22 @@ class ODLRunTesting(ODLTesting):
def test_compass(self):
os.environ["INSTALLER_TYPE"] = "compass"
self._test_run(testcase.TestCase.EX_OK,
- odlip=self._neutron_ip, odlwebport='8181')
+ odlip=self._neutron_ip, odlrestconfport='8080')
+
+ def test_daisy_no_controller_ip(self):
+ with mock.patch('functest.utils.openstack_utils.get_endpoint',
+ return_value="http://{}:9696".format(
+ ODLTesting._neutron_ip)):
+ os.environ["INSTALLER_TYPE"] = "daisy"
+ self.assertEqual(self.test.run(),
+ testcase.TestCase.EX_RUN_ERROR)
+
+ def test_daisy(self):
+ os.environ["SDN_CONTROLLER_IP"] = self._sdn_controller_ip
+ os.environ["INSTALLER_TYPE"] = "daisy"
+ self._test_run(testcase.TestCase.EX_OK,
+ odlip=self._sdn_controller_ip, odlwebport='8181',
+ odlrestconfport='8087')
class ODLArgParserTesting(ODLTesting):
diff --git a/functest/tests/unit/openstack/rally/test_rally.py b/functest/tests/unit/openstack/rally/test_rally.py
index 8845f660..05311c3f 100644
--- a/functest/tests/unit/openstack/rally/test_rally.py
+++ b/functest/tests/unit/openstack/rally/test_rally.py
@@ -18,75 +18,73 @@ from functest.utils.constants import CONST
class OSRallyTesting(unittest.TestCase):
-
- def setUp(self):
- self.nova_client = mock.Mock()
- self.neutron_client = mock.Mock()
- self.cinder_client = mock.Mock()
- with mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os_utils.get_nova_client',
- return_value=self.nova_client), \
- mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os_utils.get_neutron_client',
- return_value=self.neutron_client), \
- mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os_utils.get_cinder_client',
- return_value=self.cinder_client):
- self.rally_base = rally.RallyBase()
- self.rally_base.network_dict['net_id'] = 'test_net_id'
- self.polling_iter = 2
-
- def test_build_task_args_missing_floating_network(self):
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os_utils.'
+ 'get_nova_client', return_value=mock.Mock())
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os_utils.'
+ 'get_neutron_client', return_value=mock.Mock())
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os_utils.'
+ 'get_cinder_client', return_value=mock.Mock())
+ def setUp(self, mock_func1, mock_func2, mock_func3):
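+        # the os_utils client getters are patched so RallyBase() needs no live cloud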
+ self.rally_base = rally.RallyBase()
+ self.rally_base.network_dict['net_id'] = 'test_net_id'
+ self.polling_iter = 2
+ mock_func1.assert_called()
+ mock_func2.assert_called()
+ mock_func3.assert_called()
+
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os_utils.'
+ 'get_external_net', return_value=None)
+ def test_build_task_args_missing_floating_network(self, mock_func):
CONST.__setattr__('OS_AUTH_URL', None)
- with mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os_utils.get_external_net',
- return_value=None):
- task_args = self.rally_base._build_task_args('test_file_name')
- self.assertEqual(task_args['floating_network'], '')
+ task_args = self.rally_base._build_task_args('test_file_name')
+ self.assertEqual(task_args['floating_network'], '')
+ mock_func.assert_called()
- def test_build_task_args_missing_net_id(self):
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os_utils.'
+ 'get_external_net', return_value='test_floating_network')
+ def test_build_task_args_missing_net_id(self, mock_func):
CONST.__setattr__('OS_AUTH_URL', None)
self.rally_base.network_dict['net_id'] = ''
- with mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os_utils.get_external_net',
- return_value='test_floating_network'):
- task_args = self.rally_base._build_task_args('test_file_name')
- self.assertEqual(task_args['netid'], '')
+ task_args = self.rally_base._build_task_args('test_file_name')
+ self.assertEqual(task_args['netid'], '')
+ mock_func.assert_called()
- def check_scenario_file(self, value):
+ @staticmethod
+ def check_scenario_file(value):
yaml_file = 'opnfv-{}.yaml'.format('test_file_name')
if yaml_file in value:
return False
return True
- def test_prepare_test_list_missing_scenario_file(self):
- with mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os.path.exists',
- side_effect=self.check_scenario_file), \
- self.assertRaises(Exception):
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os.path.exists')
+ def test_prepare_test_list_missing_scenario_file(self, mock_func):
+ mock_func.side_effect = self.check_scenario_file
+ with self.assertRaises(Exception):
self.rally_base._prepare_test_list('test_file_name')
+ mock_func.assert_called()
- def check_temp_dir(self, value):
+ @staticmethod
+ def check_temp_dir(value):
yaml_file = 'opnfv-{}.yaml'.format('test_file_name')
if yaml_file in value:
return True
return False
- def test_prepare_test_list_missing_temp_dir(self):
- with mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os.path.exists',
- side_effect=self.check_temp_dir), \
- mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os.makedirs') as mock_os_makedir, \
- mock.patch.object(self.rally_base, 'apply_blacklist',
- return_value=mock.Mock()) as mock_method:
- yaml_file = 'opnfv-{}.yaml'.format('test_file_name')
- ret_val = os.path.join(self.rally_base.TEMP_DIR, yaml_file)
- self.assertEqual(self.rally_base.
- _prepare_test_list('test_file_name'),
- ret_val)
- self.assertTrue(mock_method.called)
- self.assertTrue(mock_os_makedir.called)
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os.path.exists')
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os.makedirs')
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ 'apply_blacklist', return_value=mock.Mock())
+ def test_prepare_test_list_missing_temp_dir(
+ self, mock_method, mock_os_makedirs, mock_path_exists):
+ mock_path_exists.side_effect = self.check_temp_dir
+
+ yaml_file = 'opnfv-{}.yaml'.format('test_file_name')
+ ret_val = os.path.join(self.rally_base.TEMP_DIR, yaml_file)
+ self.assertEqual(self.rally_base._prepare_test_list('test_file_name'),
+ ret_val)
+ mock_path_exists.assert_called()
+ mock_method.assert_called()
+ mock_os_makedirs.assert_called()
def test_get_task_id_default(self):
cmd_raw = 'Task 1: started'
@@ -125,139 +123,163 @@ class OSRallyTesting(unittest.TestCase):
self.assertEqual(self.rally_base.get_cmd_output(proc),
'lineline')
- def test_excl_scenario_default(self):
+ @mock.patch('__builtin__.open', mock.mock_open())
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.yaml.safe_load',
+ return_value={'scenario': [
+ {'scenarios': ['test_scenario'],
+ 'installers': ['test_installer'],
+ 'tests': ['test']},
+ {'scenarios': ['other_scenario'],
+ 'installers': ['test_installer'],
+ 'tests': ['other_test']}]})
+ def test_excl_scenario_default(self, mock_func):
CONST.__setattr__('INSTALLER_TYPE', 'test_installer')
CONST.__setattr__('DEPLOY_SCENARIO', 'test_scenario')
- dic = {'scenario': [{'scenarios': ['test_scenario'],
- 'installers': ['test_installer'],
- 'tests': ['test']}]}
- with mock.patch('__builtin__.open', mock.mock_open()), \
- mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'yaml.safe_load',
- return_value=dic):
- self.assertEqual(self.rally_base.excl_scenario(),
- ['test'])
-
- def test_excl_scenario_exception(self):
- with mock.patch('__builtin__.open', side_effect=Exception):
- self.assertEqual(self.rally_base.excl_scenario(),
- [])
-
- def test_excl_func_default(self):
+ self.assertEqual(self.rally_base.excl_scenario(), ['test'])
+ mock_func.assert_called()
+
+ @mock.patch('__builtin__.open', mock.mock_open())
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.yaml.safe_load',
+ return_value={'scenario': [
+ {'scenarios': ['^os-[^-]+-featT-modeT$'],
+ 'installers': ['test_installer'],
+ 'tests': ['test1']},
+ {'scenarios': ['^os-ctrlT-[^-]+-modeT$'],
+ 'installers': ['test_installer'],
+ 'tests': ['test2']},
+ {'scenarios': ['^os-ctrlT-featT-[^-]+$'],
+ 'installers': ['test_installer'],
+ 'tests': ['test3']},
+ {'scenarios': ['^os-'],
+ 'installers': ['test_installer'],
+ 'tests': ['test4']},
+ {'scenarios': ['other_scenario'],
+ 'installers': ['test_installer'],
+ 'tests': ['test0a']},
+ {'scenarios': [''], # empty scenario
+ 'installers': ['test_installer'],
+ 'tests': ['test0b']}]})
+ def test_excl_scenario_regex(self, mock_func):
+ CONST.__setattr__('INSTALLER_TYPE', 'test_installer')
+ CONST.__setattr__('DEPLOY_SCENARIO', 'os-ctrlT-featT-modeT')
+ self.assertEqual(self.rally_base.excl_scenario(),
+ ['test1', 'test2', 'test3', 'test4'])
+ mock_func.assert_called()
+
+ @mock.patch('__builtin__.open', side_effect=Exception)
+ def test_excl_scenario_exception(self, mock_open):
+ self.assertEqual(self.rally_base.excl_scenario(), [])
+ mock_open.assert_called()
+
+ @mock.patch('__builtin__.open', mock.mock_open())
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.yaml.safe_load',
+ return_value={'functionality': [
+ {'functions': ['no_live_migration'], 'tests': ['test']}]})
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ 'live_migration_supported', return_value=False)
+ def test_excl_func_default(self, mock_func, mock_yaml_load):
CONST.__setattr__('INSTALLER_TYPE', 'test_installer')
CONST.__setattr__('DEPLOY_SCENARIO', 'test_scenario')
- dic = {'functionality': [{'functions': ['no_live_migration'],
- 'tests': ['test']}]}
- with mock.patch('__builtin__.open', mock.mock_open()), \
- mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'yaml.safe_load',
- return_value=dic), \
- mock.patch.object(self.rally_base, 'live_migration_supported',
- return_value=False):
- self.assertEqual(self.rally_base.excl_func(),
- ['test'])
-
- def test_excl_func_exception(self):
- with mock.patch('__builtin__.open', side_effect=Exception):
- self.assertEqual(self.rally_base.excl_func(),
- [])
-
- def test_file_is_empty_default(self):
- mock_obj = mock.Mock()
+ self.assertEqual(self.rally_base.excl_func(), ['test'])
+ mock_func.assert_called()
+ mock_yaml_load.assert_called()
+
+ @mock.patch('__builtin__.open', side_effect=Exception)
+ def test_excl_func_exception(self, mock_open):
+ self.assertEqual(self.rally_base.excl_func(), [])
+ mock_open.assert_called()
+
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os.stat',
+ return_value=mock.Mock())
+ def test_file_is_empty_default(self, mock_os_stat):
attrs = {'st_size': 10}
- mock_obj.configure_mock(**attrs)
- with mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os.stat',
- return_value=mock_obj):
- self.assertEqual(self.rally_base.file_is_empty('test_file_name'),
- False)
-
- def test_file_is_empty_exception(self):
- with mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os.stat',
- side_effect=Exception):
- self.assertEqual(self.rally_base.file_is_empty('test_file_name'),
- True)
-
- def test_run_task_missing_task_file(self):
- with mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os.path.exists',
- return_value=False), \
- self.assertRaises(Exception):
- self.rally_base._run_task('test_name')
+ mock_os_stat.return_value.configure_mock(**attrs)
+ self.assertEqual(self.rally_base.file_is_empty('test_file_name'),
+ False)
+ mock_os_stat.assert_called()
- @mock.patch('functest.opnfv_tests.openstack.rally.rally.logger.info')
- def test_run_task_no_tests_for_scenario(self, mock_logger_info):
- with mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os.path.exists',
- return_value=True), \
- mock.patch.object(self.rally_base, '_prepare_test_list',
- return_value='test_file_name'), \
- mock.patch.object(self.rally_base, 'file_is_empty',
- return_value=True):
- self.rally_base._run_task('test_name')
- str = 'No tests for scenario "test_name"'
- mock_logger_info.assert_any_call(str)
-
- @mock.patch('functest.opnfv_tests.openstack.rally.rally.logger.error')
- def test_run_task_taskid_missing(self, mock_logger_error):
- with mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os.path.exists',
- return_value=True), \
- mock.patch.object(self.rally_base, '_prepare_test_list',
- return_value='test_file_name'), \
- mock.patch.object(self.rally_base, 'file_is_empty',
- return_value=False), \
- mock.patch.object(self.rally_base, '_build_task_args',
- return_value={}), \
- mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'subprocess.Popen'), \
- mock.patch.object(self.rally_base, '_get_output',
- return_value=mock.Mock()), \
- mock.patch.object(self.rally_base, 'get_task_id',
- return_value=None), \
- mock.patch.object(self.rally_base, 'get_cmd_output',
- return_value=''):
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os.stat',
+ side_effect=Exception)
+ def test_file_is_empty_exception(self, mock_os_stat):
+ self.assertEqual(self.rally_base.file_is_empty('test_file_name'), True)
+ mock_os_stat.assert_called()
+
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os.path.exists',
+ return_value=False)
+ def test_run_task_missing_task_file(self, mock_path_exists):
+ with self.assertRaises(Exception):
self.rally_base._run_task('test_name')
- str = 'Failed to retrieve task_id, validating task...'
- mock_logger_error.assert_any_call(str)
-
- @mock.patch('functest.opnfv_tests.openstack.rally.rally.logger.info')
- @mock.patch('functest.opnfv_tests.openstack.rally.rally.logger.error')
- def test_run_task_default(self, mock_logger_error,
- mock_logger_info):
- popen = mock.Mock()
+ mock_path_exists.assert_called()
+
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os.path.exists',
+ return_value=True)
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ '_prepare_test_list', return_value='test_file_name')
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ 'file_is_empty', return_value=True)
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.LOGGER.info')
+ def test_run_task_no_tests_for_scenario(self, mock_logger_info,
+ mock_file_empty, mock_prep_list,
+ mock_path_exists):
+ self.rally_base._run_task('test_name')
+ mock_logger_info.assert_any_call('No tests for scenario \"%s\"',
+ 'test_name')
+ mock_file_empty.assert_called()
+ mock_prep_list.assert_called()
+ mock_path_exists.assert_called()
+
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ '_prepare_test_list', return_value='test_file_name')
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ 'file_is_empty', return_value=False)
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ '_build_task_args', return_value={})
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ '_get_output', return_value=mock.Mock())
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ 'get_task_id', return_value=None)
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ 'get_cmd_output', return_value='')
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os.path.exists',
+ return_value=True)
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.subprocess.Popen')
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.LOGGER.error')
+ def test_run_task_taskid_missing(self, mock_logger_error, *args):
+ self.rally_base._run_task('test_name')
+ text = 'Failed to retrieve task_id, validating task...'
+ mock_logger_error.assert_any_call(text)
+
+ @mock.patch('__builtin__.open', mock.mock_open())
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ '_prepare_test_list', return_value='test_file_name')
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ 'file_is_empty', return_value=False)
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ '_build_task_args', return_value={})
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ '_get_output', return_value=mock.Mock())
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ 'get_task_id', return_value='1')
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ 'get_cmd_output', return_value='')
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ 'task_succeed', return_value=True)
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os.path.exists',
+ return_value=True)
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.subprocess.Popen')
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os.makedirs')
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os.popen',
+ return_value=mock.Mock())
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.LOGGER.info')
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.LOGGER.error')
+ def test_run_task_default(self, mock_logger_error, mock_logger_info,
+ mock_popen, *args):
attrs = {'read.return_value': 'json_result'}
- popen.configure_mock(**attrs)
-
- with mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os.path.exists',
- return_value=True), \
- mock.patch.object(self.rally_base, '_prepare_test_list',
- return_value='test_file_name'), \
- mock.patch.object(self.rally_base, 'file_is_empty',
- return_value=False), \
- mock.patch.object(self.rally_base, '_build_task_args',
- return_value={}), \
- mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'subprocess.Popen'), \
- mock.patch.object(self.rally_base, '_get_output',
- return_value=mock.Mock()), \
- mock.patch.object(self.rally_base, 'get_task_id',
- return_value='1'), \
- mock.patch.object(self.rally_base, 'get_cmd_output',
- return_value=''), \
- mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os.makedirs'), \
- mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os.popen',
- return_value=popen), \
- mock.patch('__builtin__.open', mock.mock_open()), \
- mock.patch.object(self.rally_base, 'task_succeed',
- return_value=True):
- self.rally_base._run_task('test_name')
- str = 'Test scenario: "test_name" OK.\n'
- mock_logger_info.assert_any_call(str)
+ mock_popen.return_value.configure_mock(**attrs)
+ self.rally_base._run_task('test_name')
+ text = 'Test scenario: "test_name" OK.\n'
+ mock_logger_info.assert_any_call(text)
+ mock_logger_error.assert_not_called()
def test_prepare_env_testname_invalid(self):
self.rally_base.TESTS = ['test1', 'test2']
@@ -265,103 +287,103 @@ class OSRallyTesting(unittest.TestCase):
with self.assertRaises(Exception):
self.rally_base._prepare_env()
- def test_prepare_env_volume_creation_failed(self):
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os_utils.'
+ 'list_volume_types', return_value=None)
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os_utils.'
+ 'create_volume_type', return_value=None)
+    def test_prepare_env_volume_creation_failed(self, mock_create, mock_list):
self.rally_base.TESTS = ['test1', 'test2']
self.rally_base.test_name = 'test1'
- volume_type = None
- with mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os_utils.list_volume_types',
- return_value=None), \
- mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os_utils.create_volume_type',
- return_value=volume_type), \
- self.assertRaises(Exception):
+ with self.assertRaises(Exception):
self.rally_base._prepare_env()
-
- def test_prepare_env_image_missing(self):
+ mock_list.assert_called()
+ mock_create.assert_called()
+
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os_utils.'
+ 'list_volume_types', return_value=None)
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os_utils.'
+ 'create_volume_type', return_value=mock.Mock())
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os_utils.'
+ 'get_or_create_image', return_value=(True, None))
+ def test_prepare_env_image_missing(self, mock_get_img, mock_create_vt,
+ mock_list_vt):
self.rally_base.TESTS = ['test1', 'test2']
self.rally_base.test_name = 'test1'
- volume_type = mock.Mock()
- image_id = None
- with mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os_utils.list_volume_types',
- return_value=None), \
- mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os_utils.create_volume_type',
- return_value=volume_type), \
- mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os_utils.get_or_create_image',
- return_value=(True, image_id)), \
- self.assertRaises(Exception):
+ with self.assertRaises(Exception):
self.rally_base._prepare_env()
-
- def test_prepare_env_image_shared_network_creation_failed(self):
+ mock_get_img.assert_called()
+ mock_create_vt.assert_called()
+ mock_list_vt.assert_called()
+
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os_utils.'
+ 'list_volume_types', return_value=None)
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os_utils.'
+ 'create_volume_type', return_value=mock.Mock())
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os_utils.'
+ 'get_or_create_image', return_value=(True, 'image_id'))
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os_utils.'
+ 'create_shared_network_full', return_value=None)
+ def test_prepare_env_image_shared_network_creation_failed(
+ self, mock_create_net, mock_get_img, mock_create_vt, mock_list_vt):
self.rally_base.TESTS = ['test1', 'test2']
self.rally_base.test_name = 'test1'
- volume_type = mock.Mock()
- image_id = 'image_id'
- network_dict = None
- with mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os_utils.list_volume_types',
- return_value=None), \
- mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os_utils.create_volume_type',
- return_value=volume_type), \
- mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os_utils.get_or_create_image',
- return_value=(True, image_id)), \
- mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os_utils.create_shared_network_full',
- return_value=network_dict), \
- self.assertRaises(Exception):
+ with self.assertRaises(Exception):
self.rally_base._prepare_env()
-
- def test_run_tests_all(self):
+ mock_create_net.assert_called()
+ mock_get_img.assert_called()
+ mock_create_vt.assert_called()
+ mock_list_vt.assert_called()
+
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ '_run_task', return_value=mock.Mock())
+ def test_run_tests_all(self, mock_run_task):
self.rally_base.TESTS = ['test1', 'test2']
self.rally_base.test_name = 'all'
- with mock.patch.object(self.rally_base, '_run_task',
- return_value=mock.Mock()):
- self.rally_base._run_tests()
- self.rally_base._run_task.assert_any_call('test1')
- self.rally_base._run_task.assert_any_call('test2')
+ self.rally_base._run_tests()
+ mock_run_task.assert_any_call('test1')
+ mock_run_task.assert_any_call('test2')
- def test_run_tests_default(self):
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ '_run_task', return_value=mock.Mock())
+ def test_run_tests_default(self, mock_run_task):
self.rally_base.TESTS = ['test1', 'test2']
self.rally_base.test_name = 'test1'
- with mock.patch.object(self.rally_base, '_run_task',
- return_value=mock.Mock()):
- self.rally_base._run_tests()
- self.rally_base._run_task.assert_any_call('test1')
-
- def test_clean_up_default(self):
+ self.rally_base._run_tests()
+ mock_run_task.assert_any_call('test1')
+
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os_utils.'
+ 'delete_volume_type')
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.os_utils.'
+ 'delete_glance_image')
+ def test_clean_up_default(self, mock_glance_method, mock_vol_method):
self.rally_base.volume_type = mock.Mock()
self.rally_base.cinder_client = mock.Mock()
self.rally_base.image_exists = False
self.rally_base.image_id = 1
self.rally_base.nova_client = mock.Mock()
- with mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os_utils.delete_volume_type') as mock_vol_method, \
- mock.patch('functest.opnfv_tests.openstack.rally.rally.'
- 'os_utils.delete_glance_image') as mock_glance_method:
- self.rally_base._clean_up()
- mock_vol_method.assert_any_call(self.rally_base.cinder_client,
- self.rally_base.volume_type)
- mock_glance_method.assert_any_call(self.rally_base.nova_client,
- 1)
-
- def test_run_default(self):
- with mock.patch.object(self.rally_base, '_prepare_env'), \
- mock.patch.object(self.rally_base, '_run_tests'), \
- mock.patch.object(self.rally_base, '_generate_report'), \
- mock.patch.object(self.rally_base, '_clean_up'):
- self.assertEqual(self.rally_base.run(),
- testcase.TestCase.EX_OK)
-
- def test_run_exception(self):
- with mock.patch.object(self.rally_base, '_prepare_env',
- side_effect=Exception):
- self.assertEqual(self.rally_base.run(),
- testcase.TestCase.EX_RUN_ERROR)
+ self.rally_base._clean_up()
+ mock_vol_method.assert_any_call(self.rally_base.cinder_client,
+ self.rally_base.volume_type)
+ mock_glance_method.assert_any_call(self.rally_base.nova_client,
+ 1)
+
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ '_prepare_env')
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ '_run_tests')
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ '_generate_report')
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ '_clean_up')
+ def test_run_default(self, *args):
+ self.assertEqual(self.rally_base.run(), testcase.TestCase.EX_OK)
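+        # 'args' holds the four patched methods; check that each one was invoked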
+ map(lambda m: m.assert_called(), args)
+
+ @mock.patch('functest.opnfv_tests.openstack.rally.rally.RallyBase.'
+ '_prepare_env', side_effect=Exception)
+ def test_run_exception(self, mock_prep_env):
+ self.assertEqual(self.rally_base.run(), testcase.TestCase.EX_RUN_ERROR)
+ mock_prep_env.assert_called()
if __name__ == "__main__":
diff --git a/functest/utils/openstack_utils.py b/functest/utils/openstack_utils.py
index f8719bf0..4f8d6c35 100644
--- a/functest/utils/openstack_utils.py
+++ b/functest/utils/openstack_utils.py
@@ -175,11 +175,11 @@ def get_session_auth(other_creds={}):
return auth
-def get_endpoint(service_type, endpoint_type='publicURL'):
+def get_endpoint(service_type, interface='public'):
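+    # keystoneauth1 uses 'interface' (public/internal/admin), not 'endpoint_type'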
auth = get_session_auth()
return get_session().get_endpoint(auth=auth,
service_type=service_type,
- endpoint_type=endpoint_type)
+ interface=interface)
def get_session(other_creds={}):