summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--docker/Dockerfile12
-rw-r--r--docker/Dockerfile.aarch6411
-rwxr-xr-xdocs/com/pres/Summit/Berlin-2016/summit-Berlin.html2
-rwxr-xr-xdocs/com/pres/Summit/Berlin-2016/testapi.html28
-rw-r--r--docs/release/release-notes/functest-release.rst6
-rw-r--r--docs/testing/developer/devguide/index.rst26
-rw-r--r--docs/testing/developer/internship/testapi_evolution/index.rst40
-rw-r--r--docs/testing/user/configguide/configguide.rst1
-rw-r--r--functest/ci/config_aarch64_patch.yaml8
-rwxr-xr-xfunctest/ci/exec_test.sh148
-rwxr-xr-xfunctest/ci/run_tests.py23
-rwxr-xr-xfunctest/ci/testcases.yaml34
-rwxr-xr-xfunctest/ci/tier_builder.py1
-rwxr-xr-xfunctest/ci/tier_handler.py13
-rwxr-xr-xfunctest/opnfv_tests/features/security_scan.py4
-rw-r--r--functest/opnfv_tests/openstack/snaps/health_check.py7
-rw-r--r--functest/opnfv_tests/vnf/ims/orchestra_ims.py211
-rw-r--r--functest/opnfv_tests/vnf/ims/orchestra_ims.yaml14
-rw-r--r--functest/tests/unit/ci/__init__.py0
-rw-r--r--functest/tests/unit/ci/test_prepare_env.py345
20 files changed, 623 insertions, 311 deletions
diff --git a/docker/Dockerfile b/docker/Dockerfile
index de47e157a..81c3c540b 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -54,6 +54,7 @@ libpq-dev \
libssl-dev \
libxml2-dev \
libxslt-dev \
+libzmq3-dev \
python-dev \
python-mock \
python-pip \
@@ -108,10 +109,10 @@ RUN pip install -r ${REPOS_DIR}/tempest/requirements.txt
RUN cd ${FUNCTEST_REPO_DIR} \
&& pip install -r requirements.txt \
- && pip install .
+ && pip install -e .
RUN cd ${RELENG_MODULE_DIR} \
- && pip install .
+ && pip install -e .
RUN cd ${REPOS_DIR}/barometer \
&& pip install .
@@ -133,12 +134,11 @@ RUN pip install -e ${REPOS_DIR}/snaps/
# SFC integration
RUN /bin/bash -c ". ${REPOS_DIR}/sfc/sfc/tests/functest/setup_scripts/tacker_client_install.sh"
-RUN cd ${REPOS_DIR}/sfc && pip install .
+RUN cd ${REPOS_DIR}/sfc && pip install -e .
# SDNVPN integration
-RUN cd ${REPOS_DIR}/sdnvpn && pip install .
-
-RUN cd ${REPOS_DIR}/bgpvpn && pip install .
+RUN cd ${REPOS_DIR}/sdnvpn && pip install -e .
+RUN cd ${REPOS_DIR}/bgpvpn && pip install -e .
# Kingbird integration
RUN cd ${REPOS_DIR}/kingbird && pip install -e .
diff --git a/docker/Dockerfile.aarch64 b/docker/Dockerfile.aarch64
index a469801f3..82add067b 100644
--- a/docker/Dockerfile.aarch64
+++ b/docker/Dockerfile.aarch64
@@ -105,10 +105,10 @@ RUN pip install -r ${REPOS_DIR}/tempest/requirements.txt
RUN cd ${FUNCTEST_REPO_DIR} \
&& pip install -r requirements.txt \
- && pip install .
+ && pip install -e .
RUN cd ${RELENG_MODULE_DIR} \
- && pip install .
+ && pip install -e .
RUN find ${FUNCTEST_REPO_DIR} -name "*.py" \
-not -path "*tests/unit*" |xargs grep __main__ |cut -d\: -f 1 |xargs chmod -c 755 \
@@ -127,12 +127,11 @@ RUN pip install -e ${REPOS_DIR}/snaps/
# SFC integration
RUN /bin/bash -c ". ${REPOS_DIR}/sfc/sfc/tests/functest/setup_scripts/tacker_client_install.sh"
-RUN cd ${REPOS_DIR}/sfc && pip install .
+RUN cd ${REPOS_DIR}/sfc && pip install -e .
# SDNVPN integration
-RUN cd ${REPOS_DIR}/sdnvpn && pip install .
-
-RUN cd ${REPOS_DIR}/bgpvpn && pip install .
+RUN cd ${REPOS_DIR}/sdnvpn && pip install -e .
+RUN cd ${REPOS_DIR}/bgpvpn && pip install -e .
# Kingbird integration
RUN cd ${REPOS_DIR}/kingbird && pip install -e .
diff --git a/docs/com/pres/Summit/Berlin-2016/summit-Berlin.html b/docs/com/pres/Summit/Berlin-2016/summit-Berlin.html
index 4173a86ac..8369443f7 100755
--- a/docs/com/pres/Summit/Berlin-2016/summit-Berlin.html
+++ b/docs/com/pres/Summit/Berlin-2016/summit-Berlin.html
@@ -230,7 +230,7 @@
## What's new?
* Slicing of the tests (healthcheck / Smoke / SDN controllers / Features / Components / VNFs)
* Better Test duration management
- * Refactoring (repo, case management, test API)
+ * Refactoring (repo, case management, TestAPI)
* Automatic reporting
* Dashboard evolution to ELK
* CLI
diff --git a/docs/com/pres/Summit/Berlin-2016/testapi.html b/docs/com/pres/Summit/Berlin-2016/testapi.html
index 7035d71df..16f97c44d 100755
--- a/docs/com/pres/Summit/Berlin-2016/testapi.html
+++ b/docs/com/pres/Summit/Berlin-2016/testapi.html
@@ -6,7 +6,7 @@
<title>OPNFV presentation</title>
- <meta name="description" content="Test API">
+ <meta name="description" content="TestAPI">
<meta name="author" content="Serena Feng">
<meta name="apple-mobile-web-app-capable" content="yes" />
@@ -42,7 +42,7 @@
<section data-background="../../../img/title-bg-berlin.png" data-background-transition="none">
<br><br><br><br><br>
- <h1>Test API</h1>
+ <h1>TestAPI</h1>
<h3>Clean, Easy, Complete</h3>
<br>
<h4>OPNFV testing community</h4>
@@ -52,7 +52,7 @@
<section data-markdown>
# Agenda
- * testAPI: what for?
+ * TestAPI: what for?
* API overview
* API evolution
* Roadmap
@@ -60,7 +60,7 @@
<section>
<section data-markdown>
- # test API: what for?
+ # TestAPI: what for?
</section>
<section data-markdown>
## Consistant view for
@@ -78,12 +78,12 @@
</section>
<section data-markdown>
<script type='text/template'>
- ## Achieve using testAPI
- * Uniform API: testAPI
+ ## Achieve using TestAPI
+ * Uniform API: TestAPI
* Uniform format: JSON
* Universal Location: http://testresults.opnfv.org
<aside class='notes'>
- By using testAPI, we hope to provide a uniform way of collection and saving test results to a universal location
+ By using TestAPI, we hope to provide a uniform way of collection and saving test results to a universal location
</aside>
</script>
</section>
@@ -101,11 +101,11 @@
</section>
<section data-markdown>
## Storage structure
- ![testapi](https://wiki.opnfv.org/download/attachments/2926452/results_collection_structure.png?version=1&modificationDate=1459196347000&api=v2 "OPNFV API page")
+ ![TestAPI](https://wiki.opnfv.org/download/attachments/2926452/results_collection_structure.png?version=1&modificationDate=1459196347000&api=v2 "OPNFV API page")
</section>
<section data-markdown>
## API in Brahmaputra
- ![testapi](../../../img/testapi0.png)
+ ![TestAPI](../../../img/testapi0.png)
https://wiki.opnfv.org/display/functest/Collection+Of+Test+Results
</section>
@@ -137,7 +137,7 @@
<aside class='notes'>
So you can discover and understand the capabilities of the service without
access to source code, documentation, or through network traffic inspection,
- and also you can interact with the testAPI directly through swagger website.
+ and also you can interact with the TestAPI directly through swagger website.
</aside>
</ul>
<p class="fragment fade-up"><b>All done in Colorado!</b></p>
@@ -152,7 +152,7 @@
<section>
<h2> URI changes...</h2>
<div style="text-align:left"">
- <p> testresults.opnfv.org/<span style="color:lightblue">testapi</span> => <br>testresults.opnfv.org/<span style="color:yellow">test/api/v1</span> </p>
+ <p> testresults.opnfv.org/<span style="color:lightblue">TestAPI</span> => <br>testresults.opnfv.org/<span style="color:yellow">test/api/v1</span> </p>
<p> /test/api/v1/<b>pods</b></p>
<p> /test/api/v1/<b>projects</b></p>
@@ -195,7 +195,7 @@
</section>
<section data-markdown>
http://testresults.opnfv.org/test/swagger/spec.html
- ![alt text](../../../img/testapi1.png "Test API swagger interface")
+ ![alt text](../../../img/testapi1.png "TestAPI swagger interface")
</section>
<section>
<h2>unit tests</h2>
@@ -223,8 +223,8 @@ OK
</section>
<section data-markdown>
## Roadmap
- * Dockerize testAPI
- * Automatic update of testAPI
+ * Dockerize TestAPI
+ * Automatic update of TestAPI
* Command Line Interface
* Automatic update for pods/projects/testcases
</section>
diff --git a/docs/release/release-notes/functest-release.rst b/docs/release/release-notes/functest-release.rst
index 07389bb5b..ce03047f6 100644
--- a/docs/release/release-notes/functest-release.rst
+++ b/docs/release/release-notes/functest-release.rst
@@ -32,7 +32,7 @@ Version history
+------------+----------+------------------+------------------------+
OPNFV Colorado Release
-=========================
+======================
Functest deals with functional testing of the OPNFV solution.
It includes test cases developed within the project and test cases developed in
@@ -99,7 +99,7 @@ Software
- The Functest Docker image: https://hub.docker.com/r/opnfv/functest (tag: colorado.1.0)
- - The testapi Docker image: https://hub.docker.com/r/opnfv/testapi (tag:colorado.1.0)
+ - The TestAPI Docker image: https://hub.docker.com/r/opnfv/testapi (tag:colorado.1.0)
Documents
@@ -120,7 +120,7 @@ Feature evolution
- refactoring of ODL functional tests (with upstream modifications)
- - refactoring of testapi (update, swagger documentation, dockerization)
+ - refactoring of TestAPI (update, swagger documentation, dockerization)
- jenkins logs improvement
diff --git a/docs/testing/developer/devguide/index.rst b/docs/testing/developer/devguide/index.rst
index 722d0a7d2..ce5dc77be 100644
--- a/docs/testing/developer/devguide/index.rst
+++ b/docs/testing/developer/devguide/index.rst
@@ -14,7 +14,7 @@ Introduction
Functest is a project dealing with functional testing.
Functest produces its own internal test cases but can also be considered
as a framework to support feature and VNF onboarding project testing.
-Functest developed a test API and defined a test collection framework
+Functest developed a TestAPI and defined a test collection framework
that can be used by any OPNFV project.
Therefore there are many ways to contribute to Functest. You can:
@@ -259,9 +259,9 @@ The Test result management can be summarized as follows::
| |
+----------------------+
-Test API description
---------------------
-The Test API is used to declare pods, projects, test cases and test
+TestAPI description
+-------------------
+The TestAPI is used to declare pods, projects, test cases and test
results. Pods are the pods used to run the tests.
The results pushed in the database are related to pods, projects and
cases. If you try to push results of test done on non referenced pod,
@@ -499,21 +499,21 @@ Scenarios:
The code of the API is hosted in the releng repository `[6]`_.
The static documentation of the API can be found at `[17]`_.
-The test API has been dockerized and may be installed locally in your
+The TestAPI has been dockerized and may be installed locally in your
lab. See `[15]`_ for details.
-The deployment of the test API has been automated.
+The deployment of the TestAPI has been automated.
A jenkins job manages:
- * the unit tests of the test api
+ * the unit tests of the TestAPI
* the creation of a new docker file
- * the deployment of the new test api
- * the archive of the old test api
+ * the deployment of the new TestAPI
+ * the archive of the old TestAPI
* the backup of the Mongo DB
-Test API Authorization
-~~~~~~~~~~~~~~~~~~~~~~
+TestAPI Authorization
+~~~~~~~~~~~~~~~~~~~~~
-PUT/DELETE/POST operations of the testapi now require token based authorization. The token needs
+PUT/DELETE/POST operations of the TestAPI now require token based authorization. The token needs
to be added in the request using a header 'X-Auth-Token' for access to the database.
e.g::
@@ -626,7 +626,7 @@ script can be found in `[16]`_.
For next versions, it was decided to integrated bitergia dashboard.
Bitergia already provides a dashboard for code and infrastructure.
A new Test tab will be added. The dataset will be built by consuming
-the test API.
+the TestAPI.
=======
diff --git a/docs/testing/developer/internship/testapi_evolution/index.rst b/docs/testing/developer/internship/testapi_evolution/index.rst
index 6a1cde7df..3038d0ac6 100644
--- a/docs/testing/developer/internship/testapi_evolution/index.rst
+++ b/docs/testing/developer/internship/testapi_evolution/index.rst
@@ -7,9 +7,9 @@ International License.
You should have received a copy of the license along with this.
If not, see <http://creativecommons.org/licenses/by/4.0/>.
-==================
-Test API evolution
-==================
+=================
+TestAPI evolution
+=================
Author: Sakala Venkata Krishna Rohit
Mentors: S. Feng, J.Lausuch, M.Richomme
@@ -17,10 +17,10 @@ Mentors: S. Feng, J.Lausuch, M.Richomme
Abstract
========
-The testapi is used by all the test opnfv projects to report results.
+The TestAPI is used by all the test opnfv projects to report results.
It is also used to declare projects, test cases and labs. A major refactoring
-has been done in Colorado with the introduction of swagger. The testapi is defined in Functest
-developer guide. The purpose of this project is to add more features to the testapi that automate
+has been done in Colorado with the introduction of swagger. The TestAPI is defined in Functest
+developer guide. The purpose of this project is to add more features to the TestAPI that automate
the tasks that are done manually now, though there are tasks other than automation.
Version history
@@ -48,33 +48,33 @@ documentation is as follows.
Problem Statement:
------------------
-The problem statement could be divided into pending features that needed to be added into testapi
+The problem statement could be divided into pending features that needed to be added into TestAPI
repo. The following were to be accomplished within the internship time frame.
-* **Add verification jenkins job for the testapi code**
+* **Add verification jenkins job for the TestAPI code**
The purpose of this job is to verify whehter the unit tests are successful or not with the
inclusion of the patchset submitted.
* **Automatic update of opnfv/testapi docker image**
- The docker image of testapi is hosted in the opnfv docker hub. To ensure that the testapi image
+ The docker image of TestAPI is hosted in the opnfv docker hub. To ensure that the TestAPI image
is always updated with the repository, automatic updation of the image is necessary and a job
is triggered whenever a new patch gets merged.
* **Automation deployment of testresults.opnfv.org/test/ website**
- In the same manner as the docker image of testapi is updated, the testapi website needs to be
+ In the same manner as the docker image of TestAPI is updated, the TestAPI website needs to be
in sync with the repository code. So, a job has been added to the opnfv jenkins ci for the
updation of the testresults website.
-* **Generate static documentation of testapi calls**
- The purpose of this is to give an static/offline view of testapi. If someone wants to have a
- look at the Restful apis of testapi, he/she does't need to go to the website, he can download
+* **Generate static documentation of TestAPI calls**
+ The purpose of this is to give an static/offline view of TestAPI. If someone wants to have a
+ look at the Restful APIs of TestAPI, he/she does't need to go to the website, he can download
a html page and view it anytime.
-* **Backup MongoDB of testapi**
+* **Backup MongoDB of TestAPI**
The mongoDB needs to be backed up every week. Till now it was done manually, but due to this
internship, it is now automated using a jenkins job.
-* **Add token based authorization to the testapi calls**
+* **Add token based authorization to the TestAPI calls**
The token based authorization was implemented to ensure that only ci_pods could access the
database. Authentication has been added to only delete/put/post requests.
@@ -82,7 +82,7 @@ Curation Phase:
---------------
The curation phase was the first 3 to 4 weeks of the internship. This phase was to get familiar
-with the testapi code and functionality and propose the solutions/tools for the tasks mentioned
+with the TestAPI code and functionality and propose the solutions/tools for the tasks mentioned
above. Swagger codegen was choosen out of the four tools proposed `[3]`_ for generating static
documentaion.
@@ -107,7 +107,7 @@ The progress and completion of the tasks is described in the below table.
| **Date** | **Comment** |
| | |
+--------------------------+------------------------------------------+
-| Nov 14th - Dec 31st | Understand Testapi code and the |
+| Nov 14th - Dec 31st | Understand TestAPI code and the |
| | requirements. |
+--------------------------+------------------------------------------+
| Jan 1st - Jan 7th | Add jenkins job to create static |
@@ -134,8 +134,8 @@ the internship. This section may help other developers in solving any errors cas
code written as a part of this internship.
-Test Api
---------
+TestAPI
+-------
What is the difference between defining data_file as "/etc/.." and "etc/.." in setup.cfg ?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -216,7 +216,7 @@ defined.
What job style should be used when there is a situation like one build should trigger other builds
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
or when different build scripts need to be run on different machines ?
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
MultiJob style should be used as it has phases where each phase can be taken as a build scipt and
can have its own parameters by which one can define the SLAVE_LABEL parameter.
diff --git a/docs/testing/user/configguide/configguide.rst b/docs/testing/user/configguide/configguide.rst
index 1d56e99aa..6bec89258 100644
--- a/docs/testing/user/configguide/configguide.rst
+++ b/docs/testing/user/configguide/configguide.rst
@@ -392,7 +392,6 @@ follows::
| |-- check_os.sh
| |-- config_functest.yaml
| |-- config_patch.yaml
- | |-- exec_test.sh
| |-- generate_report.py
| |-- prepare_env.py
| |-- run_tests.py
diff --git a/functest/ci/config_aarch64_patch.yaml b/functest/ci/config_aarch64_patch.yaml
index 9a345e3f7..b43b5a76c 100644
--- a/functest/ci/config_aarch64_patch.yaml
+++ b/functest/ci/config_aarch64_patch.yaml
@@ -5,10 +5,10 @@ os:
image_file_name: cirros-d161201-aarch64-disk.img
image_password: gocubsgo
- snaps_simple_healthcheck:
- disk_image: /home/opnfv/functest/data/cirros-d161201-aarch64-disk.img
- kernel_image: /home/opnfv/functest/data/cirros-d161201-aarch64-kernel
- ramdisk_image: /home/opnfv/functest/data/cirros-d161201-aarch64-initramfs
+ snaps_health_check:
+ disk_url: http://download.cirros-cloud.net/daily/20161201/cirros-d161201-aarch64-disk.img
+ kernel_url: http://download.cirros-cloud.net/daily/20161201/cirros-d161201-aarch64-kernel
+ ramdisk_url: http://download.cirros-cloud.net/daily/20161201/cirros-d161201-aarch64-initramfs
extra_properties:
os_command_line: root=/dev/vdb1 rw rootwait console=tty0 console=ttyS0 console=ttyAMA0
hw_video_model: vga
diff --git a/functest/ci/exec_test.sh b/functest/ci/exec_test.sh
deleted file mode 100755
index 6f776101d..000000000
--- a/functest/ci/exec_test.sh
+++ /dev/null
@@ -1,148 +0,0 @@
-#!/bin/bash
-
-#
-# Author: Jose Lausuch (jose.lausuch@ericsson.com)
-# Morgan Richomme (morgan.richomme@orange.com)
-# Installs the Functest framework within the Docker container
-# and run the tests automatically
-#
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-
-usage="Script to trigger the tests automatically.
-
-usage:
- bash $(basename "$0") [-h|--help] [-t <test_name>]
-
-where:
- -h|--help show this help text
- -r|--report push results to database (false by default)
- -s|--serial run Tempest tests in one thread
- -t|--test run specific test case
- <test_name>"
-
-
-report=""
-serial=false
-
-# Get the list of runnable tests
-# Check if we are in CI mode
-debug=""
-if [[ "${CI_DEBUG,,}" == "true" ]];then
- debug="--debug"
-fi
-
-FUNCTEST_REPO_DIR=${REPOS_DIR}/functest
-FUNCTEST_TEST_DIR=${REPOS_DIR}/functest/functest/opnfv_tests
-FUNCTEST_CONF_DIR=/home/opnfv/functest/conf
-
-export PYTHONUNBUFFERED=1
-
-function odl_tests(){
- keystone_ip=$(openstack catalog show identity |grep publicURL| cut -f3 -d"/" | cut -f1 -d":")
- neutron_ip=$(openstack catalog show network | grep publicURL | cut -f3 -d"/" | cut -f1 -d":")
- odl_ip=${neutron_ip}
- odl_port=8080
- odl_restport=8181
- if [ "$INSTALLER_TYPE" == "fuel" ]; then
- odl_port=8282
- elif [ "$INSTALLER_TYPE" == "apex" ]; then
- odl_ip=$SDN_CONTROLLER_IP
- odl_port=8081
- odl_restport=8081
- elif [ "$INSTALLER_TYPE" == "netvirt" ]; then
- odl_ip=$SDN_CONTROLLER_IP
- odl_port=8081
- odl_restport=8081
- elif [ "$INSTALLER_TYPE" == "joid" ]; then
- odl_ip=$SDN_CONTROLLER
- elif [ "$INSTALLER_TYPE" == "compass" ]; then
- odl_port=8181
- else
- odl_ip=$SDN_CONTROLLER_IP
- fi
-}
-
-
-
-function run_test(){
- test_name=$1
- serial_flag=""
- if [ $serial == "true" ]; then
- serial_flag="-s"
- fi
-
- case $test_name in
- "healthcheck")
- ${FUNCTEST_TEST_DIR}/openstack/healthcheck/healthcheck.sh
- ;;
- "odl")
- odl_tests
- [[ "$report" == "-r" ]] && args=-p
- ${FUNCTEST_TEST_DIR}/sdn/odl/odl.py \
- --keystoneip $keystone_ip \
- --neutronip $neutron_ip \
- --odlip $odl_ip \
- --odlrestconfport $odl_restport \
- --odlwebport $odl_port \
- --ospassword ${OS_PASSWORD} \
- --ostenantname ${OS_TENANT_NAME} \
- --osusername ${OS_USERNAME} \
- ${args}
- ;;
- "ovno")
- # suite under rewritting for colorado
- # no need to run anything until refactoring done
- # ${REPOS_DIR}/ovno/Testcases/RunTests.sh
- ;;
- *)
- echo "The test case '${test_name}' does not exist."
- exit 1
- esac
-
- if [[ $? != 0 ]]; then exit 1
- else exit 0
- fi
-}
-
-
-# Parse parameters
-while [[ $# > 0 ]]
- do
- key="$1"
- case $key in
- -h|--help)
- echo "$usage"
- exit 0
- shift
- ;;
- -r|--report)
- report="-r"
- ;;
- -s|--serial)
- serial=true
- ;;
- -t|--test|--tests)
- TEST="$2"
- shift
- ;;
- *)
- echo "unknown option $1 $2"
- exit 1
- ;;
- esac
- shift # past argument or value
-done
-
-
-# Source credentials
-echo "Sourcing Credentials ${creds} to run the test.."
-source ${creds}
-
-
-# Run test
-run_test $TEST
diff --git a/functest/ci/run_tests.py b/functest/ci/run_tests.py
index 4193e48f1..4a47ba570 100755
--- a/functest/ci/run_tests.py
+++ b/functest/ci/run_tests.py
@@ -31,13 +31,6 @@ from functest.utils.constants import CONST
logger = ft_logger.Logger("run_tests").getLogger()
-""" global variables """
-EXEC_SCRIPT = ("%s/functest/ci/exec_test.sh" % CONST.dir_repo_functest)
-
-# This will be the return code of this script. If any of the tests fails,
-# this variable will change to Result.EX_ERROR
-
-
class Result(enum.Enum):
EX_OK = os.EX_OK
EX_ERROR = -1
@@ -114,7 +107,7 @@ def update_test_info(test_name, result, duration):
"duration": duration})
-def get_run_dict_if_defined(testname):
+def get_run_dict(testname):
try:
dict = ft_utils.get_dict_by_test(testname)
if not dict:
@@ -138,7 +131,7 @@ def run_test(test, tier_name, testcases=None):
logger.debug("\n%s" % test)
source_rc_file()
- if GlobalVariables.CLEAN_FLAG:
+ if test.needs_clean() and GlobalVariables.CLEAN_FLAG:
generate_os_snapshot()
flags = (" -t %s" % (test_name))
@@ -146,12 +139,13 @@ def run_test(test, tier_name, testcases=None):
flags += " -r"
result = testcase_base.TestcaseBase.EX_RUN_ERROR
- run_dict = get_run_dict_if_defined(test_name)
+ run_dict = get_run_dict(test_name)
if run_dict:
try:
module = importlib.import_module(run_dict['module'])
cls = getattr(module, run_dict['class'])
test_case = cls()
+
try:
kwargs = run_dict['args']
result = test_case.run(**kwargs)
@@ -168,14 +162,11 @@ def run_test(test, tier_name, testcases=None):
logger.exception("Cannot get class {}".format(
run_dict['class']))
else:
- cmd = ("%s%s" % (EXEC_SCRIPT, flags))
- logger.info("Executing command {} because {} "
- "doesn't implement the new framework".format(
- cmd, test_name))
- result = ft_utils.execute_command(cmd)
+ raise Exception("Cannot import the class for the test case.")
- if GlobalVariables.CLEAN_FLAG:
+ if test.needs_clean() and GlobalVariables.CLEAN_FLAG:
cleanup()
+
end = datetime.datetime.now()
duration = (end - start).seconds
duration_str = ("%02d:%02d" % divmod(duration, 60))
diff --git a/functest/ci/testcases.yaml b/functest/ci/testcases.yaml
index 77cd1ae64..1a1d0f4b9 100755
--- a/functest/ci/testcases.yaml
+++ b/functest/ci/testcases.yaml
@@ -11,6 +11,7 @@ tiers:
name: snaps_health_check
criteria: 'status == "PASS"'
blocking: true
+ clean_flag: false
description: >-
This test case creates executes the SimpleHealthCheck
Python test class which creates an, image, flavor, network,
@@ -26,6 +27,7 @@ tiers:
name: connection_check
criteria: 'status == "PASS"'
blocking: true
+ clean_flag: false
description: >-
This test case verifies the retrieval of OpenStack clients:
Keystone, Glance, Neutron and Nova and may perform some
@@ -44,6 +46,7 @@ tiers:
name: api_check
criteria: 'status == "PASS"'
blocking: true
+ clean_flag: false
description: >-
This test case verifies the retrieval of OpenStack clients:
Keystone, Glance, Neutron and Nova and may perform some
@@ -68,6 +71,7 @@ tiers:
name: vping_ssh
criteria: 'status == "PASS"'
blocking: true
+ clean_flag: true
description: >-
This test case verifies: 1) SSH to an instance using floating
IPs over the public network. 2) Connectivity between 2 instances
@@ -83,6 +87,7 @@ tiers:
name: vping_userdata
criteria: 'status == "PASS"'
blocking: true
+ clean_flag: true
description: >-
This test case verifies: 1) Boot a VM with given userdata.
2) Connectivity between 2 instances over a private network.
@@ -97,6 +102,7 @@ tiers:
name: tempest_smoke_serial
criteria: 'success_rate == 100%'
blocking: false
+ clean_flag: false
description: >-
This test case runs the smoke subset of the OpenStack
Tempest suite. The list of test cases is generated by
@@ -113,6 +119,7 @@ tiers:
name: rally_sanity
criteria: 'success_rate == 100%'
blocking: false
+ clean_flag: false
description: >-
This test case runs a sub group of tests of the OpenStack
Rally suite in smoke mode.
@@ -127,6 +134,7 @@ tiers:
name: odl
criteria: 'success_rate == 100%'
blocking: true
+ clean_flag: false
description: >-
Test Suite for the OpenDaylight SDN Controller. It
integrates some test suites from upstream using
@@ -146,6 +154,7 @@ tiers:
name: odl_netvirt
criteria: 'success_rate == 100%'
blocking: true
+ clean_flag: false
description: >-
Test Suite for the OpenDaylight SDN Controller when
the NetVirt features are installed. It integrates
@@ -167,6 +176,7 @@ tiers:
name: onos
criteria: 'status == "PASS"'
blocking: true
+ clean_flag: true
description: >-
Test Suite for the ONOS SDN Controller. It integrates
some test suites from upstream using TestON as the test
@@ -182,6 +192,7 @@ tiers:
name: snaps_smoke
criteria: 'status == "PASS"'
blocking: false
+ clean_flag: false
description: >-
This test case contains tests that setup and destroy
environments with VMs with and without Floating IPs
@@ -210,6 +221,7 @@ tiers:
name: promise
criteria: 'success_rate == 100%'
blocking: false
+ clean_flag: true
description: >-
Test suite from Promise project.
dependencies:
@@ -223,6 +235,7 @@ tiers:
name: doctor
criteria: 'status == "PASS"'
blocking: false
+ clean_flag: true
description: >-
Test suite from Doctor project.
dependencies:
@@ -236,6 +249,7 @@ tiers:
name: bgpvpn
criteria: 'status == "PASS"'
blocking: false
+ clean_flag: true
description: >-
Test suite from SDNVPN project.
dependencies:
@@ -249,6 +263,7 @@ tiers:
name: security_scan
criteria: 'status == "PASS"'
blocking: false
+ clean_flag: true
description: >-
Simple Security Scan
dependencies:
@@ -261,6 +276,7 @@ tiers:
# name: copper
# criteria: 'status == "PASS"'
# blocking: false
+# clean_flag: true
# description: >-
# Test suite for policy management based on OpenStack Congress
# dependencies:
@@ -273,6 +289,7 @@ tiers:
name: multisite
criteria: 'success_rate == 100%'
blocking: false
+ clean_flag: false
description: >-
Test suite from kingbird
dependencies:
@@ -285,6 +302,7 @@ tiers:
name: odl-sfc
criteria: 'status == "PASS"'
blocking: false
+ clean_flag: true
description: >-
Test suite for odl-sfc to test two chains and two SFs
dependencies:
@@ -297,6 +315,7 @@ tiers:
name: onos_sfc
criteria: 'status == "PASS"'
blocking: true
+ clean_flag: true
description: >-
Test Suite for onos-sfc to test sfc function.
dependencies:
@@ -309,6 +328,7 @@ tiers:
name: parser
criteria: 'ret == 0'
blocking: false
+ clean_flag: true
description: >-
Test suite from Parser project.
dependencies:
@@ -321,6 +341,7 @@ tiers:
name: domino
criteria: 'status == "PASS"'
blocking: false
+ clean_flag: true
description: >-
Test suite from Domino project.
dependencies:
@@ -333,6 +354,7 @@ tiers:
name: orchestra
criteria: 'ret == 0'
blocking: false
+ clean_flag: true
description: >-
Test OpenBaton (Orchestra) stack
dependencies:
@@ -345,6 +367,7 @@ tiers:
name: netready
criteria: 'status == "PASS"'
blocking: false
+ clean_flag: true
description: >-
Test suite from Netready project.
dependencies:
@@ -357,6 +380,7 @@ tiers:
name: barometer
criteria: 'status == "PASS"'
blocking: false
+ clean_flag: true
description: >-
Test suite for the Barometer project. Separate tests verify the
proper configuration and functionality of the following
@@ -379,6 +403,7 @@ tiers:
# name: tempest_full_parallel
# criteria: 'success_rate >= 80%'
# blocking: false
+# clean_flag: false
# description: >-
# The list of test cases is generated by
# Tempest automatically and depends on the parameters of
@@ -393,6 +418,7 @@ tiers:
name: tempest_defcore
criteria: 'success_rate == 100%'
blocking: false
+ clean_flag: false
description: >-
This is the set of Tempest test cases created by OpenStack
Interop Working Group for certification purposes.
@@ -406,6 +432,7 @@ tiers:
name: tempest_custom
criteria: 'success_rate == 100%'
blocking: false
+ clean_flag: false
description: >-
The test case allows running a customized list of tempest
test cases defined in a file under
@@ -422,6 +449,7 @@ tiers:
# name: rally_full
# criteria: 'success_rate >= 90%'
# blocking: false
+# clean_flag: false
# description: >-
# This test case runs the full suite of scenarios of the OpenStack
# Rally suite using several threads and iterations.
@@ -443,6 +471,7 @@ tiers:
name: cloudify_ims
criteria: 'status == "PASS"'
blocking: false
+ clean_flag: true
description: >-
This test case deploys an OpenSource vIMS solution from Clearwater
using the Cloudify orchestrator. It also runs some signaling traffic.
@@ -456,6 +485,7 @@ tiers:
name: aaa
criteria: 'ret == 0'
blocking: false
+ clean_flag: true
description: >-
Test suite from Parser project.
dependencies:
@@ -469,6 +499,7 @@ tiers:
name: juju_epc
criteria: 'ret == 0'
blocking: false
+ clean_flag: true
description: >-
Test suite from OAI project, vEPC deployed with Juju.
dependencies:
@@ -482,6 +513,7 @@ tiers:
name: orchestra_ims
criteria: 'ret == 0'
blocking: false
+ clean_flag: true
description: >-
VNF deployment with OpenBaton (Orchestra)
dependencies:
@@ -495,6 +527,7 @@ tiers:
name: opera_ims
criteria: 'ret == 0'
blocking: false
+ clean_flag: true
description: >-
Evolution of vIMS
dependencies:
@@ -508,6 +541,7 @@ tiers:
name: vyos_vrouter
criteria: 'status == "PASS"'
blocking: false
+ clean_flag: true
description: >-
This test case is vRouter testing.
dependencies:
diff --git a/functest/ci/tier_builder.py b/functest/ci/tier_builder.py
index dae7c73e8..f4c6f70fd 100755
--- a/functest/ci/tier_builder.py
+++ b/functest/ci/tier_builder.py
@@ -50,6 +50,7 @@ class TierBuilder(object):
dependency=dep,
criteria=dic_testcase['criteria'],
blocking=dic_testcase['blocking'],
+ clean_flag=dic_testcase['clean_flag'],
description=dic_testcase['description'])
if testcase.is_compatible(self.ci_installer, self.ci_scenario):
tier.add_test(testcase)
diff --git a/functest/ci/tier_handler.py b/functest/ci/tier_handler.py
index 127986bf3..6b4864b5b 100755
--- a/functest/ci/tier_handler.py
+++ b/functest/ci/tier_handler.py
@@ -104,12 +104,18 @@ class Tier(object):
class TestCase(object):
- def __init__(self, name, dependency, criteria, blocking, description=""):
+ def __init__(self, name,
+ dependency,
+ criteria,
+ blocking,
+ clean_flag,
+ description=""):
self.name = name
self.dependency = dependency
- self.description = description
self.criteria = criteria
self.blocking = blocking
+ self.clean_flag = clean_flag
+ self.description = description
@staticmethod
def is_none(item):
@@ -138,6 +144,9 @@ class TestCase(object):
def is_blocking(self):
return self.blocking
+ def needs_clean(self):
+ return self.clean_flag
+
def __str__(self):
lines = split_text(self.description, LINE_LENGTH - 6)
diff --git a/functest/opnfv_tests/features/security_scan.py b/functest/opnfv_tests/features/security_scan.py
index 2db44175d..58f0ec748 100755
--- a/functest/opnfv_tests/features/security_scan.py
+++ b/functest/opnfv_tests/features/security_scan.py
@@ -17,8 +17,8 @@ class SecurityScan(base.FeatureBase):
super(SecurityScan, self).__init__(project='securityscanning',
case='security_scan',
repo='dir_repo_securityscan')
- self.cmd = ('bash {0} && '
+ self.cmd = ('. {0}/stackrc && '
'cd {1} && '
'python security_scan.py --config config.ini && '
- 'cd -'.format(CONST.openstack_creds,
+ 'cd -'.format(CONST.dir_functest_conf,
self.repo))
diff --git a/functest/opnfv_tests/openstack/snaps/health_check.py b/functest/opnfv_tests/openstack/snaps/health_check.py
index 993c1000c..c19bf39c6 100644
--- a/functest/opnfv_tests/openstack/snaps/health_check.py
+++ b/functest/opnfv_tests/openstack/snaps/health_check.py
@@ -28,7 +28,12 @@ class HealthCheck(PyTestSuiteRunner):
self.case_name = "snaps_health_check"
ext_net_name = snaps_utils.get_ext_net_name()
+ image_custom_config = None
+ if hasattr(CONST, 'snaps_health_check'):
+ image_custom_config = CONST.snaps_health_check
+
self.suite.addTest(
OSIntegrationTestCase.parameterize(
SimpleHealthCheck, CONST.openstack_creds, ext_net_name,
- use_keystone=CONST.snaps_use_keystone))
+ use_keystone=CONST.snaps_use_keystone,
+ image_metadata=image_custom_config))
diff --git a/functest/opnfv_tests/vnf/ims/orchestra_ims.py b/functest/opnfv_tests/vnf/ims/orchestra_ims.py
index 352b609b0..42b218e62 100644
--- a/functest/opnfv_tests/vnf/ims/orchestra_ims.py
+++ b/functest/opnfv_tests/vnf/ims/orchestra_ims.py
@@ -23,6 +23,45 @@ from functest.utils.constants import CONST
from org.openbaton.cli.agents.agents import MainAgent
from org.openbaton.cli.errors.errors import NfvoException
+# ----------------------------------------------------------
+#
+# UTILS
+#
+# -----------------------------------------------------------
+
+
+def get_config(parameter, file):
+ """
+ Returns the value of a given parameter in file.yaml
+ parameter must be given in string format with dots
+ Example: general.openstack.image_name
+ """
+ with open(file) as f:
+ file_yaml = yaml.safe_load(f)
+ f.close()
+ value = file_yaml
+ for element in parameter.split("."):
+ value = value.get(element)
+ if value is None:
+ raise ValueError("The parameter %s is not defined in"
+ " %s" % (parameter, file))
+ return value
+
+
+def download_and_add_image_on_glance(glance, image_name,
+ image_url, data_dir):
+ dest_path = data_dir
+ if not os.path.exists(dest_path):
+ os.makedirs(dest_path)
+ file_name = image_url.rsplit('/')[-1]
+ if not ft_utils.download_url(image_url, dest_path):
+ return False
+ image = os_utils.create_glance_image(
+ glance, image_name, dest_path + file_name)
+ if not image:
+ return False
+ return image
+
def servertest(host, port):
args = socket.getaddrinfo(host, port, socket.AF_INET, socket.SOCK_STREAM)
@@ -38,6 +77,7 @@ def servertest(host, port):
class ImsVnf(vnf_base.VnfOnBoardingBase):
+
def __init__(self, project='functest', case='orchestra_ims',
repo='', cmd=''):
super(ImsVnf, self).__init__(project, case, repo, cmd)
@@ -54,6 +94,7 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
self.ob_projectid = ""
self.keystone_client = os_utils.get_keystone_client()
self.ob_nsr_id = ""
+ self.nsr = None
self.main_agent = None
# vIMS Data directory creation
if not os.path.exists(self.data_dir):
@@ -66,9 +107,14 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
raise Exception("Orchestra VNF config file not found")
config_file = self.case_dir + self.config
self.imagename = get_config("openbaton.imagename", config_file)
+ self.bootstrap_link = get_config("openbaton.bootstrap_link",
+ config_file)
+ self.bootstrap_config_link = get_config(
+ "openbaton.bootstrap_config_link", config_file)
self.market_link = get_config("openbaton.marketplace_link",
config_file)
self.images = get_config("tenant_images", config_file)
+ self.ims_conf = get_config("vIMS", config_file)
def deploy_orchestrator(self, **kwargs):
self.logger.info("Additional pre-configuration steps")
@@ -77,6 +123,7 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
glance_client = os_utils.get_glance_client()
# Import images if needed
+ # needs some images
self.logger.info("Upload some OS images if it doesn't exist")
temp_dir = os.path.join(self.data_dir, "tmp/")
for image_name, image_url in self.images.iteritems():
@@ -106,9 +153,9 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
"192.168.100.0/24")
# orchestrator VM flavor
- self.logger.info("Check medium Flavor is available, if not create one")
+ self.logger.info("Check if Flavor is available, if not create one")
flavor_exist, flavor_id = os_utils.get_or_create_flavor(
- "m1.medium",
+ "orchestra",
"4096",
'20',
'2',
@@ -129,13 +176,14 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
self.logger.error("Cannot create floating IP.")
userdata = "#!/bin/bash\n"
+ userdata += "echo \"Executing userdata...\"\n"
userdata += "set -x\n"
userdata += "set -e\n"
+ userdata += "echo \"Set nameserver to '8.8.8.8'...\"\n"
userdata += "echo \"nameserver 8.8.8.8\" >> /etc/resolv.conf\n"
+ userdata += "echo \"Install curl...\"\n"
userdata += "apt-get install curl\n"
- userdata += ("echo \"rabbitmq_broker_ip=%s\" > ./config_file\n"
- % floatip)
- userdata += "echo \"mysql=no\" >> ./config_file\n"
+ userdata += "echo \"Inject public key...\"\n"
userdata += ("echo \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCuPXrV3"
"geeHc6QUdyUr/1Z+yQiqLcOskiEGBiXr4z76MK4abiFmDZ18OMQlc"
"fl0p3kS0WynVgyaOHwZkgy/DIoIplONVr2CKBKHtPK+Qcme2PVnCtv"
@@ -145,17 +193,30 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
"7YAdalAnyD/jwCHuwIrUw/lxo7UdNCmaUxeobEYyyFA1YVXzpNFZya"
"XPGAAYIJwEq/ openbaton@opnfv\" >> /home/ubuntu/.ssh/aut"
"horized_keys\n")
- userdata += "cat ./config_file\n"
- userdata += ("curl -s http://get.openbaton.org/bootstrap "
- "> ./bootstrap\n")
+ userdata += "echo \"Download bootstrap...\"\n"
+ userdata += ("curl -s %s "
+ "> ./bootstrap\n" % self.bootstrap_link)
+    userdata += ("curl -s %s "
+ "> ./config_file\n" % self.bootstrap_config_link)
+ userdata += ("echo \"Disable usage of mysql...\"\n")
+    userdata += "sed -i s/mysql=.*/mysql=no/g ./config_file\n"
+ userdata += ("echo \"Setting 'rabbitmq_broker_ip' to '%s'\"\n"
+ % floatip)
+    userdata += ("sed -i s/rabbitmq_broker_ip=localhost/rabbitmq_broker_ip"
+                 "=%s/g ./config_file\n" % floatip)
+ userdata += "echo \"Set autostart of components to 'false'\"\n"
userdata += "export OPENBATON_COMPONENT_AUTOSTART=false\n"
+ userdata += "echo \"Execute bootstrap...\"\n"
bootstrap = "sh ./bootstrap release -configFile=./config_file"
userdata += bootstrap + "\n"
-
+ userdata += "echo \"Setting 'nfvo.plugin.timeout' to '300000'\"\n"
userdata += ("echo \"nfvo.plugin.timeout=300000\" >> "
"/etc/openbaton/openbaton-nfvo.properties\n")
+ userdata += "echo \"Starting NFVO\"\n"
userdata += "service openbaton-nfvo restart\n"
+ userdata += "echo \"Starting Generic VNFM\"\n"
userdata += "service openbaton-vnfm-generic restart\n"
+ userdata += "echo \"...end of userdata...\"\n"
sg_id = os_utils.create_security_group_full(neutron_client,
"orchestra-sec-group",
@@ -200,22 +261,22 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
self.logger.info("Associating floating ip: '%s' to VM '%s' "
% (floatip, "orchestra-openbaton"))
if not os_utils.add_floating_ip(nova_client, instance.id, floatip):
- self.logger.error("Cannot associate floating IP to VM.")
self.step_failure("Cannot associate floating IP to VM.")
- self.logger.info("Waiting for nfvo to be up and running...")
+ self.logger.info("Waiting for Open Baton NFVO to be up and running...")
x = 0
while x < 100:
if servertest(floatip, "8080"):
break
else:
- self.logger.debug("openbaton is not started yet")
+ self.logger.debug(
+ "Open Baton NFVO is not started yet (%ss)" %
+ (x * 5))
time.sleep(5)
x += 1
if x == 100:
- self.logger.error("Openbaton is not started correctly")
- self.step_failure("Openbaton is not started correctly")
+ self.step_failure("Open Baton is not started correctly")
self.ob_ip = floatip
self.ob_password = "openbaton"
@@ -223,10 +284,10 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
self.ob_https = False
self.ob_port = "8080"
- self.logger.info("Deploy orchestrator: OK")
+ self.logger.info("Deploy Open Baton NFVO: OK")
def deploy_vnf(self):
- self.logger.info("vIMS Deployment")
+ self.logger.info("Starting vIMS Deployment...")
self.main_agent = MainAgent(nfvo_ip=self.ob_ip,
nfvo_port=self.ob_port,
@@ -235,15 +296,16 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
username=self.ob_username,
password=self.ob_password)
+ self.logger.info("Getting project 'default'...")
project_agent = self.main_agent.get_agent("project", self.ob_projectid)
for p in json.loads(project_agent.find()):
if p.get("name") == "default":
self.ob_projectid = p.get("id")
+ self.logger.info("Found project 'default': %s" % p)
break
self.logger.debug("project id: %s" % self.ob_projectid)
if self.ob_projectid == "":
- self.logger.error("Default project id was not found!")
self.step_failure("Default project id was not found!")
vim_json = {
@@ -252,9 +314,6 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
"tenant": os_utils.get_credentials().get("tenant_name"),
"username": os_utils.get_credentials().get("username"),
"password": os_utils.get_credentials().get("password"),
- "keyPair": "opnfv",
- # TODO change the keypair to correct value
- # or upload a correct one or remove it
"securityGroups": [
"default",
"orchestra-sec-group"
@@ -267,7 +326,7 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
}
}
- self.logger.debug("vim: %s" % vim_json)
+ self.logger.debug("Registering VIM: %s" % vim_json)
self.main_agent.get_agent(
"vim",
@@ -280,7 +339,7 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
try:
self.logger.info("sending: %s" % self.market_link)
nsd = market_agent.create(entity=self.market_link)
- self.logger.info("Onboarded nsd: " + nsd.get("name"))
+ self.logger.info("Onboarded NSD: " + nsd.get("name"))
except NfvoException as e:
self.step_failure(e.message)
@@ -290,34 +349,77 @@ class ImsVnf(vnf_base.VnfOnBoardingBase):
if nsd_id is None:
self.step_failure("NSD not onboarded correctly")
- nsr = None
try:
- nsr = nsr_agent.create(nsd_id)
+ self.nsr = nsr_agent.create(nsd_id)
except NfvoException as e:
self.step_failure(e.message)
- if nsr is None:
- self.step_failure("NSR not deployed correctly")
+ if self.nsr.get('code') is not None:
+ self.logger.error(
+ "vIMS cannot be deployed: %s -> %s" %
+ (self.nsr.get('code'), self.nsr.get('message')))
+ self.step_failure("vIMS cannot be deployed")
i = 0
- self.logger.info("waiting NSR to go to active...")
- while nsr.get("status") != 'ACTIVE':
+ self.logger.info("Waiting for NSR to go to ACTIVE...")
+ while self.nsr.get("status") != 'ACTIVE' and self.nsr.get(
+ "status") != 'ERROR':
i += 1
- if i == 100:
- self.step_failure("After %s sec the nsr did not go to active.."
- % 5 * 100)
+ if i == 150:
+ self.step_failure("After %s sec the NSR did not go to ACTIVE.."
+                                  % (5 * i))
time.sleep(5)
- nsr = json.loads(nsr_agent.find(nsr.get('id')))
+ self.nsr = json.loads(nsr_agent.find(self.nsr.get('id')))
- deploy_vnf = {'status': "PASS", 'result': nsr}
- self.ob_nsr_id = nsr.get("id")
- self.logger.info("Deploy VNF: OK")
+ if self.nsr.get("status") == 'ACTIVE':
+ deploy_vnf = {'status': "PASS", 'result': self.nsr}
+ self.logger.info("Deploy VNF: OK")
+ else:
+ deploy_vnf = {'status': "FAIL", 'result': self.nsr}
+ self.step_failure("Deploy VNF: ERROR")
+ self.ob_nsr_id = self.nsr.get("id")
+ self.logger.info(
+ "Sleep for 60s to ensure that all services are up and running...")
+ time.sleep(60)
return deploy_vnf
def test_vnf(self):
# Adaptations probably needed
# code used for cloudify_ims
# ruby client on jumphost calling the vIMS on the SUT
+ self.logger.info(
+ "Testing if %s works properly..." %
+ self.nsr.get('name'))
+ for vnfr in self.nsr.get('vnfr'):
+ self.logger.info(
+ "Checking ports %s of VNF %s" %
+ (self.ims_conf.get(
+ vnfr.get('name')).get('ports'),
+ vnfr.get('name')))
+ for vdu in vnfr.get('vdu'):
+ for vnfci in vdu.get('vnfc_instance'):
+ self.logger.debug(
+ "Checking ports of VNFC instance %s" %
+ vnfci.get('hostname'))
+ for floatingIp in vnfci.get('floatingIps'):
+ self.logger.debug(
+ "Testing %s:%s" %
+ (vnfci.get('hostname'), floatingIp.get('ip')))
+ for port in self.ims_conf.get(
+ vnfr.get('name')).get('ports'):
+ if servertest(floatingIp.get('ip'), port):
+ self.logger.info(
+ "VNFC instance %s is reachable at %s:%s" %
+ (vnfci.get('hostname'),
+ floatingIp.get('ip'),
+ port))
+ else:
+ self.logger.error(
+ "VNFC instance %s is not reachable "
+ "at %s:%s" % (vnfci.get('hostname'),
+ floatingIp.get('ip'), port))
+ self.step_failure("Test VNF: ERROR")
+ self.logger.info("Test VNF: OK")
return
def clean(self):
@@ -349,42 +451,5 @@ if __name__ == '__main__':
test = ImsVnf()
test.deploy_orchestrator()
test.deploy_vnf()
+ test.test_vnf()
test.clean()
-
-
-# ----------------------------------------------------------
-#
-# UTILS
-#
-# -----------------------------------------------------------
-def get_config(parameter, file):
- """
- Returns the value of a given parameter in file.yaml
- parameter must be given in string format with dots
- Example: general.openstack.image_name
- """
- with open(file) as f:
- file_yaml = yaml.safe_load(f)
- f.close()
- value = file_yaml
- for element in parameter.split("."):
- value = value.get(element)
- if value is None:
- raise ValueError("The parameter %s is not defined in"
- " reporting.yaml" % parameter)
- return value
-
-
-def download_and_add_image_on_glance(glance, image_name,
- image_url, data_dir):
- dest_path = data_dir
- if not os.path.exists(dest_path):
- os.makedirs(dest_path)
- file_name = image_url.rsplit('/')[-1]
- if not ft_utils.download_url(image_url, dest_path):
- return False
- image = os_utils.create_glance_image(
- glance, image_name, dest_path + file_name)
- if not image:
- return False
- return image
diff --git a/functest/opnfv_tests/vnf/ims/orchestra_ims.yaml b/functest/opnfv_tests/vnf/ims/orchestra_ims.yaml
index 2fb33df5d..86d6e604a 100644
--- a/functest/opnfv_tests/vnf/ims/orchestra_ims.yaml
+++ b/functest/opnfv_tests/vnf/ims/orchestra_ims.yaml
@@ -2,6 +2,18 @@ tenant_images:
ubuntu_14.04: http://cloud-images.ubuntu.com/trusty/current/trusty-server-cloudimg-amd64-disk1.img
openims: http://marketplace.openbaton.org:8082/api/v1/images/52e2ccc0-1dce-4663-894d-28aab49323aa/img
openbaton:
- bootstrap: sh <(curl -s http://get.openbaton.org/bootstrap) release -configFile=
+ bootstrap_link: http://get.openbaton.org/bootstrap
+ bootstrap_config_link: http://get.openbaton.org/bootstrap-config-file
marketplace_link: http://marketplace.openbaton.org:8082/api/v1/nsds/fokus/OpenImsCore/3.2.0/json
imagename: ubuntu_14.04
+vIMS:
+ scscf:
+ ports: [3870, 6060]
+ pcscf:
+ ports: [4060]
+ icscf:
+ ports: [3869, 5060]
+ fhoss:
+ ports: [3868]
+ bind9:
+ ports: [] \ No newline at end of file
diff --git a/functest/tests/unit/ci/__init__.py b/functest/tests/unit/ci/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/functest/tests/unit/ci/__init__.py
diff --git a/functest/tests/unit/ci/test_prepare_env.py b/functest/tests/unit/ci/test_prepare_env.py
new file mode 100644
index 000000000..41b3f6a14
--- /dev/null
+++ b/functest/tests/unit/ci/test_prepare_env.py
@@ -0,0 +1,345 @@
+#!/usr/bin/env python
+
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+import logging
+import unittest
+
+import mock
+
+from functest.ci import prepare_env
+from functest.tests.unit import test_utils
+from functest.utils.constants import CONST
+from opnfv.utils import constants as opnfv_constants
+
+
+class PrepareEnvTesting(unittest.TestCase):
+
+ logging.disable(logging.CRITICAL)
+
+ @mock.patch('functest.ci.prepare_env.logger.info')
+ def test_print_separator(self, mock_logger_info):
+ str = "=============================================="
+ prepare_env.print_separator()
+ mock_logger_info.assert_called_once_with(str)
+
+ @mock.patch('functest.ci.prepare_env.logger.info')
+ @mock.patch('functest.ci.prepare_env.logger.warning')
+ def test_check_env_variables_missing_inst_type(self, mock_logger_warn,
+ mock_logger_info):
+ CONST.INSTALLER_TYPE = None
+ prepare_env.check_env_variables()
+ mock_logger_info.assert_any_call("Checking environment variables"
+ "...")
+ mock_logger_warn.assert_any_call("The env variable 'INSTALLER_TYPE'"
+ " is not defined.")
+
+ @mock.patch('functest.ci.prepare_env.logger.info')
+ @mock.patch('functest.ci.prepare_env.logger.warning')
+ def test_check_env_variables_missing_inst_ip(self, mock_logger_warn,
+ mock_logger_info):
+ CONST.INSTALLER_IP = None
+ prepare_env.check_env_variables()
+ mock_logger_info.assert_any_call("Checking environment variables"
+ "...")
+ mock_logger_warn.assert_any_call("The env variable 'INSTALLER_IP'"
+ " is not defined. It is needed to"
+ " fetch the OpenStack credentials."
+ " If the credentials are not"
+ " provided to the container as a"
+ " volume, please add this env"
+ " variable to the 'docker run'"
+ " command.")
+
+ @mock.patch('functest.ci.prepare_env.logger.info')
+ @mock.patch('functest.ci.prepare_env.logger.warning')
+ def test_check_env_variables_with_inst_ip(self, mock_logger_warn,
+ mock_logger_info):
+ CONST.INSTALLER_IP = mock.Mock()
+ prepare_env.check_env_variables()
+ mock_logger_info.assert_any_call("Checking environment variables"
+ "...")
+ mock_logger_info.assert_any_call(test_utils.
+ SubstrMatch(" INSTALLER_IP="))
+
+ @mock.patch('functest.ci.prepare_env.logger.info')
+ @mock.patch('functest.ci.prepare_env.logger.warning')
+ def test_check_env_variables_missing_scenario(self, mock_logger_warn,
+ mock_logger_info):
+ CONST.DEPLOY_SCENARIO = None
+ prepare_env.check_env_variables()
+ mock_logger_info.assert_any_call("Checking environment variables"
+ "...")
+ mock_logger_warn.assert_any_call("The env variable"
+ " 'DEPLOY_SCENARIO' is not defined"
+ ". Setting CI_SCENARIO=undefined.")
+
+ @mock.patch('functest.ci.prepare_env.logger.info')
+ @mock.patch('functest.ci.prepare_env.logger.warning')
+ def test_check_env_variables_with_scenario(self, mock_logger_warn,
+ mock_logger_info):
+ CONST.DEPLOY_SCENARIO = mock.Mock()
+ prepare_env.check_env_variables()
+ mock_logger_info.assert_any_call("Checking environment variables"
+ "...")
+ mock_logger_info.assert_any_call(test_utils.
+ SubstrMatch("DEPLOY_SCENARIO="))
+
+ @mock.patch('functest.ci.prepare_env.logger.info')
+ @mock.patch('functest.ci.prepare_env.logger.warning')
+ def test_check_env_variables_with_ci_debug(self, mock_logger_warn,
+ mock_logger_info):
+ CONST.CI_DEBUG = mock.Mock()
+ prepare_env.check_env_variables()
+ mock_logger_info.assert_any_call("Checking environment variables"
+ "...")
+ mock_logger_info.assert_any_call(test_utils.
+ SubstrMatch(" CI_DEBUG="))
+
+ @mock.patch('functest.ci.prepare_env.logger.info')
+ @mock.patch('functest.ci.prepare_env.logger.warning')
+ def test_check_env_variables_with_node(self, mock_logger_warn,
+ mock_logger_info):
+ CONST.NODE_NAME = mock.Mock()
+ prepare_env.check_env_variables()
+ mock_logger_info.assert_any_call("Checking environment variables"
+ "...")
+ mock_logger_info.assert_any_call(test_utils.
+ SubstrMatch(" NODE_NAME="))
+
+ @mock.patch('functest.ci.prepare_env.logger.info')
+ @mock.patch('functest.ci.prepare_env.logger.warning')
+ def test_check_env_variables_with_build_tag(self, mock_logger_warn,
+ mock_logger_info):
+ CONST.BUILD_TAG = mock.Mock()
+ prepare_env.check_env_variables()
+ mock_logger_info.assert_any_call("Checking environment variables"
+ "...")
+
+ mock_logger_info.assert_any_call(test_utils.
+ SubstrMatch(" BUILD_TAG="))
+
+ @mock.patch('functest.ci.prepare_env.logger.info')
+ @mock.patch('functest.ci.prepare_env.logger.warning')
+ def test_check_env_variables_with_is_ci_run(self, mock_logger_warn,
+ mock_logger_info):
+ CONST.IS_CI_RUN = mock.Mock()
+ prepare_env.check_env_variables()
+ mock_logger_info.assert_any_call("Checking environment variables"
+ "...")
+
+ mock_logger_info.assert_any_call(test_utils.
+ SubstrMatch(" IS_CI_RUN="))
+
+ @mock.patch('functest.ci.prepare_env.logger.info')
+ @mock.patch('functest.ci.prepare_env.logger.debug')
+ def test_create_directories_missing_dir(self, mock_logger_debug,
+ mock_logger_info):
+ with mock.patch('functest.ci.prepare_env.os.path.exists',
+ return_value=False), \
+ mock.patch('functest.ci.prepare_env.os.makedirs') \
+ as mock_method:
+ prepare_env.create_directories()
+ mock_logger_info.assert_any_call("Creating needed directories...")
+ mock_method.assert_any_call(CONST.dir_functest_conf)
+ mock_method.assert_any_call(CONST.dir_functest_data)
+ mock_logger_info.assert_any_call(" %s created." %
+ CONST.dir_functest_conf)
+ mock_logger_info.assert_any_call(" %s created." %
+ CONST.dir_functest_data)
+
+ @mock.patch('functest.ci.prepare_env.logger.info')
+ @mock.patch('functest.ci.prepare_env.logger.debug')
+ def test_create_directories_with_dir(self, mock_logger_debug,
+ mock_logger_info):
+ with mock.patch('functest.ci.prepare_env.os.path.exists',
+ return_value=True):
+ prepare_env.create_directories()
+ mock_logger_info.assert_any_call("Creating needed directories...")
+ mock_logger_debug.assert_any_call(" %s already exists." %
+ CONST.dir_functest_conf)
+ mock_logger_debug.assert_any_call(" %s already exists." %
+ CONST.dir_functest_data)
+
+ def _get_env_cred_dict(self, os_prefix=''):
+ return {'OS_USERNAME': os_prefix + 'username',
+ 'OS_PASSWORD': os_prefix + 'password',
+ 'OS_AUTH_URL': 'http://test_ip:test_port/v2.0',
+ 'OS_TENANT_NAME': os_prefix + 'tenant_name',
+ 'OS_USER_DOMAIN_NAME': os_prefix + 'user_domain_name',
+ 'OS_PROJECT_DOMAIN_NAME': os_prefix + 'project_domain_name',
+ 'OS_PROJECT_NAME': os_prefix + 'project_name',
+ 'OS_ENDPOINT_TYPE': os_prefix + 'endpoint_type',
+ 'OS_REGION_NAME': os_prefix + 'region_name'}
+
+ @mock.patch('functest.ci.prepare_env.logger.error')
+ @mock.patch('functest.ci.prepare_env.logger.info')
+ @mock.patch('functest.ci.prepare_env.logger.warning')
+ def test_source_rc_missing_rc_file(self, mock_logger_warn,
+ mock_logger_info,
+ mock_logger_error):
+ with mock.patch('functest.ci.prepare_env.os.path.isfile',
+ return_value=True), \
+ mock.patch('functest.ci.prepare_env.os.path.getsize',
+ return_value=0), \
+ self.assertRaises(Exception):
+ CONST.openstack_creds = 'test_creds'
+ prepare_env.source_rc_file()
+
+ def test_source_rc_missing_installer_ip(self):
+ with mock.patch('functest.ci.prepare_env.os.path.isfile',
+ return_value=False), \
+ self.assertRaises(Exception):
+ CONST.INSTALLER_IP = None
+ CONST.openstack_creds = 'test_creds'
+ prepare_env.source_rc_file()
+
+ def test_source_rc_missing_installer_type(self):
+ with mock.patch('functest.ci.prepare_env.os.path.isfile',
+ return_value=False), \
+ self.assertRaises(Exception):
+ CONST.INSTALLER_IP = 'test_ip'
+ CONST.openstack_creds = 'test_creds'
+ CONST.INSTALLER_TYPE = 'test_type'
+ opnfv_constants.INSTALLERS = []
+ prepare_env.source_rc_file()
+
+ def test_source_rc_missing_os_credfile_ci_inst(self):
+ with mock.patch('functest.ci.prepare_env.os.path.isfile',
+ return_value=False), \
+ mock.patch('functest.ci.prepare_env.os.path.getsize'), \
+ mock.patch('functest.ci.prepare_env.os.path.join'), \
+ mock.patch('functest.ci.prepare_env.subprocess.Popen') \
+ as mock_subproc_popen, \
+ self.assertRaises(Exception):
+ CONST.openstack_creds = 'test_creds'
+ CONST.INSTALLER_IP = None
+ CONST.INSTALLER_TYPE = 'test_type'
+ opnfv_constants.INSTALLERS = ['test_type']
+
+ process_mock = mock.Mock()
+ attrs = {'communicate.return_value': ('output', 'error'),
+ 'return_code': 1}
+ process_mock.configure_mock(**attrs)
+ mock_subproc_popen.return_value = process_mock
+
+ prepare_env.source_rc_file()
+
+ def _get_rally_creds(self):
+ return {"type": "ExistingCloud",
+ "admin": {"username": 'test_user_name',
+ "password": 'test_password',
+ "tenant": 'test_tenant'}}
+
+ @mock.patch('functest.ci.prepare_env.os_utils.get_credentials_for_rally')
+ @mock.patch('functest.ci.prepare_env.logger.info')
+ @mock.patch('functest.ci.prepare_env.ft_utils.execute_command')
+ def test_install_rally(self, mock_exec, mock_logger_info, mock_os_utils):
+
+ mock_os_utils.return_value = self._get_rally_creds()
+
+ prepare_env.install_rally()
+
+ cmd = "rally deployment destroy opnfv-rally"
+ error_msg = "Deployment %s does not exist." % \
+ CONST.rally_deployment_name
+ mock_logger_info.assert_any_call("Creating Rally environment...")
+ mock_exec.assert_any_call(cmd, error_msg=error_msg, verbose=False)
+
+ cmd = "rally deployment create --file=rally_conf.json --name="
+ cmd += CONST.rally_deployment_name
+ error_msg = "Problem while creating Rally deployment"
+ mock_exec.assert_any_call(cmd, error_msg=error_msg)
+
+ cmd = "rally deployment check"
+ error_msg = ("OpenStack not responding or "
+ "faulty Rally deployment.")
+ mock_exec.assert_any_call(cmd, error_msg=error_msg)
+
+ cmd = "rally deployment list"
+ error_msg = ("Problem while listing "
+ "Rally deployment.")
+ mock_exec.assert_any_call(cmd, error_msg=error_msg)
+
+ cmd = "rally plugin list | head -5"
+ error_msg = ("Problem while showing "
+ "Rally plugins.")
+ mock_exec.assert_any_call(cmd, error_msg=error_msg)
+
+ @mock.patch('functest.ci.prepare_env.sys.exit')
+ @mock.patch('functest.ci.prepare_env.logger.error')
+ def test_check_environment_missing_file(self, mock_logger_error,
+ mock_sys_exit):
+ with mock.patch('functest.ci.prepare_env.os.path.isfile',
+ return_value=False), \
+ self.assertRaises(Exception):
+ prepare_env.check_environment()
+
+ @mock.patch('functest.ci.prepare_env.sys.exit')
+ @mock.patch('functest.ci.prepare_env.logger.error')
+ def test_check_environment_with_error(self, mock_logger_error,
+ mock_sys_exit):
+ with mock.patch('functest.ci.prepare_env.os.path.isfile',
+ return_value=True), \
+ mock.patch("__builtin__.open", mock.mock_open(read_data='0')), \
+ self.assertRaises(Exception):
+ prepare_env.check_environment()
+
+ @mock.patch('functest.ci.prepare_env.logger.info')
+ def test_check_environment_default(self, mock_logger_info):
+ with mock.patch('functest.ci.prepare_env.os.path.isfile',
+ return_value=True):
+ with mock.patch("__builtin__.open", mock.mock_open(read_data='1')):
+ prepare_env.check_environment()
+ mock_logger_info.assert_any_call("Functest environment"
+ " is installed.")
+
+ @mock.patch('functest.ci.prepare_env.check_environment')
+ @mock.patch('functest.ci.prepare_env.create_flavor')
+ @mock.patch('functest.ci.prepare_env.install_tempest')
+ @mock.patch('functest.ci.prepare_env.install_rally')
+ @mock.patch('functest.ci.prepare_env.verify_deployment')
+ @mock.patch('functest.ci.prepare_env.patch_config_file')
+ @mock.patch('functest.ci.prepare_env.source_rc_file')
+ @mock.patch('functest.ci.prepare_env.create_directories')
+ @mock.patch('functest.ci.prepare_env.check_env_variables')
+ @mock.patch('functest.ci.prepare_env.logger.info')
+ def test_main_start(self, mock_logger_info, mock_env_var,
+ mock_create_dir, mock_source_rc, mock_patch_config,
+ mock_verify_depl, mock_install_rally,
+ mock_install_temp, mock_create_flavor,
+ mock_check_env):
+ with mock.patch("__builtin__.open", mock.mock_open()) as m:
+ args = {'action': 'start'}
+ self.assertEqual(prepare_env.main(**args), 0)
+ mock_logger_info.assert_any_call("######### Preparing Functest "
+ "environment #########\n")
+ self.assertTrue(mock_env_var.called)
+ self.assertTrue(mock_create_dir.called)
+ self.assertTrue(mock_source_rc.called)
+ self.assertTrue(mock_patch_config.called)
+ self.assertTrue(mock_verify_depl.called)
+ self.assertTrue(mock_install_rally.called)
+ self.assertTrue(mock_install_temp.called)
+ self.assertTrue(mock_create_flavor.called)
+ m.assert_called_once_with(CONST.env_active, "w")
+ self.assertTrue(mock_check_env.called)
+
+ @mock.patch('functest.ci.prepare_env.check_environment')
+ def test_main_check(self, mock_check_env):
+ args = {'action': 'check'}
+ self.assertEqual(prepare_env.main(**args), 0)
+ self.assertTrue(mock_check_env.called)
+
+ @mock.patch('functest.ci.prepare_env.logger.error')
+ def test_main_no_arg(self, mock_logger_error):
+ args = {'action': 'not_valid'}
+ self.assertEqual(prepare_env.main(**args), -1)
+ mock_logger_error.assert_called_once_with('Argument not valid.')
+
+
+if __name__ == "__main__":
+ unittest.main(verbosity=2)