summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rwxr-xr-xci/exec_test.sh16
-rwxr-xr-xci/generate_report.py6
-rwxr-xr-xci/run_tests.py16
-rw-r--r--ci/testcases.yaml2
-rw-r--r--docs/configguide/configguide.rst58
-rw-r--r--docs/configguide/index.rst32
-rw-r--r--docs/devguide/index.rst245
-rw-r--r--docs/release-notes/functest-release.rst69
-rw-r--r--docs/userguide/index.rst4
-rw-r--r--docs/userguide/introduction.rst4
-rwxr-xr-xtestcases/Controllers/ODL/OpenDaylightTesting.py45
-rw-r--r--testcases/Controllers/ODL/__init__.py0
-rw-r--r--testcases/Controllers/__init__.py0
-rwxr-xr-xtestcases/OpenStack/rally/run_rally-cert.py46
-rw-r--r--testcases/OpenStack/tempest/custom_tests/blacklist.txt27
-rwxr-xr-xtestcases/OpenStack/tempest/run_tempest.py45
-rw-r--r--testcases/OpenStack/vPing/vping_util.py23
-rwxr-xr-xtestcases/features/sfc/set-up-tacker.sh3
-rw-r--r--utils/functest_utils.py30
-rwxr-xr-xutils/openstack_utils.py42
20 files changed, 432 insertions, 281 deletions
diff --git a/ci/exec_test.sh b/ci/exec_test.sh
index 03eb2c873..deb505df9 100755
--- a/ci/exec_test.sh
+++ b/ci/exec_test.sh
@@ -39,25 +39,24 @@ fi
FUNCTEST_REPO_DIR=${repos_dir}/functest
FUNCTEST_CONF_DIR=/home/opnfv/functest/conf
+export PYTHONUNBUFFERED=1
function odl_tests(){
keystone_ip=$(openstack catalog show identity |grep publicURL| cut -f3 -d"/" | cut -f1 -d":")
neutron_ip=$(openstack catalog show network | grep publicURL | cut -f3 -d"/" | cut -f1 -d":")
odl_ip=${neutron_ip}
- odl_port=8181
+ odl_port=8080
if [ "$INSTALLER_TYPE" == "fuel" ]; then
odl_port=8282
elif [ "$INSTALLER_TYPE" == "apex" ]; then
odl_ip=$SDN_CONTROLLER_IP
+ odl_port=8181
elif [ "$INSTALLER_TYPE" == "joid" ]; then
odl_ip=$SDN_CONTROLLER
- odl_port=8080
- :
elif [ "$INSTALLER_TYPE" == "compass" ]; then
- :
+ odl_port=8181
else
odl_ip=$SDN_CONTROLLER_IP
- odl_port=8080
fi
}
@@ -80,12 +79,7 @@ function run_test(){
;;
"odl")
odl_tests
- if [[ "$report" == "-r" &&
- -n "$DEPLOY_SCENARIO" && "$DEPLOY_SCENARIO" != "none" &&
- -n "$INSTALLER_TYPE" && "$INSTALLER_TYPE" != "none" ]] &&
- env | grep NODE_NAME > /dev/null; then
- args=-p
- fi
+ [[ "$report" == "-r" ]] && args=-p
${FUNCTEST_REPO_DIR}/testcases/Controllers/ODL/OpenDaylightTesting.py \
--keystoneip $keystone_ip --neutronip $neutron_ip \
--osusername ${OS_USERNAME} --ostenantname ${OS_TENANT_NAME} \
diff --git a/ci/generate_report.py b/ci/generate_report.py
index 3ca2847bd..d2e09eb80 100755
--- a/ci/generate_report.py
+++ b/ci/generate_report.py
@@ -3,6 +3,7 @@ import os
import re
import urllib2
import functest.utils.functest_logger as ft_logger
+import functest.utils.functest_utils as ft_utils
COL_1_LEN = 25
@@ -32,8 +33,7 @@ def init(tiers_to_run):
def get_results_from_db():
- url = 'http://testresults.opnfv.org/test/api/v1/results?build_tag=' + \
- BUILD_TAG
+ url = ft_utils.get_db_url() + '/results?build_tag=' + BUILD_TAG
logger.debug("Query to rest api: %s" % url)
try:
data = json.load(urllib2.urlopen(url))
@@ -49,7 +49,7 @@ def get_data(test, results):
for test_db in results:
if test['test_name'] in test_db['case_name']:
id = test_db['_id']
- url = 'http://testresults.opnfv.org/test/api/v1/results/' + id
+ url = ft_utils.get_db_url() + '/results/' + id
test_result = test_db['criteria']
return {"url": url, "result": test_result}
diff --git a/ci/run_tests.py b/ci/run_tests.py
index 982567217..f2a119900 100755
--- a/ci/run_tests.py
+++ b/ci/run_tests.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/python -u
#
# Author: Jose Lausuch (jose.lausuch@ericsson.com)
#
@@ -13,8 +13,10 @@ import datetime
import os
import re
import sys
+
import functest.ci.generate_report as generate_report
import functest.ci.tier_builder as tb
+from functest.testcases.Controllers.ODL.OpenDaylightTesting import ODLTestCases
import functest.utils.functest_logger as ft_logger
import functest.utils.functest_utils as ft_utils
import functest.utils.openstack_clean as os_clean
@@ -100,9 +102,15 @@ def run_test(test, tier_name):
if REPORT_FLAG:
flags += " -r"
- cmd = ("%s%s" % (EXEC_SCRIPT, flags))
- logger.debug("Executing command '%s'" % cmd)
- result = ft_utils.execute_command(cmd, exit_on_error=False)
+ if test_name == 'odl':
+ result = ODLTestCases.functest_run()
+ if result and REPORT_FLAG:
+ result = ODLTestCases.push_to_db()
+ result = not result
+ else:
+ cmd = ("%s%s" % (EXEC_SCRIPT, flags))
+ logger.debug("Executing command '%s'" % cmd)
+ result = ft_utils.execute_command(cmd, exit_on_error=False)
if CLEAN_FLAG:
cleanup()
diff --git a/ci/testcases.yaml b/ci/testcases.yaml
index 7b7a7d14d..69d4a2ef1 100644
--- a/ci/testcases.yaml
+++ b/ci/testcases.yaml
@@ -17,7 +17,7 @@ tiers:
dependencies:
installer: ''
- scenario: ''
+ scenario: '^((?!lxd).)*$'
-
name: smoke
diff --git a/docs/configguide/configguide.rst b/docs/configguide/configguide.rst
index 6448d2c99..d79494399 100644
--- a/docs/configguide/configguide.rst
+++ b/docs/configguide/configguide.rst
@@ -18,8 +18,10 @@ release installed in your environment. All available tagged images can
be seen from location [FunctestDockerTags_]. For example, when running
on the first official release of the OPNFV Colorado system platform,
tag "colorado.1.0" is needed. Pulling other tags might cause some
-problems while running the tests. If you need to specifically pull the
-latest Functest docker image, then omit the tag argument::
+problems while running the tests.
+Docker images pulled without a tag specifier bear the implicitly
+assigned label "latest". If you need to specifically pull the latest
+Functest docker image, then omit the tag argument::
docker pull opnfv/functest
@@ -32,9 +34,6 @@ following docker command::
opnfv/functest brahmaputra.3.0 94b78faa94f7 4 weeks ago 874.9 MB
hello-world latest 94df4f0ce8a4 7 weeks ago 967 B
-Docker images pulled without a tag specifier bear the implicitly
-assigned label "latest", as seen above.
-
The Functest docker container environment can -in principle- be also
used with non-OPNFV official installers (e.g. 'devstack'), with the
**disclaimer** that support for such environments is outside of the
@@ -121,8 +120,9 @@ recommended parameters for invoking docker container
under the path: '/home/opnfv/functest/conf/openstack.creds'.
WARNING: If you are using the Joid installer, you must pass the
- credentials using the **-v** option. See the section
- `Accessing the Openstack credentials`_ above.
+ credentials using the **-v** option:
+ -v /var/lib/jenkins/admin-openrc:/home/opnfv/functest/conf/openstack.creds.
+ See the section `Accessing the Openstack credentials`_ above.
#. Passing deployment scenario
When running Functest against any of the supported OPNFV scenarios,
@@ -133,7 +133,7 @@ recommended parameters for invoking docker container
-e "DEPLOY_SCENARIO=os-<controller>-<nfv_feature>-<ha_mode>"
where:
os = OpenStack (No other VIM choices currently available)
- controller is one of ( nosdn | odl_l2 | odl_l3 | onos )
+ controller is one of ( nosdn | odl_l2 | odl_l3 | onos | ocl)
nfv_feature is one or more of ( ovs | kvm | sfc | bgpvpn | nofeature )
If several features are pertinent then use the underscore
character '_' to separate each feature (e.g. ovs_kvm)
@@ -143,7 +143,7 @@ recommended parameters for invoking docker container
**NOTE:** Not all possible combinations of "DEPLOY_SCENARIO" are
supported. The name passed in to the Functest Docker container
must match the scenario used when the actual OPNFV platform was
- deployed.
+ deployed. See the release notes for the list of supported scenarios.
Putting all above together, when using installer 'fuel' and an invented
INSTALLER_IP of '10.20.0.2', the recommended command to create the
@@ -243,19 +243,20 @@ illustration purposes::
Compass installer local development env usage Tips
--------------------------------------------------
-In the compass-functest local test case check and development environment, in order
-to get openstack service inside the functest container, some parameters should be
-configured during container creation, which are hard to guess for freshman. This
-section will provide the guideline, the parameters values are defaults here, which should
-be adjusted according to the settings, the complete steps are given here so as
-not to appear too abruptly.
+In the compass-functest local test case check and development environment,
+in order to get openstack service inside the functest container, some
+parameters should be configured during container creation, which are
+hard to guess for freshman. This section will provide the guideline, the
+parameters values are defaults here, which should be adjusted according
+to the settings, the complete steps are given here so as not to appear
+too abruptly.
1, Pull Functest docker image from public dockerhub::
docker pull opnfv/functest:<Tag>
-<Tag> here can be "brahmaputra.1.0", "colorado.1.0", etc. Tag omitted means the
-latest docker image::
+<Tag> here can be "brahmaputra.1.0", "colorado.1.0", etc.
+Tag omitted means the latest docker image::
docker pull opnfv/functest
@@ -273,8 +274,8 @@ To make a file used for the environment, such as 'functest-docker-env'::
INSTALLER_IP=192.168.200.2
EXTERNAL_NETWORK=ext-net
-Note: please adjust the content according to the environment, such as 'TENANT_ID'
-maybe used for some special cases.
+Note: please adjust the content according to the environment, such as
+'TENANT_ID' maybe used for some special cases.
Then to create the Functest docker::
@@ -283,8 +284,6 @@ Then to create the Functest docker::
--name <Functest_Container_Name> \
opnfv/functest:<Tag> /bin/bash
-Note: it is recommended to be run on jumpserver.
-
3, To attach Functest container
Before trying to attach the Functest container, the status can be checked by::
@@ -345,6 +344,7 @@ follows::
| |-- __init__.py
| |-- check_os.sh
| |-- config_functest.yaml
+ | |-- generate_report.py
| |-- exec_test.sh
| |-- prepare_env.py
| |-- run_tests.py
@@ -374,12 +374,12 @@ follows::
| |-- results
| `--userguide
|-- testcases
+ | |-- __init__.py
| |-- Controllers
| |-- OpenStack
- | |-- __init__.py
| |-- features
| |-- security_scan
- | `-- vIMS
+ | `-- vnf
`-- utils
|-- __init__.py
|-- functest_logger.py
@@ -694,22 +694,22 @@ docker container.
For example, try to use the **nc** command from inside the functest
docker container::
- nc -v google.com 80
- Connection to google.com 80 port [tcp/http] succeeded!
+ nc -v opnfv.org 80
+ Connection to opnfv.org 80 port [tcp/http] succeeded!
- nc -v google.com 443
- Connection to google.com 443 port [tcp/https] succeeded!
+ nc -v opnfv.org 443
+ Connection to opnfv.org 443 port [tcp/https] succeeded!
Note: In a Jumphost node based on the CentOS family OS, the **nc**
commands might not work. You can use the **curl** command instead.
- curl http://www.google.com:80
+ curl http://www.opnfv.org:80
<HTML><HEAD><meta http-equiv="content-type"
.
.
</BODY></HTML>
- curl https://www.google.com:443
+ curl https://www.opnfv.org:443
<HTML><HEAD><meta http-equiv="content-type"
.
.
diff --git a/docs/configguide/index.rst b/docs/configguide/index.rst
index 6b6c62a1b..b61bf8ff5 100644
--- a/docs/configguide/index.rst
+++ b/docs/configguide/index.rst
@@ -40,16 +40,20 @@ follows::
| | | | | | | | |
| | | Testcases | | | | | |
| | | - VIM | | | | | |
- | | | -- vPing | | | | | |
+ | | | -- healthcheck | | | | | |
+ | | | -- vPing_ssh | | | | | |
| | | -- vPing_userdata | | | | | |
- | | | -- Tempest | | | | | |
- | | | -- Rally | | | | | |
- | | | - Controller | | | | | |
+ | | | -- Tempest_smoke | | | | | |
+ | | | -- Rally_sanity | | | | | |
+ | | | -- Tempest_full | | | | | |
+ | | | -- Rally_full | | | | | |
+ | | | | | | | | |
+ | | | - SDN Controller | | | | | |
| | | -- odl | | | | | |
| | | -- onos | | | | | |
| | | | | | | | |
- | | | Features | | | | | |
- | | | - vIMS | | | | | |
+ | | | - VNF | | | | | |
+ | | | -- vIMS | | | | | |
| | | | | | | | |
| | +--------------------+ | | | | |
| | +-------------------------+ |
@@ -85,10 +89,10 @@ on any platform on any Operating System.
The automated mechanisms inside the Functest Docker container will:
- * retrieve OpenStack credentials
- * prepare the environment according to the SUT
- * perform the appropriate functional tests
- * push the test results into the OPNFV test result database
+ * Retrieve OpenStack credentials
+ * Prepare the environment according to the SUT
+ * Perform the appropriate functional tests
+ * Push the test results into the OPNFV test result database
This Docker image can be integrated into CI or deployed independently.
@@ -99,11 +103,13 @@ communities.
The functional test cases are described in the Functest User Guide `[2]`_
+
Prerequisites
=============
The OPNFV deployment is out of the scope of this document but it can be
-found in `[4]`_. The OPNFV platform is considered as the System Under
-Test (SUT) in this document.
+found in http://artifacts.opnfv.org/opnfvdocs/colorado/docs/configguide/index.html.
+The OPNFV platform is considered as the System Under Test (SUT) in this
+document.
Several prerequisites are needed for Functest:
@@ -273,7 +279,7 @@ References
.. _`[1]`: https://ask.openstack.org/en/question/68144/keystone-unable-to-use-the-public-endpoint/
.. _`[2]`: http://artifacts.opnfv.org/functest/docs/userguide/index.html
.. _`[3]`: https://git.opnfv.org/cgit/releng/tree/jjb/functest/functest-ci-jobs.yml
-.. _`[4]`: http://artifacts.opnfv.org/opnfvdocs/brahmaputra/docs/configguide/index.html
+.. _`[4]`: http://artifacts.opnfv.org/opnfvdocs/colorado/docs/configguide/index.html
OPNFV main site: opnfvmain_.
diff --git a/docs/devguide/index.rst b/docs/devguide/index.rst
index 88ae232bc..6b98437c6 100644
--- a/docs/devguide/index.rst
+++ b/docs/devguide/index.rst
@@ -13,17 +13,17 @@ Introduction
Functest is a project dealing with functional testing.
Functest produces its own internal test cases but can also be considered
-as a framework to support feature project testing suite integration.
-Functest developed a test API and defined a Test collection framework
+as a framework to support feature project testing.
+Functest developed a test API and defined a test collection framework
that can be used by any OPNFV project.
Therefore there are many ways to contribute to Functest. You can:
- * develop new internal test cases
- * integrate the tests from your feature project
- * develop the framework to ease the integration of external test cases
- * develop the API / Test collection framework
- * develop dashboards or automatic reporting portals
+ * Develop new internal test cases
+ * Integrate the tests from your feature project
+ * Develop the framework to ease the integration of external test cases
+ * Develop the API / Test collection framework
+ * Develop dashboards or automatic reporting portals
This document describes how, as a developer, you may interact with the
Functest project. The first section details the main working areas of
@@ -62,6 +62,7 @@ Functest internal test cases
============================
The internal test cases in Colorado are:
+ * healthcheck
* vping_ssh
* vping_userdata
* odl
@@ -72,22 +73,28 @@ The internal test cases in Colorado are:
* vims
By internal, we mean that this particular test cases have been
-developped by functest contributors and the associated code is hosted in
-the Functest repository.
+developed and/or integrated by functest contributors and the associated
+code is hosted in the Functest repository.
+An internal case can be fully developed or a simple integration of
+upstream suites (e.g. Tempest/Rally developed in OpenStack are just
+integrated in Functest).
The structure of this repository is detailed in `[1]`_.
-The main internal test cases are int the testcases subfolder of the
+The main internal test cases are in the testcases subfolder of the
repository, the internal test cases are:
- * Controllers: odl, onos
- * OpenStack: vping_ssh, vping_userdata, tempest_*, rally_*
- * vIMS: vims
+ * Controllers: odl, onos, ocl
+ * OpenStack: healthcheck, vping_ssh, vping_userdata, tempest_*, rally_*
+ * VNF: vims
-If you want to create a new test cases you will have to create a new
-folder under the testcases directory
+If you want to create a new test case you will have to create a new
+folder under the testcases directory.
Functest external test cases
============================
-The external tescases are:
+The external test cases are inherited from other OPNFV projects,
+especially the feature projects.
+
+The external test cases are:
* promise
* doctor
@@ -98,14 +105,16 @@ The external tescases are:
* security_scan
* sfc-odl
* sfc-onos
+ * parser
+ * domino
+ * multisite
-Note that security_scan has been bootstraped in Functest but will be
-considered as an external test cases as soon as it will get its own
-repository.
+Note that security_scan has been bootstrapped in Functest but is
+considered as an external test case as it gets its own repository.
The code to run these test cases may be directly in the repository of
-the project. We have also a features sub directory under testcases
+the project. We have also a **features** sub directory under testcases
directory that may be used (it can be usefull if you want to reuse
Functest library).
@@ -114,32 +123,37 @@ Functest framework
==================
Functest can be considered as a framework.
-Functest is release a a docker file, including tools, scripts and a CLI
+Functest is released as a docker file, including tools, scripts and a CLI
to prepare the environement and run tests.
-It simplifies the integration of external test suite in CI pipeline
+It simplifies the integration of external test suites in CI pipeline
and provide commodity tools to collect and display results.
Since Colorado, test categories also known as tiers have been created to
group similar tests, provide consistant sub-lists and at the end optimize
test duration for CI (see How To section).
-see `[2]`_ for details.
+see http://artifacts.opnfv.org/functest/docs/userguide/index.html for
+details.
+
-Test API
-========
+Test collection framework
+=========================
The OPNFV testing group created a test collection database to collect
-the test results from CI.
+the test results from CI:
+
+
+ http://testresults.opnfv.org/test/swagger/spec.html
+ Authentication: opnfv/api@opnfv
+
Any test project running on any lab integrated in CI can push the
results to this database.
-This database can be used afterwards to see the evolution of the tests
-and compare the results versus the installers, the scenario or the labs.
+This database can be used to see the evolution of the tests and compare
+the results versus the installers, the scenarios or the labs.
-You can find more information about the dashboard from Testing Dashboard
-wiki page `[3]`_.
Overall Architecture
-====================
+--------------------
The Test result management can be summarized as follows::
+-------------+ +-------------+ +-------------+
@@ -173,10 +187,15 @@ The Test result management can be summarized as follows::
+----------------------+
Test API description
-====================
+--------------------
The Test API is used to declare pods, projects, test cases and test
-results. An additional method dashboard has been added to post-process
-the raw results in release Brahmaputra.
+results. Pods are the pods used to run the tests.
+The results pushed in the database are related to pods, projects and
+cases. If you try to push results of tests done on a non-referenced pod,
+the API will return an error message.
+
+An additional method dashboard has been added to post-process
+the raw results in release Brahmaputra (deprecated in Colorado).
The data model is very basic, 4 objects are created:
@@ -234,9 +253,9 @@ Results::
}
The API can described as follows. For detailed information, please go to
-http://testresults.opnfv.org/test/swagger/spec.html
-Authentication: opnfv/api@opnfv
+ http://testresults.opnfv.org/test/swagger/spec.html
+ Authentication: opnfv/api@opnfv
Please notes that POST/DELETE/PUT operations for test or study purpose via
swagger website is not allowed, because it will change the real data in
@@ -247,7 +266,7 @@ Version:
+--------+--------------------------+-----------------------------------------+
| Method | Path | Description |
+========+==========================+=========================================+
- | GET | /versions | Get all supported API versions |
+ | GET | /versions | Get all supported API versions |
+--------+--------------------------+-----------------------------------------+
@@ -333,7 +352,7 @@ Results:
+--------+----------------------------+------------------------------------------+
| Method | Path | Description |
+========+============================+==========================================+
- | GET | /api/v1/results | Get all the test results |
+ | GET | /api/v1/results | Get all the test results |
+--------+----------------------------+------------------------------------------+
| POST | /api/v1/results | Add a new test results |
| | | Content-Type: application/json |
@@ -430,6 +449,8 @@ Dashboard:
+--------+----------------------------+-----------------------------------------+
The code of the API is hosted in the releng repository `[6]`_.
+The test API has been dockerized and may be installed locally in your
+lab. See `[15]`_ for details.
Dashboard
=========
@@ -458,17 +479,19 @@ Credentials for a guest account: opnfvuser/kibana
Automatic reporting
===================
-OPNFV release is scenario centric. An automatic reporting page has been
-created in order to provide a consistant view of the scenarios.
+An automatic reporting page has been created in order to provide a
+consistent view of the scenarios.
In this page each scenario is evaluated according to test criteria.
The code for the automatic reporting is available at `[8]`_.
-Every day, we collect the results from the centralized database and, per
-scenario we calculate a score. This score is the addition of individual
-tests considered on the last 10 runs according to a defined criteria.
+The results are collected from the centralized database every day and,
+per scenario, a score is calculated based on the results from the last
+50 days. This score is the addition of single test scores. Each test
+case has success criteria reflected in the criteria field of the
+results.
-If we consider for instance a scenario os-odl_l2-nofeature-ha, we will
-consider for the scoring all the runnable tests from the first test
+Considering an instance of a scenario os-odl_l2-nofeature-ha, the
+scoring is the addition of the scores of all the runnable tests from the
categories (tiers healthcheck, smoke, controller and feature)
corresponding to this scenario.
@@ -495,7 +518,7 @@ corresponding to this scenario.
All the testcases listed in the table are runnable on
os-odl_l2-nofeature scenarios.
-If no results are available or if all the results are failed, the test
+If no result is available or if all the results are failed, the test
case get 0 point.
If it was succesfull at least once but no anymore during the 4 runs,
the case get 1 point (it worked once).
@@ -508,8 +531,9 @@ is 3x6 = 18 points.
The scenario is validated per installer when we got 3 points for all
individual test cases (e.g 18/18).
Please note that complex or long duration tests are not considered for
-the scoring. Such cases will be also get points, but these points will
-be indicative and not used for the scenario validation.
+the scoring. The success criteria are not always easy to define and may
+require specific hardware configuration. These results, however, provide
+a good level of trust in the scenario.
A web page is automatically generated every day to display the status.
This page can be found at `[9]`_. For the status, click on Status menu,
@@ -517,8 +541,8 @@ you may also get feedback for vims and tempest_smoke_serial test cases.
Any validated scenario is stored in a local file on the web server. In
fact as we are using a sliding windows to get results, it may happen
-that a successfull scenarios is not more run (because considered as
-stable)and then the number of iterations (need 4) would not be
+that a successful scenario is no longer run (because considered as
+stable) and then the number of iterations (4 needed) would not be
sufficient to get the green status.
Please note that other test cases e.g. sfc_odl, bgpvpn, moon) need also
@@ -546,9 +570,11 @@ contribute to functest. If you are totally new to OPNFV, you must first
create your Linux Foundation account, then contact us in order to
declare you in the repository database.
-We distinguish 2 levels of contributors: the standard contributor can
-push patch and vote +1/0/-1 on any Functest patch. The commitor can vote
--2/-1/0/+1/+2 and merge.
+We distinguish 2 levels of contributors:
+
+ * The standard contributor can push patches and vote +1/0/-1 on any Functest patch
+ * The committer can vote -2/-1/0/+1/+2 and merge
+
Functest commitors are promoted by the Functest contributors.
@@ -559,22 +585,23 @@ This guide is made for you. You can also have a look at the project wiki
page `[10]`_.
There are references on documentation, video tutorials, tips...
-You can also directly contact us by mail with [Functest] prefix at
-opnfv-tech-discuss@lists.opnfv.org or on the IRC chan #opnfv-functest.
+You can also directly contact us by mail with [Functest] prefix in the
+title at opnfv-tech-discuss@lists.opnfv.org or on the IRC chan
+#opnfv-functest.
What kind of testing do you do in Functest?
===========================================
Functest is focusing on Functional testing. The results must be PASS or
-FAILED. We do not deal with performance and/or qualification tests.
+FAIL. We do not deal with performance and/or qualification tests.
We consider OPNFV as a black box and execute our tests from the jumphost
according to Pharos reference technical architecture.
Upstream test suites are integrated (Rally/Tempest/ODL/ONOS,...).
If needed Functest may bootstrap temporarily testing activities if they
are identified but not covered yet by an existing testing project (e.g
-security_scan before the creation of the security reporistory)
+security_scan before the creation of the security repository)
How test constraints are defined?
@@ -582,8 +609,8 @@ How test constraints are defined?
Test constraints are defined according to 2 paramaters:
- * the scenario (DEPLOY_SCENARIO env variable)
- * the installer (INSTALLER_TYPE env variable)
+ * The scenario (DEPLOY_SCENARIO env variable)
+ * The installer (INSTALLER_TYPE env variable)
A scenario is a formal description of the system under test.
The rules to define a scenario are described in `[4]`_
@@ -629,8 +656,7 @@ How to write and check constaint regex?
Regex are standard regex. You can have a look at `[11]`_
-You caa also easily test your regex via an online regex checker such as
- `[12]`_.
+You can also easily test your regex via an online regex checker such as `[12]`_.
Put your scenario in the TEST STRING window (e.g. os-odl_l3-ovs-ha), put
your regex in the REGULAR EXPRESSION window, then you can test your rule
.
@@ -642,7 +668,7 @@ How to know which test I can run?
You can use the API `[13]`_. The static declaration is in git `[5]`_
If you are in a Functest docker container (assuming that the
-environement has been prepared): just use the new CLI.
+environment has been prepared): just use the CLI.
You can get the list per Test cases or by Tier::
@@ -673,10 +699,10 @@ You can get the list per Test cases or by Tier::
['vims']
-How to manually start Functest test?
-====================================
+How to manually start Functest tests?
+=====================================
-Assuming that you are connected on the jumhost and that the system is
+Assuming that you are connected on the jumphost and that the system is
"Pharos compliant", i.e the technical architecture is compatible with
the one defined in the Pharos project::
@@ -731,11 +757,10 @@ How to declare my tests in Functest?
====================================
If you want to add new internal test cases, you can submit patch under
-the testcases directory of Functest reporsitory.
+the testcases directory of Functest repository.
For feature test integration, the code can be kept into your own
-repository. Then the files of the Functest repository you must modify to
-integrate Functest are:
+repository. The Functest files to be modified are:
* functest/docker/Dockerfile: get your code in Functest container
* functest/ci/testcases.yaml: reference your test and its associated constraints
@@ -745,8 +770,8 @@ integrate Functest are:
Dockerfile
----------
-This file lists the repositories to be cloned in the Functest container.
-The repositories can be internal or external::
+This file lists the repositories (internal or external) to be cloned in
+the Functest container. You can also add external packages::
RUN git clone https://gerrit.opnfv.org/gerrit/<your project> ${repos_dir}/<your project>
@@ -754,8 +779,9 @@ testcases.yaml
--------------
All the test cases that must be run from CI / CLI must be declared in
-co/testcases.yaml.
-This file is used to get the constraintes related to the test::
+ci/testcases.yaml.
+
+This file is used to get the constraints related to the test::
name: <my_super_test_case>
criteria: <not used yet in Colorado, could be > 'PASS', 'rate > 90%'
@@ -768,8 +794,8 @@ This file is used to get the constraintes related to the test::
You must declare your test case in one of the category (tier).
-If you are integrating test suites from a feature project, the category
-must be features.
+If you are integrating test suites from a feature project, the default
+category is **features**.
exec_test.sh
@@ -786,7 +812,9 @@ You just patch the file in git and add a line::
;;
-Note you can use python or bash scripts.
+Note you can use python or bash scripts (or any language assuming that
+the packages have been properly preinstalled, but we recommend python or
+bash).
How to select my list of tests for CI?
@@ -801,7 +829,7 @@ Functest jenkins job)::
Each case can be configured as daily and/or weekly task.
-When executing run_tests.py a check based on the jenkins build tag will
+When executing run_tests.py, a check based on the jenkins build tag will
be considered to detect whether it is a daily and/or a weekly test.
in your CI you can customize the list of test you want to run by case or
@@ -825,7 +853,7 @@ The test database is used to collect test results. By default it is
enabled only for CI tests from Production CI pods.
The architecture and associated API is described in previous chapter.
-If you want to push your results from CI, you just have to use the API
+If you want to push your results from CI, you just have to call the API
at the end of your script.
You can also reuse a python function defined in functest_utils.py::
@@ -856,29 +884,50 @@ You can also reuse a python function defined in functest_utils.py::
References
==========
-.. _`[1]`: http://artifacts.opnfv.org/functest/docs/configguide/index.html Functest configuration guide URL
-.. _`[2]`: http://artifacts.opnfv.org/functest/docs/userguide/index.html functest user guide URL
-.. _`[3]`: https://wiki.opnfv.org/opnfv_test_dashboard
-.. _`[4]`: https://wiki.opnfv.org/display/INF/CI+Scenario+Naming
-.. _`[5]`: https://git.opnfv.org/cgit/functest/tree/ci/testcases.yaml
-.. _`[6]`: https://git.opnfv.org/cgit/releng/tree/utils/test/result_collection_api
-.. _`[7]`: https://git.opnfv.org/cgit/releng/tree/utils/test/scripts
-.. _`[8]`: https://git.opnfv.org/cgit/releng/tree/utils/test/reporting/functest
-.. _`[9]`: http://testresults.opnfv.org/reporting/
-.. _`[10]`: https://wiki.opnfv.org/opnfv_functional_testing
-.. _`[11]`: https://docs.python.org/2/howto/regex.html
-.. _`[12]`: https://regex101.com/
-.. _`[13]`: http://testresults.opnfv.org/test/api/v1/projects/functest/cases
-.. _`[14]`: https://git.opnfv.org/cgit/releng/tree/jjb/functest/functest-daily.sh
+.. _`[1]`: http://artifacts.opnfv.org/functest/docs/configguide/index.html Functest configuration guide
+
+.. _`[2]`: http://artifacts.opnfv.org/functest/docs/userguide/index.html functest user guide
+
+.. _`[3]`: https://wiki.opnfv.org/opnfv_test_dashboard Brahmaputra dashboard
+
+.. _`[4]`: https://wiki.opnfv.org/display/INF/CI+Scenario+Naming
+
+.. _`[5]`: https://git.opnfv.org/cgit/functest/tree/ci/testcases.yaml
+
+.. _`[6]`: https://git.opnfv.org/cgit/releng/tree/utils/test/result_collection_api
+
+.. _`[7]`: https://git.opnfv.org/cgit/releng/tree/utils/test/scripts
+
+.. _`[8]`: https://git.opnfv.org/cgit/releng/tree/utils/test/reporting/functest
+
+.. _`[9]`: http://testresults.opnfv.org/reporting/
+
+.. _`[10]`: https://wiki.opnfv.org/opnfv_functional_testing
+
+.. _`[11]`: https://docs.python.org/2/howto/regex.html
+
+.. _`[12]`: https://regex101.com/
+
+.. _`[13]`: http://testresults.opnfv.org/test/api/v1/projects/functest/cases
+
+.. _`[14]`: https://git.opnfv.org/cgit/releng/tree/jjb/functest/functest-daily.sh
+
+.. _`[15]`: https://git.opnfv.org/cgit/releng/tree/utils/test/result_collection_api/README.rst
+
OPNFV main site: opnfvmain_.
OPNFV functional test page: opnfvfunctest_.
-IRC support chan: #opnfv-testperf
+IRC support chan: #opnfv-functest
+
+.. _opnfvmain: http://www.opnfv.org
+
+.. _opnfvfunctest: https://wiki.opnfv.org/opnfv_functional_testing
+
+.. _`OpenRC`: http://docs.openstack.org/user-guide/common/cli_set_environment_variables_using_openstack_rc.html
+
+.. _`Rally installation procedure`: https://rally.readthedocs.org/en/latest/tutorial/step_0_installation.html
+
+.. _`config_test.py` : https://git.opnfv.org/cgit/functest/tree/testcases/config_functest.py
-.. _opnfvmain: http://www.opnfv.org
-.. _opnfvfunctest: https://wiki.opnfv.org/opnfv_functional_testing
-.. _`OpenRC`: http://docs.openstack.org/user-guide/common/cli_set_environment_variables_using_openstack_rc.html
-.. _`Rally installation procedure`: https://rally.readthedocs.org/en/latest/tutorial/step_0_installation.html
-.. _`config_test.py` : https://git.opnfv.org/cgit/functest/tree/testcases/config_functest.py
-.. _`config_functest.yaml` : https://git.opnfv.org/cgit/functest/tree/testcases/config_functest.yaml
+.. _`config_functest.yaml` : https://git.opnfv.org/cgit/functest/tree/testcases/config_functest.yaml
diff --git a/docs/release-notes/functest-release.rst b/docs/release-notes/functest-release.rst
index 9fbc35634..6ebab22a7 100644
--- a/docs/release-notes/functest-release.rst
+++ b/docs/release-notes/functest-release.rst
@@ -227,24 +227,54 @@ ci/testcases.yaml file.
The reporting pages can be found at:
- * apex: * TODO / check branch *
- * compass: * TODO / check branch *
- * fuel: * TODO / check branch *
- * joid: * TODO / check branch *
-
-Colorado limitations
------------------------
-
-- vPing userdata and Tempest cases related to metada service excluded
-from onos scenarios https://gerrit.opnfv.org/gerrit/#/c/18729/
-
-- Tempest cases related to storage for joid scenarios
-https://gerrit.opnfv.org/gerrit/#/c/17871/
-
-- vPing_ssh and vIMS excluded from bgpvpn and kvm scenario
-
-
-See known issues section for details
+ * apex: http://testresults.opnfv.org/reporting/functest/release/colorado/index-status-apex.html
+ * compass: http://testresults.opnfv.org/reporting/functest/release/colorado/index-status-compass.html
+ * fuel: http://testresults.opnfv.org/reporting/functest/release/colorado/index-status-fuel.html
+ * joid: http://testresults.opnfv.org/reporting/functest/release/colorado/index-status-joid.html
+
+Colorado known restrictions/issues
+==================================
+
++-----------+-----------+----------------------------------------------+
+| Installer | Scenario | Issue |
++===========+===========+==============================================+
+| any | onos-* | vPing userdata and Tempest cases related to |
+| | | metadata service excluded from onos scenarios|
+| | | https://gerrit.opnfv.org/gerrit/#/c/18729/ |
++-----------+-----------+----------------------------------------------+
+| apex/fuel | *-bgpvpn | vPing_ssh (floating ips not supported) and |
+| | | vIMS excluded. Some Tempest cases related to |
+| | | floating ips also excluded. |
++-----------+-----------+----------------------------------------------+
+| compass | moon | First ODL test FAILS because ODL/Openstack |
+| | | federation done in moon is partial. Only |
+| | | MD-SAL is federated (not AD-SAL) |
++-----------+-----------+----------------------------------------------+
+| fuel | any | TestServerBasicOps test case skipped |
+| | | https://gerrit.opnfv.org/gerrit/#/c/19635/ |
++-----------+-----------+----------------------------------------------+
+| fuel | kvm | vPing_ssh and vIMS excluded (metadata related|
+| | | scenarios) |
++-----------+-----------+----------------------------------------------+
+| fuel | multisite | random errors running multisite. A known bug |
+| | | in keystone mitaka, due to which memcache |
+| | | raises exception and keystone becomes |
+| | | unresponsive |
+| | | bugs.launchpad.net/keystone/+bug/1600394 |
+| | | workaround consists in restarting memcache on|
+| | | server |
++-----------+-----------+----------------------------------------------+
+| joid | any | Tempest cases related to object storage |
+| | | excluded |
+| | | https://gerrit.opnfv.org/gerrit/#/c/17871/ |
++-----------+-----------+----------------------------------------------+
+| joid | domino | Domino tests are skipped in CI. However the |
+| | | test case can be run by manually setting |
+| | | IS_IPandKEY_CONFIGURED=true after manually |
+| | | setting properly the IP addresses of the 3 |
+| | | Controller nodes in the configuration file |
+| | | /repos/domino/tests/run_multinode.sh |
++-----------+-----------+----------------------------------------------+
Test and installer/scenario dependencies
@@ -265,9 +295,6 @@ Test results are available in:
- jenkins logs on CI: https://build.opnfv.org/ci/view/functest/
-Known issues
-------------
-
Open JIRA tickets
=================
diff --git a/docs/userguide/index.rst b/docs/userguide/index.rst
index 0e13d3547..6140f606d 100644
--- a/docs/userguide/index.rst
+++ b/docs/userguide/index.rst
@@ -331,13 +331,13 @@ Please refer to the dedicated feature user guides for details:
* bgpvpn: ** TODO link **
* copper: ** TODO link **
- * doctor: ** TODO link **
+ * doctor: http://artifacts.opnfv.org/doctor/colorado/userguide/index.html
* domino: ** TODO link **
* moon: ** TODO link **
* multisites: ** TODO link **
* onos-sfc: ** TODO link **
* odl-sfc: ** TODO link **
- * promise: ** TODO link **
+ * promise: http://artifacts.opnfv.org/promise/colorado/userguide/index.html
security_scan
^^^^^^^^^^^^^
diff --git a/docs/userguide/introduction.rst b/docs/userguide/introduction.rst
index 53b3df4ad..fc8e6680e 100644
--- a/docs/userguide/introduction.rst
+++ b/docs/userguide/introduction.rst
@@ -209,8 +209,8 @@ section `Executing the functest suites`_ of this document.
.. _`[2]`: http://docs.openstack.org/developer/tempest/overview.html
.. _`[3]`: https://rally.readthedocs.org/en/latest/index.html
-.. _`Doctor User Guide`: http://artifacts.opnfv.org/opnfvdocs/brahmaputra/docs/userguide/featureusage-doctor.html
-.. _`Promise User Guide`: http://artifacts.opnfv.org/promise/brahmaputra/docs/userguide/index.html
+.. _`Doctor User Guide`: http://artifacts.opnfv.org/doctor/colorado/userguide/index.html
+.. _`Promise User Guide`: http://artifacts.opnfv.org/promise/colorado/userguide/index.html
.. _`ONOSFW User Guide`: http://artifacts.opnfv.org/onosfw/brahmaputra/docs/userguide/index.html
.. _`SDNVPN User Guide`: http://artifacts.opnfv.org/sdnvpn/brahmaputra/docs/userguide/featureusage.html
.. _`Domino User Guide`: http://artifacts.opnfv.org/domino/docs/userguide/index.html
diff --git a/testcases/Controllers/ODL/OpenDaylightTesting.py b/testcases/Controllers/ODL/OpenDaylightTesting.py
index d3bc0978b..22df9f667 100755
--- a/testcases/Controllers/ODL/OpenDaylightTesting.py
+++ b/testcases/Controllers/ODL/OpenDaylightTesting.py
@@ -6,6 +6,7 @@ import os
import re
import shutil
import sys
+import urlparse
from robot import run
from robot.api import ExecutionResult, ResultVisitor
@@ -14,6 +15,7 @@ from robot.utils.robottime import timestamp_to_secs
import functest.utils.functest_logger as ft_logger
import functest.utils.functest_utils as ft_utils
+import functest.utils.openstack_utils as op_utils
class ODLResultVisitor(ResultVisitor):
@@ -60,7 +62,7 @@ class ODLTestCases:
except IOError as e:
cls.logger.error(
"Cannot copy OPNFV's testcases to ODL directory: "
- "%s" % e.strerror)
+ "%s" % str(e))
return False
return True
@@ -76,7 +78,7 @@ class ODLTestCases:
line.rstrip())
return True
except Exception as e:
- cls.logger.error("Cannot set ODL creds: %s" % e.strerror)
+ cls.logger.error("Cannot set ODL creds: %s" % str(e))
return False
@classmethod
@@ -95,7 +97,7 @@ class ODLTestCases:
'RESTCONFPORT:' + kwargs['odlrestconfport']]
except KeyError as e:
cls.logger.error("Cannot run ODL testcases. Please check "
- "%s" % e.strerror)
+ "%s" % str(e))
return False
if (cls.copy_opnf_testcases() and
cls.set_robotframework_vars(odlusername, odlpassword)):
@@ -122,6 +124,43 @@ class ODLTestCases:
return False
@classmethod
+ def functest_run(cls):
+ kclient = op_utils.get_keystone_client()
+ keystone_url = kclient.service_catalog.url_for(
+ service_type='identity', endpoint_type='publicURL')
+ neutron_url = kclient.service_catalog.url_for(
+ service_type='network', endpoint_type='publicURL')
+ kwargs = {'keystoneip': urlparse.urlparse(keystone_url).hostname}
+ kwargs['neutronip'] = urlparse.urlparse(neutron_url).hostname
+ kwargs['odlip'] = kwargs['neutronip']
+ kwargs['odlwebport'] = '8080'
+ kwargs['odlrestconfport'] = '8181'
+ kwargs['odlusername'] = 'admin'
+ kwargs['odlpassword'] = 'admin'
+ try:
+ installer_type = os.environ['INSTALLER_TYPE']
+ kwargs['osusername'] = os.environ['OS_USERNAME']
+ kwargs['ostenantname'] = os.environ['OS_TENANT_NAME']
+ kwargs['ospassword'] = os.environ['OS_PASSWORD']
+ if installer_type == 'fuel':
+ kwargs['odlwebport'] = '8282'
+ elif installer_type == 'apex':
+ kwargs['odlip'] = os.environ['SDN_CONTROLLER_IP']
+ kwargs['odlwebport'] = '8181'
+ elif installer_type == 'joid':
+ kwargs['odlip'] = os.environ['SDN_CONTROLLER']
+ elif installer_type == 'compass':
+ kwargs['odlwebport'] = '8181'
+ else:
+ kwargs['odlip'] = os.environ['SDN_CONTROLLER_IP']
+ except KeyError as e:
+ cls.logger.error("Cannot run ODL testcases. Please check env var: "
+ "%s" % str(e))
+ return False
+
+ return cls.run(**kwargs)
+
+ @classmethod
def push_to_db(cls):
try:
result = ExecutionResult(cls.res_dir + 'output.xml')
diff --git a/testcases/Controllers/ODL/__init__.py b/testcases/Controllers/ODL/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testcases/Controllers/ODL/__init__.py
diff --git a/testcases/Controllers/__init__.py b/testcases/Controllers/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/testcases/Controllers/__init__.py
diff --git a/testcases/OpenStack/rally/run_rally-cert.py b/testcases/OpenStack/rally/run_rally-cert.py
index 92dbddff6..f3eb79d26 100755
--- a/testcases/OpenStack/rally/run_rally-cert.py
+++ b/testcases/OpenStack/rally/run_rally-cert.py
@@ -376,7 +376,6 @@ def main():
nova_client = os_utils.get_nova_client()
neutron_client = os_utils.get_neutron_client()
- glance_client = os_utils.get_glance_client()
cinder_client = os_utils.get_cinder_client()
start_time = time.time()
@@ -402,44 +401,19 @@ def main():
else:
logger.debug("Using existing volume type(s)...")
- image_id = os_utils.get_image_id(glance_client, GLANCE_IMAGE_NAME)
- image_exists = False
-
- if image_id == '':
- logger.debug("Creating image '%s' from '%s'..." % (GLANCE_IMAGE_NAME,
- GLANCE_IMAGE_PATH))
- image_id = os_utils.create_glance_image(glance_client,
- GLANCE_IMAGE_NAME,
- GLANCE_IMAGE_PATH,
- GLANCE_IMAGE_FORMAT)
- if not image_id:
- logger.error("Failed to create the Glance image...")
- exit(-1)
- else:
- logger.debug("Image '%s' with ID '%s' created succesfully ."
- % (GLANCE_IMAGE_NAME, image_id))
- else:
- logger.debug("Using existing image '%s' with ID '%s'..."
- % (GLANCE_IMAGE_NAME, image_id))
- image_exists = True
+ image_exists, image_id = os_utils.get_or_create_image(GLANCE_IMAGE_NAME,
+ GLANCE_IMAGE_PATH,
+ GLANCE_IMAGE_FORMAT)
+ if not image_id:
+ exit(-1)
logger.debug("Creating network '%s'..." % PRIVATE_NET_NAME)
- network_dict = os_utils.create_network_full(neutron_client,
- PRIVATE_NET_NAME,
- PRIVATE_SUBNET_NAME,
- ROUTER_NAME,
- PRIVATE_SUBNET_CIDR)
+ network_dict = os_utils.create_shared_network_full(PRIVATE_NET_NAME,
+ PRIVATE_SUBNET_NAME,
+ ROUTER_NAME,
+ PRIVATE_SUBNET_CIDR)
if not network_dict:
- logger.error("Failed to create network...")
- exit(-1)
- else:
- if not os_utils.update_neutron_net(neutron_client,
- network_dict['net_id'],
- shared=True):
- logger.error("Failed to update network...")
- exit(-1)
- else:
- logger.debug("Network '%s' available..." % PRIVATE_NET_NAME)
+ exit(1)
if args.test_name == "all":
for test_name in tests:
diff --git a/testcases/OpenStack/tempest/custom_tests/blacklist.txt b/testcases/OpenStack/tempest/custom_tests/blacklist.txt
index 6dd7fad5c..42e1a327a 100644
--- a/testcases/OpenStack/tempest/custom_tests/blacklist.txt
+++ b/testcases/OpenStack/tempest/custom_tests/blacklist.txt
@@ -11,6 +11,8 @@
- tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_list_servers
- tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_verify_server_details
- tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_reboot_server_hard
+ - tempest.api.network.test_floating_ips.FloatingIPTestJSON.test_create_list_show_update_delete_floating_ip
+ - tempest.api.network.test_floating_ips.FloatingIPTestJSON.test_create_floating_ip_specifying_a_fixed_ip_address
- tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_network_basic_ops
- tempest.scenario.test_server_basic_ops.TestServerBasicOps.test_server_basic_ops
- tempest.scenario.test_volume_boot_pattern.TestVolumeBootPattern.test_volume_boot_pattern
@@ -22,12 +24,23 @@
- os-odl_l2-nofeature-noha
- os-nosdn-nofeature-ha
- os-nosdn-nofeature-noha
+ installers:
+ - joid
+ tests:
+ - tempest.api.object_storage
+
+-
+ scenarios:
- os-nosdn-lxd-ha
- os-nosdn-lxd-noha
installers:
- joid
tests:
- tempest.api.object_storage
+ - tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_network_basic_ops
+ - tempest.scenario.test_server_basic_ops.TestServerBasicOps.test_server_basic_ops
+ - tempest.scenario.test_volume_boot_pattern.TestVolumeBootPattern.test_volume_boot_pattern
+ - tempest.scenario.test_volume_boot_pattern.TestVolumeBootPatternV2.test_volume_boot_pattern
-
scenarios:
@@ -61,3 +74,17 @@
- tempest.scenario.test_server_basic_ops.TestServerBasicOps.test_server_basic_ops
- tempest.scenario.test_volume_boot_pattern.TestVolumeBootPattern.test_volume_boot_pattern
- tempest.scenario.test_volume_boot_pattern.TestVolumeBootPatternV2.test_volume_boot_pattern
+
+-
+ # https://bugs.launchpad.net/tempest/+bug/1586931
+ scenarios:
+ - os-odl_l2-nofeature-ha
+ - os-odl_l2-sfc-ha
+ - os-odl_l3-nofeature-ha
+ - os-nosdn-kvm-ha
+ - os-nosdn-nofeature-ha
+ - os-nosdn-ovs-ha
+ installers:
+ - fuel
+ tests:
+ - tempest.scenario.test_server_basic_ops.TestServerBasicOps.test_server_basic_ops
diff --git a/testcases/OpenStack/tempest/run_tempest.py b/testcases/OpenStack/tempest/run_tempest.py
index 64a5ed778..306664feb 100755
--- a/testcases/OpenStack/tempest/run_tempest.py
+++ b/testcases/OpenStack/tempest/run_tempest.py
@@ -125,8 +125,6 @@ def get_info(file_result):
def create_tempest_resources():
keystone_client = os_utils.get_keystone_client()
- neutron_client = os_utils.get_neutron_client()
- glance_client = os_utils.get_glance_client()
logger.debug("Creating tenant and user for Tempest suite")
tenant_id = os_utils.create_tenant(keystone_client,
@@ -141,40 +139,19 @@ def create_tempest_resources():
logger.error("Error : Failed to create %s user" % USER_NAME)
logger.debug("Creating private network for Tempest suite")
- network_dic = os_utils.create_network_full(neutron_client,
- PRIVATE_NET_NAME,
- PRIVATE_SUBNET_NAME,
- ROUTER_NAME,
- PRIVATE_SUBNET_CIDR)
- if network_dic:
- if not os_utils.update_neutron_net(neutron_client,
- network_dic['net_id'],
- shared=True):
- logger.error("Failed to update private network...")
- exit(-1)
- else:
- logger.debug("Network '%s' is available..." % PRIVATE_NET_NAME)
- else:
- logger.error("Private network creation failed")
- exit(-1)
+ network_dic = os_utils.create_shared_network_full(PRIVATE_NET_NAME,
+ PRIVATE_SUBNET_NAME,
+ ROUTER_NAME,
+ PRIVATE_SUBNET_CIDR)
+ if not network_dic:
+ exit(1)
logger.debug("Creating image for Tempest suite")
- # Check if the given image exists
- image_id = os_utils.get_image_id(glance_client, GLANCE_IMAGE_NAME)
- if image_id != '':
- logger.info("Using existing image '%s'..." % GLANCE_IMAGE_NAME)
- else:
- logger.info("Creating image '%s' from '%s'..." % (GLANCE_IMAGE_NAME,
- GLANCE_IMAGE_PATH))
- image_id = os_utils.create_glance_image(glance_client,
- GLANCE_IMAGE_NAME,
- GLANCE_IMAGE_PATH,
- GLANCE_IMAGE_FORMAT)
- if not image_id:
- logger.error("Failed to create a Glance image...")
- exit(-1)
- logger.debug("Image '%s' with ID=%s created successfully."
- % (GLANCE_IMAGE_NAME, image_id))
+ _, image_id = os_utils.get_or_create_image(GLANCE_IMAGE_NAME,
+ GLANCE_IMAGE_PATH,
+ GLANCE_IMAGE_FORMAT)
+ if not image_id:
+ exit(-1)
def configure_tempest(deployment_dir):
diff --git a/testcases/OpenStack/vPing/vping_util.py b/testcases/OpenStack/vPing/vping_util.py
index c16c5d659..3f4adae73 100644
--- a/testcases/OpenStack/vPing/vping_util.py
+++ b/testcases/OpenStack/vPing/vping_util.py
@@ -147,24 +147,11 @@ def create_security_group():
def create_image():
- EXIT_CODE = -1
-
- # Check if the given image exists
- image_id = os_utils.get_image_id(glance_client, GLANCE_IMAGE_NAME)
- if image_id != '':
- logger.info("Using existing image '%s'..." % GLANCE_IMAGE_NAME)
- else:
- logger.info("Creating image '%s' from '%s'..." % (GLANCE_IMAGE_NAME,
- GLANCE_IMAGE_PATH))
- image_id = os_utils.create_glance_image(glance_client,
- GLANCE_IMAGE_NAME,
- GLANCE_IMAGE_PATH,
- GLANCE_IMAGE_FORMAT)
- if not image_id:
- logger.error("Failed to create a Glance image...")
- exit(EXIT_CODE)
- logger.debug("Image '%s' with ID=%s created successfully."
- % (GLANCE_IMAGE_NAME, image_id))
+ _, image_id = os_utils.get_or_create_image(GLANCE_IMAGE_NAME,
+ GLANCE_IMAGE_PATH,
+ GLANCE_IMAGE_FORMAT)
+ if not image_id:
+ exit(-1)
return image_id
diff --git a/testcases/features/sfc/set-up-tacker.sh b/testcases/features/sfc/set-up-tacker.sh
index e88893391..85832ab7f 100755
--- a/testcases/features/sfc/set-up-tacker.sh
+++ b/testcases/features/sfc/set-up-tacker.sh
@@ -1,6 +1,7 @@
+apt-get install -y git-core
git clone https://gerrit.opnfv.org/gerrit/fuel fuel
pushd fuel
-git checkout e7f7abc89161441548545f79f0299610c6e5b203
+git checkout ad0dd57
popd
mv fuel/prototypes/sfc_tacker/poc.tacker-up.sh .
bash poc.tacker-up.sh
diff --git a/utils/functest_utils.py b/utils/functest_utils.py
index b916ab129..199088b9c 100644
--- a/utils/functest_utils.py
+++ b/utils/functest_utils.py
@@ -200,11 +200,30 @@ def push_results_to_db(project, case_name, logger,
"""
# Retrieve params from CI and conf
url = get_db_url(logger) + "/results"
- installer = get_installer_type(logger)
- scenario = get_scenario(logger)
- version = get_version(logger)
- pod_name = get_pod_name(logger)
- build_tag = get_build_tag(logger)
+
+ try:
+ installer = os.environ['INSTALLER_TYPE']
+ scenario = os.environ['DEPLOY_SCENARIO']
+ pod_name = os.environ['NODE_NAME']
+ build_tag = os.environ['BUILD_TAG']
+ except KeyError as e:
+ msg = "Please set env var: " + str(e)
+ if logger:
+ logger.error(msg)
+ else:
+ print(msg)
+ return False
+ rule = "daily-(.+?)-[0-9]*"
+ m = re.search(rule, build_tag)
+ if m:
+ version = m.group(1)
+ else:
+ msg = "Please fix BUILD_TAG env var: " + build_tag
+ if logger:
+ logger.error(msg)
+ else:
+ print(msg)
+ return False
test_start = dt.fromtimestamp(start_date).strftime('%Y-%m-%d %H:%M:%S')
test_stop = dt.fromtimestamp(stop_date).strftime('%Y-%m-%d %H:%M:%S')
@@ -320,6 +339,7 @@ def execute_command(cmd, logger=None,
logger.debug(line)
else:
print line
+ sys.stdout.flush()
p.stdout.close()
returncode = p.wait()
if returncode != 0:
diff --git a/utils/openstack_utils.py b/utils/openstack_utils.py
index d30ca629c..bc718bb2b 100755
--- a/utils/openstack_utils.py
+++ b/utils/openstack_utils.py
@@ -671,6 +671,28 @@ def create_network_full(neutron_client,
return network_dic
+def create_shared_network_full(net_name, subnt_name, router_name, subnet_cidr):
+ neutron_client = get_neutron_client()
+
+ network_dic = create_network_full(neutron_client,
+ net_name,
+ subnt_name,
+ router_name,
+ subnet_cidr)
+ if network_dic:
+ if not update_neutron_net(neutron_client,
+ network_dic['net_id'],
+ shared=True):
+ logger.error("Failed to update network %s..." % net_name)
+ return None
+ else:
+ logger.debug("Network '%s' is available..." % net_name)
+ else:
+ logger.error("Network %s creation failed" % net_name)
+ return None
+ return network_dic
+
+
def create_bgpvpn(neutron_client, **kwargs):
# route_distinguishers
# route_targets
@@ -889,6 +911,26 @@ def create_glance_image(glance_client, image_name, file_path, disk="qcow2",
return None
+def get_or_create_image(name, path, format):
+ image_exists = False
+ glance_client = get_glance_client()
+
+ image_id = get_image_id(glance_client, name)
+ if image_id != '':
+ logger.info("Using existing image '%s'..." % name)
+ image_exists = True
+ else:
+ logger.info("Creating image '%s' from '%s'..." % (name, path))
+ image_id = create_glance_image(glance_client, name, path, format)
+ if not image_id:
+ logger.error("Failed to create a Glance image...")
+ else:
+ logger.debug("Image '%s' with ID=%s created successfully."
+ % (name, image_id))
+
+ return image_exists, image_id
+
+
def delete_glance_image(nova_client, image_id):
try:
nova_client.images.delete(image_id)