-rw-r--r--  docker/Dockerfile  12
-rw-r--r--  docker/Dockerfile.aarch64  6
-rw-r--r--  docs/devguide/index.rst  30
-rw-r--r--  docs/internship/testapi_evolution/index.rst  189
-rwxr-xr-x  functest/ci/config_functest.yaml  5
-rw-r--r--  functest/ci/installer_params.yaml  16
-rwxr-xr-x  functest/ci/prepare_env.py  89
-rwxr-xr-x  functest/ci/run_tests.py  2
-rwxr-xr-x  functest/ci/testcases.yaml  104
-rwxr-xr-x  functest/ci/tier_builder.py  2
-rwxr-xr-x  functest/ci/tier_handler.py  6
-rw-r--r--  functest/cli/commands/cli_env.py  2
-rw-r--r--  functest/cli/commands/cli_os.py  2
-rw-r--r--  functest/cli/commands/cli_testcase.py  2
-rw-r--r--  functest/cli/commands/cli_tier.py  2
-rw-r--r--  functest/core/feature_base.py  10
-rw-r--r--  functest/core/vnf_base.py  15
-rw-r--r--  functest/opnfv_tests/features/barometer.py  28
-rw-r--r--  functest/opnfv_tests/openstack/tempest/custom_tests/defcore_req.txt  286
-rw-r--r--  functest/opnfv_tests/openstack/tempest/tempest.py  9
-rw-r--r--  functest/opnfv_tests/openstack/vping/vping_base.py  2
-rwxr-xr-x  functest/opnfv_tests/openstack/vping/vping_ssh.py  6
-rwxr-xr-x  functest/opnfv_tests/sdn/odl/odl.py  2
-rw-r--r--  functest/opnfv_tests/sdn/onos/sfc/sfc_onos.py  14
-rw-r--r--  functest/opnfv_tests/sdn/onos/teston/adapters/foundation.py  2
-rw-r--r--  functest/opnfv_tests/vnf/ims/clearwater.py  2
-rw-r--r--  functest/opnfv_tests/vnf/ims/opera_ims.py  475
-rw-r--r--  functest/opnfv_tests/vnf/ims/orchestra_ims.py  465
-rw-r--r--  functest/opnfv_tests/vnf/ims/orchestra_ims.yaml  7
-rw-r--r--  functest/opnfv_tests/vnf/ims/orchestrator_cloudify.py  2
-rwxr-xr-x  functest/opnfv_tests/vnf/router/__init__.py  0
-rwxr-xr-x  functest/opnfv_tests/vnf/router/vyos_vrouter.py  33
-rw-r--r--  functest/utils/env.py  2
-rwxr-xr-x  functest/utils/functest_logger.py  4
-rw-r--r--  functest/utils/functest_utils.py  9
-rw-r--r--  functest/utils/openstack_tacker.py  42
-rw-r--r--  requirements.txt  3
37 files changed, 1446 insertions, 441 deletions
diff --git a/docker/Dockerfile b/docker/Dockerfile
index ca0fb6a70..1187fb95f 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -19,6 +19,7 @@ ARG ODL_TAG=release/beryllium-sr4
ARG OPENSTACK_TAG=stable/mitaka
ARG KINGBIRD_TAG=0.2.2
ARG VIMS_TAG=stable
+ARG VROUTER_TAG=stable
ARG REPOS_DIR=/home/opnfv/repos
ARG FUNCTEST_BASE_DIR=/home/opnfv/functest
ARG FUNCTEST_CONF_DIR=${FUNCTEST_BASE_DIR}/conf
@@ -84,6 +85,7 @@ RUN git clone --depth 1 -b $BRANCH https://gerrit.opnfv.org/gerrit/doctor ${REPO
RUN git clone --depth 1 -b $BRANCH https://gerrit.opnfv.org/gerrit/ovno ${REPOS_DIR}/ovno
RUN git clone --depth 1 -b $BRANCH https://gerrit.opnfv.org/gerrit/promise ${REPOS_DIR}/promise
RUN git clone --depth 1 -b $BRANCH https://gerrit.opnfv.org/gerrit/netready ${REPOS_DIR}/netready
+RUN git clone --depth 1 -b $BRANCH https://gerrit.opnfv.org/gerrit/barometer ${REPOS_DIR}/barometer
RUN git clone --depth 1 -b $BRANCH https://gerrit.opnfv.org/gerrit/sfc ${REPOS_DIR}/sfc
RUN git clone --depth 1 -b $BRANCH https://gerrit.opnfv.org/gerrit/snaps ${REPOS_DIR}/snaps
RUN git clone --depth 1 https://gerrit.opnfv.org/gerrit/securityscanning ${REPOS_DIR}/securityscanning
@@ -91,13 +93,14 @@ RUN git clone --depth 1 https://gerrit.opnfv.org/gerrit/releng ${REPOS_DIR}/rele
# OpenStack repositories
RUN git clone --depth 1 -b $OPENSTACK_TAG https://github.com/openstack/networking-bgpvpn ${REPOS_DIR}/bgpvpn
-#RUN git clone --depth 1 -b $KINGBIRD_TAG https://github.com/openstack/kingbird.git ${REPOS_DIR}/kingbird
+RUN git clone --depth 1 -b $KINGBIRD_TAG https://github.com/openstack/kingbird.git ${REPOS_DIR}/kingbird
RUN git clone --depth 1 -b $RALLY_TAG https://github.com/openstack/rally.git ${REPOS_DIR}/rally
RUN git clone --depth 1 -b $TEMPEST_TAG https://github.com/openstack/tempest.git ${REPOS_DIR}/tempest
# other repositories
RUN git clone --depth 1 -b $ODL_TAG https://git.opendaylight.org/gerrit/p/integration/test.git ${REPOS_DIR}/odl_test
RUN git clone --depth 1 -b $VIMS_TAG https://github.com/boucherv-orange/clearwater-live-test ${REPOS_VNFS_DIR}/vims-test
+RUN git clone --depth 1 -b $VROUTER_TAG https://github.com/oolorg/opnfv-functest-vrouter.git ${REPOS_VNFS_DIR}/vrouter
RUN git clone --depth 1 https://github.com/wuwenbin2/OnosSystemTest.git ${REPOS_DIR}/onos
RUN pip install -r ${REPOS_DIR}/rally/requirements.txt
@@ -110,6 +113,9 @@ RUN cd ${FUNCTEST_REPO_DIR} \
RUN cd ${RELENG_MODULE_DIR} \
&& pip install .
+RUN cd ${REPOS_DIR}/barometer \
+ && pip install .
+
RUN find ${FUNCTEST_REPO_DIR} -name "*.py" \
-not -path "*tests/unit*" |xargs grep __main__ |cut -d\: -f 1 |xargs chmod -c 755 \
&& find ${FUNCTEST_REPO_DIR} -name "*.sh" |xargs grep \#\! |cut -d\: -f 1 |xargs chmod -c 755
@@ -135,7 +141,9 @@ RUN cd ${REPOS_DIR}/sfc && pip install .
RUN cd ${REPOS_DIR}/sdnvpn && pip install .
RUN cd ${REPOS_DIR}/bgpvpn && pip install .
-#RUN cd ${REPOS_DIR}/kingbird && pip install -e .
+
+# Kingbird integration
+RUN cd ${REPOS_DIR}/kingbird && pip install -e .
RUN /bin/bash -c ". /etc/profile.d/rvm.sh \
&& cd ${REPOS_VNFS_DIR}/vims-test \
diff --git a/docker/Dockerfile.aarch64 b/docker/Dockerfile.aarch64
index fa04e8c8c..bf8e361f0 100644
--- a/docker/Dockerfile.aarch64
+++ b/docker/Dockerfile.aarch64
@@ -91,7 +91,7 @@ RUN git clone --depth 1 https://gerrit.opnfv.org/gerrit/releng ${REPOS_DIR}/rele
# OpenStack repositories
RUN git clone --depth 1 -b $OPENSTACK_TAG https://github.com/openstack/networking-bgpvpn ${REPOS_DIR}/bgpvpn
-#RUN git clone --depth 1 -b $KINGBIRD_TAG https://github.com/openstack/kingbird.git ${REPOS_DIR}/kingbird
+RUN git clone --depth 1 -b $KINGBIRD_TAG https://github.com/openstack/kingbird.git ${REPOS_DIR}/kingbird
RUN git clone --depth 1 -b $RALLY_TAG https://github.com/openstack/rally.git ${REPOS_DIR}/rally
RUN git clone --depth 1 -b $TEMPEST_TAG https://github.com/openstack/tempest.git ${REPOS_DIR}/tempest
@@ -131,7 +131,9 @@ RUN cd ${REPOS_DIR}/sfc && pip install .
RUN cd ${REPOS_DIR}/sdnvpn && pip install .
RUN cd ${REPOS_DIR}/bgpvpn && pip install .
-#RUN cd ${REPOS_DIR}/kingbird && pip install -e .
+
+# Kingbird integration
+RUN cd ${REPOS_DIR}/kingbird && pip install -e .
RUN /bin/bash -c ". /etc/profile.d/rvm.sh \
&& cd ${REPOS_VNFS_DIR}/vims-test \
diff --git a/docs/devguide/index.rst b/docs/devguide/index.rst
index 42ad04451..eee013678 100644
--- a/docs/devguide/index.rst
+++ b/docs/devguide/index.rst
@@ -335,10 +335,6 @@ The API can described as follows. For detailed information, please go to
Authentication: opnfv/api@opnfv
-Please notes that POST/DELETE/PUT operations for test or study purpose via
-swagger website is not allowed, because it will change the real data in
-the database.
-
Version:
+--------+--------------------------+-----------------------------------------+
@@ -503,17 +499,39 @@ Scenarios:
The code of the API is hosted in the releng repository `[6]`_.
+The static documentation of the API can be found at `[17]`_.
The test API has been dockerized and may be installed locally in your
lab. See `[15]`_ for details.
The deployment of the test API has been automated.
A jenkins job manages:
* the unit tests of the test api
- * the cration of a new docker file
+ * the creation of a new docker file
* the deployment of the new test api
* the archive of the old test api
* the backup of the Mongo DB
+Test API Authorization
+~~~~~~~~~~~~~~~~~~~~~~
+
+PUT/DELETE/POST operations of the testapi now require token-based authorization. The token needs
+to be added to the request in an 'X-Auth-Token' header in order to get access to the database.
+
+e.g.::
+ headers['X-Auth-Token']
+
+The value of the header, i.e. the token, can be accessed through the jenkins environment variable
+*TestApiToken*. The token value is added as a masked password.
+
+.. code-block:: python
+
+ headers['X-Auth-Token'] = os.environ.get('TestApiToken')
+
+The above example is in Python. Token-based authentication has been added so that only the
+jenkins jobs running on CI PODs have access to the database.
+
+Please note that currently token authorization is implemented but is not yet enabled.
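+
+For illustration only, a minimal sketch of pushing a result with the token is given below;
+the endpoint URL and payload are indicative and not the reference implementation:
+
+.. code-block:: python
+
+    import json
+    import os
+
+    import requests
+
+    # Assumed testapi results endpoint; adjust to the target deployment.
+    url = "http://testresults.opnfv.org/test/api/v1/results"
+    headers = {
+        'Content-Type': 'application/json',
+        # Token exposed by jenkins as a masked password.
+        'X-Auth-Token': os.environ.get('TestApiToken'),
+    }
+    # Minimal, hypothetical payload.
+    body = {'project_name': 'functest', 'case_name': 'vping_ssh'}
+    response = requests.post(url, data=json.dumps(body), headers=headers)
+    print(response.status_code)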
+
Automatic reporting
===================
@@ -960,6 +978,8 @@ _`[15]`: https://git.opnfv.org/cgit/releng/tree/utils/test/result_collection_api
_`[16]`: https://git.opnfv.org/cgit/releng/tree/utils/test/scripts/mongo_to_elasticsearch.py
+_`[17]`: http://artifacts.opnfv.org/releng/docs/testapi.html
+
OPNFV main site: http://www.opnfv.org
OPNFV functional test page: https://wiki.opnfv.org/opnfv_functional_testing
diff --git a/docs/internship/testapi_evolution/index.rst b/docs/internship/testapi_evolution/index.rst
index f2583e2f0..9cca9ebce 100644
--- a/docs/internship/testapi_evolution/index.rst
+++ b/docs/internship/testapi_evolution/index.rst
@@ -11,12 +11,17 @@ If not, see <http://creativecommons.org/licenses/by/4.0/>.
Test API evolution
==================
-Author: Rohit Sakala
+Author: Sakala Venkata Krishna Rohit
Mentors: S. Feng, J.Lausuch, M.Richomme
Abstract
========
+The testapi is used by all the OPNFV test projects to report results. It is also used to declare
+projects, test cases and labs. A major refactoring was done in Colorado with the introduction of
+swagger. The testapi is described in the Functest developer guide. The purpose of this project is
+to add features to the testapi that automate tasks which are currently done manually, along with
+a few other tasks that go beyond automation.
Version history
===============
@@ -25,46 +30,208 @@ Version history
| **Date** | **Ver.** | **Author** | **Comment** |
| | | | |
+------------+----------+------------------+------------------------+
-| 2016-??-?? | 0.0.1 | Morgan Richomme | Beginning of the |
+| 2016-11-14 | 0.0.1 | Morgan Richomme | Beginning of the |
| | | (Orange) | Internship |
+------------+----------+------------------+------------------------+
-
+| 2017-02-17 | 0.0.2 | S.V.K Rohit | End of the Internship |
+| | | (IIIT Hyderabad) | |
++------------+----------+------------------+------------------------+
Overview:
=========
-
-
+The internship time period was from Nov 14th to Feb 17th. The project proposal page is here `[1]`_.
+The intern project was assigned to Svk Rohit and was mentored by S. Feng, J.Lausuch, M.Richomme.
+The link to the patches submitted is `[2]`_. The internship was successfully completed and the
+documentation is as follows.
Problem Statement:
------------------
+The problem statement can be divided into the pending features that needed to be added to the
+testapi repo. The following were to be accomplished within the internship time frame.
+
+* **Add verification jenkins job for the testapi code**
+ The purpose of this job is to verify whether the unit tests are successful or not with the
+ inclusion of the patchset submitted.
+
+* **Automatic update of opnfv/testapi docker image**
+ The docker image of testapi is hosted in the opnfv docker hub. To ensure that the testapi image
+ is always in sync with the repository, an automatic update of the image is necessary, so a job
+ is triggered whenever a new patch gets merged.
+
+* **Automatic deployment of the testresults.opnfv.org/test/ website**
+ In the same manner as the docker image of testapi is updated, the testapi website needs to be
+ in sync with the repository code. So, a job has been added to the opnfv jenkins ci to update
+ the testresults website.
+* **Generate static documentation of testapi calls**
+ The purpose of this is to give a static/offline view of the testapi. If someone wants to have a
+ look at the Restful apis of the testapi, he/she does not need to go to the website; a single
+ html page can be downloaded and viewed anytime.
-Curation Phase
---------------
+* **Backup MongoDB of testapi**
+ The mongoDB needs to be backed up every week. Until now it was done manually, but thanks to this
+ internship, it is now automated using a jenkins job.
+* **Add token based authorization to the testapi calls**
+ The token-based authorization was implemented to ensure that only ci_pods could access the
+ database. Authentication has been added only to delete/put/post requests.
+Curation Phase:
+---------------
+The curation phase covered the first 3 to 4 weeks of the internship. This phase was used to get
+familiar with the testapi code and functionality and to propose solutions/tools for the tasks
+mentioned above. Swagger codegen was chosen out of the four tools proposed `[3]`_ for generating
+the static documentation.
+Also, a certain amount of time was spent on the script flow of the jenkins jobs. The automatic
+deployment task involves accessing a remote server from inside the jenkins build, and the
+deployment had to be done only after the docker image update. To satisfy these constraints, a
+multijob jenkins job was chosen instead of a freestyle job.
+
+Important Links:
+----------------
+
+* MongoDB Backup Link - `[4]`_
+* Static Documentation - `[5]`_
+* TestAPI Token addition to ci_pods - `[6]`_
Schedule:
=========
-
+The progress and completion of the tasks are described in the table below.
+--------------------------+------------------------------------------+
| **Date** | **Comment** |
| | |
+--------------------------+------------------------------------------+
-| December - January | ........ |
+| Nov 14th - Dec 31st | Understand Testapi code and the |
+| | requirements. |
++--------------------------+------------------------------------------+
+| Jan 1st - Jan 7th | Add jenkins job to create static |
+| | documentation and write build scripts. |
++--------------------------+------------------------------------------+
+| Jan 8th - Jan 21st | Add verification jenkins job for unit |
+| | tests. |
++--------------------------+------------------------------------------+
+| Jan 22nd - Jan 28th | Add jenkins job for mongodb backup |
+| | |
++--------------------------+------------------------------------------+
+| Jan 29th - Feb 11th | Enable automatic deployment of |
+| | testresults.opnfv.org/test/ |
+--------------------------+------------------------------------------+
-| January - february | ........ |
+| Feb 12th - Feb 17th | Add token based authentication |
+| | |
+--------------------------+------------------------------------------+
+FAQs
+=====
+
+This section lists the problems that I faced and the understanding that I acquired during the
+internship. It may help other developers in solving any errors caused by the code written as
+part of this internship.
+
+
+Test API
+--------
+
+What is the difference between defining data_files as "/etc/.." and "etc/.." in setup.cfg?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If in the setup.cfg, it is defined as::
+
+    [files]
+    data_files =
+        etc/a.conf = etc/a.conf.sample
+
+then it ends up installed under /usr/etc/ (i.e. relative to the prefix), and with this
+configuration it is installed correctly within a venv. But when it is defined as::
+
+    [files]
+    data_files =
+        /etc/a.conf = etc/a.conf.sample
+
+then it ends up installed on the root of the filesystem instead of being properly installed
+within the venv.
+
+Which attribute does swagger-codegen use as the title when generating the documentation ?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+It uses the nickname of the api call in swagger as the title of the generated document.
+
+Does swagger-codegen take more than one yaml file as input ?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+No, swagger-codegen only takes one yaml file as input to its jar file. If there is more than one
+yaml file, one needs to merge them and give the result as input, keeping in mind the swagger specs.
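+
+For reference, an indicative invocation to generate the static HTML documentation from a single
+merged yaml file (file names and output path are examples only)::
+
+    java -jar swagger-codegen-cli.jar generate -i testapi.yaml -l html -o ./testapi-docs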
+
+
+Jenkins & JJB
+-------------
+
+Which scm macro is used for verification jenkins jobs ?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+There are two macros for scm: one is git-scm and the other is git-scm-gerrit. git-scm-gerrit is
+used for verification jenkins jobs.
+
+Does the virtualenv created in one build script exist in other build scripts too ?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+No, the virtualenv created in one build script only exists in that build script/shell.
+
+What parameters are needed for the scm macros ?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Project and Branch are the two parameters needed for scm macros.
+
+What is the directory inside the jenkins build ?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The working directory of the jenkins build is the directory of the cloned repo. The `ls $WORKSPACE`
+command will list all the contents of that directory.
+
+How to include a bash script in jenkins job yaml file ?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+An example might be apt here as an answer::
+
+    builders:
+        - shell:
+            !include-raw: include-raw001-hello-world.sh
+
+
+How do you make a build server run on a specific machine ?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+It can be done by defining a label parameter 'SLAVE_LABEL', or, in OPNFV, there are parameter
+macros for each server that one can use instead, e.g. opnfv-build-defaults. Note that if a macro
+is used, there is no need to define GIT_BASE because the macro already defines it, but if one
+uses SLAVE_LABEL, one also needs to define a GIT_BASE parameter.
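+
+An indicative fragment of the job parameters (label and defaults are examples only)::
+
+    parameters:
+        - label:
+            name: SLAVE_LABEL
+            default: 'testresults'
+        - string:
+            name: GIT_BASE
+            default: 'https://gerrit.opnfv.org/gerrit/$PROJECT'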
+
+Which job style should be used when one build should trigger other builds or when different build scripts need to run on different machines ?
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+MultiJob style should be used, as it has phases where each phase can be taken as a build script
+and can have its own parameters, through which one can define the SLAVE_LABEL parameter.
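+
+An indicative multijob skeleton (job and phase names are examples only)::
+
+    - job-template:
+        name: 'example-automate-multijob'
+        project-type: multijob
+        builders:
+            - multijob:
+                name: docker-update
+                condition: SUCCESSFUL
+                projects:
+                    - name: 'example-docker-update-job'
+                      current-parameters: true
+            - multijob:
+                name: deploy
+                condition: SUCCESSFUL
+                projects:
+                    - name: 'example-deployment-job'
+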
References:
===========
-.. _`[1]` : https://wiki.opnfv.org/display/DEV/Intern+Project%3A+testapi+evolution
+_`[1]` : https://wiki.opnfv.org/display/DEV/Intern+Project%3A+testapi+evolution
+
+_`[2]` : https://gerrit.opnfv.org/gerrit/#/q/status:merged+owner:%22Rohit+Sakala+%253Crohitsakala%2540gmail.com%253E%22
+
+_`[3]` : https://docs.google.com/document/d/1jWwVZ1ZpKgKcOS_zSz2KzX1nwg4BXxzBxcwkesl7krw/edit?usp=sharing
+
+_`[4]` : http://artifacts.opnfv.org/testapibackup.html
+
+_`[5]` : http://artifacts.opnfv.org/releng/docs/testapi.html
+
+_`[6]` : http://artifacts.opnfv.org/functest/review/26047/devguide/index.html#test-api-authorization
diff --git a/functest/ci/config_functest.yaml b/functest/ci/config_functest.yaml
index 8fa4bd342..489c395ff 100755
--- a/functest/ci/config_functest.yaml
+++ b/functest/ci/config_functest.yaml
@@ -21,6 +21,7 @@ general:
dir_repo_onos: /home/opnfv/repos/onos
repo_promise: /home/opnfv/repos/promise
repo_netready: /home/opnfv/repos/netready
+ repo_barometer: /home/opnfv/repos/barometer
repo_doctor: /home/opnfv/repos/doctor
repo_copper: /home/opnfv/repos/copper
dir_repo_ovno: /home/opnfv/repos/ovno
@@ -28,6 +29,7 @@ general:
repo_domino: /home/opnfv/repos/domino
repo_snaps: /home/opnfv/repos/snaps
repo_securityscan: /home/opnfv/repos/securityscanning
+ repo_vrouter: /home/opnfv/repos/vrouter
functest: /home/opnfv/functest
functest_test: /home/opnfv/repos/functest/functest/opnfv_tests
results: /home/opnfv/functest/results
@@ -44,6 +46,8 @@ general:
image_name: Cirros-0.3.4
image_file_name: cirros-0.3.4-x86_64-disk.img
image_disk_format: qcow2
+ image_username: cirros
+ image_password: cubswin:)
flavor_name: opnfv_flavor
flavor_ram: 512
@@ -130,6 +134,7 @@ vnf:
orchestra_ims:
tenant_name: orchestra_ims
tenant_description: ims deployed with openbaton
+ config: orchestra_ims.yaml
opera_ims:
tenant_name: opera_ims
tenant_description: ims deployed with open-o
diff --git a/functest/ci/installer_params.yaml b/functest/ci/installer_params.yaml
new file mode 100644
index 000000000..26aff9bb9
--- /dev/null
+++ b/functest/ci/installer_params.yaml
@@ -0,0 +1,16 @@
+apex:
+ ip: ''
+ user: 'stack'
+ pkey: '/root/.ssh/id_rsa'
+# compass:
+# ip: '192.168.200.2'
+# user: 'root'
+# password: 'root'
+fuel:
+ ip: '10.20.0.2'
+ user: 'root'
+ password: 'r00tme'
+# joid:
+# ip: ''
+# user: ''
+# password: ''
diff --git a/functest/ci/prepare_env.py b/functest/ci/prepare_env.py
index 6b24fe086..5a9f99cb6 100755
--- a/functest/ci/prepare_env.py
+++ b/functest/ci/prepare_env.py
@@ -1,18 +1,11 @@
#!/usr/bin/env python
#
-# Author: Jose Lausuch (jose.lausuch@ericsson.com)
-#
-# Installs the Functest framework within the Docker container
-# and run the tests automatically
-#
-#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
#
-
import argparse
import json
import os
@@ -21,13 +14,15 @@ import subprocess
import sys
import yaml
-from opnfv.utils import constants as opnfv_constants
import functest.utils.functest_logger as ft_logger
import functest.utils.functest_utils as ft_utils
import functest.utils.openstack_utils as os_utils
from functest.utils.constants import CONST
+from opnfv.utils import constants as opnfv_constants
+from opnfv.deployment import factory
+
actions = ['start', 'check']
""" logging configuration """
@@ -42,7 +37,7 @@ with open(CONFIG_PATCH_PATH) as f:
functest_patch_yaml = yaml.safe_load(f)
-class PrepareEnvParser():
+class PrepareEnvParser(object):
def __init__(self):
self.parser = argparse.ArgumentParser()
@@ -222,20 +217,19 @@ def install_rally():
"Deployment %s does not exist."
% CONST.rally_deployment_name),
verbose=False)
+
rally_conf = os_utils.get_credentials_for_rally()
with open('rally_conf.json', 'w') as fp:
json.dump(rally_conf, fp)
cmd = ("rally deployment create "
- "--file=rally_conf.json --name={}"
+ "--file=rally_conf.json --name={0}"
.format(CONST.rally_deployment_name))
- ft_utils.execute_command(cmd,
- error_msg=("Problem while creating "
- "Rally deployment"))
+ error_msg = "Problem while creating Rally deployment"
+ ft_utils.execute_command_raise(cmd, error_msg=error_msg)
cmd = "rally deployment check"
- ft_utils.execute_command(cmd,
- error_msg=("OpenStack not responding or "
- "faulty Rally deployment."))
+ error_msg = "OpenStack not responding or faulty Rally deployment."
+ ft_utils.execute_command_raise(cmd, error_msg=error_msg)
cmd = "rally deployment list"
ft_utils.execute_command(cmd,
@@ -250,19 +244,30 @@ def install_rally():
def install_tempest():
logger.info("Installing tempest from existing repo...")
- cmd = ("rally verify create-verifier --source {0} "
- "--name {1} --type tempest"
- .format(CONST.dir_repo_tempest, CONST.tempest_deployment_name))
- ft_utils.execute_command(cmd,
- error_msg="Problem while installing Tempest.")
+ cmd = ("rally verify list-verifiers | "
+ "grep '{0}' | wc -l".format(CONST.tempest_deployment_name))
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
+ while p.poll() is None:
+ line = p.stdout.readline().rstrip()
+ if str(line) == '0':
+ logger.debug("Tempest %s does not exist" %
+ CONST.tempest_deployment_name)
+ cmd = ("rally verify create-verifier --source {0} "
+ "--name {1} --type tempest"
+ .format(CONST.dir_repo_tempest,
+ CONST.tempest_deployment_name))
+ error_msg = "Problem while installing Tempest."
+ ft_utils.execute_command_raise(cmd, error_msg=error_msg)
def create_flavor():
- os_utils.get_or_create_flavor('m1.tiny',
- '512',
- '1',
- '1',
- public=True)
+ _, flavor_id = os_utils.get_or_create_flavor('m1.tiny',
+ '512',
+ '1',
+ '1',
+ public=True)
+ if flavor_id is None:
+ raise Exception('Failed to create flavor')
def check_environment():
@@ -278,6 +283,37 @@ def check_environment():
logger.info("Functest environment is installed.")
+def print_deployment_info():
+ installer_params_yaml = os.path.join(CONST.dir_repo_functest,
+ 'functest/ci/installer_params.yaml')
+ if (CONST.INSTALLER_IP and CONST.INSTALLER_TYPE and
+ CONST.INSTALLER_TYPE in opnfv_constants.INSTALLERS):
+ try:
+ installer_params = ft_utils.get_parameter_from_yaml(
+ CONST.INSTALLER_TYPE, installer_params_yaml)
+ except ValueError as e:
+ logger.debug('Printing deployment info is not supported for %s' %
+ CONST.INSTALLER_TYPE)
+ logger.debug(e)
+ else:
+ user = installer_params.get('user', None)
+ password = installer_params.get('password', None)
+ pkey = installer_params.get('pkey', None)
+
+ try:
+ handler = factory.Factory.get_handler(
+ installer=CONST.INSTALLER_TYPE,
+ installer_ip=CONST.INSTALLER_IP,
+ installer_user=user,
+ installer_pwd=password,
+ pkey_file=pkey)
+ if handler:
+ logger.info('\n\nDeployment information:\n%s' %
+ handler.get_deployment_info())
+ except Exception as e:
+ logger.debug("Cannot get deployment information. %s" % e)
+
+
def main(**kwargs):
try:
if not (kwargs['action'] in actions):
@@ -296,6 +332,7 @@ def main(**kwargs):
with open(CONST.env_active, "w") as env_file:
env_file.write("1")
check_environment()
+ print_deployment_info()
elif kwargs['action'] == "check":
check_environment()
except Exception as e:
diff --git a/functest/ci/run_tests.py b/functest/ci/run_tests.py
index 93518de0b..f920e70d8 100755
--- a/functest/ci/run_tests.py
+++ b/functest/ci/run_tests.py
@@ -48,7 +48,7 @@ class BlockingTestFailed(Exception):
pass
-class RunTestsParser():
+class RunTestsParser(object):
def __init__(self):
self.parser = argparse.ArgumentParser()
diff --git a/functest/ci/testcases.yaml b/functest/ci/testcases.yaml
index b1d824f6c..be12ac76c 100755
--- a/functest/ci/testcases.yaml
+++ b/functest/ci/testcases.yaml
@@ -8,26 +8,14 @@ tiers:
operations in the VIM.
testcases:
-
- name: healthcheck
- criteria: 'status == "PASS"'
- blocking: true
- description: >-
- This test case verifies the basic OpenStack services like
- Keystone, Glance, Cinder, Neutron and Nova.
-
- dependencies:
- installer: ''
- scenario: '^((?!lxd).)*$'
- -
name: snaps_health_check
criteria: 'status == "PASS"'
- blocking: false
+ blocking: true
description: >-
This test case creates executes the SimpleHealthCheck
Python test class which creates an, image, flavor, network,
and Cirros VM instance and observes the console output to
validate the single port obtains the correct IP address.
-
dependencies:
installer: ''
scenario: '^((?!lxd).)*$'
@@ -348,41 +336,68 @@ tiers:
run:
module: 'functest.opnfv_tests.features.netready'
class: 'GluonVping'
+ -
+ name: barometer
+ criteria: 'status == "PASS"'
+ blocking: false
+ description: >-
+ Test suite for the Barometer project. Separate tests verify the
+ proper configuration and functionality of the following
+ collectd plugins Ceilometer, Hugepages, Memory RAS (mcelog),
+ and OVS Events
+ dependencies:
+ installer: 'fuel'
+ scenario: 'kvm_ovs_dpdk_bar'
+ run:
+ module: 'functest.opnfv_tests.features.barometer'
+ class: 'BarometerCollectd'
-
name: components
order: 3
- ci_loop: 'weekly'
+ ci_loop: 'daily'
description : >-
Extensive testing of OpenStack API.
testcases:
+# -
+# name: tempest_full_parallel
+# criteria: 'success_rate >= 80%'
+# blocking: false
+# description: >-
+# The list of test cases is generated by
+# Tempest automatically and depends on the parameters of
+# the OpenStack deplopyment.
+# dependencies:
+# installer: '^((?!netvirt).)*$'
+# scenario: ''
+# run:
+# module: 'functest.opnfv_tests.openstack.tempest.tempest'
+# class: 'TempestFullParallel'
-
- name: tempest_full_parallel
- criteria: 'success_rate >= 80%'
+ name: tempest_defcore
+ criteria: 'success_rate == 100%'
blocking: false
description: >-
- The list of test cases is generated by
- Tempest automatically and depends on the parameters of
- the OpenStack deplopyment.
+ This is the set of Tempest test cases created by OpenStack
+ Interop Working Group for certification purposes.
dependencies:
- installer: '^((?!netvirt).)*$'
- scenario: ''
+ installer: ''
+ scenario: 'nosdn-nofeature-ha'
run:
module: 'functest.opnfv_tests.openstack.tempest.tempest'
- class: 'TempestFullParallel'
-
- -
- name: rally_full
- criteria: 'success_rate >= 90%'
- blocking: false
- description: >-
- This test case runs the full suite of scenarios of the OpenStack
- Rally suite using several threads and iterations.
- dependencies:
- installer: '^((?!netvirt).)*$'
- scenario: ''
- run:
- module: 'functest.opnfv_tests.openstack.rally.rally'
- class: 'RallyFull'
+ class: 'TempestDefcore'
+# -
+# name: rally_full
+# criteria: 'success_rate >= 90%'
+# blocking: false
+# description: >-
+# This test case runs the full suite of scenarios of the OpenStack
+# Rally suite using several threads and iterations.
+# dependencies:
+# installer: '^((?!netvirt).)*$'
+# scenario: ''
+# run:
+# module: 'functest.opnfv_tests.openstack.rally.rally'
+# class: 'RallyFull'
-
name: vnf
@@ -437,8 +452,8 @@ tiers:
description: >-
VNF deployment with OpenBaton (Orchestra)
dependencies:
- installer: 'unknown'
- scenario: 'unknown'
+ installer: ''
+ scenario: ''
run:
module: 'functest.opnfv_tests.vnf.ims.orchestra_ims'
class: 'ImsVnf'
@@ -455,3 +470,16 @@ tiers:
run:
module: 'functest.opnfv_tests.vnf.ims.opera_ims'
class: 'ImsVnf'
+
+ -
+ name: vyos_vrouter
+ criteria: 'status == "PASS"'
+ blocking: false
+ description: >-
+ This test case is vRouter testing.
+ dependencies:
+ installer: 'fuel'
+ scenario: 'nosdn-nofeature'
+ run:
+ module: 'functest.opnfv_tests.vnf.router.vyos_vrouter'
+ class: 'VrouterVnf'
diff --git a/functest/ci/tier_builder.py b/functest/ci/tier_builder.py
index e1c3e49e6..dae7c73e8 100755
--- a/functest/ci/tier_builder.py
+++ b/functest/ci/tier_builder.py
@@ -11,7 +11,7 @@ import tier_handler as th
import yaml
-class TierBuilder:
+class TierBuilder(object):
def __init__(self, ci_installer, ci_scenario, testcases_file):
self.ci_installer = ci_installer
diff --git a/functest/ci/tier_handler.py b/functest/ci/tier_handler.py
index 1eadfba50..127986bf3 100755
--- a/functest/ci/tier_handler.py
+++ b/functest/ci/tier_handler.py
@@ -28,7 +28,7 @@ def split_text(text, max_len):
return lines
-class Tier:
+class Tier(object):
def __init__(self, name, order, ci_loop, description=""):
self.tests_array = []
@@ -102,7 +102,7 @@ class Tier:
return out
-class TestCase:
+class TestCase(object):
def __init__(self, name, dependency, criteria, blocking, description=""):
self.name = name
@@ -160,7 +160,7 @@ class TestCase:
return out
-class Dependency:
+class Dependency(object):
def __init__(self, installer, scenario):
self.installer = installer
diff --git a/functest/cli/commands/cli_env.py b/functest/cli/commands/cli_env.py
index 9423631bf..14ad01bfc 100644
--- a/functest/cli/commands/cli_env.py
+++ b/functest/cli/commands/cli_env.py
@@ -16,7 +16,7 @@ from functest.utils.constants import CONST
import functest.utils.functest_utils as ft_utils
-class CliEnv:
+class CliEnv(object):
def __init__(self):
pass
diff --git a/functest/cli/commands/cli_os.py b/functest/cli/commands/cli_os.py
index aeb34974f..f85f4041f 100644
--- a/functest/cli/commands/cli_os.py
+++ b/functest/cli/commands/cli_os.py
@@ -18,7 +18,7 @@ import functest.utils.openstack_clean as os_clean
import functest.utils.openstack_snapshot as os_snapshot
-class CliOpenStack:
+class CliOpenStack(object):
def __init__(self):
self.os_auth_url = CONST.OS_AUTH_URL
diff --git a/functest/cli/commands/cli_testcase.py b/functest/cli/commands/cli_testcase.py
index b6566245a..6644a0c29 100644
--- a/functest/cli/commands/cli_testcase.py
+++ b/functest/cli/commands/cli_testcase.py
@@ -19,7 +19,7 @@ import functest.utils.functest_utils as ft_utils
import functest.utils.functest_vacation as vacation
-class CliTestcase:
+class CliTestcase(object):
def __init__(self):
self.tiers = tb.TierBuilder(CONST.INSTALLER_TYPE,
diff --git a/functest/cli/commands/cli_tier.py b/functest/cli/commands/cli_tier.py
index b9d25b6d0..012b11d0e 100644
--- a/functest/cli/commands/cli_tier.py
+++ b/functest/cli/commands/cli_tier.py
@@ -18,7 +18,7 @@ from functest.utils.constants import CONST
import functest.utils.functest_utils as ft_utils
-class CliTier:
+class CliTier(object):
def __init__(self):
self.tiers = tb.TierBuilder(CONST.INSTALLER_TYPE,
diff --git a/functest/core/feature_base.py b/functest/core/feature_base.py
index fe9a99989..2bd1ec83d 100644
--- a/functest/core/feature_base.py
+++ b/functest/core/feature_base.py
@@ -7,6 +7,7 @@ from functest.utils.constants import CONST
class FeatureBase(base.TestcaseBase):
+
def __init__(self, project='functest', case='', repo='', cmd=''):
super(FeatureBase, self).__init__()
self.project_name = project
@@ -19,7 +20,7 @@ class FeatureBase(base.TestcaseBase):
def run(self, **kwargs):
self.prepare()
self.start_time = time.time()
- ret = ft_utils.execute_command(self.cmd, output_file=self.result_file)
+ ret = self.execute()
self.stop_time = time.time()
self.post()
self.parse_results(ret)
@@ -27,6 +28,13 @@ class FeatureBase(base.TestcaseBase):
self.logger.info("Test result is stored in '%s'" % self.result_file)
return base.TestcaseBase.EX_OK
+ def execute(self):
+ '''
+ Executer method that can be overwritten
+ By default it executes a shell command.
+ '''
+ return ft_utils.execute_command(self.cmd, output_file=self.result_file)
+
def prepare(self, **kwargs):
pass
diff --git a/functest/core/vnf_base.py b/functest/core/vnf_base.py
index 07b64fd05..9438dca10 100644
--- a/functest/core/vnf_base.py
+++ b/functest/core/vnf_base.py
@@ -111,9 +111,9 @@ class VnfOnBoardingBase(base.TestcaseBase):
self.keystone_client = os_utils.get_keystone_client()
self.logger.info("Prepare OpenStack plateform(create tenant and user)")
- user_id = os_utils.get_user_id(self.keystone_client,
- self.creds['username'])
- if user_id == '':
+ admin_user_id = os_utils.get_user_id(self.keystone_client,
+ self.creds['username'])
+ if admin_user_id == '':
self.step_failure("Failed to get id of " +
self.creds['username'])
@@ -133,7 +133,7 @@ class VnfOnBoardingBase(base.TestcaseBase):
self.logger.error("Failed to get id for %s role" % role_name)
self.step_failure("Failed to get role id of " + role_name)
- if not os_utils.add_role_user(self.keystone_client, user_id,
+ if not os_utils.add_role_user(self.keystone_client, admin_user_id,
role_id, tenant_id):
self.logger.error("Failed to add %s on tenant" %
self.creds['username'])
@@ -149,6 +149,13 @@ class VnfOnBoardingBase(base.TestcaseBase):
self.logger.error("Failed to create %s user" % self.tenant_name)
self.step_failure("Failed to create user ")
+ if not os_utils.add_role_user(self.keystone_client, user_id,
+ role_id, tenant_id):
+ self.logger.error("Failed to add %s on tenant" %
+ self.tenant_name)
+ self.step_failure("Failed to add %s on tenant" %
+ self.tenant_name)
+
self.logger.info("Update OpenStack creds informations")
self.admin_creds = self.creds.copy()
self.admin_creds.update({
diff --git a/functest/opnfv_tests/features/barometer.py b/functest/opnfv_tests/features/barometer.py
new file mode 100644
index 000000000..aec2bce5d
--- /dev/null
+++ b/functest/opnfv_tests/features/barometer.py
@@ -0,0 +1,28 @@
+#!/usr/bin/python
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+
+
+import functest.core.feature_base as base
+import functest.utils.functest_logger as ft_logger
+
+from baro_tests import collectd
+
+
+class BarometerCollectd(base.FeatureBase):
+ '''
+ Class for executing barometercollectd testcase.
+ '''
+
+ def __init__(self):
+ super(BarometerCollectd, self).__init__(project='barometer',
+ case='barometercollectd',
+ repo='dir_repo_barometer')
+ self.logger = ft_logger.Logger("BarometerCollectd").getLogger()
+
+ def execute(self):
+ return collectd.main(self.logger)
diff --git a/functest/opnfv_tests/openstack/tempest/custom_tests/defcore_req.txt b/functest/opnfv_tests/openstack/tempest/custom_tests/defcore_req.txt
index bb1d172df..1456db877 100644
--- a/functest/opnfv_tests/openstack/tempest/custom_tests/defcore_req.txt
+++ b/functest/opnfv_tests/openstack/tempest/custom_tests/defcore_req.txt
@@ -1,35 +1,19 @@
-# Set of DefCore tempest test cases (see http://www.openstack.org/brand/interop)
-# This approved version (2016.01) is valid for Juno, Kilo, and Liberty releases of OpenStack
-# The list is stored at http://git.openstack.org/cgit/openstack/defcore/plain/2016.01/2016.01.required.txt
-tempest.api.compute.images.test_images.ImagesTestJSON.test_delete_saving_image[id-aa06b52b-2db5-4807-b218-9441f75d74e3]
+# Set of DefCore tempest test cases not flagged and required. It only contains OpenStack core (no object storage)
+# The approved guidelines (2016.08) are valid for Kilo, Liberty, Mitaka and Newton releases of OpenStack
+# The list can be generated using the Rest API from RefStack project:
+# https://refstack.openstack.org/api/v1/guidelines/2016.08/tests?target=compute&type=required&alias=true&flag=false
tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_delete_image[id-3731d080-d4c5-4872-b41a-64d0d0021314]
tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_image_specify_multibyte_character_image_name[id-3b7c6fe4-dfe7-477c-9243-b06359db51e6]
-tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_filter_by_changes_since[id-18bac3ae-da27-436c-92a9-b22474d13aab]
-tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_filter_by_name[id-33163b73-79f5-4d07-a7ea-9213bcc468ff]
-tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_filter_by_server_id[id-9f238683-c763-45aa-b848-232ec3ce3105]
-tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_filter_by_server_ref[id-05a377b8-28cf-4734-a1e6-2ab5c38bf606]
-tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_filter_by_status[id-a3f5b513-aeb3-42a9-b18e-f091ef73254d]
-tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_filter_by_type[id-e3356918-4d3e-4756-81d5-abc4524ba29f]
-tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_limit_results[id-3a484ca9-67ba-451e-b494-7fcf28d32d62]
-tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_with_detail_filter_by_changes_since[id-7d439e18-ac2e-4827-b049-7e18004712c4]
-tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_with_detail_filter_by_name[id-644ea267-9bd9-4f3b-af9f-dffa02396a17]
-tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_with_detail_filter_by_server_ref[id-8c78f822-203b-4bf6-8bba-56ebd551cf84]
-tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_with_detail_filter_by_status[id-9b0ea018-6185-4f71-948a-a123a107988e]
-tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_with_detail_filter_by_type[id-888c0cc0-7223-43c5-9db0-b125fd0a393b]
-tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_with_detail_limit_results[id-ba2fa9a9-b672-47cc-b354-3b4c0600e2cb]
-tempest.api.compute.images.test_list_images.ListImagesTestJSON.test_get_image[id-490d0898-e12a-463f-aef0-c50156b9f789]
-tempest.api.compute.images.test_list_images.ListImagesTestJSON.test_list_images[id-fd51b7f4-d4a3-4331-9885-866658112a6f]
-tempest.api.compute.images.test_list_images.ListImagesTestJSON.test_list_images_with_detail[id-9f94cb6b-7f10-48c5-b911-a0b84d7d4cd6]
tempest.api.compute.servers.test_create_server.ServersTestJSON.test_host_name_is_same_as_server_name[id-ac1ad47f-984b-4441-9274-c9079b7a0666]
-tempest.api.compute.servers.test_create_server.ServersTestJSON.test_list_servers[id-9a438d88-10c6-4bcd-8b5b-5b6e25e1346f,smoke]
+tempest.api.compute.servers.test_create_server.ServersTestJSON.test_list_servers[id-9a438d88-10c6-4bcd-8b5b-5b6e25e1346f]
tempest.api.compute.servers.test_create_server.ServersTestJSON.test_list_servers_with_detail[id-585e934c-448e-43c4-acbf-d06a9b899997]
tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_created_server_vcpus[id-cbc0f52f-05aa-492b-bdc1-84b575ca294b]
-tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_server_details[id-5de47127-9977-400a-936f-abcfbec1218f,smoke]
+tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_server_details[id-5de47127-9977-400a-936f-abcfbec1218f]
tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_host_name_is_same_as_server_name[id-ac1ad47f-984b-4441-9274-c9079b7a0666]
-tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_list_servers[id-9a438d88-10c6-4bcd-8b5b-5b6e25e1346f,smoke]
+tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_list_servers[id-9a438d88-10c6-4bcd-8b5b-5b6e25e1346f]
tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_list_servers_with_detail[id-585e934c-448e-43c4-acbf-d06a9b899997]
tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_verify_created_server_vcpus[id-cbc0f52f-05aa-492b-bdc1-84b575ca294b]
-tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_verify_server_details[id-5de47127-9977-400a-936f-abcfbec1218f,smoke]
+tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_verify_server_details[id-5de47127-9977-400a-936f-abcfbec1218f]
tempest.api.compute.servers.test_instance_actions.InstanceActionsTestJSON.test_get_instance_action[id-aacc71ca-1d70-4aa5-bbf6-0ff71470e43c]
tempest.api.compute.servers.test_instance_actions.InstanceActionsTestJSON.test_list_instance_actions[id-77ca5cc5-9990-45e0-ab98-1de8fead201a]
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_filter_by_flavor[id-80c574cc-0925-44ba-8602-299028357dd9]
@@ -37,31 +21,28 @@ tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.t
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_filter_by_server_name[id-f9eb2b70-735f-416c-b260-9914ac6181e4]
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_filter_by_server_status[id-de2612ab-b7dd-4044-b0b1-d2539601911f]
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_limit_results[id-67aec2d0-35fe-4503-9f92-f13272b867ed]
+tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_active_status[id-ca78e20e-fddb-4ce6-b7f7-bcbf8605e66e]
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_flavor[id-573637f5-7325-47bb-9144-3476d0416908]
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_image[id-05e8a8e7-9659-459a-989d-92c2f501f4ba]
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_limit[id-614cdfc1-d557-4bac-915b-3e67b48eee76]
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_server_name[id-9b067a7b-7fee-4f6a-b29c-be43fe18fc5a]
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_server_status[id-ca78e20e-fddb-4ce6-b7f7-bcbf8605e66e]
-tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filtered_by_ip[id-43a1242e-7b31-48d1-88f2-3f72aa9f2077]
-tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filtered_by_ip_regex[id-a905e287-c35e-42f2-b132-d02b09f3654a]
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filtered_by_name_wildcard[id-e9f624ee-92af-4562-8bec-437945a18dcb]
-tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_changes_since_future_date[id-74745ad8-b346-45b5-b9b8-509d7447fc1f,negative]
-tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_changes_since_invalid_date[id-87d12517-e20a-4c9c-97b6-dd1628d6d6c9,negative]
+tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_changes_since_future_date[id-74745ad8-b346-45b5-b9b8-509d7447fc1f]
+tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_changes_since_invalid_date[id-87d12517-e20a-4c9c-97b6-dd1628d6d6c9]
tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_limits[id-12c80a9f-2dec-480e-882b-98ba15757659]
-tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_limits_greater_than_actual_count[id-d47c17fb-eebd-4287-8e95-f20a7e627b18,negative]
-tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_limits_pass_negative_value[id-62610dd9-4713-4ee0-8beb-fd2c1aa7f950,negative]
-tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_limits_pass_string[id-679bc053-5e70-4514-9800-3dfab1a380a6,negative]
-tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_non_existing_flavor[id-5913660b-223b-44d4-a651-a0fbfd44ca75,negative]
-tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_non_existing_image[id-ff01387d-c7ad-47b4-ae9e-64fa214638fe,negative]
-tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_non_existing_server_name[id-e2c77c4a-000a-4af3-a0bd-629a328bde7c,negative]
-tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_detail_server_is_deleted[id-93055106-2d34-46fe-af68-d9ddbf7ee570,negative]
-tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_status_non_existing[id-fcdf192d-0f74-4d89-911f-1ec002b822c4,negative]
-tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_with_a_deleted_server[id-24a26f1a-1ddc-4eea-b0d7-a90cc874ad8f,negative]
+tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_limits_greater_than_actual_count[id-d47c17fb-eebd-4287-8e95-f20a7e627b18]
+tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_limits_pass_negative_value[id-62610dd9-4713-4ee0-8beb-fd2c1aa7f950]
+tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_limits_pass_string[id-679bc053-5e70-4514-9800-3dfab1a380a6]
+tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_non_existing_flavor[id-5913660b-223b-44d4-a651-a0fbfd44ca75]
+tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_non_existing_image[id-ff01387d-c7ad-47b4-ae9e-64fa214638fe]
+tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_non_existing_server_name[id-e2c77c4a-000a-4af3-a0bd-629a328bde7c]
+tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_detail_server_is_deleted[id-93055106-2d34-46fe-af68-d9ddbf7ee570]
+tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_status_non_existing[id-fcdf192d-0f74-4d89-911f-1ec002b822c4]
+tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_with_a_deleted_server[id-24a26f1a-1ddc-4eea-b0d7-a90cc874ad8f]
tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_lock_unlock_server[id-80a8094c-211e-440a-ab88-9e59d556c7ee]
-tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_reboot_server_hard[id-2cb1baf6-ac8d-4429-bf0d-ba8a0ba53e32,smoke]
+tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_reboot_server_hard[id-2cb1baf6-ac8d-4429-bf0d-ba8a0ba53e32]
tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_rebuild_server[id-aaa6cdf3-55a7-461a-add9-1c8596b9a07c]
-tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_resize_server_confirm[id-1499262a-9328-4eda-9068-db1ac57498d2]
-tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_resize_server_revert[id-c03aab19-adb1-44f5-917d-c419577e9e68]
tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_stop_start_server[id-af8eafd4-38a7-4a4b-bdbc-75145a580560]
tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_delete_server_metadata_item[id-127642d6-4c7b-4486-b7cd-07265a378658]
tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_get_server_metadata_item[id-3043c57d-7e0e-49a6-9a96-ad569c265e6a]
@@ -74,49 +55,194 @@ tempest.api.compute.servers.test_servers.ServersTestJSON.test_create_specify_key
tempest.api.compute.servers.test_servers.ServersTestJSON.test_create_with_existing_server_name[id-8fea6be7-065e-47cf-89b8-496e6f96c699]
tempest.api.compute.servers.test_servers.ServersTestJSON.test_update_access_server_address[id-89b90870-bc13-4b73-96af-f9d4f2b70077]
tempest.api.compute.servers.test_servers.ServersTestJSON.test_update_server_name[id-5e6ccff8-349d-4852-a8b3-055df7988dd2]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_numeric_server_name[id-fd57f159-68d6-4c2a-902b-03070828a87e,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_server_metadata_exceeds_length_limit[id-7fc74810-0bd2-4cd7-8244-4f33a9db865a,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_server_name_length_exceeds_256[id-c3e0fb12-07fc-4d76-a22e-37409887afe8,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_with_invalid_flavor[id-18f5227f-d155-4429-807c-ccb103887537,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_with_invalid_image[id-fcba1052-0a50-4cf3-b1ac-fae241edf02f,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_with_invalid_network_uuid[id-4e72dc2d-44c5-4336-9667-f7972e95c402,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_delete_server_pass_id_exceeding_length_limit[id-f4d7279b-5fd2-4bf2-9ba4-ae35df0d18c5,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_delete_server_pass_negative_id[id-75f79124-277c-45e6-a373-a1d6803f4cc4,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_get_non_existent_server[id-3436b02f-1b1e-4f03-881e-c6a602327439,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_invalid_ip_v6_address[id-5226dd80-1e9c-4d8a-b5f9-b26ca4763fd0,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_reboot_non_existent_server[id-d4c023a0-9c55-4747-9dd5-413b820143c7,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_rebuild_deleted_server[id-98fa0458-1485-440f-873b-fe7f0d714930,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_rebuild_non_existent_server[id-d86141a7-906e-4731-b187-d64a2ea61422,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_resize_server_with_non_existent_flavor[id-ced1a1d7-2ab6-45c9-b90f-b27d87b30efd,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_resize_server_with_null_flavor[id-45436a7d-a388-4a35-a9d8-3adc5d0d940b,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_server_name_blank[id-dbbfd247-c40c-449e-8f6c-d2aa7c7da7cf,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_stop_non_existent_server[id-a31460a9-49e1-42aa-82ee-06e0bb7c2d03,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_update_name_of_non_existent_server[id-aa8eed43-e2cb-4ebf-930b-da14f6a21d81,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_update_server_name_length_exceeds_256[id-5c8e244c-dada-4590-9944-749c455b431f,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_update_server_set_empty_name[id-38204696-17c6-44da-9590-40f87fb5a899,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestMultiTenantJSON.test_delete_a_server_of_another_tenant[id-5c75009d-3eea-423e-bea3-61b09fd25f9c,negative]
-tempest.api.compute.servers.test_servers_negative.ServersNegativeTestMultiTenantJSON.test_update_server_of_another_tenant[id-543d84c1-dd2e-4c6d-8cb2-b9da0efaa384,negative]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_numeric_server_name[id-fd57f159-68d6-4c2a-902b-03070828a87e]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_server_metadata_exceeds_length_limit[id-7fc74810-0bd2-4cd7-8244-4f33a9db865a]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_server_name_length_exceeds_256[id-c3e0fb12-07fc-4d76-a22e-37409887afe8]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_with_invalid_flavor[id-18f5227f-d155-4429-807c-ccb103887537]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_with_invalid_image[id-fcba1052-0a50-4cf3-b1ac-fae241edf02f]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_with_invalid_network_uuid[id-4e72dc2d-44c5-4336-9667-f7972e95c402]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_delete_server_pass_id_exceeding_length_limit[id-f4d7279b-5fd2-4bf2-9ba4-ae35df0d18c5]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_delete_server_pass_negative_id[id-75f79124-277c-45e6-a373-a1d6803f4cc4]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_get_non_existent_server[id-3436b02f-1b1e-4f03-881e-c6a602327439]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_invalid_ip_v6_address[id-5226dd80-1e9c-4d8a-b5f9-b26ca4763fd0]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_reboot_non_existent_server[id-d4c023a0-9c55-4747-9dd5-413b820143c7]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_rebuild_deleted_server[id-98fa0458-1485-440f-873b-fe7f0d714930]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_rebuild_non_existent_server[id-d86141a7-906e-4731-b187-d64a2ea61422]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_rebuild_reboot_deleted_server[id-98fa0458-1485-440f-873b-fe7f0d714930]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_server_name_blank[id-dbbfd247-c40c-449e-8f6c-d2aa7c7da7cf]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_stop_non_existent_server[id-a31460a9-49e1-42aa-82ee-06e0bb7c2d03]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_update_name_of_non_existent_server[id-aa8eed43-e2cb-4ebf-930b-da14f6a21d81]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_update_server_name_length_exceeds_256[id-5c8e244c-dada-4590-9944-749c455b431f]
+tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_update_server_set_empty_name[id-38204696-17c6-44da-9590-40f87fb5a899]
tempest.api.compute.test_quotas.QuotasTestJSON.test_get_default_quotas[id-9bfecac7-b966-4f47-913f-1a9e2c12134a]
tempest.api.compute.test_quotas.QuotasTestJSON.test_get_quotas[id-f1ef0a97-dbbb-4cca-adc5-c9fbc4f76107]
+tempest.api.compute.test_versions.TestVersions.test_list_api_versions[id-6c0a0990-43b6-4529-9b61-5fd8daf7c55c]
tempest.api.compute.volumes.test_attach_volume.AttachVolumeTestJSON.test_attach_detach_volume[id-52e9045a-e90d-4c0d-9087-79d657faffff]
tempest.api.compute.volumes.test_attach_volume.AttachVolumeTestJSON.test_list_get_volume_attachments[id-7fa563fe-f0f7-43eb-9e22-a1ece036b513]
-tempest.api.compute.volumes.test_volumes_list.VolumesTestJSON.test_volume_list[id-bc2dd1a0-15af-48e5-9990-f2e75a48325d]
-tempest.api.compute.volumes.test_volumes_list.VolumesTestJSON.test_volume_list_with_details[id-bad0567a-5a4f-420b-851e-780b55bb867c]
-tempest.api.compute.volumes.test_volumes_negative.VolumesNegativeTest.test_get_invalid_volume_id[id-f01904f2-e975-4915-98ce-cb5fa27bde4f,negative]
-tempest.api.compute.volumes.test_volumes_negative.VolumesNegativeTest.test_get_volume_without_passing_volume_id[id-62bab09a-4c03-4617-8cca-8572bc94af9b,negative]
+tempest.api.identity.v3.TestApiDiscovery.test_api_media_types[id-657c1970-4722-4189-8831-7325f3bc4265]
+tempest.api.identity.v3.TestApiDiscovery.test_api_version_resources[id-b9232f5e-d9e5-4d97-b96c-28d3db4de1bd]
+tempest.api.identity.v3.TestApiDiscovery.test_api_version_statuses[id-8879a470-abfb-47bb-bb8d-5a7fd279ad1e]
+tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_media_types[id-657c1970-4722-4189-8831-7325f3bc4265]
+tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_version_resources[id-b9232f5e-d9e5-4d97-b96c-28d3db4de1bd]
+tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_version_statuses[id-8879a470-abfb-47bb-bb8d-5a7fd279ad1e]
tempest.api.identity.v3.test_tokens.TokensV3Test.test_create_token[id-6f8e4436-fc96-4282-8122-e41df57197a9]
+tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_delete_image[id-f848bb94-1c6e-45a4-8726-39e3a5b23535]
+tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_update_image[id-f66891a7-a35c-41a8-b590-a065c2a1caa6]
+tempest.api.image.v2.test_images.ListImagesTest.test_get_image_schema[id-622b925c-479f-4736-860d-adeaf13bc371]
+tempest.api.image.v2.test_images.ListImagesTest.test_get_images_schema[id-25c8d7b2-df21-460f-87ac-93130bcdc684]
+tempest.api.image.v2.test_images.ListImagesTest.test_index_no_params[id-1e341d7a-90a9-494c-b143-2cdf2aeb6aee]
+tempest.api.image.v2.test_images.ListImagesTest.test_list_images_param_container_format[id-9959ca1d-1aa7-4b7a-a1ea-0fff0499b37e]
+tempest.api.image.v2.test_images.ListImagesTest.test_list_images_param_disk_format[id-4a4735a7-f22f-49b6-b0d9-66e1ef7453eb]
+tempest.api.image.v2.test_images.ListImagesTest.test_list_images_param_limit[id-e914a891-3cc8-4b40-ad32-e0a39ffbddbb]
+tempest.api.image.v2.test_images.ListImagesTest.test_list_images_param_min_max_size[id-4ad8c157-971a-4ba8-aa84-ed61154b1e7f]
+tempest.api.image.v2.test_images.ListImagesTest.test_list_images_param_size[id-cf1b9a48-8340-480e-af7b-fe7e17690876]
+tempest.api.image.v2.test_images.ListImagesTest.test_list_images_param_status[id-7fc9e369-0f58-4d05-9aa5-0969e2d59d15]
+tempest.api.image.v2.test_images.ListImagesTest.test_list_images_param_visibility[id-7a95bb92-d99e-4b12-9718-7bc6ab73e6d2]
tempest.api.image.v2.test_images.ListImagesTest.test_list_no_params[id-1e341d7a-90a9-494c-b143-2cdf2aeb6aee]
-tempest.api.image.v1.test_images.ListImagesTest.test_index_no_params[id-246178ab-3b33-4212-9a4b-a7fe8261794d]
-tempest.api.object_storage.test_object_expiry.ObjectExpiryTest.test_get_object_after_expiry_time[id-fb024a42-37f3-4ba5-9684-4f40a7910b41]
-tempest.api.object_storage.test_object_services.ObjectTest.test_copy_object_2d_way[id-06f90388-2d0e-40aa-934c-e9a8833e958a]
-tempest.api.object_storage.test_object_services.ObjectTest.test_copy_object_across_containers[id-aa467252-44f3-472a-b5ae-5b57c3c9c147]
-tempest.api.object_storage.test_object_services.ObjectTest.test_copy_object_in_same_container[id-1a9ab572-1b66-4981-8c21-416e2a5e6011]
-tempest.api.object_storage.test_object_services.ObjectTest.test_copy_object_to_itself[id-2248abba-415d-410b-9c30-22dff9cd6e67]
-tempest.api.object_storage.test_object_services.ObjectTest.test_create_object[id-5b4ce26f-3545-46c9-a2ba-5754358a4c62,smoke]
-tempest.api.object_storage.test_object_services.ObjectTest.test_delete_object[id-17738d45-03bd-4d45-9e0b-7b2f58f98687]
-tempest.api.object_storage.test_object_services.ObjectTest.test_get_object[id-02610ba7-86b7-4272-9ed8-aa8d417cb3cd,smoke]
-tempest.api.object_storage.test_object_services.ObjectTest.test_get_object_if_different[id-50d01f12-526f-4360-9ac2-75dd508d7b68]
-tempest.api.object_storage.test_object_services.ObjectTest.test_object_upload_in_segments[id-e3e6a64a-9f50-4955-b987-6ce6767c97fb]
-tempest.api.object_storage.test_object_temp_url.ObjectTempUrlTest.test_get_object_using_temp_url[id-f91c96d4-1230-4bba-8eb9-84476d18d991]
-tempest.api.object_storage.test_object_temp_url.ObjectTempUrlTest.test_put_object_using_temp_url[id-9b08dade-3571-4152-8a4f-a4f2a873a735]
-tempest.api.object_storage.test_object_version.ContainerTest.test_versioned_container[id-a151e158-dcbf-4a1f-a1e7-46cd65895a6f]
+tempest.api.image.v2.test_images.ListUserImagesTest.test_get_image_schema[id-622b925c-479f-4736-860d-adeaf13bc371]
+tempest.api.image.v2.test_images.ListUserImagesTest.test_get_images_schema[id-25c8d7b2-df21-460f-87ac-93130bcdc684]
+tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_container_format[id-9959ca1d-1aa7-4b7a-a1ea-0fff0499b37e]
+tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_disk_format[id-4a4735a7-f22f-49b6-b0d9-66e1ef7453eb]
+tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_limit[id-e914a891-3cc8-4b40-ad32-e0a39ffbddbb]
+tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_min_max_size[id-4ad8c157-971a-4ba8-aa84-ed61154b1e7f]
+tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_size[id-cf1b9a48-8340-480e-af7b-fe7e17690876]
+tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_status[id-7fc9e369-0f58-4d05-9aa5-0969e2d59d15]
+tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_visibility[id-7a95bb92-d99e-4b12-9718-7bc6ab73e6d2]
+tempest.api.image.v2.test_images.ListUserImagesTest.test_list_no_params[id-1e341d7a-90a9-494c-b143-2cdf2aeb6aee]
+tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_delete_image_null_id[id-32248db1-ab88-4821-9604-c7c369f1f88c]
+tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_delete_non_existing_image[id-6fe40f1c-57bd-4918-89cc-8500f850f3de]
+tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_delete_deleted_image[id-e57fc127-7ba0-4693-92d7-1d8a05ebcba9]
+tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_image_null_id[id-ef45000d-0a72-4781-866d-4cb7bf2562ad]
+tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_non_existent_image[id-668743d5-08ad-4480-b2b8-15da34f81d9f]
+tempest.api.image.v2.test_images_tags.ImagesTagsTest.test_update_delete_tags_for_image[id-10407036-6059-4f95-a2cd-cbbbee7ed329]
+tempest.api.image.v2.test_images_tags_negative.ImagesTagsNegativeTest.test_delete_non_existing_tag[id-39c023a2-325a-433a-9eea-649bf1414b19]
+tempest.api.image.v2.test_images_tags_negative.ImagesTagsNegativeTest.test_update_tags_for_non_existing_image[id-8cd30f82-6f9a-4c6e-8034-c1b51fba43d9]
+tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_all_attributes[id-a4d9ec4c-0306-4111-a75c-db01a709030b]
+tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_allocation_pools[id-bec949c4-3147-4ba6-af5f-cd2306118404]
+tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_dhcp_enabled[id-94ce038d-ff0a-4a4c-a56b-09da3ca0b55d]
+tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_gw[id-9393b468-186d-496d-aa36-732348cd76e7]
+tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_gw_and_allocation_pools[id-8217a149-0c6c-4cfb-93db-0486f707d13f]
+tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_host_routes_and_dns_nameservers[id-d830de0a-be47-468f-8f02-1fd996118289]
+tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_without_gateway[id-d2d596e2-8e76-47a9-ac51-d4648009f4d3]
+tempest.api.network.test_networks.NetworksTest.test_create_update_delete_network_subnet[id-0e269138-0da6-4efc-a46d-578161e7b221]
+tempest.api.network.test_networks.NetworksTest.test_delete_network_with_subnet[id-f04f61a9-b7f3-4194-90b2-9bcf660d1bfe]
+tempest.api.network.test_networks.NetworksTest.test_list_networks[id-f7ffdeda-e200-4a7a-bcbe-05716e86bf43]
+tempest.api.network.test_networks.NetworksTest.test_list_networks_fields[id-6ae6d24f-9194-4869-9c85-c313cb20e080]
+tempest.api.network.test_networks.NetworksTest.test_list_subnets[id-db68ba48-f4ea-49e9-81d1-e367f6d0b20a]
+tempest.api.network.test_networks.NetworksTest.test_list_subnets_fields[id-842589e3-9663-46b0-85e4-7f01273b0412]
+tempest.api.network.test_networks.NetworksTest.test_show_network[id-2bf13842-c93f-4a69-83ed-717d2ec3b44e]
+tempest.api.network.test_networks.NetworksTest.test_show_network_fields[id-867819bb-c4b6-45f7-acf9-90edcf70aa5e]
+tempest.api.network.test_networks.NetworksTest.test_show_subnet[id-bd635d81-6030-4dd1-b3b9-31ba0cfdf6cc]
+tempest.api.network.test_networks.NetworksTest.test_show_subnet_fields[id-270fff0b-8bfc-411f-a184-1e8fd35286f0]
+tempest.api.network.test_networks.NetworksTest.test_update_subnet_gw_dns_host_routes_dhcp[id-3d3852eb-3009-49ec-97ac-5ce83b73010a]
+tempest.api.network.test_networks.NetworksTestJSON.test_create_delete_subnet_all_attributes[id-a4d9ec4c-0306-4111-a75c-db01a709030b]
+tempest.api.network.test_networks.NetworksTestJSON.test_create_delete_subnet_with_allocation_pools[id-bec949c4-3147-4ba6-af5f-cd2306118404]
+tempest.api.network.test_networks.NetworksTestJSON.test_create_delete_subnet_with_dhcp_enabled[id-94ce038d-ff0a-4a4c-a56b-09da3ca0b55d]
+tempest.api.network.test_networks.NetworksTestJSON.test_create_delete_subnet_with_gw[id-9393b468-186d-496d-aa36-732348cd76e7]
+tempest.api.network.test_networks.NetworksTestJSON.test_create_delete_subnet_with_gw_and_allocation_pools[id-8217a149-0c6c-4cfb-93db-0486f707d13f]
+tempest.api.network.test_networks.NetworksTestJSON.test_create_delete_subnet_with_host_routes_and_dns_nameservers[id-d830de0a-be47-468f-8f02-1fd996118289]
+tempest.api.network.test_networks.NetworksTestJSON.test_create_delete_subnet_without_gateway[id-d2d596e2-8e76-47a9-ac51-d4648009f4d3]
+tempest.api.network.test_networks.NetworksTestJSON.test_create_update_delete_network_subnet[id-0e269138-0da6-4efc-a46d-578161e7b221]
+tempest.api.network.test_networks.NetworksTestJSON.test_delete_network_with_subnet[id-f04f61a9-b7f3-4194-90b2-9bcf660d1bfe]
+tempest.api.network.test_networks.NetworksTestJSON.test_list_networks[id-f7ffdeda-e200-4a7a-bcbe-05716e86bf43]
+tempest.api.network.test_networks.NetworksTestJSON.test_list_networks_fields[id-6ae6d24f-9194-4869-9c85-c313cb20e080]
+tempest.api.network.test_networks.NetworksTestJSON.test_list_subnets[id-db68ba48-f4ea-49e9-81d1-e367f6d0b20a]
+tempest.api.network.test_networks.NetworksTestJSON.test_list_subnets_fields[id-842589e3-9663-46b0-85e4-7f01273b0412]
+tempest.api.network.test_networks.NetworksTestJSON.test_show_network[id-2bf13842-c93f-4a69-83ed-717d2ec3b44e]
+tempest.api.network.test_networks.NetworksTestJSON.test_show_network_fields[id-867819bb-c4b6-45f7-acf9-90edcf70aa5e]
+tempest.api.network.test_networks.NetworksTestJSON.test_show_subnet[id-bd635d81-6030-4dd1-b3b9-31ba0cfdf6cc]
+tempest.api.network.test_networks.NetworksTestJSON.test_show_subnet_fields[id-270fff0b-8bfc-411f-a184-1e8fd35286f0]
+tempest.api.network.test_networks.NetworksTestJSON.test_update_subnet_gw_dns_host_routes_dhcp[id-3d3852eb-3009-49ec-97ac-5ce83b73010a]
+tempest.api.network.test_ports.PortsTestJSON.test_create_bulk_port[id-67f1b811-f8db-43e2-86bd-72c074d4a42c]
+tempest.api.network.test_ports.PortsTestJSON.test_create_port_in_allowed_allocation_pools[id-0435f278-40ae-48cb-a404-b8a087bc09b1]
+tempest.api.network.test_ports.PortsTestJSON.test_create_update_delete_port[id-c72c1c0c-2193-4aca-aaa4-b1442640f51c]
+tempest.api.network.test_ports.PortsTestJSON.test_list_ports[id-cf95b358-3e92-4a29-a148-52445e1ac50e]
+tempest.api.network.test_ports.PortsTestJSON.test_list_ports_fields[id-ff7f117f-f034-4e0e-abff-ccef05c454b4]
+tempest.api.network.test_ports.PortsTestJSON.test_show_port[id-c9a685bd-e83f-499c-939f-9f7863ca259f]
+tempest.api.network.test_ports.PortsTestJSON.test_show_port_fields[id-45fcdaf2-dab0-4c13-ac6c-fcddfb579dbd]
+tempest.api.network.test_ports.PortsTestJSON.test_update_port_with_security_group_and_extra_attributes[id-58091b66-4ff4-4cc1-a549-05d60c7acd1a]
+tempest.api.network.test_ports.PortsTestJSON.test_update_port_with_two_security_groups_and_extra_attributes[id-edf6766d-3d40-4621-bc6e-2521a44c257d]
+tempest.api.network.test_security_groups.SecGroupTest.test_create_list_update_show_delete_security_group[id-bfd128e5-3c92-44b6-9d66-7fe29d22c802]
+tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_additional_args[id-87dfbcf9-1849-43ea-b1e4-efa3eeae9f71]
+tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_icmp_type_code[id-c9463db8-b44d-4f52-b6c0-8dbda99f26ce]
+tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_protocol_integer_value[id-0a307599-6655-4220-bebc-fd70c64f2290]
+tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_remote_group_id[id-c2ed2deb-7a0c-44d8-8b4c-a5825b5c310b]
+tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_remote_ip_prefix[id-16459776-5da2-4634-bce4-4b55ee3ec188]
+tempest.api.network.test_security_groups.SecGroupTest.test_create_show_delete_security_group_rule[id-cfb99e0e-7410-4a3d-8a0c-959a63ee77e9]
+tempest.api.network.test_security_groups.SecGroupTest.test_list_security_groups[id-e30abd17-fef9-4739-8617-dc26da88e686]
+tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_additional_default_security_group_fails[id-2323061e-9fbf-4eb0-b547-7e8fafc90849]
+tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_duplicate_security_group_rule_fails[id-8fde898f-ce88-493b-adc9-4e4692879fc5]
+tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_bad_ethertype[id-5666968c-fff3-40d6-9efc-df1c8bd01abb]
+tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_bad_protocol[id-981bdc22-ce48-41ed-900a-73148b583958]
+tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_bad_remote_ip_prefix[id-5f8daf69-3c5f-4aaa-88c9-db1d66f68679]
+tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_invalid_ports[id-0d9c7791-f2ad-4e2f-ac73-abf2373b0d2d]
+tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_non_existent_remote_groupid[id-4bf786fd-2f02-443c-9716-5b98e159a49a]
+tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_non_existent_security_group[id-be308db6-a7cf-4d5c-9baf-71bafd73f35e]
+tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_delete_non_existent_security_group[id-1f1bb89d-5664-4956-9fcd-83ee0fa603df]
+tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_show_non_existent_security_group[id-424fd5c3-9ddc-486a-b45f-39bf0c820fc6]
+tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_show_non_existent_security_group_rule[id-4c094c09-000b-4e41-8100-9617600c02a6]
+tempest.api.volume.test_availability_zone.AvailabilityZoneV2TestJSON.test_get_availability_zone_list[id-01f1ae88-eba9-4c6b-a011-6f7ace06b725]
+tempest.api.volume.test_extensions.ExtensionsV2TestJSON.test_list_extensions[id-94607eb0-43a5-47ca-82aa-736b41bd2e2c]
+tempest.api.volume.test_snapshot_metadata.SnapshotV2MetadataTestJSON.test_create_get_delete_snapshot_metadata[id-a2f20f99-e363-4584-be97-bc33afb1a56c]
+tempest.api.volume.test_snapshot_metadata.SnapshotV2MetadataTestJSON.test_crud_snapshot_metadata[id-a2f20f99-e363-4584-be97-bc33afb1a56c]
+tempest.api.volume.test_snapshot_metadata.SnapshotV2MetadataTestJSON.test_update_snapshot_metadata_item[id-e8ff85c5-8f97-477f-806a-3ac364a949ed]
+tempest.api.volume.test_volume_metadata.VolumesV2MetadataTest.test_create_get_delete_volume_metadata[id-6f5b125b-f664-44bf-910f-751591fe5769]
+tempest.api.volume.test_volume_metadata.VolumesV2MetadataTest.test_crud_volume_metadata[id-6f5b125b-f664-44bf-910f-751591fe5769]
+tempest.api.volume.test_volume_metadata.VolumesV2MetadataTest.test_update_volume_metadata_item[id-862261c5-8df4-475a-8c21-946e50e36a20]
+tempest.api.volume.test_volumes_actions.VolumesV2ActionsTest.test_attach_detach_volume_to_instance[id-fff42874-7db5-4487-a8e1-ddda5fb5288d]
+tempest.api.volume.test_volumes_actions.VolumesV2ActionsTest.test_get_volume_attachment[id-9516a2c8-9135-488c-8dd6-5677a7e5f371]
+tempest.api.volume.test_volumes_actions.VolumesV2ActionsTest.test_reserve_unreserve_volume[id-92c4ef64-51b2-40c0-9f7e-4749fbaaba33]
+tempest.api.volume.test_volumes_actions.VolumesV2ActionsTest.test_volume_bootable[id-63e21b4c-0a0c-41f6-bfc3-7c2816815599]
+tempest.api.volume.test_volumes_actions.VolumesV2ActionsTest.test_volume_readonly_update[id-fff74e1e-5bd3-4b33-9ea9-24c103bc3f59]
+tempest.api.volume.test_volumes_get.VolumesV2GetTest.test_volume_create_get_update_delete[id-27fb0e9f-fb64-41dd-8bdb-1ffa762f0d51]
+tempest.api.volume.test_volumes_get.VolumesV2GetTest.test_volume_create_get_update_delete_as_clone[id-3f591b4a-7dc6-444c-bd51-77469506b3a1]
+tempest.api.volume.test_volumes_get.VolumesV2GetTest.test_volume_create_get_update_delete_from_image[id-54a01030-c7fc-447c-86ee-c1182beae638]
+tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volume_list[id-0b6ddd39-b948-471f-8038-4787978747c4]
+tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_by_name[id-a28e8da4-0b56-472f-87a8-0f4d3f819c02]
+tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_details_by_name[id-2de3a6d4-12aa-403b-a8f2-fdeb42a89623]
+tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_param_display_name_and_status[id-777c87c1-2fc4-4883-8b8e-5c0b951d1ec8]
+tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_with_detail_param_display_name_and_status[id-856ab8ca-6009-4c37-b691-be1065528ad4]
+tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_with_detail_param_metadata[id-1ca92d3c-4a8e-4b43-93f5-e4c7fb3b291d]
+tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_with_details[id-adcbb5a7-5ad8-4b61-bd10-5380e111a877]
+tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_with_param_metadata[id-b5ebea1b-0603-40a0-bb41-15fcd0a53214]
+tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volumes_list_by_availability_zone[id-c0cfa863-3020-40d7-b587-e35f597d5d87]
+tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volumes_list_by_status[id-39654e13-734c-4dab-95ce-7613bf8407ce]
+tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volumes_list_details_by_availability_zone[id-e1b80d13-94f0-4ba2-a40e-386af29f8db1]
+tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volumes_list_details_by_status[id-2943f712-71ec-482a-bf49-d5ca06216b9f]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_attach_volumes_with_nonexistent_volume_id[id-f5e56b0a-5d02-43c1-a2a7-c9b792c2e3f6]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_create_volume_with_invalid_size[id-1ed83a8a-682d-4dfb-a30e-ee63ffd6c049]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_create_volume_with_nonexistent_snapshot_id[id-0c36f6ae-4604-4017-b0a9-34fdc63096f9]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_create_volume_with_nonexistent_source_volid[id-47c73e08-4be8-45bb-bfdf-0c4e79b88344]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_create_volume_with_nonexistent_volume_type[id-10254ed8-3849-454e-862e-3ab8e6aa01d2]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_create_volume_with_out_passing_size[id-9387686f-334f-4d31-a439-33494b9e2683]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_create_volume_with_size_negative[id-8b472729-9eba-446e-a83b-916bdb34bef7]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_create_volume_with_size_zero[id-41331caa-eaf4-4001-869d-bc18c1869360]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_create_volume_without_passing_size[id-9387686f-334f-4d31-a439-33494b9e2683]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_delete_invalid_volume_id[id-1f035827-7c32-4019-9240-b4ec2dbd9dfd]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_delete_volume_without_passing_volume_id[id-441a1550-5d44-4b30-af0f-a6d402f52026]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_detach_volumes_with_invalid_volume_id[id-9f9c24e4-011d-46b5-b992-952140ce237a]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_get_invalid_volume_id[id-30799cfd-7ee4-446c-b66c-45b383ed211b]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_get_volume_without_passing_volume_id[id-c6c3db06-29ad-4e91-beb0-2ab195fe49e3]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_list_volumes_detail_with_invalid_status[id-ba94b27b-be3f-496c-a00e-0283b373fa75]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_list_volumes_detail_with_nonexistent_name[id-9ca17820-a0e7-4cbd-a7fa-f4468735e359]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_list_volumes_with_invalid_status[id-143b279b-7522-466b-81be-34a87d564a7c]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_list_volumes_with_nonexistent_name[id-0f4aa809-8c7b-418f-8fb3-84c7a5dfc52f]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_reserve_volume_with_negative_volume_status[id-449c4ed2-ecdd-47bb-98dc-072aeccf158c]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_reserve_volume_with_nonexistent_volume_id[id-ac6084c0-0546-45f9-b284-38a367e0e0e2]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_unreserve_volume_with_nonexistent_volume_id[id-eb467654-3dc1-4a72-9b46-47c29d22654c]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_update_volume_with_empty_volume_id[id-72aeca85-57a5-4c1f-9057-f320f9ea575b]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_update_volume_with_invalid_volume_id[id-e66e40d6-65e6-4e75-bdc7-636792fa152d]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_update_volume_with_nonexistent_volume_id[id-0186422c-999a-480e-a026-6a665744c30c]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_volume_delete_nonexistent_volume_id[id-555efa6e-efcd-44ef-8a3b-4a7ca4837a29]
+tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_volume_get_nonexistent_volume_id[id-f131c586-9448-44a4-a8b0-54ca838aa43e]
+tempest.api.volume.test_volumes_snapshots.VolumesV2SnapshotTestJSON.test_snapshot_create_get_list_update_delete[id-2a8abbe4-d871-46db-b049-c41f5af8216e]
+tempest.api.volume.test_volumes_snapshots.VolumesV2SnapshotTestJSON.test_snapshots_list_details_with_params[id-220a1022-1fcd-4a74-a7bd-6b859156cda2]
+tempest.api.volume.test_volumes_snapshots.VolumesV2SnapshotTestJSON.test_snapshots_list_with_params[id-59f41f43-aebf-48a9-ab5d-d76340fab32b]
+tempest.api.volume.test_volumes_snapshots.VolumesV2SnapshotTestJSON.test_volume_from_snapshot[id-677863d1-3142-456d-b6ac-9924f667a7f4]
+tempest.api.volume.test_volumes_snapshots_list.VolumesV2SnapshotListTestJSON.test_snapshots_list_details_with_params[id-220a1022-1fcd-4a74-a7bd-6b859156cda2]
+tempest.api.volume.test_volumes_snapshots_list.VolumesV2SnapshotListTestJSON.test_snapshots_list_with_params[id-59f41f43-aebf-48a9-ab5d-d76340fab32b]
+tempest.api.volume.test_volumes_snapshots_negative.VolumesV2SnapshotNegativeTestJSON.test_create_snapshot_with_nonexistent_volume_id[id-e3e466af-70ab-4f4b-a967-ab04e3532ea7]
+tempest.api.volume.test_volumes_snapshots_negative.VolumesV2SnapshotNegativeTestJSON.test_create_snapshot_without_passing_volume_id[id-bb9da53e-d335-4309-9c15-7e76fd5e4d6d]
+tempest.api.volume.v2.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_details_pagination[id-e9138a2c-f67b-4796-8efa-635c196d01de]
+tempest.api.volume.v2.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_details_with_multiple_params[id-2a7064eb-b9c3-429b-b888-33928fc5edd3]
+tempest.api.volume.v2.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_pagination[id-af55e775-8e4b-4feb-8719-215c43b0238c] \ No newline at end of file
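Editor's note: the refreshed defcore_req.txt above is a plain whitelist of Tempest test IDs, one per line, each carrying its idempotent id in brackets. A minimal sketch of how such a list could be normalised into a load list (for example to feed testrepository's --load-list option) is shown below; the file names are illustrative and not part of this change.

    # Sketch only: normalise a DefCore-style whitelist into a deduplicated,
    # sorted load list. Paths below are illustrative assumptions.
    def build_load_list(src="defcore_req.txt", dst="defcore_load_list.txt"):
        tests = []
        with open(src) as f:
            for line in f:
                line = line.strip()
                # keep only real test IDs, skip blanks and comments
                if line and not line.startswith("#"):
                    tests.append(line)
        with open(dst, "w") as out:
            out.write("\n".join(sorted(set(tests))) + "\n")
        return dst

    if __name__ == "__main__":
        print("load list written to %s" % build_load_list())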
diff --git a/functest/opnfv_tests/openstack/tempest/tempest.py b/functest/opnfv_tests/openstack/tempest/tempest.py
index f925336d4..37b5c0ee7 100644
--- a/functest/opnfv_tests/openstack/tempest/tempest.py
+++ b/functest/opnfv_tests/openstack/tempest/tempest.py
@@ -329,3 +329,12 @@ class TempestCustom(TempestCommon):
self.case_name = "tempest_custom"
self.MODE = mode
self.OPTION = option
+
+
+class TempestDefcore(TempestCommon):
+
+ def __init__(self):
+ TempestCommon.__init__(self)
+ self.case_name = "tempest_defcore"
+ self.MODE = "defcore"
+ self.OPTION = "--concurrency 1"
diff --git a/functest/opnfv_tests/openstack/vping/vping_base.py b/functest/opnfv_tests/openstack/vping/vping_base.py
index a5309bd44..9d57cfaea 100644
--- a/functest/opnfv_tests/openstack/vping/vping_base.py
+++ b/functest/opnfv_tests/openstack/vping/vping_base.py
@@ -32,6 +32,8 @@ class VPingBase(testcase_base.TestcaseBase):
self.image_name = CONST.vping_image_name
self.image_filename = CONST.openstack_image_file_name
self.image_format = CONST.openstack_image_disk_format
+ self.image_username = CONST.openstack_image_username
+ self.image_password = CONST.openstack_image_password
self.image_path = os.path.join(CONST.dir_functest_data,
self.image_filename)
diff --git a/functest/opnfv_tests/openstack/vping/vping_ssh.py b/functest/opnfv_tests/openstack/vping/vping_ssh.py
index b032c3087..7a58a41fa 100755
--- a/functest/opnfv_tests/openstack/vping/vping_ssh.py
+++ b/functest/opnfv_tests/openstack/vping/vping_ssh.py
@@ -61,8 +61,6 @@ class VPingSSH(vping_base.VPingBase):
def establish_ssh(self, vm, floatip):
self.logger.info("Trying to establish SSH connection to %s..."
% floatip)
- username = 'cirros'
- password = 'cubswin:)'
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
@@ -73,8 +71,8 @@ class VPingSSH(vping_base.VPingBase):
cidr_first_octet = self.private_subnet_cidr.split('.')[0]
while timeout > 0:
try:
- ssh.connect(floatip, username=username,
- password=password, timeout=2)
+ ssh.connect(floatip, username=self.image_username,
+ password=self.image_password, timeout=2)
self.logger.debug("SSH connection established to %s."
% floatip)
break
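Editor's note: with the hard-coded cirros credentials removed, the SSH step now relies on image_username/image_password set in VPingBase from the image configuration. A minimal standalone sketch of the same retry pattern with configurable credentials; host, credentials and timeout values are placeholders, not taken from the functest config:

    # Sketch of the paramiko retry loop used above, with credentials passed in
    # rather than hard-coded. All parameter values here are illustrative.
    import time
    import paramiko

    def wait_for_ssh(host, username, password, timeout=60):
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        remaining = timeout
        while remaining > 0:
            try:
                ssh.connect(host, username=username, password=password,
                            timeout=2)
                return ssh  # connection established
            except Exception:
                time.sleep(2)
                remaining -= 2
        raise RuntimeError("SSH to %s not reachable after %ss" % (host, timeout))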
diff --git a/functest/opnfv_tests/sdn/odl/odl.py b/functest/opnfv_tests/sdn/odl/odl.py
index 9bff324f1..69818f5a5 100755
--- a/functest/opnfv_tests/sdn/odl/odl.py
+++ b/functest/opnfv_tests/sdn/odl/odl.py
@@ -186,7 +186,7 @@ class ODLTests(testcase_base.TestcaseBase):
return self.main(suites, **kwargs)
-class ODLParser():
+class ODLParser(object):
def __init__(self):
self.parser = argparse.ArgumentParser()
diff --git a/functest/opnfv_tests/sdn/onos/sfc/sfc_onos.py b/functest/opnfv_tests/sdn/onos/sfc/sfc_onos.py
index 090502ba9..c21986902 100644
--- a/functest/opnfv_tests/sdn/onos/sfc/sfc_onos.py
+++ b/functest/opnfv_tests/sdn/onos/sfc/sfc_onos.py
@@ -10,13 +10,15 @@ from pexpect import pxssh
import functest.utils.functest_logger as ft_logger
+from functest.utils.constants import CONST
+
OK = 200
CREATED = 201
ACCEPTED = 202
NO_CONTENT = 204
-class SfcOnos:
+class SfcOnos(object):
"""Defines all the def function of SFC."""
def __init__(self):
@@ -99,6 +101,8 @@ class SfcOnos:
self.ip_pool = 0
self.vm_public_ip = []
self.vm_public_id = []
+ self.cirros_username = CONST.openstack_image_username
+ self.cirros_password = CONST.openstack_image_password
self.net_id1 = 0
self.vm = []
self.address = 0
@@ -628,9 +632,7 @@ class SfcOnos:
s = pxssh.pxssh()
hostname = self.vm_public_ip[0]
- username = "cirros"
- password = "cubswin:)"
- s.login(hostname, username, password)
+ s.login(hostname, self.cirros_username, self.cirros_password)
s.sendline("ping -c 5 " + str(self.port_ip[2]))
s.prompt() # match the prompt
@@ -644,9 +646,7 @@ class SfcOnos:
def vm1(queue1):
s = pxssh.pxssh()
hostname = self.vm_public_ip[1]
- username = "cirros"
- password = "cubswin:)"
- s.login(hostname, username, password)
+ s.login(hostname, self.cirros_username, self.cirros_password)
s.sendline('sudo ./firewall')
s.prompt()
output_pack = s.before
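Editor's note: the same substitution is applied to the pxssh logins in the SFC test, so the guest credentials come from CONST instead of literal cirros values. A short sketch of the pattern; host and command are placeholders:

    # Sketch: pxssh login using configured image credentials instead of
    # literal cirros values. Host/command values are illustrative.
    from pexpect import pxssh

    def run_remote(host, username, password, command="ping -c 5 8.8.8.8"):
        s = pxssh.pxssh()
        s.login(host, username, password)
        s.sendline(command)
        s.prompt()                # wait for the shell prompt
        output = s.before         # everything printed before the prompt
        s.logout()
        return output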
diff --git a/functest/opnfv_tests/sdn/onos/teston/adapters/foundation.py b/functest/opnfv_tests/sdn/onos/teston/adapters/foundation.py
index bf2c4302c..2bef5cc6e 100644
--- a/functest/opnfv_tests/sdn/onos/teston/adapters/foundation.py
+++ b/functest/opnfv_tests/sdn/onos/teston/adapters/foundation.py
@@ -21,7 +21,7 @@ import functest.utils.functest_constants as ft_constants
import functest.utils.functest_utils as ft_utils
-class Foundation:
+class Foundation(object):
def __init__(self):
diff --git a/functest/opnfv_tests/vnf/ims/clearwater.py b/functest/opnfv_tests/vnf/ims/clearwater.py
index eb0abacdc..32c6dc5c9 100644
--- a/functest/opnfv_tests/vnf/ims/clearwater.py
+++ b/functest/opnfv_tests/vnf/ims/clearwater.py
@@ -12,7 +12,7 @@
########################################################################
-class Clearwater:
+class Clearwater(object):
def __init__(self, inputs={}, orchestrator=None, logger=None):
self.config = inputs
diff --git a/functest/opnfv_tests/vnf/ims/opera_ims.py b/functest/opnfv_tests/vnf/ims/opera_ims.py
index 073a56c37..7ead401fe 100644
--- a/functest/opnfv_tests/vnf/ims/opera_ims.py
+++ b/functest/opnfv_tests/vnf/ims/opera_ims.py
@@ -8,148 +8,393 @@
# http://www.apache.org/licenses/LICENSE-2.0
import json
-import os
-import requests
-import subprocess
+import socket
+import sys
import time
+import yaml
import functest.core.vnf_base as vnf_base
import functest.utils.functest_logger as ft_logger
import functest.utils.functest_utils as ft_utils
+import functest.utils.openstack_utils as os_utils
+import os
from functest.utils.constants import CONST
+from org.openbaton.cli.agents.agents import MainAgent
+from org.openbaton.cli.errors.errors import NfvoException
+
+
+def servertest(host, port):
+ args = socket.getaddrinfo(host, port, socket.AF_INET, socket.SOCK_STREAM)
+ for family, socktype, proto, canonname, sockaddr in args:
+ s = socket.socket(family, socktype, proto)
+ try:
+ s.connect(sockaddr)
+ except socket.error:
+ return False
+ else:
+ s.close()
+ return True
+
+# ----------------------------------------------------------
+#
+# UTILS
+#
+# -----------------------------------------------------------
+
+
+def get_config(parameter, file):
+ """
+ Returns the value of a given parameter in file.yaml
+ parameter must be given in string format with dots
+ Example: general.openstack.image_name
+ """
+ with open(file) as f:
+ file_yaml = yaml.safe_load(f)
+ f.close()
+ value = file_yaml
+ for element in parameter.split("."):
+ value = value.get(element)
+ if value is None:
+ raise ValueError("The parameter %s is not defined in"
+ " reporting.yaml" % parameter)
+ return value
+
+
+def download_and_add_image_on_glance(glance, image_name,
+ image_url, data_dir):
+ dest_path = data_dir
+ if not os.path.exists(dest_path):
+ os.makedirs(dest_path)
+ file_name = image_url.rsplit('/')[-1]
+ if not ft_utils.download_url(image_url, dest_path):
+ return False
+ image = os_utils.create_glance_image(
+ glance, image_name, dest_path + file_name)
+ if not image:
+ return False
+ return image
+
class ImsVnf(vnf_base.VnfOnBoardingBase):
- def __init__(self, project='functest', case='opera_ims',
+ def __init__(self, project='functest', case='orchestra_ims',
repo='', cmd=''):
super(ImsVnf, self).__init__(project, case, repo, cmd)
- self.logger = ft_logger.Logger("vIMS").getLogger()
- self.case_dir = os.path.join(CONST.functest_test, 'vnf/ims/')
- self.data_dir = CONST.dir_vIMS_data
+ self.ob_password = "openbaton"
+ self.ob_username = "admin"
+ self.ob_https = False
+ self.ob_port = "8080"
+ self.ob_ip = "localhost"
+ self.ob_instance_id = ""
+ self.logger = ft_logger.Logger("orchestra_ims").getLogger()
+ self.case_dir = os.path.join(CONST.dir_functest_test, 'vnf/ims/')
+ self.data_dir = CONST.dir_ims_data
self.test_dir = CONST.dir_repo_vims_test
-
+ self.ob_projectid = ""
+ self.keystone_client = os_utils.get_keystone_client()
+ self.ob_nsr_id = ""
+ self.main_agent = None
# vIMS Data directory creation
if not os.path.exists(self.data_dir):
os.makedirs(self.data_dir)
+ # Retrieve the configuration
+ try:
+ self.config = CONST.__getattribute__(
+ 'vnf_{}_config'.format(self.case_name))
+ except:
+ raise Exception("Orchestra VNF config file not found")
+ config_file = self.case_dir + self.config
+ self.imagename = get_config("openbaton.imagename", config_file)
+ self.market_link = get_config("openbaton.marketplace_link",
+ config_file)
+ self.images = get_config("tenant_images", config_file)
def deploy_orchestrator(self, **kwargs):
- # TODO
- # deploy open-O from Functest docker located on the Jumphost
- # you have admin rights on OpenStack SUT
- # you can cretae a VM, spawn docker on the jumphost
- # spawn docker on a VM in the SUT, ..up to you
- #
- # note: this step can be ignored
- # if Open-O is part of the installer
+ self.logger.info("Additional pre-configuration steps")
+ nova_client = os_utils.get_nova_client()
+ neutron_client = os_utils.get_neutron_client()
+ glance_client = os_utils.get_glance_client()
+
+ # needs some images
+ self.logger.info("Upload some OS images if it doesn't exist")
+ temp_dir = os.path.join(self.data_dir, "tmp/")
+ for image_name, image_url in self.images.iteritems():
+ self.logger.info("image: %s, url: %s" % (image_name, image_url))
+ try:
+ image_id = os_utils.get_image_id(glance_client,
+ image_name)
+ self.logger.info("image_id: %s" % image_id)
+ except:
+ self.logger.error("Unexpected error: %s" % sys.exc_info()[0])
+
+ if image_id == '':
+ self.logger.info("""%s image doesn't exist on glance repository. Try
+ downloading this image and upload on glance !""" % image_name)
+ image_id = download_and_add_image_on_glance(glance_client,
+ image_name,
+ image_url,
+ temp_dir)
+ if image_id == '':
+ self.step_failure(
+ "Failed to find or upload required OS "
+ "image for this deployment")
+ network_dic = os_utils.create_network_full(neutron_client,
+ "openbaton_mgmt",
+ "openbaton_mgmt_subnet",
+ "openbaton_router",
+ "192.168.100.0/24")
+
+ # orchestrator VM flavor
+ self.logger.info("Check medium Flavor is available, if not create one")
+ flavor_exist, flavor_id = os_utils.get_or_create_flavor(
+ "m1.medium",
+ "4096",
+ '1',
+ '2',
+ public=True)
+ self.logger.debug("Flavor id: %s" % flavor_id)
+
+ if not network_dic:
+ self.logger.error("There has been a problem when creating the "
+ "neutron network")
+
+ network_id = network_dic["net_id"]
+
+ self.logger.info("Creating floating IP for VM in advance...")
+ floatip_dic = os_utils.create_floating_ip(neutron_client)
+ floatip = floatip_dic['fip_addr']
+
+ if floatip is None:
+ self.logger.error("Cannot create floating IP.")
+
+ userdata = "#!/bin/bash\n"
+ userdata += "set -x\n"
+ userdata += "set -e\n"
+ userdata += "echo \"nameserver 8.8.8.8\" >> /etc/resolv.conf\n"
+ userdata += "apt-get install curl\n"
+ userdata += ("echo \"rabbitmq_broker_ip=%s\" > ./config_file\n"
+ % floatip)
+ userdata += "echo \"mysql=no\" >> ./config_file\n"
+ userdata += ("echo \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCuPXrV3"
+ "geeHc6QUdyUr/1Z+yQiqLcOskiEGBiXr4z76MK4abiFmDZ18OMQlc"
+ "fl0p3kS0WynVgyaOHwZkgy/DIoIplONVr2CKBKHtPK+Qcme2PVnCtv"
+ "EqItl/FcD+1h5XSQGoa+A1TSGgCod/DPo+pes0piLVXP8Ph6QS1k7S"
+ "ic7JDeRQ4oT1bXYpJ2eWBDMfxIWKZqcZRiGPgMIbJ1iEkxbpeaAd9O"
+ "4MiM9nGCPESmed+p54uYFjwEDlAJZShcAZziiZYAvMZhvAhe6USljc"
+ "7YAdalAnyD/jwCHuwIrUw/lxo7UdNCmaUxeobEYyyFA1YVXzpNFZya"
+ "XPGAAYIJwEq/ openbaton@opnfv\" >> /home/ubuntu/.ssh/aut"
+ "horized_keys\n")
+ userdata += "cat ./config_file\n"
+ userdata += ("curl -s http://get.openbaton.org/bootstrap "
+ "> ./bootstrap\n")
+ userdata += "export OPENBATON_COMPONENT_AUTOSTART=false\n"
+ bootstrap = "sh ./bootstrap release -configFile=./config_file"
+ userdata += bootstrap + "\n"
+
+ userdata += ("echo \"nfvo.plugin.timeout=300000\" >> "
+ "/etc/openbaton/openbaton-nfvo.properties\n")
+ userdata += "service openbaton-nfvo restart\n"
+ userdata += "service openbaton-vnfm-generic restart\n"
+
+ sg_id = os_utils.create_security_group_full(neutron_client,
+ "orchestra-sec-group",
+ "allowall")
+
+ os_utils.create_secgroup_rule(neutron_client, sg_id, "ingress",
+ "icmp", 0, 255)
+ os_utils.create_secgroup_rule(neutron_client, sg_id, "egress",
+ "icmp", 0, 255)
+ os_utils.create_secgroup_rule(neutron_client, sg_id, "ingress",
+ "tcp", 1, 65535)
+ os_utils.create_secgroup_rule(neutron_client, sg_id, "ingress",
+ "udp", 1, 65535)
+ os_utils.create_secgroup_rule(neutron_client, sg_id, "egress",
+ "tcp", 1, 65535)
+ os_utils.create_secgroup_rule(neutron_client, sg_id, "egress",
+ "udp", 1, 65535)
+
+ self.logger.info("Security group set")
+
+ self.logger.info("Create instance....")
+ self.logger.info("flavor: m1.medium\n"
+ "image: %s\n"
+ "network_id: %s\n"
+ "userdata: %s\n"
+ % (self.imagename, network_id, userdata))
+
+ instance = os_utils.create_instance_and_wait_for_active(
+ "m1.medium",
+ os_utils.get_image_id(glance_client, self.imagename),
+ network_id,
+ "orchestra-openbaton",
+ config_drive=False,
+ userdata=userdata)
+
+ self.ob_instance_id = instance.id
+
+ self.logger.info("Adding sec group to orchestra instance")
+ os_utils.add_secgroup_to_instance(nova_client,
+ self.ob_instance_id, sg_id)
+
+ self.logger.info("Associating floating ip: '%s' to VM '%s' "
+ % (floatip, "orchestra-openbaton"))
+ if not os_utils.add_floating_ip(nova_client, instance.id, floatip):
+ self.logger.error("Cannot associate floating IP to VM.")
+ self.step_failure("Cannot associate floating IP to VM.")
+
+ self.logger.info("Waiting for nfvo to be up and running...")
+ x = 0
+ while x < 100:
+ if servertest(floatip, "8080"):
+ break
+ else:
+ self.logger.debug("openbaton is not started yet")
+ time.sleep(5)
+ x += 1
+
+ if x == 100:
+ self.logger.error("Openbaton is not started correctly")
+ self.step_failure("Openbaton is not started correctly")
+
+ self.ob_ip = floatip
+ self.ob_password = "openbaton"
+ self.ob_username = "admin"
+ self.ob_https = False
+ self.ob_port = "8080"
+
self.logger.info("Deploy orchestrator: OK")
def deploy_vnf(self):
- # TODO
- self.logger.info("Deploy VNF: OK")
+ self.logger.info("vIMS Deployment")
+
+ self.main_agent = MainAgent(nfvo_ip=self.ob_ip,
+ nfvo_port=self.ob_port,
+ https=self.ob_https,
+ version=1,
+ username=self.ob_username,
+ password=self.ob_password)
+
+ project_agent = self.main_agent.get_agent("project", self.ob_projectid)
+ for p in json.loads(project_agent.find()):
+ if p.get("name") == "default":
+ self.ob_projectid = p.get("id")
+ break
+
+ self.logger.debug("project id: %s" % self.ob_projectid)
+ if self.ob_projectid == "":
+ self.logger.error("Default project id was not found!")
+ self.step_failure("Default project id was not found!")
+
+ vim_json = {
+ "name": "vim-instance",
+ "authUrl": os_utils.get_credentials().get("auth_url"),
+ "tenant": os_utils.get_credentials().get("tenant_name"),
+ "username": os_utils.get_credentials().get("username"),
+ "password": os_utils.get_credentials().get("password"),
+ # "keyPair": "opnfv",
+ # TODO change the keypair to correct value
+ # or upload a correct one or remove it
+ "securityGroups": [
+ "default",
+ "orchestra-sec-group"
+ ],
+ "type": "openstack",
+ "location": {
+ "name": "opnfv",
+ "latitude": "52.525876",
+ "longitude": "13.314400"
+ }
+ }
+
+ self.logger.debug("vim: %s" % vim_json)
+
+ self.main_agent.get_agent(
+ "vim",
+ project_id=self.ob_projectid).create(entity=json.dumps(vim_json))
+
+ market_agent = self.main_agent.get_agent("market",
+ project_id=self.ob_projectid)
+
+ nsd = {}
+ try:
+ self.logger.info("sending: %s" % self.market_link)
+ nsd = market_agent.create(entity=self.market_link)
+ self.logger.info("Onboarded nsd: " + nsd.get("name"))
+ except NfvoException as e:
+ self.step_failure(e.message)
+
+ nsr_agent = self.main_agent.get_agent("nsr",
+ project_id=self.ob_projectid)
+ nsd_id = nsd.get('id')
+ if nsd_id is None:
+ self.step_failure("NSD not onboarded correctly")
+
+ nsr = None
+ try:
+ nsr = nsr_agent.create(nsd_id)
+ except NfvoException as e:
+ self.step_failure(e.message)
+
+ if nsr.get('code') is not None:
+ self.logger.error(
+ "vIMS cannot be deployed: %s -> %s" %
+ (nsr.get('code'), nsr.get('message')))
+ self.step_failure("vIMS cannot be deployed")
+
+ i = 0
+ self.logger.info("waiting NSR to go to active...")
+ while nsr.get("status") != 'ACTIVE' and nsr.get("status") != 'ERROR':
+ i += 1
+ if i == 100:
+ self.step_failure("After %s sec the nsr did not go to active.."
+ % 5 * 100)
+ time.sleep(5)
+ nsr = json.loads(nsr_agent.find(nsr.get('id')))
+
+ if nsr.get("status") == 'ACTIVE':
+ deploy_vnf = {'status': "PASS", 'result': nsr}
+ self.logger.info("Deploy VNF: OK")
+ else:
+ deploy_vnf = {'status': "FAIL", 'result': nsr}
+ self.logger.error("Deploy VNF: ERROR")
+ self.step_failure("Deploy vIMS failed")
+ self.ob_nsr_id = nsr.get("id")
+ return deploy_vnf
def test_vnf(self):
# Adaptations probably needed
# code used for cloudify_ims
# ruby client on jumphost calling the vIMS on the SUT
- script = "source {0}venv_cloudify/bin/activate; "
- script += "cd {0}; "
- script += "cfy status | grep -Eo \"([0-9]{{1,3}}\.){{3}}[0-9]{{1,3}}\""
- cmd = "/bin/bash -c '" + script.format(self.data_dir) + "'"
+ return
- try:
- self.logger.debug("Trying to get clearwater manager IP ... ")
- mgr_ip = os.popen(cmd).read()
- mgr_ip = mgr_ip.splitlines()[0]
- except:
- self.step_failure("Unable to retrieve the IP of the "
- "cloudify manager server !")
-
- api_url = "http://" + mgr_ip + "/api/v2"
- dep_outputs = requests.get(api_url + "/deployments/" +
- self.vnf.deployment_name + "/outputs")
- dns_ip = dep_outputs.json()['outputs']['dns_ip']
- ellis_ip = dep_outputs.json()['outputs']['ellis_ip']
-
- ellis_url = "http://" + ellis_ip + "/"
- url = ellis_url + "accounts"
-
- params = {"password": "functest",
- "full_name": "opnfv functest user",
- "email": "functest@opnfv.fr",
- "signup_code": "secret"}
-
- rq = requests.post(url, data=params)
- i = 20
- while rq.status_code != 201 and i > 0:
- rq = requests.post(url, data=params)
- i = i - 1
- time.sleep(10)
-
- if rq.status_code == 201:
- url = ellis_url + "session"
- rq = requests.post(url, data=params)
- cookies = rq.cookies
-
- url = ellis_url + "accounts/" + params['email'] + "/numbers"
- if cookies != "":
- rq = requests.post(url, cookies=cookies)
- i = 24
- while rq.status_code != 200 and i > 0:
- rq = requests.post(url, cookies=cookies)
- i = i - 1
- time.sleep(25)
-
- if rq.status_code != 200:
- self.step_failure("Unable to create a number: %s"
- % rq.json()['reason'])
-
- nameservers = ft_utils.get_resolvconf_ns()
- resolvconf = ""
- for ns in nameservers:
- resolvconf += "\nnameserver " + ns
-
- if dns_ip != "":
- script = ('echo -e "nameserver ' + dns_ip + resolvconf +
- '" > /etc/resolv.conf; ')
- script += 'source /etc/profile.d/rvm.sh; '
- script += 'cd {0}; '
- script += ('rake test[{1}] SIGNUP_CODE="secret"')
-
- cmd = ("/bin/bash -c '" +
- script.format(self.data_dir, self.inputs["public_domain"]) +
- "'")
- output_file = "output.txt"
- f = open(output_file, 'w+')
- subprocess.call(cmd, shell=True, stdout=f,
- stderr=subprocess.STDOUT)
- f.close()
-
- f = open(output_file, 'r')
- result = f.read()
- if result != "":
- self.logger.debug(result)
-
- vims_test_result = ""
- tempFile = os.path.join(self.test_dir, "temp.json")
- try:
- self.logger.debug("Trying to load test results")
- with open(tempFile) as f:
- vims_test_result = json.load(f)
- f.close()
- except:
- self.logger.error("Unable to retrieve test results")
+ def clean(self):
+ self.main_agent.get_agent(
+ "nsr",
+ project_id=self.ob_projectid).delete(self.ob_nsr_id)
+ time.sleep(5)
+ os_utils.delete_instance(nova_client=os_utils.get_nova_client(),
+ instance_id=self.ob_instance_id)
+        # TODO: question - does the clean also remove the VM?
+        # I think so, since it is going to remove the tenant...
+ super(ImsVnf, self).clean()
- try:
- os.remove(tempFile)
- except:
- self.logger.error("Deleting file failed")
+ def main(self, **kwargs):
+ self.logger.info("Orchestra IMS VNF onboarding test starting")
+ self.execute()
+ self.logger.info("Orchestra IMS VNF onboarding test executed")
+ if self.criteria is "PASS":
+ return self.EX_OK
+ else:
+ return self.EX_RUN_ERROR
- if vims_test_result != '':
- return {'status': 'PASS', 'result': vims_test_result}
- else:
- return {'status': 'FAIL', 'result': ''}
+ def run(self):
+ kwargs = {}
+ return self.main(**kwargs)
- def clean(self):
- # TODO
- super(ImsVnf, self).clean()
+
+if __name__ == '__main__':
+ test = ImsVnf()
+ test.deploy_orchestrator()
+ test.deploy_vnf()
+ test.clean()
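Editor's note: both IMS cases now read their settings through the dotted-path get_config helper added in this change. A small usage sketch of that lookup; the file name and keys below are examples mirroring the openbaton.* entries used in the code, not the actual contents of the config file:

    # Sketch: dotted-path lookup into a YAML file, as done by get_config above.
    import yaml

    def get_config(parameter, path):
        with open(path) as f:
            value = yaml.safe_load(f)
        for element in parameter.split("."):
            value = value.get(element)
            if value is None:
                raise ValueError("parameter %s not defined in %s"
                                 % (parameter, path))
        return value

    # e.g. with a YAML file containing:
    #   openbaton:
    #     imagename: <glance-image-name>
    # get_config("openbaton.imagename", "orchestra_ims.yaml") returns the name.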
diff --git a/functest/opnfv_tests/vnf/ims/orchestra_ims.py b/functest/opnfv_tests/vnf/ims/orchestra_ims.py
index 28f37f053..352b609b0 100644
--- a/functest/opnfv_tests/vnf/ims/orchestra_ims.py
+++ b/functest/opnfv_tests/vnf/ims/orchestra_ims.py
@@ -8,150 +8,383 @@
# http://www.apache.org/licenses/LICENSE-2.0
import json
-import os
-import requests
-import subprocess
+import socket
+import sys
import time
+import yaml
import functest.core.vnf_base as vnf_base
import functest.utils.functest_logger as ft_logger
import functest.utils.functest_utils as ft_utils
+import functest.utils.openstack_utils as os_utils
+import os
from functest.utils.constants import CONST
+from org.openbaton.cli.agents.agents import MainAgent
+from org.openbaton.cli.errors.errors import NfvoException
-class ImsVnf(vnf_base.VnfOnBoardingBase):
+def servertest(host, port):
+ args = socket.getaddrinfo(host, port, socket.AF_INET, socket.SOCK_STREAM)
+ for family, socktype, proto, canonname, sockaddr in args:
+ s = socket.socket(family, socktype, proto)
+ try:
+ s.connect(sockaddr)
+ except socket.error:
+ return False
+ else:
+ s.close()
+ return True
+
+
+class ImsVnf(vnf_base.VnfOnBoardingBase):
def __init__(self, project='functest', case='orchestra_ims',
repo='', cmd=''):
super(ImsVnf, self).__init__(project, case, repo, cmd)
- self.logger = ft_logger.Logger("vIMS").getLogger()
- self.case_dir = os.path.join(CONST.functest_test, 'vnf/ims/')
- self.data_dir = CONST.dir_vIMS_data
+ self.ob_password = "openbaton"
+ self.ob_username = "admin"
+ self.ob_https = False
+ self.ob_port = "8080"
+ self.ob_ip = "localhost"
+ self.ob_instance_id = ""
+ self.logger = ft_logger.Logger("orchestra_ims").getLogger()
+ self.case_dir = os.path.join(CONST.dir_functest_test, 'vnf/ims/')
+ self.data_dir = CONST.dir_ims_data
self.test_dir = CONST.dir_repo_vims_test
-
+ self.ob_projectid = ""
+ self.keystone_client = os_utils.get_keystone_client()
+ self.ob_nsr_id = ""
+ self.main_agent = None
# vIMS Data directory creation
if not os.path.exists(self.data_dir):
os.makedirs(self.data_dir)
+ # Retrieve the configuration
+ try:
+ self.config = CONST.__getattribute__(
+ 'vnf_{}_config'.format(self.case_name))
+ except:
+ raise Exception("Orchestra VNF config file not found")
+ config_file = self.case_dir + self.config
+ self.imagename = get_config("openbaton.imagename", config_file)
+ self.market_link = get_config("openbaton.marketplace_link",
+ config_file)
+ self.images = get_config("tenant_images", config_file)
def deploy_orchestrator(self, **kwargs):
- # TODO
- # put your code here to deploy openbaton
- # from the functest docker located on the jumphost
- # you have admin rights on OpenStack SUT
- # you can cretae a VM, spawn docker on the jumphost
- # spawn docker on a VM in the SUT, ..up to you
- #
- # note: this step can be ignored
- # if OpenBaton is part of the installer
+ self.logger.info("Additional pre-configuration steps")
+ nova_client = os_utils.get_nova_client()
+ neutron_client = os_utils.get_neutron_client()
+ glance_client = os_utils.get_glance_client()
+
+ # Import images if needed
+ self.logger.info("Upload some OS images if it doesn't exist")
+ temp_dir = os.path.join(self.data_dir, "tmp/")
+ for image_name, image_url in self.images.iteritems():
+ self.logger.info("image: %s, url: %s" % (image_name, image_url))
+ try:
+ image_id = os_utils.get_image_id(glance_client,
+ image_name)
+ self.logger.info("image_id: %s" % image_id)
+ except:
+ self.logger.error("Unexpected error: %s" % sys.exc_info()[0])
+
+ if image_id == '':
+ self.logger.info("""%s image doesn't exist on glance repository. Try
+ downloading this image and upload on glance !""" % image_name)
+ image_id = download_and_add_image_on_glance(glance_client,
+ image_name,
+ image_url,
+ temp_dir)
+ if image_id == '':
+ self.step_failure(
+ "Failed to find or upload required OS "
+ "image for this deployment")
+ network_dic = os_utils.create_network_full(neutron_client,
+ "openbaton_mgmt",
+ "openbaton_mgmt_subnet",
+ "openbaton_router",
+ "192.168.100.0/24")
+
+ # orchestrator VM flavor
+ self.logger.info("Check medium Flavor is available, if not create one")
+ flavor_exist, flavor_id = os_utils.get_or_create_flavor(
+ "m1.medium",
+ "4096",
+ '20',
+ '2',
+ public=True)
+ self.logger.debug("Flavor id: %s" % flavor_id)
+
+ if not network_dic:
+ self.logger.error("There has been a problem when creating the "
+ "neutron network")
+
+ network_id = network_dic["net_id"]
+
+ self.logger.info("Creating floating IP for VM in advance...")
+ floatip_dic = os_utils.create_floating_ip(neutron_client)
+ floatip = floatip_dic['fip_addr']
+
+ if floatip is None:
+ self.logger.error("Cannot create floating IP.")
+
+ userdata = "#!/bin/bash\n"
+ userdata += "set -x\n"
+ userdata += "set -e\n"
+ userdata += "echo \"nameserver 8.8.8.8\" >> /etc/resolv.conf\n"
+ userdata += "apt-get install curl\n"
+ userdata += ("echo \"rabbitmq_broker_ip=%s\" > ./config_file\n"
+ % floatip)
+ userdata += "echo \"mysql=no\" >> ./config_file\n"
+ userdata += ("echo \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCuPXrV3"
+ "geeHc6QUdyUr/1Z+yQiqLcOskiEGBiXr4z76MK4abiFmDZ18OMQlc"
+ "fl0p3kS0WynVgyaOHwZkgy/DIoIplONVr2CKBKHtPK+Qcme2PVnCtv"
+ "EqItl/FcD+1h5XSQGoa+A1TSGgCod/DPo+pes0piLVXP8Ph6QS1k7S"
+ "ic7JDeRQ4oT1bXYpJ2eWBDMfxIWKZqcZRiGPgMIbJ1iEkxbpeaAd9O"
+ "4MiM9nGCPESmed+p54uYFjwEDlAJZShcAZziiZYAvMZhvAhe6USljc"
+ "7YAdalAnyD/jwCHuwIrUw/lxo7UdNCmaUxeobEYyyFA1YVXzpNFZya"
+ "XPGAAYIJwEq/ openbaton@opnfv\" >> /home/ubuntu/.ssh/aut"
+ "horized_keys\n")
+ userdata += "cat ./config_file\n"
+ userdata += ("curl -s http://get.openbaton.org/bootstrap "
+ "> ./bootstrap\n")
+ userdata += "export OPENBATON_COMPONENT_AUTOSTART=false\n"
+ bootstrap = "sh ./bootstrap release -configFile=./config_file"
+ userdata += bootstrap + "\n"
+
+ userdata += ("echo \"nfvo.plugin.timeout=300000\" >> "
+ "/etc/openbaton/openbaton-nfvo.properties\n")
+ userdata += "service openbaton-nfvo restart\n"
+ userdata += "service openbaton-vnfm-generic restart\n"
+
+ sg_id = os_utils.create_security_group_full(neutron_client,
+ "orchestra-sec-group",
+ "allowall")
+
+ os_utils.create_secgroup_rule(neutron_client, sg_id, "ingress",
+ "icmp", 0, 255)
+ os_utils.create_secgroup_rule(neutron_client, sg_id, "egress",
+ "icmp", 0, 255)
+ os_utils.create_secgroup_rule(neutron_client, sg_id, "ingress",
+ "tcp", 1, 65535)
+ os_utils.create_secgroup_rule(neutron_client, sg_id, "ingress",
+ "udp", 1, 65535)
+ os_utils.create_secgroup_rule(neutron_client, sg_id, "egress",
+ "tcp", 1, 65535)
+ os_utils.create_secgroup_rule(neutron_client, sg_id, "egress",
+ "udp", 1, 65535)
+
+ self.logger.info("Security group set")
+
+ self.logger.info("Create instance....")
+ self.logger.info("flavor: m1.medium\n"
+ "image: %s\n"
+ "network_id: %s\n"
+ "userdata: %s\n"
+ % (self.imagename, network_id, userdata))
+
+ instance = os_utils.create_instance_and_wait_for_active(
+ "m1.medium",
+ os_utils.get_image_id(glance_client, self.imagename),
+ network_id,
+ "orchestra-openbaton",
+ config_drive=False,
+ userdata=userdata)
+
+ self.ob_instance_id = instance.id
+
+ self.logger.info("Adding sec group to orchestra instance")
+ os_utils.add_secgroup_to_instance(nova_client,
+ self.ob_instance_id, sg_id)
+
+ self.logger.info("Associating floating ip: '%s' to VM '%s' "
+ % (floatip, "orchestra-openbaton"))
+ if not os_utils.add_floating_ip(nova_client, instance.id, floatip):
+ self.logger.error("Cannot associate floating IP to VM.")
+ self.step_failure("Cannot associate floating IP to VM.")
+
+ self.logger.info("Waiting for nfvo to be up and running...")
+ x = 0
+ while x < 100:
+ if servertest(floatip, "8080"):
+ break
+ else:
+ self.logger.debug("openbaton is not started yet")
+ time.sleep(5)
+ x += 1
+
+ if x == 100:
+ self.logger.error("Openbaton is not started correctly")
+ self.step_failure("Openbaton is not started correctly")
+
+ self.ob_ip = floatip
+ self.ob_password = "openbaton"
+ self.ob_username = "admin"
+ self.ob_https = False
+ self.ob_port = "8080"
+
self.logger.info("Deploy orchestrator: OK")
def deploy_vnf(self):
- # deploy the VNF
- # call openbaton to deploy the vIMS
+ self.logger.info("vIMS Deployment")
+
+ self.main_agent = MainAgent(nfvo_ip=self.ob_ip,
+ nfvo_port=self.ob_port,
+ https=self.ob_https,
+ version=1,
+ username=self.ob_username,
+ password=self.ob_password)
+
+ project_agent = self.main_agent.get_agent("project", self.ob_projectid)
+ for p in json.loads(project_agent.find()):
+ if p.get("name") == "default":
+ self.ob_projectid = p.get("id")
+ break
+
+ self.logger.debug("project id: %s" % self.ob_projectid)
+ if self.ob_projectid == "":
+ self.logger.error("Default project id was not found!")
+ self.step_failure("Default project id was not found!")
+
+ vim_json = {
+ "name": "vim-instance",
+ "authUrl": os_utils.get_credentials().get("auth_url"),
+ "tenant": os_utils.get_credentials().get("tenant_name"),
+ "username": os_utils.get_credentials().get("username"),
+ "password": os_utils.get_credentials().get("password"),
+ "keyPair": "opnfv",
+ # TODO change the keypair to correct value
+ # or upload a correct one or remove it
+ "securityGroups": [
+ "default",
+ "orchestra-sec-group"
+ ],
+ "type": "openstack",
+ "location": {
+ "name": "opnfv",
+ "latitude": "52.525876",
+ "longitude": "13.314400"
+ }
+ }
+
+ self.logger.debug("vim: %s" % vim_json)
+
+ self.main_agent.get_agent(
+ "vim",
+ project_id=self.ob_projectid).create(entity=json.dumps(vim_json))
+
+ market_agent = self.main_agent.get_agent("market",
+ project_id=self.ob_projectid)
+
+ nsd = {}
+ try:
+ self.logger.info("sending: %s" % self.market_link)
+ nsd = market_agent.create(entity=self.market_link)
+ self.logger.info("Onboarded nsd: " + nsd.get("name"))
+ except NfvoException as e:
+ self.step_failure(e.message)
+
+ nsr_agent = self.main_agent.get_agent("nsr",
+ project_id=self.ob_projectid)
+ nsd_id = nsd.get('id')
+ if nsd_id is None:
+ self.step_failure("NSD not onboarded correctly")
+
+ nsr = None
+ try:
+ nsr = nsr_agent.create(nsd_id)
+ except NfvoException as e:
+ self.step_failure(e.message)
+
+ if nsr is None:
+ self.step_failure("NSR not deployed correctly")
+
+ i = 0
+ self.logger.info("waiting NSR to go to active...")
+ while nsr.get("status") != 'ACTIVE':
+ i += 1
+ if i == 100:
+ self.step_failure("After %s sec the nsr did not go to active.."
+ % 5 * 100)
+ time.sleep(5)
+ nsr = json.loads(nsr_agent.find(nsr.get('id')))
+
+ deploy_vnf = {'status': "PASS", 'result': nsr}
+ self.ob_nsr_id = nsr.get("id")
self.logger.info("Deploy VNF: OK")
+ return deploy_vnf
def test_vnf(self):
# Adaptations probably needed
# code used for cloudify_ims
# ruby client on jumphost calling the vIMS on the SUT
- script = "source {0}venv_cloudify/bin/activate; "
- script += "cd {0}; "
- script += "cfy status | grep -Eo \"([0-9]{{1,3}}\.){{3}}[0-9]{{1,3}}\""
- cmd = "/bin/bash -c '" + script.format(self.data_dir) + "'"
+ return
- try:
- self.logger.debug("Trying to get clearwater manager IP ... ")
- mgr_ip = os.popen(cmd).read()
- mgr_ip = mgr_ip.splitlines()[0]
- except:
- self.step_failure("Unable to retrieve the IP of the "
- "cloudify manager server !")
-
- api_url = "http://" + mgr_ip + "/api/v2"
- dep_outputs = requests.get(api_url + "/deployments/" +
- self.vnf.deployment_name + "/outputs")
- dns_ip = dep_outputs.json()['outputs']['dns_ip']
- ellis_ip = dep_outputs.json()['outputs']['ellis_ip']
-
- ellis_url = "http://" + ellis_ip + "/"
- url = ellis_url + "accounts"
-
- params = {"password": "functest",
- "full_name": "opnfv functest user",
- "email": "functest@opnfv.fr",
- "signup_code": "secret"}
-
- rq = requests.post(url, data=params)
- i = 20
- while rq.status_code != 201 and i > 0:
- rq = requests.post(url, data=params)
- i = i - 1
- time.sleep(10)
-
- if rq.status_code == 201:
- url = ellis_url + "session"
- rq = requests.post(url, data=params)
- cookies = rq.cookies
-
- url = ellis_url + "accounts/" + params['email'] + "/numbers"
- if cookies != "":
- rq = requests.post(url, cookies=cookies)
- i = 24
- while rq.status_code != 200 and i > 0:
- rq = requests.post(url, cookies=cookies)
- i = i - 1
- time.sleep(25)
-
- if rq.status_code != 200:
- self.step_failure("Unable to create a number: %s"
- % rq.json()['reason'])
-
- nameservers = ft_utils.get_resolvconf_ns()
- resolvconf = ""
- for ns in nameservers:
- resolvconf += "\nnameserver " + ns
-
- if dns_ip != "":
- script = ('echo -e "nameserver ' + dns_ip + resolvconf +
- '" > /etc/resolv.conf; ')
- script += 'source /etc/profile.d/rvm.sh; '
- script += 'cd {0}; '
- script += ('rake test[{1}] SIGNUP_CODE="secret"')
-
- cmd = ("/bin/bash -c '" +
- script.format(self.data_dir, self.inputs["public_domain"]) +
- "'")
- output_file = "output.txt"
- f = open(output_file, 'w+')
- subprocess.call(cmd, shell=True, stdout=f,
- stderr=subprocess.STDOUT)
- f.close()
-
- f = open(output_file, 'r')
- result = f.read()
- if result != "":
- self.logger.debug(result)
-
- vims_test_result = ""
- tempFile = os.path.join(self.test_dir, "temp.json")
- try:
- self.logger.debug("Trying to load test results")
- with open(tempFile) as f:
- vims_test_result = json.load(f)
- f.close()
- except:
- self.logger.error("Unable to retrieve test results")
+ def clean(self):
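+        # Delete the deployed NSR first, then the VM hosting the
+        # Open Baton orchestrator.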
+ self.main_agent.get_agent(
+ "nsr",
+ project_id=self.ob_projectid).delete(self.ob_nsr_id)
+ time.sleep(5)
+ os_utils.delete_instance(nova_client=os_utils.get_nova_client(),
+ instance_id=self.ob_instance_id)
+        # TODO: does clean() also remove the deployed VM?
+        # It probably does, since it is going to remove the tenant...
+ super(ImsVnf, self).clean()
- try:
- os.remove(tempFile)
- except:
- self.logger.error("Deleting file failed")
+ def main(self, **kwargs):
+ self.logger.info("Orchestra IMS VNF onboarding test starting")
+ self.execute()
+ self.logger.info("Orchestra IMS VNF onboarding test executed")
+        if self.criteria == "PASS":
+ return self.EX_OK
+ else:
+ return self.EX_RUN_ERROR
- if vims_test_result != '':
- return {'status': 'PASS', 'result': vims_test_result}
- else:
- return {'status': 'FAIL', 'result': ''}
+ def run(self):
+ kwargs = {}
+ return self.main(**kwargs)
- def clean(self):
- # TODO
- super(ImsVnf, self).clean()
+
+if __name__ == '__main__':
+ test = ImsVnf()
+ test.deploy_orchestrator()
+ test.deploy_vnf()
+ test.clean()
+
+
+# ----------------------------------------------------------
+#
+# UTILS
+#
+# -----------------------------------------------------------
+def get_config(parameter, file):
+    """
+    Return the value of a given parameter from a YAML file.
+    The parameter must be given in dotted notation,
+    e.g.: general.openstack.image_name
+    """
+ with open(file) as f:
+ file_yaml = yaml.safe_load(f)
+ value = file_yaml
+ for element in parameter.split("."):
+ value = value.get(element)
+ if value is None:
+            raise ValueError("The parameter %s is not defined in"
+                             " %s" % (parameter, file))
+ return value
+
+
+def download_and_add_image_on_glance(glance, image_name,
+ image_url, data_dir):
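+    # Download the image into data_dir and register it in Glance;
+    # return the created image, or False on failure.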
+ dest_path = data_dir
+ if not os.path.exists(dest_path):
+ os.makedirs(dest_path)
+ file_name = image_url.rsplit('/')[-1]
+ if not ft_utils.download_url(image_url, dest_path):
+ return False
+ image = os_utils.create_glance_image(
+        glance, image_name, os.path.join(dest_path, file_name))
+ if not image:
+ return False
+ return image
diff --git a/functest/opnfv_tests/vnf/ims/orchestra_ims.yaml b/functest/opnfv_tests/vnf/ims/orchestra_ims.yaml
new file mode 100644
index 000000000..2fb33df5d
--- /dev/null
+++ b/functest/opnfv_tests/vnf/ims/orchestra_ims.yaml
@@ -0,0 +1,7 @@
+tenant_images:
+ ubuntu_14.04: http://cloud-images.ubuntu.com/trusty/current/trusty-server-cloudimg-amd64-disk1.img
+ openims: http://marketplace.openbaton.org:8082/api/v1/images/52e2ccc0-1dce-4663-894d-28aab49323aa/img
+openbaton:
+ bootstrap: sh <(curl -s http://get.openbaton.org/bootstrap) release -configFile=
+ marketplace_link: http://marketplace.openbaton.org:8082/api/v1/nsds/fokus/OpenImsCore/3.2.0/json
+ imagename: ubuntu_14.04
diff --git a/functest/opnfv_tests/vnf/ims/orchestrator_cloudify.py b/functest/opnfv_tests/vnf/ims/orchestrator_cloudify.py
index 775b71c8a..82a9dca05 100644
--- a/functest/opnfv_tests/vnf/ims/orchestrator_cloudify.py
+++ b/functest/opnfv_tests/vnf/ims/orchestrator_cloudify.py
@@ -21,7 +21,7 @@ from git import Repo
import functest.utils.functest_logger as ft_logger
-class Orchestrator:
+class Orchestrator(object):
def __init__(self, testcase_dir, inputs={}):
self.testcase_dir = testcase_dir
diff --git a/functest/opnfv_tests/vnf/router/__init__.py b/functest/opnfv_tests/vnf/router/__init__.py
new file mode 100755
index 000000000..e69de29bb
--- /dev/null
+++ b/functest/opnfv_tests/vnf/router/__init__.py
diff --git a/functest/opnfv_tests/vnf/router/vyos_vrouter.py b/functest/opnfv_tests/vnf/router/vyos_vrouter.py
new file mode 100755
index 000000000..94a3ecfd8
--- /dev/null
+++ b/functest/opnfv_tests/vnf/router/vyos_vrouter.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+#
+# Copyright 2017 Okinawa Open Laboratory
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+import functest.core.feature_base as base
+import json
+import os
+
+RESULT_DETAILS_FILE = "test_result.json"
+
+
+class VrouterVnf(base.FeatureBase):
+ def __init__(self):
+ super(VrouterVnf, self).__init__(project='vRouter',
+ case='vyos_vrouter',
+ repo='dir_repo_vrouter')
+ self.cmd = 'cd %s && ./run.sh' % self.repo
+
+ def set_result_details(self):
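+        # Load the JSON results file left in the repository (presumably
+        # written by run.sh) and expose it as the test case details.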
+ filepath = os.path.join(self.repo, RESULT_DETAILS_FILE)
+ if os.path.exists(filepath):
+            with open(filepath, 'r') as f:
+                self.details = json.load(f)
+
+ def log_results(self):
+ if self.criteria == 'PASS':
+ self.set_result_details()
+ super(VrouterVnf, self).log_results()
diff --git a/functest/utils/env.py b/functest/utils/env.py
index fa5245fb6..7e4df2ea5 100644
--- a/functest/utils/env.py
+++ b/functest/utils/env.py
@@ -3,7 +3,7 @@ import re
default_envs = {
'NODE_NAME': 'unknown_pod',
- 'CI_DEBUG': 'true',
+ 'CI_DEBUG': 'false',
'DEPLOY_SCENARIO': 'os-nosdn-nofeature-noha',
'DEPLOY_TYPE': 'virt',
'INSTALLER_TYPE': None,
diff --git a/functest/utils/functest_logger.py b/functest/utils/functest_logger.py
index 0cba8c528..022211cb7 100755
--- a/functest/utils/functest_logger.py
+++ b/functest/utils/functest_logger.py
@@ -29,10 +29,12 @@ import json
from functest.utils.constants import CONST
-class Logger:
+class Logger(object):
+
def __init__(self, logger_name):
self.setup_logging()
self.logger = logging.getLogger(logger_name)
+ logging.getLogger("paramiko").setLevel(logging.WARNING)
def getLogger(self):
return self.logger
diff --git a/functest/utils/functest_utils.py b/functest/utils/functest_utils.py
index b2c36cff9..dbed811a7 100644
--- a/functest/utils/functest_utils.py
+++ b/functest/utils/functest_utils.py
@@ -291,6 +291,13 @@ def get_ci_envvars():
return ci_env_var
+def execute_command_raise(cmd, info=False, error_msg="",
+ verbose=True, output_file=None):
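+    # Same as execute_command(), but raise an Exception carrying
+    # error_msg when the command exits with a non-zero code.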
+ ret = execute_command(cmd, info, error_msg, verbose, output_file)
+ if ret != 0:
+ raise Exception(error_msg)
+
+
def execute_command(cmd, info=False, error_msg="",
verbose=True, output_file=None):
if not error_msg:
@@ -362,7 +369,7 @@ def get_parameter_from_yaml(parameter, file):
value = value.get(element)
if value is None:
raise ValueError("The parameter %s is not defined in"
- " config_functest.yaml" % parameter)
+ " %s" % (parameter, file))
return value
diff --git a/functest/utils/openstack_tacker.py b/functest/utils/openstack_tacker.py
index 1c02e0403..8327fdbe2 100644
--- a/functest/utils/openstack_tacker.py
+++ b/functest/utils/openstack_tacker.py
@@ -48,9 +48,8 @@ def get_vnfd_id(tacker_client, vnfd_name):
def get_vnf_id(tacker_client, vnf_name, timeout=5):
vnf_id = None
while vnf_id is None and timeout >= 0:
- try:
- vnf_id = get_id_from_name(tacker_client, 'vnf', vnf_name)
- except:
+ vnf_id = get_id_from_name(tacker_client, 'vnf', vnf_name)
+ if vnf_id is None:
logger.info("Could not retrieve ID for vnf with name [%s]."
" Retrying." % vnf_name)
time.sleep(1)
@@ -145,27 +144,44 @@ def create_vnf(tacker_client, vnf_name, vnfd_id=None,
return None
-def wait_for_vnf(tacker_client, vnf_id=None, vnf_name=None, timeout=60):
+def get_vnf(tacker_client, vnf_id=None, vnf_name=None):
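+    # Return the VNF matching vnf_id (or the id resolved from vnf_name),
+    # or None if it cannot be retrieved.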
try:
if vnf_id is None and vnf_name is None:
raise Exception('You must specify vnf_id or vnf_name')
+
_id = get_vnf_id(tacker_client, vnf_name) if vnf_id is None else vnf_id
- vnf = next((v for v in list_vnfs(tacker_client, verbose=True)['vnfs']
- if v['id'] == _id), None)
+ if _id is not None:
+ all_vnfs = list_vnfs(tacker_client, verbose=True)['vnfs']
+ return next((vnf for vnf in all_vnfs if vnf['id'] == _id), None)
+ else:
+ raise Exception('Could not retrieve ID from name [%s]' % vnf_name)
+
+ except Exception, e:
+ logger.error("Could not retrieve VNF [vnf_id=%s, vnf_name=%s] - %s"
+ % (vnf_id, vnf_name, e))
+ return None
+
+
+def wait_for_vnf(tacker_client, vnf_id=None, vnf_name=None, timeout=60):
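+    # Poll the VNF until it leaves PENDING_CREATE; raise on ERROR status
+    # or when the timeout expires.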
+ try:
+ vnf = get_vnf(tacker_client, vnf_id, vnf_name)
if vnf is None:
- raise Exception("Could not retrieve VNF with ID [%s]" % _id)
+            raise Exception("Could not retrieve VNF - id='%s', name='%s'"
+                            % (vnf_id, vnf_name))
logger.info('Waiting for vnf {0}'.format(str(vnf)))
- while True and timeout >= 0:
+ while vnf['status'] != 'ACTIVE' and timeout >= 0:
if vnf['status'] == 'ERROR':
- raise Exception('Error when booting vnf %s' % _id)
+ raise Exception('Error when booting vnf %s' % vnf['id'])
elif vnf['status'] == 'PENDING_CREATE':
time.sleep(3)
timeout -= 3
- continue
- else:
- break
- return _id
+ vnf = get_vnf(tacker_client, vnf_id, vnf_name)
+
+        if timeout < 0:
+ raise Exception('Timeout when booting vnf %s' % vnf['id'])
+
+ return vnf['id']
except Exception, e:
logger.error("error [wait_for_vnf(tacker_client, '%s', '%s')]: %s"
% (vnf_id, vnf_name, e))
diff --git a/requirements.txt b/requirements.txt
index b5e78bb59..68b889b35 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -28,4 +28,5 @@ subprocess32
shyaml
dnspython
Pillow==3.3.0
-click==6.6
\ No newline at end of file
+click==6.6
+openbaton-cli==2.2.1-beta7