Diffstat (limited to 'docs')
-rw-r--r--  docs/testing/user/userguide/conf.py              185
-rw-r--r--  docs/testing/user/userguide/index.rst             38
-rw-r--r--  docs/testing/user/userguide/intro.rst             12
-rw-r--r--  docs/testing/user/userguide/reporting.rst         24
-rw-r--r--  docs/testing/user/userguide/runfunctest.rst       10
-rw-r--r--  docs/testing/user/userguide/test_details.rst      86
-rw-r--r--  docs/testing/user/userguide/test_overview.rst    308
-rw-r--r--  docs/testing/user/userguide/test_results.rst      10
-rw-r--r--  docs/testing/user/userguide/troubleshooting.rst  135
9 files changed, 508 insertions, 300 deletions
diff --git a/docs/testing/user/userguide/conf.py b/docs/testing/user/userguide/conf.py
new file mode 100644
index 000000000..cd37ed4f0
--- /dev/null
+++ b/docs/testing/user/userguide/conf.py
@@ -0,0 +1,185 @@
+# -*- coding: utf-8 -*-
+#
+# Functest User Guide documentation build configuration file, created by
+# sphinx-quickstart on Wed Apr 4 04:05:42 2018.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+# import os
+# import sys
+# sys.path.insert(0, os.path.abspath('.'))
+import sphinx_opnfv_theme
+
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = []
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+#
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'Functest User Guide'
+copyright = u'2018, Cédric Ollivier <cedric.ollivier@orange.com>'
+author = u'Cédric Ollivier <cedric.ollivier@orange.com>'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = u'master'
+# The full version, including alpha/beta/rc tags.
+release = u'master'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This patterns also effect to html_static_path and html_extra_path
+exclude_patterns = []
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
+html_theme = 'opnfv'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#
+# html_theme_options = {}
+html_theme_options = {
+ 'bootswatch_theme': 'journal',
+ 'navbar_sidebarrel': False,
+ 'navbar_title': '',
+}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+html_theme_path = sphinx_opnfv_theme.get_html_theme_path()
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+# html_static_path = []
+
+# Custom sidebar templates, must be a dictionary that maps document names
+# to template names.
+#
+# This is required for the alabaster theme
+# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
+html_sidebars = {
+ '**': [
+ 'relations.html', # needs 'show_related': True theme option to display
+ 'searchbox.html',
+ ]
+}
+
+
+# -- Options for HTMLHelp output ------------------------------------------
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'FunctestUserGuidedoc'
+
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+ # The paper size ('letterpaper' or 'a4paper').
+ #
+ # 'papersize': 'letterpaper',
+
+ # The font size ('10pt', '11pt' or '12pt').
+ #
+ # 'pointsize': '10pt',
+
+ # Additional stuff for the LaTeX preamble.
+ #
+ # 'preamble': '',
+
+ # Latex figure (float) alignment
+ #
+ # 'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ (master_doc,
+ 'FunctestUserGuide.tex',
+ u'Functest User Guide Documentation',
+ u'Cédric Ollivier \\textless{}cedric.ollivier@orange.com\\textgreater{}',
+ 'manual'),
+]
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ (master_doc,
+ 'functestuserguide',
+ u'Functest User Guide Documentation',
+ [author],
+ 1)
+]
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (master_doc,
+ 'FunctestUserGuide',
+ u'Functest User Guide Documentation',
+ author,
+ 'FunctestUserGuide',
+ 'One line description of project.',
+ 'Miscellaneous'),
+]
diff --git a/docs/testing/user/userguide/index.rst b/docs/testing/user/userguide/index.rst
index 66dfd3e7b..36951fc66 100644
--- a/docs/testing/user/userguide/index.rst
+++ b/docs/testing/user/userguide/index.rst
@@ -1,8 +1,7 @@
-.. _functest-userguide:
-
-.. This work is licensed under a Creative Commons Attribution 4.0 International License.
.. SPDX-License-Identifier: CC-BY-4.0
+.. _functest-userguide:
+
*******************
Functest User Guide
*******************
@@ -10,32 +9,13 @@ Functest User Guide
.. toctree::
:maxdepth: 2
-
-
-Introduction
-============
-
-The goal of this document is to describe the OPNFV Functest test cases and to
-provide a procedure to execute them.
-
-**IMPORTANT**: It is assumed here that Functest has been properly deployed
-following the installation guide procedure `[1]`_.
-
-.. include:: ./test_overview.rst
-
-.. include:: ./test_details.rst
-
-.. include:: ./runfunctest.rst
-
-.. include:: ./test_results.rst
-
-.. include:: ./reporting.rst
-
-.. figure:: ../../../images/functest-reporting-status.png
- :align: center
- :alt: Functest reporting portal Fuel status page
-
-.. include:: ./troubleshooting.rst
+ intro.rst
+ test_overview.rst
+ test_details.rst
+ runfunctest.rst
+ test_results.rst
+ reporting.rst
+ troubleshooting.rst
References
diff --git a/docs/testing/user/userguide/intro.rst b/docs/testing/user/userguide/intro.rst
new file mode 100644
index 000000000..0c36cd8c7
--- /dev/null
+++ b/docs/testing/user/userguide/intro.rst
@@ -0,0 +1,12 @@
+.. SPDX-License-Identifier: CC-BY-4.0
+
+Introduction
+============
+
+The goal of this document is to describe the OPNFV Functest test cases and to
+provide a procedure to execute them.
+
+**IMPORTANT**: It is assumed here that Functest has been properly deployed
+following the installation guide procedure `[1]`_.
+
+.. _`[1]`: http://docs.opnfv.org/en/latest/submodules/functest/docs/testing/user/configguide/index.html
diff --git a/docs/testing/user/userguide/reporting.rst b/docs/testing/user/userguide/reporting.rst
index 88f5e865a..337c203db 100644
--- a/docs/testing/user/userguide/reporting.rst
+++ b/docs/testing/user/userguide/reporting.rst
@@ -1,4 +1,3 @@
-.. This work is licensed under a Creative Commons Attribution 4.0 International License.
.. http://creativecommons.org/licenses/by/4.0
Test reporting
@@ -42,7 +41,9 @@ and features) corresponding to this scenario.
+---------------------+---------+---------+---------+---------+
| copper | X | | | X |
+---------------------+---------+---------+---------+---------+
- src: os-odl_l2-nofeature-ha Colorado (see release note for the last matrix version)
+
+ src: os-odl_l2-nofeature-ha Colorado (see release note for the last matrix
+ version)
All the testcases (X) listed in the table are runnable on os-odl_l2-nofeature
scenarios.
@@ -69,16 +70,20 @@ scoring. In fact the success criteria are not always easy to define and may
require specific hardware configuration.
Please also note that all the test cases have the same "weight" for the score
-calculation whatever the complexity of the test case. Concretely a vping has the
-same weith than the 200 tempst tests.
+calculation whatever the complexity of the test case. Concretely, a vping has
+the same weight as the 200 Tempest tests.
Moreover some installers support more features than others. The more cases your
scenario is dealing with, the more difficult it is to reach a good score.
Therefore the scoring provides 3 types of indicators:
- * the richness of the scenario: if the target scoring is high, it means that the scenario includes lots of features
- * the maturity: if the percentage (scoring/target scoring * 100) is high, it means that all the tests are PASS
- * the stability: as the number of iteration is included in the calculation, the pecentage can be high only if the scenario is run regularly (at least more than 4 iterations over the last 10 days in CI)
+ * the richness of the scenario: if the target scoring is high, it means that
+ the scenario includes lots of features
+ * the maturity: if the percentage (scoring/target scoring * 100) is high, it
+ means that all the tests are PASS (a hypothetical calculation is sketched
+ after this list)
+ * the stability: as the number of iterations is included in the calculation,
+ the percentage can be high only if the scenario is run regularly (more than
+ 4 iterations over the last 10 days in CI)
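+
+For illustration only, with hypothetical figures (the cap of 4 scored
+iterations per runnable test case is an assumption made for this sketch), the
+maturity percentage could be computed as follows::
+
+    # 12 runnable test cases, up to 4 scored iterations each -> target = 48
+    # 36 points actually collected over the last 10 days of CI runs
+    echo "maturity: $(( 36 * 100 / (12 * 4) ))%"   # -> maturity: 75%
+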
In any case, the scoring is used to give feedback to the other projects and
does not represent an absolute value of the scenario.
@@ -86,5 +91,8 @@ does not represent an absolute value of the scenario.
See `reporting page`_ for details. For the status, click on the version,
Functest then the Status menu.
-
.. _`reporting page`: http://testresults.opnfv.org/reporting/
+
+.. figure:: ../../../images/functest-reporting-status.png
+ :align: center
+ :alt: Functest reporting portal Fuel status page
diff --git a/docs/testing/user/userguide/runfunctest.rst b/docs/testing/user/userguide/runfunctest.rst
index d5b95101a..07e16efed 100644
--- a/docs/testing/user/userguide/runfunctest.rst
+++ b/docs/testing/user/userguide/runfunctest.rst
@@ -1,6 +1,4 @@
-.. This work is licensed under a Creative Commons Attribution 4.0 International License.
-.. http://creativecommons.org/licenses/by/4.0
-
+.. SPDX-License-Identifier: CC-BY-4.0
Executing Functest suites
=========================
@@ -38,8 +36,8 @@ The result tables show the results by test case, it can be::
Manual run
==========
-If you want to run the test step by step, you may add docker option then run the
-different commands within the docker.
+If you want to run the test step by step, you may add the docker option and
+then run the different commands within the docker container.
Considering the healthcheck example, running functest manually means::
@@ -104,7 +102,7 @@ Note the list of test cases depend on the installer and the scenario.
Reporting results to the test Database
======================================
-In OPNFV CI we collect all the results from CI. A test APi shall be available
+In OPNFV CI we collect all the results from CI. A test API shall be available
as well as a test database `[17]`_.
Functest internal API
diff --git a/docs/testing/user/userguide/test_details.rst b/docs/testing/user/userguide/test_details.rst
index 1ce4fce76..f5381b8ac 100644
--- a/docs/testing/user/userguide/test_details.rst
+++ b/docs/testing/user/userguide/test_details.rst
@@ -1,8 +1,7 @@
-.. This work is licensed under a Creative Commons Attribution 4.0 International License.
-.. http://creativecommons.org/licenses/by/4.0
+.. SPDX-License-Identifier: CC-BY-4.0
-
-The different test cases are described in the remaining sections of this document.
+The different test cases are described in the remaining sections of this
+document.
VIM (Virtualized Infrastructure Manager)
----------------------------------------
@@ -59,8 +58,8 @@ Given the script **ping.sh**::
The goal of this test is to establish an SSH connection using a floating IP
-on the Public/External network and verify that 2 instances can talk over a Private
-Tenant network::
+on the Public/External network and verify that 2 instances can talk over a
+Private Tenant network::
vPing_ssh test case
+-------------+ +-------------+
@@ -105,7 +104,8 @@ vPing_userdata
This test case is similar to vPing_ssh but without the use of Floating IPs
and the Public/External network to transfer the ping script.
-Instead, it uses Nova metadata service to pass it to the instance at booting time.
+Instead, it uses Nova metadata service to pass it to the instance at booting
+time.
As vPing_ssh, it checks that 2 instances can talk to
each other on a Private Tenant network::
@@ -166,14 +166,15 @@ Tiers:
* Neutron Trunk Port - Test case 'neutron_trunk'
NOTE: Test case 'tempest_smoke_serial' executes a defined set of tempest smoke
-tests with a single thread (i.e. serial mode). Test case 'tempest_full_parallel'
-executes all defined Tempest tests using several concurrent threads
-(i.e. parallel mode). The number of threads activated corresponds to the number
-of available logical CPUs.
+tests with a single thread (i.e. serial mode). Test case
+'tempest_full_parallel' executes all defined Tempest tests using several
+concurrent threads (i.e. parallel mode). The number of threads activated
+corresponds to the number of available logical CPUs.
-NOTE: The 'neutron_trunk' test set allows to connect a VM to multiple VLAN separated
-networks using a single NIC. The feature neutron trunk ports have been supported
-by Apex, Fuel and Compass, so the tempest testcases have been integrated normally.
+NOTE: The 'neutron_trunk' test set allows connecting a VM to multiple
+VLAN-separated networks using a single NIC. Neutron trunk ports are supported
+by Apex, Fuel and Compass, so the tempest testcases have been integrated
+normally.
The goal of the Tempest test suite is to check the basic functionalities of the
different OpenStack components on an OPNFV fresh installation, using the
@@ -187,10 +188,11 @@ Rally `[3]`_ is a benchmarking tool that answers the question:
*How does OpenStack work at scale?*
-The goal of this test suite is to benchmark all the different OpenStack modules and
-get significant figures that could help to define Telco Cloud KPIs.
+The goal of this test suite is to benchmark all the different OpenStack modules
+and get significant figures that could help to define Telco Cloud KPIs.
-The OPNFV Rally scenarios are based on the collection of the actual Rally scenarios:
+The OPNFV Rally scenarios are based on the collection of the actual Rally
+scenarios:
* authenticate
* cinder
@@ -258,8 +260,8 @@ In OPNFV Continuous Integration(CI) system, the command line method is used.
*manually*
-Prepare the tempest configuration file and the testcases want to run with the SUT,
-run the testcases with:
+Prepare the tempest configuration file and the testcases to run against the
+SUT, then run the testcases with:
::
@@ -282,16 +284,16 @@ is generated by Rally.
*manually*
-When running manually is used, recommended way to generate tempest configuration
-file is:
+When running manually, the recommended way to generate the tempest
+configuration file is:
::
cd /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/refstack_client
python tempest_conf.py
-a file called tempest.conf is stored in the current path by default, users can do
-some adjustment according to the SUT:
+A file called tempest.conf is stored in the current path by default; users can
+adjust it according to the SUT:
::
@@ -306,8 +308,8 @@ snaps_smoke
This test case contains tests that setup and destroy environments with VMs with
and without Floating IPs with a newly created user and project. Set the config
value snaps.use_floating_ips (True|False) to toggle this functionality.
-Please note that When the configuration value of snaps.use_keystone is True, Functest must have access
-the cloud's private network.
+Please note that when the configuration value of snaps.use_keystone is True,
+Functest must have access to the cloud's private network.
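+
+A minimal sketch of toggling this behaviour from inside the container, assuming
+the SNAPS options live in the Functest configuration file shown below (the file
+path and key layout are assumptions, adapt them to your installation)::
+
+    # switch off floating IPs for the SNAPS test cases
+    sed -i 's/use_floating_ips: True/use_floating_ips: False/' \
+        /usr/lib/python2.7/site-packages/functest/ci/config_functest.yaml
+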
This suite consists in 38 tests (test duration < 10 minutes)
@@ -325,31 +327,40 @@ OpenDaylight and Neutron.
The list of tests can be described as follows:
* Basic Restconf test cases
+
* Connect to Restconf URL
* Check the HTTP code status
* Neutron Reachability test cases
+
* Get the complete list of neutron resources (networks, subnets, ports)
* Neutron Network test cases
+
* Check OpenStack networks
* Check OpenDaylight networks
- * Create a new network via OpenStack and check the HTTP status code returned by Neutron
+ * Create a new network via OpenStack and check the HTTP status code returned
+ by Neutron
* Check that the network has also been successfully created in OpenDaylight
* Neutron Subnet test cases
+
* Check OpenStack subnets
* Check OpenDaylight subnets
- * Create a new subnet via OpenStack and check the HTTP status code returned by Neutron
+ * Create a new subnet via OpenStack and check the HTTP status code returned
+ by Neutron
* Check that the subnet has also been successfully created in OpenDaylight
* Neutron Port test cases
+
* Check OpenStack Neutron for known ports
* Check OpenDaylight ports
- * Create a new port via OpenStack and check the HTTP status code returned by Neutron
+ * Create a new port via OpenStack and check the HTTP status code returned by
+ Neutron
* Check that the new port has also been successfully created in OpenDaylight
* Delete operations
+
* Delete the port previously created via OpenStack
* Check that the port has been also successfully deleted in OpenDaylight
* Delete previously subnet created via OpenStack
@@ -364,7 +375,7 @@ code returned by OpenDaylight.
Features
--------
-Functest has been supporting several feature projects since Brahpamutra:
+Functest has been supporting several feature projects since Brahmaputra:
+-----------------+---------+----------+--------+-----------+
@@ -414,9 +425,9 @@ The IP Multimedia Subsystem or IP Multimedia Core Network Subsystem (IMS) is an
architectural framework for delivering IP multimedia services.
vIMS has been integrated in Functest to demonstrate the capability to deploy a
-relatively complex NFV scenario on the OPNFV platform. The deployment of a complete
-functional VNF allows the test of most of the essential functions needed for a
-NFV platform.
+relatively complex NFV scenario on the OPNFV platform. The deployment of a
+complete functional VNF allows the test of most of the essential functions
+needed for an NFV platform.
The goal of this test suite consists of:
@@ -438,12 +449,14 @@ This testcase extends the cloudify_ims test case.
The first part is similar but the testing part is different.
The testing part consists in automating a realistic signaling load on the vIMS
using an Ixia loader (proprietary tools)
- - You need to have access to an Ixia licence server defined in the configuration
- file and have ixia image locally.
+
+ - You need to have access to an Ixia licence server defined in the
+ configuration file and to have the Ixia image available locally.
This test case is available but not declared in testcases.yaml. The declaration
-of the testcase is simple, connect to your functest-vnf docker, add the following
-section in /usr/lib/python2.7/site-packacges/functest/ci/testcases.yaml::
+of the testcase is simple: connect to your functest-vnf docker, and add the
+following section in
+/usr/lib/python2.7/site-packages/functest/ci/testcases.yaml::
-
case_name: cloudify_ims_perf
@@ -494,6 +507,7 @@ The vyos-vrouter architecture is described in `[14]`_
.. _`[3]`: https://rally.readthedocs.org/en/latest/index.html
.. _`[5]`: https://github.com/Orange-OpenSource/opnfv-cloudify-clearwater/blob/master/openstack-blueprint.yaml
.. _`[8]`: https://github.com/openstack/refstack-client
+.. _`[9]`: https://github.com/openstack/defcore
.. _`[10]`: https://github.com/openstack/interop/blob/master/2016.08/procedure.rst
.. _`[11]`: http://robotframework.org/
.. _`[12]`: http://docs.opnfv.org/en/latest/submodules/functest/docs/testing/user/userguide/index.html
diff --git a/docs/testing/user/userguide/test_overview.rst b/docs/testing/user/userguide/test_overview.rst
index 8919ed713..250a0f014 100644
--- a/docs/testing/user/userguide/test_overview.rst
+++ b/docs/testing/user/userguide/test_overview.rst
@@ -1,5 +1,4 @@
-.. This work is licensed under a Creative Commons Attribution 4.0 International License.
-.. http://creativecommons.org/licenses/by/4.0
+.. SPDX-License-Identifier: CC-BY-4.0
Overview of the Functest suites
===============================
@@ -21,151 +20,147 @@ healthcheck, smoke, features, components, performance, VNF, Stress tests.
All the Healthcheck and smoke tests of a given scenario must be successful to
validate the scenario for the release.
-+-------------+---------------+----------------+----------------------------------+
-| Domain | Tier | Test case | Comments |
-+=============+===============+================+==================================+
-| VIM | healthcheck | connection | Check OpenStack connectivity |
-| | | _check | through SNAPS framework |
-| | +----------------+----------------------------------+
-| | | api_check | Check OpenStack API through |
-| | | | SNAPS framework |
-| | +----------------+----------------------------------+
-| | | snaps_health | basic instance creation, check |
-| | | \_check | DHCP |
-| +---------------+----------------+----------------------------------+
-| | smoke | vping_ssh | NFV "Hello World" using an SSH |
-| | | | connection to a destination VM |
-| | | | over a created floating IP |
-| | | | address on the SUT Public / |
-| | | | External network. Using the SSH |
-| | | | connection a test script is then |
-| | | | copied to the destination |
-| | | | VM and then executed via SSH. |
-| | | | The script will ping another |
-| | | | VM on a specified IP address over|
-| | | | the SUT Private Tenant network |
-| | +----------------+----------------------------------+
-| | | vping_userdata | Uses Ping with given userdata |
-| | | | to test intra-VM connectivity |
-| | | | over the SUT Private Tenant |
-| | | | network. The correct operation |
-| | | | of the NOVA Metadata service is |
-| | | | also verified in this test |
-| | +----------------+----------------------------------+
-| | | tempest_smoke | Generate and run a relevant |
-| | | \_serial | Tempest Test Suite in smoke mode.|
-| | | | The generated test set is |
-| | | | dependent on the OpenStack |
-| | | | deployment environment |
-| | +----------------+----------------------------------+
-| | | rally_sanity | Run a subset of the OpenStack |
-| | | | Rally Test Suite in smoke mode |
-| | +----------------+----------------------------------+
-| | | snaps_smoke | Run the SNAPS-OO integration |
-| | | | tests |
-| | +----------------+----------------------------------+
-| | | refstack | Reference RefStack suite |
-| | | \_defcore | tempest selection for NFV |
-| | +----------------+----------------------------------+
-| | | neutron_trunk | The neutron trunk port testcases |
-| | | | have been introduced and they are|
-| | | | supported by installers : |
-| | | | Apex, Fuel and Compass. |
-| +---------------+----------------+----------------------------------+
-| | components | tempest_full | Generate and run a full set of |
-| | | \_parallel | the OpenStack Tempest Test Suite.|
-| | | | See the OpenStack reference test |
-| | | | suite `[2]`_. The generated |
-| | | | test set is dependent on the |
-| | | | OpenStack deployment environment |
-| | +----------------+----------------------------------+
-| | | rally_full | Run the OpenStack testing tool |
-| | | | benchmarking OpenStack modules |
-| | | | See the Rally documents `[3]`_ |
-+-------------+---------------+----------------+----------------------------------+
-| Controllers | smoke | odl | Opendaylight Test suite |
-| | | | Limited test suite to check the |
-| | | | basic neutron (Layer 2) |
-| | | | operations mainly based on |
-| | | | upstream testcases. See below |
-| | | | for details |
-| | +----------------+----------------------------------+
-| | | odl_netvirt | Test Suite for the OpenDaylight |
-| | | | SDN Controller when the NetVirt |
-| | | | features are installed. It |
-| | | | integrates some test suites from |
-| | | | upstream using Robot as the test |
-| | | | framework |
-+-------------+---------------+----------------+----------------------------------+
-| Features | features | bgpvpn | Implementation of the OpenStack |
-| | | | bgpvpn API from the SDNVPN |
-| | | | feature project. It allows for |
-| | | | the creation of BGP VPNs. |
-| | | | See `SDNVPN User Guide`_ for |
-| | | | details |
-| | +----------------+----------------------------------+
-| | | doctor | Doctor platform, as of Colorado |
-| | | | release, provides the three |
-| | | | features: |
-| | | | * Immediate Notification |
-| | | | * Consistent resource state |
-| | | | awareness for compute host down |
-| | | | * Valid compute host status |
-| | | | given to VM owner |
-| | | | See `Doctor User Guide`_ for |
-| | | | details |
-| | +----------------+----------------------------------+
-| | | odl-sfc | SFC testing for odl scenarios |
-| | | | See `SFC User Guide`_ for details|
-| | +----------------+----------------------------------+
-| | | parser | Parser is an integration project |
-| | | | which aims to provide |
-| | | | placement/deployment templates |
-| | | | translation for OPNFV platform, |
-| | | | including TOSCA -> HOT, POLICY ->|
-| | | | TOSCA and YANG -> TOSCA. it |
-| | | | deals with a fake vRNC. |
-| | | | See `Parser User Guide`_ for |
-| | | | details |
-| | +----------------+----------------------------------+
-| | | fds | Test Suite for the OpenDaylight |
-| | | | SDN Controller when the GBP |
-| | | | features are installed. It |
-| | | | integrates some test suites from |
-| | | | upstream using Robot as the test |
-| | | | framework |
-+-------------+---------------+----------------+----------------------------------+
-| VNF | vnf | cloudify_ims | Example of a real VNF deployment |
-| | | | to show the NFV capabilities of |
-| | | | the platform. The IP Multimedia |
-| | | | Subsytem is a typical Telco test |
-| | | | case, referenced by ETSI. |
-| | | | It provides a fully functional |
-| | | | VoIP System |
-| | +----------------+----------------------------------+
-| | | orchestra | OpenIMS deployment using |
-| | | \_openims | Openbaton orchestrator |
-| | +----------------+----------------------------------+
-| | | orchestra | Cleawater IMS deployment using |
-| | | \_cleawaterims | Openbaton orchestrator |
-| | +----------------+----------------------------------+
-| | | vyos_vrouter | vRouter testing |
-| | +----------------+----------------------------------+
-| | | cloudify_ims | Based on cloudify_ims test case |
-| | | perf | cloudify_ims_perf substitutes |
-| | | | the signaling test suite by an |
-| | | | automatic deployment of an Ixia |
-| | | | loader and generic SIP stress |
-| | | | tests. |
-| | | | This work has been initiated |
-| | | | during the plugfest and allows |
-| | | | realistic load tests on top of |
-| | | | cloudify_ims. Please note that |
-| | | | this test is available but not |
-| | | | declared in testcases.yaml as it |
-| | | | requires access to proprietary |
-| | | | resources (Ixia loader) |
-+-------------+---------------+----------------+----------------------------------+
++-------------+---------------+------------+----------------------------------+
+| Domain | Tier | Test case | Comments |
++=============+===============+============+==================================+
+| VIM | healthcheck | connection | Check OpenStack connectivity |
+| | | \_check | through SNAPS framework |
+| | +------------+----------------------------------+
+| | | api_check | Check OpenStack API through |
+| | | | SNAPS framework |
+| | +------------+----------------------------------+
+| | | snaps | basic instance creation, check |
+| | | \_health | DHCP |
+| | | \_check | |
+| +---------------+------------+----------------------------------+
+| | smoke | vping_ssh | NFV "Hello World" using an SSH |
+| | | | connection to a destination VM |
+| | | | over a created floating IP |
+| | | | address on the SUT Public / |
+| | | | External network. Using the SSH |
+| | | | connection a test script is then |
+| | | | copied to the destination |
+| | | | VM and then executed via SSH. |
+| | | | The script will ping another |
+| | | | VM on a specified IP address over|
+| | | | the SUT Private Tenant network |
+| | +------------+----------------------------------+
+| | | vping | Uses Ping with given userdata |
+| | | \_userdata | to test intra-VM connectivity |
+| | | | over the SUT Private Tenant |
+| | | | network. The correct operation |
+| | | | of the NOVA Metadata service is |
+| | | | also verified in this test |
+| | +------------+----------------------------------+
+| | | tempest | Generate and run a relevant |
+| | | \_smoke | Tempest Test Suite in smoke mode.|
+| | | \_serial | The generated test set is |
+| | | | dependent on the OpenStack |
+| | | | deployment environment |
+| | +------------+----------------------------------+
+| | | rally | Run a subset of the OpenStack |
+| | | \_sanity | Rally Test Suite in smoke mode |
+| | +------------+----------------------------------+
+| | | snaps\ | Run the SNAPS-OO integration |
+| | | \_smoke | tests |
+| | +------------+----------------------------------+
+| | | refstack | Reference RefStack suite |
+| | | \_defcore | tempest selection for NFV |
+| | +------------+----------------------------------+
+| | | neutron | The neutron trunk port testcases |
+| | | \_trunk | have been introduced and they are|
+| | | | supported by installers : |
+| | | | Apex, Fuel and Compass. |
+| +---------------+------------+----------------------------------+
+| | components | tempest | Generate and run a full set of |
+| | | \_full | the OpenStack Tempest Test Suite.|
+| | | \_parallel | See the OpenStack reference test |
+| | | | suite `[2]`_. The generated |
+| | | | test set is dependent on the |
+| | | | OpenStack deployment environment |
+| | +------------+----------------------------------+
+| | | rally_full | Run the OpenStack testing tool |
+| | | | benchmarking OpenStack modules |
+| | | | See the Rally documents `[3]`_ |
++-------------+---------------+------------+----------------------------------+
+| Controllers | smoke | odl | Opendaylight Test suite |
+| | | | Limited test suite to check the |
+| | | | basic neutron (Layer 2) |
+| | | | operations mainly based on |
+| | | | upstream testcases. See below |
+| | | | for details |
+| | +------------+----------------------------------+
+| | | odl | Test Suite for the OpenDaylight |
+| | | \_netvirt | SDN Controller when the NetVirt |
+| | | | features are installed. It |
+| | | | integrates some test suites from |
+| | | | upstream using Robot as the test |
+| | | | framework |
++-------------+---------------+------------+----------------------------------+
+| Features | features | bgpvpn | Implementation of the OpenStack |
+| | | | bgpvpn API from the SDNVPN |
+| | | | feature project. It allows for |
+| | | | the creation of BGP VPNs. |
+| | | | See `SDNVPN User Guide`_ for |
+| | | | details |
+| | +------------+----------------------------------+
+| | | doctor | Doctor platform, as of Colorado |
+| | | | release, provides the three |
+| | | | features: |
+| | | | * Immediate Notification |
+| | | | * Consistent resource state |
+| | | | awareness for compute host down |
+| | | | * Valid compute host status |
+| | | | given to VM owner |
+| | | | See `Doctor User Guide`_ for |
+| | | | details |
+| | +------------+----------------------------------+
+| | | odl-sfc | SFC testing for odl scenarios |
+| | | | See `SFC User Guide`_ for details|
+| | +------------+----------------------------------+
+| | | parser | Parser is an integration project |
+| | | | which aims to provide |
+| | | | placement/deployment templates |
+| | | | translation for OPNFV platform, |
+| | | | including TOSCA -> HOT, POLICY ->|
+| | | | TOSCA and YANG -> TOSCA. it |
+| | | | deals with a fake vRNC. |
+| | | | See `Parser User Guide`_ for |
+| | | | details |
+| | +------------+----------------------------------+
+| | | fds | Test Suite for the OpenDaylight |
+| | | | SDN Controller when the GBP |
+| | | | features are installed. It |
+| | | | integrates some test suites from |
+| | | | upstream using Robot as the test |
+| | | | framework |
++-------------+---------------+------------+----------------------------------+
+| VNF | vnf | cloudify | Example of a real VNF deployment |
+| | | \_ims | to show the NFV capabilities of |
+| | | | the platform. The IP Multimedia |
+| | | | Subsystem is a typical Telco test |
+| | | | case, referenced by ETSI. |
+| | | | It provides a fully functional |
+| | | | VoIP System |
+| | +------------+----------------------------------+
+| | | vyos | vRouter testing |
+| | | \_vrouter | |
+| | +------------+----------------------------------+
+| | | cloudify | Based on cloudify_ims test case |
+| | | \_ims_perf | cloudify_ims_perf substitutes |
+| | | | the signaling test suite by an |
+| | | | automatic deployment of an Ixia |
+| | | | loader and generic SIP stress |
+| | | | tests. |
+| | | | This work has been initiated |
+| | | | during the plugfest and allows |
+| | | | realistic load tests on top of |
+| | | | cloudify_ims. Please note that |
+| | | | this test is available but not |
+| | | | declared in testcases.yaml as it |
+| | | | requires access to proprietary |
+| | | | resources (Ixia loader) |
++-------------+---------------+------------+----------------------------------+
As shown in the above table, Functest is structured into different 'domains',
@@ -174,7 +169,8 @@ As shown in the above table, Functest is structured into different 'domains',
Test cases also have an implicit execution order. For example, if the early
'healthcheck' Tier testcase fails, or if there are any failures in the 'smoke'
-Tier testcases, there is little point to launch a full testcase execution round.
+Tier testcases, there is little point to launch a full testcase execution
+round.
In Danube, we merged the smoke and sdn controller tiers into the smoke tier.
@@ -190,13 +186,13 @@ are integrated from upstream communities or other OPNFV projects. For example,
OpenStack integration test suite and Functest is in charge of the selection,
integration and automation of those tests that fit suitably to OPNFV.
-The Tempest test suite is the default OpenStack smoke test suite but no new test
-cases have been created in OPNFV Functest.
+The Tempest test suite is the default OpenStack smoke test suite but no new
+test cases have been created in OPNFV Functest.
The results produced by the tests run from CI are pushed and collected into a
-NoSQL database. The goal is to populate the database with results from different
-sources and scenarios and to show them on a `Functest Dashboard`_. A screenshot
-of a live Functest Dashboard is shown below:
+NoSQL database. The goal is to populate the database with results from
+different sources and scenarios and to show them on a `Functest Dashboard`_. A
+screenshot of a live Functest Dashboard is shown below:
.. figure:: ../../../images/FunctestDashboardEuphrates.png
:align: center
@@ -222,9 +218,9 @@ combinations (which may change from one version to another):
Most of the tests are runnable by any combination, but some tests might have
restrictions imposed by the utilized installers or due to the available
-deployed features. The system uses the environment variables (INSTALLER_TYPE and
-DEPLOY_SCENARIO) to automatically determine the valid test cases, for each given
-environment.
+deployed features. The system uses the environment variables (INSTALLER_TYPE
+and DEPLOY_SCENARIO) to automatically determine the valid test cases, for each
+given environment.
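+
+For example, before running the suites against a Fuel deployment of the
+os-odl_l2-nofeature-ha scenario, the variables could be exported as follows
+(the values below are illustrative only)::
+
+    export INSTALLER_TYPE=fuel
+    export DEPLOY_SCENARIO=os-odl_l2-nofeature-ha
+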
A convenience Functest CLI utility is also available to simplify setting up the
Functest environment, management of the OpenStack environment (e.g. resource
diff --git a/docs/testing/user/userguide/test_results.rst b/docs/testing/user/userguide/test_results.rst
index 3941ba0a1..2d36bdc33 100644
--- a/docs/testing/user/userguide/test_results.rst
+++ b/docs/testing/user/userguide/test_results.rst
@@ -1,3 +1,5 @@
+.. SPDX-License-Identifier: CC-BY-4.0
+
Test results
============
@@ -11,10 +13,10 @@ If you want additional logs, you may configure the logging.ini under
/usr/lib/python2.7/site-packages/functest/ci.
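+
+A possible way to raise the verbosity, assuming logging.ini follows the
+standard Python fileConfig layout (the section and level names below are
+assumptions)::
+
+    sed -i 's/level=INFO/level=DEBUG/g' \
+        /usr/lib/python2.7/site-packages/functest/ci/logging.ini
+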
Automated testing
---------------
+-----------------
-In automated mode, test results are displayed in jenkins logs, a summary is provided
-at the end of the job and can be described as follow::
+In automated mode, test results are displayed in the Jenkins logs; a summary
+is provided at the end of the job and can be described as follows::
+-------------------------+----------------------------------------------------------+
| ENV VAR | VALUE |
@@ -25,6 +27,8 @@ at the end of the job and can be described as follow::
| CI_LOOP | daily |
+-------------------------+----------------------------------------------------------+
+::
+
+------------------------------+------------------+---------------------+------------------+----------------+
| TEST CASE | PROJECT | TIER | DURATION | RESULT |
+------------------------------+------------------+---------------------+------------------+----------------+
diff --git a/docs/testing/user/userguide/troubleshooting.rst b/docs/testing/user/userguide/troubleshooting.rst
index 1649ac140..49f460078 100644
--- a/docs/testing/user/userguide/troubleshooting.rst
+++ b/docs/testing/user/userguide/troubleshooting.rst
@@ -1,5 +1,4 @@
-.. This work is licensed under a Creative Commons Attribution 4.0 International License.
-.. http://creativecommons.org/licenses/by/4.0
+.. SPDX-License-Identifier: CC-BY-4.0
Troubleshooting
===============
@@ -26,8 +25,8 @@ rally_full).
vPing common
^^^^^^^^^^^^
-For both vPing test cases (**vPing_ssh**, and **vPing_userdata**), the first steps are
-similar:
+For both vPing test cases (**vPing_ssh**, and **vPing_userdata**), the first
+steps are similar:
* Create Glance image
* Create Network
@@ -37,15 +36,17 @@ similar:
After these actions, the test cases differ and will be explained in their
respective section.
-These test cases can be run inside the container, using new Functest CLI as follows::
+These test cases can be run inside the container, using the new Functest CLI
+as follows::
$ functest testcase run vping_ssh
$ functest testcase run vping_userdata
The Functest CLI is designed to route a call to the corresponding internal
-python scripts, located in paths
-/usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/vping/vping_ssh.py
-and /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/vping/vping_userdata.py
+python scripts, located in paths::
+
+ /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/vping/vping_ssh.py
+ /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/vping/vping_userdata.py
Notes:
@@ -73,10 +74,11 @@ Some of the common errors that can appear in this test case are::
vPing_ssh- ERROR - There has been a problem when creating the neutron network....
-This means that there has been some problems with Neutron, even before creating the
-instances. Try to create manually a Neutron network and a Subnet to see if that works.
-The debug messages will also help to see when it failed (subnet and router creation).
-Example of Neutron commands (using 10.6.0.0/24 range for example)::
+This means that there have been some problems with Neutron, even before
+creating the instances. Try to manually create a Neutron network and a Subnet
+to see if that works. The debug messages will also help to see when it failed
+(subnet and router creation). Example of Neutron commands (using the
+10.6.0.0/24 range)::
neutron net-create net-test
neutron subnet-create --name subnet-test --allocation-pool start=10.6.0.2,end=10.6.0.100 \
@@ -85,7 +87,8 @@ Example of Neutron commands (using 10.6.0.0/24 range for example)::
neutron router-interface-add <ROUTER_ID> test_subnet
neutron router-gateway-set <ROUTER_ID> <EXT_NET_NAME>
-Another related error can occur while creating the Security Groups for the instances::
+Another related error can occur while creating the Security Groups for the
+instances::
vPing_ssh- ERROR - Failed to create the security group...
@@ -100,9 +103,9 @@ In this case, proceed to create it manually. These are some hints::
--protocol tcp --port-range-min 80 --port-range-max 80 --remote-ip-prefix 0.0.0.0/0
The next step is to create the instances. The image used is located in
-*/home/opnfv/functest/data/cirros-0.4.0-x86_64-disk.img* and a Glance image is created
-with the name **functest-vping**. If booting the instances fails (i.e. the status
-is not **ACTIVE**), you can check why it failed by doing::
+*/home/opnfv/functest/data/cirros-0.4.0-x86_64-disk.img* and a Glance image is
+created with the name **functest-vping**. If booting the instances fails (i.e.
+the status is not **ACTIVE**), you can check why it failed by doing::
nova list
nova show <INSTANCE_ID>
@@ -113,7 +116,8 @@ It might show some messages about the booting failure. To try that manually::
This will spawn a VM using the network created previously manually.
In all the OPNFV tested scenarios from CI, there has never been a problem with the
-previous actions. Further possible problems are explained in the following sections.
+previous actions. Further possible problems are explained in the following
+sections.
vPing_SSH
@@ -143,19 +147,21 @@ from the DHCP agent. It can be checked by doing::
nova console-log opnfv-vping-2
If the message *Sending discover* and *No lease, failing* is shown, it probably
-means that the Neutron dhcp-agent failed to assign an IP or even that it was not
-responding. At this point it does not make sense to try to ping the floating IP.
+means that the Neutron dhcp-agent failed to assign an IP or even that it was
+not responding. At this point it does not make sense to try to ping the
+floating IP.
-If the instance got an IP properly, try to ping manually the VM from the container::
+If the instance got an IP properly, try to manually ping the VM from the
+container::
nova list
<grab the public IP>
ping <public IP>
-If the ping does not return anything, try to ping from the Host where the Docker
-container is running. If that solves the problem, check the iptable rules because
-there might be some rules rejecting ICMP or TCP traffic coming/going from/to the
-container.
+If the ping does not return anything, try to ping from the Host where the
+Docker container is running. If that solves the problem, check the iptables
+rules because there might be some rules rejecting ICMP or TCP traffic
+coming/going from/to the container.
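+
+A quick way to inspect those rules on the Docker host (standard iptables
+commands, given here only as a hint)::
+
+    # list forwarding and NAT rules that may drop or reject the traffic
+    sudo iptables -L FORWARD -n -v
+    sudo iptables -t nat -L -n -v
+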
At this point, if the ping does not work either, try to reproduce the test
manually with the steps described above in the vPing common section with the
@@ -176,7 +182,8 @@ vPing_userdata
This test case does not create any floating IP nor establish an SSH
connection. Instead, it uses nova-metadata service when creating an instance
to pass the same script as before (ping.sh) but as 1-line text. This script
-will be executed automatically when the second instance **opnfv-vping-2** is booted.
+will be executed automatically when the second instance **opnfv-vping-2** is
+booted.
The only known problem here for this test to fail is mainly the lack of support
of cloud-init (nova-metadata service). Check the console of the instance::
@@ -219,39 +226,41 @@ In the upstream OpenStack CI all the Tempest test cases are supposed to pass.
If some test cases fail in an OPNFV deployment, the reason is very probably one
of the following:
-+-----------------------------+-----------------------------------------------------+
-| Error | Details |
-+=============================+=====================================================+
-| Resources required for test | Such resources could be e.g. an external network |
-| case execution are missing | and access to the management subnet (adminURL) from |
-| | the Functest docker container. |
-+-----------------------------+-----------------------------------------------------+
-| OpenStack components or | Check running services in the controller and compute|
-| services are missing or not | nodes (e.g. with "systemctl" or "service" commands).|
-| configured properly | Configuration parameters can be verified from the |
-| | related .conf files located under '/etc/<component>'|
-| | directories. |
-+-----------------------------+-----------------------------------------------------+
-| Some resources required for | The tempest.conf file, automatically generated by |
-| execution test cases are | Rally in Functest, does not contain all the needed |
-| missing | parameters or some parameters are not set properly. |
-| | The tempest.conf file is located in directory |
-| | 'root/.rally/verification/verifier-<UUID> |
-| | /for-deployment-<UUID>' |
-| | in the Functest Docker container. Use the "rally |
-| | deployment list" command in order to check the UUID |
-| | the UUID of the current deployment. |
-+-----------------------------+-----------------------------------------------------+
++----------------------------+------------------------------------------------+
+| Error | Details |
++============================+================================================+
+| Resources required for | Such resources could be e.g. an external |
+| testcase execution are | network and access to the management subnet |
+| missing | (adminURL) from the Functest docker container. |
++----------------------------+------------------------------------------------+
+| OpenStack components or | Check running services in the controller and |
+| services are missing or | compute nodes (e.g. with "systemctl" or |
+| not configured properly | "service" commands). |
+| | Configuration parameters can be verified from |
+| | the related .conf files located under |
+| | '/etc/<component>' directories. |
++----------------------------+------------------------------------------------+
+| Some resources required | The tempest.conf file, automatically generated |
+| for execution test cases | by Rally in Functest, does not contain all the |
+| are missing | needed parameters or some parameters are not |
+| | set properly. |
+| | The tempest.conf file is located in directory |
+| | 'root/.rally/verification/verifier-<UUID> |
+| | /for-deployment-<UUID>' |
+| | in the Functest Docker container. Use the |
+| | "rally deployment list" command in order to |
+| | check the UUID of the current deployment. |
++----------------------------+------------------------------------------------+
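+
+As suggested in the table above, the running services can be checked on the
+nodes as sketched below (the service names are assumptions and vary between
+installers and distributions)::
+
+    systemctl status neutron-server
+    systemctl status openstack-nova-api
+    # on older distributions: service nova-api status
+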
When some Tempest test case fails, captured traceback and possibly also the
-related REST API requests/responses are output to the console. More detailed debug
-information can be found from tempest.log file stored into related Rally deployment
-folder.
+related REST API requests/responses are output to the console. More detailed
+debug information can be found in the tempest.log file stored in the related
+Rally deployment folder.
Functest offers a possibility to test a customized list of Tempest test cases.
-To enable that, add a new entry in docker/components/testcases.yaml on the "components" container
-with the following content::
+To enable that, add a new entry in docker/components/testcases.yaml on the
+"components" container with the following content::
-
case_name: tempest_custom
@@ -268,8 +277,8 @@ with the following content::
module: 'functest.opnfv_tests.openstack.tempest.tempest'
class: 'TempestCustom'
-Also, a list of the Tempest test cases must be provided to the container or modify
-the existing one in
+Also, a list of the Tempest test cases must be provided to the container, or
+the existing one must be modified in
/usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt
Example of custom list of tests 'my-custom-tempest-tests.txt'::
@@ -290,8 +299,8 @@ This is an example of running a customized list of Tempest tests in Functest::
Rally
^^^^^
-The same error causes which were mentioned above for Tempest test cases, may also
-lead to errors in Rally as well.
+The same error causes mentioned above for the Tempest test cases may also
+lead to errors in Rally.
Possible scenarios are:
* authenticate
@@ -305,14 +314,16 @@ Possible scenarios are:
* quotas
* vm
-To know more about what those scenarios are doing, they are defined in directory:
+To know more about what those scenarios do, see their definitions in the
+directory:
/usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/rally/scenario
-For more info about Rally scenario definition please refer to the Rally official
-documentation. `[3]`_
+For more information about Rally scenario definitions, please refer to the
+official Rally documentation `[3]`_.
To check any possible problems with Rally, the logs are stored under
*/home/opnfv/functest/results/rally/* in the Functest Docker container.
+.. _`[3]`: https://rally.readthedocs.org/en/latest/index.html
Controllers
-----------
@@ -372,7 +383,7 @@ described in the following table:
| the VM | the vIMS VNF installation fails |
+-----------------------------------+------------------------------------+
-Please note that this test case requires resources (8 VM (2Go) + 1 VM (4Go)), it
-is there fore not recommended to run it on a light configuration.
+Please note that this test case requires resources (8 VMs (2 GB) + 1 VM
+(4 GB)); it is therefore not recommended to run it on a light configuration.
.. _`OPNFV Functest Developer Guide`: http://artifacts.opnfv.org/functest/docs/testing_developer_devguide/index.html#