-rw-r--r--  INFO.yaml | 12
-rw-r--r--  docs/release/release-notes/index.rst | 218
-rw-r--r--  docs/testing/developer/genericframework/index.rst | 135
-rw-r--r--  docs/testing/developer/testcaserequirements/index.rst | 8
-rw-r--r--  docs/testing/user/certificationworkflow/ApplicationForm.rst | 6
-rw-r--r--  docs/testing/user/certificationworkflow/index.rst | 14
-rw-r--r--  docs/testing/user/ovpaddendum/exemption-strict-API-validation.rst | 14
-rw-r--r--  docs/testing/user/ovpaddendum/index.rst | 98
-rw-r--r--  docs/testing/user/reviewerguide/images/ovp_log_setup.png | bin 72844 -> 12385 bytes
-rw-r--r--  docs/testing/user/reviewerguide/images/ovp_pass_fraction.png | bin 83821 -> 36240 bytes
-rw-r--r--  docs/testing/user/reviewerguide/images/ovp_pass_percentage.png | bin 22057 -> 8757 bytes
-rw-r--r--  docs/testing/user/reviewerguide/images/ovp_result_overview.png | bin 71800 -> 0 bytes
-rw-r--r--  docs/testing/user/reviewerguide/images/ovp_result_review.png | bin 14835 -> 13652 bytes
-rw-r--r--  docs/testing/user/reviewerguide/images/ovp_top_nav.png | bin 21387 -> 20489 bytes
-rw-r--r--  docs/testing/user/reviewerguide/images/review_status.png | bin 0 -> 9887 bytes
-rw-r--r--  docs/testing/user/reviewerguide/images/sut_info.png | bin 17061 -> 12822 bytes
-rw-r--r--  docs/testing/user/reviewerguide/index.rst | 173
-rw-r--r--  docs/testing/user/systempreparation/index.rst | 6
-rw-r--r--  docs/testing/user/testspecification/highavailability/index.rst | 2
-rw-r--r--  docs/testing/user/testspecification/security_patrole/index.rst | 88
-rw-r--r--  docs/testing/user/testspecification/security_patrole_vxlan_dependent/index.rst | 13
-rw-r--r--  docs/testing/user/testspecification/stress/index.rst | 4
-rw-r--r--  docs/testing/user/testspecification/tempest_compute/index.rst | 24
-rw-r--r--  docs/testing/user/testspecification/tempest_identity_v3/index.rst | 8
-rw-r--r--  docs/testing/user/testspecification/tempest_image/index.rst | 10
-rw-r--r--  docs/testing/user/testspecification/tempest_ipv6/index.rst | 16
-rw-r--r--  docs/testing/user/testspecification/tempest_ipv6/ipv6_api.rst | 68
-rw-r--r--  docs/testing/user/testspecification/tempest_ipv6/ipv6_scenario.rst | 32
-rw-r--r--  docs/testing/user/testspecification/tempest_multi_node_scheduling/index.rst | 30
-rw-r--r--  docs/testing/user/testspecification/tempest_network_api/index.rst | 22
-rw-r--r--  docs/testing/user/testspecification/tempest_network_scenario/index.rst | 26
-rw-r--r--  docs/testing/user/testspecification/tempest_network_security/index.rst | 26
-rw-r--r--  docs/testing/user/testspecification/tempest_osinterop/index.rst | 8
-rw-r--r--  docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_compute.rst | 60
-rw-r--r--  docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_identity.rst | 67
-rw-r--r--  docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_image.rst | 48
-rw-r--r--  docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_network.rst | 235
-rw-r--r--  docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_volume.rst | 188
-rw-r--r--  docs/testing/user/testspecification/tempest_trunk_ports/index.rst | 123
-rw-r--r--  docs/testing/user/testspecification/tempest_vm_lifecycle/index.rst | 44
-rw-r--r--  docs/testing/user/testspecification/tempest_volume/index.rst | 6
-rw-r--r--  docs/testing/user/testspecification/vnf/index.rst | 18
-rw-r--r--  docs/testing/user/testspecification/vping/index.rst | 103
-rw-r--r--  docs/testing/user/userguide/api_testing_guide.rst | 279
-rw-r--r--  docs/testing/user/userguide/cli_reference.rst | 66
-rw-r--r--  docs/testing/user/userguide/images/tocsa_vnf_test_environment.png | bin 0 -> 101795 bytes
-rw-r--r--  docs/testing/user/userguide/images/tosca_vnf_test_flow.png | bin 0 -> 40614 bytes
-rw-r--r--  docs/testing/user/userguide/index.rst | 2
-rw-r--r--  docs/testing/user/userguide/testing_guide.rst | 216
-rw-r--r--  docs/testing/user/userguide/vnf_test_guide.rst | 722
-rw-r--r--  dovetail/api/app/routes.py | 28
-rw-r--r--  dovetail/api/app/server.py | 412
-rw-r--r--  dovetail/api/app/utils.py | 33
-rw-r--r--  dovetail/api/swagger.yaml | 4
-rw-r--r--  dovetail/container.py | 13
-rw-r--r--  dovetail/report.py | 51
-rw-r--r--  dovetail/test_runner.py | 3
-rw-r--r--  dovetail/tests/unit/test_container.py | 30
-rw-r--r--  dovetail/tests/unit/test_report.py | 39
-rw-r--r--  dovetail/tests/unit/test_test_runner.py | 12
-rw-r--r--  dovetail/tests/unit/utils/test_dovetail_utils.py | 40
-rw-r--r--  dovetail/utils/dovetail_utils.py | 29
-rw-r--r--  etc/compliance/ovp.2019.12.yaml (renamed from etc/compliance/ovp.2019.0x.yaml) | 6
-rw-r--r--  etc/compliance/proposed_tests.yml | 1
-rw-r--r--  etc/conf/bottlenecks_config.yml | 15
-rw-r--r--  etc/conf/cmd_config.yml | 2
-rw-r--r--  etc/conf/dovetail_config.yml | 3
-rw-r--r--  etc/conf/functest-k8s_config.yml | 5
-rw-r--r--  etc/conf/functest_config.yml | 9
-rw-r--r--  etc/conf/yardstick_config.yml | 13
-rw-r--r--  etc/patches/functest/disable-api-validation/0001-Allow-additional-properties-in-API-responses.patch | 345
-rw-r--r--  etc/testcase/functest.tempest.networking_sfc.yml | 31
-rw-r--r--  etc/testcase/functest.tempest.neutron_tempest_plugin_api.yml | 88
-rw-r--r--  etc/userconfig/patrole_blacklist.yaml | 3
-rw-r--r--  etc/userconfig/trunk_port_blacklist.yaml | 3
-rw-r--r--  setup.cfg | 2
-rw-r--r--  tox.ini | 7
77 files changed, 3011 insertions, 1454 deletions
diff --git a/INFO.yaml b/INFO.yaml
index 79c00c0c..79dfa2bc 100644
--- a/INFO.yaml
+++ b/INFO.yaml
@@ -13,11 +13,11 @@ project_creation_date: 'September 1st, 2015'
project_category: 'Testing'
lifecycle_state: 'Incubation'
project_lead: &opnfv_dovetail_ptl
- name: 'Dan Xu'
- email: 'xudan16@huawei.com'
+ name: 'Kanagaraj Manickam'
+ email: 'kanagaraj.manickam@huawei.com'
company: 'huawei.com'
- id: 'xudan'
- timezone: 'Asia/Shanghai'
+ id: 'mkr1481'
+ timezone: 'India/Bangalore'
primary_contact: *opnfv_dovetail_ptl
issue_tracking:
type: 'jira'
@@ -64,6 +64,10 @@ committers:
email: 'georg.kunz@ericsson.com'
company: 'ericsson.com'
id: 'georgkunz'
+ - name: 'Dan Xu'
+ email: 'xudan16@huawei.com'
+ company: 'huawei.com'
+ id: 'xudan'
tsc:
# yamllint disable rule:line-length
approval: 'http//meetbot.opnfv.org/meetings/opnfv-meeting/2015/opnfv-meeting.2015-09-01-13.59.html'
diff --git a/docs/release/release-notes/index.rst b/docs/release/release-notes/index.rst
index 1ec19960..2de92c55 100644
--- a/docs/release/release-notes/index.rst
+++ b/docs/release/release-notes/index.rst
@@ -3,70 +3,71 @@
.. _dovetail-releasenotes:
-==================================================================
-OPNFV Verified Program (OVP) 2018.09 / Dovetail 2.2.0 Release Note
-==================================================================
+======================================================================
+OPNFV Verification Program (OVP) 2019.12 / Dovetail 3.0.0 Release Note
+======================================================================
-OPNFV 2018.09 Release
+OPNFV 2019.12 Release
=====================
-The OPNFV Verified Program (OVP) allows vendors and operators to obtain 'OPNFV Verified'
+The OPNFV Verification Program (OVP) allows vendors and operators to obtain 'OPNFV Verified'
status based on an agreed upon set of compliance verification test cases that align to OPNFV
-releases. The reference System under Test (SUT) are the NFV components deployed by the OPNFV
-installers for a given release, where OVP 2018.09 is based on the Fraser release. Participants
-of the program can verify commercial or open source offerings against an OVP release. This implies
-that the SUT used for verification has interfaces, components, functions and behaviors that align
-to OPNFV installer integrations.
-
-Dovetail is the overall framework used to execute tests and collect results for OVP. Dovetail does
-not deliver test content directly. Rather, test content is developed in other OPNFV test frameworks
-such as Functest and upstream test communities such as OpenStack's RefStack/Tempest projects.
-Dovetail leverages this upstream test content and provides a common set of test platform services
-for the OVP.
-
-Dovetail works in conjunction with a web portal interface dubbed the 'OVP web portal' to allow
-users to upload test results to a centralized community repository. This facilitates user
-collaboration, result sharing, self-testing and community reviews. It also serves as a hub for
-new participants to learn about the program and access key resources. The link for this portal
-is at: `OPNFV Verified Program <https://verified.opnfv.org>`_.
-
-Use of the OVP web portal is open to all and only requires a valid Linux Foundation or OpenStack
+releases. The reference System under Test (SUT) is either the set of NFV components deployed by the OPNFV
+installers for a given release, where OVP 2019.12 is based on the OPNFV Hunter release, or a VNF
+being on-boarded and orchestrated by the ONAP El Alto release. Participants of the program can
+verify commercial or open source offerings against an OVP release. This implies that the SUT
+used for verification has interfaces, components, functions and behaviors that align to OPNFV
+installer integrations and ONAP deployments.
+
+Dovetail is the overall framework used to execute tests and collect results for the OVP
+Infrastructure badge. Dovetail does not deliver test content directly. Rather, test content
+is developed in other OPNFV test frameworks such as Functest and upstream test communities such
+as OpenStack's RefStack/Tempest projects. Dovetail leverages this upstream test content and
+provides a common set of test platform services for the OVP.
+
+Approved test tools (OPNFV Dovetail, ONAP VTP, and ONAP VVP) work in conjunction with a web portal
+interface dubbed the 'OVP web portal' to allow users to upload test results to a centralized community
+repository. This facilitates user collaboration, result sharing, self-testing and community reviews.
+It also serves as a hub for new participants to learn about the program and access key resources. The
+link for this portal is at: `OPNFV Verification Program <https://nfvi-verified.lfnetworking.org>`_.
+
+Use of the OVP web portal is open to all and only requires a valid Linux Foundation
ID to login. Users are welcome to use the portal to upload, inspect and share results in a private
manner. In order to submit results for official review, the first step is to apply for acceptance
-into the program with the participation form provided in the link: `OPNFV Verified Program
+into the program with the participation form provided in the link: `OPNFV Verification Program
Participation Form <https://na3.docusign.net/Member/PowerFormSigning.aspx?PowerFormId=dc24bf38-ea41-40d4-9e58-9babc6eec778>`_
-Test Suites & Test Areas
-------------------------
+NFVI Test Suites and Test Areas
+-------------------------------
OVP/Dovetail groups test cases into test suites and test areas. Test suites are currently a basic
-categorization around releases for the most part. Executing the test suite 'ovp.2019.0x' without
-further specification will run all the test cases in the OVP 2018.09 release. Test suites are
+categorization around releases for the most part. Executing the test suite 'ovp.2019.12' without
+further specification will run all the test cases in the OVP 2019.12 release. Test suites are
divided into test areas that can be executed separately.
-Test areas include a division into **'mandatory'** and **'optional'** in an overarching
+Test cases include a division into **'mandatory'** and **'optional'** in an overarching
categorization.
All the mandatory test cases are required to be executed with passing results for all inclusive
test cases for results to be reviewed and approved by the community made up of peer reviewers.
The optional test cases are not required to be executed for the official compliance verification
-review in the OVP 2018.09 release. However, execution of these cases is encouraged, as some
+review in the OVP 2019.12 release. However, execution of these cases is encouraged, as some
optional test cases may become mandatory in future releases.
-Test Cases and Sub Test Cases
------------------------------
+NFVI Test Cases and Sub Test Cases
+----------------------------------
Each test area consists of multiple test cases where each test case can be a single test or
broken down into sub test cases. A listing of test cases with the number of sub test cases noted
-in parenthesis is shown below for the OVP 2018.09 release.
+in parentheses is shown below for the OVP 2019.12 release.
**Mandatory**
+- bottlenecks.stress.ping (1)
- functest.vping.userdata (1)
- functest.vping.ssh (1)
-- bottlenecks.stress.ping (1)
-- functest.tempest.osinterop (200)
+- functest.tempest.osinterop (219)
- functest.tempest.compute (12)
- functest.tempest.identity_v3 (11)
- functest.tempest.image (2)
@@ -74,7 +75,7 @@ in parenthesis is shown below for the OVP 2018.09 release.
- functest.tempest.volume (2)
- functest.tempest.neutron_trunk_ports (38)
- functest.tempest.ipv6_api (21)
-- functest.security.patrole (117)
+- functest.security.patrole (124)
- yardstick.ha.nova_api (1)
- yardstick.ha.neutron_server (1)
- yardstick.ha.keystone (1)
@@ -87,50 +88,59 @@ in parenthesis is shown below for the OVP 2018.09 release.
- yardstick.ha.database (1)
-There are a total of 432 mandatory test cases.
+There are a total of 456 mandatory test cases.
**Optional**
- functest.vnf.vims (1)
- functest.vnf.vepc (1)
-- yardstick.ha.neutron_l3_agent (1)
-- yardstick.ha.controller_restart (1)
- functest.tempest.ipv6_scenario (8)
- functest.tempest.multi_node_scheduling (6)
- functest.tempest.network_security (6)
- functest.tempest.vm_lifecycle (12)
- functest.tempest.network_scenario (5)
-- functest.tempest.bgpvpn (15)
+- functest.tempest.bgpvpn (21)
- functest.security.patrole_vxlan_dependent (2)
+- yardstick.ha.neutron_l3_agent (1)
+- yardstick.ha.controller_restart (1)
-There are a total of 58 optional test cases.
+There are a total of 64 optional test cases.
OPNFV Test Projects and Components
----------------------------------
The OPNFV test frameworks integrated into the Dovetail framework that deliver test content are:
- * Functest (leverages OpenStack RefStack/Tempest projects in addition to supplying native test cases)
- * Yardstick
- * Bottlenecks
+ - Functest (leverages OpenStack RefStack/Tempest projects in addition to supplying native test cases)
+ - Yardstick
+ - Bottlenecks
+
+ONAP Test Projects and Components
+---------------------------------
+
+The ONAP test projects and components used with this OVP release to provide the test requirements
+and test scripting are:
+- VNFRQTS
+- VNFSDK
+- VVP
Acceptance and Marketing
------------------------
-Upon successful community review of results for OVP 2018.09, the Linux Foundation Compliance
+Upon successful community review of results for OVP 2019.12, the Linux Foundation Compliance
Verification Committee (LFN CVC) on behalf of the Board of Directors can award a product 'OPNFV
Verified' status. Use of 'OPNFV Verified' Program Marks shall be awarded to the platform used
for compliance verification. The category label of 'Infrastructure' is used within the Program
Marks logo and limits the scope of this OVP release to a SUT consisting of NFVI and VIM components
using ETSI terminology. It does not provide compliance verification for specific VNFs in any fashion.
-The date '2018.09' corresponds to a reference SUT that aligns to the OPNFV Fraser release and
-currently aligns to the Dovetail framework version 2.2.0.
+The date '2019.12' corresponds to a reference SUT that aligns to the OPNFV Hunter release and
+currently aligns to the Dovetail framework version 3.0.0.
Organizations shall not use the Program Marks in any way that would associate it with any
individual or company logo or brand, beyond the association to the specific platform to which it
was awarded. While OpenStack RefStack interoperability and Tempest integration test cases are
-executed as part of the OVP 2018.09 compliance verification test suites, the OVP does not grant or
+executed as part of the OVP 2019.12 compliance verification test suites, the OVP does not grant or
award OpenStack Marks in any fashion. 'OPNFV Verified' status does not assert readiness for
commercial deployment.
@@ -147,122 +157,118 @@ Release Data
| **Project** | Dovetail |
| | |
+--------------------------------------+---------------------------------------+
-| **Repo tag** | ovp-2.0.0 |
+| **Repo tag** | ovp-3.0.1 |
| | |
+--------------------------------------+---------------------------------------+
-| **Release designation** | OPNFV Verified Program (OVP) |
-| | 2018.09 (Fraser) |
+| **Release designation** | OPNFV Verification Program (OVP) |
+| | 2019.12 (Hunter) |
+--------------------------------------+---------------------------------------+
-| **Release date** | September 2018 |
+| **Release date** | December 2019 |
| | |
+--------------------------------------+---------------------------------------+
-| **Purpose of the delivery** | Support OVP 2018.09 release with |
-| | OPNFV Fraser release as reference SUT |
+| **Purpose of the delivery** | Support OVP 2019.12 release with |
+| | OPNFV Hunter release as reference SUT |
++--------------------------------------+---------------------------------------+
+| **Notes** | Point release ovp-3.0.1 updates, |
+| | changes, and corrects the |
+| | documentation only. |
+--------------------------------------+---------------------------------------+
-There is a patch version of Dovetail after the above release.
-
-+------------------------------+--------------------------------------------------------------------------------+
-| **Project** | Dovetail |
-| | |
-+------------------------------+--------------------------------------------------------------------------------+
-| **Repo tag** | ovp-2.2.0 |
-| | |
-+------------------------------+--------------------------------------------------------------------------------+
-| **Release date** | March 2019 |
-| | |
-+------------------------------+--------------------------------------------------------------------------------+
-| **Purpose of the delivery** | - Test scope update |
-| | - Move 2 sub-test cases in functest.security.patrole to optional |
-| | - These 2 sub-test cases rely on vxlan as virtual networking implementation |
-| | - Three bug fixes |
-| | - Yardstick: Add SLA check and reverse the creation order of servers |
-| | - Dovetail: Set Yardstick_TAG to be ovp-2.0.0 instead of stable |
-| | - Bottlenecks: Make ram_num configurable for DPDK |
-+------------------------------+--------------------------------------------------------------------------------+
Deliverables
============
Software
--------
+
+OPNFV Software
+""""""""""""""
+
+-------------------------+-----------------------------------+----------------+
| **Docker Container** | **Docker Image** | **Tag** |
+-------------------------+-----------------------------------+----------------+
-| dovetail | opnfv/dovetail | ovp-2.2.0 |
+| dovetail | opnfv/dovetail | ovp-3.0.0 |
+-------------------------+-----------------------------------+----------------+
-| functest | opnfv/functest-smoke | opnfv-6.3.0 |
+| functest | opnfv/functest-smoke | hunter |
+-------------------------+-----------------------------------+----------------+
-| functest | opnfv/functest-healthcheck | opnfv-6.3.0 |
+| functest | opnfv/functest-healthcheck | hunter |
+-------------------------+-----------------------------------+----------------+
-| functest | opnfv/functest-features | opnfv-6.3.0 |
+| functest | opnfv/functest-vnf | hunter |
+-------------------------+-----------------------------------+----------------+
-| functest | opnfv/functest-vnf | opnfv-6.3.0 |
+| yardstick | opnfv/yardstick | opnfv-8.0.0 |
+-------------------------+-----------------------------------+----------------+
-| yardstick | opnfv/yardstick | ovp-2.0.0 |
+| bottlenecks | opnfv/bottlenecks | 8.0.1-latest |
+-------------------------+-----------------------------------+----------------+
-| bottlenecks | opnfv/bottlenecks | ovp-2.0.0 |
-+-------------------------+-----------------------------------+----------------+
-
-Docker images:
+**Docker images:**
- `Dovetail Docker images <https://hub.docker.com/r/opnfv/dovetail>`_
- `Functest-smoke Docker images <https://hub.docker.com/r/opnfv/functest-smoke/>`_
- `Functest-healthcheck Docker images <https://hub.docker.com/r/opnfv/functest-healthcheck/>`_
-- `Functest-features Docker images <https://hub.docker.com/r/opnfv/functest-features/>`_
- `Functest-vnf Docker images <https://hub.docker.com/r/opnfv/functest-vnf/>`_
- `Yardstick Docker images <https://hub.docker.com/r/opnfv/yardstick/>`_
- `Bottlenecks Docker images <https://hub.docker.com/r/opnfv/bottlenecks/>`_
+ONAP Software
+"""""""""""""
++-------------------------+--------------------------------------------------------------+
+| **Item** | **Repo Link** |
++-------------------------+--------------------------------------------------------------+
+| VTP/VNFSDK Test Scripts | `<https://gerrit.onap.org/r/admin/repos/vnfsdk/refrepo>`_ |
++-------------------------+--------------------------------------------------------------+
+| VVP Robot Test Scripts | `<https://gerrit.onap.org/r/admin/repos/oom>`_ |
++-------------------------+--------------------------------------------------------------+
Documents
---------
-- `System Preparation Guide <http://docs.opnfv.org/en/stable-fraser/submodules/dovetail/docs/testing/user/systempreparation/index.html>`_
+- `System Preparation Guide <https://opnfv-dovetail.readthedocs.io/en/stable-hunter/testing/user/systempreparation/index.html>`_
-- `User Guide <http://docs.opnfv.org/en/stable-fraser/submodules/dovetail/docs/testing/user/userguide/testing_guide.html>`_
+- `NFVI User Guide <https://opnfv-dovetail.readthedocs.io/en/stable-hunter/testing/user/userguide/testing_guide.html>`_
-- `OPV Test Specifications <http://docs.opnfv.org/en/stable-fraser/submodules/dovetail/docs/testing/user/testspecification/index.html>`_
+- `VNF User Guide <https://opnfv-dovetail.readthedocs.io/en/stable-hunter/testing/user/userguide/vnf_test_guide.html>`_
-- `Dovetail CLI Reference <http://docs.opnfv.org/en/stable-fraser/submodules/dovetail/docs/testing/user/userguide/cli_reference.html>`_
+- `OVP NFVI Test Specifications <https://opnfv-dovetail.readthedocs.io/en/stable-hunter/testing/user/testspecification/index.html>`_
-- `OPV Workflow <http://docs.opnfv.org/en/stable-fraser/submodules/dovetail/docs/testing/user/certificationworkflow/index.html>`_
+- `ONAP VNF Test Specifications <https://docs.onap.org/en/elalto/submodules/vnfrqts/testcases.git/docs/index.html>`_
-- `OPV Reviewer Guide <http://docs.opnfv.org/en/stable-fraser/submodules/dovetail/docs/testing/user/reviewerguide/index.html>`_
+- `Dovetail CLI Reference <https://opnfv-dovetail.readthedocs.io/en/stable-hunter/testing/user/userguide/cli_reference.html>`_
+- `Dovetail RESTful API <https://opnfv-dovetail.readthedocs.io/en/stable-hunter/testing/user/userguide/api_testing_guide.html>`_
-Testing with OPNFV Fraser Installers
+- `OVP Workflow <https://opnfv-dovetail.readthedocs.io/en/stable-hunter/testing/user/certificationworkflow/index.html>`_
+
+- `OVP Reviewer Guide <https://opnfv-dovetail.readthedocs.io/en/stable-hunter/testing/user/reviewerguide/index.html>`_
+
+
+Testing with OPNFV Hunter Installers
====================================
-OVP 2018.09 and Dovetail 2.2.0 are known to be have been tested with the following OPNFV
-Fraser installer versions.
+OVP 2019.12 and Dovetail 3.0.0 are known to have been tested with the following OPNFV
+Hunter installer versions.
+-----------------+----------------------+
| Installer | Version |
+=================+======================+
-| Apex | stable/fraser |
-+-----------------+----------------------+
-| Compass | stable/fraser |
-+-----------------+----------------------+
-| Fuel | stable/fraser |
+| Fuel | stable/hunter |
+-----------------+----------------------+
-Fraser Known Restrictions/Issues
+Hunter Known Restrictions/Issues
================================
-Please refer to the Dovetail project JIRA for known issues with the Dovetail
-Fraser release:
+Please refer to the OPNFV and ONAP JIRA for known issues with each applicable project:
-.. https://jira.opnfv.org/projects/DOVETAIL
+- `<https://jira.opnfv.org/projects/DOVETAIL>`_
+- `<https://jira.onap.org/projects/VVP>`_
+- `<https://jira.onap.org/projects/VNFSDK>`_
Useful Links
============
- - `OVP Web Portal <https://verified.opnfv.org>`_
+ - `OVP Web Portal <https://nfvi-verified.lfnetworking.org>`_
- `Wiki Project Page <https://wiki.opnfv.org/display/dovetail>`_
@@ -274,4 +280,4 @@ Useful Links
- Dovetail IRC Channel: #opnfv-dovetail
- - `Dovetail Test Configuration <https://git.opnfv.org/dovetail/tree/etc/compliance/ovp.2019.0x.yaml>`_
+ - `Dovetail Test Configuration <https://git.opnfv.org/dovetail/tree/etc/compliance/ovp.2019.12.yaml>`_
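The suite definition linked above groups test cases under mandatory and optional keys. As a
rough sketch of its expected shape (the test case names are taken from the lists earlier in
these notes and the list is truncated; see the linked file for the authoritative content)::

    ---
    ovp.2019.12:
      name: ovp.2019.12
      testcases_list:
        mandatory:
          - functest.vping.userdata
          - functest.vping.ssh
          - bottlenecks.stress.ping
          # ... remaining mandatory test cases ...
        optional:
          - functest.vnf.vims
          - functest.vnf.vepc
          # ... remaining optional test cases ...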
diff --git a/docs/testing/developer/genericframework/index.rst b/docs/testing/developer/genericframework/index.rst
index 9bafb3e4..75721469 100644
--- a/docs/testing/developer/genericframework/index.rst
+++ b/docs/testing/developer/genericframework/index.rst
@@ -13,7 +13,7 @@ Dovetail as a Generic Test Framework
Overview
========
-Dovetail is responsible for the technical realization of the OPNFV Verified
+Dovetail is responsible for the technical realization of the OPNFV Verification
Program (OVP) and other compliance verification projects within the scope of
the Linux Foundation Networking (LFN) umbrella projects.
Dovetail provides a generic framework for executing a specific set of test cases
@@ -33,7 +33,7 @@ The following diagram illustrates Dovetail generic framework.
:scale: 50%
In this diagram, there are 5 main parts, `TestcaseFactory`, `TestRunnerFactory`,
-`CrawlerFactory`, `CheckerFactory` and test case groups.
+`CrawlerFactory`, `CheckerFactory` and `test case groups`.
- **TestcaseFactory**: Each project needs to create its own
testcase class such as `FunctestTestcase` and `OnapVtpTestcase`. All these
@@ -106,7 +106,7 @@ for all configuration files of all test cases.
objective: Test case description
validate:
type: 'shell' or name of the project already integrated in Dovetail
- testcase: The test case name called in this project
+ testcase: The original test case name in the project where it was developed
image_name: Name of the Docker image used to run this test
pre_condition:
- 'Commands needed to be executed before running this test'
@@ -120,7 +120,9 @@ for all configuration files of all test cases.
- test.log
dest_archive_files:
- path/to/archive/test.log
- check_results_file: results.json
+ check_results_files:
+ - results.json
+ portal_key_file: path/to/key/logs/xxx.log
sub_testcase_list:
- sub_test_1
- sub_test_2
@@ -130,7 +132,7 @@ This is the complete format of test case configuration file. Here are some
detailed description for each of the configuration options.
- **Test case name in Dovetail**: All test cases should be named as 'xxx.yyy.zzz'.
- This is the name in Dovetail and has no relationship with its name in its own
+ This is the alias in Dovetail and has no relationship with its name in its own
project. The first part is used to identify the project where this test case
come from (e.g. functest, onap-vtp). The second part is used to classify this
test case according to test area (e.g. healthcheck, ha). Dovetail supports to
@@ -139,7 +141,7 @@ detailed description for each of the configuration options.
the test. The last part is special for this test case itself (e.g. image,
haproxy, csar). It's better to keep the file name the same as the test case
name to make it easier to find the config file according to this test case
- name in Dovetail.
+ alias in Dovetail.
- **validate**: This is the main section to define how to run this test case.
@@ -156,7 +158,7 @@ detailed description for each of the configuration options.
- **testcase**: This is the name defined in its own project. One test case can
be uniquely identified by `type` and `testcase`. Take the test case
`functest.vping.ssh` as an example. Its `type` is 'functest' and `testcase`
- is 'vping_ssh'. With these 2 properties, it can be uniquely identified. Users only
+ is 'vping_ssh'. With these 2 properties, it can be uniquely identified. End users only
need to know that there is a test case named `functest.vping.ssh` in OVP
compliance test scope. Dovetail Framework will run `vping_ssh` within Functest
Docker container.
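As an illustrative sketch of how `type` and `testcase` pin down a test case, following the
configuration template above (the objective text and report paths are placeholders, not the
exact content of the shipped file)::

    ---
    functest.vping.ssh:
      name: functest.vping.ssh
      objective: Verify SSH connectivity between two VMs on a shared network
      validate:
        type: functest
        testcase: vping_ssh
      report:
        check_results_files:
          - 'functest_results.json'
        portal_key_file: vping_logs/functest.vping.ssh.log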
@@ -200,9 +202,12 @@ detailed description for each of the configuration options.
list of `source_archive_files`. Also all paths here should be relative ones
according to `$DOVETAIL_HOME/results`.
- - **check_results_file**: This should be the name and relative path of the result
- file generated by this test case. Dovetail will parse this file to get the
- result (PASS or FAIL).
+ - **check_results_files**: This should be a list of relative paths of
+ the result files generated by this test case. Dovetail will parse these files
+ to get the result (PASS or FAIL).
+
+ - **portal_key_file**: This should be the key log file of this test case, which will
+ be used by the OVP portal for review.
- **sub_testcase_list**: [optional] This section is almost only for Tempest tests
in Functest. Take `functest.tempest.osinterop` as an example. The `sub_testcase_list`
@@ -247,17 +252,29 @@ files of all integrated projects.
{% set userconfig_dir = '/tmp/userconfig' %}
{% set patches_dir = '/tmp/patches' %}
{% set result_dir = '/tmp/results' %}
+ {% set openrc_file = '/home/conf/env_file' %}
project name:
image_name: name of the docker image
docker_tag: tag of the docker image
- opts: options needed such as '-itd'
- envs: envs used to create containers such as '-e DEBUG={{debug}}'
+ opts:
+ detach: true
+ stdin_open: true
+ privileged: true
+ shell: '/bin/bash'
+ envs:
+ - 'CI_DEBUG={{debug}}'
+ - 'DEPLOY_SCENARIO={{deploy_scenario}}'
+ - 'ENV_NAME=env_value'
volumes:
- - '-v {{dovetail_home}}/pre_config:/home/opnfv/pre_config'
- - '-v {{dovetail_home}}/userconfig:{{userconfig_dir}}'
- - '-v {{dovetail_home}}/patches:{{patches_dir}}'
- - '-v {{dovetail_home}}/results:{{result_dir}}'
+ - '{{dovetail_home}}/userconfig:{{userconfig_dir}}'
+ - '{{dovetail_home}}/results:{{result_dir}}'
+ - '/path/on/host:/path/in/container'
+ - '/path/of/host/file:/file/path/in/container'
+ mounts:
+ - 'source={{dovetail_home}}/pre_config/env_config.sh,target={{openrc_file}}'
+ - 'source={{dovetail_home}}/pre_config,target=/home/opnfv/pre_config'
+ - 'source=/file/or/directory/on/host,target=/file/or/directory/in/container'
patches_dir: {{patches_dir}}
pre_condition:
- 'Commands needed to be executed before running this test'
@@ -274,67 +291,77 @@ This is the complete format of project configuration file. Here are some
detailed description for each of the configuration options.
- **Jinja Template**: At the beginning of this yaml file, it uses a Jinja template
- to define some parameters that will be used somewhere in this file (e.g. result_dir).
- Also there are some parameters providing by Dovetail framework as input of this
- file, and other parameters can be defined by using these ones (e.g. testcase and
- dovetail_home). The whole input parameters are list below.
-
- - **validate_testcase**: This is the name of the test case instance which calls this
- project configuration file. The name is provided by the configuration file
- of this test case (validate -> testcase).
+ to define some parameters that will be used somewhere in this file (e.g. result_dir
+ and openrc_file). Besides those, there are some other parameters provided by the
+ Dovetail framework as input to this file, and further parameters can be defined in
+ terms of these (e.g. testcase and dovetail_home). The full list of input parameters
+ that can be used is given below.
- - **testcase**: This is the name of the test case which calls this project
- configuration file. Different from `validate_testcase`, this is the name
- defined in Dovetail not its own project.
+ - **attack_host**: This is the attack host name of the test case which calls this
+ project configuration file. It is only used for HA test cases and can be given in the
+ HA configuration file `pod.yaml` (see the sketch after this list).
- - **os_insecure**: This is only for test cases aiming at OpenStack. This is
- `True` or `False` according to `env_config.sh` file.
+ - **attack_process**: This is the attack process name of the test case which calls
+ this project configuration file. It is only used for HA test cases and can be given in
+ the HA configuration file `pod.yaml`.
- - **cacert**: This is also only for OpenStack test cases. It is the absolute
- path of the OpenStack certificate provided in `env_config.sh` file.
+ - **build_tag**: This is a string that includes the UUID generated by Dovetail.
- - **deploy_scenario**: This is the input when running Dovetail with option
- `--deploy-scenario`.
+ - **cacert**: This is only for OpenStack test cases. It is the absolute
+ path of the OpenStack certificate provided in `env_config.sh` file.
- - **ram_num**: This is the input when running Dovetail with option
- `--ram-num`.
+ - **deploy_scenario**: This is the input when running Dovetail with option
+ `--deploy-scenario`.
- - **dovetail_home**: This is the `DOVETAIL_HOME` getting from the ENV.
+ - **debug**: This is `True` or `False` depending on whether the test cases were
+ run with the `--debug` option.
- - **debug**: This is `True` or `False` according to the command running test
- cases with or without option `--debug`.
+ - **dovetail_home**: This is the `DOVETAIL_HOME` getting from the ENV.
- - **build_tag**: This is a string includes the UUID generated by Dovetail.
+ - **os_insecure**: This is only for test cases aiming at OpenStack. This is
+ `True` or `False` according to `env_config.sh` file.
- - **host_url**: This is only for ONAP VNF SDK to get the HOST_URL provided
- in `env_config.sh` file.
+ - **testcase**: This is the name of the test case which calls this project
+ configuration file. Different from `validate_testcase`, this is the alias
+ defined in Dovetail, not in its own project.
- - **csar_file**: This is also only for ONAP VNF SDK to get the CSAR_FILE
- provided in `env_config.sh` file.
+ - **validate_testcase**: This is the name of the test case instance which calls this
+ project configuration file. The name is provided by the configuration file
+ of this test case (validate -> testcase).
- **project name**: This is the project name defined in Dovetail. For example
OPNFV Functest project is named as 'functest' here in Dovetail. This project
name will be used by test case configuration files as well as somewhere in
Dovetail source code.
-- **image_name**: This is the name of the default Docker image for all test cases
+- **image_name**: This is the name of the default Docker image for most test cases
within this project. Each test case can overwrite it with its own configuration.
- **docker_tag**: This is the tag of all Docker images for all test cases within
this project. For each release, it should use one Docker image with a stable
and official release version.
-- **opts**: Here are all options used to run Docker containers except envs and
- volume mappings (e.g. '-it --privileged=true').
+- **opts**: Here are all options used to run Docker containers except 'image',
+ 'command', 'environment', 'volumes', 'mounts' and 'extra_hosts'. For example,
+ the options include 'detach', 'privileged' and 'tty'. The full list of all
+ options can be found in `Docker python SDK docs <https://docker-py.readthedocs.io/en/stable/containers.html>`_.
+
+- **shell**: This is the command to run inside the container.
-- **envs**: Here are all envs used to run Docker containers (e.g. '-e ONE=one
- -e TWO=two').
+- **envs**: This is a list of all envs used to run Docker containers.
-- **volumes**: A volume mapping list used to run Docker containers. Every project
- should at least map the `$DOVETAIL_HOME/pre_config` and `$DOVETAIL_HOME/results`
- in the test host to containers to get config files and collect all result files.
+- **volumes**: A volume mapping list used to run Docker containers. The source volumes
+ listed here may be nonexistent; Docker will create new directories for them on the
+ host. Every project should at least map `$DOVETAIL_HOME/results` on the test host
+ into containers to collect all result files.
-- **patches_dir**: This is an absolute path of the patches applied to the containers.
+- **mounts**: A mount mapping list used to run Docker containers; a more powerful
+ alternative to **volumes**. The source paths listed here must already exist on the
+ host. Every project should at least mount `$DOVETAIL_HOME/pre_config` on the test
+ host into containers to get config files.
+
+- **patches_dir**: [optional] This is an absolute path of the patches applied to
+ the containers.
- **pre_condition**: A list of all default preparations needed by this project.
It can be overwritten by configurations of test cases.
@@ -349,6 +376,10 @@ detailed description for each of the configuration options.
provide the absolute path here to copy the credential file in the Test Host to
containers.
+- **extra_container**: [optional] The extra containers that need to be removed at the
+ end of the test. These containers are created by the test cases themselves at
+ runtime rather than by Dovetail.
+
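For the `attack_host` and `attack_process` parameters above, a hedged sketch of the
relevant `pod.yaml` section follows; the test case name is taken from the HA test area and
the host and process names are placeholders, so check the Dovetail user guide for the
authoritative layout::

    process_info:
      - testcase_name: yardstick.ha.rabbitmq
        attack_host: node1
        attack_process: rabbitmq-server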
Step 2: Add related classes
^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/docs/testing/developer/testcaserequirements/index.rst b/docs/testing/developer/testcaserequirements/index.rst
index 6171a07f..b03a033c 100644
--- a/docs/testing/developer/testcaserequirements/index.rst
+++ b/docs/testing/developer/testcaserequirements/index.rst
@@ -23,7 +23,7 @@ portability across NFVI instances. All OVP tests are available in open source
and are executed in open source test frameworks.
-Test case requirements
+Test Case Requirements
======================
The following requirements are mandatory for a test to be submitted for
@@ -101,7 +101,7 @@ consideration in the OVP test suite:
- Use case specification
- Test preconditions
- Basic test flow execution description and test assertions
- - Pass fail criteria
+ - Pass/Fail criteria
- The following things may be documented for the test case:
@@ -120,9 +120,9 @@ Dovetail Test Suite Naming Convention
Test case naming and structuring must comply with the following conventions.
The fully qualified name of a test case must comprise three sections:
-`<testproject>.<test_area>.<test_case_name>`
+`<test_project>.<test_area>.<test_case_name>`
-- **testproject**: The fully qualified test case name must identify the test
+- **test_project**: The fully qualified test case name must identify the test
project which developed and maintains the test case.
- **test_area**: The fully qualified test case name must identify the test case
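For example, applying this convention to test case names used elsewhere in this release::

    functest.vping.ssh      # test_project: functest,  test_area: vping, test_case_name: ssh
    yardstick.ha.nova_api   # test_project: yardstick, test_area: ha,    test_case_name: nova_api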
diff --git a/docs/testing/user/certificationworkflow/ApplicationForm.rst b/docs/testing/user/certificationworkflow/ApplicationForm.rst
index aac9a46e..1aa937b0 100644
--- a/docs/testing/user/certificationworkflow/ApplicationForm.rst
+++ b/docs/testing/user/certificationworkflow/ApplicationForm.rst
@@ -2,9 +2,9 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) OPNFV, Intel Corporation and others.
-=======================================
-OPNFV Verified Program Application Form
-=======================================
+===========================================
+OPNFV Verification Program Application Form
+===========================================
+----------------------------------+--------------------------------------------------------------------------------------------+
diff --git a/docs/testing/user/certificationworkflow/index.rst b/docs/testing/user/certificationworkflow/index.rst
index 27a1b511..bc43e775 100644
--- a/docs/testing/user/certificationworkflow/index.rst
+++ b/docs/testing/user/certificationworkflow/index.rst
@@ -60,7 +60,10 @@ LFN, please contact `LF Networking`_ for participation fee information.
Step 2: Testing
===============
-The following documents guide testers to prepare the test environment and run tests:
+NFVI Testing
+------------
+
+The following documents guide testers to prepare the NFVI test environment and run tests:
- :ref:`dovetail-system_preparation_guide`
- :ref:`dovetail-test_case_specification`
@@ -69,6 +72,15 @@ The following documents guide testers to prepare the test environment and run te
A unique Test ID is generated by the Dovetail tool for each test run and can only be
submitted to the OVP web portal once.
+VNF Testing
+-----------
+
+The following document guide testers to prepare the environment and run the VNF tests:
+
+- `Deploy ONAP via OOM <https://logs.onap.org/production/vex-yul-ecomp-jenkins-1/doc-elalto-verify-rtd/214/html/submodules/oom.git/docs/oom_quickstart_guide.html>`_
+- `ONAP VNF Test Specifications <https://docs.onap.org/en/elalto/submodules/vnfrqts/testcases.git/docs/index.html>`_
+- :ref:`dovetail-vnf_testers_guide`
+
Step 3: Submitting Test Results
===============================
diff --git a/docs/testing/user/ovpaddendum/exemption-strict-API-validation.rst b/docs/testing/user/ovpaddendum/exemption-strict-API-validation.rst
index c2546b81..27173ed9 100644
--- a/docs/testing/user/ovpaddendum/exemption-strict-API-validation.rst
+++ b/docs/testing/user/ovpaddendum/exemption-strict-API-validation.rst
@@ -30,7 +30,7 @@ Consequently, such cloud implementations do not pass Tempest tests which
validate API responses despite actually implementing and providing the tested
functionality.
-This document describes an exemption process for use within the OPNFV Verified
+This document describes an exemption process for use within the OPNFV Verification
Program which
i) allows vendors to pass Tempest tests if the tested functionality is
@@ -63,7 +63,7 @@ is actually available. As a result, a Tempest test failing due to extended API
responses does not provide information about whether the tested functionality
is available or not.
-The OPNFV Verified Program has inherited the policy to strictly validate API
+The OPNFV Verification Program has inherited the policy to strictly validate API
responses from OpenStack by including a selection of Tempest tests in its
compliance test suite. However, it was never discussed if OVP should adopt this
policy as well. It turns out that this policy causes challenges for vendors of
@@ -168,18 +168,18 @@ responses is as follows:
not.
#. The exemption will be made available to participants of OVP as part of a
- service release of OVP 2018.01 and 2018.09.
+ service release of OVP 2018.01, 2018.09 and 2019.12.
#. The C&C committee will monitor the situation around exemptions and may
decide changes to the above process at any time, including the possibility
to stop issuing exemptions.
-.. [1] https://review.openstack.org/#/c/156130/
+.. [1] https://review.opendev.org/gitweb?p=openstack%2Ftempest.git;a=commitdiff;h=f0c30bc241e5160e3fe7402e738ea8f56a8b1315
.. [2] https://github.com/openstack/tempest/tree/master/tempest/lib/api_schema/response/compute
-.. [3] https://developer.openstack.org/api-ref/compute/#show-server-details
+.. [3] https://docs.openstack.org/api-ref/compute/#show-server-details
.. [4] https://wiki.openstack.org/wiki/Governance/InteropWG
.. [5] https://www.openstack.org/brand/interop/
.. [6] http://lists.openstack.org/pipermail/openstack-dev/2016-June/097349.html
-.. [7] https://review.openstack.org/#/c/333067/
-.. [8] https://review.openstack.org/#/c/512447/
+.. [7] https://review.opendev.org/gitweb?p=openstack%2Finterop.git;a=commitdiff;h=c38e18b343505f16a74a97b748362fa7f1a01e57
+.. [8] https://review.opendev.org/gitweb?p=openstack%2Finterop.git;a=commitdiff;h=5748c296a658cf5efebc16ad9d7644ca1125b073
diff --git a/docs/testing/user/ovpaddendum/index.rst b/docs/testing/user/ovpaddendum/index.rst
index 7072d3f1..811c2bcd 100644
--- a/docs/testing/user/ovpaddendum/index.rst
+++ b/docs/testing/user/ovpaddendum/index.rst
@@ -3,8 +3,10 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) Intel and others
+.. _dovetail-ovp-addendum:
+
=======================================
-Guidelines Addendum for 2018.09 release
+Guidelines Addendum for 2019.12 release
=======================================
.. toctree::
@@ -15,12 +17,12 @@ Introduction
============
This addendum provides a high-level description of the testing scope and
-pass/fail criteria used in the OPNFV Verified Program (OVP) for the 2018.09
+pass/fail criteria used in the OPNFV Verification Program (OVP) for the 2019.12
release. This information is intended as an overview for OVP testers and for
the Dovetail Project to help guide test-tool and test-case development for the
-OVP 2018.09 release. The Dovetail project is responsible for documenting
+OVP 2019.12 release. The Dovetail project is responsible for documenting
test-case specifications as well as implementing the OVP tool-chain through
-collaboration with the OPNFV testing community. OVP testing focuses on
+collaboration with the OPNFV and ONAP testing communities. OVP testing focuses on
establishing the ability of the System Under Test (SUT) to perform NFVI and VIM
operations and support Service Provider oriented features that ensure
manageable, resilient and secure networks.
@@ -29,15 +31,15 @@ manageable, resilient and secure networks.
Meaning of Compliance
=====================
-OPNFV Compliance indicates adherence of an NFV platform to behaviors defined
-through specific platform capabilities, allowing to prepare, instantiate,
-operate and remove VNFs running on the NFVI. OVP 2018.09 compliance evaluates
+OPNFV Compliance indicates adherence of an NFV platform and VNF to behaviors
+defined through specific platform capabilities that allow preparing, instantiating,
+operating and removing VNFs running on the NFVI. OVP 2019.12 compliance evaluates
the ability of a platform to support Service Provider network capabilities and
-workloads that are supported in the OPNFV platform as of this release.
-Compliance test cases are designated as compulsory or optional based on the
-maturity of OPNFV capabilities as well as industry expectations. Compulsory
-test cases may for example include NFVI management capabilities whereas tests
-for certain high-availability features may be deemed as optional.
+workloads that are supported in the OPNFV and ONAP platforms as of this release.
+Test cases are designated as compulsory or optional based on the maturity
+of capabilities as well as industry expectations. Compulsory test cases may for
+example include NFVI management capabilities whereas tests for certain
+high-availability features may be deemed as optional.
Test coverage and pass/fail criteria are designed to ensure an acceptable level
of compliance but not be so restrictive as to disqualify variations in platform
@@ -47,14 +49,24 @@ implementations, capabilities and features.
SUT Assumptions
===============
-Assumptions about the System Under Test (SUT) include ...
+Assumptions about the NFVI System Under Test (SUT) for the OVP Infrastructure
+badge include ...
- The minimal specification of physical infrastructure, including controller
- nodes, compute nodes and networks, is defined by the `Pharos specification`_.
+ nodes, compute nodes and networks, is defined for the NFVI by the
+ `Pharos specification`_.
- The SUT is fully deployed and operational, i.e. SUT deployment tools are
out of scope of testing.
+Assumptions about the VNF System Under Test (SUT) for the OVP VNF
+badge include ...
+
+- The VNF templates and disk image(s) are available, and the disk image(s)
+ have been deployed to the ONAP Cloud Site.
+
+- The required values for the VNF pre-load files are available for the selected
+ ONAP Cloud Site.
Scope of Testing
================
@@ -64,7 +76,7 @@ outlines the key objectives of the OVP as follows:
- Help build the market for
- - OPNFV based infrastructure
+ - LFN based infrastructure
- applications designed to run on that infrastructure
@@ -76,8 +88,8 @@ outlines the key objectives of the OVP as follows:
- Enhance interoperability
The guidelines further directs the scope to be constrained to "features,
-capabilities, components, and interfaces included in an OPNFV release that are
-generally available in the industry (e.g., through adoption by an upstream
+capabilities, components, and interfaces included in the OPNFV and ONAP releases
+that are generally available in the industry (e.g., through adoption by an upstream
community)", and that compliance verification is evaluated using "functional
tests that focus on defined interfaces and/or behaviors without regard to the
implementation of the underlying system under test".
@@ -92,6 +104,13 @@ also out of scope or for further study. Newer functional areas such as MANO
(outside of APIs in the NFVI and VIM) are still developing and are for future
considerations.
+ONAP provides a comprehensive platform for real-time, policy-driven orchestration
+and automation of physical and virtual network functions that will enable software,
+network, IT and cloud providers and developers to rapidly automate new services and
+support complete lifecycle management. By unifying member resources, ONAP is
+accelerating the development of a vibrant ecosystem around a globally shared
+architecture and implementation for network automation, with an open standards focus,
+faster than any one product could on its own.
General Approach
----------------
@@ -137,7 +156,7 @@ test scope.
Analysis of Scope
-----------------
-In order to define the scope of the 2018.09 release of the compliance and
+In order to define the scope of the 2019.12 release of the compliance and
verification program, this section analyzes NFV-focused platform capabilities
with respect to the high-level objectives and the general approach outlined in
the previous section. The analysis determines which capabilities are suitable
@@ -169,8 +188,8 @@ including:
suspend/resume, reboot, migrate)
- simple virtual machine resource scheduling on multiple nodes
-OPNFV mainly supports OpenStack as the VIM up to the 2018.09 release. The VNFs
-used in the OVP program, and features in scope for the program which are
+OPNFV mainly supports OpenStack as the VIM up to the 2019.12 release. The VNFs
+used in the OVP NFVI program, and features in scope for the program which are
considered to be basic to all VNFs, require commercial OpenStack distributions
to support a common basic level of cloud capabilities, and to be compliant to a
common specification for these capabilities. This requirement significantly
@@ -198,7 +217,7 @@ feature requirements expand beyond common OpenStack (or other VIM)
requirements. OPNFV OVP will incorporate test cases to verify compliance in
these areas as they become mature. Because these extensions may impose new API
demands, maturity and industry adoption is a prerequisite for making them a
-mandatory requirement for OPNFV compliance. At the time of the 2018.09 release,
+mandatory requirement for OPNFV compliance. At the time of the 2019.12 release,
we have promoted tests of the OpenStack IPv6 API from optional to mandatory
while keeping BGPVPN as optional test area. Passing optional tests will not be
required to pass OPNFV compliance verification.
@@ -207,7 +226,7 @@ BGPVPNs are relevant due to the wide adoption of MPLS/BGP based VPNs in wide
area networks, which makes it necessary for data centers hosting VNFs to be
able to seamlessly interconnect with such networks. SFC is also an important
NFV requirement, however its implementation has not yet been accepted or
-adopted in the upstream at the time of the 2018.09 release.
+adopted in the upstream at the time of the 2019.12 release.
3. High availability
@@ -233,7 +252,7 @@ Resiliency testing involves stressing the SUT and verifying its ability to
absorb stress conditions and still provide an acceptable level of service.
Resiliency is an important requirement for end-users.
-The 2018.09 release of OVP includes a load test which spins up a number of VMs
+The 2019.12 release of OVP includes a load test which spins up a number of VMs
pairs in parallel to assert that the system under test can process the workload
spike in a stable and deterministic fashion.
@@ -248,12 +267,12 @@ capabilities expected of an end-user deployment. It is an area that we should
address in the near future, to define a common set of requirements and develop
test cases for verifying those requirements.
-The 2018.09 release includes new test cases which verify that the role-based
+The 2019.12 release includes new test cases which verify that the role-based
access control (RBAC) functionality of the VIM is behaving as expected.
Another common requirement is security vulnerability scanning. While the OPNFV
security project integrated tools for security vulnerability scanning, this has
-not been fully analyzed or exercised in 2018.09 release. This area needs
+not been fully analyzed or exercised in the 2019.12 release. This area needs
further work to identify the required level of security for the purpose of
OPNFV in order to be integrated into the OVP. End-user inputs on specific
requirements in security is needed.
@@ -266,7 +285,7 @@ essential information and control mechanisms. These subsystems include
telemetry, fault management (e.g. alarms), performance management, audits, and
control mechanisms such as security and configuration policies.
-The current 2018.09 release implements some enabling capabilities in NFVI/VIM
+The current 2019.12 release implements some enabling capabilities in NFVI/VIM
such as telemetry, policy, and fault management. However, the specification of
expected system components, behavior and the test cases to verify them have not
yet been adequately developed. We will therefore not be testing this area at
@@ -285,10 +304,10 @@ compliance because it validates design patterns and support for the types of
NFVI features that users care about.
There are a lot of projects in OPNFV developing use cases and sample VNFs. The
-2018.09 release of OVP features two such use-case tests, spawning and verifying
+2019.12 release of OVP features two such use-case tests, spawning and verifying
a vIMS and a vEPC, respectively.
-8. Additional capabilities
+8. Additional NFVI capabilities
In addition to the capabilities analyzed above, there are further system
aspects which are of importance for the OVP. These comprise operational and
@@ -305,15 +324,24 @@ considered widely available in commercial systems in order to include them in
the OVP. Hence, these aspects are left for inclusion in future releases of the
OVP.
+9. VNF Compliance
+
+VNF Compliance verifies that the VNF template files conform to the requirements
+documented by the ONAP VNFRQTS project.
+10. VNF Validation
-Scope of the 2018.09 release of the OVP
+VNF Validation verifies that the VNF can be onboarded into ONAP and that ONAP is able to
+perform basic orchestration operations with the VNF, including instantiating the
+VNF on the Cloud Site.
+
+Scope of the 2019.12 release of the OVP
---------------------------------------
-Summarizing the results of the analysis above, the scope of the 2018.09 release
+Summarizing the results of the analysis above, the scope of the 2019.12 release
of OVP is as follows:
-- Mandatory test scope:
+- Mandatory NFVI test scope:
- functest.vping.userdata
- functest.vping.ssh
@@ -338,7 +366,7 @@ of OVP is as follows:
- yardstick.ha.database
- bottlenecks.stress.ping
-- Optional test scope:
+- Optional NFVI test scope:
- functest.tempest.ipv6_scenario
- functest.tempest.multi_node_scheduling
@@ -351,6 +379,10 @@ of OVP is as follows:
- functest.vnf.vims
- functest.vnf.vepc
+- Mandatory VNF test scope:
+
+ - Refer to `ONAP VNF Test Case Descriptions <https://docs.onap.org/en/elalto/submodules/vnfrqts/testcases.git/docs/index.html>`_
+
\* The OPNFV OVP utilizes the same set of test cases as the OpenStack
interoperability program *OpenStack Powered Compute*. Passing the OPNFV OVP
does **not** imply that the SUT is certified according to the *OpenStack
@@ -369,7 +401,7 @@ Scope considerations for future OVP releases
--------------------------------------------
Based on the previous analysis, the following items are outside the scope of
-the 2018.09 release of OVP but are being considered for inclusion in future
+the 2019.12 release of OVP but are being considered for inclusion in future
releases:
- service assurance
diff --git a/docs/testing/user/reviewerguide/images/ovp_log_setup.png b/docs/testing/user/reviewerguide/images/ovp_log_setup.png
index 4a68d9b6..f53b94d9 100644
--- a/docs/testing/user/reviewerguide/images/ovp_log_setup.png
+++ b/docs/testing/user/reviewerguide/images/ovp_log_setup.png
Binary files differ
diff --git a/docs/testing/user/reviewerguide/images/ovp_pass_fraction.png b/docs/testing/user/reviewerguide/images/ovp_pass_fraction.png
index 94dcd45a..30672e02 100644
--- a/docs/testing/user/reviewerguide/images/ovp_pass_fraction.png
+++ b/docs/testing/user/reviewerguide/images/ovp_pass_fraction.png
Binary files differ
diff --git a/docs/testing/user/reviewerguide/images/ovp_pass_percentage.png b/docs/testing/user/reviewerguide/images/ovp_pass_percentage.png
index 0d477a78..1a61f7b4 100644
--- a/docs/testing/user/reviewerguide/images/ovp_pass_percentage.png
+++ b/docs/testing/user/reviewerguide/images/ovp_pass_percentage.png
Binary files differ
diff --git a/docs/testing/user/reviewerguide/images/ovp_result_overview.png b/docs/testing/user/reviewerguide/images/ovp_result_overview.png
deleted file mode 100644
index 1f66a69c..00000000
--- a/docs/testing/user/reviewerguide/images/ovp_result_overview.png
+++ /dev/null
Binary files differ
diff --git a/docs/testing/user/reviewerguide/images/ovp_result_review.png b/docs/testing/user/reviewerguide/images/ovp_result_review.png
index 427127e0..56633447 100644
--- a/docs/testing/user/reviewerguide/images/ovp_result_review.png
+++ b/docs/testing/user/reviewerguide/images/ovp_result_review.png
Binary files differ
diff --git a/docs/testing/user/reviewerguide/images/ovp_top_nav.png b/docs/testing/user/reviewerguide/images/ovp_top_nav.png
index 3dfc0b09..a1c261f8 100644
--- a/docs/testing/user/reviewerguide/images/ovp_top_nav.png
+++ b/docs/testing/user/reviewerguide/images/ovp_top_nav.png
Binary files differ
diff --git a/docs/testing/user/reviewerguide/images/review_status.png b/docs/testing/user/reviewerguide/images/review_status.png
new file mode 100644
index 00000000..911b06fd
--- /dev/null
+++ b/docs/testing/user/reviewerguide/images/review_status.png
Binary files differ
diff --git a/docs/testing/user/reviewerguide/images/sut_info.png b/docs/testing/user/reviewerguide/images/sut_info.png
index 53c3d51a..29c249b2 100644
--- a/docs/testing/user/reviewerguide/images/sut_info.png
+++ b/docs/testing/user/reviewerguide/images/sut_info.png
Binary files differ
diff --git a/docs/testing/user/reviewerguide/index.rst b/docs/testing/user/reviewerguide/index.rst
index 391f6e3d..f08ae784 100644
--- a/docs/testing/user/reviewerguide/index.rst
+++ b/docs/testing/user/reviewerguide/index.rst
@@ -2,9 +2,9 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) Ericsson AB
-=============================================
+==================
OVP Reviewer Guide
-=============================================
+==================
.. toctree::
:maxdepth: 2
@@ -16,120 +16,110 @@ Introduction
This document provides detailed guidance for reviewers on how to handle the result review
process.
-The OPNFV Verified program (OVP) provides the ability for users to upload test results in
-`OVP portal <https://verified.opnfv.org>`_ and request from OVP community to review them.
-After the user submit for review the test results **Status** is changed from 'private' to 'review'
-(as shown in figure 2).
+The OPNFV Verification Program (OVP) provides the ability for users to upload test results to the
+`OVP portal <https://nfvi-verified.lfnetworking.org>`_ and request a review from the OVP community.
The OVP administrator will ask for review volunteers using the ovp-support@lfnetworking.org email alias.
The incoming results for review will be identified by the administrator with particular **Test ID**
and **Owner** values.
Volunteers who accept the review request can access the test results by logging in to the
-`OVP portal <https://verified.opnfv.org>`_ and the click on the **My Results** tab in top-level
-navigation bar.
+`OVP portal <https://nfvi-verified.lfnetworking.org>`_ and then click on the **Incoming Reviews**
+tab in the top-level navigation bar.
.. image:: images/ovp_top_nav.png
:align: center
:scale: 100%
-Figure 1
-The corresponding OVP portal result will have a status of 'review'.
+After the user submits the test results for review, their **Status** changes from 'private'
+to 'review'. Reviewers can confirm that the corresponding OVP portal result has a status of
+'review'. The **Application** information is also listed here for review; users submit this
+information together with their results. Reviewers can also see who has already approved or
+not approved the test results by clicking on **View Reviews**.
.. image:: images/ovp_result_review.png
:align: center
:scale: 100%
-Figure 2
Reviewers must follow the checklist below to ensure review consistency for the OPNFV
-Verified Program (OVP) 2018.09 (Fraser) release at a minimum.
+Verification Program (OVP) 2019.12 (Hunter) release at a minimum.
-#. **Mandatory Test Area Results** - Validate that results for all mandatory test areas are present.
-#. **Test-Case Pass Percentage** - Ensure all tests have passed (100% pass rate).
-#. **Log File Verification** - Inspect the log file for each test area.
+#. **Test Case Pass Percentage** - Ensure all mandatory tests have passed (100% pass rate).
+#. **Mandatory Test Case Results** - Validate that results for all mandatory test cases are present.
+#. **Log File Verification** - Inspect the log file for each test case.
#. **SUT Info Verification** - Validate the system under test (SUT) hardware and software endpoint info is present.
+Test Case Pass Percentage
+=========================
-1. Mandatory Test Area Results
-==============================
+All mandatory test cases have to run successfully. The **Test Run Results** figure below is
+one way to check this; it shows that only 96.71% of the mandatory test cases have passed,
+whereas this value must not be lower than 100%.
+
+.. image:: images/ovp_pass_percentage.png
+ :align: center
+ :width: 350 px
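+
+For reference, the percentage in the figure can be reproduced from the raw
+pass/total counts. The counts below are purely illustrative numbers chosen to
+match the figure::
+
+    # Hypothetical counts that yield the 96.71% shown above.
+    passed, total = 441, 456
+    rate = 100.0 * passed / total
+    print(f"{rate:.2f}%")  # 96.71% -- below 100%, so this run cannot be approved
+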
-Test results can be displayed by clicking on the hyperlink under the 'Test ID' column.
-User should validate that results for all mandatory test areas are included in the overall test suite. The required
-mandatory test cases are:
-- functest.vping.userdata
-- functest.vping.ssh
+Mandatory Test Case Results
+===========================
+
+Test results can be displayed by clicking on the hyperlink under the **Test ID** column.
+Reviewers should validate that results for all mandatory test cases are included in the overall
+test suite. The required mandatory test cases are listed next (a quick
+completeness check is sketched after the list):
+
- bottlenecks.stress.ping
-- functest.tempest.osinterop
+- functest.security.patrole
- functest.tempest.compute
- functest.tempest.identity_v3
- functest.tempest.image
+- functest.tempest.ipv6_api
- functest.tempest.network_api
-- functest.tempest.volume
- functest.tempest.neutron_trunk_ports
-- functest.tempest.ipv6_api
-- functest.security.patrole
-- yardstick.ha.nova_api
-- yardstick.ha.neutron_server
-- yardstick.ha.keystone
-- yardstick.ha.glance_api
+- functest.tempest.osinterop
+- functest.tempest.volume
+- functest.vping.ssh
+- functest.vping.userdata
- yardstick.ha.cinder_api
- yardstick.ha.cpu_load
+- yardstick.ha.database
- yardstick.ha.disk_load
+- yardstick.ha.glance_api
- yardstick.ha.haproxy
+- yardstick.ha.keystone
+- yardstick.ha.neutron_server
+- yardstick.ha.nova_api
- yardstick.ha.rabbitmq
-- yardstick.ha.database
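+
+A reviewer may automate the completeness check with a set difference against
+the list above. This is only an illustrative sketch; ``submitted`` would be
+populated with the test case names shown in the result overview::
+
+    MANDATORY = {
+        "bottlenecks.stress.ping",
+        "functest.security.patrole",
+        "functest.tempest.compute",
+        "functest.tempest.identity_v3",
+        "functest.tempest.image",
+        "functest.tempest.ipv6_api",
+        "functest.tempest.network_api",
+        "functest.tempest.neutron_trunk_ports",
+        "functest.tempest.osinterop",
+        "functest.tempest.volume",
+        "functest.vping.ssh",
+        "functest.vping.userdata",
+        "yardstick.ha.cinder_api",
+        "yardstick.ha.cpu_load",
+        "yardstick.ha.database",
+        "yardstick.ha.disk_load",
+        "yardstick.ha.glance_api",
+        "yardstick.ha.haproxy",
+        "yardstick.ha.keystone",
+        "yardstick.ha.neutron_server",
+        "yardstick.ha.nova_api",
+        "yardstick.ha.rabbitmq",
+    }
+
+    submitted = set()  # fill in from the result overview
+    missing = MANDATORY - submitted
+    if missing:
+        print("Missing mandatory test cases:", sorted(missing))
+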
*Note that the 'Test ID' column in this view condenses the UUID used for 'Test ID' to
eight characters even though the 'Test ID' is a longer UUID in the back-end.*
-.. image:: images/ovp_result_overview.png
- :align: center
- :scale: 100%
-
-Figure 3
-
-2. Test-Case Pass Percentage
-============================
+Failed test cases can be easily identified by the color of the pass/total number:
-All mandatory test-cases have to run successfully. The below diagram of the 'Test Run Results' is one method and
-shows that 98.15% of the mandatory test-cases have passed.
-This value must not be lower than 100%.
-
-.. image:: images/ovp_pass_percentage.png
- :align: center
- :width: 350 px
-
-Figure 4
-
-Failed test cases can also be easy identified by the color of pass/total number. :
-
-- Green when all test-cases pass
-- Orange when at least one fails
-- Red when all test-cases fail
+- **Green** when all test cases pass
+- **Orange** when at least one fails/skips
+- **Red** when all test cases fail/skip
.. image:: images/ovp_pass_fraction.png
:align: center
:width: 350 px
-Figure 5
-3. Log File Verification
-========================
+Log File Verification
+=====================
The log file of each mandatory test case has to be verified for content.
Log files can be displayed by clicking on the setup icon to the right of the results,
-as shown in figure below.
+as shown in the figure below.
.. image:: images/ovp_log_setup.png
:align: center
:scale: 100%
-Figure 6
*Note, all log files can be found in the results/ directory as shown in the following table.*
@@ -148,37 +138,46 @@ Figure 6
+------------------------+--------------------------+
-The bottlenecks log must contain the 'SUCCESS' result as shown in following example:
+Bottlenecks Logs
+----------------
- 2018-08-22 14:11:21,815 [INFO] yardstick.benchmark.core.task task.py:127 Testcase: "ping_bottlenecks" **SUCCESS**!!!
+The Bottlenecks log must contain the 'SUCCESS' result at its end, as shown in the following example:
-Functest logs opens an html page that lists all test cases as shown in figure 7. All test cases must have run
-successfuly.
+ 2019-12-03 07:35:14,630 [INFO] yardstick.benchmark.core.task task.py:129 Testcase: "ping_bottlenecks" SUCCESS!!!
-.. image:: images/ovp_log_files_functest_image.png
- :align: center
- :scale: 100%
-Figure 7
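+
+This check can also be scripted. The sketch below is illustrative only; the
+file name ``results/bottlenecks.log`` is an assumption and may differ in a
+given submission::
+
+    from pathlib import Path
+
+    log = Path("results/bottlenecks.log").read_text()
+    # The mandatory stress test must report SUCCESS for "ping_bottlenecks".
+    assert 'Testcase: "ping_bottlenecks" SUCCESS' in log
+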
+Functest Logs
+-------------
-For the vping test area log file (functest.log). The two entries displayed in the tables below must be present in
-this log file.
+There are two different types of Functest logs: plain text for the **vping** test cases and
+an HTML file for the **tempest** and **security** test cases.
-**functest.vping_userdata**
+For the **vping** test cases, the two entries displayed in the tables below must be present in the log files.
+
+**functest.vping.ssh**
.. image:: images/ovp_vping_ssh.png
:align: center
:scale: 100%
-Figure 8
-**functest.vping_ssh**
+**functest.vping.userdata**
.. image:: images/ovp_vping_user.png
:align: center
:scale: 100%
-Figure 9
+
+For the **tempest** and **security** test cases, opening the log displays an HTML page that
+lists all test cases, as shown below. All test cases must have run successfully.
+
+.. image:: images/ovp_log_files_functest_image.png
+ :align: center
+ :scale: 100%
+
+
+Yardstick Logs
+--------------
The yardstick log must contain the 'SUCCESS' result for each of the test cases within this
test area. This can be verified by searching the log for the keyword 'SUCCESS'.
@@ -190,29 +189,39 @@ Examples of a FAILED and a SUCCESS test case are listed below:
2018-08-28 10:23:41,907 [INFO] yardstick.benchmark.core.task task.py:127 Testcase: "opnfv_yardstick_tc052" **SUCCESS**!!!
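+
+The keyword search can be scripted as well. The sketch below is illustrative;
+the file name ``results/yardstick.log`` is an assumption::
+
+    from pathlib import Path
+
+    for line in Path("results/yardstick.log").read_text().splitlines():
+        # Every "Testcase:" verdict line must carry SUCCESS; anything else fails review.
+        if "Testcase:" in line and "SUCCESS" not in line:
+            print("Suspect verdict:", line)
+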
-4. SUT Info Verification
-========================
+SUT Info Verification
+=====================
SUT information must be present in the results to validate that all required endpoint services
and at least two controllers were present during test execution. For the results shown below,
-click the '**info**' hyperlink in the **SUT** column to navigate to the SUT information page.
+click the **info** hyperlink in the **SUT** column to navigate to the SUT information page.
.. image:: images/sut_info.png
:align: center
:scale: 100%
-Figure 10
-In the '**Endpoints**' listing shown below for the SUT VIM component, ensure that services are
+In the **Endpoints** listing shown below for the SUT VIM component, ensure that services are
present for identity, compute, image, volume and network at a minimum by inspecting the
-'**Service Type**' column.
+**Service Type** column.
.. image:: images/sut_endpoints.png
:align: center
:scale: 100%
-Figure 11
-Inspect the '**Hosts**' listing found below the Endpoints secion of the SUT info page and ensure
+Inspect the **Hosts** listing found below the Endpoints section of the SUT info page and ensure
at least two hosts are present, as two controllers are required for the mandatory HA
-test-cases.
+test cases.
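+
+Both conditions lend themselves to a short scripted check. The sketch below is
+illustrative; the input values would be copied by hand from the SUT info page::
+
+    # Service types taken from the Endpoints listing (illustrative values).
+    service_types = {"identity", "compute", "image", "volume", "network"}
+    required = {"identity", "compute", "image", "volume", "network"}
+    assert required.issubset(service_types), "required endpoint service missing"
+
+    # Hosts taken from the Hosts listing (illustrative values).
+    hosts = ["controller-0", "controller-1", "compute-0"]
+    assert len(hosts) >= 2, "two controllers are required for the HA test cases"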
+
+
+Approve or Not Approve Results
+==============================
+
+To approve or not approve a test, click **Operation** and choose **approve** or
+**not approve**. Once you have recorded your decision, you can click **View Reviews**
+to see the review status, as shown below.
+
+.. image:: images/review_status.png
+ :align: center
+ :scale: 100%
diff --git a/docs/testing/user/systempreparation/index.rst b/docs/testing/user/systempreparation/index.rst
index 5bc150a3..71916736 100644
--- a/docs/testing/user/systempreparation/index.rst
+++ b/docs/testing/user/systempreparation/index.rst
@@ -5,9 +5,9 @@
.. _dovetail-system_preparation_guide:
-============================
-OVP System Preparation Guide
-============================
+=================================
+OVP NFVI System Preparation Guide
+=================================
This document provides a general guide to hardware system prerequisites
and expectations for running OPNFV OVP testing. For detailed guide of
diff --git a/docs/testing/user/testspecification/highavailability/index.rst b/docs/testing/user/testspecification/highavailability/index.rst
index dd98ba94..e489894f 100644
--- a/docs/testing/user/testspecification/highavailability/index.rst
+++ b/docs/testing/user/testspecification/highavailability/index.rst
@@ -31,7 +31,7 @@ This test area references the following specifications:
- ETSI GS NFV-REL 001
- - http://www.etsi.org/deliver/etsi_gs/NFV-REL/001_099/001/01.01.01_60/gs_nfv-rel001v010101p.pdf
+ - https://www.etsi.org/deliver/etsi_gs/NFV-REL/001_099/001/01.01.01_60/gs_nfv-rel001v010101p.pdf
- OpenStack High Availability Guide
diff --git a/docs/testing/user/testspecification/security_patrole/index.rst b/docs/testing/user/testspecification/security_patrole/index.rst
index 610b7f0d..250d8bbd 100644
--- a/docs/testing/user/testspecification/security_patrole/index.rst
+++ b/docs/testing/user/testspecification/security_patrole/index.rst
@@ -17,13 +17,12 @@ The test area specifically validates services image and networking.
References
================
-- `OpenStack image service API reference <https://developer.openstack.org/api-ref/image/v2/index.html>`_
-- `OpenStack metadata definitions service API reference <https://developer.openstack.org/api-ref/image/v2/metadefs-index.html>`_
-- `OpenStack layer 2 networking service API reference <https://developer.openstack.org/api-ref/network/v2/index.html#layer-2-networking>`_
-- `OpenStack layer 3 networking service API reference <https://developer.openstack.org/api-ref/network/v2/index.html#layer-3-networking>`_
-- `OpenStack network security API reference <https://developer.openstack.org/api-ref/network/v2/index.html#security>`_
-- `OpenStack resource management API reference <https://developer.openstack.org/api-ref/network/v2/index.html#resource-management>`_
-- `OpenStack networking agents API reference <https://developer.openstack.org/api-ref/network/v2/index.html#networking-agents>`_
+- `OpenStack image service API reference <https://docs.openstack.org/api-ref/image/v2/index.html>`_
+- `OpenStack metadata definitions service API reference <https://docs.openstack.org/api-ref/image/v2/metadefs-index.html>`_
+- `OpenStack layer 2 networking service API reference <https://docs.openstack.org/api-ref/network/v2/index.html#layer-2-networking>`_
+- `OpenStack layer 3 networking service API reference <https://docs.openstack.org/api-ref/network/v2/index.html#layer-3-networking>`_
+- `OpenStack network security API reference <https://docs.openstack.org/api-ref/network/v2/index.html#security>`_
+- `OpenStack resource management API reference <https://docs.openstack.org/api-ref/network/v2/index.html#resource-management>`_
System Under Test (SUT)
@@ -46,7 +45,7 @@ by the tests, review the Python source code accessible via the following links.
These tests cover the RBAC tests of image basic operations.
Implementation:
-`BasicOperationsImagesRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/image/test_images_rbac.py>`_
+`BasicOperationsImagesRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/image/test_images_rbac.py>`_
- patrole_tempest_plugin.tests.api.image.test_images_rbac.BasicOperationsImagesRbacTest.test_create_image
- patrole_tempest_plugin.tests.api.image.test_images_rbac.BasicOperationsImagesRbacTest.test_create_image_tag
@@ -67,10 +66,9 @@ Implementation:
These tests cover the RBAC tests of image namespaces.
Implementation:
-`ImageNamespacesRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/image/test_image_namespace_rbac.py>`_
+`ImageNamespacesRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/image/test_image_namespace_rbac.py>`_
- patrole_tempest_plugin.tests.api.image.test_image_namespace_rbac.ImageNamespacesRbacTest.test_create_metadef_namespace
-- patrole_tempest_plugin.tests.api.image.test_image_namespace_rbac.ImageNamespacesRbacTest.test_list_metadef_namespaces
- patrole_tempest_plugin.tests.api.image.test_image_namespace_rbac.ImageNamespacesRbacTest.test_modify_metadef_namespace
@@ -79,7 +77,7 @@ Implementation:
These tests cover the RBAC tests of image namespaces objects.
Implementation:
-`ImageNamespacesObjectsRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/image/test_image_namespace_objects_rbac.py>`_
+`ImageNamespacesObjectsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/image/test_image_namespace_objects_rbac.py>`_
- patrole_tempest_plugin.tests.api.image.test_image_namespace_objects_rbac.ImageNamespacesObjectsRbacTest.test_create_metadef_object_in_namespace
- patrole_tempest_plugin.tests.api.image.test_image_namespace_objects_rbac.ImageNamespacesObjectsRbacTest.test_list_metadef_objects_in_namespace
@@ -92,7 +90,7 @@ Implementation:
These tests cover the RBAC tests of image namespaces property.
Implementation:
-`NamespacesPropertyRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/image/test_image_namespace_property_rbac.py>`_
+`NamespacesPropertyRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/image/test_image_namespace_property_rbac.py>`_
- patrole_tempest_plugin.tests.api.image.test_image_namespace_property_rbac.NamespacesPropertyRbacTest.test_add_md_properties
- patrole_tempest_plugin.tests.api.image.test_image_namespace_property_rbac.NamespacesPropertyRbacTest.test_get_md_properties
@@ -105,7 +103,7 @@ Implementation:
These tests cover the RBAC tests of image namespaces tags.
Implementation:
-`NamespaceTagsRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/image/test_image_namespace_tags_rbac.py>`_
+`NamespaceTagsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/image/test_image_namespace_tags_rbac.py>`_
- patrole_tempest_plugin.tests.api.image.test_image_namespace_tags_rbac.NamespaceTagsRbacTest.test_create_namespace_tag
- patrole_tempest_plugin.tests.api.image.test_image_namespace_tags_rbac.NamespaceTagsRbacTest.test_create_namespace_tags
@@ -119,7 +117,7 @@ Implementation:
These tests cover the RBAC tests of image resource types.
Implementation:
-`ImageResourceTypesRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/image/test_image_resource_types_rbac.py>`_
+`ImageResourceTypesRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/image/test_image_resource_types_rbac.py>`_
- patrole_tempest_plugin.tests.api.image.test_image_resource_types_rbac.ImageResourceTypesRbacTest.test_add_metadef_resource_type
- patrole_tempest_plugin.tests.api.image.test_image_resource_types_rbac.ImageResourceTypesRbacTest.test_get_metadef_resource_type
@@ -131,7 +129,7 @@ Implementation:
These tests cover the RBAC tests of image member.
Implementation:
-`ImagesMemberRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/image/test_images_member_rbac.py>`_
+`ImagesMemberRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/image/test_images_member_rbac.py>`_
- patrole_tempest_plugin.tests.api.image.test_images_member_rbac.ImagesMemberRbacTest.test_add_image_member
- patrole_tempest_plugin.tests.api.image.test_images_member_rbac.ImagesMemberRbacTest.test_delete_image_member
@@ -144,8 +142,8 @@ Implementation:
These tests cover the RBAC tests of network agents.
Implementation:
-`AgentsRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_agents_rbac.py#L24>`_ and
-`DHCPAgentSchedulersRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_agents_rbac.py#L147>`_.
+`AgentsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_agents_rbac.py#L24>`_ and
+`DHCPAgentSchedulersRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_agents_rbac.py#L147>`_.
- patrole_tempest_plugin.tests.api.network.test_agents_rbac.AgentsRbacTest.test_show_agent
- patrole_tempest_plugin.tests.api.network.test_agents_rbac.AgentsRbacTest.test_update_agent
@@ -159,7 +157,7 @@ Implementation:
These tests cover the RBAC tests of network floating ips.
Implementation:
-`FloatingIpsRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_floating_ips_rbac.py>`_
+`FloatingIpsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_floating_ips_rbac.py>`_
- patrole_tempest_plugin.tests.api.network.test_floating_ips_rbac.FloatingIpsRbacTest.test_create_floating_ip
- patrole_tempest_plugin.tests.api.network.test_floating_ips_rbac.FloatingIpsRbacTest.test_create_floating_ip_floatingip_address
@@ -173,25 +171,24 @@ Implementation:
These tests cover the RBAC tests of network basic operations.
Implementation:
-`NetworksRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_networks_rbac.py>`_
+`NetworksRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_networks_rbac.py>`_
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network
+- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_is_default
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_router_external
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_shared
-- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_subnet
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_delete_network
-- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_delete_subnet
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_list_dhcp_agents_on_hosting_network
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_network
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_network_provider_network_type
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_network_provider_physical_network
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_network_provider_segmentation_id
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_network_router_external
-- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_subnet
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_network
+- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_network_provider_physical_network
+- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_network_provider_segmentation_id
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_network_router_external
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_network_shared
-- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_subnet
**Network ports RBAC test:**
@@ -199,14 +196,14 @@ Implementation:
These tests cover the RBAC tests of network ports.
Implementation:
-`PortsRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_ports_rbac.py>`_
+`PortsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_ports_rbac.py>`_
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_allowed_address_pairs
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_binding_host_id
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_binding_profile
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_device_owner
-- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_fixed_ips
+- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_fixed_ips_ip_address
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_mac_address
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_security_enabled
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_delete_port
@@ -220,7 +217,7 @@ Implementation:
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_update_port_binding_host_id
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_update_port_binding_profile
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_update_port_device_owner
-- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_update_port_fixed_ips
+- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_update_port_fixed_ips_ip_address
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_update_port_mac_address
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_update_port_security_enabled
@@ -230,7 +227,7 @@ Implementation:
These tests cover the RBAC tests of network routers.
Implementation:
-`RouterRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_routers_rbac.py>`_
+`RouterRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_routers_rbac.py>`_
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_add_router_interface
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_create_router
@@ -238,6 +235,7 @@ Implementation:
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_create_router_external_fixed_ips
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_delete_router
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_remove_router_interface
+- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_show_high_availability_router
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_show_router
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_update_router
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_update_router_enable_snat
@@ -251,7 +249,7 @@ Implementation:
These tests cover the RBAC tests of network security groups.
Implementation:
-`SecGroupRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_security_groups_rbac.py>`_
+`SecGroupRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_security_groups_rbac.py>`_
- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_create_security_group
- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_create_security_group_rule
@@ -260,7 +258,7 @@ Implementation:
- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_list_security_group_rules
- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_list_security_groups
- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_show_security_group_rule
-- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_show_security_groups
+- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_show_security_group
- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_update_security_group
@@ -269,7 +267,7 @@ Implementation:
These tests cover the RBAC tests of network service providers.
Implementation:
-`ServiceProvidersRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_service_providers_rbac.py>`_
+`ServiceProvidersRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_service_providers_rbac.py>`_
- patrole_tempest_plugin.tests.api.network.test_service_providers_rbac.ServiceProvidersRbacTest.test_list_service_providers
@@ -279,7 +277,7 @@ Implementation:
These tests cover the RBAC tests of network subnetpools.
Implementation:
-`SubnetPoolsRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_subnetpools_rbac.py>`_
+`SubnetPoolsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_subnetpools_rbac.py>`_
- patrole_tempest_plugin.tests.api.network.test_subnetpools_rbac.SubnetPoolsRbacTest.test_create_subnetpool
- patrole_tempest_plugin.tests.api.network.test_subnetpools_rbac.SubnetPoolsRbacTest.test_create_subnetpool_shared
@@ -294,10 +292,36 @@ Implementation:
These tests cover the RBAC tests of network subnets.
Implementation:
-`SubnetsRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_subnets_rbac.py>`_
+`SubnetsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_subnets_rbac.py>`_
- patrole_tempest_plugin.tests.api.network.test_subnets_rbac.SubnetsRbacTest.test_create_subnet
- patrole_tempest_plugin.tests.api.network.test_subnets_rbac.SubnetsRbacTest.test_delete_subnet
- patrole_tempest_plugin.tests.api.network.test_subnets_rbac.SubnetsRbacTest.test_list_subnets
- patrole_tempest_plugin.tests.api.network.test_subnets_rbac.SubnetsRbacTest.test_show_subnet
- patrole_tempest_plugin.tests.api.network.test_subnets_rbac.SubnetsRbacTest.test_update_subnet
+
+
+**Network flavors RBAC test:**
+
+These tests cover the RBAC tests of network flavors.
+
+Implementation:
+`FlavorsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_flavors_rbac.py>`_
+
+- patrole_tempest_plugin.tests.api.network.test_flavors_rbac.FlavorsPluginRbacTest.test_create_flavor
+- patrole_tempest_plugin.tests.api.network.test_flavors_rbac.FlavorsPluginRbacTest.test_delete_flavor
+- patrole_tempest_plugin.tests.api.network.test_flavors_rbac.FlavorsPluginRbacTest.test_list_flavors
+- patrole_tempest_plugin.tests.api.network.test_flavors_rbac.FlavorsPluginRbacTest.test_show_flavor
+- patrole_tempest_plugin.tests.api.network.test_flavors_rbac.FlavorsPluginRbacTest.test_update_flavor
+
+
+**Network segments RBAC test:**
+
+These tests cover the RBAC tests of network segments.
+
+Implementation:
+`SegmentsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_network_segments_rbac.py>`_
+
+- patrole_tempest_plugin.tests.api.network.test_network_segments_rbac.NetworkSegmentsRbacTest.test_create_network_segments
+- patrole_tempest_plugin.tests.api.network.test_network_segments_rbac.NetworkSegmentsRbacTest.test_show_network_segments
+- patrole_tempest_plugin.tests.api.network.test_network_segments_rbac.NetworkSegmentsRbacTest.test_update_network_segments
diff --git a/docs/testing/user/testspecification/security_patrole_vxlan_dependent/index.rst b/docs/testing/user/testspecification/security_patrole_vxlan_dependent/index.rst
index 3ff48aa6..646cb8b6 100644
--- a/docs/testing/user/testspecification/security_patrole_vxlan_dependent/index.rst
+++ b/docs/testing/user/testspecification/security_patrole_vxlan_dependent/index.rst
@@ -16,13 +16,12 @@ which depend on vxlan physical networks.
References
================
-- `OpenStack image service API reference <https://developer.openstack.org/api-ref/image/v2/index.html>`_
-- `OpenStack metadata definitions service API reference <https://developer.openstack.org/api-ref/image/v2/metadefs-index.html>`_
-- `OpenStack layer 2 networking service API reference <https://developer.openstack.org/api-ref/network/v2/index.html#layer-2-networking>`_
-- `OpenStack layer 3 networking service API reference <https://developer.openstack.org/api-ref/network/v2/index.html#layer-3-networking>`_
-- `OpenStack network security API reference <https://developer.openstack.org/api-ref/network/v2/index.html#security>`_
-- `OpenStack resource management API reference <https://developer.openstack.org/api-ref/network/v2/index.html#resource-management>`_
-- `OpenStack networking agents API reference <https://developer.openstack.org/api-ref/network/v2/index.html#networking-agents>`_
+- `OpenStack image service API reference <https://docs.openstack.org/api-ref/image/v2/index.html>`_
+- `OpenStack metadata definitions service API reference <https://docs.openstack.org/api-ref/image/v2/metadefs-index.html>`_
+- `OpenStack layer 2 networking service API reference <https://docs.openstack.org/api-ref/network/v2/index.html#layer-2-networking>`_
+- `OpenStack layer 3 networking service API reference <https://docs.openstack.org/api-ref/network/v2/index.html#layer-3-networking>`_
+- `OpenStack network security API reference <https://docs.openstack.org/api-ref/network/v2/index.html#security>`_
+- `OpenStack resource management API reference <https://docs.openstack.org/api-ref/network/v2/index.html#resource-management>`_
System Under Test (SUT)
diff --git a/docs/testing/user/testspecification/stress/index.rst b/docs/testing/user/testspecification/stress/index.rst
index 74961fd1..5483fc93 100644
--- a/docs/testing/user/testspecification/stress/index.rst
+++ b/docs/testing/user/testspecification/stress/index.rst
@@ -71,7 +71,7 @@ Test Case 1 - Concurrent capacity based on life-cycle ping test
Short name
----------
-dovetail.stress.ping
+bottlenecks.stress.ping
Use case specification
----------------------
@@ -133,7 +133,7 @@ Test execution
* Test action 8: Go to *Test action 3* and do the test again to create *N2* VM pairs with PASS VM pairs counted as *S2*
* Test action 9: If *S2<N3*, the SUT is marked with FAIL. Otherwise marked with PASS.
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
A typical setting of *(N1, N2, N3, P1)* is *(5, 5, 5, 10)*.
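+
+The verdict logic reduces to a single comparison. A minimal sketch, with
+parameter names taken from the test actions above (the function itself is
+illustrative, not part of Bottlenecks)::
+
+    def stress_ping_verdict(s2: int, n3: int) -> str:
+        # S2: PASS VM pairs in the second round; N3: required minimum.
+        return "PASS" if s2 >= n3 else "FAIL"
+
+    # With the typical setting (N1, N2, N3, P1) = (5, 5, 5, 10):
+    assert stress_ping_verdict(5, 5) == "PASS"
+    assert stress_ping_verdict(4, 5) == "FAIL"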
diff --git a/docs/testing/user/testspecification/tempest_compute/index.rst b/docs/testing/user/testspecification/tempest_compute/index.rst
index 18a0019a..ee86ae77 100644
--- a/docs/testing/user/testspecification/tempest_compute/index.rst
+++ b/docs/testing/user/testspecification/tempest_compute/index.rst
@@ -2,9 +2,9 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) Ericsson AB
-===========================================
+==================================
Tempest Compute test specification
-===========================================
+==================================
Scope
@@ -29,30 +29,30 @@ These runtime operations include:
References
==========
-`Security Groups: <https://developer.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups>`_
+`Security Groups: <https://docs.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups>`_
- create security group
- delete security group
-`Networks: <https://developer.openstack.org/api-ref/networking/v2/index.html#networks>`_
+`Networks: <https://docs.openstack.org/api-ref/network/v2/index.html#networks>`_
- create network
- delete network
-`Routers and interface: <https://developer.openstack.org/api-ref/networking/v2/index.html#routers-routers>`_
+`Routers and interface: <https://docs.openstack.org/api-ref/network/v2/index.html#routers-routers>`_
- create router
- update router
- delete router
- add interface to router
-`Subnets: <https://developer.openstack.org/api-ref/networking/v2/index.html#subnets>`_
+`Subnets: <https://docs.openstack.org/api-ref/network/v2/index.html#subnets>`_
- create subnet
- update subnet
- delete subnet
-`Servers: <https://developer.openstack.org/api-ref/compute/>`_
+`Servers: <https://docs.openstack.org/api-ref/compute/>`_
- create keypair
- create server
@@ -60,13 +60,13 @@ References
- add/assign floating IP
- disassociate floating IP
-`Ports: <https://developer.openstack.org/api-ref/networking/v2/index.html#ports>`_
+`Ports: <https://docs.openstack.org/api-ref/network/v2/index.html#ports>`_
- create port
- update port
- delete port
-`Floating IPs: <https://developer.openstack.org/api-ref/networking/v2/index.html#floating-ips-floatingips>`_
+`Floating IPs: <https://docs.openstack.org/api-ref/network/v2/index.html#floating-ips-floatingips>`_
- create floating IP
- delete floating IP
@@ -86,7 +86,7 @@ The test area is structured in individual tests as listed below.
For detailed information on the individual steps and assertions performed
by the tests, review the Python source code accessible via the following links:
-All these test cases are included in the test case dovetail.tempest.compute of
+All these test cases are included in the test case functest.tempest.compute of
the OVP test suite.
@@ -110,8 +110,8 @@ by the tests, review the Python source code accessible via the following links:
- `Security Groups test <https://github.com/openstack/tempest/blob/master/tempest/api/compute/security_groups/test_security_groups.py#L23>`_
- tempest.api.compute.security_groups.test_security_groups.SecurityGroupsTestJSON.test_security_groups_create_list_delete
-- `Attach Interfaces test <https://github.com/openstack/tempest/blob/master/tempest/api/compute/servers/test_attach_interfaces.py#L32>`_
- - tempest.api.compute.servers.test_attach_interfaces.AttachInterfacesTestJSON.test_add_remove_fixed_ip
+- `Attach Interfaces test <https://github.com/openstack/tempest/blob/master/tempest/api/compute/servers/test_attach_interfaces.py#L347>`_
+ - tempest.api.compute.servers.test_attach_interfaces.AttachInterfacesUnderV243Test.test_add_remove_fixed_ip
- `Server Addresses test <https://github.com/openstack/tempest/blob/master/tempest/api/compute/servers/test_server_addresses.py#L21>`_
diff --git a/docs/testing/user/testspecification/tempest_identity_v3/index.rst b/docs/testing/user/testspecification/tempest_identity_v3/index.rst
index df5bdbb6..599427a3 100644
--- a/docs/testing/user/testspecification/tempest_identity_v3/index.rst
+++ b/docs/testing/user/testspecification/tempest_identity_v3/index.rst
@@ -2,9 +2,9 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) Ericsson AB
-===========================================
+======================================
Tempest Identity v3 test specification
-===========================================
+======================================
Scope
@@ -31,7 +31,7 @@ These runtime operations may include that create, list, verify and delete:
References
==========
-`Identity API v3.0 <https://developer.openstack.org/api-ref/identity/v3/index.html>`_
+`Identity API v3.0 <https://docs.openstack.org/api-ref/identity/v3/index.html>`_
System Under Test (SUT)
=======================
@@ -46,7 +46,7 @@ The test area is structured in individual tests as listed below.
For detailed information on the individual steps and assertions performed
by the tests, review the Python source code accessible via the following links:
-All these test cases are included in the test case dovetail.tempest.identity_v3 of
+All these test cases are included in the test case functest.tempest.identity_v3 of
the OVP test suite.
- `Create, Get, Update and Delete Credentials <https://github.com/openstack/tempest/blob/12.2.0/tempest/api/identity/admin/v3/test_credentials.py#L21>`_
diff --git a/docs/testing/user/testspecification/tempest_image/index.rst b/docs/testing/user/testspecification/tempest_image/index.rst
index da438e52..121b38e6 100644
--- a/docs/testing/user/testspecification/tempest_image/index.rst
+++ b/docs/testing/user/testspecification/tempest_image/index.rst
@@ -2,9 +2,9 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) Ericsson AB
-===========================================
+================================
Tempest Image test specification
-===========================================
+================================
Scope
@@ -17,7 +17,7 @@ network runtime operations functionality.
References
==========
-`Image Service API v2 <https://developer.openstack.org/api-ref/image/v2/index.html#images>`_
+`Image Service API v2 <https://docs.openstack.org/api-ref/image/v2/index.html>`_
System Under Test (SUT)
=======================
@@ -32,11 +32,11 @@ The test area is structured in individual tests as listed below.
For detailed information on the individual steps and assertions performed
by the tests, review the Python source code accessible via the following links:
-All these test cases are included in the test case dovetail.tempest.image of
+All these test cases are included in the test case functest.tempest.image of
the OVP test suite.
- `Register, Upload, Get Image and Get Image File API's <https://github.com/openstack/tempest/blob/18.0.0/tempest/api/image/v2/test_images.py#L32>`_
- tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_register_upload_get_image_file
- `List Versions <https://github.com/openstack/tempest/blob/18.0.0/tempest/api/image/v2/test_versions.py>`_
- - tempest.api.image.v2.test_versions.VersionsTest.test_list_versions \ No newline at end of file
+ - tempest.api.image.v2.test_versions.VersionsTest.test_list_versions
diff --git a/docs/testing/user/testspecification/tempest_ipv6/index.rst b/docs/testing/user/testspecification/tempest_ipv6/index.rst
index d78370c8..3da23c99 100644
--- a/docs/testing/user/testspecification/tempest_ipv6/index.rst
+++ b/docs/testing/user/testspecification/tempest_ipv6/index.rst
@@ -23,7 +23,7 @@ References
- upstream openstack API reference
- - http://developer.openstack.org/api-ref
+ - https://docs.openstack.org/api-ref/
- upstream openstack IPv6 reference
@@ -68,7 +68,7 @@ Test Descriptions
API Used and Reference
----------------------
-Networks: https://developer.openstack.org/api-ref/networking/v2/index.html#networks
+Networks: https://docs.openstack.org/api-ref/network/v2/index.html#networks
- show network details
- update network
@@ -77,7 +77,7 @@ Networks: https://developer.openstack.org/api-ref/networking/v2/index.html#netwo
- create network
- bulk create networks
-Subnets: https://developer.openstack.org/api-ref/networking/v2/index.html#subnets
+Subnets: https://docs.openstack.org/api-ref/network/v2/index.html#subnets
- list subnets
- create subnet
@@ -86,7 +86,7 @@ Subnets: https://developer.openstack.org/api-ref/networking/v2/index.html#subnet
- update subnet
- delete subnet
-Routers and interface: https://developer.openstack.org/api-ref/networking/v2/index.html#routers-routers
+Routers and interface: https://docs.openstack.org/api-ref/network/v2/index.html#routers-routers
- list routers
- create router
@@ -96,7 +96,7 @@ Routers and interface: https://developer.openstack.org/api-ref/networking/v2/ind
- add interface to router
- remove interface from router
-Ports: https://developer.openstack.org/api-ref/networking/v2/index.html#ports
+Ports: https://docs.openstack.org/api-ref/network/v2/index.html#ports
- show port details
- update port
@@ -105,7 +105,7 @@ Ports: https://developer.openstack.org/api-ref/networking/v2/index.html#ports
- create port
- bulk create ports
-Security groups: https://developer.openstack.org/api-ref/networking/v2/index.html#security-groups-security-groups
+Security groups: https://docs.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
- list security groups
- create security groups
@@ -113,14 +113,14 @@ Security groups: https://developer.openstack.org/api-ref/networking/v2/index.htm
- update security group
- delete security group
-Security groups rules: https://developer.openstack.org/api-ref/networking/v2/index.html#security-group-rules-security-group-rules
+Security groups rules: https://docs.openstack.org/api-ref/network/v2/index.html#security-group-rules-security-group-rules
- list security group rules
- create security group rule
- show security group rule
- delete security group rule
-Servers: https://developer.openstack.org/api-ref/compute/
+Servers: https://docs.openstack.org/api-ref/compute/
- list servers
- create server
diff --git a/docs/testing/user/testspecification/tempest_ipv6/ipv6_api.rst b/docs/testing/user/testspecification/tempest_ipv6/ipv6_api.rst
index 60a5633e..b3d2c9dc 100644
--- a/docs/testing/user/testspecification/tempest_ipv6/ipv6_api.rst
+++ b/docs/testing/user/testspecification/tempest_ipv6/ipv6_api.rst
@@ -9,7 +9,7 @@ Test Case 1 - Create and Delete Bulk Network, IPv6 Subnet and Port
Short name
----------
-dovetail.tempest.ipv6_api.bulk_network_subnet_port_create_delete
+functest.tempest.ipv6_api.bulk_network_subnet_port_create_delete
Use case specification
----------------------
@@ -61,7 +61,7 @@ Test execution
* Test action 18: List all networks, verifying the network ids are no longer present
* **Test assertion 8:** The two "id" parameters are not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use bulk create commands to create networks, IPv6 subnets and ports on
@@ -84,7 +84,7 @@ Test Case 2 - Create, Update and Delete an IPv6 Network and Subnet
Short name
-----------
-dovetail.tempest.ipv6_api.network_subnet_create_update_delete
+functest.tempest.ipv6_api.network_subnet_create_update_delete
Use case specification
----------------------
@@ -125,7 +125,7 @@ Test execution
* **Test assertion 5:** The network "id" is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to create, update, delete network, IPv6 subnet on the
@@ -148,7 +148,7 @@ Test Case 3 - Check External Network Visibility
Short name
-----------
-dovetail.tempest.ipv6_api.external_network_visibility
+functest.tempest.ipv6_api.external_network_visibility
Use case specification
----------------------
@@ -189,7 +189,7 @@ Test execution
* **Test assertion 4:** There is no subnet of the external network with the configured
public network id
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use list commands to list external networks, pre-configured
@@ -211,7 +211,7 @@ Test Case 4 - List IPv6 Networks and Subnets
Short name
-----------
-dovetail.tempest.ipv6_api.network_subnet_list
+functest.tempest.ipv6_api.network_subnet_list
Use case specification
----------------------
@@ -248,7 +248,7 @@ Test execution
* Test action 8: List all networks, verifying the network id is no longer present
* **Test assertion 4:** The network "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to use create commands to create network, IPv6 subnet, list
@@ -270,7 +270,7 @@ Test Case 5 - Show Details of an IPv6 Network and Subnet
Short name
----------
-dovetail.tempest.ipv6_api.network_subnet_show
+functest.tempest.ipv6_api.network_subnet_show
Use case specification
----------------------
@@ -308,7 +308,7 @@ Test execution
* Test action 8: List all networks, verifying the network id is no longer present
* **Test assertion 4:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use create commands to create network, IPv6 subnet and show
@@ -330,7 +330,7 @@ Test Case 6 - Create an IPv6 Port in Allowed Allocation Pools
Short name
----------
-dovetail.tempest.ipv6_api.port_create_in_allocation_pool
+functest.tempest.ipv6_api.port_create_in_allocation_pool
Use case specification
----------------------
@@ -373,7 +373,7 @@ Test execution
* Test action 12: List all networks, verifying the network id is no longer present
* **Test assertion 5:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use create commands to create an IPv6 subnet within allowed
@@ -395,7 +395,7 @@ Test Case 7 - Create an IPv6 Port with Empty Security Groups
Short name
-----------
-dovetail.tempest.ipv6_api.port_create_empty_security_group
+functest.tempest.ipv6_api.port_create_empty_security_group
Use case specification
----------------------
@@ -431,7 +431,7 @@ Test execution
* Test action 10: List all networks, verifying the network id is no longer present
* **Test assertion 4:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use create commands to create port with
@@ -452,7 +452,7 @@ Test Case 8 - Create, Update and Delete an IPv6 Port
Short name
----------
-dovetail.tempest.ipv6_api.port_create_update_delete
+functest.tempest.ipv6_api.port_create_update_delete
Use case specification
----------------------
@@ -489,7 +489,7 @@ Test execution
* Test action 9: List all networks, verifying the network id is no longer present
* **Test assertion 4:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to use create/update/delete commands to create/update/delete port
@@ -511,7 +511,7 @@ Test Case 9 - List IPv6 Ports
Short name
----------
-dovetail.tempest.ipv6_api.port_list
+functest.tempest.ipv6_api.port_list
Use case specification
----------------------
@@ -543,7 +543,7 @@ Test execution
* Test action 7: List all networks, verifying the network id is no longer present
* **Test assertion 3:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use list commands to list the networks and ports on
@@ -564,7 +564,7 @@ Test Case 10 - Show Key/Value Details of an IPv6 Port
Short name
----------
-dovetail.tempest.ipv6_api.port_show_details
+functest.tempest.ipv6_api.port_show_details
Use case specification
----------------------
@@ -602,7 +602,7 @@ Test execution
* Test action 8: List all networks, verifying the network id is no longer present
* **Test assertion 4:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use show commands to show port details on the SUT API.
@@ -625,7 +625,7 @@ Test Case 11 - Add Multiple Interfaces for an IPv6 Router
Short name
-----------
-dovetail.tempest.ipv6_api.router_add_multiple_interface
+functest.tempest.ipv6_api.router_add_multiple_interface
Use case specification
----------------------
@@ -667,7 +667,7 @@ Test execution
* **Test assertion 3:** The interfaces, router, IPv6 subnets and networks ids are not present in the lists
after deleting
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use bulk create commands to create networks, IPv6 subnets and ports on
@@ -690,7 +690,7 @@ Test Case 12 - Add and Remove an IPv6 Router Interface with port_id
Short name
----------
-dovetail.tempest.ipv6_api.router_interface_add_remove_with_port
+functest.tempest.ipv6_api.router_interface_add_remove_with_port
Use case specification
----------------------
@@ -728,7 +728,7 @@ Test execution
ones are not found in the list.
* **Test assertion 3:** interfaces, ports, routers, subnets and networks are not found in the lists after deleting
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use add/remove commands to add/remove router interface to the port,
@@ -750,7 +750,7 @@ Test Case 13 - Add and Remove an IPv6 Router Interface with subnet_id
Short name
----------
-dovetail.tempest.ipv6_api.router_interface_add_remove
+functest.tempest.ipv6_api.router_interface_add_remove
Use case specification
----------------------
@@ -794,7 +794,7 @@ Test execution
* Test action 13: List all networks, verifying the network id is no longer present
* **Test assertion 7:** The network "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to add and remove router interface with the subnet id on the
@@ -818,7 +818,7 @@ Test Case 14 - Create, Show, List, Update and Delete an IPv6 router
Short name
----------
-dovetail.tempest.ipv6_api.router_create_show_list_update_delete
+functest.tempest.ipv6_api.router_create_show_list_update_delete
Use case specification
----------------------
@@ -856,7 +856,7 @@ Test execution
* Test action 7: List all routers, verifying the router id is no longer present
* **Test assertion 8:** The "id" parameter is not present in the router list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to create, show, list, update and delete router on
@@ -882,7 +882,7 @@ Test Case 15 - Create, List, Update, Show and Delete an IPv6 security group
Short name
----------
-dovetail.tempest.ipv6_api.security_group_create_list_update_show_delete
+functest.tempest.ipv6_api.security_group_create_list_update_show_delete
Use case specification
----------------------
@@ -917,7 +917,7 @@ Test execution
* Test action 7: List all security groups, verifying the security group's id is no longer present
* **Test assertion 6:** The "id" parameter is not present in the security group list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to create list, update, show and delete security group on
@@ -940,7 +940,7 @@ Test Case 16 - Create, Show and Delete IPv6 security group rule
Short name
----------
-dovetail.tempest.ipv6_api.security_group_rule_create_show_delete
+functest.tempest.ipv6_api.security_group_rule_create_show_delete
Use case specification
----------------------
@@ -977,7 +977,7 @@ Test execution
* Test action 8: List all security groups, verifying the security group's id is no longer present
* **Test assertion 4:** The security group "id" parameter is not present in the list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to create, show, list and delete security group rules on
@@ -998,7 +998,7 @@ Test Case 17 - List IPv6 Security Groups
Short name
----------
-dovetail.tempest.ipv6_api.security_group_list
+functest.tempest.ipv6_api.security_group_list
Use case specification
----------------------
@@ -1023,7 +1023,7 @@ Test execution
if the default security group exists
* **Test assertion 1:** The default security group is in the list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to list security groups on the SUT API.
diff --git a/docs/testing/user/testspecification/tempest_ipv6/ipv6_scenario.rst b/docs/testing/user/testspecification/tempest_ipv6/ipv6_scenario.rst
index f3a279f0..5871321d 100644
--- a/docs/testing/user/testspecification/tempest_ipv6/ipv6_scenario.rst
+++ b/docs/testing/user/testspecification/tempest_ipv6/ipv6_scenario.rst
@@ -9,7 +9,7 @@ Test Case 1 - IPv6 Address Assignment - Dual Stack, SLAAC, DHCPv6 Stateless
Short name
----------
-dovetail.tempest.ipv6_scenario.dhcpv6_stateless
+functest.tempest.ipv6_scenario.dhcpv6_stateless
Use case specification
----------------------
@@ -60,7 +60,7 @@ Test execution
* Test action 14: List all networks, verifying the id is no longer present
* **Test assertion 6:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to assign IPv6 addresses in ipv6_ra_mode
@@ -84,7 +84,7 @@ Test Case 2 - IPv6 Address Assignment - Dual Net, Dual Stack, SLAAC, DHCPv6 Stat
Short name
----------
-dovetail.tempest.ipv6_scenario.dualnet_dhcpv6_stateless
+functest.tempest.ipv6_scenario.dualnet_dhcpv6_stateless
Use case specification
----------------------
@@ -138,7 +138,7 @@ Test execution
* Test action 16: List all networks, verifying the ids are no longer present
* **Test assertion 6:** The two "id" parameters are not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to assign IPv6 addresses in ipv6_ra_mode 'dhcpv6_stateless'
@@ -163,7 +163,7 @@ Test Case 3 - IPv6 Address Assignment - Multiple Prefixes, Dual Stack, SLAAC, DH
Short name
----------
-dovetail.tempest.ipv6_scenario.multiple_prefixes_dhcpv6_stateless
+functest.tempest.ipv6_scenario.multiple_prefixes_dhcpv6_stateless
Use case specification
----------------------
@@ -216,7 +216,7 @@ Test execution
* Test action 14: List all networks, verifying the id is no longer present
* **Test assertion 6:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to assign IPv6 addresses in ipv6_ra_mode 'dhcpv6_stateless'
@@ -241,7 +241,7 @@ Test Case 4 - IPv6 Address Assignment - Dual Net, Multiple Prefixes, Dual Stack,
Short name
----------
-dovetail.tempest.ipv6_scenario.dualnet_multiple_prefixes_dhcpv6_stateless
+functest.tempest.ipv6_scenario.dualnet_multiple_prefixes_dhcpv6_stateless
Use case specification
----------------------
@@ -296,7 +296,7 @@ Test execution
* Test action 16: List all networks, verifying the ids are no longer present
* **Test assertion 6:** The two "id" parameters are not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to assign IPv6 addresses in ipv6_ra_mode 'dhcpv6_stateless'
@@ -321,7 +321,7 @@ Test Case 5 - IPv6 Address Assignment - Dual Stack, SLAAC
Short name
----------
-dovetail.tempest.ipv6_scenario.slaac
+functest.tempest.ipv6_scenario.slaac
Use case specification
----------------------
@@ -371,7 +371,7 @@ Test execution
* Test action 14: List all networks, verifying the id is no longer present
* **Test assertion 6:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to assign IPv6 addresses in ipv6_ra_mode 'slaac'
@@ -395,7 +395,7 @@ Test Case 6 - IPv6 Address Assignment - Dual Net, Dual Stack, SLAAC
Short name
----------
-dovetail.tempest.ipv6_scenario.dualnet_slaac
+functest.tempest.ipv6_scenario.dualnet_slaac
Use case specification
----------------------
@@ -448,7 +448,7 @@ Test execution
* Test action 16: List all networks, verifying the ids are no longer present
* **Test assertion 6:** The two "id" parameters are not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to assign IPv6 addresses in ipv6_ra_mode 'slaac'
@@ -473,7 +473,7 @@ Test Case 7 - IPv6 Address Assignment - Multiple Prefixes, Dual Stack, SLAAC
Short name
----------
-dovetail.tempest.ipv6_scenario.multiple_prefixes_slaac
+functest.tempest.ipv6_scenario.multiple_prefixes_slaac
Use case specification
----------------------
@@ -524,7 +524,7 @@ Test execution
* Test action 14: List all networks, verifying the id is no longer present
* **Test assertion 6:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to assign IPv6 addresses in ipv6_ra_mode 'slaac'
@@ -549,7 +549,7 @@ Test Case 8 - IPv6 Address Assignment - Dual Net, Dual Stack, Multiple Prefixes,
Short name
----------
-dovetail.tempest.ipv6_scenario.dualnet_multiple_prefixes_slaac
+functest.tempest.ipv6_scenario.dualnet_multiple_prefixes_slaac
Use case specification
----------------------
@@ -602,7 +602,7 @@ Test execution
* Test action 16: List all networks, verifying the ids are no longer present
* **Test assertion 6:** The two "id" parameters are not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to assign IPv6 addresses in ipv6_ra_mode 'slaac'
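+
+All of these scenarios hinge on how the IPv6 subnet is created; a minimal sketch of
+a dual-stack setup with openstacksdk follows, where the cloud name and cidrs are
+placeholders and the mode pair ('slaac' or 'dhcpv6-stateless') selects the
+address-assignment method under test:
+
+.. code-block:: python
+
+   import openstack
+
+   conn = openstack.connect(cloud="mycloud")   # placeholder cloud name
+   net = conn.network.create_network(name="ovp-ipv6-net")
+
+   # IPv4 subnet for the dual-stack part of the scenario.
+   conn.network.create_subnet(network_id=net.id, ip_version=4,
+                              cidr="10.200.0.0/24")
+
+   # IPv6 subnet; ipv6_ra_mode/ipv6_address_mode select SLAAC or
+   # DHCPv6 stateless address assignment.
+   conn.network.create_subnet(network_id=net.id, ip_version=6,
+                              cidr="2001:db8:0:1::/64",
+                              ipv6_ra_mode="dhcpv6-stateless",
+                              ipv6_address_mode="dhcpv6-stateless")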
diff --git a/docs/testing/user/testspecification/tempest_multi_node_scheduling/index.rst b/docs/testing/user/testspecification/tempest_multi_node_scheduling/index.rst
index 92c7e856..f414de61 100644
--- a/docs/testing/user/testspecification/tempest_multi_node_scheduling/index.rst
+++ b/docs/testing/user/testspecification/tempest_multi_node_scheduling/index.rst
@@ -53,7 +53,7 @@ on multiple nodes. Each test case is able to run independently, i.e. irrelevant
the state created by a previous test. Specifically, every test performs clean-up
operations which return the system to the same state as before the test.
-All these test cases are included in the test case dovetail.tempest.multi_node_scheduling of
+All these test cases are included in the test case functest.tempest.multi_node_scheduling of
OVP test suite.
Test Descriptions
@@ -63,28 +63,28 @@ Test Descriptions
API Used and Reference
----------------------
-Security Groups: https://developer.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
+Security Groups: https://docs.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
- create security group
- delete security group
-Networks: https://developer.openstack.org/api-ref/networking/v2/index.html#networks
+Networks: https://docs.openstack.org/api-ref/network/v2/index.html#networks
- create network
- delete network
-Routers and interface: https://developer.openstack.org/api-ref/networking/v2/index.html#routers-routers
+Routers and interface: https://docs.openstack.org/api-ref/network/v2/index.html#routers-routers
- create router
- delete router
- add interface to router
-Subnets: https://developer.openstack.org/api-ref/networking/v2/index.html#subnets
+Subnets: https://docs.openstack.org/api-ref/network/v2/index.html#subnets
- create subnet
- delete subnet
-Servers: https://developer.openstack.org/api-ref/compute/
+Servers: https://docs.openstack.org/api-ref/compute/
- create keypair
- create server
@@ -96,17 +96,17 @@ Servers: https://developer.openstack.org/api-ref/compute/
- list server groups
- show server group details
-Ports: https://developer.openstack.org/api-ref/networking/v2/index.html#ports
+Ports: https://docs.openstack.org/api-ref/network/v2/index.html#ports
- create port
- delete port
-Floating IPs: https://developer.openstack.org/api-ref/networking/v2/index.html#floating-ips-floatingips
+Floating IPs: https://docs.openstack.org/api-ref/network/v2/index.html#floating-ips-floatingips
- create floating IP
- delete floating IP
-Availability zone: https://developer.openstack.org/api-ref/compute/
+Availability zone: https://docs.openstack.org/api-ref/compute/
- get availability zone
@@ -145,7 +145,7 @@ Test execution
that every server ended up on a different host
* Test action 6: Delete the created servers
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of VM resource scheduling.
@@ -190,7 +190,7 @@ Test execution
* Test action 5: List all server groups
* **Test assertion 4:** SERG1 and SERG2 are not in the list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of creating and deleting server groups with the same name and policy.
@@ -233,7 +233,7 @@ Test execution
* Test action 3: Delete SERG1 and list all server groups
* **Test assertion 3:** SERG1 is not in the list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of creating and deleting server group with affinity policy.
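+
+For illustration, creating and deleting a server group with a given policy is a small
+API exchange; a rough sketch against the compute API, where the endpoint and token
+are placeholders (newer microversions, 2.64+, use a single "policy" field instead):
+
+.. code-block:: python
+
+   import requests
+
+   NOVA = "http://controller:8774/v2.1"    # placeholder endpoint
+   HEADERS = {"X-Auth-Token": "gAAAA..."}  # placeholder token
+
+   # Create a server group with the affinity policy (pre-2.64 request format).
+   body = {"server_group": {"name": "ovp-serg", "policies": ["affinity"]}}
+   serg = requests.post(f"{NOVA}/os-server-groups", json=body,
+                        headers=HEADERS).json()["server_group"]
+
+   # List server groups, confirm the new group is present, then delete it.
+   groups = requests.get(f"{NOVA}/os-server-groups", headers=HEADERS).json()
+   assert any(g["id"] == serg["id"] for g in groups["server_groups"])
+   requests.delete(f"{NOVA}/os-server-groups/{serg['id']}", headers=HEADERS)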
@@ -275,7 +275,7 @@ Test execution
* Test action 3: Delete SERG1 and list all server groups
* **Test assertion 3:** SERG1 is not in the list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of creating and deleting server group with anti-affinity policy.
@@ -316,7 +316,7 @@ Test execution
* **Test assertion 1:** SERG1 is in the list
* Test action 4: Delete SERG1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of listing server groups.
@@ -358,7 +358,7 @@ Test execution
* **Test assertion 1:** All values in D1 are the same as the values in D2
* Test action 4: Delete SERG1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of showing server group details.
diff --git a/docs/testing/user/testspecification/tempest_network_api/index.rst b/docs/testing/user/testspecification/tempest_network_api/index.rst
index ccbe8e0a..4fc47208 100644
--- a/docs/testing/user/testspecification/tempest_network_api/index.rst
+++ b/docs/testing/user/testspecification/tempest_network_api/index.rst
@@ -2,9 +2,9 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) Ericsson AB
-===========================================
+======================================
Tempest Network API test specification
-===========================================
+======================================
Scope
@@ -29,42 +29,42 @@ These runtime operations may include that create, list, verify or delete:
References
==========
-`Networks: <https://developer.openstack.org/api-ref/network/v2/#networks>`_
+`Networks: <https://docs.openstack.org/api-ref/network/v2/#networks>`_
- create network
- delete network
-`Routers and interface: <https://developer.openstack.org/api-ref/networking/v2/index.html#routers-routers>`_
+`Routers and interface: <https://docs.openstack.org/api-ref/network/v2/index.html#routers-routers>`_
- create router
- update router
- delete router
- add interface to router
-`Subnets: <https://developer.openstack.org/api-ref/networking/v2/index.html#subnets>`_
+`Subnets: <https://docs.openstack.org/api-ref/network/v2/index.html#subnets>`_
- create subnet
- update subnet
- delete subnet
-`Subnetpools: <https://developer.openstack.org/api-ref/network/v2/#subnet-pools-extension-subnetpools>`_
+`Subnetpools: <https://docs.openstack.org/api-ref/network/v2/#subnet-pools-extension-subnetpools>`_
- create subnetpool
- update subnetpool
- delete subnetpool
-`Ports: <https://developer.openstack.org/api-ref/networking/v2/index.html#ports>`_
+`Ports: <https://docs.openstack.org/api-ref/network/v2/index.html#ports>`_
- create port
- update port
- delete port
-`Floating IPs: <https://developer.openstack.org/api-ref/networking/v2/index.html#floating-ips-floatingips>`_
+`Floating IPs: <https://docs.openstack.org/api-ref/network/v2/index.html#floating-ips-floatingips>`_
- create floating IP
- delete floating IP
-`Api Versions <https://developer.openstack.org/api-ref/network/v2/#api-versions>`_
+`Api Versions <https://docs.openstack.org/api-ref/network/v2/#api-versions>`_
- list version
- show version
@@ -82,7 +82,7 @@ The test area is structured in individual tests as listed below.
For detailed information on the individual steps and assertions performed
by the tests, review the Python source code accessible via the following links:
-All these test cases are included in the test case dovetail.tempest.network of
+All these test cases are included in the test case functest.tempest.network of
OVP test suite.
@@ -127,4 +127,4 @@ OVP test suite.
- tempest.api.network.test_subnetpools_extensions.SubnetPoolsTestJSON.test_create_list_show_update_delete_subnetpools
`API Version Resources test <https://github.com/openstack/tempest/blob/master/tempest/api/network/test_versions.py>`_
- - tempest.api.network.test_versions.NetworksApiDiscovery.test_api_version_resources \ No newline at end of file
+ - tempest.api.network.test_versions.NetworksApiDiscovery.test_api_version_resources
diff --git a/docs/testing/user/testspecification/tempest_network_scenario/index.rst b/docs/testing/user/testspecification/tempest_network_scenario/index.rst
index 6c172474..feee105d 100644
--- a/docs/testing/user/testspecification/tempest_network_scenario/index.rst
+++ b/docs/testing/user/testspecification/tempest_network_scenario/index.rst
@@ -58,7 +58,7 @@ test case is able to run independently, i.e. irrelevant of the state created by
a previous test. Specifically, every test performs clean-up operations which
return the system to the same state as before the test.
-All these test cases are included in the test case dovetail.tempest.network_scenario of
+All these test cases are included in the test case functest.tempest.network_scenario of
OVP test suite.
Test Descriptions
@@ -68,30 +68,30 @@ Test Descriptions
API Used and Reference
----------------------
-Security Groups: https://developer.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
+Security Groups: https://docs.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
- create security group
- delete security group
-Networks: https://developer.openstack.org/api-ref/networking/v2/index.html#networks
+Networks: https://docs.openstack.org/api-ref/network/v2/index.html#networks
- create network
- delete network
-Routers and interface: https://developer.openstack.org/api-ref/networking/v2/index.html#routers-routers
+Routers and interface: https://docs.openstack.org/api-ref/network/v2/index.html#routers-routers
- create router
- update router
- delete router
- add interface to router
-Subnets: https://developer.openstack.org/api-ref/networking/v2/index.html#subnets
+Subnets: https://docs.openstack.org/api-ref/network/v2/index.html#subnets
- create subnet
- update subnet
- delete subnet
-Servers: https://developer.openstack.org/api-ref/compute/
+Servers: https://docs.openstack.org/api-ref/compute/
- create keypair
- create server
@@ -99,13 +99,13 @@ Servers: https://developer.openstack.org/api-ref/compute/
- add/assign floating IP
- disassociate floating IP
-Ports: https://developer.openstack.org/api-ref/networking/v2/index.html#ports
+Ports: https://docs.openstack.org/api-ref/network/v2/index.html#ports
- create port
- update port
- delete port
-Floating IPs: https://developer.openstack.org/api-ref/networking/v2/index.html#floating-ips-floatingips
+Floating IPs: https://docs.openstack.org/api-ref/network/v2/index.html#floating-ips-floatingips
- create floating IP
- delete floating IP
@@ -149,7 +149,7 @@ Test execution
* **Test assertion 5:** Ping FIP1 and SSH to VM2 via FIP1 successfully
* Test action 8: Delete SG1, NET1, SUBNET1, R1, VM1, VM2 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of basic network operations.
@@ -220,7 +220,7 @@ Test execution
* **Test assertion 2:** Ping NET2's internal gateway successfully
* Test action 8: Delete SG1, NET1, NET2, SUBNET1, SUBNET2, R1, NIC2, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of adding network to an active VM.
@@ -278,7 +278,7 @@ Test execution
retrieve the VM1's configured dns and verify it has been successfully updated
* Test action 7: Delete SG1, NET1, SUBNET1, R1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of updating subnet's configurations.
@@ -336,7 +336,7 @@ Test execution
* **Test assertion 6:** Ping FIP1 via SSHCLNT1 successfully
* Test action 10: Delete SG1, NET1, SUBNET1, R1, SSHCLNT1, VM1, VM2 and FIP1, FIP2
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the VM public and project connectivity status by changing VM port
@@ -395,7 +395,7 @@ Test execution
* **Test assertion 3:** Ping FIP1 and SSH to VM1 with FIP1 successfully
* Test action 8: Delete SG1, NET1, SUBNET1, R1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the router public connectivity status by changing
diff --git a/docs/testing/user/testspecification/tempest_network_security/index.rst b/docs/testing/user/testspecification/tempest_network_security/index.rst
index 2a785289..6934be1f 100644
--- a/docs/testing/user/testspecification/tempest_network_security/index.rst
+++ b/docs/testing/user/testspecification/tempest_network_security/index.rst
@@ -53,7 +53,7 @@ port security. Each test case is able to run independently, i.e. irrelevant of
the state created by a previous test. Specifically, every test performs clean-up
operations which return the system to the same state as before the test.
-All these test cases are included in the test case dovetail.tempest.network_security of
+All these test cases are included in the test case functest.tempest.network_security of
OVP test suite.
Test Descriptions
@@ -63,12 +63,12 @@ Test Descriptions
API Used and Reference
----------------------
-Security Groups: https://developer.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
+Security Groups: https://docs.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
- create security group
- delete security group
-Networks: https://developer.openstack.org/api-ref/networking/v2/index.html#networks
+Networks: https://docs.openstack.org/api-ref/network/v2/index.html#networks
- create network
- delete network
@@ -76,27 +76,27 @@ Networks: https://developer.openstack.org/api-ref/networking/v2/index.html#netwo
- create floating ip
- delete floating ip
-Routers and interface: https://developer.openstack.org/api-ref/networking/v2/index.html#routers-routers
+Routers and interface: https://docs.openstack.org/api-ref/network/v2/index.html#routers-routers
- create router
- delete router
- list routers
- add interface to router
-Subnets: https://developer.openstack.org/api-ref/networking/v2/index.html#subnets
+Subnets: https://docs.openstack.org/api-ref/network/v2/index.html#subnets
- create subnet
- list subnets
- delete subnet
-Servers: https://developer.openstack.org/api-ref/compute/
+Servers: https://docs.openstack.org/api-ref/compute/
- create keypair
- create server
- delete server
- add/assign floating ip
-Ports: https://developer.openstack.org/api-ref/networking/v2/index.html#ports
+Ports: https://docs.openstack.org/api-ref/network/v2/index.html#ports
- update port
- list ports
@@ -145,7 +145,7 @@ Test execution
* **Test assertion 2:** The ping operation is successful
* Test action 16: Delete SG1, NET1, NET2, SUBNET1, SUBNET2, R1, VM1, VM2 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to prevent MAC spoofing by using port security.
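+
+The port-security knob exercised here is a single Neutron port attribute; a rough
+sketch of toggling it with openstacksdk, where the cloud name and device id are
+placeholders rather than values from this test:
+
+.. code-block:: python
+
+   import openstack
+
+   conn = openstack.connect(cloud="mycloud")          # placeholder cloud name
+   port = next(conn.network.ports(device_id="VM1"))   # placeholder device id
+
+   # With port security enabled (the default), Neutron applies anti-spoofing
+   # rules and security groups to the port, so forged-MAC traffic is dropped.
+   conn.network.update_port(port, is_port_security_enabled=True)
+
+   # Disabling it is only allowed once no security groups are attached; the
+   # anti-spoofing filters are then removed from the port.
+   conn.network.update_port(port, security_group_ids=[],
+                            is_port_security_enabled=False)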
@@ -206,7 +206,7 @@ Test execution
* Test action 14: Delete SG1, SG2, SG3, SG4, NET1, NET2, SUBNET1, SUBNET2, R1, R2,
VM1, VM2, FIP1 and FIP2
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability of the security group to filter packets cross tenant.
@@ -263,7 +263,7 @@ Test execution
* **Test assertion 2:** The ping operation is successful
* Test action 11: Delete SG1, SG2, NET1, SUBNET1, R1, VM1, VM2 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability of the security group to filter packets in one tenant.
@@ -319,7 +319,7 @@ Test execution
* **Test assertion 3:** Can SSH to VM1 successfully
* Test action 11: Delete SG1, SG2, NET1, SUBNET1, R1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability of multiple security groups to filter packets.
@@ -379,7 +379,7 @@ Test execution
* **Test assertion 2:** The ping operation is successful
* Test action 12: Delete SG1, SG2, NET1, SUBNET1, R1, VM1, VM2 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability of port security to disable security group.
@@ -436,7 +436,7 @@ Test execution
* **Test assertion 2:** The ping operation is successful
* Test action 13: Delete SG1, SG2, SG3, NET1, SUBNET1, R1, VM1, VM2 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to update port with a new security group.
diff --git a/docs/testing/user/testspecification/tempest_osinterop/index.rst b/docs/testing/user/testspecification/tempest_osinterop/index.rst
index 6773275e..d2a54e86 100644
--- a/docs/testing/user/testspecification/tempest_osinterop/index.rst
+++ b/docs/testing/user/testspecification/tempest_osinterop/index.rst
@@ -3,11 +3,11 @@
.. (c) Huawei Technologies Co.,Ltd and others
=============================================
-OpenStack Interoperability test specification
+OpenStack Interoperability Test Specification
=============================================
The test cases documented here are the API test cases in the OpenStack
-Interop guideline 2017.09 as implemented by the RefStack client.
+Interop guideline 2018.11 as implemented by the RefStack client.
References
================
@@ -16,9 +16,9 @@ References
- https://wiki.openstack.org/wiki/Governance/InteropWG
-- OpenStack Interoperability guidelines (version 2017.09)
+- OpenStack Interoperability guidelines (version 2018.11)
- - https://github.com/openstack/interop/blob/master/2017.09.json
+ - https://github.com/openstack/interop/blob/master/2018.11.json
- Refstack client
diff --git a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_compute.rst b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_compute.rst
index 601d1054..3e663d98 100644
--- a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_compute.rst
+++ b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_compute.rst
@@ -11,7 +11,7 @@ Scope
The VIM compute operations test area evaluates the ability of the system under
test to support VIM compute operations. The test cases documented here are the
-compute API test cases in the OpenStack Interop guideline 2017.09 as implemented
+compute API test cases in the OpenStack Interop guideline 2018.11 as implemented
by the RefStack client. These test cases will evaluate basic OpenStack (as a VIM)
compute operations, including:
@@ -22,7 +22,7 @@ compute operations, including:
- Basic server operations
- Volume management operations
-Definitions and abbreviations
+Definitions and Abbreviations
=============================
The following terms and abbreviations are used in conjunction with this test area
@@ -30,14 +30,15 @@ The following terms and abbreviations are used in conjunction with this test are
- API - Application Programming Interface
- NFVi - Network Functions Virtualization infrastructure
- SUT - System Under Test
-- UUID - Universally Unique Identifier
+- UUID - Universally Unique IDentifier
- VIM - Virtual Infrastructure Manager
- VM - Virtual Machine
System Under Test (SUT)
=======================
-The system under test is assumed to be the NFVi and VIM deployed with a Pharos compliant infrastructure.
+The system under test is assumed to be the NFVi and VIM deployed with a Pharos
+compliant infrastructure.
Test Area Structure
====================
@@ -50,7 +51,7 @@ the same state as before the test.
For brevity, the test cases in this test area are summarized together based on
the operations they are testing.
-All these test cases are included in the test case dovetail.tempest.osinterop of
+All these test cases are included in the test case functest.tempest.osinterop of
OVP test suite.
Test Descriptions
@@ -60,7 +61,7 @@ Test Descriptions
API Used and Reference
----------------------
-Servers: https://developer.openstack.org/api-ref/compute/
+Servers: https://docs.openstack.org/api-ref/compute/
- create server
- delete server
@@ -79,7 +80,7 @@ Servers: https://developer.openstack.org/api-ref/compute/
- create keypair
- delete keypair
-Block storage: https://developer.openstack.org/api-ref/block-storage
+Block storage: https://docs.openstack.org/api-ref/block-storage/
- create volume
- delete volume
@@ -759,3 +760,48 @@ Post conditions
---------------
N/A
+
+--------------------------------------------------------
+Test Case 10 - Keypair operations within the Compute API
+--------------------------------------------------------
+
+Test case specification
+-----------------------
+
+This test case evaluates the Compute API's ability to create a keypair with a
+specified type; the reference is:
+
+tempest.api.compute.keypairs.test_keypairs_v22.KeyPairsV22TestJSON.test_keypairsv22_create_list_show_with_type
+
+Test preconditions
+------------------
+
+* Compute server extension API
+
+Basic test flow execution description and pass/fail criteria
+------------------------------------------------------------
+
+Test execution
+''''''''''''''
+
+* Test action 1: Create a keypair with type 'x509' and a random name
+* **Test assertion 1:** The keypair type received in the response body is equal to 'x509'
+* Test action 2: Show the details of this created keypair
+* **Test assertion 2:** The keypair type received in the response body is equal to 'x509'
+* Test action 3: List all keypairs and find the one with the same name as given in test action 1
+* **Test assertion 3:** The keypair type of this keypair is equal to 'x509'
+
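+For illustration, a minimal sketch of the underlying API calls, where the endpoint
+and token are placeholders; keypair type support requires compute API
+microversion 2.2 or later:
+
+.. code-block:: python
+
+   import requests
+
+   NOVA = "http://controller:8774/v2.1"  # placeholder endpoint
+   HEADERS = {
+       "X-Auth-Token": "gAAAA...",       # placeholder token
+       # The keypair 'type' field is only honoured from microversion 2.2 on.
+       "X-OpenStack-Nova-API-Version": "2.2",
+   }
+
+   # Create a keypair of type 'x509' (test action 1).
+   body = {"keypair": {"name": "ovp-demo-kp", "type": "x509"}}
+   created = requests.post(f"{NOVA}/os-keypairs", json=body,
+                           headers=HEADERS).json()["keypair"]
+   assert created["type"] == "x509"
+
+   # Show it again and verify the type is preserved (test action 2).
+   shown = requests.get(f"{NOVA}/os-keypairs/ovp-demo-kp",
+                        headers=HEADERS).json()["keypair"]
+   assert shown["type"] == "x509"
+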
+Pass / Fail criteria
+''''''''''''''''''''
+
+This test evaluates the functionality of keypair operations within the Compute API.
+Specifically, the test verifies that:
+
+* A keypair can be created with a specified keypair type.
+
+In order to pass this test, all test assertions listed in the test execution above need to pass.
+
+Post conditions
+---------------
+
+N/A
diff --git a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_identity.rst b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_identity.rst
index 6c0d23b7..34a71168 100644
--- a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_identity.rst
+++ b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_identity.rst
@@ -14,7 +14,7 @@ support VIM identity operations. The tests in this area will evaluate
API discovery operations within the Identity v3 API, auth operations within
the Identity API.
-Definitions and abbreviations
+Definitions and Abbreviations
=============================
The following terms and abbreviations are used in conjunction with this test area
@@ -34,7 +34,7 @@ Test Area Structure
The test area is structured based on VIM identity operations. Each test case
is able to run independently, i.e. irrelevant of the state created by a previous test.
-All these test cases are included in the test case dovetail.tempest.osinterop of
+All these test cases are included in the test case functest.tempest.osinterop of
OVP test suite.
Dependency Description
@@ -42,15 +42,15 @@ Dependency Description
The VIM identity operations test cases are a part of the OpenStack
interoperability tempest test cases. For Fraser based dovetail release, the
-OpenStack interoperability guidelines (version 2017.09) is adopted, which is
-valid for Mitaka, Newton, Ocata and Pike releases of Openstack.
+OpenStack interoperability guidelines (version 2018.11) are adopted, which are
+valid for the Ocata, Pike, Queens and Rocky releases of OpenStack.
Test Descriptions
=================
-----------------------------------------------------
-API discovery operations within the Identity v3 API
-----------------------------------------------------
+-----------------------------------------------------------------
+Test Case 1 - API discovery operations within the Identity v3 API
+-----------------------------------------------------------------
Use case specification
-----------------------
@@ -79,7 +79,7 @@ Test execution
'stable', 'experimental', 'supported', 'deprecated' are all of the identity api 'status'
values.
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test case passes if all test action steps execute successfully and all assertions
@@ -91,14 +91,15 @@ Post conditions
None
-------------------------------------------
-Auth operations within the Identity API
-------------------------------------------
+-----------------------------------------------------
+Test Case 2 - Auth operations within the Identity API
+-----------------------------------------------------
Use case specification
-----------------------
tempest.api.identity.v3.test_tokens.TokensV3Test.test_create_token
+tempest.api.identity.v3.test_tokens.TokensV3Test.test_validate_token
Test preconditions
-------------------
@@ -119,8 +120,16 @@ Test execution
passes if it is equal to the user_name which is used to get token.
* Test action 4: Get the method in getting token response message, the test
passes if it is equal to the password which is used to get token.
+* Test action 5: Get the token by system credentials and show the token,
+ the test passes if the response bodies of the get and show operations are the same.
+* Test action 6: Get the user_id in showing token response message, the test
+ passes if it is equal to the user_id which is used to get token.
+* Test action 7: Get the username in showing token response message, the test
+ passes if it is equal to the username which is used to get token.
+* Test action 8: Delete this token by non-admin compute client, the test passes
+ if it raises a NotFound exception.
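+
+As an illustration of the token workflow these actions exercise, a rough sketch
+against the Keystone v3 REST API, where the endpoint and credentials are
+placeholders:
+
+.. code-block:: python
+
+   import requests
+
+   KEYSTONE = "http://controller:5000/v3"  # placeholder endpoint
+
+   # Issue a token with the password method (test actions 1-4).
+   auth = {"auth": {"identity": {"methods": ["password"], "password": {
+       "user": {"name": "demo", "domain": {"id": "default"},
+                "password": "secret"}}}}}
+   resp = requests.post(f"{KEYSTONE}/auth/tokens", json=auth)
+   token = resp.headers["X-Subject-Token"]
+   assert resp.json()["token"]["user"]["name"] == "demo"
+
+   # Show/validate the token (test actions 5-7).
+   shown = requests.get(f"{KEYSTONE}/auth/tokens",
+                        headers={"X-Auth-Token": token,
+                                 "X-Subject-Token": token})
+   assert shown.json()["token"]["user"]["name"] == "demo"
+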
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test case passes if all test action steps execute successfully and all assertions
@@ -132,3 +141,39 @@ Post conditions
None
+--------------------------------------------------------
+Test Case 3 - Catalog operations within the Identity API
+--------------------------------------------------------
+
+Use case specification
+-----------------------
+
+tempest.api.identity.v3.test_catalog.IdentityCatalogTest.test_catalog_standardization
+
+Test preconditions
+-------------------
+
+None
+
+Basic test flow execution description and pass/fail criteria
+------------------------------------------------------------
+
+Test execution
+'''''''''''''''
+
+* Test action 1: Show all catalogs using the non-admin catalog client, the test passes
+  if the catalog types returned in the show response message equal the
+  standard service values. The standard catalog types for 'keystone', 'nova', 'glance' and
+  'swift' are 'identity', 'compute', 'image' and 'object-store' respectively.
+
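+A rough sketch of the same check, assuming a token has already been issued and
+using placeholder endpoint values:
+
+.. code-block:: python
+
+   import requests
+
+   KEYSTONE = "http://controller:5000/v3"  # placeholder endpoint
+   TOKEN = "gAAAA..."                      # placeholder token
+
+   # GET /v3/auth/catalog returns the service catalog for the given token.
+   resp = requests.get(f"{KEYSTONE}/auth/catalog",
+                       headers={"X-Auth-Token": TOKEN})
+   types = {svc["name"]: svc["type"] for svc in resp.json()["catalog"]}
+
+   expected = {"keystone": "identity", "nova": "compute",
+               "glance": "image", "swift": "object-store"}
+   for name, svc_type in expected.items():
+       if name in types:  # a given service may not be deployed
+           assert types[name] == svc_type
+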
+Pass / Fail criteria
+'''''''''''''''''''''
+
+This test case passes if all test action steps execute successfully and all assertions
+are affirmed. If any test step fails to execute successfully or any of the assertions
+is not met, the test case fails.
+
+Post conditions
+---------------
+
+None
diff --git a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_image.rst b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_image.rst
index 96a98631..b6cdb77f 100644
--- a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_image.rst
+++ b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_image.rst
@@ -11,11 +11,11 @@ Scope
The VIM image test area evaluates the ability of the system under test to support
VIM image operations. The test cases documented here are the Image API test cases
-in the Openstack Interop guideline 2017.09 as implemented by the Refstack client.
+in the Openstack Interop guideline 2018.11 as implemented by the Refstack client.
These test cases will evaluate basic Openstack (as a VIM) image operations including
image creation, image list, image update and image deletion capabilities using Glance v2 API.
-Definitions and abbreviations
+Definitions and Abbreviations
=============================
The following terms and abbreviations are used in conjunction with this test area
@@ -40,7 +40,7 @@ to run independently, i.e. irrelevant of the state created by a previous test.
For brevity, the test cases in this test area are summarized together based on
the operations they are testing.
-All these test cases are included in the test case dovetail.tempest.osinterop of
+All these test cases are included in the test case functest.tempest.osinterop of
OVP test suite.
Test Descriptions
@@ -50,7 +50,7 @@ Test Descriptions
API Used and Reference
----------------------
-Images: https://developer.openstack.org/api-ref/image/v2/
+Images: https://docs.openstack.org/api-ref/image/v2/
- create image
- delete image
@@ -62,9 +62,9 @@ Images: https://developer.openstack.org/api-ref/image/v2/
- add image tag
- delete image tag
----------------------------------------
-Image get tests using the Glance v2 API
----------------------------------------
+-----------------------------------------------------
+Test Case 1 - Image get tests using the Glance v2 API
+-----------------------------------------------------
Test case specification
-----------------------
@@ -104,7 +104,7 @@ Test execution
whether the 6 images' ids are not in the show list.
* **Test assertion 6:** The 6 images' ids are not found in the show list.
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
The first two test cases evaluate the ability to use Glance v2 API to show image
@@ -124,9 +124,9 @@ Post conditions
None
---------------------------------------
-CRUD image operations in Images API v2
---------------------------------------
+----------------------------------------------------
+Test Case 2 - CRUD image operations in Images API v2
+----------------------------------------------------
Test case specification
-----------------------
@@ -148,7 +148,7 @@ Test execution
* Test action 2: List all images and check whether the ids listed are in the created images list.
* **Test assertion 1:** The ids get from the list images API are in the created images list.
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the ability to use Glance v2 API to list images.
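+
+A minimal sketch of the list call with openstacksdk (the cloud name is a placeholder):
+
+.. code-block:: python
+
+   import openstack
+
+   conn = openstack.connect(cloud="mycloud")   # placeholder cloud name
+
+   # GET /v2/images, optionally filtered by status; the test checks that
+   # previously created image ids appear in this listing.
+   active_ids = {image.id for image in conn.image.images(status="active")}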
@@ -163,9 +163,9 @@ Post conditions
None
-----------------------------------------
-Image list tests using the Glance v2 API
-----------------------------------------
+------------------------------------------------------
+Test Case 3 - Image list tests using the Glance v2 API
+------------------------------------------------------
Test case specification
-----------------------
@@ -224,7 +224,7 @@ Test execution
the 6 ids are not in the show list.
* **Test assertion 8:** The stored 6 ids are not found in the show list.
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the ability to use Glance v2 API to list images with
@@ -245,9 +245,9 @@ Post conditions
None
-------------------------------------------
-Image update tests using the Glance v2 API
-------------------------------------------
+--------------------------------------------------------
+Test Case 4 - Image update tests using the Glance v2 API
+--------------------------------------------------------
Test case specification
-----------------------
@@ -291,7 +291,7 @@ Test execution
and check whether the ids are not in the show list.
* **Test assertion 6:** The two ids are not found in the show list.
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the ability to use Glance v2 API to update images with
@@ -308,9 +308,9 @@ Post conditions
None
---------------------------------------------
-Image deletion tests using the Glance v2 API
---------------------------------------------
+----------------------------------------------------------
+Test Case 5 - Image deletion tests using the Glance v2 API
+----------------------------------------------------------
Test case specification
-----------------------
@@ -349,7 +349,7 @@ Test execution
and check whether the ids are in the list.
* **Test assertion 5:** The two ids are not found in the list.
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
The first three test cases evaluate the ability to use Glance v2 API to delete images
diff --git a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_network.rst b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_network.rst
index a21b303c..d8e7413b 100644
--- a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_network.rst
+++ b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_network.rst
@@ -11,11 +11,11 @@ Scope
The VIM network test area evaluates the ability of the system under test to support
VIM network operations. The test cases documented here are the network API test cases
-in the Openstack Interop guideline 2017.09 as implemented by the Refstack client.
+in the Openstack Interop guideline 2018.11 as implemented by the Refstack client.
These test cases will evaluate basic Openstack (as a VIM) network operations including
basic CRUD operations on L2 networks, L2 network ports and security groups.
-Definitions and abbreviations
+Definitions and Abbreviations
=============================
The following terms and abbreviations are used in conjunction with this test area
@@ -42,7 +42,7 @@ the same state as before the test.
For brevity, the test cases in this test area are summarized together based on
the operations they are testing.
-All these test cases are included in the test case dovetail.tempest.osinterop of
+All these test cases are included in the test case functest.tempest.osinterop of
OVP test suite.
Test Descriptions
@@ -52,7 +52,7 @@ Test Descriptions
API Used and Reference
----------------------
-Network: http://developer.openstack.org/api-ref/networking/v2/index.html
+Network: https://docs.openstack.org/api-ref/network/v2/index.html
- create network
- update network
@@ -84,9 +84,9 @@ Network: http://developer.openstack.org/api-ref/networking/v2/index.html
- show security group rule
- delete security group rule
----------------------------------------------------------
-Basic CRUD operations on L2 networks and L2 network ports
----------------------------------------------------------
+-----------------------------------------------------------------------
+Test Case 1 - Basic CRUD operations on L2 networks and L2 network ports
+-----------------------------------------------------------------------
Test case specification
-----------------------
@@ -100,6 +100,7 @@ tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_without
tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_all_attributes
tempest.api.network.test_networks.NetworksTest.test_create_update_delete_network_subnet
tempest.api.network.test_networks.NetworksTest.test_delete_network_with_subnet
+tempest.api.network.test_networks.NetworksTest.test_external_network_visibility
tempest.api.network.test_networks.NetworksTest.test_list_networks
tempest.api.network.test_networks.NetworksTest.test_list_networks_fields
tempest.api.network.test_networks.NetworksTest.test_list_subnets
@@ -114,6 +115,7 @@ tempest.api.network.test_ports.PortsTestJSON.test_create_port_in_allowed_allocat
tempest.api.network.test_ports.PortsTestJSON.test_create_update_delete_port
tempest.api.network.test_ports.PortsTestJSON.test_list_ports
tempest.api.network.test_ports.PortsTestJSON.test_list_ports_fields
+tempest.api.network.test_ports.PortsTestJSON.test_port_list_filter_by_router_id
tempest.api.network.test_ports.PortsTestJSON.test_show_port
tempest.api.network.test_ports.PortsTestJSON.test_show_port_fields
@@ -163,55 +165,64 @@ Test execution
found after deletion
* Test action 9: Create a network and create a subnet of this network, then delete this network
* **Test assertion 9:** The subnet has also been deleted after deleting the network
-* Test action 10: Create a network and list all networks
-* **Test assertion 10:** The network created is found in the list
-* Test action 11: Create a network and list networks with the id and name of the created network
-* **Test assertion 11:** The id and name of the list network equal to the created network's id and name
-* Test action 12: Create a network and create a subnet of this network, then list all subnets
-* **Test assertion 12:** The subnet created is found in the list
-* Test action 13: Create a network and create a subnet of this network, then list subnets with
+* Test action 10: List all external networks, find the one with the same public_network_id
+  as defined in tempest.conf and list its subnets (see the sketch after this list)
+* **Test assertion 10:** The external network can be found and no internal network is
+  returned by the list operation; if this external network is shared, its subnet list is
+  not empty, otherwise it is empty
+* Test action 11: Create a network and list all networks
+* **Test assertion 11:** The network created is found in the list
+* Test action 12: Create a network and list networks with the id and name of the created network
+* **Test assertion 12:** The id and name of the list network equal to the created network's id and name
+* Test action 13: Create a network and create a subnet of this network, then list all subnets
+* **Test assertion 13:** The subnet created is found in the list
+* Test action 14: Create a network and create a subnet of this network, then list subnets with
the id and network_id of the created subnet
-* **Test assertion 13:** The id and network_id of the list subnet equal to the created subnet
-* Test action 14: Create a network and show network's details with the id of the created network
-* **Test assertion 14:** The id and name returned in the response equal to the created network's id and name
-* Test action 15: Create a network and just show network's id and name info with the id of the created network
-* **Test assertion 15:** The keys returned in the response are only id and name, and the values
+* **Test assertion 14:** The id and network_id of the list subnet equal to the created subnet
+* Test action 15: Create a network and show network's details with the id of the created network
+* **Test assertion 15:** The id and name returned in the response equal to the created network's id and name
+* Test action 16: Create a network and just show network's id and name info with the id of the created network
+* **Test assertion 16:** The keys returned in the response are only id and name, and the values
of all the keys equal to network's id and name
-* Test action 16: Create a network and create a subnet of this network, then show subnet's details
+* Test action 17: Create a network and create a subnet of this network, then show subnet's details
with the id of the created subnet
-* **Test assertion 16:** The id and cidr info returned in the response equal to the created
+* **Test assertion 17:** The id and cidr info returned in the response equal to the created
subnet's id and cidr
-* Test action 17: Create a network and create a subnet of this network, then show subnet's id and
+* Test action 18: Create a network and create a subnet of this network, then show subnet's id and
network_id info with the id of the created subnet
-* **Test assertion 17:** The keys returned in the response are just id and network_id, and the values
+* **Test assertion 18:** The keys returned in the response are just id and network_id, and the values
of all the keys equal to subnet's id and network_id
-* Test action 18: Create a network and create a subnet of this network, then update subnet's
+* Test action 19: Create a network and create a subnet of this network, then update subnet's
name, host_routes, dns_nameservers and gateway_ip
-* **Test assertion 18:** The name, host_routes, dns_nameservers and gateway_ip returned in the
+* **Test assertion 19:** The name, host_routes, dns_nameservers and gateway_ip returned in the
response equal to the values used to update the subnet
-* Test action 19: Create 2 networks and bulk create 2 ports with the ids of the created networks
-* **Test assertion 19:** The network_id of each port equals to the one used to create the port and
+* Test action 20: Create 2 networks and bulk create 2 ports with the ids of the created networks
+* **Test assertion 20:** The network_id of each port equals to the one used to create the port and
the admin_state_up of each port is True
-* Test action 20: Create a network and create a subnet of this network by setting allocation_pools,
+* Test action 21: Create a network and create a subnet of this network by setting allocation_pools,
then create a port with the created network's id
-* **Test assertion 20:** The ip_address of the created port is in the range of the allocation_pools
-* Test action 21: Create a network and create a port with its id, then update the port's name and
+* **Test assertion 21:** The ip_address of the created port is in the range of the allocation_pools
+* Test action 22: Create a network and create a port with its id, then update the port's name and
set its admin_state_up to be False
-* **Test assertion 21:** The name returned in the response equals to the name used to update
+* **Test assertion 22:** The name returned in the response equals to the name used to update
the port and the port's admin_state_up is False
-* Test action 22: Create a network and create a port with its id, then list all ports
-* **Test assertion 22:** The created port is found in the list
-* Test action 23: Create a network and create a port with its id, then list ports with the id
- and mac_address of the created port
+* Test action 23: Create a network and create a port with its id, then list all ports
* **Test assertion 23:** The created port is found in the list
-* Test action 24: Create a network and create a port with its id, then show the port's details
-* **Test assertion 24:** The key 'id' is in the details
-* Test action 25: Create a network and create a port with its id, then show the port's id
+* Test action 24: Create a network and create a port with its id, then list ports with the id
+ and mac_address of the created port
+* **Test assertion 24:** The created port is found in the list
+* Test action 25: Create a network, create a subnet and a port with its id, create a router
+  and add this port as this router's interface, then list ports with this router id
+* **Test assertion 25:** The ports list contains exactly one entry; the port id and device_id
+  returned by the list operation are the same as the ones returned when they were created
+* Test action 26: Create a network and create a port with its id, then show the port's details
+* **Test assertion 26:** The key 'id' is in the details
+* Test action 27: Create a network and create a port with its id, then show the port's id
and mac_address info with the port's id
-* **Test assertion 25:** The keys returned in the response are just id and mac_address,
+* **Test assertion 27:** The keys returned in the response are just id and mac_address,
and the values of all the keys equal to port's id and mac_address
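+
+The external-network visibility check of test action 10 boils down to a filtered
+list call; a minimal sketch against the Neutron API, where the endpoint, token and
+public network id are placeholders:
+
+.. code-block:: python
+
+   import requests
+
+   NEUTRON = "http://controller:9696/v2.0"  # placeholder endpoint
+   HEADERS = {"X-Auth-Token": "gAAAA..."}   # placeholder token
+   PUBLIC_NET = "..."                       # public_network_id from tempest.conf
+
+   # List only external networks; internal ones must not show up here.
+   nets = requests.get(f"{NEUTRON}/networks",
+                       params={"router:external": "true"},
+                       headers=HEADERS).json()["networks"]
+   assert all(n["router:external"] for n in nets)
+   public = next(n for n in nets if n["id"] == PUBLIC_NET)
+
+   # Subnets of the public network are only visible if it is shared.
+   subnets = requests.get(f"{NEUTRON}/subnets",
+                          params={"network_id": PUBLIC_NET},
+                          headers=HEADERS).json()["subnets"]
+   assert bool(subnets) == bool(public["shared"])
+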
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
These test cases evaluate the ability of basic CRUD operations on L2 networks and L2 network ports.
@@ -232,9 +243,9 @@ Post conditions
N/A
-----------------------------------------
-Basic CRUD operations on security groups
-----------------------------------------
+------------------------------------------------------
+Test Case 2 - Basic CRUD operations on security groups
+------------------------------------------------------
Test case specification
-----------------------
@@ -321,7 +332,7 @@ Test execution
* Test action 19: Generate a random uuid and use this id to show security group rule
* **Test assertion 19:** Failed to show security group rule because of nonexistent id of security group rule
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
These test cases evaluate the ability of Basic CRUD operations on security groups and security group rules.
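+
+For orientation, a happy-path sketch of the security group lifecycle with
+openstacksdk (the cloud name and group name are placeholders):
+
+.. code-block:: python
+
+   import openstack
+
+   conn = openstack.connect(cloud="mycloud")   # placeholder cloud name
+
+   sg = conn.network.create_security_group(name="ovp-sg",
+                                           description="OVP demo group")
+   # Allow ingress SSH as an example rule.
+   rule = conn.network.create_security_group_rule(
+       security_group_id=sg.id, direction="ingress", ethertype="IPv4",
+       protocol="tcp", port_range_min=22, port_range_max=22)
+
+   # Show, then clean up rule and group.
+   assert conn.network.get_security_group(sg.id).name == "ovp-sg"
+   conn.network.delete_security_group_rule(rule)
+   conn.network.delete_security_group(sg)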
@@ -342,9 +353,9 @@ Post conditions
N/A
--------------------------------
-CRUD operations on subnet pools
--------------------------------
+---------------------------------------------
+Test Case 3 - CRUD operations on subnet pools
+---------------------------------------------
Test case specification
-----------------------
@@ -371,7 +382,7 @@ Test execution
* Test action 4: Delete SNP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
These test cases evaluate the ability of Basic CRUD operations on subnetpools.
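+
+A sketch of the create/update/delete cycle against the Neutron subnetpools API
+(endpoint and token are placeholders):
+
+.. code-block:: python
+
+   import requests
+
+   NEUTRON = "http://controller:9696/v2.0"  # placeholder endpoint
+   HEADERS = {"X-Auth-Token": "gAAAA..."}   # placeholder token
+
+   body = {"subnetpool": {"name": "ovp-snp", "prefixes": ["192.168.0.0/16"],
+                          "min_prefixlen": 24, "default_prefixlen": 26}}
+   snp = requests.post(f"{NEUTRON}/subnetpools", json=body,
+                       headers=HEADERS).json()["subnetpool"]
+
+   requests.put(f"{NEUTRON}/subnetpools/{snp['id']}",
+                json={"subnetpool": {"name": "ovp-snp-renamed"}},
+                headers=HEADERS)
+   requests.delete(f"{NEUTRON}/subnetpools/{snp['id']}", headers=HEADERS)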
@@ -385,3 +396,131 @@ Post conditions
---------------
N/A
+
+----------------------------------------
+Test Case 4 - CRUD operations on routers
+----------------------------------------
+
+Test case specification
+-----------------------
+
+tempest.api.network.test_routers.RoutersTest.test_add_multiple_router_interfaces
+tempest.api.network.test_routers.RoutersTest.test_add_remove_router_interface_with_port_id
+tempest.api.network.test_routers.RoutersTest.test_add_remove_router_interface_with_subnet_id
+tempest.api.network.test_routers.RoutersTest.test_create_show_list_update_delete_router
+tempest.api.network.test_routers.RoutersTest.test_update_delete_extra_route
+tempest.api.network.test_routers.RoutersTest.test_update_router_admin_state
+tempest.api.network.test_routers_negative.RoutersNegativeTest.test_add_router_interfaces_on_overlapping_subnets_returns_400
+tempest.api.network.test_routers_negative.RoutersNegativeTest.test_delete_non_existent_router_returns_404
+tempest.api.network.test_routers_negative.RoutersNegativeTest.test_router_add_gateway_invalid_network_returns_404
+tempest.api.network.test_routers_negative.RoutersNegativeTest.test_router_add_gateway_net_not_external_returns_400
+tempest.api.network.test_routers_negative.RoutersNegativeTest.test_router_remove_interface_in_use_returns_409
+tempest.api.network.test_routers_negative.RoutersNegativeTest.test_show_non_existent_router_returns_404
+tempest.api.network.test_routers_negative.RoutersNegativeTest.test_update_non_existent_router_returns_404
+
+Test preconditions
+------------------
+
+Neutron is available.
+
+Basic test flow execution description and pass/fail criteria
+------------------------------------------------------------
+
+Test execution
+''''''''''''''
+
+* Test action 1: Create 2 networks NET1 and NET2, create SubNet1 of NET1 and SubNet2 of NET2
+  with a different cidr, create a router R1, add SubNet1 and SubNet2 to be R1's interfaces
+  and get port details
+* **Test assertion 1:** The device_id and subnet_id of each port equal the router id
+  and subnet id returned in the create response body
+* Test action 2: Create a network NET1, create SubNet1 of NET1, create P1 of NET1, create a router R1,
+ add P1 to be its interface, store the response body B1, show P1 and store the response body B2 and
+ remove the interface with port id
+* **Test assertion 2:** B1 includes keys 'subnet_id' and 'port_id', 'device_id' of B2 equals to router id
+* Test action 3: Create a network NET1, create SubNet1 of NET1, create a router R1, add SubNet1 to be its
+ interface, store the response body B1, show port details with 'port_id' in B1
+* **Test assertion 3:** B1 includes keys 'subnet_id' and 'port_id', 'device_id' equals to router id
+* Test action 4: Create a router R1 with name, admin_state_up False and external_network_id Ext-Net,
+  store the response body B1, show R1 with 'id' in B1, list all routers, update R1's name
+* **Test assertion 4:** 'name', 'admin_state_up' and 'network_id' in B1 equal to the name, False and Ext-Net,
+ 'name' in show details equals to 'name' in B1, 'network_id' in show details equals to Ext-Net,
+ R1 is in the router list, the 'name' has been updated
+* Test action 5: Create a router R1 with admin_state_up=True, create 4 networks and 4 subnets with different
+  cidrs, add these 4 subnets to R1 as its interfaces, update R1 with the given extra routes and show R1,
+  delete the extra routes of R1 and then show R1
+* **Test assertion 5:** The number of routes equals 4 and the routes list is empty after deletion
+* Test action 6: Create a router R1, update 'admin_state_up' of R1 to be True
+* **Test assertion 6:** 'admin_state_up' of R1 is initially False and is True after the update
+* Test action 7: Create 2 networks NET1 and NET2, create SubNet1 of NET1 and SubNet2 of NET2
+  with overlapping cidrs, create a router R1, add SubNet1 to be R1's interface and then try
+  to add SubNet2 to be R1's interface
+* **Test assertion 7:** It raises a BadRequest exception when trying to add SubNet2
+* Test action 8: Try to delete router with a random name
+* **Test assertion 8:** It raises a NotFound exception when attempting the delete operation
+* Test action 9: Create a router R1 and try to update it with wrong external_gateway_info
+* **Test assertion 9:** It raises a NotFound exception when trying to update R1
+* Test action 10: Create an internal network NET1, create SubNet1 of NET1,
+ create a router R1 and try to update it with NET1 as external_gateway_info
+* **Test assertion 10:** It raises a BadRequest exception when trying to update R1
+* Test action 11: Create a network NET1, create SubNet1 of NET1, create a router R1, add SubNet1 to
+ the interface of R1 and try to delete R1
+* **Test assertion 11:** It raises a Conflict exception when trying to delete R1
+* Test action 12: Try to show router with a random name
+* **Test assertion 12:** It raises a NotFound exception when trying to show router
+* Test action 13: Try to update router with a random name
+* **Test assertion 13:** It raises a NotFound exception when trying to update router
+
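+A condensed sketch of the happy-path router operations above, using openstacksdk
+(the cloud name, resource names and cidr are placeholders):
+
+.. code-block:: python
+
+   import openstack
+
+   conn = openstack.connect(cloud="mycloud")   # placeholder cloud name
+
+   net = conn.network.create_network(name="ovp-net")
+   subnet = conn.network.create_subnet(network_id=net.id, ip_version=4,
+                                       cidr="10.100.0.0/24")
+
+   # Create, update, show and delete a router (test actions 4 and 6).
+   router = conn.network.create_router(name="ovp-router",
+                                       is_admin_state_up=False)
+   router = conn.network.update_router(router, is_admin_state_up=True)
+   assert conn.network.get_router(router.id).is_admin_state_up
+
+   # Attach and detach the subnet as a router interface (test actions 2 and 3).
+   conn.network.add_interface_to_router(router, subnet_id=subnet.id)
+   conn.network.remove_interface_from_router(router, subnet_id=subnet.id)
+
+   conn.network.delete_router(router)
+   conn.network.delete_subnet(subnet)
+   conn.network.delete_network(net)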
+
+Pass / Fail criteria
+''''''''''''''''''''
+
+These test cases evaluate the ability to perform basic CRUD operations on routers.
+Specifically it verifies that:
+
+* Routers can be created, updated, shown and deleted.
+* A non-existent router can not be shown, updated or deleted.
+* A router interface that is in use can not be removed.
+
+In order to pass this test, all test assertions listed in the test execution above need to pass.
+
+Post conditions
+---------------
+
+N/A
+
+----------------------------------------------
+Test Case 5 - List versions within Network API
+----------------------------------------------
+
+Test case specification
+-----------------------
+
+tempest.api.network.test_versions.NetworksApiDiscovery.test_api_version_resources
+
+Test preconditions
+------------------
+
+Neutron is available.
+
+Basic test flow execution description and pass/fail criteria
+------------------------------------------------------------
+
+Test execution
+''''''''''''''
+
+* Test action 1: List network API versions
+* **Test assertion 1:** The network API version is 'v2.0'
+
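+A minimal sketch of the discovery call (the endpoint is a placeholder):
+
+.. code-block:: python
+
+   import requests
+
+   # An unauthenticated GET on the Neutron root lists the available versions.
+   resp = requests.get("http://controller:9696/")
+   versions = [v["id"] for v in resp.json()["versions"]]
+   assert "v2.0" in versions
+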
+Pass / Fail criteria
+''''''''''''''''''''
+
+This test case evaluates the ability to list network API versions.
+Specifically it verifies that:
+
+* The network API version is 'v2.0'.
+
+In order to pass this test, all test assertions listed in the test execution above need to pass.
+
+Post conditions
+---------------
+
+N/A
diff --git a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_volume.rst b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_volume.rst
index 097123aa..263e87cc 100644
--- a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_volume.rst
+++ b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_volume.rst
@@ -11,7 +11,7 @@ Scope
The VIM volume operations test area evaluates the ability of the system under
test to support VIM volume operations. The test cases documented here are the
-volume API test cases in the OpenStack Interop guideline 2017.09 as implemented
+volume API test cases in the OpenStack Interop guideline 2018.11 as implemented
by the RefStack client. These test cases will evaluate basic OpenStack (as a VIM)
volume operations, including:
@@ -24,7 +24,7 @@ volume operations, including:
- Volume metadata operations
- Volume snapshot operations
-Definitions and abbreviations
+Definitions and Abbreviations
=============================
The following terms and abbreviations are used in conjunction with this test area
@@ -51,7 +51,7 @@ the same state as before the test.
For brevity, the test cases in this test area are summarized together based on
the operations they are testing.
-All these test cases are included in the test case dovetail.tempest.osinterop of
+All these test cases are included in the test case functest.tempest.osinterop of
OVP test suite.
Test Descriptions
@@ -61,7 +61,7 @@ Test Descriptions
API Used and Reference
----------------------
-Block storage: https://developer.openstack.org/api-ref/block-storage
+Block storage: https://docs.openstack.org/api-ref/block-storage/
- create volume
- delete volume
@@ -77,49 +77,8 @@ Block storage: https://developer.openstack.org/api-ref/block-storage
- update snapshot
- delete snapshot
------------------------------------------------------
-Test Case 1 - Upload volumes with Cinder v2 or v3 API
------------------------------------------------------
-
-Test case specification
------------------------
-
-tempest.api.volume.test_volumes_actions.VolumesActionsTest.test_volume_upload
-
-Test preconditions
-------------------
-
-* Volume extension API
-
-Basic test flow execution description and pass/fail criteria
-------------------------------------------------------------
-
-Test execution
-''''''''''''''
-* Test action 1: Create a volume VOL1
-* Test action 2: Convert VOL1 and upload image IMG1 to the Glance
-* Test action 3: Wait until the status of IMG1 is 'ACTIVE' and VOL1 is 'available'
-* Test action 4: Show the details of IMG1
-* **Test assertion 1:** The name of IMG1 shown is the same as the name used to upload it
-* **Test assertion 2:** The disk_format of IMG1 is the same as the disk_format of VOL1
-
-Pass / fail criteria
-''''''''''''''''''''
-
-This test case evaluates the volume API ability of uploading images.
-Specifically, the test verifies that:
-
-* The Volume can convert volumes and upload images.
-
-In order to pass this test, all test assertions listed in the test execution above need to pass.
-
-Post conditions
----------------
-
-N/A
-
--------------------------------------------------------------------------------------
-Test Case 2 - Volume service availability zone operations with the Cinder v2 or v3 API
+Test Case 1 - Volume service availability zone operations with the Cinder v2 or v3 API
--------------------------------------------------------------------------------------
Test case specification
@@ -140,7 +99,7 @@ Test execution
* Test action 1: List all existent availability zones
* **Test assertion 1:** Verify the availability zone list length is greater than 0
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of listing availability zones.
@@ -156,7 +115,7 @@ Post conditions
N/A
--------------------------------------------------------------------
-Test Case 3 - Volume cloning operations with the Cinder v2 or v3 API
+Test Case 2 - Volume cloning operations with the Cinder v2 or v3 API
--------------------------------------------------------------------
Test case specification
@@ -190,7 +149,7 @@ Test execution
* Test action 7: Update the name of VOL3 and description with the original value
* **Test assertion 6:** Verify VOL3's bootable flag is 'False'
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of creating a cloned volume from a source volume,
@@ -210,7 +169,7 @@ Post conditions
N/A
--------------------------------------------------------------------------
-Test Case 4 - Image copy-to-volume operations with the Cinder v2 or v3 API
+Test Case 3 - Image copy-to-volume operations with the Cinder v2 or v3 API
--------------------------------------------------------------------------
Test case specification
@@ -249,7 +208,7 @@ Test execution
* Test action 11: Update the name of VOL3 and description with the original value
* **Test assertion 8:** Verify VOL3's bootable flag is 'True'
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of updating volume's bootable flag and creating
@@ -270,7 +229,7 @@ Post conditions
N/A
----------------------------------------------------------------------------------
-Test Case 5 - Volume creation and deletion operations with the Cinder v2 or v3 API
+Test Case 4 - Volume creation and deletion operations with the Cinder v2 or v3 API
----------------------------------------------------------------------------------
Test case specification
@@ -320,7 +279,7 @@ Test execution
* Test action 13: Create a volume with volume size '0'
* **Test assertion 12:** Verify create volume failed, a bad request error is returned in the response
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of creating a volume, getting volume
@@ -343,7 +302,7 @@ Post conditions
N/A
--------------------------------------------------------------------------------------
-Test Case 6 - Volume service extension listing operations with the Cinder v2 or v3 API
+Test Case 5 - Volume service extension listing operations with the Cinder v2 or v3 API
--------------------------------------------------------------------------------------
Test case specification
@@ -366,7 +325,7 @@ Test execution
* Test action 1: List all cinder service extensions
* **Test assertion 1:** Verify all extensions are list in the extension list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of listing all existent volume service extensions.
@@ -381,7 +340,7 @@ Post conditions
N/A
----------------------------------------------------------------
-Test Case 7 - Volume GET operations with the Cinder v2 or v3 API
+Test Case 6 - Volume GET operations with the Cinder v2 or v3 API
----------------------------------------------------------------
Test case specification
@@ -409,7 +368,7 @@ Test execution
* Test action 3: Retrieve a volume with a nonexistent volume ID
* **Test assertion 3:** Verify retrieve volume failed, a 'Not Found' error is returned in the response
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of getting volumes.
@@ -425,7 +384,7 @@ Post conditions
N/A
--------------------------------------------------------------------
-Test Case 8 - Volume listing operations with the Cinder v2 or v3 API
+Test Case 7 - Volume listing operations with the Cinder v2 or v3 API
--------------------------------------------------------------------
Test case specification
@@ -510,7 +469,7 @@ Test execution
* Test action 22: List all existent volumes and paginate the volume list by desired volume IDs
* **Test assertion 24:** Verify only the desired volumes are listed in the filtered volume list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of getting a list of volumes and filtering the volume list.
@@ -529,7 +488,7 @@ Post conditions
N/A
---------------------------------------------------------------------
-Test Case 9 - Volume metadata operations with the Cinder v2 or v3 API
+Test Case 8 - Volume metadata operations with the Cinder v2 or v3 API
---------------------------------------------------------------------
Test case specification
@@ -561,7 +520,7 @@ Test execution
* Test action 6: Update one metadata item 'key3' of VOL2
* **Test assertion 5:** Verify the metadata of VOL2 is updated
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of creating metadata for a volume, getting the
@@ -580,9 +539,9 @@ Post conditions
N/A
----------------------------------------------------------------------------------------
-Test Case 10 - Verification of read-only status on volumes with the Cinder v2 or v3 API
----------------------------------------------------------------------------------------
+--------------------------------------------------------------------------------------
+Test Case 9 - Verification of read-only status on volumes with the Cinder v2 or v3 API
+--------------------------------------------------------------------------------------
Test case specification
-----------------------
@@ -605,7 +564,7 @@ Test execution
* Test action 2: Update a provided volume VOL1's read-only access mode to 'False'
* **Test assertion 2:** Verify VOL1 is not in read-only access mode
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of setting and updating volume read-only access mode.
@@ -620,61 +579,8 @@ Post conditions
N/A
--------------------------------------------------------------------------
-Test Case 11 - Volume reservation operations with the Cinder v2 or v3 API
--------------------------------------------------------------------------
-
-Test case specification
------------------------
-
-tempest.api.volume.test_volumes_actions.VolumesActionsTest.test_reserve_unreserve_volume
-tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_reserve_volume_with_negative_volume_status
-tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_reserve_volume_with_nonexistent_volume_id
-tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_unreserve_volume_with_nonexistent_volume_id
-
-Test preconditions
-------------------
-
-* Volume extension API
-
-Basic test flow execution description and pass/fail criteria
-------------------------------------------------------------
-
-Test execution
-''''''''''''''
-
-* Test action 1: Update a provided volume VOL1 as reserved
-* **Test assertion 1:** Verify VOL1 is in 'attaching' status
-* Test action 2: Update VOL1 as un-reserved
-* **Test assertion 2:** Verify VOL1 is in 'available' status
-* Test action 3: Update a provided volume VOL2 as reserved
-* Test action 4: Update VOL2 as reserved again
-* **Test assertion 3:** Verify update VOL2 status failed, a bad request error is returned in the response
-* Test action 5: Update VOL2 as un-reserved
-* Test action 6: Update a non-existent volume as reserved by using an invalid volume ID
-* **Test assertion 4:** Verify update non-existent volume as reserved failed, a 'Not Found' error is returned in the response
-* Test action 7: Update a non-existent volume as un-reserved by using an invalid volume ID
-* **Test assertion 5:** Verify update non-existent volume as un-reserved failed, a 'Not Found' error is returned in the response
-
-Pass / fail criteria
-''''''''''''''''''''
-
-This test case evaluates the volume API ability of reserving and un-reserving volumes.
-Specifically, the test verifies that:
-
-* Volume can be reserved and un-reserved.
-* Update a non-existent volume as reserved is not allowed.
-* Update a non-existent volume as un-reserved is not allowed.
-
-In order to pass this test, all test assertions listed in the test execution above need to pass.
-
-Post conditions
----------------
-
-N/A
-
----------------------------------------------------------------------------------------
-Test Case 12 - Volume snapshot creation/deletion operations with the Cinder v2 or v3 API
+Test Case 10 - Volume snapshot creation/deletion operations with the Cinder v2 or v3 API
----------------------------------------------------------------------------------------
Test case specification
@@ -753,7 +659,7 @@ Test execution
* Test action 27: Create a snapshot from a volume by using an empty volume ID
* **Test assertion 21:** Verify create snapshot failed, a 'Not Found' error is returned in the response
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of managing snapshot and snapshot metadata.
@@ -784,7 +690,7 @@ Post conditions
N/A
--------------------------------------------------------------------
-Test Case 13 - Volume update operations with the Cinder v2 or v3 API
+Test Case 11 - Volume update operations with the Cinder v2 or v3 API
--------------------------------------------------------------------
Test case specification
@@ -812,7 +718,7 @@ Test execution
* Test action 3: Update a non-existent volume by using a random generated volume ID
* **Test assertion 3:** Verify update volume failed, a 'Not Found' error is returned in the response
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of updating volume attributes.
@@ -828,3 +734,41 @@ Post conditions
---------------
N/A
+
+-------------------------------------------------------------------
+Test Case 12 - Volume list version operation with the Cinder v3 API
+-------------------------------------------------------------------
+
+Test case specification
+-----------------------
+
+tempest.api.volume.test_versions.VersionsTest.test_list_versions
+
+Test preconditions
+------------------
+
+* Volume API
+
+Basic test flow execution description and pass/fail criteria
+------------------------------------------------------------
+
+Test execution
+''''''''''''''
+
+* Test action 1: List volume API versions
+* **Test assertion 1:** The list versions operation is successfully executed
+
+Pass / Fail criteria
+''''''''''''''''''''
+
+This test case evaluates the volume API ability of listing volume API versions.
+Specifically, the test verifies that:
+
+* Volume API versions can be listed successfully.
+
+In order to pass this test, all test assertions listed in the test execution above need to pass.
+
+Post conditions
+---------------
+
+N/A
diff --git a/docs/testing/user/testspecification/tempest_trunk_ports/index.rst b/docs/testing/user/testspecification/tempest_trunk_ports/index.rst
index fd60a32e..f82fc468 100644
--- a/docs/testing/user/testspecification/tempest_trunk_ports/index.rst
+++ b/docs/testing/user/testspecification/tempest_trunk_ports/index.rst
@@ -17,7 +17,7 @@ operations, by means of both positive and negative tests.
References
================
-- `OpenStack API reference <https://developer.openstack.org/api-ref/network/v2/#trunk-networking>`_
+- `OpenStack API reference <https://docs.openstack.org/api-ref/network/v2/#trunk-networking>`_
System Under Test (SUT)
@@ -35,9 +35,9 @@ is able to run independently, i.e. irrelevant of the state created by a previous
test. For detailed information on the individual steps and assertions performed
by the tests, review the Python source code accessible via the following links:
-- `Neutron Trunk API tests <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk.py>`_
-- `Neutron Trunk API trunk details <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk_details.py>`_
-- `Neutron Trunk API negative tests <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk_negative.py>`_
+- `Neutron Trunk API tests <https://github.com/openstack/neutron-tempest-plugin/blob/0.3.0/neutron_tempest_plugin/api/test_trunk.py>`_
+- `Neutron Trunk API trunk details <https://github.com/openstack/neutron-tempest-plugin/blob/0.3.0/neutron_tempest_plugin/api/test_trunk_details.py>`_
+- `Neutron Trunk API negative tests <https://github.com/openstack/neutron-tempest-plugin/blob/0.3.0/neutron_tempest_plugin/api/test_trunk_negative.py>`_
**Trunk port and sub-port CRUD operations:**
@@ -46,38 +46,17 @@ These tests cover the CRUD (Create, Read, Update, Delete) life-cycle operations
of trunk ports and subports.
Implementation:
-`TrunkTestInheritJSONBase <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk.py#L228>`_
-and
-`TrunkTestJSON <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk.py#L83>`_.
+`TrunkTestJSON <https://github.com/openstack/neutron-tempest-plugin/blob/0.3.0/neutron_tempest_plugin/api/test_trunk.py#L71>`_
-- neutron.tests.tempest.api.test_trunk.TrunkTestInheritJSONBase.test_add_subport
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_add_subport
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_create_show_delete_trunk
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_create_trunk_empty_subports_list
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_create_trunk_subports_not_specified
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_create_update_trunk
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_create_update_trunk_with_description
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_delete_trunk_with_subport_is_allowed
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_get_subports
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_list_trunks
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_remove_subport
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_show_trunk_has_project_id
-
-
-**MTU-related operations:**
-
-These tests validate that trunk ports and subports can be created and added
-when specifying valid MTU sizes. These tests do not include negative tests
-covering invalid MTU sizes.
-
-Implementation:
-`TrunkTestMtusJSON <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk.py#L302>`_
-
-- neutron.tests.tempest.api.test_trunk.TrunkTestMtusJSON.test_add_subport_with_mtu_equal_to_trunk
-- neutron.tests.tempest.api.test_trunk.TrunkTestMtusJSON.test_add_subport_with_mtu_smaller_than_trunk
-- neutron.tests.tempest.api.test_trunk.TrunkTestMtusJSON.test_create_trunk_with_mtu_equal_to_subport
-- neutron.tests.tempest.api.test_trunk.TrunkTestMtusJSON.test_create_trunk_with_mtu_greater_than_subport
+- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_add_subports
+- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_show_delete_trunk
+- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_trunk_empty_subports_list
+- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_trunk_subports_not_specified
+- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_delete_trunk_with_subport_is_allowed
+- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_get_subports
+- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_list_trunks
+- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_remove_subport
**API for listing query results:**
@@ -86,17 +65,15 @@ These tests verify that listing operations of trunk port objects work. This
functionality is required for CLI and UI operations.
Implementation:
-`TrunksSearchCriteriaTest <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk.py#L346>`_
+`TrunksSearchCriteriaTest <https://github.com/openstack/neutron-tempest-plugin/blob/0.3.0/neutron_tempest_plugin/api/test_trunk.py#L306>`_
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_no_pagination_limit_0
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_page_reverse_asc
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_page_reverse_desc
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_page_reverse_with_href_links
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_with_href_links
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_with_marker
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_sorts_asc
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_sorts_desc
+- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_no_pagination_limit_0
+- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination
+- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_page_reverse_asc
+- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_page_reverse_desc
+- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_with_marker
+- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_sorts_asc
+- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_sorts_desc
**Query trunk port details:**
@@ -104,11 +81,11 @@ Implementation:
These tests validate that all attributes of trunk port objects can be queried.
Implementation:
-`TestTrunkDetailsJSON <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk_details.py#L20>`_
+`TestTrunkDetailsJSON <https://github.com/openstack/neutron-tempest-plugin/blob/0.3.0/neutron_tempest_plugin/api/test_trunk_details.py#L20>`_
-- neutron.tests.tempest.api.test_trunk_details.TestTrunkDetailsJSON.test_port_resource_empty_trunk_details
-- neutron.tests.tempest.api.test_trunk_details.TestTrunkDetailsJSON.test_port_resource_trunk_details_no_subports
-- neutron.tests.tempest.api.test_trunk_details.TestTrunkDetailsJSON.test_port_resource_trunk_details_with_subport
+- neutron_tempest_plugin.api.test_trunk_details.TestTrunkDetailsJSON.test_port_resource_empty_trunk_details
+- neutron_tempest_plugin.api.test_trunk_details.TestTrunkDetailsJSON.test_port_resource_trunk_details_no_subports
+- neutron_tempest_plugin.api.test_trunk_details.TestTrunkDetailsJSON.test_port_resource_trunk_details_with_subport
**Negative tests:**
@@ -117,30 +94,27 @@ These group of tests comprise negative tests which verify that invalid operation
are handled correctly by the system under test.
Implementation:
-`TrunkTestNegative <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk_negative.py#L24>`_
-
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_add_subport_duplicate_segmentation_details
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_add_subport_passing_dict
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_add_subport_port_id_disabled_trunk
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_add_subport_port_id_uses_trunk_port_id
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_subport_invalid_inherit_network_segmentation_type
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_subport_missing_segmentation_id
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_subport_nonexistent_port_id
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_subport_nonexistent_trunk
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_duplicate_subport_segmentation_ids
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_nonexistent_port_id
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_nonexistent_subport_port_id
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_with_subport_missing_port_id
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_with_subport_missing_segmentation_id
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_with_subport_missing_segmentation_type
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_delete_port_in_use_by_subport
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_delete_port_in_use_by_trunk
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_delete_trunk_disabled_trunk
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_remove_subport_not_found
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_remove_subport_passing_dict
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_remove_subport_port_id_disabled_trunk
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestMtusJSON.test_add_subport_with_mtu_greater_than_trunk
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestMtusJSON.test_create_trunk_with_mtu_smaller_than_subport
+`TrunkTestNegative <https://github.com/openstack/neutron-tempest-plugin/blob/0.3.0/neutron_tempest_plugin/api/test_trunk_negative.py#L27>`_
+
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_duplicate_segmentation_details
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_passing_dict
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_port_id_disabled_trunk
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_port_id_uses_parent_port_id
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_subport_missing_segmentation_id
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_subport_nonexistent_port_id
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_subport_nonexistent_trunk
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_duplicate_subport_segmentation_ids
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_nonexistent_port_id
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_nonexistent_subport_port_id
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_with_subport_missing_port_id
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_with_subport_missing_segmentation_id
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_with_subport_missing_segmentation_type
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_delete_port_in_use_by_subport
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_delete_port_in_use_by_trunk
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_delete_trunk_disabled_trunk
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_remove_subport_not_found
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_remove_subport_passing_dict
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_remove_subport_port_id_disabled_trunk
**Scenario tests (tests covering more than one functionality):**
@@ -151,7 +125,6 @@ functions as intended. To this end, they boot up two VMs with trunk ports and
sub ports and verify connectivity between those VMs.
Implementation:
-`TrunkTest <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/scenario/test_trunk.py#L45>`_
+`TrunkTest <https://github.com/openstack/neutron-tempest-plugin/blob/0.3.0/neutron_tempest_plugin/scenario/test_trunk.py#L41>`_
-- neutron.tests.tempest.scenario.test_trunk.TrunkTest.test_subport_connectivity
-- neutron.tests.tempest.scenario.test_trunk.TrunkTest.test_trunk_subport_lifecycle
+- neutron_tempest_plugin.scenario.test_trunk.TrunkTest.test_trunk_subport_lifecycle
diff --git a/docs/testing/user/testspecification/tempest_vm_lifecycle/index.rst b/docs/testing/user/testspecification/tempest_vm_lifecycle/index.rst
index bbfd2884..1621d8f5 100644
--- a/docs/testing/user/testspecification/tempest_vm_lifecycle/index.rst
+++ b/docs/testing/user/testspecification/tempest_vm_lifecycle/index.rst
@@ -59,7 +59,7 @@ Each test case is able to run independently, i.e. irrelevant of the state
created by a previous test. Specifically, every test performs clean-up
operations which return the system to the same state as before the test.
-All these test cases are included in the test case dovetail.tempest.vm_lifecycle of
+All these test cases are included in the test case functest.tempest.vm_lifecycle of
OVP test suite.
Test Descriptions
@@ -69,35 +69,35 @@ Test Descriptions
API Used and Reference
----------------------
-Block storage: https://developer.openstack.org/api-ref/block-storage
+Block storage: https://docs.openstack.org/api-ref/block-storage/
- create volume
- delete volume
- attach volume to server
- detach volume from server
-Security Groups: https://developer.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
+Security Groups: https://docs.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
- create security group
- delete security group
-Networks: https://developer.openstack.org/api-ref/networking/v2/index.html#networks
+Networks: https://docs.openstack.org/api-ref/network/v2/index.html#networks
- create network
- delete network
-Routers and interface: https://developer.openstack.org/api-ref/networking/v2/index.html#routers-routers
+Routers and interface: https://docs.openstack.org/api-ref/network/v2/index.html#routers-routers
- create router
- delete router
- add interface to router
-Subnets: https://developer.openstack.org/api-ref/networking/v2/index.html#subnets
+Subnets: https://docs.openstack.org/api-ref/network/v2/index.html#subnets
- create subnet
- delete subnet
-Servers: https://developer.openstack.org/api-ref/compute/
+Servers: https://docs.openstack.org/api-ref/compute/
- create keypair
- create server
@@ -120,17 +120,17 @@ Servers: https://developer.openstack.org/api-ref/compute/
- migrate server
- live-migrate server
-Ports: https://developer.openstack.org/api-ref/networking/v2/index.html#ports
+Ports: https://docs.openstack.org/api-ref/network/v2/index.html#ports
- create port
- delete port
-Floating IPs: https://developer.openstack.org/api-ref/networking/v2/index.html#floating-ips-floatingips
+Floating IPs: https://docs.openstack.org/api-ref/network/v2/index.html#floating-ips-floatingips
- create floating IP
- delete floating IP
-Availability zone: https://developer.openstack.org/api-ref/compute/
+Availability zone: https://docs.openstack.org/api-ref/compute/
- get availability zone
@@ -174,7 +174,7 @@ Test execution
* **Test assertion 8:** Verify VM1's addresses have been refreshed after disassociating FIP1
* Test action 10: Delete SG1, IMG1, KEYP1, VOL1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates a minimum basic scenario. Specifically, the test verifies that:
@@ -227,7 +227,7 @@ Test execution
* **Test assertion 3:** Verify SRC_HOST does not equal to DST_HOST
* Test action 11: Delete KEYP1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to cold migrate VMs. Specifically, the test verifies that:
@@ -274,7 +274,7 @@ Test execution
* **Test assertion 3:** Verify can ping FIP1 successfully and can SSH to VM1 via FIP1
* Test action 8: Delete KEYP1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to pause and unpause VMs. Specifically, the test verifies that:
@@ -319,7 +319,7 @@ Test execution
* **Test assertion 1:** Verify can ping FIP1 successfully and can SSH to VM1 via FIP1
* Test action 6: Delete KEYP1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to reboot servers. Specifically, the test verifies that:
@@ -362,7 +362,7 @@ Test execution
* **Test assertion 1:** Verify can ping FIP1 successfully and can SSH to VM1 via FIP1
* Test action 6: Delete KEYP1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to rebuild servers. Specifically, the test verifies that:
@@ -407,7 +407,7 @@ Test execution
* **Test assertion 1:** Verify can ping FIP1 successfully and can SSH to VM1 via FIP1
* Test action 8: Delete KEYP1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to resize servers. Specifically, the test verifies that:
@@ -453,7 +453,7 @@ Test execution
* **Test assertion 2:** Verify can ping FIP1 successfully and can SSH to VM1 via FIP1
* Test action 8: Delete KEYP1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to stop and start servers. Specifically, the test verifies that:
@@ -501,7 +501,7 @@ Test execution
* **Test assertion 2:** Verify can ping FIP1 successfully and can SSH to VM1 via FIP1
* Test action 8: Delete KEYP1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to suspend and resume servers. Specifically, the test verifies that:
@@ -553,7 +553,7 @@ Test execution
* **Test assertion 4:** Verify VM1 status is 'ACTIVE'
* Test action 10: Delete KEYP1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to suspend and resume servers in sequence.
@@ -597,7 +597,7 @@ Test execution
* **Test assertion 1:** VM1's status is 'ACTIVE'
* Test action 6: Delete VM1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to resize volume backed servers.
@@ -646,7 +646,7 @@ Test execution
* **Test assertion 1:** Verify the values written and read are the same
* Test action 9: Delete SG1, KEYP1 and VM1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to shelve and unshelve servers.
@@ -694,7 +694,7 @@ Test execution
* **Test assertion 1:** Verify T_STAMP1 equals to T_STAMP2
* Test action 9: Delete SG1, KEYP1 and VM1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to shelve and unshelve volume backed servers.
diff --git a/docs/testing/user/testspecification/tempest_volume/index.rst b/docs/testing/user/testspecification/tempest_volume/index.rst
index 7672e4de..64888c99 100644
--- a/docs/testing/user/testspecification/tempest_volume/index.rst
+++ b/docs/testing/user/testspecification/tempest_volume/index.rst
@@ -2,9 +2,9 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) Ericsson AB
-===========================================
+=================================
Tempest Volume test specification
-===========================================
+=================================
Scope
@@ -35,7 +35,7 @@ The test area is structured in individual tests as listed below.
For detailed information on the individual steps and assertions performed
by the tests, review the Python source code accessible via the following links:
-All these test cases are included in the test case dovetail.tempest.volume of
+All these test cases are included in the test case functest.tempest.volume of
OVP test suite.
diff --git a/docs/testing/user/testspecification/vnf/index.rst b/docs/testing/user/testspecification/vnf/index.rst
index a9219982..d13634a1 100644
--- a/docs/testing/user/testspecification/vnf/index.rst
+++ b/docs/testing/user/testspecification/vnf/index.rst
@@ -33,7 +33,7 @@ This test area references the following specifications and guides:
- 3GPP LTE
- - http://www.3gpp.org/technologies/keywords-acronyms/98-lte
+ - https://www.3gpp.org/technologies/keywords-acronyms/98-lte
- ETSI - TS 24.301
@@ -82,14 +82,14 @@ return the system to the same state as before the test.
Test Descriptions
=================
-----------------------------------------------------------------
+------------------
Test Case 1 - vEPC
-----------------------------------------------------------------
+------------------
Short name
----------
-dovetail.vnf.vepc
+functest.vnf.vepc
Use case specification
@@ -136,7 +136,7 @@ Test execution
* Test action 5: The deployed VMs are deleted.
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
The VNF Manager (juju) should be deployed successfully
@@ -153,14 +153,14 @@ Post conditions
The clean-up operations are run.
-----------------------------------------------------------------
+------------------
Test Case 2 - vIMS
-----------------------------------------------------------------
+------------------
Short name
----------
-dovetail.vnf.vims
+functest.vnf.vims
Use case specification
----------------------
@@ -202,7 +202,7 @@ Test execution
* Test action 5: The deployed VMs are deleted.
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
The VNF orchestrator (Cloudify) should be deployed successfully.
diff --git a/docs/testing/user/testspecification/vping/index.rst b/docs/testing/user/testspecification/vping/index.rst
index 666b732f..355018c6 100644
--- a/docs/testing/user/testspecification/vping/index.rst
+++ b/docs/testing/user/testspecification/vping/index.rst
@@ -3,7 +3,7 @@
.. (c) Ericsson AB
========================
-Vping test specification
+Vping Test Specification
========================
.. toctree::
@@ -31,10 +31,10 @@ References
- SCPClient
- - https://pypi.python.org/pypi/scp
+ - https://pypi.org/project/scp/
-Definitions and abbreviations
+Definitions and Abbreviations
=============================
The following terms and abbreviations are used in conjunction with this test
@@ -42,10 +42,10 @@ area
- ICMP - Internet Control Message Protocol
- L3 - Layer 3
-- NFVi - Network functions virtualization infrastructure
-- SCP - Secure Copy
-- SSH - Secure Shell
-- VM - Virtual machine
+- NFVi - Network Functions Virtualization infrastructure
+- SCP - Secure CoPy
+- SSH - Secure SHell
+- VM - Virtual Machine
System Under Test (SUT)
@@ -73,7 +73,7 @@ Test Case 1 - vPing using userdata provided by nova metadata service
Short name
----------
-dovetail.vping.userdata
+functest.vping.userdata
Use case specification
@@ -106,39 +106,46 @@ checking the console output of the source VMs.
Test execution
''''''''''''''
-* Test action 1:
+* **Test action 1:**
* Create a private tenant network by using neutron client
* Create one subnet and one router in the network by neutron client
* Add one interface between the subnet and router
* Add one gateway route to the router by neutron client
* Store the network id in the response
* **Test assertion 1:** The network id, subnet id and router id can be found in the response
-* Test action 2:
+* **Test action 2:**
  * Create a security group by using neutron client
* Store the security group id parameter in the response
* **Test assertion 2:** The security group id can be found in the response
-* Test action 3: boot VM1 by using nova client with configured name, image, flavor, private tenant
- network created in test action 1, security group created in test action 2
+* **Test action 3:**
+ * Boot VM1 by using nova client with configured name, image, flavor, private tenant
+ network created in test action 1 and security group created in test action 2
* **Test assertion 3:** The VM1 object can be found in the response
-* Test action 4: Generate ping script with the IP of VM1 to be passed as userdata provided by
- the **nova metadata service**.
-* Test action 5: Boot VM2 by using nova client with configured name, image, flavor, private tenant
- network created in test action 1, security group created in test action 2, userdata created
- in test action 4
+* **Test action 4:**
+ * Generate ping script with the IP of VM1 to be passed as userdata provided by
+ the **nova metadata service**
+* **Test action 5:**
+ * Boot VM2 by using nova client with configured name, image, flavor, private tenant
+ network created in test action 1, security group created in test action 2, and
+ userdata created in test action 4
* **Test assertion 4:** The VM2 object can be found in the response
-* Test action 6: Inside VM2, the ping script is executed automatically when booted and it contains a
- loop doing the ping until the return code is 0 or timeout reached. For each ping, when the return
- code is 0, "vPing OK" is printed in the VM2 console-log, otherwise, "vPing KO" is printed.
- Monitoring the console-log of VM2 to see the response generated by the script.
+* **Test action 6:**
+  * Inside VM2, the ping script is executed automatically when booted; it contains a
+    loop doing the ping until the return code is 0 or the timeout is reached
+  * For each ping, when the return code is 0, "vPing OK" is printed in the VM2 console-log,
+    otherwise, "vPing KO" is printed
+  * Monitor the console-log of VM2 to see the response generated by the script
* **Test assertion 5:** "vPing OK" is detected, when monitoring the console-log in VM2
-* Test action 7: delete VM1, VM2
+* **Test action 7:**
+ * Delete VM1, VM2
* **Test assertion 6:** VM1 and VM2 are not present in the VM list
-* Test action 8: delete security group, gateway, interface, router, subnet and network
+* **Test action 8:**
+ * Delete security group, gateway, interface, router, subnet and network
* **Test assertion 7:** The security group, gateway, interface, router, subnet and network are
no longer present in the lists after deleting
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates basic NFVi capabilities of the system under test.
@@ -173,7 +180,7 @@ Test Case 2 - vPing using SSH to a floating IP
Short name
----------
-dovetail.vping.ssh
+functest.vping.ssh
Use case specification
@@ -207,45 +214,55 @@ Test execution
''''''''''''''
-* Test action 1:
+* **Test action 1:**
* Create a private tenant network by neutron client
  * Create one subnet and one router in the network by using neutron client
* Create one interface between the subnet and router
* Add one gateway route to the router by neutron client
* Store the network id in the response
* **Test assertion 1:** The network id, subnet id and router id can be found in the response
-* Test action 2:
+* **Test action 2:**
  * Create a security group by using neutron client
* Store the security group id parameter in the response
* **Test assertion 2:** The security group id can be found in the response
-* Test action 3: Boot VM1 by using nova client with configured name, image, flavor, private tenant
- network created in test action 1, security group created in test action 2
+* **Test action 3:**
+ * Boot VM1 by using nova client with configured name, image, flavor, private tenant
+ network created in test action 1 and security group created in test action 2
* **Test assertion 3:** The VM1 object can be found in the response
-* Test action 4: Boot VM2 by using nova client with configured name, image, flavor, private tenant
- network created in test action 1, security group created in test action 2
+* **Test action 4:**
+ * Boot VM2 by using nova client with configured name, image, flavor, private tenant
+ network created in test action 1 and security group created in test action 2
* **Test assertion 4:** The VM2 object can be found in the response
-* Test action 5: create one floating IP by using neutron client, storing the floating IP address
- returned in the response
+* **Test action 5:**
+ * Create one floating IP by using neutron client, storing the floating IP address
+ returned in the response
* **Test assertion 5:** Floating IP address can be found in the response
-* Test action 6: Assign the floating IP address created in test action 5 to VM2 by using nova client
+* **Test action 6:**
+ * Assign the floating IP address created in test action 5 to VM2 by using nova client
* **Test assertion 6:** The assigned floating IP can be found in the VM2 console log file
-* Test action 7: Establish SSH connection between the test host and VM2 through the floating IP
+* **Test action 7:**
+ * Establish SSH connection between the test host and VM2 through the floating IP
* **Test assertion 7:** SSH connection between the test host and VM2 is established within
300 seconds
-* Test action 8: Copy the Ping script from the test host to VM2 by using SCPClient
+* **Test action 8:**
+ * Copy the Ping script from the test host to VM2 by using SCPClient
* **Test assertion 8:** The Ping script can be found inside VM2
-* Test action 9: Inside VM2, to execute the Ping script to ping VM1, the Ping script contains a
- loop doing the ping until the return code is 0 or timeout reached, for each ping, when the return
- code is 0, "vPing OK" is printed in the VM2 console-log, otherwise, "vPing KO" is printed.
- Monitoring the console-log of VM2 to see the response generated by the script.
+* **Test action 9:**
+  * Inside VM2, execute the Ping script to ping VM1; the Ping script contains a
+    loop doing the ping until the return code is 0 or the timeout is reached
+  * For each ping, when the return code is 0, "vPing OK" is printed in the VM2 console-log,
+    otherwise, "vPing KO" is printed
+  * Monitor the console-log of VM2 to see the response generated by the script
* **Test assertion 9:** "vPing OK" is detected, when monitoring the console-log in VM2
-* Test action 10: delete VM1, VM2
+* **Test action 10:**
+ * Delete VM1, VM2
* **Test assertion 10:** VM1 and VM2 are not present in the VM list
-* Test action 11: delete floating IP, security group, gateway, interface, router, subnet and network
+* **Test action 11:**
+ * Delete floating IP, security group, gateway, interface, router, subnet and network
* **Test assertion 11:** The security group, gateway, interface, router, subnet and network are
no longer present in the lists after deleting
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates basic NFVi capabilities of the system under test.
diff --git a/docs/testing/user/userguide/api_testing_guide.rst b/docs/testing/user/userguide/api_testing_guide.rst
index edcb4329..119beff7 100644
--- a/docs/testing/user/userguide/api_testing_guide.rst
+++ b/docs/testing/user/userguide/api_testing_guide.rst
@@ -29,9 +29,8 @@ Environment Preparation
Install Docker
^^^^^^^^^^^^^^
-The main prerequisite software for Dovetail is Docker. Please refer to `official
-Docker installation guide <https://docs.docker.com/install/>`_ which is relevant
-to your Test Host's operating system.
+The main prerequisite software for Dovetail is Docker. Please refer to the official
+Docker installation guide relevant to your Test Host's operating system.
Configuring the Test Host Environment
@@ -58,12 +57,12 @@ determine the right tag for OVP testing.
Downloading Dovetail Docker Image
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-The first version of Dovetail API is ovp-3.x.x.
+The first version of the Dovetail API is ovp-3.0.0.
.. code-block:: bash
- $ sudo docker pull opnfv/dovetail:latest
- latest: Pulling from opnfv/dovetail
+ $ sudo docker pull opnfv/dovetail:ovp-3.0.0
+ ovp-3.0.0: Pulling from opnfv/dovetail
6abc03819f3e: Pull complete
05731e63f211: Pull complete
0bd67c50d6be: Pull complete
@@ -72,7 +71,7 @@ The first version of Dovetail API is ovp-3.x.x.
77d9a9603ec6: Pull complete
9463cdd9c628: Pull complete
Digest: sha256:45e2ffdbe217a4e6723536afb5b6a3785d318deff535da275f34cf8393af458d
- Status: Downloaded newer image for opnfv/dovetail:latest
+ Status: Downloaded newer image for opnfv/dovetail:ovp-3.0.0
Deploying Dovetail API
@@ -84,7 +83,7 @@ image downloaded before.
.. code-block:: bash
$ docker run -itd -p <swagger_port>:80 -p <api_port>:5000 --privileged=true \
- -e SWAGGER_HOST=<host_ip>:<swagger_port> -e DOVETAIL_HOME=/home/ovp \
+ -e SWAGGER_HOST=<host_ip>:<api_port> -e DOVETAIL_HOME=/home/ovp \
-v /home/ovp:/home/ovp -v /var/run/docker.sock:/var/run/docker.sock \
opnfv/dovetail:<version>
@@ -109,6 +108,266 @@ Swagger UI Webpage
^^^^^^^^^^^^^^^^^^
After deploying Dovetail container, the Swagger UI webpage can be accessed with
-any browser. The url is `http://localhost:<swagger_port>/dovetail-api/index.html`
+any browser. The url is ``http://localhost:<swagger_port>/dovetail-api/index.html``
if accessing from the same host as deploying this container. Otherwise, the url
-is `http://<host_ip>:<swagger_port>/dovetail-api/index.html`.
+is ``http://<host_ip>:<swagger_port>/dovetail-api/index.html``.
+
+
+Calling APIs
+^^^^^^^^^^^^
+
+Dovetail provides five APIs in total:
+
+ * Get all test suites
+
+ * Get all test cases
+
+ * Run test cases
+
+ * Run test cases with execution ID
+
+ * Get status of test cases
+
+Below is a short guide on how to call these APIs. For more detailed information,
+please refer to the Swagger UI page.
+
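+As a quick orientation, the examples in the following sections include minimal
+``curl`` sketches. They assume the container was started as described in
+`Deploying Dovetail API`_ and is therefore reachable at ``<host_ip>:<api_port>``;
+any other HTTP client can be used in the same way. For instance, the request URL
+documented in `Getting All Test Suites`_ below can be called as:
+
+.. code-block:: bash
+
+   $ curl -s http://<host_ip>:<api_port>/api/v1/scenario/nfvi/testsuites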
+
+Getting All Test Suites
+=======================
+
+ * This is a **GET** function with no parameter, used to get all test suites defined
+   in the Dovetail container.
+
+ * The request URL is ``http://<host_ip>:<api_port>/api/v1/scenario/nfvi/testsuites``.
+
+ * The response body is structured as:
+
+ .. code-block:: bash
+
+ {
+ "testsuites": {
+ "debug": {
+ "name": "debug",
+ "testcases_list": {
+ "optional": [
+ "functest.vping.userdata"
+ ]
+ }
+ },
+ "healthcheck": {
+ "name": "healthcheck",
+ "testcases_list": {
+ "optional": [
+ "functest.healthcheck.connection_check"
+ ]
+ }
+ }
+ }
+ }
+
+
+Getting All Test Cases
+======================
+
+ * This is a **GET** function with no parameter, used to get all test cases integrated
+   in the Dovetail container.
+
+ * The request URL is ``http://<host_ip>:<api_port>/api/v1/scenario/nfvi/testcases``.
+
+ * The response body is structured as:
+
+ .. code-block:: bash
+
+ {
+ "testcases": [
+ {
+ "description": "This test case will verify the high availability of the user service provided by OpenStack (keystone) on control node.",
+ "scenario": "nfvi",
+ "subTestCase": null,
+ "testCaseName": "yardstick.ha.keystone"
+ },
+ {
+ "description": "testing for vping using userdata",
+ "scenario": "nfvi",
+ "subTestCase": null,
+ "testCaseName": "functest.vping.userdata"
+ },
+ {
+ "description": "tempest smoke test cases about volume",
+ "scenario": "nfvi",
+ "subTestCase": [
+ "tempest.api.volume.test_volumes_actions.VolumesActionsTest.test_attach_detach_volume_to_instance[compute,id-fff42874-7db5-4487-a8e1-ddda5fb5288d,smoke]",
+ "tempest.scenario.test_volume_boot_pattern.TestVolumeBootPattern.test_volume_boot_pattern[compute,id-557cd2c2-4eb8-4dce-98be-f86765ff311b,image,slow,volume]"
+ ],
+ "testCaseName": "functest.tempest.volume"
+ }
+ ]
+ }
+
+
+Running Test Cases
+==================
+
+ * This is a **POST** function with some parameters to run a subset of the whole test cases.
+
+ * The request URL is ``http://<host_ip>:<api_port>/api/v1/scenario/nfvi/execution``.
+
+ * The request body is structured as follows. The ``conf`` section is used to
+   provide all configuration items that are required to run test cases. They are
+   the same as the configuration files provided under ``$DOVETAIL_HOME/pre_config/``.
+   If you already have these files under this directory, the whole ``conf`` section
+   can be omitted. If you provide these configuration items in the request body,
+   the corresponding files under ``$DOVETAIL_HOME/pre_config/`` will be ignored
+   by Dovetail. The ``testcase``, ``testsuite``, ``testarea`` and ``deploy_scenario``
+   keys correspond to the ``--testcase``, ``--testsuite``, ``--testarea`` and
+   ``--deploy-scenario`` options of the Dovetail CLI. The ``options`` section supports
+   all options that have already been implemented by the Dovetail CLI, including
+   ``--optional``, ``--mandatory``, ``--no-clean``, ``--no-api-validation``,
+   ``--offline``, ``--report``, ``--stop`` and ``--debug``. Options listed in the
+   ``options`` section are set to ``True``; all others are set to ``False``. A sample
+   ``curl`` invocation is sketched after the response body below.
+
+ .. code-block:: bash
+
+ {
+ "conf": {
+ "vm_images": "/home/ovp/images",
+ "pods": {
+ "nodes": [
+ {
+ "name": "node1",
+ "role": "Controller",
+ "ip": "192.168.117.222",
+ "user": "root",
+              "password": "root"
+ }
+ ],
+ "process_info": [
+ {
+ "testcase_name": "yardstick.ha.rabbitmq",
+ "attack_host": "node1",
+ "attack_process": "rabbitmq"
+ }
+ ]
+ },
+ "tempest_conf": {
+ "compute": {
+ "min_compute_nodes": "2",
+ "volume_device_name": "vdb",
+ "max_microversion": "2.65"
+ }
+ },
+ "hosts": {
+ "192.168.141.101": [
+ "volume.os.com",
+ "compute.os.com"
+ ]
+ },
+ "envs": {
+ "OS_USERNAME": "admin",
+ "OS_PASSWORD": "admin",
+ "OS_AUTH_URL": "https://192.168.117.222:5000/v3",
+ "EXTERNAL_NETWORK": "ext-net"
+ }
+ },
+ "testcase": [
+ "functest.vping.ssh",
+ "yardstick.ha.rabbitmq"
+ ],
+ "testsuite": "ovp.2019.12",
+ "testarea": [
+ "vping",
+ "ha"
+ ],
+ "deploy_scenario": "os-nosdn-ovs-ha",
+ "options": [
+ "debug",
+ "report"
+ ]
+ }
+
+
+ * The response body is structured as:
+
+ .. code-block:: bash
+
+ {
+ "result": [
+ {
+ "endTime": null,
+ "executionId": "a65e24c0-1803-11ea-84f4-0242ac110004",
+ "results": null,
+ "scenario": "nfvi",
+ "status": "IN_PROGRESS",
+ "testCaseName": "functest.vping.ssh",
+ "testSuiteName": "ovp.2019.12",
+ "timestart": null
+ }
+ ]
+ }
+
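+The following is a minimal sketch of how such a request could be submitted with
+``curl``, assuming the request body shown above has been saved to a local file
+named ``request.json`` (the file name is only an example):
+
+.. code-block:: bash
+
+   $ curl -s -X POST http://<host_ip>:<api_port>/api/v1/scenario/nfvi/execution \
+          -H 'Content-Type: application/json' \
+          -d @request.json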
+
+Running Test Cases with Execution ID
+====================================
+
+ * This is a **POST** function with some parameters to run a subset of the
+   whole test cases and set the execution ID instead of using a random one.
+
+ * The request URL is ``http://<host_ip>:<api_port>/api/v1/scenario/nfvi/execution/{exec_id}``.
+
+ * It is almost the same as the `Running Test Cases`_ API above, except that the
+   execution ID is provided by the caller. A sketch is shown below.
+
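+A minimal sketch, assuming the caller generates the execution ID (for example a
+UUID created with ``uuidgen``) and reuses the ``request.json`` file from the
+previous section:
+
+.. code-block:: bash
+
+   $ EXEC_ID=$(uuidgen)
+   $ curl -s -X POST http://<host_ip>:<api_port>/api/v1/scenario/nfvi/execution/$EXEC_ID \
+          -H 'Content-Type: application/json' \
+          -d @request.json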
+
+Getting Status of Test Cases
+============================
+
+ * This is a **POST** function to get the status of some test cases by using
+   the execution ID received in the response body of the `Running Test Cases`_ or
+   `Running Test Cases with Execution ID`_ APIs. A polling sketch is shown after
+   the response body below.
+
+ * The request URL is ``http://<host_ip>:<api_port>/api/v1/scenario/nfvi/execution/status/{exec_id}``.
+
+ * The request body is structured as:
+
+ .. code-block:: bash
+
+ {
+ "testcase": [
+ "functest.vping.ssh"
+ ]
+ }
+
+ * The response body is structured as:
+
+ .. code-block:: bash
+
+ {
+ "result": [
+ {
+ "endTime": "2019-12-06 08:39:23",
+ "executionId": "a65e24c0-1803-11ea-84f4-0242ac110004",
+ "results": {
+ "criteria": "PASS",
+ "sub_testcase": [],
+ "timestart": "2019-12-06 08:38:40",
+ "timestop":"2019-12-06 08:39:23"
+ },
+ "scenario": "nfvi",
+ "status": "COMPLETED",
+ "testCaseName": "functest.vping.ssh",
+ "testSuiteName": "ovp.2019.12",
+ "timestart":"2019-12-06 08:38:40"
+ }
+ ]
+ }
+
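+Since a test run takes a while, the status API can be polled. The loop below is
+only a sketch; it reuses the execution ID and test case name from the examples
+above and checks for the ``COMPLETED`` status shown in the sample response:
+
+.. code-block:: bash
+
+   # Poll every 30 seconds until functest.vping.ssh reports COMPLETED.
+   until curl -s -X POST \
+       "http://<host_ip>:<api_port>/api/v1/scenario/nfvi/execution/status/$EXEC_ID" \
+       -H 'Content-Type: application/json' \
+       -d '{"testcase": ["functest.vping.ssh"]}' | grep -q '"COMPLETED"'
+   do
+       sleep 30
+   done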
+
+Getting Test Results
+^^^^^^^^^^^^^^^^^^^^
+
+Each time you call the API to run test cases, Dovetail creates a directory named
+after the execution ID under ``$DOVETAIL_HOME`` to store results on the host.
+You can find all result files under ``$DOVETAIL_HOME/<executionId>/results``.
+If you run test cases with the ``report`` option, there will also be a tarball file
+under ``$DOVETAIL_HOME/<executionId>`` which can be uploaded to the OVP portal.
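+For example (a sketch, using the execution ID from the sample responses above;
+the exact tarball name may differ):
+
+.. code-block:: bash
+
+   # All result files of one API run:
+   $ ls $DOVETAIL_HOME/a65e24c0-1803-11ea-84f4-0242ac110004/results
+   # The tarball created by the 'report' option:
+   $ ls $DOVETAIL_HOME/a65e24c0-1803-11ea-84f4-0242ac110004/*.tar.gz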
diff --git a/docs/testing/user/userguide/cli_reference.rst b/docs/testing/user/userguide/cli_reference.rst
index 532da0a5..7dd5c8e4 100644
--- a/docs/testing/user/userguide/cli_reference.rst
+++ b/docs/testing/user/userguide/cli_reference.rst
@@ -135,7 +135,7 @@ Dovetail List Commands
.. code-block:: bash
- root@1f230e719e44:~/dovetail/dovetail# dovetail list ovp.2019.0x
+ root@1f230e719e44:~/dovetail/dovetail# dovetail list ovp.2019.12
- mandatory
functest.vping.userdata
functest.vping.ssh
@@ -166,6 +166,7 @@ Dovetail List Commands
functest.tempest.vm_lifecycle
functest.tempest.network_scenario
functest.tempest.bgpvpn
+ functest.security.patrole_vxlan_dependent
yardstick.ha.neutron_l3_agent
yardstick.ha.controller_restart
functest.vnf.vims
@@ -194,12 +195,15 @@ Dovetail Show Commands
validate:
type: functest
testcase: vping_ssh
+ image_name: opnfv/functest-healthcheck
report:
source_archive_files:
- functest.log
dest_archive_files:
- vping_logs/functest.vping.ssh.log
- check_results_file: 'functest_results.txt'
+ check_results_file:
+ - 'functest_results.txt'
+ portal_key_file: vping_logs/functest.vping.ssh.log
sub_testcase_list:
.. code-block:: bash
@@ -214,20 +218,20 @@ Dovetail Show Commands
testcase: tempest_custom
pre_condition:
- 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
- - 'cp /home/opnfv/userconfig/pre_config/testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- pre_copy:
- src_file: tempest_custom.txt
- dest_path: /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt
+ - 'cp /home/opnfv/userconfig/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ - 'cp /home/opnfv/functest/results/tempest_custom.txt /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt'
report:
source_archive_files:
- functest.log
- - tempest_custom/tempest.log
+ - tempest_custom/rally.log
- tempest_custom/tempest-report.html
dest_archive_files:
- tempest_logs/functest.tempest.image.functest.log
- tempest_logs/functest.tempest.image.log
- tempest_logs/functest.tempest.image.html
- check_results_file: 'functest_results.txt'
+ check_results_file:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.image.html
sub_testcase_list:
- tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_register_upload_get_image_file[id-139b765e-7f3d-4b3d-8b37-3ca3876ee318,smoke]
- tempest.api.image.v2.test_versions.VersionsTest.test_list_versions[id-659ea30a-a17c-4317-832c-0f68ed23c31d,smoke]
@@ -243,41 +247,43 @@ Dovetail Run Commands
Dovetail compliance test entry!
Options:
- --deploy-scenario TEXT Specify the DEPLOY_SCENARIO which will be used as input by each testcase respectively
+ --opnfv-ci Only enabled when running with OPNFV CI jobs and pushing results to TestAPI DB
--optional Run all optional test cases.
+ --mandatory Run all mandatory test cases.
+ --deploy-scenario TEXT Specify the DEPLOY_SCENARIO which will be used as input by each testcase respectively
+ -n, --no-clean Keep all Containers created for debuging.
+ --no-api-validation disable strict API response validation
--offline run in offline method, which means not to update the docker upstream images, functest, yardstick, etc.
-r, --report Create a tarball file to upload to OVP web portal
+ -s, --stop Flag for stopping on test case failure.
-d, --debug Flag for showing debug log on screen.
--testcase TEXT Compliance testcase. Specify option multiple times to include multiple test cases.
--testarea TEXT Compliance testarea within testsuite. Specify option multiple times to include multiple test areas.
- -s, --stop Flag for stopping on test case failure.
- -n, --no-clean Keep all Containers created for debuging.
- --no-api-validation disable strict API response validation
- --mandatory Run all mandatory test cases.
--testsuite TEXT compliance testsuite.
-h, --help Show this message and exit.
.. code-block:: bash
root@1f230e719e44:~/dovetail/dovetail# dovetail run --testcase functest.vping.ssh --offline -r --deploy-scenario os-nosdn-ovs-ha
- 2017-10-12 14:57:51,278 - run - INFO - ================================================
- 2017-10-12 14:57:51,278 - run - INFO - Dovetail compliance: ovp.2019.0x!
- 2017-10-12 14:57:51,278 - run - INFO - ================================================
- 2017-10-12 14:57:51,278 - run - INFO - Build tag: daily-master-b80bca76-af5d-11e7-879a-0242ac110002
- 2017-10-12 14:57:51,278 - run - INFO - DEPLOY_SCENARIO : os-nosdn-ovs-ha
- 2017-10-12 14:57:51,336 - run - WARNING - There is no hosts file /home/dovetail/pre_config/hosts.yaml, may be some issues with domain name resolution.
- 2017-10-12 14:57:51,336 - run - INFO - Get hardware info of all nodes list in file /home/cvp/pre_config/pod.yaml ...
- 2017-10-12 14:57:51,336 - run - INFO - Hardware info of all nodes are stored in file /home/cvp/results/all_hosts_info.json.
- 2017-10-12 14:57:51,517 - run - INFO - >>[testcase]: functest.vping.ssh
- 2017-10-12 14:58:21,325 - report.Report - INFO - Results have been stored with file /home/cvp/results/functest_results.txt.
- 2017-10-12 14:58:21,325 - report.Report - INFO -
+ 2019-12-06 02:51:52,634 - run - INFO - ================================================
+ 2019-12-06 02:51:52,634 - run - INFO - Dovetail compliance: ovp.2019.12!
+ 2019-12-06 02:51:52,634 - run - INFO - ================================================
+ 2019-12-06 02:51:52,634 - run - INFO - Build tag: daily-master-5b58584a-17d3-11ea-878a-0242ac110002
+ 2019-12-06 02:51:52,634 - run - INFO - DEPLOY_SCENARIO : os-nosdn-ovs-ha
+ 2019-12-06 02:51:53,077 - run - INFO - >>[testcase]: functest.vping.ssh
+ 2019-12-06 02:51:53,078 - dovetail.test_runner.DockerRunner - WARNING - There is no hosts file /home/ovp/pre_config/hosts.yaml. This may cause some issues with domain name resolution.
+ 2019-12-06 02:51:54,048 - dovetail.test_runner.DockerRunner - INFO - Get hardware info of all nodes list in file /home/ovp/pre_config/pod.yaml ...
+ 2019-12-06 02:51:54,049 - dovetail.test_runner.DockerRunner - INFO - Hardware info of all nodes are stored in file /home/dovetail/results/all_hosts_info.json.
+ 2019-12-06 02:51:54,073 - dovetail.container.Container - WARNING - There is no hosts file /home/ovp/pre_config/hosts.yaml. This may cause some issues with domain name resolution.
+ 2019-12-06 02:52:57,982 - dovetail.report.Report - INFO - Results have been stored with files: ['/home/ovp/results/functest_results.txt'].
+ 2019-12-06 02:52:57,986 - dovetail.report.Report - INFO -
Dovetail Report
- Version: 2018.09
- Build Tag: daily-master-b80bca76-af5d-11e7-879a-0242ac110002
- Test Date: 2018-08-13 03:23:56 UTC
- Duration: 291.92 s
+ Version: 2019.12
+ Build Tag: daily-master-5b58584a-17d3-11ea-878a-0242ac110002
+ Test Date: 2019-12-06 02:52:57 UTC
+ Duration: 64.91 s
- Pass Rate: 0.00% (1/1)
- vping: pass rate 100%
+ Pass Rate: 100.00% (1/1)
+ vping: pass rate 100.00%
-functest.vping.ssh PASS
diff --git a/docs/testing/user/userguide/images/tocsa_vnf_test_environment.png b/docs/testing/user/userguide/images/tocsa_vnf_test_environment.png
new file mode 100644
index 00000000..78b3f74a
--- /dev/null
+++ b/docs/testing/user/userguide/images/tocsa_vnf_test_environment.png
Binary files differ
diff --git a/docs/testing/user/userguide/images/tosca_vnf_test_flow.png b/docs/testing/user/userguide/images/tosca_vnf_test_flow.png
new file mode 100644
index 00000000..87dc8ec4
--- /dev/null
+++ b/docs/testing/user/userguide/images/tosca_vnf_test_flow.png
Binary files differ
diff --git a/docs/testing/user/userguide/index.rst b/docs/testing/user/userguide/index.rst
index f35e2927..98ca56e0 100644
--- a/docs/testing/user/userguide/index.rst
+++ b/docs/testing/user/userguide/index.rst
@@ -13,5 +13,5 @@ OVP Testing User Guide
testing_guide.rst
cli_reference.rst
- vnf_test_guide.rst
api_testing_guide.rst
+ vnf_test_guide.rst
diff --git a/docs/testing/user/userguide/testing_guide.rst b/docs/testing/user/userguide/testing_guide.rst
index 3f866c7d..d1c31683 100644
--- a/docs/testing/user/userguide/testing_guide.rst
+++ b/docs/testing/user/userguide/testing_guide.rst
@@ -2,13 +2,16 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) OPNFV, Huawei Technologies Co.,Ltd and others.
-====================================
-Conducting OVP Testing with Dovetail
-====================================
+=========================================
+Conducting OVP NFVI Testing with Dovetail
+=========================================
Overview
--------
+This guide provides the instructions for OVP infrastructure testing. For
+OVP VNF testing, please refer to the next section.
+
The Dovetail testing framework for OVP consists of two major parts: the testing client which
executes all test cases in a lab (vendor self-testing or a third party lab),
and the server system which is hosted by the OVP administrator to store and
@@ -160,8 +163,8 @@ results files:
$ mkdir -p ${HOME}/dovetail
$ export DOVETAIL_HOME=${HOME}/dovetail
-For example, Here we set dovetail home directory to be ``${HOME}/dovetail``.
-Then create two directories named ``pre_config`` and ``images`` inside this directory
+For example, here we set dovetail home directory to be ``${HOME}/dovetail``.
+Then create two directories named ``pre_config`` and ``images`` under this directory
to store all Dovetail related config files and all test images respectively:
.. code-block:: bash
@@ -275,7 +278,12 @@ Here is an example of what this file should contain.
# Expected device name when a volume is attached to an instance.
volume_device_name: vdb
-Use the listing above as a minimum to execute the mandatory test areas.
+ # One sub test case of functest.tempest.osinterop will be skipped if this version is not provided.
+ # The default microversion range for tempest is [None - None].
+ # Test case functest.tempest.osinterop requires the range to be [2.2 - latest].
+ max_microversion: 2.65
+
+Use the listing above as a minimum to execute the mandatory test cases.
If the optional BGPVPN Tempest API tests shall be run, Tempest needs to be told
that the BGPVPN service is available. To do that, add the following to the
@@ -309,10 +317,12 @@ Below is a sample with the required syntax when password is employed by the cont
nodes:
-
- # This can not be changed and must be node0.
+ # This info of node0 is used only by one optional test case 'yardstick.ha.controller_restart'.
+ # If you don't plan to run it, this Jumpserver node can be ignored.
+ # This can not be changed and **must** be node0.
name: node0
- # This must be Jumpserver.
+ # This **must** be Jumpserver.
role: Jumpserver
# This is the instance IP of a node which has ipmitool installed.
@@ -325,10 +335,13 @@ Below is a sample with the required syntax when password is employed by the cont
password: root
-
- # This can not be changed and must be node1.
+ # Almost all HA test cases try to log in to a controller node named 'node1'
+ # and then kill some processes running on it.
+ # If you don't want to reset the attack node name for each test case, this
+ # name can not be changed and **must** be node1.
name: node1
- # This must be controller.
+ # This **must** be controller.
role: Controller
# This is the instance IP of a controller node, which is the haproxy primary node
@@ -342,6 +355,13 @@ Below is a sample with the required syntax when password is employed by the cont
process_info:
-
+ # For all HA test cases, there are 2 parameters, 'attack_process' and 'attack_host',
+ # which can be set by users instead of using the default values.
+ # The 'attack_process' is the name of the process which the HA test case tries to kill.
+ # The 'attack_host' is the name of the host which the test case tries to log in to,
+ # killing the process running on it.
+ # Following are 2 samples.
+
# The default attack process of yardstick.ha.rabbitmq is 'rabbitmq-server'.
# Here can be reset to 'rabbitmq'.
testcase_name: yardstick.ha.rabbitmq
@@ -368,7 +388,7 @@ A sample is provided below to show the required syntax when using a key file.
# Private ssh key for accessing the controller nodes. If a keyfile is
# being used instead of password, it **must** be put under
- # ``$DOVETAIL_HOME/pre_config/.
+ # $DOVETAIL_HOME/pre_config/ and named 'id_rsa'.
key_filename: /home/dovetail/pre_config/id_rsa
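+
+For example, a minimal sketch for putting the key file in place (the source
+path of the key is hypothetical):
+
+.. code-block:: bash
+
+   $ cp /path/to/your/private/key ${DOVETAIL_HOME}/pre_config/id_rsa
+   $ chmod 600 ${DOVETAIL_HOME}/pre_config/id_rsa
+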
Under nodes, repeat entries for name, role, ip, user and password or key file for each of the
@@ -438,20 +458,26 @@ OPNFV's OVP web page first to determine the right tag for OVP testing.
Online Test Host
""""""""""""""""
-If the Test Host is online, you can directly pull Dovetail Docker image and download Ubuntu
-and Cirros images. All other dependent docker images will automatically be downloaded. The
-Ubuntu and Cirros images are used by Dovetail for image creation and VM instantiation within
-the SUT.
+If the Test Host is online, you can directly pull the Dovetail Docker image; all
+other dependent docker images will then be downloaded automatically. You can also
+download the related VM images, such as the Ubuntu and Cirros images, which are used
+by Dovetail for image creation and VM instantiation within the SUT.
+
+The download URL for each VM image is given below. Cirros-0.4.0 and Ubuntu-16.04
+are used by mandatory test cases, so they are the only 2 images that **must** be
+downloaded before doing the test. There are also 2 other optional VM images, Ubuntu-14.04
+and Cloudify-manager, which are used by the optional test cases functest.vnf.vepc and
+functest.vnf.vims. If you don't plan to run these 2 test cases, you can skip downloading them.
.. code-block:: bash
$ wget -nc http://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img -P ${DOVETAIL_HOME}/images
- $ wget -nc https://cloud-images.ubuntu.com/releases/14.04/release/ubuntu-14.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_HOME}/images
$ wget -nc https://cloud-images.ubuntu.com/releases/16.04/release/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_HOME}/images
- $ wget -nc http://repository.cloudifysource.org/cloudify/4.0.1/sp-release/cloudify-manager-premium-4.0.1.qcow2 -P ${DOVETAIL_HOME}/images
+ $ wget -nc https://cloud-images.ubuntu.com/releases/14.04/release/ubuntu-14.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_HOME}/images
+ $ wget -nc http://repository.cloudifysource.org/cloudify/19.01.24/community-release/cloudify-docker-manager-community-19.01.24.tar -P ${DOVETAIL_HOME}/images
- $ sudo docker pull opnfv/dovetail:ovp-2.2.0
- ovp-2.2.0: Pulling from opnfv/dovetail
+ $ sudo docker pull opnfv/dovetail:ovp-3.0.0
+ ovp-3.0.0: Pulling from opnfv/dovetail
324d088ce065: Pull complete
2ab951b6c615: Pull complete
9b01635313e2: Pull complete
@@ -463,7 +489,7 @@ the SUT.
0ad9f4168266: Pull complete
d949894f87f6: Pull complete
Digest: sha256:7449601108ebc5c40f76a5cd9065ca5e18053be643a0eeac778f537719336c29
- Status: Downloaded newer image for opnfv/dovetail:ovp-2.2.0
+ Status: Downloaded newer image for opnfv/dovetail:ovp-3.0.0
Offline Test Host
"""""""""""""""""
@@ -474,26 +500,25 @@ to pull all dependent images is because Dovetail normally does dependency checki
and automatically pulls images as needed, if the Test Host is online. If the Test Host is
offline, then all these dependencies will need to be manually copied.
-The Docker images and Cirros image below are necessary for all mandatory test cases.
+The Docker images and the Ubuntu and Cirros images below are necessary for all mandatory test cases.
.. code-block:: bash
- $ sudo docker pull opnfv/dovetail:ovp-2.2.0
- $ sudo docker pull opnfv/functest-smoke:opnfv-6.3.0
- $ sudo docker pull opnfv/yardstick:ovp-2.0.0
- $ sudo docker pull opnfv/bottlenecks:ovp-2.0.0
+ $ sudo docker pull opnfv/dovetail:ovp-3.0.0
+ $ sudo docker pull opnfv/functest-smoke:hunter
+ $ sudo docker pull opnfv/functest-healthcheck:hunter
+ $ sudo docker pull opnfv/yardstick:opnfv-8.0.0
+ $ sudo docker pull opnfv/bottlenecks:8.0.1-latest
$ wget -nc http://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img -P {ANY_DIR}
+ $ wget -nc https://cloud-images.ubuntu.com/releases/16.04/release/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_HOME}/images
The other Docker images and test images below are only used by optional test cases.
.. code-block:: bash
- $ sudo docker pull opnfv/functest-healthcheck:opnfv-6.3.0
- $ sudo docker pull opnfv/functest-features:opnfv-6.3.0
- $ sudo docker pull opnfv/functest-vnf:opnfv-6.3.0
+ $ sudo docker pull opnfv/functest-vnf:hunter
$ wget -nc https://cloud-images.ubuntu.com/releases/14.04/release/ubuntu-14.04-server-cloudimg-amd64-disk1.img -P {ANY_DIR}
- $ wget -nc https://cloud-images.ubuntu.com/releases/16.04/release/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P {ANY_DIR}
- $ wget -nc http://repository.cloudifysource.org/cloudify/4.0.1/sp-release/cloudify-manager-premium-4.0.1.qcow2 -P {ANY_DIR}
+ $ wget -nc http://repository.cloudifysource.org/cloudify/19.01.24/community-release/cloudify-docker-manager-community-19.01.24.tar -P ${DOVETAIL_HOME}/images
Once all these images are pulled, save the images, copy them to the Test Host, and then load
the Dovetail image and all dependent images at the Test Host.
@@ -502,10 +527,10 @@ At the online host, save the images with the command below.
.. code-block:: bash
- $ sudo docker save -o dovetail.tar opnfv/dovetail:ovp-2.2.0 \
- opnfv/functest-smoke:opnfv-6.3.0 opnfv/functest-healthcheck:opnfv-6.3.0 \
- opnfv/functest-features:opnfv-6.3.0 opnfv/functest-vnf:opnfv-6.3.0 \
- opnfv/yardstick:ovp-2.0.0 opnfv/bottlenecks:ovp-2.0.0
+ $ sudo docker save -o dovetail.tar opnfv/dovetail:ovp-3.0.0 \
+ opnfv/functest-smoke:hunter opnfv/functest-healthcheck:hunter \
+ opnfv/functest-vnf:hunter \
+ opnfv/yardstick:opnfv-8.0.0 opnfv/bottlenecks:8.0.1-latest
The command above creates a dovetail.tar file with all the images, which can then be copied
to the Test Host. To load the Dovetail images on the Test Host execute the command below.
@@ -519,14 +544,13 @@ Now check to see that all Docker images have been pulled or loaded properly.
.. code-block:: bash
$ sudo docker images
- REPOSITORY TAG IMAGE ID CREATED SIZE
- opnfv/dovetail ovp-2.2.0 ac3b2d12b1b0 24 hours ago 784 MB
- opnfv/functest-smoke opnfv-6.3.0 010aacb7c1ee 17 hours ago 594.2 MB
- opnfv/functest-healthcheck opnfv-6.3.0 2cfd4523f797 17 hours ago 234 MB
- opnfv/functest-features opnfv-6.3.0 b61d4abd56fd 17 hours ago 530.5 MB
- opnfv/functest-vnf opnfv-6.3.0 929e847a22c3 17 hours ago 1.87 GB
- opnfv/yardstick ovp-2.0.0 84b4edebfc44 17 hours ago 2.052 GB
- opnfv/bottlenecks ovp-2.0.0 3d4ed98a6c9a 21 hours ago 638 MB
+ REPOSITORY TAG IMAGE ID CREATED SIZE
+ opnfv/dovetail ovp-3.0.0 4b68659da24d 22 hours ago 825MB
+ opnfv/functest-smoke hunter c0253f6de153 3 weeks ago 556MB
+ opnfv/functest-healthcheck hunter fb6d766e38e0 3 weeks ago 379MB
+ opnfv/functest-vnf hunter 31466d52d155 21 hours ago 1.1GB
+ opnfv/yardstick opnfv-8.0.0 189d7d9fbcb2 7 months ago 2.54GB
+ opnfv/bottlenecks 8.0.1-latest 44c1b9fb25aa 5 hours ago 837MB
After copying and loading the Dovetail images at the Test Host, also copy the test images
(Ubuntu, Cirros and cloudify-manager) to the Test Host.
@@ -534,7 +558,7 @@ After copying and loading the Dovetail images at the Test Host, also copy the te
- Copy image ``cirros-0.4.0-x86_64-disk.img`` to ``${DOVETAIL_HOME}/images/``.
- Copy image ``ubuntu-14.04-server-cloudimg-amd64-disk1.img`` to ``${DOVETAIL_HOME}/images/``.
- Copy image ``ubuntu-16.04-server-cloudimg-amd64-disk1.img`` to ``${DOVETAIL_HOME}/images/``.
-- Copy image ``cloudify-manager-premium-4.0.1.qcow2`` to ``${DOVETAIL_HOME}/images/``.
+- Copy image ``cloudify-docker-manager-community-19.01.24.tar`` to ``${DOVETAIL_HOME}/images/``.
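+
+For example, a minimal sketch using ``scp``, assuming the images sit in the
+current directory of the online host and the Test Host is reachable as
+``testhost`` (a hypothetical hostname):
+
+.. code-block:: bash
+
+   $ scp cirros-0.4.0-x86_64-disk.img \
+         ubuntu-16.04-server-cloudimg-amd64-disk1.img \
+         testhost:<DOVETAIL_HOME-on-Test-Host>/images/
+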
Starting Dovetail Docker
------------------------
@@ -571,10 +595,10 @@ for the details of the CLI.
$ dovetail run --testsuite <test-suite-name>
-The '--testsuite' option is used to control the set of tests intended for execution
+The ``--testsuite`` option is used to control the set of tests intended for execution
at a high level. For the purposes of running the OVP test suite, the test suite name follows
-the following format, ``ovp.<major>.<minor>.<patch>``. The latest and default test suite is
-ovp.2019.0x.
+the following format, ``ovp.<release-version>``. The latest and default test suite is
+ovp.2019.12.
.. code-block:: bash
@@ -584,18 +608,18 @@ This command is equal to
.. code-block:: bash
- $ dovetail run --testsuite ovp.2019.0x
+ $ dovetail run --testsuite ovp.2019.12
Without any additional options, the above command will attempt to execute all mandatory and
-optional test cases with test suite ovp.2019.0x.
+optional test cases with test suite ovp.2019.12.
To restrict the breadth of the test scope, it can also be specified using options
-'--mandatory' or '--optional'.
+``--mandatory`` or ``--optional``.
.. code-block:: bash
$ dovetail run --mandatory
-Also there is a '--testcase' option provided to run a specified test case.
+Also there is a ``--testcase`` option provided to run a specified test case.
.. code-block:: bash
@@ -628,7 +652,7 @@ the DEPLOY_SCENARIO environment parameter having as substring the feature name "
(e.g. os-nosdn-ovs-ha).
Note for the users:
- - if their system uses DPDK, they should run with --deploy-scenario <xx-yy-ovs-zz>
+ - if their system uses DPDK, they should run with ``--deploy-scenario <xx-yy-ovs-zz>``
(e.g. os-nosdn-ovs-ha)
- this is an experimental feature
@@ -639,14 +663,14 @@ Note for the users:
By default, results are stored in local files on the Test Host at ``$DOVETAIL_HOME/results``.
Each time the 'dovetail run' command is executed, the results in the aforementioned directory
are overwritten. To create a singular compressed result file for upload to the OVP portal or
-for archival purposes, the tool provides an option '--report'.
+for archival purposes, the tool provides an option ``--report``.
.. code-block:: bash
$ dovetail run --report
If the Test Host is offline, ``--offline`` should be added to support running with
-local resources.
+local resources. Otherwise, it will try to download resources online at runtime.
.. code-block:: bash
@@ -658,22 +682,23 @@ result file on the Test Host.
.. code-block:: bash
$ dovetail run --offline --testcase functest.vping.userdata --report
- 2018-05-22 08:16:16,353 - run - INFO - ================================================
- 2018-05-22 08:16:16,353 - run - INFO - Dovetail compliance: ovp.2019.0x!
- 2018-05-22 08:16:16,353 - run - INFO - ================================================
- 2018-05-22 08:16:16,353 - run - INFO - Build tag: daily-master-660de986-5d98-11e8-b635-0242ac110001
- 2018-05-22 08:19:31,595 - run - WARNING - There is no hosts file /home/dovetail/pre_config/hosts.yaml, may be some issues with domain name resolution.
- 2018-05-22 08:19:31,595 - run - INFO - Get hardware info of all nodes list in file /home/dovetail/pre_config/pod.yaml ...
- 2018-05-22 08:19:39,778 - run - INFO - Hardware info of all nodes are stored in file /home/dovetail/results/all_hosts_info.json.
- 2018-05-22 08:19:39,961 - run - INFO - >>[testcase]: functest.vping.userdata
- 2018-05-22 08:31:17,961 - run - INFO - Results have been stored with file /home/dovetail/results/functest_results.txt.
- 2018-05-22 08:31:17,969 - report.Report - INFO -
+ 2019-12-04 07:31:13,156 - run - INFO - ================================================
+ 2019-12-04 07:31:13,157 - run - INFO - Dovetail compliance: ovp.2019.12!
+ 2019-12-04 07:31:13,157 - run - INFO - ================================================
+ 2019-12-04 07:31:13,157 - run - INFO - Build tag: daily-master-0c9184e6-1668-11ea-b1cd-0242ac110002
+ 2019-12-04 07:31:13,610 - run - INFO - >>[testcase]: functest.vping.userdata
+ 2019-12-04 07:31:13,612 - dovetail.test_runner.DockerRunner - WARNING - There is no hosts file /home/ovp/pre_config/hosts.yaml. This may cause some issues with domain name resolution.
+ 2019-12-04 07:31:14,587 - dovetail.test_runner.DockerRunner - INFO - Get hardware info of all nodes list in file /home/ovp/pre_config/pod.yaml ...
+ 2019-12-04 07:31:14,587 - dovetail.test_runner.DockerRunner - INFO - Hardware info of all nodes are stored in file /home/dovetail/results/all_hosts_info.json.
+ 2019-12-04 07:31:14,612 - dovetail.container.Container - WARNING - There is no hosts file /home/ovp/pre_config/hosts.yaml. This may cause some issues with domain name resolution.
+ 2019-12-04 07:32:13,804 - dovetail.report.Report - INFO - Results have been stored with files: ['/home/ovp/results/functest_results.txt'].
+ 2019-12-04 07:32:13,808 - dovetail.report.Report - INFO -
Dovetail Report
- Version: 1.0.0
- Build Tag: daily-master-660de986-5d98-11e8-b635-0242ac110001
- Upload Date: 2018-05-22 08:31:17 UTC
- Duration: 698.01 s
+ Version: 2019.12
+ Build Tag: daily-master-0c9184e6-1668-11ea-b1cd-0242ac110002
+ Test Date: 2019-12-04 07:32:13 UTC
+ Duration: 60.20 s
Pass Rate: 100.00% (1/1)
vping: pass rate 100.00%
@@ -682,28 +707,27 @@ result file on the Test Host.
When test execution is complete, a tar file with all result and log files is written in
``$DOVETAIL_HOME`` on the Test Host. An example filename is
-``${DOVETAIL_HOME}/logs_20180105_0858.tar.gz``. The file is named using a
-timestamp that follows the convention 'YearMonthDay-HourMinute'. In this case, it was generated
-at 08:58 on January 5th, 2018. This tar file is used for uploading the logs to the OVP portal.
+``${DOVETAIL_HOME}/logs_20191204_0732.tar.gz``. The file is named using a timestamp
+that follows the convention 'YearMonthDay_HourMinute'. In this case, it was generated
+at 07:32 on December 4th, 2019. This tar file is used for uploading the logs and
+results to the OVP portal.
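+
+To inspect the archive contents before uploading, a quick sketch using the
+example filename above:
+
+.. code-block:: bash
+
+   $ tar -tzf ${DOVETAIL_HOME}/logs_20191204_0732.tar.gz | head
+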
Making Sense of OVP Test Results
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
When a tester is performing trial runs, Dovetail stores results in local files on the Test
-Host by default within the directory specified below.
+Host by default within directory ``$DOVETAIL_HOME/results``.
+ * Log file: dovetail.log
-.. code-block:: bash
-
- cd $DOVETAIL_HOME/results
+ * Review the dovetail.log to see if all important information has been captured
-#. Local file
+ * By default, the log does not contain DEBUG information.
- * Log file: dovetail.log
+ * Add option ``-d/--debug`` to enable DEBUG mode.
- * Review the dovetail.log to see if all important information has been captured
- - in default mode without DEBUG.
+ * Result file: results.json
* Review the results.json to see all results data including criteria for PASS or FAIL.
@@ -713,11 +737,11 @@ Host by default within the directory specified below.
``security_logs/functest.security.XXX.html`` respectively,
which has the passed, skipped and failed test cases results.
- * This kind of files need to be opened with a web browser.
+ * These files need to be opened with a web browser.
- * The skipped test cases are accompanied with the reason tag for the users to see why these test cases skipped.
+ * The skipped test cases are accompanied by a reason tag so users can see why they were skipped.
- * The failed test cases have rich debug information for the users to see why these test cases failed.
+ * The failed test cases include rich debug information so users can see why they failed.
* Vping test cases
@@ -741,7 +765,7 @@ OVP Portal Web Interface
The OVP portal is a public web interface for the community to collaborate on results
and to submit results for official OPNFV compliance verification. The portal can be used as a
-resource by users and testers to navigate and inspect results more easily than by manually
+resource by users to navigate and inspect results more easily than by manually
inspecting the log files. The portal also allows users to share results in a private manner
until they are ready to submit results for peer community review.
@@ -751,11 +775,11 @@ until they are ready to submit results for peer community review.
* Sign In / Sign Up Links
- * Accounts are exposed through Linux Foundation or OpenStack account credentials.
+ * Accounts are exposed through Linux Foundation account credentials.
* If you already have a Linux Foundation ID, you can sign in directly with your ID.
- * If you do not have a Linux Foundation ID, you can sign up for a new one using 'Sign Up'
+ * If you do not have a Linux Foundation ID, you can sign up for a new one using 'Sign Up'.
* My Results Tab
@@ -763,20 +787,25 @@ until they are ready to submit results for peer community review.
* This page lists all results uploaded by you after signing in.
- * Following the two steps below, the results are status uploaded and in status 'private'.
+ * Following the two steps below, the results are uploaded and in status 'private'.
- * Obtain results tar file located at ``${DOVETAIL_HOME}/``, e.g. ``logs_20180105_0858.tar.gz``
+ * Obtain results tar file located at ``${DOVETAIL_HOME}/``, e.g. ``logs_20180105_0858.tar.gz``.
- * Use the *Choose File* button where a file selection dialog allows you to choose your result
- file from the hard-disk. Then click the *Upload* button and see a results ID once your
- upload succeeds.
+ * Use the *Choose File* button where a file selection dialog allows you to choose your result file from the hard-disk. Then click the *Upload result* button and see a results ID once your upload succeeds.
* Results are remaining in status 'private' until they are submitted for review.
- * Use the *Operation* column drop-down option 'submit to review', to expose results to
- OPNFV community peer reviewers. Use the 'withdraw submit' option to reverse this action.
+ * Use the *Operation* column drop-down option *submit to review*, to expose results to
+ OPNFV community peer reviewers. Use the *withdraw submit* option to reverse this action.
- * Use the *Operation* column drop-down option 'share with' to share results with other
+ * The result status changes to 'review' after submission for review.
+
+ * Use *View Reviews* to find the review status, including reviewers' names and the outcomes.
+
+ * The administrator will approve results which have received positive outcomes from 2 reviewers.
+ The status will then be changed to 'verified'.
+
+ * Use the *Operation* column drop-down option *share with* to share results with other
users by supplying either the login user ID or the email address associated with
the share target account. The result is exposed to the share target but remains private
otherwise.
@@ -785,6 +814,8 @@ until they are ready to submit results for peer community review.
* This page shows your account info after you sign in.
+ * There are 3 different roles: administrator, user and reviewer.
+
Updating Dovetail or a Test Suite
---------------------------------
@@ -796,5 +827,6 @@ Follow the instructions in section `Installing Dovetail on the Test Host`_ and
sudo docker pull opnfv/dovetail:<dovetail_new_tag>
sudo docker pull opnfv/functest:<functest_new_tag>
sudo docker pull opnfv/yardstick:<yardstick_new_tag>
+ sudo docker pull opnfv/bottlenecks:<bottlenecks_new_tag>
This step is necessary if dovetail software or the OVP test suite have updates.
diff --git a/docs/testing/user/userguide/vnf_test_guide.rst b/docs/testing/user/userguide/vnf_test_guide.rst
index cce94088..00c4e4ef 100644
--- a/docs/testing/user/userguide/vnf_test_guide.rst
+++ b/docs/testing/user/userguide/vnf_test_guide.rst
@@ -1,19 +1,26 @@
.. This work is licensed under a Creative Commons Attribution 4.0 International License.
.. http://creativecommons.org/licenses/by/4.0
-.. (c) OPNFV, Huawei Technologies Co.,Ltd and others.
+.. (c) OPNFV, ONAP, and others.
-=========================================
-Conducting ONAP VNF Testing with Dovetail
-=========================================
+.. _dovetail-vnf_testers_guide:
+
+===================================
+Conducting ONAP VNF Testing for OVP
+===================================
Overview
--------
-As the LFN verification framework, Dovetail covers ONAP VNF tests by integrating
-VNF SDK and VVP. This guide introduces only how to use Dovetail to run the tests.
+As the LFN verification framework, the Dovetail team has worked with the ONAP VVP and VTP
+projects to enable VNF testing, results submission, and results review to be completed
+through the same web portal and processes used for the NFVI testing.
For more details about VNF SDK and VVP, please refer to `ONAP VNF SDK Compliance Verification Program
-<https://docs.onap.org/en/dublin/submodules/vnfsdk/model.git/docs/files/VNFSDK-LFN-CVC.html>`_
-and `ONAP VVP <https://docs.onap.org/en/dublin/submodules/vvp/documentation.git/docs/index.html>`_.
+<https://docs.onap.org/en/elalto/submodules/vnfsdk/model.git/docs/files/VNFSDK-LFN-CVC.html>`_
+and `ONAP VVP <https://docs.onap.org/en/elalto/submodules/vvp/documentation.git/docs/index.html>`_.
+
+Testing is available for both HEAT and TOSCA defined VNFs, but the process is different depending
+on the template language. This userguide covers the testing process for both VNF types in the
+two sections below.
Definitions and abbreviations
@@ -21,158 +28,687 @@ Definitions and abbreviations
- LFN - Linux Foundation Networking
- ONAP - Open Network Automation Platform
+- OVP - OPNFV Verification Program
- VNF - Virtual Network Function
-- SDK - Software Development Kit
+- VNF SDK - VNF Software Development Kit
+- VTP - VNF Test Platform
- VVP - VNF Validation Program
+Testing of HEAT based VNFs
+--------------------------
Environment Preparation
------------------------
+^^^^^^^^^^^^^^^^^^^^^^^
-Currently, there are only VNF package validation tests which do not rely on the
-ONAP deployment. As a result, the preparation is very simple.
+Prerequisites
+"""""""""""""
+- `ONAP ElAlto Release deployed via OOM <https://onap.readthedocs.io/en/latest/submodules/oom.git/docs/oom_quickstart_guide.html>`_
+- An OpenStack deployment is available and provisioned as ONAP's Cloud Site
+- kubectl is installed on the system used to start the testing
+- bash
+- VNF Heat Templates
+- Preload json files
-Install Docker
-^^^^^^^^^^^^^^
+After deploying ONAP, you need to configure ONAP with:
-The main prerequisite software for Dovetail is Docker. Please refer to `official
-Docker installation guide <https://docs.docker.com/install/>`_ which is relevant
-to your Test Host's operating system.
+- A cloud owner
+- A cloud region
+- A subscriber
+- A service type
+- A project name
+- An owning entity
+- A platform
+- A line of business
+- A cloud site
+If you're not familiar with how to configure ONAP, there are guides that use
+`robot <https://onap.readthedocs.io/en/elalto/submodules/integration.git/docs/docs_robot.html>`_
+or `direct api <https://wiki.onap.org/pages/viewpage.action?pageId=25431491>`_ requests available
+to help, as well as a guide for adding a new OpenStack site to ONAP.
-Install VNF SDK Backend (optional)
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+VVP Test Tool Setup
+"""""""""""""""""""
-If it is TOSCA based VNF, then VNF SDK Backend must be installed before the
-tests. There is a `docker-compose.yml` in VNFSDK repo which runs 2 docker containers. Use
-the following commands to run the containers:
+On your local machine, or the system from which you will run the tests, you will need to clone the
+ONAP OOM project repo:
.. code-block:: bash
- $ export NEXUS_DOCKER_REPO=nexus3.onap.org:10001
- $ export REFREPO_TAG=1.2.1-STAGING-20181228T020411Z
- $ export POSTGRES_TAG=latest
- $ export MTU=1450
- $ wget https://raw.githubusercontent.com/onap/vnfsdk-refrepo/master/vnfmarket-be/deployment/install/docker-compose.yml
- $ sudo docker-compose up -d
+ git clone --branch 5.0.1-ONAP ssh://<username>@gerrit.onap.org:29418/oom --recurse-submodules
+
+VNF Preparation
+^^^^^^^^^^^^^^^
+
+The VNF lifecycle validation testsuite requires the VNF to be packaged into a specific directory
+hierarchy, shown below.
+
+.. code-block::
+
+ vnf_folder
+ ├── /templates
+ | └── base.yaml
+ | └── base.env
+ | └── incremental_0.yaml
+ | └── incremental_0.env
+ | └── ...
+ ├── /preloads
+ | └── base_preload.json
+ | └── incremental_0_preload.json
+ | └── ...
+ └── vnf-details.json
+
+- The name for vnf_folder is free-form, and can be located anywhere on your computer. The path to this folder will be passed to the testsuite as an argument.
+- /templates should contain your VVP-compliant VNF heat templates.
+- /preloads should contain a preload file for each VNF module (TODO: add link to preload documentation).
+ - For a VNF-API preload: vnf-name, vnf-type, generic-vnf-type, and generic-vnf-name should be empty strings.
+ - For a GR-API preload: vnf-name, vnf-type, vf-module-type, and vf-module-name should be empty strings.
+ - This information will be populated at runtime by the testsuite.
+- vnf-details should be a json file with the information that will be used by ONAP to instantiate the VNF. The structure of vnf-details is shown below.
+- VNF disk image must be uploaded and available in the OpenStack project being managed by ONAP (see the sketch after the vnf-details example below)
+- Modules must contain an entry for each module of the VNF. Only one module can be a base module.
+- api_type should match the format of the preloads that are provided in the package.
+- The other information should match what was used to configure ONAP during the pre-requisite section of this guide.
+
+.. code-block:: json
+
+ {
+ "vnf_name": "The Vnf Name",
+ "description": "Description of the VNF",
+ "modules": [
+ {
+ "filename": "base.yaml",
+ "isBase": "true",
+ "preload": "base_preload.json"
+ },
+ {
+ "filename": "incremental_0.yaml",
+ "isBase": "false",
+ "preload": "incremental_0.json"
+ },
+ ...
+ ],
+ "api_type": "[gr_api] or [vnf_api]",
+ "subscriber": "<subscriber name>",
+ "service_type": "<service type>",
+ "tenant_name": "<name of tenant>",
+ "region_id": "<name of region>",
+ "cloud_owner": "<name of cloud owner>",
+ "project_name": "<name of project>",
+ "owning_entity": "<name of owning entity>",
+ "platform": "<name of platform>",
+ "line_of_business": "<name of line of business>",
+ "os_password": "<openstack password>"
+ }
+
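+As noted in the module list above, the VNF disk image must already be available
+in the OpenStack project managed by ONAP. A minimal sketch using the OpenStack
+CLI (the image file and image name are hypothetical):
+
+.. code-block:: bash
+
+   $ openstack image create --disk-format qcow2 --container-format bare \
+         --file vgw-disk1.qcow2 vgw-image
+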
+Running the HEAT VNF Test
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The ONAP OOM Robot framework will run the test, using kubectl to manage the execution. The framework
+will copy your VNF template files to the robot container required to execute the test.
+
+.. code-block:: bash
+
+ cd oom/kubernetes/robot
+ $ ./instantiate-k8s.sh --help
+ ./instantiate-k8s.sh [options]
+
+ required:
+ -n, --namespace <namespace> namespace that robot pod is running under.
+ -f, --folder <folder> path to folder containing heat templates, preloads, and vnf-details.json.
+
+ additional options:
+ -p, --poll some cloud environments (like azure) have a short time out value when executing
+ kubectl. If your shell exits before the testsuite finishes, using this option
+ will poll the testsuite logs every 30 seconds until the test finishes.
+ -t, --tag <tag> robot testcase tag to execute (default is instantiate_vnf).
+
+ This script executes the VNF instantiation robot testsuite.
+ - It copies the VNF folder to the robot container that is part of the ONAP deployment.
+ - It models, distributes, and instantiates a heat-based VNF.
+ - It copies the logs to an output directory, and creates a tarball for upload to the OVP portal.
+
+
+**Sample execution:**
+
+.. code-block:: bash
+
+ $ ./instantiate-k8s.sh --namespace onap --folder /tmp/vnf-instantiation/examples/VNF_API/pass/multi_module/ --poll
+ ...
+ ...
+ ...
+ ...
+ ------------------------------------------------------------------------------
+ Testsuites.Vnf Instantiation :: The main driver for instantiating ... | PASS |
+ 1 critical test, 1 passed, 0 failed
+ 1 test total, 1 passed, 0 failed
+ ==============================================================================
+ Testsuites | PASS |
+ 1 critical test, 1 passed, 0 failed
+ 1 test total, 1 passed, 0 failed
+ ==============================================================================
+ Output: /share/logs/0003_ete_instantiate_vnf/output.xml
+ + set +x
+ testsuite has finished
+ Copying Results from pod...
+ /tmp/vnf-instantiation /tmp/vnf-instantiation
+ a log.html
+ a results.json
+ a stack_report.json
+ a validation-scripts.json
+ /tmp/vnf-instantiation
+ VNF test results: /tmp/vnfdata.46749/vnf_heat_results.tar.gz
+
+The testsuite takes about 10-15 minutes for a simple VNF, and will take longer for a more complicated VNF.
+
+Reporting Results
+"""""""""""""""""
+Once the testsuite is finished, it will create a directory and tarball in /tmp (the name of the directory
+and file is shown at the end of the stdout of the script). There will be a results.json in that directory
+that has the ultimate outcome of the test, in the structure shown below.
+
+**Log Files**
+
+The output tar file will have 4 log files in it.
+
+- results.json: This is high-level results file of all of the test steps, and is consumed by the OVP portal.
+- report.json: This is the output of the vvp validation scripts.
+- stack_report.json: This is the output from querying openstack to validate the heat modules.
+- log.html: This is the robot log, and contains each execution step of the testcase.
+
+If the result is "PASS", that means the testsuite was successful and the tarball is ready for submission
+to the OVP portal.
+
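+For a quick check of the per-testcase outcomes, a sketch assuming ``jq`` is
+installed and using the example output directory from the run above:
+
+.. code-block:: bash
+
+   $ jq '.testcases_list[] | {name, result}' /tmp/vnfdata.46749/results.json
+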
+**results.json**
+
+.. code-block:: json
+
+ {
+ "vnf_checksum": "afc57604a3b3b7401d5b8648328807b594d7711355a2315095ac57db4c334a50",
+ "build_tag": "vnf-validation-53270",
+ "version": "2019.09",
+ "test_date": "2019-09-04 17:50:10.575",
+ "duration": 437.002,
+ "vnf_type": "heat",
+ "testcases_list": [
+ {
+ "mandatory": "true",
+ "name": "onap-vvp.validate.heat",
+ "result": "PASS",
+ "objective": "onap heat template validation",
+ "sub_testcase": [],
+ "portal_key_file": "report.json"
+ },
+ {
+ "mandatory": "true",
+ "name": "onap-vvp.lifecycle_validate.heat",
+ "result": "PASS",
+ "objective": "onap vnf lifecycle validation",
+ "sub_testcase": [
+ {
+ "name": "model-and-distribute",
+ "result": "PASS"
+ },
+ {
+ "name": "instantiation",
+ "result": "PASS"
+ }
+ ],
+ "portal_key_file": "log.html"
+ },
+ {
+ "mandatory": "true",
+ "name": "stack_validation",
+ "result": "PASS",
+ "objective": "onap vnf openstack validation",
+ "sub_testcase": [],
+ "portal_key_file": "stack_report.json"
+ }
+ ]
+ }
+
+
+Additional Resources
+^^^^^^^^^^^^^^^^^^^^
+
+- `ONAP VVP Project <https://wiki.onap.org/display/DW/VNF+Validation+Program+Project>`_
+- `VVP Wiki Users Guide (this will track current ONAP master) <https://wiki.onap.org/pages/viewpage.action?pageId=68546123>`_
+
+Sample VNF templates are available on the VVP Wiki Users Guide page.
+
+Testing of TOSCA based VNFs
+---------------------------
+
+VNF Test Platform (VTP) provides a platform to on-board the different test cases required
+by OVP for VNF testing, provided by the VNFSDK (for TOSCA) and VVP (for HEAT) projects in
+ONAP. It generates the test case outputs which are uploaded to the OVP portal for
+VNF badging.
+
+TOSCA VNF Test Environment
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+As prerequisite steps, it is assumed that a successful ONAP deployment, a vendor VNFM and an
+OpenStack cloud are already available. The installation steps below help to set up the VTP components and CLI.
+
+.. image:: images/tocsa_vnf_test_environment.png
+ :align: center
+ :scale: 100%
+
+Installation
+^^^^^^^^^^^^
+
+Clone the VNFSDK repo.
+
+.. code-block:: bash
+
+ git clone --branch elalto https://git.onap.org/vnfsdk/refrepo
+
+Install the VTP by using script *refrepo/vnfmarket-be/deployment/install/vtp_install.sh*
+
+Follow the steps as below (in sequence):
+
+- vtp_install.sh --download : It will download all required artifacts into /opt/vtp_stage
+- vtp_install.sh --install : It will install VTP (/opt/controller) and CLI (/opt/oclip)
+- vtp_install.sh --start : It will start VTP controller as tomcat service and CLI as oclip service
+- vtp_install.sh --verify : It will verify the setup is done properly by running some test cases.
+
+The last step (verify) checks the health of the VTP components and the TOSCA VNF compliance and validation test cases.
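+
+For example, the four steps run in sequence:
+
+.. code-block:: bash
+
+   cd refrepo/vnfmarket-be/deployment/install
+   ./vtp_install.sh --download
+   ./vtp_install.sh --install
+   ./vtp_install.sh --start
+   ./vtp_install.sh --verify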
+
+Check Available Test Cases
+""""""""""""""""""""""""""
+
+VTP supports checking the compliance of VNFs and PNFs against the ONAP VNFREQS.
+
+To check:
+
+- Go to the command console
+- Run the command oclip
+- It will provide a command prompt: *oclip:open-cli>*
+
-The command `docker ps` can be used to check if the 2 containers named
-'refrepo' and 'postgres' are running.
+Now run the command below and check the supported compliance test cases for VNFREQS.
+
-The VNF package to be tested should be copied to the container 'refrepo'.
+- csar-validate - Helps to validate a given VNF CSAR for all configured VNFREQS.
+- csar-validate-rxxx - Helps to validate a given VNF CSAR for a given VNFREQS xxx.
.. code-block:: bash
- $ sudo docker cp /path/to/VNF/name.csar refrepo:/opt
+ oclip:open-cli>schema-list --product onap-dublin --service vnf-compliance
+ +--------------+----------------+------------------------+--------------+----------+------+
+ |product |service |command |ocs-version |enabled |rpc |
+ +--------------+----------------+------------------------+--------------+----------+------+
+ |onap-dublin |vnf-compliance |csar-validate-r10087 |1.0 |true | |
+ +--------------+----------------+------------------------+--------------+----------+------+
+ |onap-dublin |vnf-compliance |csar-validate |1.0 |true | |
+ +--------------+----------------+------------------------+--------------+----------+------+
+ |onap-dublin |vnf-compliance |csar-validate-r26885 |1.0 |true | |
+ +--------------+----------------+------------------------+--------------+----------+------+
+ |onap-dublin |vnf-compliance |csar-validate-r54356 |1.0 |true | |
+ ...
+
+To know the details of each VNFREQS, run as below.
+
+.. code-block:: bash
+
-Run Tests with Dovetail
------------------------
+ oclip:open-cli>use onap-dublin
+ oclip:onap-dublin>csar-validate-r54356 --help
+ usage: oclip csar-validate-r54356
-Setting up Configuration Files
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ Data types used by NFV node and is based on TOSCA/YAML constructs specified in draft GS NFV-SOL 001.
+ The node data definitions/attributes used in VNFD MUST comply.
-For convenience and as a convention, we will create a home directory for storing
-all Dovetail related config files and results files:
+Now run the command below and check the supported validation test cases.
.. code-block:: bash
- $ mkdir -p ${HOME}/dovetail
- $ export DOVETAIL_HOME=${HOME}/dovetail
+ oclip:onap-dublin>use open-cli
+ oclip:open-cli>schema-list --product onap-dublin --service vnf-validation
+ +--------------+----------------+----------------------+--------------+----------+------+
+ |product |service |command |ocs-version |enabled |rpc |
+ +--------------+----------------+----------------------+--------------+----------+------+
+ |onap-dublin |vnf-validation |vnf-tosca-provision |1.0 |true | |
+ +--------------+----------------+----------------------+--------------+----------+------+
+
+Configure ONAP with required VNFM and cloud details
+"""""""""""""""""""""""""""""""""""""""""""""""""""
+**1. Setup the OCOMP profile onap-dublin**
+
-For example, here we set Dovetail home directory to be ``${HOME}/dovetail``.
-Afterwards, we will create a directory named ``pre_config`` inside this directory
-to store all Dovetail config related files and a directory named ``results``, where
-test results are going to be saved:
+Run the following commands to configure the ONAP service URLs and credentials as given below;
+these will be used by VTP while executing the test cases.
.. code-block:: bash
- $ mkdir -p ${DOVETAIL_HOME}/pre_config
- $ mkdir -p ${DOVETAIL_HOME}/results
- $ chmod 777 ${DOVETAIL_HOME}/results
+ oclip:open-cli>use onap-dublin
+ oclip:onap-dublin>profile onap-dublin
+ oclip:onap-dublin>set sdc.onboarding:host-url=http://159.138.8.8:30280
+ oclip:onap-dublin>set sdc.onboarding:host-username=cs0008
+ oclip:onap-dublin>set sdc.onboarding:host-password=demo123456!
+ oclip:onap-dublin>set sdc.catalog:host-url=http://159.138.8.8:30205
+ oclip:onap-dublin>set sdc.catalog:host-password=demo123456\!
+ oclip:onap-dublin>set sdc.catalog:host-username=cs0008
+ oclip:onap-dublin>set sdc.catalog:service-model-approve:host-username=gv0001
+ oclip:onap-dublin>set sdc.catalog:service-model-distribute:host-username=op0001
+ oclip:onap-dublin>set sdc.catalog:service-model-test-start:host-username=jm0007
+ oclip:onap-dublin>set sdc.catalog:service-model-test-accept:host-username=jm0007
+ oclip:onap-dublin>set sdc.catalog:service-model-add-artifact:host-username=ocomp
+ oclip:onap-dublin>set sdc.catalog:vf-model-add-artifact:host-username=ocomp
+ oclip:onap-dublin>set aai:host-url=https://159.138.8.8:30233
+ oclip:onap-dublin>set aai:host-username=AAI
+ oclip:onap-dublin>set aai:host-password=AAI
+ oclip:onap-dublin>set vfc:host-url=http://159.138.8.8:30280
+ oclip:onap-dublin>set multicloud:host-url=http://159.138.8.8:30280
+
+NOTE: Most of the above entry values would be the same, except the IP address used in the
+URL, which would be the ONAP k8s cluster IP.
+
+By default, the SDC onboarding service does not provide a node port that is accessible
+from outside the ONAP network. So, to enable external access, register the SDC
+onboarding service into MSB and use the MSB url for sdc.onboarding:host-url.
+
+.. code-block:: bash
+
+ oclip:onap-dublin> microservice-create --service-name sdcob --service-version v1.0 --service-url /onboarding-api/v1.0 --path /onboarding-api/v1.0 --node-ip 172.16.1.0 --node-port 8081
-There should be a file `env_config.sh` inside ``pre_config`` directory to provide
-some info needed by test cases.
+NOTE: To find the node-ip and node-port, use the following steps.
-For TOSCA based VNFs, it should look like this:
+Find out SDC onboarding service IP and port details as given here:
.. code-block:: bash
- $ cat ${DOVETAIL_HOME}/pre_config/env_config.sh
- export HOST_URL="http://<docker host ip>:8702"
- export CSAR_FILE="/opt/name.csar"
+ [root@onap-dublin-vfw-93996-50c1z ~]# kubectl get pods -n onap -o wide | grep sdc-onboarding-be
+ dev-sdc-sdc-onboarding-be-5564b877c8-vpwr5 2/2 Running 0 29d 172.16.1.0 192.168.2.163 <none> <none>
+ dev-sdc-sdc-onboarding-be-cassandra-init-mtvz6 0/1 Completed 0 29d 172.16.0.220 192.168.2.163 <none> <none>
+ [root@onap-dublin-vfw-93996-50c1z ~]#
+
+Note down the IP address of sdc-onboarding-be: 172.16.1.0
+
+.. code-block:: bash
+
+ [root@onap-dublin-vfw-93996-50c1z ~]# kubectl get services -n onap -o wide | grep sdc-onboarding-be
+ sdc-onboarding-be ClusterIP 10.247.198.92 <none> 8445/TCP,8081/TCP 29d app=sdc-onboarding-be,release=dev-sdc
+ [root@onap-dublin-vfw-93996-50c1z ~]#
-For HEAT based VNFs, the user should copy an archive of the HEAT template VNF
-packages to `pre_config`. The archive must be in zip (.zip) format.
-In addition, the zip of HEAT templates must be a flat collection of files, which
-means there should be no top-level directory and no sub-directories.
+Note down the ports of sdc-onboarding-be: 8445 and 8081
-Configuration file `env_config.sh` should look like this for HEAT based VNFs:
+Similarly, the IP and port of other services can be discovered as above, in case they were not noted earlier.
+
+Verify these details once by typing 'set':
.. code-block:: bash
- $ cat ${DOVETAIL_HOME}/pre_config/env_config.sh
- export VNF_ARCHIVE_NAME="vnf_archive_name"
+ oclip:onap-dublin> set
+This profile would then be used while running the test cases with the ONAP setup configured
+in it, as below: ``oclip --profile onap-dublin vnf-tosca-provision ....``
-Starting Dovetail Docker
-^^^^^^^^^^^^^^^^^^^^^^^^
+**2. Setup SDC consumer**
+
-Use the command below to create a Dovetail container and get access to its shell:
+SDC uses the consumer concept to configure the required VNF model and service model artifacts.
+So the following command is required, which will create a consumer named ocomp; this consumer
+is already configured in the onap-dublin profile created in the steps above.
.. code-block:: bash
- $ sudo docker run --privileged=true -it \
- -e DOVETAIL_HOME=$DOVETAIL_HOME \
- -v $DOVETAIL_HOME:$DOVETAIL_HOME \
- -v /var/run/docker.sock:/var/run/docker.sock \
- opnfv/dovetail:<tag> /bin/bash
+ oclip --product onap-dublin --profile onap-dublin sdc-consumer-create --consumer-name ocomp
+
+NOTE: the command oclip could be used in scripting mode as above, or in interactive mode as used
+in earlier steps.
+
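+A short sketch of the two modes, reusing commands shown in this guide:
+
+.. code-block:: bash
+
+   # scripting mode: product and profile are passed as options
+   oclip --product onap-dublin --profile onap-dublin sdc-consumer-create --consumer-name ocomp
+
+   # interactive mode: enter the prompt first, then switch product
+   oclip
+   oclip:open-cli> use onap-dublin
+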
+**3. Update the cloud and vnfm driver details**
+
+In the configuration file /opt/oclip/conf/vnf-tosca-provision.json, update the cloud
+and VNFM details.
+
+.. code-block:: json
+
+ "cloud": {
+ "identity-url": "http://10.12.11.1:5000/v3",
+ "username": "admin",
+ "password": "password",
+ "region": "RegionOVP",
+ "version": "ocata",
+ "tenant": "ocomp"
+ },
+ "vnfm":{
+ "hwvnfmdriver":{
+ "version": "v1.0",
+ "url": "http://159.138.8.8:38088",
+ "username": "admin",
+ "password": "xxxx"
+ },
+ "gvnfmdriver":{
+ "version": "v1.0",
+ "url": "http://159.138.8.8:30280"
+ }
+ }
+
+**4. Configure the desired VNFREQS (optional)**
+
+VTP allows configuring the set of VNFREQS to be considered while running the VNF
+compliance test cases, in the configuration file /opt/oclip/conf/vnfreqs.properties.
+
+If not available, please create this file with the following entries:
+
+.. code-block:: bash
+
-The ``-e`` option sets the DOVETAIL_HOME environment variable in the container
-and the ``-v`` options mount files from the Test Host to the destination path
-inside the container. The latter option allows the Dovetail container to read
-the configuration files and write result files into DOVETAIL_HOME on the Test
-Host. The user should be within the Dovetail container shell, once the command
-above is executed. In order to run ONAP VNF tests 'latest' <tag> must be used.
+ vnfreqs.enabled=r02454,r04298,r07879,r09467,r13390,r23823,r26881,r27310,r35851,r40293,r43958,r66070,r77707,r77786,r87234,r10087,r21322,r26885,r40820,r35854,r65486,r17852,r46527,r15837,r54356,r67895,r95321,r32155,r01123,r51347,r787965,r130206
+ pnfreqs.enabled=r10087,r87234,r35854,r15837,r17852,r293901,r146092,r57019,r787965,r130206
+ # ignored all chef and ansible related tests
+ vnferrors.ignored=
+ pnferrors.ignored=
+Running the TOSCA VNF Test
+^^^^^^^^^^^^^^^^^^^^^^^^^^
-Running OVP Test Suites
-^^^^^^^^^^^^^^^^^^^^^^^
+Every test provided in VTP comes with guidelines on how to use it. On every execution of
+test cases, use the following additional arguments based on requirements:
+
-Run VNF tests with the following command:
+- --product onap-dublin - It helps VTP choose the test cases written for the onap-dublin version
+- --profile onap-dublin - It helps VTP use the profile settings provided by the admin (optional)
+- --request-id - It helps VTP track the progress of the test case execution; the user can use this id for the same (optional)
+
+So, the final test case execution would be as below. To find the details of the test case arguments, run the second command below.
.. code-block:: bash
- $ dovetail run --testsuite <suite name> -d -r
+ oclip --product onap-dublin --profile onap-dublin --request-id req-1 <test case name> <test case arguments>
+ oclip --product onap-dublin <test case name> --help
+
+Running TOSCA VNF Compliance Testing
+""""""""""""""""""""""""""""""""""""
+
+To run the compliance test with a given CSAR file, use the command below.
+
+.. code-block:: bash
+
+ oclip --product onap-dublin csar-validate --csar <csar file complete path>
+
+It will produce results in the format below:
+
+.. code-block:: json
+
+ {
+ "date": "Fri Sep 20 17:34:24 CST 2019",
+ "criteria": "PASS",
+ "contact": "ONAP VTP Team onap-discuss@lists.onap.org",
+ "results": [
+ {
+ "description": "V2.4.1 (2018-02)",
+ "passed": true,
+ "vnfreqName": "SOL004",
+ "errors": []
+ },
+ {
+ "description": "If the VNF or PNF CSAR Package utilizes Option 2 for package security, then the complete CSAR file MUST be digitally signed with the VNF or PNF provider private key. The VNF or PNF provider delivers one zip file consisting of the CSAR file, a signature file and a certificate file that includes the VNF or PNF provider public key. The certificate may also be included in the signature container, if the signature format allows that. The VNF or PNF provider creates a zip file consisting of the CSAR file with .csar extension, signature and certificate files. The signature and certificate files must be siblings of the CSAR file with extensions .cms and .cert respectively.\n",
+ "passed": true,
+ "vnfreqName": "r787965",
+ "errors": []
+ }
+ ],
+ "platform": "VNFSDK - VNF Test Platform (VTP) 1.0",
+ "vnf": {
+ "mode": "WITH_TOSCA_META_DIR",
+ "vendor": "ONAP",
+ "name": null,
+ "type": "TOSCA",
+ "version": null
+ }
+ }
+
+In case of errors, the errors section will have a list of details as below. Each error block
+is given an error code and error details. The error code would be very useful for providing a
+troubleshooting guide in the future. Note: to generate the test result in OVP archive format, it is
+recommended to run this compliance test with a request-id, similar to running the validation test as below.
+
+.. code-block:: json
+
+ [
+ {
+ "vnfreqNo": "R66070",
+ "code": "0x1000",
+ "message": "MissinEntry-Definitions file",
+ "lineNumber": -1
+ }
+ ]
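+
+For example, a compliance run with a request-id, combining the flags documented
+above (the id value is illustrative):
+
+.. code-block:: bash
+
+   oclip --product onap-dublin --request-id req-1 csar-validate --csar <csar file complete path>
+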
-For TOSCA based VNFs, `<suite name>` is `onap.tosca.2019.04` and for
-HEAT based ones, it is `onap.heat.2019.04`.
+Running TOSCA VNF Validation Testing
+""""""""""""""""""""""""""""""""""""
+VTP provides the validation test case with the following modes:
-When test execution is complete, a tar file with all result and log files is
-written in ``$DOVETAIL_HOME`` on the Test Host. An example filename is
-``${DOVETAIL_HOME}/logs_20180105_0858.tar.gz``. The file is named using a
-timestamp that follows the convention ‘YearMonthDay-HourMinute’. In this case,
-it was generated at 08:58 on January 5th, 2018. This tar file is used for
-uploading the logs to `OVP VNF portal`_.
+.. image:: images/tosca_vnf_test_flow.png
+ :align: left
+ :scale: 100%
-NOTE: If Dovetail run fails when testing `onap-vtp.validate.csar`, then follow the
-below guidelines to run the test again.
+
+- setup: Creates the required Vendor, Service Subscription and VNF cloud in ONAP
+- standup: From the given VSP csar, VNF csar and NS csar, creates the VF Model, NS Model and NS service
+- cleanup: Removes the entries created during provision
+- provision: Runs setup -> standup
+- validate: Runs setup -> standup -> cleanup
+- checkup: Helps to verify that the automation is deployed properly
+
+For OVP badging, validate mode would be used as below:
+
+.. code-block:: bash
+
+ oclip --request-id WkVVu9fD --product onap-dublin --profile onap-dublin vnf-tosca-provision --vsp <vsp csar> --vnf-csar <v
+
+Validation testing would take a while to complete, so the user could use the
+request-id given above to track the progress as below:
.. code-block:: bash
- $ sudo docker exec -it refrepo bash
- $ export OPEN_CLI_HOME=/opt/vtp
- $ cd $OPEN_CLI_HOME/bin
- $ ./oclip-grpc-server.sh
- $ #Exit docker by running CTRL+p+q
+ oclip execution-list --request-id WkVVu9fD
+ +------------+------------------------+--------------+------------------+------------------------------+--------------+------------+--------------------------+--------------------------+
+ |request-id |execution-id |product |service |command |profile |status |start-time |end-time |
+ +------------+------------------------+--------------+------------------+------------------------------+--------------+------------+--------------------------+--------------------------+
+ |WkVVu9fD |WkVVu9fD-1568731678753 |onap-dublin |vnf-validation |vnf-tosca-provision | |in-progress |2019-09-17T14:47:58.000 | |
+ +------------+------------------------+--------------+------------------+------------------------------+--------------+------------+--------------------------+--------------------------+
+ |WkVVu9fD |WkVVu9fD-1568731876397 |onap-dublin |sdc.catalog |service-model-test-request |onap-dublin |in-progress |2019-09-17T14:51:16.000 | |
+ +------------+------------------------+--------------+------------------+------------------------------+--------------+------------+--------------------------+--------------------------+
+ |WkVVu9fD |WkVVu9fD-1568731966966 |onap-dublin |sdc.onboarding |vsp-archive |onap-dublin |completed |2019-09-17T14:52:46.000 |2019-09-17T14:52:47.000 |
+ +------------+------------------------+--------------+------------------+------------------------------+--------------+------------+--------------------------+--------------------------+
+ |WkVVu9fD |WkVVu9fD-1568731976982 |onap-dublin |aai |subscription-delete |onap-dublin |completed |2019-09-17T14:52:56.000 |2019-09-17T14:52:57.000 |
+ +------------+------------------------+--------------+------------------+------------------------------+--------------+------------+--------------------------+--------------------------+
+ |WkVVu9fD |WkVVu9fD-1568731785780 |onap-dublin |aai |vnfm-create |onap-dublin |completed |2019-09-17T14:49:45.000 |2019-09-17T14:49:46.000 |
+ ......
+
+While executing the test cases, VTP provides a unique execution-id (2nd column) for each step. As shown
+in the example above, some steps are in-progress while others are already completed. If there is an error,
+the status will be set to failed.
+
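+For instance, to quickly spot the steps that are not yet completed or that failed, the listing can be
+filtered with standard shell tools (this assumes a POSIX shell with ``grep`` available):
+
+.. code-block:: bash
+
+ # show only the in-progress or failed steps for this request
+ oclip execution-list --request-id WkVVu9fD | grep -E 'in-progress|failed'
+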
+To find out the footprint of each step, the following commands are available:
+
+.. code-block:: bash
+
+ oclip execution-show-out --execution-id WkVVu9fD-1568731785780 - Reports the standard output logs
+ oclip execution-show-err --execution-id WkVVu9fD-1568731785780 - Reports the standard error logs
+ oclip execution-show-debug --execution-id WkVVu9fD-1568731785780 - Reports the debug details such as the HTTP request and response
+ oclip execution-show --execution-id WkVVu9fD-1568731785780 - Reports the complete footprint of the inputs and outputs of each step
+
+Track the progress of the vnf-tosca-provision test case until it is completed. Then the output of the
+validation test case can be retrieved as below:
+
+.. code-block:: bash
+
+ oclip execution-show --execution-id WkVVu9fD-1568731678753 - use the vnf-tosca-provision test case execution-id here
+
+It provides the output in the format below:
+
+.. code-block:: json
+
+ {
+   "output": {
+     "ns-id": null,
+     "vnf-id": "",
+     "vnfm-driver": "hwvnfmdriver",
+     "vnf-vendor-name": "huawei",
+     "onap-objects": {
+       "ns_instance_id": null,
+       "tenant_version": null,
+       "service_type_id": null,
+       "tenant_id": null,
+       "subscription_version": null,
+       "esr_vnfm_id": null,
+       "location_id": null,
+       "ns_version": null,
+       "vnf_status": "active",
+       "entitlement_id": null,
+       "ns_id": null,
+       "cloud_version": null,
+       "cloud_id": null,
+       "vlm_version": null,
+       "esr_vnfm_version": null,
+       "vlm_id": null,
+       "vsp_id": null,
+       "vf_id": null,
+       "ns_instance_status": "active",
+       "service_type_version": null,
+       "ns_uuid": null,
+       "location_version": null,
+       "feature_group_id": null,
+       "vf_version": null,
+       "vsp_version": null,
+       "agreement_id": null,
+       "vf_uuid": null,
+       "ns_vf_resource_id": null,
+       "vsp_version_id": null,
+       "customer_version": null,
+       "vf_inputs": null,
+       "customer_id": null,
+       "key_group_id": null
+     },
+     "vnf-status": "active",
+     "vnf-name": "vgw",
+     "ns-status": "active"
+   },
+   "input": {
+     "mode": "validate",
+     "vsp": "/tmp/data/vtp-tmp-files/1568731645518.csar",
+     "vnfm-driver": "hwvnfmdriver",
+     "config-json": "/opt/oclip/conf/vnf-tosca-provision.json",
+     "vnf-vendor-name": "huawei",
+     "ns-csar": "/tmp/data/vtp-tmp-files/1568731660745.csar",
+     "onap-objects": "{}",
+     "timeout": "600000",
+     "vnf-name": "vgw",
+     "vnf-csar": "/tmp/data/vtp-tmp-files/1568731655310.csar"
+   },
+   "product": "onap-dublin",
+   "start-time": "2019-09-17T14:47:58.000",
+   "service": "vnf-validation",
+   "end-time": "2019-09-17T14:53:46.000",
+   "request-id": "WkVVu9fD-1568731678753",
+   "command": "vnf-tosca-provision",
+   "status": "completed"
+ }
+
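+Since the report is plain JSON, individual fields can be extracted with any JSON processor. As a small
+sketch (assuming ``jq`` is installed; the execution-id is the one used above):
+
+.. code-block:: bash
+
+ # print only the overall status and the VNF status from the validation report
+ oclip execution-show --execution-id WkVVu9fD-1568731678753 | jq '{status: .status, vnf: .output."vnf-status"}'
+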
+Reporting Results
+"""""""""""""""""
+
+VTP provides a translation tool to migrate the VTP results into the OVP portal format and to generate the tar file
+for a given test case execution. Please refer to `<https://github.com/onap/vnfsdk-refrepo/tree/master/vnfmarket-be/deployment/vtp2ovp>`_ for more details.
+
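+As an illustrative sketch only, a translation run might look like the following; the script name,
+flags and paths here are assumptions, so please check the vtp2ovp repository above for the actual usage:
+
+.. code-block:: bash
+
+ # hypothetical invocation: translate a VTP execution result into an OVP result tar file
+ ./vtp2ovp.sh --execution-id WkVVu9fD-1568731678753 --output /tmp/ovp-result.tar.gz
+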
+Once the tar file is generated, it can be submitted to the OVP portal at
+`<https://vnf-verified.lfnetworking.org/>`_.
+
.. References
.. _`OVP VNF portal`: https://vnf-verified.lfnetworking.org
diff --git a/dovetail/api/app/routes.py b/dovetail/api/app/routes.py
index b1557b67..352d69f3 100644
--- a/dovetail/api/app/routes.py
+++ b/dovetail/api/app/routes.py
@@ -9,7 +9,7 @@ import uuid
from flask import Flask, jsonify, request
from flask_cors import CORS
-import server
+from app.server import Server
app = Flask(__name__)
CORS(app)
@@ -17,13 +17,13 @@ CORS(app)
@app.route('/api/v1/scenario/nfvi/testsuites', methods=['GET'])
def get_all_testsuites():
- testsuites = server.list_testsuites()
+ testsuites = Server.list_testsuites()
return jsonify({'testsuites': testsuites}), 200
@app.route('/api/v1/scenario/nfvi/testcases', methods=['GET'])
def get_testcases():
- testcases = server.list_testcases()
+ testcases = Server.list_testcases()
return jsonify({'testcases': testcases}), 200
@@ -37,23 +37,26 @@ def run_testcases():
else:
return 'No DOVETAIL_HOME found in env.\n', 500
- msg, ret = server.set_conf_files(request.json, dovetail_home, requestId)
+ server = Server(dovetail_home, requestId, request.json)
+
+ msg, ret = server.set_conf_files()
if not ret:
return msg, 500
- msg, ret = server.set_vm_images(request.json, dovetail_home, requestId)
+ msg, ret = server.set_vm_images()
if not ret:
return msg, 500
- input_str = server.parse_request(request.json)
+ input_str = server.parse_request()
repo_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir, os.pardir))
run_script = os.path.join(repo_dir, 'run.py')
- cmd = 'python {} {}'.format(run_script, input_str)
+ cmd = 'python3 {} {}'.format(run_script, input_str)
api_home = os.path.join(dovetail_home, str(requestId))
- subprocess.Popen(cmd, shell=True, env={'DOVETAIL_HOME': api_home})
+ subprocess.Popen(cmd, shell=True, env={'DOVETAIL_HOME': api_home,
+ 'LC_ALL': 'C.UTF-8', 'LANG': 'C.UTF-8'})
testcases_file = os.path.join(dovetail_home, str(requestId),
'results', 'testcases.json')
@@ -71,8 +74,7 @@ def run_testcases():
testcases = data['testcases']
testsuite = data['testsuite']
- result = server.get_execution_status(dovetail_home, testsuite,
- testcases, requestId)
+ result = server.get_execution_status(testsuite, testcases, testcases)
return jsonify({'result': result}), 200
@@ -86,6 +88,7 @@ def get_testcases_status(exec_id):
testcases = request.json['testcase']
dovetail_home = os.getenv('DOVETAIL_HOME')
+ server = Server(dovetail_home, exec_id, request.json)
testcases_file = os.path.join(dovetail_home, str(exec_id),
'results', 'testcases.json')
with open(testcases_file, "r") as f:
@@ -93,6 +96,7 @@ def get_testcases_status(exec_id):
data = json.loads(jsonfile)
testsuite = data['testsuite']
- result = server.get_execution_status(dovetail_home, testsuite,
- testcases, data['testcases'], exec_id)
+ result = server.get_execution_status(testsuite, testcases,
+ data['testcases'])
+
return jsonify({'result': result}), 200
diff --git a/dovetail/api/app/server.py b/dovetail/api/app/server.py
index e6b1df46..d44e2ee5 100644
--- a/dovetail/api/app/server.py
+++ b/dovetail/api/app/server.py
@@ -2,218 +2,296 @@ import json
import os
import shutil
-import constants
-import utils
+import app.constants as constants
+from app.utils import Utils
from dovetail.testcase import Testsuite, Testcase
-def list_testsuites():
- return Testsuite.load()
+class Server(object):
+ def __init__(self, dovetail_home=None, requestId=None, requestData=None):
+ self.dovetail_home = dovetail_home
+ self.requestId = requestId
+ self.requestData = requestData
-def list_testcases():
- testcases = Testcase.load()
- testcase_list = []
- for key, value in testcases.items():
- testcase = {'testCaseName': key,
- 'description': value.objective(),
- 'subTestCase': value.sub_testcase()}
- if value.validate_type() in constants.NFVI_PROJECT:
- testcase['scenario'] = 'nfvi'
- elif value.validate_type() in constants.VNF_PROJECT:
- testcase['scenario'] = 'vnf'
- else:
- testcase['scenario'] = 'unknown'
- testcase_list.append(testcase)
- return testcase_list
-
-
-def set_vm_images(data, dovetail_home, requestId):
- image_path = os.path.join(dovetail_home, str(requestId), 'images')
- try:
- origin_image_path = data['conf']['vm_images']
- except KeyError:
- origin_image_path = os.path.join(dovetail_home, 'images')
- if os.path.exists(origin_image_path):
- try:
- shutil.copytree(origin_image_path, image_path)
- except Exception as e:
- return str(e), False
- return "Success to set vm images.\n", True
- else:
- return "Could not find vm images.\n", False
+ @staticmethod
+ def list_testsuites():
+ return Testsuite.load()
+ @staticmethod
+ def list_testcases():
+ testcases = Testcase.load()
+ testcase_list = []
+ for key, value in testcases.items():
+ testcase = {'testCaseName': key,
+ 'description': value.objective(),
+ 'subTestCase': value.sub_testcase()}
+ if value.validate_type() in constants.NFVI_PROJECT:
+ testcase['scenario'] = 'nfvi'
+ elif value.validate_type() in constants.VNF_PROJECT:
+ testcase['scenario'] = 'vnf'
+ else:
+ testcase['scenario'] = 'unknown'
+ testcase_list.append(testcase)
+ return testcase_list
-def set_conf_files(data, dovetail_home, requestId):
- config_path = os.path.join(dovetail_home, str(requestId), 'pre_config')
- origin_config_path = os.path.join(dovetail_home, 'pre_config')
- if os.path.exists(origin_config_path):
+ def set_vm_images(self):
+ image_path = os.path.join(self.dovetail_home, str(self.requestId),
+ 'images')
try:
- shutil.copytree(origin_config_path, config_path)
- except Exception as e:
- return str(e), False
-
- # check and prepare mandatory env_config.sh file
- # if there are envs in request body, use it
- # otherwise, use the file in pre_config
- # if don't have this file, return False with error message
- env_file = os.path.join(config_path, 'env_config.sh')
- try:
- utils.write_env_file(data['conf']['envs'], env_file)
- except KeyError:
- if not os.path.isfile(env_file):
- return "No 'envs' found in the request body.\n", False
+ origin_image_path = self.requestData['conf']['vm_images']
+ except KeyError:
+ origin_image_path = os.path.join(self.dovetail_home, 'images')
+ if os.path.exists(origin_image_path):
+ try:
+ shutil.copytree(origin_image_path, image_path)
+ except Exception as e:
+ return str(e), False
+ return "Success to set vm images.\n", True
else:
- pass
- except Exception as e:
- return str(e), False
+ return "Could not find vm images.\n", False
- # check and prepare other optional yaml files
- for key, value in constants.CONFIG_YAML_FILES.items():
- config_file = os.path.join(config_path, value)
+ def set_conf_files(self):
+ config_path = os.path.join(self.dovetail_home, str(self.requestId),
+ 'pre_config')
+ origin_config_path = os.path.join(self.dovetail_home, 'pre_config')
+ if os.path.exists(origin_config_path):
+ try:
+ shutil.copytree(origin_config_path, config_path)
+ except Exception as e:
+ return str(e), False
+
+ # check and prepare mandatory env_config.sh file
+ # if there are envs in request body, use it
+ # otherwise, use the file in pre_config
+ # if don't have this file, return False with error message
+ env_file = os.path.join(config_path, 'env_config.sh')
try:
- utils.write_yaml_file(data['conf'][key], config_file)
+ Utils.write_env_file(self.requestData['conf']['envs'], env_file)
except KeyError:
- pass
+ if not os.path.isfile(env_file):
+ return "No 'envs' found in the request body.\n", False
+ else:
+ pass
except Exception as e:
return str(e), False
- return 'Success to prepare all config files.\n', True
+ # check and prepare other optional yaml files
+ for key, value in constants.CONFIG_YAML_FILES.items():
+ config_file = os.path.join(config_path, value)
+ try:
+ Utils.write_yaml_file(self.requestData['conf'][key],
+ config_file)
+ except KeyError:
+ pass
+ except Exception as e:
+ return str(e), False
+
+ return 'Success to prepare all config files.\n', True
+ def parse_request(self):
+ output = ''
+ default_args = constants.RUN_TEST_ITEMS['arguments']
+ default_options = constants.RUN_TEST_ITEMS['options']
-def parse_request(request_json):
- output = ''
- default_args = constants.RUN_TEST_ITEMS['arguments']
- default_options = constants.RUN_TEST_ITEMS['options']
+ for arg in default_args['no_multiple']:
+ if arg in self.requestData.keys():
+ output = output + ' --{} {}'.format(arg, self.requestData[arg])
+ for arg in default_args['multiple']:
+ if arg in self.requestData.keys() and self.requestData[arg]:
+ for item in self.requestData[arg]:
+ output = output + ' --{} {}'.format(arg, item)
- for arg in default_args['no_multiple']:
- if arg in request_json.keys():
- output = output + ' --{} {}'.format(arg, request_json[arg])
- for arg in default_args['multiple']:
- if arg in request_json.keys() and request_json[arg]:
- for item in request_json[arg]:
- output = output + ' --{} {}'.format(arg, item)
+ if 'options' not in self.requestData.keys():
+ return output
- if 'options' not in request_json.keys():
- return output
+ for option in default_options:
+ if option in self.requestData['options']:
+ output = output + ' --{}'.format(option)
- for option in default_options:
- if option in request_json['options']:
- output = output + ' --{}'.format(option)
+ return output
- return output
+ def get_execution_status(self, testsuite, request_testcases,
+ exec_testcases):
+ results_dir = os.path.join(self.dovetail_home, str(self.requestId),
+ 'results')
+ results = []
+ for tc in request_testcases:
+ if tc not in exec_testcases:
+ res = {'testCaseName': tc, 'status': 'NOT_EXECUTED'}
+ results.append(res)
+ continue
+ tc_type = tc.split('.')[0]
+ checker = CheckerFactory.create(tc_type)
+ status, result = checker.get_status(results_dir, tc)
-def get_execution_status(dovetail_home, testsuite, request_testcases,
- exec_testcases, requestId):
- results_dir = os.path.join(dovetail_home, str(requestId), 'results')
- results = []
- for tc in request_testcases:
- if tc not in exec_testcases:
- res = {'testCaseName': tc, 'status': 'NOT_EXECUTED'}
- results.append(res)
- continue
- if tc.startswith('functest'):
- status, result = get_functest_status(results_dir, tc)
res = {'testCaseName': tc, 'testSuiteName': testsuite,
- 'scenario': 'nfvi', 'executionId': requestId,
- 'results': result, 'status': status}
- if not result:
- res['timestart'] = None
- res['endTime'] = None
- else:
+ 'scenario': 'nfvi', 'executionId': self.requestId,
+ 'results': result, 'status': status, 'timestart': None,
+ 'endTime': None}
+ try:
res['timestart'] = result['timestart']
res['endTime'] = result['timestop']
+ except Exception:
+ pass
+
results.append(res)
- if tc.startswith('yardstick'):
- status, result = get_yardstick_status(results_dir, tc)
- res = {'testCaseName': tc, 'testSuiteName': testsuite,
- 'scenario': 'nfvi', 'executionId': requestId,
- 'results': result, 'status': status,
- 'timestart': None, 'endTime': None}
- results.append(res)
- if tc.startswith('bottlenecks'):
- pass
- return results
+ return results
-def get_functest_status(results_dir, testcase):
- functest_file = os.path.join(results_dir, 'functest_results.txt')
- total_file = os.path.join(results_dir, 'results.json')
- if not os.path.isfile(functest_file):
- if not os.path.isfile(total_file):
- return 'IN_PROGRESS', None
- return 'FAILED', None
- criteria = None
- sub_testcase = []
- timestart = None
- timestop = None
- # get criteria and sub_testcase from results.json when all tests completed
- if os.path.isfile(total_file):
+class Checker(object):
+
+ def __init__(self):
+ pass
+
+ @staticmethod
+ def get_status_from_total_file(total_file, testcase):
with open(total_file, 'r') as f:
for jsonfile in f:
try:
data = json.loads(jsonfile)
for item in data['testcases_list']:
if item['name'] == testcase:
- criteria = item['result']
- sub_testcase = item['sub_testcase']
- break
- else:
- return 'FAILED', None
- except KeyError:
+ return item['result'], item['sub_testcase']
+ except KeyError as e:
return 'FAILED', None
except ValueError:
continue
+ return 'FAILED', None
- # get detailed results from functest_results.txt
- with open(functest_file, 'r') as f:
- for jsonfile in f:
- try:
- data = json.loads(jsonfile)
- if data['build_tag'].endswith(testcase):
- criteria = data['criteria'] if not criteria else criteria
- timestart = data['start_date']
- timestop = data['stop_date']
- break
- except KeyError:
+
+class FunctestChecker(Checker):
+
+ def get_status(self, results_dir, testcase):
+ functest_file = os.path.join(results_dir, 'functest_results.txt')
+ total_file = os.path.join(results_dir, 'results.json')
+ if not os.path.isfile(functest_file):
+ if not os.path.isfile(total_file):
+ return 'IN_PROGRESS', None
+ return 'FAILED', None
+ criteria = None
+ sub_testcase = []
+ timestart = None
+ timestop = None
+
+ # get criteria and sub_testcase when all tests completed
+ if os.path.isfile(total_file):
+ criteria, sub_testcase = self.get_status_from_total_file(
+ total_file, testcase)
+ if criteria == 'FAILED':
return 'FAILED', None
- except ValueError:
- continue
- else:
- if not criteria:
+
+ # get detailed results from functest_results.txt
+ with open(functest_file, 'r') as f:
+ for jsonfile in f:
+ try:
+ data = json.loads(jsonfile)
+ if data['build_tag'].endswith(testcase):
+ criteria = data['criteria'] if not criteria \
+ else criteria
+ timestart = data['start_date']
+ timestop = data['stop_date']
+ break
+ except KeyError:
+ return 'FAILED', None
+ except ValueError:
+ continue
+ else:
+ if not criteria:
+ return 'IN_PROGRESS', None
+
+ status = 'COMPLETED' if criteria == 'PASS' else 'FAILED'
+ results = {'criteria': criteria, 'sub_testcase': sub_testcase,
+ 'timestart': timestart, 'timestop': timestop}
+ return status, results
+
+
+class YardstickChecker(Checker):
+
+ def get_status(self, results_dir, testcase):
+ yardstick_file = os.path.join(results_dir, 'ha_logs',
+ '{}.out'.format(testcase))
+ total_file = os.path.join(results_dir, 'results.json')
+ if not os.path.isfile(yardstick_file):
+ if not os.path.isfile(total_file):
return 'IN_PROGRESS', None
+ return 'FAILED', None
- status = 'COMPLETED' if criteria == 'PASS' else 'FAILED'
- results = {'criteria': criteria, 'sub_testcase': sub_testcase,
- 'timestart': timestart, 'timestop': timestop}
- return status, results
+ criteria = None
+ # get criteria and sub_testcase when all tests completed
+ if os.path.isfile(total_file):
+ criteria, _ = self.get_status_from_total_file(total_file, testcase)
+ if criteria == 'FAILED':
+ return 'FAILED', None
-def get_yardstick_status(results_dir, testcase):
- yardstick_file = os.path.join(results_dir, 'ha_logs',
- '{}.out'.format(testcase))
- total_file = os.path.join(results_dir, 'results.json')
- if not os.path.isfile(yardstick_file):
- if not os.path.isfile(total_file):
- return 'IN_PROGRESS', None
- return 'FAILED', None
- with open(yardstick_file, 'r') as f:
- for jsonfile in f:
- data = json.loads(jsonfile)
- try:
- criteria = data['result']['criteria']
- if criteria == 'PASS':
- details = data['result']['testcases']
- for key, value in details.items():
- sla_pass = value['tc_data'][0]['data']['sla_pass']
- if not 1 == sla_pass:
- criteria = 'FAIL'
- except KeyError:
+ with open(yardstick_file, 'r') as f:
+ for jsonfile in f:
+ data = json.loads(jsonfile)
+ try:
+ if not criteria:
+ criteria = data['result']['criteria']
+ if criteria == 'PASS':
+ details = data['result']['testcases']
+ for key, value in details.items():
+ sla_pass = value['tc_data'][0]['data']['sla_pass']
+ if not 1 == sla_pass:
+ criteria = 'FAIL'
+ except KeyError:
+ return 'FAILED', None
+
+ status = 'COMPLETED' if criteria == 'PASS' else 'FAILED'
+ results = {'criteria': criteria, 'timestart': None, 'timestop': None}
+ return status, results
+
+
+class BottlenecksChecker(Checker):
+
+ def get_status(self, results_dir, testcase):
+ bottlenecks_file = os.path.join(results_dir, 'stress_logs',
+ '{}.out'.format(testcase))
+ total_file = os.path.join(results_dir, 'results.json')
+ if not os.path.isfile(bottlenecks_file):
+ if not os.path.isfile(total_file):
+ return 'IN_PROGRESS', None
+ return 'FAILED', None
+
+ criteria = None
+
+ # get criteria and sub_testcase when all tests completed
+ if os.path.isfile(total_file):
+ criteria, _ = self.get_status_from_total_file(total_file, testcase)
+ if criteria == 'FAILED':
return 'FAILED', None
- status = 'COMPLETED' if criteria == 'PASS' else 'FAILED'
- results = {'criteria': criteria, 'timestart': None, 'timestop': None}
- return status, results
+ with open(bottlenecks_file, 'r') as f:
+ for jsonfile in f:
+ data = json.loads(jsonfile)
+ try:
+ if not criteria:
+ criteria = data['data_body']['result']
+ except KeyError:
+ return 'FAILED', None
+
+ status = 'COMPLETED' if criteria == 'PASS' else 'FAILED'
+ results = {'criteria': criteria, 'timestart': None, 'timestop': None}
+ return status, results
+
+
+class CheckerFactory(object):
+
+ CHECKER_MAP = {
+ 'functest': FunctestChecker,
+ 'yardstick': YardstickChecker,
+ 'bottlenecks': BottlenecksChecker
+ }
+
+ @classmethod
+ def create(cls, tc_type):
+ try:
+ return cls.CHECKER_MAP[tc_type]()
+ except KeyError:
+ return None
diff --git a/dovetail/api/app/utils.py b/dovetail/api/app/utils.py
index 1708dfb7..9f35ee03 100644
--- a/dovetail/api/app/utils.py
+++ b/dovetail/api/app/utils.py
@@ -2,20 +2,23 @@ import json
import os
-def write_env_file(envs, file_path):
- file_dir = os.path.dirname(file_path)
- if not os.path.exists(file_dir):
- os.makedirs(file_dir)
- with open(file_path, "w") as f:
- for key, value in envs.items():
- f.write("export {}={}\n".format(key, value))
- return True
+class Utils(object):
+ @staticmethod
+ def write_env_file(envs, file_path):
+ file_dir = os.path.dirname(file_path)
+ if not os.path.exists(file_dir):
+ os.makedirs(file_dir)
+ with open(file_path, "w") as f:
+ for key, value in envs.items():
+ f.write("export {}={}\n".format(key, value))
+ return True
-def write_yaml_file(data, file_path):
- file_dir = os.path.dirname(file_path)
- if not os.path.exists(file_dir):
- os.makedirs(file_dir)
- with open(file_path, "w") as f:
- f.write(json.dumps(data) + '\n')
- return True
+ @staticmethod
+ def write_yaml_file(data, file_path):
+ file_dir = os.path.dirname(file_path)
+ if not os.path.exists(file_dir):
+ os.makedirs(file_dir)
+ with open(file_path, "w") as f:
+ f.write(json.dumps(data) + '\n')
+ return True
diff --git a/dovetail/api/swagger.yaml b/dovetail/api/swagger.yaml
index 98291d2b..54695d7e 100644
--- a/dovetail/api/swagger.yaml
+++ b/dovetail/api/swagger.yaml
@@ -265,7 +265,7 @@ definitions:
- "yardstick.ha.rabbitmq"
testsuite:
type: "string"
- example: "ovp.2019.0x"
+ example: "ovp.2019.12"
testarea:
type: "array"
items:
@@ -333,7 +333,7 @@ definitions:
example: "functest.vping.ssh"
testSuiteName:
type: "string"
- example: "ovp.2019.0x"
+ example: "ovp.2019.12"
timestart:
type: "string"
format: "date-time"
diff --git a/dovetail/container.py b/dovetail/container.py
index ec9b1fb2..b2a9428f 100644
--- a/dovetail/container.py
+++ b/dovetail/container.py
@@ -58,23 +58,28 @@ class Container(object):
kwargs = dt_utils.get_value_from_dict('opts', project_cfg)
shell = dt_utils.get_value_from_dict('shell', project_cfg)
if not shell:
- return None
+ return None, "Lacking of key word 'shell' in config file."
env_list = dt_utils.get_value_from_dict('envs', project_cfg)
if env_list:
kwargs['environment'] = \
[env for env in env_list if env is not None]
volume_list = dt_utils.get_value_from_dict('volumes', project_cfg)
kwargs['volumes'] = [vol for vol in volume_list if vol is not None]
+
+ kwargs['mounts'], msg = dt_utils.get_mount_list(project_cfg)
+ if not kwargs['mounts']:
+ return None, msg
+
kwargs['extra_hosts'] = dt_utils.get_hosts_info(self.logger)
try:
self.container = self.client.containers.run(
docker_image, shell, **kwargs)
except (docker.errors.ContainerError, docker.errors.ImageNotFound,
- docker.errors.APIError):
- return None
+ docker.errors.APIError) as e:
+ return None, e
- return self.container.id
+ return self.container.id, 'Successfully to create container.'
def get_image_id(self, image_name):
try:
diff --git a/dovetail/report.py b/dovetail/report.py
index d40303e4..ed3f942b 100644
--- a/dovetail/report.py
+++ b/dovetail/report.py
@@ -319,29 +319,36 @@ class FunctestCrawler(Crawler):
testcase.set_results(json_results)
return json_results
- @staticmethod
- def get_details(data):
- t_details = data['details']
- details = {
- 'tests': t_details['tests_number'],
- 'failures': t_details['failures_number'],
- 'success': t_details['success'],
- 'errors': t_details['failures'],
- 'skipped': t_details['skipped']
- }
- return details
+ def get_details(self, data):
+ try:
+ t_details = data['details']
+ details = {
+ 'tests': t_details['tests_number'],
+ 'failures': t_details['failures_number'],
+ 'success': t_details['success'],
+ 'errors': t_details['failures'],
+ 'skipped': t_details['skipped']
+ }
+ return details
+ except Exception as e:
+ self.logger.exception("Failed to get details, {}.".format(e))
+ return None
- @staticmethod
- def get_rally_details(data):
- t_details = data['details'][0]['details']
- details = {
- 'tests': len(t_details['success']) + len(t_details['failures']),
- 'failures': len(t_details['failures']),
- 'success': t_details['success'],
- 'errors': t_details['failures'],
- 'skipped': []
- }
- return details
+ def get_rally_details(self, data):
+ try:
+ t_details = data['details']['modules'][0]['details']
+ tests = len(t_details['success']) + len(t_details['failures'])
+ details = {
+ 'tests': tests,
+ 'failures': len(t_details['failures']),
+ 'success': t_details['success'],
+ 'errors': t_details['failures'],
+ 'skipped': []
+ }
+ return details
+ except Exception as e:
+ self.logger.exception("Failed to get details, {}.".format(e))
+ return None
class FunctestK8sCrawler(FunctestCrawler):
diff --git a/dovetail/test_runner.py b/dovetail/test_runner.py
index 97367db9..266bdc20 100644
--- a/dovetail/test_runner.py
+++ b/dovetail/test_runner.py
@@ -77,9 +77,10 @@ class DockerRunner(Runner):
self.logger.error("Failed to pull the image.")
return
- container_id = container.create(docker_image)
+ container_id, msg = container.create(docker_image)
if not container_id:
self.logger.error('Failed to create container.')
+ self.logger.error(msg)
return
self.logger.debug('container id: {}'.format(container_id))
diff --git a/dovetail/tests/unit/test_container.py b/dovetail/tests/unit/test_container.py
index 01c1d8fd..86da9d3c 100644
--- a/dovetail/tests/unit/test_container.py
+++ b/dovetail/tests/unit/test_container.py
@@ -409,6 +409,7 @@ class ContainerTesting(unittest.TestCase):
container_id = 'container_id'
mock_utils.get_value_from_dict.side_effect = [
{'key': 'value'}, 'shell', 'envs', ['volume_one', 'volume_two']]
+ mock_utils.get_mount_list.side_effect = [['mount', 'list'], 'success']
mock_utils.get_hosts_info.return_value = 'host_info'
container_obj = Mock()
container_obj.id = container_id
@@ -417,7 +418,7 @@ class ContainerTesting(unittest.TestCase):
mock_config.dovetail_config = {'bottlenecks': project_config}
expected = container_id
- result = self.container.create(docker_image)
+ result, msg = self.container.create(docker_image)
mock_utils.get_value_from_dict.assert_has_calls([
call('opts', project_config),
@@ -426,6 +427,7 @@ class ContainerTesting(unittest.TestCase):
call('volumes', project_config)])
mock_utils.get_hosts_info.assert_called_once_with(self.logger)
self.assertEqual(expected, result)
+ self.assertEqual('Successfully to create container.', msg)
@patch('dovetail.container.dt_utils')
@patch('dovetail.container.dt_cfg')
@@ -435,12 +437,32 @@ class ContainerTesting(unittest.TestCase):
mock_utils.get_value_from_dict.side_effect = ['opts', None]
mock_utils.get_hosts_info.return_value = 'host_info'
- result = self.container.create(docker_image)
+ result, msg = self.container.create(docker_image)
mock_utils.get_value_from_dict.assert_has_calls([
call('opts', 'value'),
call('shell', 'value')])
self.assertEqual(None, result)
+ self.assertEqual("Lacking of key word 'shell' in config file.", msg)
+
+ @patch('dovetail.container.dt_utils')
+ @patch('dovetail.container.dt_cfg')
+ def test_create_mounts_none(self, mock_config, mock_utils):
+ docker_image = 'docker_image'
+ project_config = {}
+ mock_config.dovetail_config = {'bottlenecks': project_config}
+ mock_utils.get_value_from_dict.side_effect = [
+ {'key': 'value'}, 'shell', ['envs'], ['volume_one']]
+ mock_utils.get_mount_list.side_effect = [[None, 'error']]
+ mock_utils.get_hosts_info.return_value = 'host_info'
+
+ result, msg = self.container.create(docker_image)
+
+ mock_utils.get_value_from_dict.assert_has_calls([
+ call('opts', project_config), call('shell', project_config),
+ call('envs', project_config), call('volumes', project_config)])
+ self.assertEqual(None, result)
+ self.assertEqual('error', msg)
@patch('dovetail.container.dt_utils')
@patch('dovetail.container.dt_cfg')
@@ -448,13 +470,14 @@ class ContainerTesting(unittest.TestCase):
docker_image = 'docker_image'
mock_utils.get_value_from_dict.side_effect = [
{'key': 'value'}, 'shell', ['envs'], ['volume_one']]
+ mock_utils.get_mount_list.side_effect = [['mount', 'list'], 'success']
mock_utils.get_hosts_info.return_value = 'host_info'
mock_utils.check_https_enabled.return_value = True
self.client.containers.run.side_effect = \
docker.errors.ImageNotFound('error')
project_config = {}
mock_config.dovetail_config = {'bottlenecks': project_config}
- result = self.container.create(docker_image)
+ result, msg = self.container.create(docker_image)
mock_utils.get_value_from_dict.assert_has_calls([
call('opts', project_config),
@@ -463,3 +486,4 @@ class ContainerTesting(unittest.TestCase):
call('volumes', project_config)])
mock_utils.get_hosts_info.assert_called_once_with(self.logger)
self.assertEqual(None, result)
+ self.assertEqual('error', str(docker.errors.ImageNotFound('error')))
diff --git a/dovetail/tests/unit/test_report.py b/dovetail/tests/unit/test_report.py
index fe6530c9..41d70d2f 100644
--- a/dovetail/tests/unit/test_report.py
+++ b/dovetail/tests/unit/test_report.py
@@ -640,6 +640,28 @@ class ReportTesting(unittest.TestCase):
'Result file not found: {}'.format(file_path))
self.assertEqual(None, result)
+ def test_functest_crawler_get_details_exception(self):
+ logger_obj = Mock()
+ dt_report.FunctestCrawler.logger = logger_obj
+ data = None
+ crawler = dt_report.FunctestCrawler()
+
+ excepted = None
+ result = crawler.get_details(data)
+ logger_obj.exception.assert_called_once()
+ self.assertEqual(excepted, result)
+
+ def test_functest_crawler_get_rally_details_exception(self):
+ logger_obj = Mock()
+ dt_report.FunctestCrawler.logger = logger_obj
+ data = None
+ crawler = dt_report.FunctestCrawler()
+
+ excepted = None
+ result = crawler.get_rally_details(data)
+ logger_obj.exception.assert_called_once()
+ self.assertEqual(excepted, result)
+
@patch('builtins.open')
@patch('dovetail.report.json')
@patch('dovetail.report.dt_cfg')
@@ -719,12 +741,17 @@ class ReportTesting(unittest.TestCase):
'criteria': 'criteria',
'start_date': 'start_date',
'stop_date': 'stop_date',
- 'details': [{
- 'details': {
- 'success': ['subt_a'],
- 'failures': ['subt_b', 'subt_c']
- }
- }]
+ 'details': {
+ 'modules': [
+ {
+ 'details': {
+ 'success': ['subt_a'],
+ 'failures': ['subt_b', 'subt_c']
+ },
+ 'module': 'module'
+ }
+ ]
+ }
}
mock_json.loads.return_value = data_dict
diff --git a/dovetail/tests/unit/test_test_runner.py b/dovetail/tests/unit/test_test_runner.py
index 3cb27536..232de7b1 100644
--- a/dovetail/tests/unit/test_test_runner.py
+++ b/dovetail/tests/unit/test_test_runner.py
@@ -107,7 +107,7 @@ class TestRunnerTesting(unittest.TestCase):
docker_img_obj = Mock()
container_obj.get_docker_image.return_value = docker_img_obj
container_obj.pull_image.return_value = True
- container_obj.create.return_value = False
+ container_obj.create.return_value = [None, 'error']
mock_container.return_value = container_obj
docker_runner.run()
@@ -116,8 +116,8 @@ class TestRunnerTesting(unittest.TestCase):
container_obj.get_docker_image.assert_called_once_with()
container_obj.pull_image.assert_called_once_with(docker_img_obj)
container_obj.create.assert_called_once_with(docker_img_obj)
- docker_runner.logger.error.assert_called_once_with(
- 'Failed to create container.')
+ docker_runner.logger.error.assert_has_calls([
+ call('Failed to create container.'), call('error')])
@patch('dovetail.test_runner.dt_utils')
@patch('dovetail.test_runner.dt_cfg')
@@ -137,7 +137,8 @@ class TestRunnerTesting(unittest.TestCase):
container_obj.get_docker_image.return_value = docker_img_obj
container_obj.pull_image.return_value = True
container_id = '12345'
- container_obj.create.return_value = container_id
+ container_msg = 'Successfully to create container.'
+ container_obj.create.return_value = [container_id, container_msg]
mock_container.return_value = container_obj
self.testcase.pre_condition.return_value = ['cmd']
self.testcase.prepare_cmd.return_value = False
@@ -180,7 +181,8 @@ class TestRunnerTesting(unittest.TestCase):
container_obj.get_docker_image.return_value = docker_img_obj
container_obj.pull_image.return_value = True
container_id = '12345'
- container_obj.create.return_value = container_id
+ container_msg = 'Successfully to create container.'
+ container_obj.create.return_value = [container_id, container_msg]
mock_container.return_value = container_obj
self.testcase.pre_condition.return_value = ['cmd']
self.testcase.prepare_cmd.return_value = True
diff --git a/dovetail/tests/unit/utils/test_dovetail_utils.py b/dovetail/tests/unit/utils/test_dovetail_utils.py
index 5b403a5c..7d1fddc1 100644
--- a/dovetail/tests/unit/utils/test_dovetail_utils.py
+++ b/dovetail/tests/unit/utils/test_dovetail_utils.py
@@ -510,15 +510,12 @@ class DovetailUtilsTesting(unittest.TestCase):
hosts_obj.add.assert_called_once_with([entry_obj])
hosts_obj.write.assert_called_once()
- @patch('dovetail.utils.dovetail_utils.objwalk')
- def test_get_obj_by_path(self, mock_walk):
- path = dist_path = 'path'
- obj = 'obj'
- mock_walk.return_value = [(path, obj)]
-
- expected = obj
- result = dovetail_utils.get_obj_by_path(obj, dist_path)
+ def test_get_obj_by_path(self):
+ obj = {'list': ['a', 'b'], 'name': 'name'}
+ dst_path = ('name',)
+ expected = 'name'
+ result = dovetail_utils.get_obj_by_path(obj, dst_path)
self.assertEqual(expected, result)
@patch('dovetail.utils.dovetail_utils.objwalk')
@@ -1233,9 +1230,9 @@ class DovetailUtilsTesting(unittest.TestCase):
subp_stdout = Mock()
subprocess_obj.stdout = subp_stdout
subprocess_obj.wait.return_value = 0
- subp_stdout.readline.side_effect = [cmd_output, '']
+ subp_stdout.readline.side_effect = [cmd_output.encode()]
- expected = (0, 'line')
+ expected = (0, "b'line'")
result = dovetail_utils.exec_cmd(
cmd, logger=logger, exit_on_error=True, info=False,
exec_msg_on=True, err_msg='', verbose=verbose,
@@ -1276,7 +1273,7 @@ class DovetailUtilsTesting(unittest.TestCase):
subp_stdout = Mock()
subprocess_obj.stdout = subp_stdout
subprocess_obj.wait.return_value = 1
- subp_stdout.readline.side_effect = [cmd_output, '']
+ subp_stdout.readline.side_effect = [cmd_output.encode()]
dovetail_utils.exec_cmd(
cmd, logger=logger, exit_on_error=True, info=False,
@@ -1286,7 +1283,6 @@ class DovetailUtilsTesting(unittest.TestCase):
log_calls = [
call(verbose, logger, "Executing command: '%s'" % cmd, 'debug'),
call(verbose, logger, cmd_output, 'debug', True),
- call(verbose, logger, '', 'debug', True),
call(verbose, logger, "The command '%s' failed." % cmd, 'error')]
mock_log.assert_has_calls(log_calls)
mock_open.assert_called_once_with(cmd, shell=True, stdout=mock_pipe,
@@ -1384,3 +1380,23 @@ class DovetailUtilsTesting(unittest.TestCase):
logger.debug.assert_not_called()
logger.exception.assert_called_once_with(
"The results cannot be pushed to DB.")
+
+ def test_get_mount_list_error_mount(self):
+ project_cfg = {'mounts': ['aaa']}
+ res, msg = dovetail_utils.get_mount_list(project_cfg)
+ self.assertEqual(None, res)
+ self.assertEqual('Error mount aaa.', msg)
+
+ def test_get_mount_list_keyerror_exception(self):
+ project_cfg = {'mounts': ['aaa=a,bbb=b', '']}
+ res, msg = dovetail_utils.get_mount_list(project_cfg)
+ self.assertEqual(None, res)
+ self.assertEqual("'target'", str(msg))
+
+ def test_get_mount_list(self):
+ project_cfg = {'mounts': ['target=a,source=b', '']}
+ res, msg = dovetail_utils.get_mount_list(project_cfg)
+ expected = [{'Source': 'b', 'Type': 'bind', 'ReadOnly': False,
+ 'Target': 'a'}]
+ self.assertEqual(expected, res)
+ self.assertEqual('Successfully to get mount list.', msg)
diff --git a/dovetail/utils/dovetail_utils.py b/dovetail/utils/dovetail_utils.py
index 9259b03f..1c4aca9d 100644
--- a/dovetail/utils/dovetail_utils.py
+++ b/dovetail/utils/dovetail_utils.py
@@ -21,6 +21,7 @@ from distutils.version import LooseVersion
import yaml
import python_hosts
import docker
+from docker.types import Mount
from dovetail import constants
from dovetail.utils.dovetail_config import DovetailConfig as dt_cfg
@@ -60,8 +61,9 @@ def exec_cmd(cmd, logger=None, exit_on_error=False, info=False,
count = 1
DEBUG = os.getenv('DEBUG')
for line in iter(p.stdout.readline, b''):
- exec_log(verbose, logger, line.strip(), level, True)
- stdout += line
+ exec_log(verbose, logger, line.strip().decode('unicode-escape'),
+ level, True)
+ stdout += str(line)
if progress_bar and (DEBUG is None or DEBUG.lower() != 'true'):
show_progress_bar(count)
count += 1
@@ -431,3 +433,26 @@ def push_results_to_db(case_name, details, start_date, stop_date, logger):
except Exception:
logger.exception('The results cannot be pushed to DB.')
return False
+
+
+def get_mount_list(project_cfg):
+ mount_list = []
+ mounts = get_value_from_dict('mounts', project_cfg)
+ for mount in mounts:
+ if mount:
+ param_dict = {}
+ for param in mount.split(','):
+ key_word = param.split('=')
+
+ if len(key_word) != 2:
+ return None, 'Error mount {}.'.format(mount)
+
+ param_dict[key_word[0]] = key_word[1]
+ try:
+ mount_list.append(Mount(target=param_dict['target'],
+ source=param_dict['source'],
+ type='bind'))
+ except Exception as e:
+ return None, e
+
+ return mount_list, 'Successfully to get mount list.'
diff --git a/etc/compliance/ovp.2019.0x.yaml b/etc/compliance/ovp.2019.12.yaml
index c0d9fe29..a1f7a50b 100644
--- a/etc/compliance/ovp.2019.0x.yaml
+++ b/etc/compliance/ovp.2019.12.yaml
@@ -8,9 +8,9 @@
##############################################################################
---
-ovp.2019.0x:
- name: ovp.2019.0x
- version: '2019.0x'
+ovp.2019.12:
+ name: ovp.2019.12
+ version: '2019.12'
testcases_list:
mandatory:
- functest.vping.userdata
diff --git a/etc/compliance/proposed_tests.yml b/etc/compliance/proposed_tests.yml
index 16bb5d4c..17eadff8 100644
--- a/etc/compliance/proposed_tests.yml
+++ b/etc/compliance/proposed_tests.yml
@@ -26,3 +26,4 @@ proposed_tests:
- functest.rally.nova
- functest.rally.quotas
- functest.tempest.neutron_tempest_plugin_api
+ - functest.tempest.networking_sfc
diff --git a/etc/conf/bottlenecks_config.yml b/etc/conf/bottlenecks_config.yml
index 13b7fd88..c23ad9cc 100644
--- a/etc/conf/bottlenecks_config.yml
+++ b/etc/conf/bottlenecks_config.yml
@@ -17,7 +17,7 @@
{% set build_tag = build_tag or '' %}
{% set cacert_volume = '' %}
{% if cacert %}
- {% set cacert_volume = cacert + ':' + cacert %}
+ {% set cacert_volume = 'source=' + cacert + ',target=' + cacert %}
{% endif %}
{% set openrc_file = '/tmp/admin_rc.sh' %}
{% set result_dir = '/home/opnfv/bottlenecks/results' %}
@@ -27,7 +27,7 @@
bottlenecks:
image_name: opnfv/bottlenecks
- docker_tag: latest
+ docker_tag: 8.0.1-latest
opts:
detach: true
stdin_open: true
@@ -35,21 +35,22 @@ bottlenecks:
shell: '/bin/bash'
envs:
- 'DEPLOY_SCENARIO={{deploy_scenario}}'
- - 'Yardstick_TAG=stable'
+ - 'Yardstick_TAG=opnfv-8.0.0'
- 'OUTPUT_FILE={{testcase}}.out'
- 'CI_DEBUG={{debug}}'
- 'BUILD_TAG={{build_tag}}-{{testcase}}'
volumes:
- - '/var/run/docker.sock:/var/run/docker.sock'
- '{{dovetail_home}}/results/bottlenecks:/tmp'
- - '{{dovetail_home}}/pre_config/env_config.sh:{{openrc_file}}'
- - {{cacert_volume}}
- '{{dovetail_home}}/images:{{images_dir}}'
- '{{dovetail_home}}/results:{{result_dir}}'
+ mounts:
+ - 'source=/var/run/docker.sock,target=/var/run/docker.sock'
+ - 'source={{dovetail_home}}/pre_config/env_config.sh,target={{openrc_file}}'
+ - {{cacert_volume}}
pre_condition:
- 'cp {{images_dir}}/ubuntu-16.04-server-cloudimg-amd64-disk1.img {{image_file}}'
cmds:
- - 'python3 /home/opnfv/bottlenecks/testsuites/run_testsuite.py testcase {{validate_testcase}} False'
+ - 'python /home/opnfv/bottlenecks/testsuites/run_testsuite.py testcase {{validate_testcase}} False'
post_condition:
- 'mkdir -p {{result_dir}}'
- 'cp /tmp/bottlenecks.log {{result_dir}}'
diff --git a/etc/conf/cmd_config.yml b/etc/conf/cmd_config.yml
index 5d41992c..63c87343 100644
--- a/etc/conf/cmd_config.yml
+++ b/etc/conf/cmd_config.yml
@@ -28,7 +28,7 @@ cli:
testsuite:
flags:
- '--testsuite'
- default: 'ovp.2019.0x'
+ default: 'ovp.2019.12'
help: 'compliance testsuite.'
testarea:
flags:
diff --git a/etc/conf/dovetail_config.yml b/etc/conf/dovetail_config.yml
index 3cf4c6f1..10b81fbb 100644
--- a/etc/conf/dovetail_config.yml
+++ b/etc/conf/dovetail_config.yml
@@ -8,6 +8,7 @@
##############################################################################
---
+version: '2019.12'
cli_file_name: 'cmd_config.yml'
result_file: 'results.json'
@@ -26,7 +27,7 @@ testsuite_supported:
- proposed_tests
- debug
- healthcheck
- - ovp.2019.0x
+ - ovp.2019.12
- onap.tosca.2019.04
- onap.heat.2019.04
diff --git a/etc/conf/functest-k8s_config.yml b/etc/conf/functest-k8s_config.yml
index 52631673..a01f5715 100644
--- a/etc/conf/functest-k8s_config.yml
+++ b/etc/conf/functest-k8s_config.yml
@@ -33,9 +33,10 @@ functest-k8s:
- 'CI_DEBUG={{debug}}'
- 'BUILD_TAG={{build_tag}}-{{testcase}}'
volumes:
- - '{{dovetail_home}}/pre_config/k8.creds:{{openrc_file}}'
- - '{{dovetail_home}}/pre_config/admin.conf:{{kube_file}}'
- '{{dovetail_home}}/results/:{{result_dir}}'
+ mounts:
+ - 'source={{dovetail_home}}/pre_config/k8.creds,target={{openrc_file}}'
+ - 'source={{dovetail_home}}/pre_config/admin.conf,target={{kube_file}}'
pre_condition:
- 'echo test for precondition in functest'
cmds:
diff --git a/etc/conf/functest_config.yml b/etc/conf/functest_config.yml
index 06def4d4..91fd68e2 100644
--- a/etc/conf/functest_config.yml
+++ b/etc/conf/functest_config.yml
@@ -22,7 +22,7 @@
{% set build_tag = build_tag or '' %}
{% set cacert_volume = '' %}
{% if cacert %}
- {% set cacert_volume = cacert + ':' + cacert %}
+ {% set cacert_volume = 'source=' + cacert + ',target=' + cacert %}
{% endif %}
{% set openrc_file = '/home/opnfv/functest/conf/env_file' %}
{% set result_dir = '/home/opnfv/functest/results' %}
@@ -47,13 +47,14 @@ functest:
- 'CI_DEBUG={{debug}}'
- 'BUILD_TAG={{build_tag}}-{{testcase}}'
volumes:
- - '{{dovetail_home}}/pre_config/env_config.sh:{{openrc_file}}'
- - {{cacert_volume}}
- - '{{dovetail_home}}/pre_config:/home/opnfv/pre_config'
- '{{dovetail_home}}/userconfig:{{userconfig_dir}}'
- '{{dovetail_home}}/patches:{{patches_dir}}'
- '{{dovetail_home}}/results:{{result_dir}}'
- '{{dovetail_home}}/images:{{images_dir}}'
+ mounts:
+ - 'source={{dovetail_home}}/pre_config/env_config.sh,target={{openrc_file}}'
+ - 'source={{dovetail_home}}/pre_config,target=/home/opnfv/pre_config'
+ - {{cacert_volume}}
patches_dir: {{patches_dir}}
pre_condition:
- 'echo test for precondition in functest'
diff --git a/etc/conf/yardstick_config.yml b/etc/conf/yardstick_config.yml
index e4758c8e..3c4273f8 100644
--- a/etc/conf/yardstick_config.yml
+++ b/etc/conf/yardstick_config.yml
@@ -23,7 +23,7 @@
{% set build_tag = build_tag or '' %}
{% set cacert_volume = '' %}
{% if cacert %}
- {% set cacert_volume = cacert + ':' + cacert %}
+ {% set cacert_volume = 'source=' + cacert + ',target=' + cacert %}
{% endif %}
{% set openrc_file = '/etc/yardstick/openstack.creds' %}
{% set pod_file = '/etc/yardstick/pod.yaml' %}
@@ -31,7 +31,7 @@
yardstick:
image_name: opnfv/yardstick
- docker_tag: latest
+ docker_tag: opnfv-8.0.0
opts:
detach: true
stdin_open: true
@@ -43,12 +43,13 @@ yardstick:
- 'CI_DEBUG={{debug}}'
- 'BUILD_TAG={{build_tag}}-{{testcase}}"'
volumes:
- - '{{dovetail_home}}/pre_config/env_config.sh:{{openrc_file}}'
- - {{cacert_volume}}
- - '{{dovetail_home}}/pre_config/pod.yaml:{{pod_file}}'
- '{{dovetail_home}}/images:/home/opnfv/images'
- '{{dovetail_home}}/results:{{result_dir}}'
- - '{{dovetail_home}}/pre_config:{{dovetail_home}}/pre_config'
+ mounts:
+ - 'source={{dovetail_home}}/pre_config,target={{dovetail_home}}/pre_config'
+ - 'source={{dovetail_home}}/pre_config/env_config.sh,target={{openrc_file}}'
+ - 'source={{dovetail_home}}/pre_config/pod.yaml,target={{pod_file}}'
+ - {{cacert_volume}}
pre_condition:
- 'echo this is pre_condition'
cmds:
diff --git a/etc/patches/functest/disable-api-validation/0001-Allow-additional-properties-in-API-responses.patch b/etc/patches/functest/disable-api-validation/0001-Allow-additional-properties-in-API-responses.patch
index 7e81f49b..51136813 100644
--- a/etc/patches/functest/disable-api-validation/0001-Allow-additional-properties-in-API-responses.patch
+++ b/etc/patches/functest/disable-api-validation/0001-Allow-additional-properties-in-API-responses.patch
@@ -1,47 +1,54 @@
-From 90e0e23ce73a4459d981acca588f3873eb29c770 Mon Sep 17 00:00:00 2001
-From: Georg Kunz <georg.kunz@ericsson.com>
-Date: Wed, 6 Jun 2018 15:07:43 +0200
+From 56c7c31129ca7942e7d658b249e32a4b05a08c94 Mon Sep 17 00:00:00 2001
+From: Dovetail <ovp-support@lfnetworking.org>
+Date: Fri, 8 Nov 2019 07:56:32 +0000
Subject: [PATCH] Allow additional properties in API responses
+Signed-off-by: Dovetail <ovp-support@lfnetworking.org>
---
- .../response/compute/v2_1/agents.py | 10 +--
+ .../response/compute/v2_1/agents.py | 10 ++--
.../response/compute/v2_1/aggregates.py | 8 +--
.../compute/v2_1/availability_zone.py | 8 +--
.../response/compute/v2_1/baremetal_nodes.py | 6 +-
.../response/compute/v2_1/certificates.py | 4 +-
.../response/compute/v2_1/extensions.py | 4 +-
.../response/compute/v2_1/fixed_ips.py | 4 +-
- .../response/compute/v2_1/flavors.py | 10 +--
+ .../response/compute/v2_1/flavors.py | 10 ++--
.../response/compute/v2_1/flavors_access.py | 4 +-
.../compute/v2_1/flavors_extra_specs.py | 2 +-
- .../response/compute/v2_1/floating_ips.py | 20 +++---
- .../api_schema/response/compute/v2_1/hosts.py | 14 ++--
+ .../response/compute/v2_1/floating_ips.py | 20 +++----
+ .../api_schema/response/compute/v2_1/hosts.py | 14 ++---
.../response/compute/v2_1/hypervisors.py | 22 +++----
.../response/compute/v2_1/images.py | 16 ++---
.../compute/v2_1/instance_usage_audit_logs.py | 8 +--
.../response/compute/v2_1/interfaces.py | 8 +--
- .../response/compute/v2_1/keypairs.py | 14 ++--
- .../response/compute/v2_1/limits.py | 10 +--
+ .../response/compute/v2_1/keypairs.py | 14 ++---
+ .../response/compute/v2_1/limits.py | 10 ++--
.../response/compute/v2_1/migrations.py | 4 +-
.../response/compute/v2_1/parameter_types.py | 4 +-
.../response/compute/v2_1/quotas.py | 4 +-
.../v2_1/security_group_default_rule.py | 8 +--
.../response/compute/v2_1/security_groups.py | 16 ++---
- .../response/compute/v2_1/servers.py | 66 +++++++++----------
+ .../response/compute/v2_1/server_groups.py | 6 +-
+ .../response/compute/v2_1/servers.py | 60 +++++++++----------
.../response/compute/v2_1/services.py | 12 ++--
.../response/compute/v2_1/snapshots.py | 6 +-
.../response/compute/v2_1/tenant_networks.py | 6 +-
- .../response/compute/v2_1/versions.py | 10 +--
+ .../response/compute/v2_1/versions.py | 10 ++--
.../response/compute/v2_1/volumes.py | 12 ++--
.../response/compute/v2_11/services.py | 4 +-
- .../response/compute/v2_16/servers.py | 14 ++--
+ .../response/compute/v2_16/servers.py | 14 ++---
.../response/compute/v2_23/migrations.py | 4 +-
.../response/compute/v2_26/servers.py | 2 +-
- .../response/compute/v2_3/servers.py | 14 ++--
+ .../response/compute/v2_3/servers.py | 14 ++---
+ .../response/compute/v2_45/images.py | 2 +-
.../response/compute/v2_47/servers.py | 2 +-
- .../response/compute/v2_48/servers.py | 10 +--
+ .../response/compute/v2_48/servers.py | 10 ++--
+ .../response/compute/v2_53/services.py | 4 +-
+ .../response/compute/v2_55/flavors.py | 10 ++--
.../response/compute/v2_6/servers.py | 4 +-
- 37 files changed, 187 insertions(+), 187 deletions(-)
+ .../response/compute/v2_61/flavors.py | 6 +-
+ tempest/lib/api_schema/response/volume/qos.py | 16 ++---
+ 43 files changed, 206 insertions(+), 206 deletions(-)
diff --git a/tempest/lib/api_schema/response/compute/v2_1/agents.py b/tempest/lib/api_schema/response/compute/v2_1/agents.py
index 6f712b41e..09feb73df 100644
@@ -252,7 +259,7 @@ index a653213f0..b53565aab 100644
}
}
diff --git a/tempest/lib/api_schema/response/compute/v2_1/flavors.py b/tempest/lib/api_schema/response/compute/v2_1/flavors.py
-index 547d94d57..76cbb8a55 100644
+index bd5e3d636..27948a784 100644
--- a/tempest/lib/api_schema/response/compute/v2_1/flavors.py
+++ b/tempest/lib/api_schema/response/compute/v2_1/flavors.py
@@ -28,13 +28,13 @@ list_flavors = {
@@ -278,10 +285,10 @@ index 547d94d57..76cbb8a55 100644
- 'additionalProperties': False,
+ 'additionalProperties': True,
# 'OS-FLV-DISABLED', 'os-flavor-access', 'rxtx_factor' and
- # 'OS-FLV-EXT-DATA' are API extensions. So they are not 'required'.
+ # 'OS-FLV-EXT-DATA' are API extensions, so they are not 'required'.
'required': ['name', 'links', 'ram', 'vcpus', 'swap', 'disk', 'id']
@@ -77,7 +77,7 @@ list_flavors_details = {
- # to be present always So it is not 'required'.
+ # to be present always so it is not 'required'.
'flavors_links': parameter_types.links
},
- 'additionalProperties': False,
@@ -289,7 +296,7 @@ index 547d94d57..76cbb8a55 100644
'required': ['flavors']
}
}
-@@ -93,7 +93,7 @@ create_get_flavor_details = {
+@@ -89,7 +89,7 @@ create_update_get_flavor_details = {
'properties': {
'flavor': common_flavor_info
},
@@ -318,7 +325,7 @@ index a4d6af0d7..958ed02b5 100644
}
}
diff --git a/tempest/lib/api_schema/response/compute/v2_1/flavors_extra_specs.py b/tempest/lib/api_schema/response/compute/v2_1/flavors_extra_specs.py
-index a438d4869..c8988b1de 100644
+index 3aa1edac4..081d21a92 100644
--- a/tempest/lib/api_schema/response/compute/v2_1/flavors_extra_specs.py
+++ b/tempest/lib/api_schema/response/compute/v2_1/flavors_extra_specs.py
@@ -24,7 +24,7 @@ set_get_flavor_extra_specs = {
@@ -996,8 +1003,39 @@ index 5ed5a5c80..d9f1794c6 100644
'required': ['security_group_rule']
}
}
+diff --git a/tempest/lib/api_schema/response/compute/v2_1/server_groups.py b/tempest/lib/api_schema/response/compute/v2_1/server_groups.py
+index 01db20b88..49a8f0d95 100644
+--- a/tempest/lib/api_schema/response/compute/v2_1/server_groups.py
++++ b/tempest/lib/api_schema/response/compute/v2_1/server_groups.py
+@@ -29,7 +29,7 @@ common_server_group = {
+ },
+ 'metadata': {'type': 'object'}
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['id', 'name', 'policies', 'members', 'metadata']
+ }
+
+@@ -40,7 +40,7 @@ create_show_server_group = {
+ 'properties': {
+ 'server_group': common_server_group
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['server_group']
+ }
+ }
+@@ -59,7 +59,7 @@ list_server_groups = {
+ 'items': common_server_group
+ }
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['server_groups']
+ }
+ }
diff --git a/tempest/lib/api_schema/response/compute/v2_1/servers.py b/tempest/lib/api_schema/response/compute/v2_1/servers.py
-index 2954de005..e22fba32c 100644
+index 3300298d1..ac050eb43 100644
--- a/tempest/lib/api_schema/response/compute/v2_1/servers.py
+++ b/tempest/lib/api_schema/response/compute/v2_1/servers.py
@@ -29,14 +29,14 @@ create_server = {
@@ -1167,34 +1205,7 @@ index 2954de005..e22fba32c 100644
'required': ['addresses']
}
}
-@@ -362,7 +362,7 @@ common_server_group = {
- },
- 'metadata': {'type': 'object'}
- },
-- 'additionalProperties': False,
-+ 'additionalProperties': True,
- 'required': ['id', 'name', 'policies', 'members', 'metadata']
- }
-
-@@ -373,7 +373,7 @@ create_show_server_group = {
- 'properties': {
- 'server_group': common_server_group
- },
-- 'additionalProperties': False,
-+ 'additionalProperties': True,
- 'required': ['server_group']
- }
- }
-@@ -392,7 +392,7 @@ list_server_groups = {
- 'items': common_server_group
- }
- },
-- 'additionalProperties': False,
-+ 'additionalProperties': True,
- 'required': ['server_groups']
- }
- }
-@@ -408,7 +408,7 @@ instance_actions = {
+@@ -356,7 +356,7 @@ instance_actions = {
'message': {'type': ['string', 'null']},
'instance_uuid': {'type': 'string'}
},
@@ -1203,16 +1214,16 @@ index 2954de005..e22fba32c 100644
'required': ['action', 'request_id', 'user_id', 'project_id',
'start_time', 'message', 'instance_uuid']
}
-@@ -429,7 +429,7 @@ instance_action_events = {
+@@ -377,7 +377,7 @@ instance_action_events = {
'result': {'type': ['string', 'null']},
'traceback': {'type': ['string', 'null']}
},
- 'additionalProperties': False,
+ 'additionalProperties': True,
- 'required': ['event', 'start_time', 'finish_time', 'result',
- 'traceback']
- }
-@@ -445,7 +445,7 @@ list_instance_actions = {
+ # NOTE(zhufl): events.traceback can only be seen by admin users
+ # with default policy.json, so it shouldn't be a required field.
+ 'required': ['event', 'start_time', 'finish_time', 'result']
+@@ -394,7 +394,7 @@ list_instance_actions = {
'items': instance_actions
}
},
@@ -1221,7 +1232,7 @@ index 2954de005..e22fba32c 100644
'required': ['instanceActions']
}
}
-@@ -463,7 +463,7 @@ show_instance_action = {
+@@ -412,7 +412,7 @@ show_instance_action = {
'properties': {
'instanceAction': instance_actions_with_events
},
@@ -1230,7 +1241,7 @@ index 2954de005..e22fba32c 100644
'required': ['instanceAction']
}
}
-@@ -475,7 +475,7 @@ show_password = {
+@@ -424,7 +424,7 @@ show_password = {
'properties': {
'password': {'type': 'string'}
},
@@ -1239,7 +1250,7 @@ index 2954de005..e22fba32c 100644
'required': ['password']
}
}
-@@ -494,11 +494,11 @@ get_vnc_console = {
+@@ -443,11 +443,11 @@ get_vnc_console = {
'format': 'uri'
}
},
@@ -1253,7 +1264,7 @@ index 2954de005..e22fba32c 100644
'required': ['console']
}
}
-@@ -510,7 +510,7 @@ get_console_output = {
+@@ -459,7 +459,7 @@ get_console_output = {
'properties': {
'output': {'type': 'string'}
},
@@ -1262,7 +1273,7 @@ index 2954de005..e22fba32c 100644
'required': ['output']
}
}
-@@ -527,7 +527,7 @@ set_server_metadata = {
+@@ -476,7 +476,7 @@ set_server_metadata = {
}
}
},
@@ -1271,7 +1282,7 @@ index 2954de005..e22fba32c 100644
'required': ['metadata']
}
}
-@@ -552,7 +552,7 @@ set_show_server_metadata_item = {
+@@ -501,7 +501,7 @@ set_show_server_metadata_item = {
}
}
},
@@ -1280,7 +1291,7 @@ index 2954de005..e22fba32c 100644
'required': ['meta']
}
}
-@@ -583,7 +583,7 @@ evacuate_server_with_admin_pass = {
+@@ -532,7 +532,7 @@ evacuate_server_with_admin_pass = {
'properties': {
'adminPass': {'type': 'string'}
},
@@ -1448,7 +1459,7 @@ index 7f5623928..b57d1b8cb 100644
}
}
diff --git a/tempest/lib/api_schema/response/compute/v2_1/volumes.py b/tempest/lib/api_schema/response/compute/v2_1/volumes.py
-index c35dae981..c487aa5a4 100644
+index d367f2adb..1125cbfa4 100644
--- a/tempest/lib/api_schema/response/compute/v2_1/volumes.py
+++ b/tempest/lib/api_schema/response/compute/v2_1/volumes.py
@@ -42,7 +42,7 @@ create_get_volume = {
@@ -1460,9 +1471,9 @@ index c35dae981..c487aa5a4 100644
# NOTE- If volume is not attached to any server
# then, 'attachments' attributes comes as array
# with empty objects "[{}]" due to that elements
-@@ -52,13 +52,13 @@ create_get_volume = {
- }
- }
+@@ -53,13 +53,13 @@ create_get_volume = {
+ },
+ 'os-vol-host-attr:host': {'type': 'string'},
},
- 'additionalProperties': False,
+ 'additionalProperties': True,
@@ -1476,7 +1487,7 @@ index c35dae981..c487aa5a4 100644
'required': ['volume']
}
}
-@@ -93,7 +93,7 @@ list_volumes = {
+@@ -94,7 +94,7 @@ list_volumes = {
'volumeId': {'type': 'string'},
'serverId': {'type': 'string'}
},
@@ -1485,7 +1496,7 @@ index c35dae981..c487aa5a4 100644
# NOTE- If volume is not attached to any server
# then, 'attachments' attributes comes as array
# with empty object "[{}]" due to that elements
-@@ -103,7 +103,7 @@ list_volumes = {
+@@ -104,7 +104,7 @@ list_volumes = {
}
}
},
@@ -1494,7 +1505,7 @@ index c35dae981..c487aa5a4 100644
'required': ['id', 'status', 'displayName',
'availabilityZone', 'createdAt',
'displayDescription', 'volumeType',
-@@ -112,7 +112,7 @@ list_volumes = {
+@@ -113,7 +113,7 @@ list_volumes = {
}
}
},
@@ -1504,7 +1515,7 @@ index c35dae981..c487aa5a4 100644
}
}
diff --git a/tempest/lib/api_schema/response/compute/v2_11/services.py b/tempest/lib/api_schema/response/compute/v2_11/services.py
-index 18b833bd2..885e8cdac 100644
+index 9ece1f9b3..b17d180e3 100644
--- a/tempest/lib/api_schema/response/compute/v2_11/services.py
+++ b/tempest/lib/api_schema/response/compute/v2_11/services.py
@@ -36,11 +36,11 @@ update_forced_down = {
@@ -1522,7 +1533,7 @@ index 18b833bd2..885e8cdac 100644
}
}
diff --git a/tempest/lib/api_schema/response/compute/v2_16/servers.py b/tempest/lib/api_schema/response/compute/v2_16/servers.py
-index 3eb658f4e..d0a30e3b0 100644
+index fc81ff70a..495ed3ec9 100644
--- a/tempest/lib/api_schema/response/compute/v2_16/servers.py
+++ b/tempest/lib/api_schema/response/compute/v2_16/servers.py
@@ -32,7 +32,7 @@ server_detail = {
@@ -1611,10 +1622,10 @@ index 3cd0f6ec1..af6fd8ade 100644
}
}
diff --git a/tempest/lib/api_schema/response/compute/v2_26/servers.py b/tempest/lib/api_schema/response/compute/v2_26/servers.py
-index b03bdf6fa..6b3936b3c 100644
+index 5a0f98732..248605b29 100644
--- a/tempest/lib/api_schema/response/compute/v2_26/servers.py
+++ b/tempest/lib/api_schema/response/compute/v2_26/servers.py
-@@ -54,7 +54,7 @@ list_tags = {
+@@ -68,7 +68,7 @@ list_tags = {
'properties': {
'tags': tag_items,
},
@@ -1624,7 +1635,7 @@ index b03bdf6fa..6b3936b3c 100644
}
}
diff --git a/tempest/lib/api_schema/response/compute/v2_3/servers.py b/tempest/lib/api_schema/response/compute/v2_3/servers.py
-index f24103ea2..5b5c9c197 100644
+index 1674c1b11..4dcfad49c 100644
--- a/tempest/lib/api_schema/response/compute/v2_3/servers.py
+++ b/tempest/lib/api_schema/response/compute/v2_3/servers.py
@@ -40,7 +40,7 @@ server_detail = {
@@ -1690,8 +1701,21 @@ index f24103ea2..5b5c9c197 100644
# NOTE(gmann): servers_links attribute is not necessary to be
# present always So it is not 'required'.
'required': ['servers']
+diff --git a/tempest/lib/api_schema/response/compute/v2_45/images.py b/tempest/lib/api_schema/response/compute/v2_45/images.py
+index 8a48f363e..395dd177d 100644
+--- a/tempest/lib/api_schema/response/compute/v2_45/images.py
++++ b/tempest/lib/api_schema/response/compute/v2_45/images.py
+@@ -19,7 +19,7 @@ create_image = {
+ 'properties': {
+ 'image_id': {'type': 'string'}
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['image_id']
+ }
+ }
diff --git a/tempest/lib/api_schema/response/compute/v2_47/servers.py b/tempest/lib/api_schema/response/compute/v2_47/servers.py
-index 37a084f1c..aa2f312a4 100644
+index d580f2c63..52ac89370 100644
--- a/tempest/lib/api_schema/response/compute/v2_47/servers.py
+++ b/tempest/lib/api_schema/response/compute/v2_47/servers.py
@@ -30,7 +30,7 @@ flavor = {
@@ -1704,7 +1728,7 @@ index 37a084f1c..aa2f312a4 100644
}
diff --git a/tempest/lib/api_schema/response/compute/v2_48/servers.py b/tempest/lib/api_schema/response/compute/v2_48/servers.py
-index 59047583a..e688db305 100644
+index e2e45bc29..3310783ed 100644
--- a/tempest/lib/api_schema/response/compute/v2_48/servers.py
+++ b/tempest/lib/api_schema/response/compute/v2_48/servers.py
@@ -45,7 +45,7 @@ show_server_diagnostics = {
@@ -1750,11 +1774,78 @@ index 59047583a..e688db305 100644
'required': [
'state', 'driver', 'hypervisor', 'hypervisor_os', 'uptime',
'config_drive', 'num_cpus', 'num_nics', 'num_disks',
+diff --git a/tempest/lib/api_schema/response/compute/v2_53/services.py b/tempest/lib/api_schema/response/compute/v2_53/services.py
+index 97b0c7260..b526e638d 100644
+--- a/tempest/lib/api_schema/response/compute/v2_53/services.py
++++ b/tempest/lib/api_schema/response/compute/v2_53/services.py
+@@ -51,13 +51,13 @@ update_service = {
+ 'zone': {'type': 'string'},
+ 'forced_down': {'type': 'boolean'}
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['id', 'binary', 'disabled_reason', 'host',
+ 'state', 'status', 'updated_at', 'zone',
+ 'forced_down']
+ }
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['service']
+ }
+ }
+diff --git a/tempest/lib/api_schema/response/compute/v2_55/flavors.py b/tempest/lib/api_schema/response/compute/v2_55/flavors.py
+index 554f43b4c..07adf6270 100644
+--- a/tempest/lib/api_schema/response/compute/v2_55/flavors.py
++++ b/tempest/lib/api_schema/response/compute/v2_55/flavors.py
+@@ -45,13 +45,13 @@ list_flavors = {
+ 'id': {'type': 'string'},
+ 'description': flavor_description
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['name', 'links', 'id', 'description']
+ }
+ },
+ 'flavors_links': parameter_types.links
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ # NOTE(gmann): flavors_links attribute is not necessary
+ # to be present always So it is not 'required'.
+ 'required': ['flavors']
+@@ -76,7 +76,7 @@ common_flavor_info = {
+ 'OS-FLV-EXT-DATA:ephemeral': {'type': 'integer'},
+ 'description': flavor_description
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ # 'OS-FLV-DISABLED', 'os-flavor-access', 'rxtx_factor' and
+ # 'OS-FLV-EXT-DATA' are API extensions. So they are not 'required'.
+ 'required': ['name', 'links', 'ram', 'vcpus', 'swap', 'disk', 'id',
+@@ -96,7 +96,7 @@ list_flavors_details = {
+ # to be present always So it is not 'required'.
+ 'flavors_links': parameter_types.links
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['flavors']
+ }
+ }
+@@ -108,7 +108,7 @@ create_update_get_flavor_details = {
+ 'properties': {
+ 'flavor': common_flavor_info
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['flavor']
+ }
+ }
diff --git a/tempest/lib/api_schema/response/compute/v2_6/servers.py b/tempest/lib/api_schema/response/compute/v2_6/servers.py
-index 29b3e8600..4caf107a4 100644
+index 922bf7923..b94050d86 100644
--- a/tempest/lib/api_schema/response/compute/v2_6/servers.py
+++ b/tempest/lib/api_schema/response/compute/v2_6/servers.py
-@@ -38,11 +38,11 @@ get_remote_consoles = {
+@@ -50,11 +50,11 @@ get_remote_consoles = {
'format': 'uri'
}
},
@@ -1768,6 +1859,104 @@ index 29b3e8600..4caf107a4 100644
'required': ['remote_console']
}
}
+diff --git a/tempest/lib/api_schema/response/compute/v2_61/flavors.py b/tempest/lib/api_schema/response/compute/v2_61/flavors.py
+index 5119466ba..6d1d8ddb5 100644
+--- a/tempest/lib/api_schema/response/compute/v2_61/flavors.py
++++ b/tempest/lib/api_schema/response/compute/v2_61/flavors.py
+@@ -58,7 +58,7 @@ common_flavor_info = {
+ 'description': flavor_description,
+ 'extra_specs': flavor_extra_specs
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ # 'OS-FLV-DISABLED', 'os-flavor-access', 'rxtx_factor' and
+ # 'OS-FLV-EXT-DATA' are API extensions. so they are not 'required'.
+ 'required': ['name', 'links', 'ram', 'vcpus', 'swap', 'disk', 'id',
+@@ -78,7 +78,7 @@ list_flavors_details = {
+ # to be present always so it is not 'required'.
+ 'flavors_links': parameter_types.links
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['flavors']
+ }
+ }
+@@ -90,7 +90,7 @@ create_update_get_flavor_details = {
+ 'properties': {
+ 'flavor': common_flavor_info
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['flavor']
+ }
+ }
+diff --git a/tempest/lib/api_schema/response/volume/qos.py b/tempest/lib/api_schema/response/volume/qos.py
+index d1b391062..3594c3f4e 100644
+--- a/tempest/lib/api_schema/response/volume/qos.py
++++ b/tempest/lib/api_schema/response/volume/qos.py
+@@ -25,7 +25,7 @@ show_qos = {
+ 'consumer': {'type': 'string'},
+ 'specs': {'type': ['object', 'null']},
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['name', 'id', 'specs']
+ },
+ 'links': {
+@@ -37,12 +37,12 @@ show_qos = {
+ 'format': 'uri'},
+ 'rel': {'type': 'string'},
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['href', 'rel']
+ }
+ }
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['qos_specs', 'links']
+ }
+ }
+@@ -67,12 +67,12 @@ list_qos = {
+ 'id': {'type': 'string', 'format': 'uuid'},
+ 'name': {'type': 'string'}
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['specs', 'id', 'name']
+ }
+ }
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['qos_specs']
+ }
+ }
+@@ -87,7 +87,7 @@ set_qos_key = {
+ 'patternProperties': {'^.+$': {'type': 'string'}}
+ },
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['qos_specs']
+ }
+ }
+@@ -109,12 +109,12 @@ show_association_qos = {
+ 'id': {'type': 'string', 'format': 'uuid'},
+ 'name': {'type': 'string'}
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['association_type', 'id', 'name']
+ }
+ },
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['qos_associations']
+ }
+ }
--
-2.17.1
+2.20.1
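The nested patch above makes one repeated change across Tempest's response schemas: every `'additionalProperties': False` is flipped to `True`, so responses carrying keys beyond the upstream schema are tolerated instead of failing validation. A minimal sketch using the `jsonschema` package (an assumption here; Tempest's internal checker behaves equivalently for this property) illustrates the effect — the `vendor_extra` key is hypothetical:

```python
# Sketch only: why relaxing 'additionalProperties' lets vendor-extended
# API responses pass schema validation.
from jsonschema import ValidationError, validate

response = {'password': 'secret', 'vendor_extra': 'x'}  # hypothetical extra key

strict = {
    'type': 'object',
    'properties': {'password': {'type': 'string'}},
    'additionalProperties': False,  # upstream default: unknown keys fail
    'required': ['password'],
}
relaxed = dict(strict, additionalProperties=True)  # the patched behaviour

try:
    validate(response, strict)
except ValidationError as exc:
    print('strict schema rejects the extra key:', exc.message)

validate(response, relaxed)  # passes: unknown keys are now tolerated
print('relaxed schema accepts the extra key')
```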
diff --git a/etc/testcase/functest.tempest.networking_sfc.yml b/etc/testcase/functest.tempest.networking_sfc.yml
new file mode 100644
index 00000000..f6a2dd82
--- /dev/null
+++ b/etc/testcase/functest.tempest.networking_sfc.yml
@@ -0,0 +1,31 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+functest.tempest.networking_sfc:
+ name: functest.tempest.networking_sfc
+  objective: validate SFC API CRUD operations by means of Tempest tests from the networking-sfc repository
+ validate:
+ type: functest
+ testcase: networking-sfc
+ pre_condition:
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ report:
+ source_archive_files:
+ - functest.log
+ - networking-sfc/rally.log
+ - networking-sfc/tempest-report.html
+ dest_archive_files:
+ - tempest_logs/functest.tempest.networking_sfc.functest.log
+ - tempest_logs/functest.tempest.networking_sfc.log
+ - tempest_logs/functest.tempest.networking_sfc.html
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.networking_sfc.html
+ sub_testcase_list:
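The new testcase definition follows the same layout as the other `etc/testcase/*.yml` files: a `validate` section naming the functest driver and the test to run, and a `report` section mapping result files into the archive. A minimal sketch for inspecting it outside of dovetail itself (assumption: plain PyYAML is sufficient; this is not dovetail's internal loader):

```python
# Sketch: load the testcase definition and print the fields dovetail consumes.
import yaml

with open('etc/testcase/functest.tempest.networking_sfc.yml') as f:
    spec = yaml.safe_load(f)['functest.tempest.networking_sfc']

print(spec['validate']['type'], spec['validate']['testcase'])  # functest networking-sfc
for src, dst in zip(spec['report']['source_archive_files'],
                    spec['report']['dest_archive_files']):
    print(f'{src} -> {dst}')  # e.g. functest.log -> tempest_logs/...
```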
diff --git a/etc/testcase/functest.tempest.neutron_tempest_plugin_api.yml b/etc/testcase/functest.tempest.neutron_tempest_plugin_api.yml
index 73ae37c0..245dd6cc 100644
--- a/etc/testcase/functest.tempest.neutron_tempest_plugin_api.yml
+++ b/etc/testcase/functest.tempest.neutron_tempest_plugin_api.yml
@@ -53,7 +53,7 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_ports.PortsTestJSON.test_change_dhcp_flag_then_create_port
- neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_create
- neutron_tempest_plugin.api.admin.test_tag.TagFilterSubnetpoolTestJSON.test_filter_subnetpool_tags
- - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv4Test.test_net_ip_availability_after_subnet_and_ports
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestMtusJSON.test_add_subport_with_mtu_greater_than_subport
- neutron_tempest_plugin.api.admin.test_routers_flavors.RoutersFlavorTestCase.test_delete_router_flavor_in_use
- neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_qos_network_policy_binding_bumps_revision
- neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_get_rules_by_policy
@@ -76,6 +76,7 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_routers.RoutersTest.test_network_attached_with_two_routers
- neutron_tempest_plugin.api.admin.test_extension_driver_port_security_admin.PortSecurityAdminTests.test_create_port_security_false_on_shared_network
- neutron_tempest_plugin.api.test_networks.NetworksTestJSON.test_list_networks_fields_keystone_v3
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStampWithSecurityGroup.test_create_sgrule_with_timestamp
- neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_create_port_with_timestamp
- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_page_reverse_desc
- neutron_tempest_plugin.api.test_qos.QosDscpMarkingRuleTestJSON.test_rule_create
@@ -88,6 +89,7 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_routers.RoutersTest.test_update_router_set_gateway_without_snat
- neutron_tempest_plugin.api.admin.test_networks.NetworksTestAdmin.test_create_tenant_network_vxlan
- neutron_tempest_plugin.api.test_qos.QosDscpMarkingRuleTestJSON.test_rule_create_forbidden_for_regular_tenants
+ - neutron_tempest_plugin.api.test_security_groups.RbacSharedSecurityGroupTest.test_filter_fields
- neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_filter_fields
- neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsIpV6TestJSON.test_create_update_delete_service_profile
- neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsJson.test_create_update_delete_service_profile
@@ -105,6 +107,7 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_policy_target_update
- neutron_tempest_plugin.api.test_ports.PortsSearchCriteriaTest.test_list_pagination_page_reverse_with_href_links
- neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_external_network_bumps_revision
+ - neutron_tempest_plugin.api.admin.test_network_segment_range.NetworkSegmentRangeTestJson.test_update_network_segment_range_failed_with_existing_range_impacted
- neutron_tempest_plugin.api.test_networks.NetworksSearchCriteriaTest.test_list_pagination_with_marker
- neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_create_fail_for_the_same_type
- neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_filter_rbac_policies
@@ -114,10 +117,8 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_routers_negative.DvrRoutersNegativeTest.test_router_create_tenant_distributed_returns_forbidden
- neutron_tempest_plugin.api.test_routers.RoutersSearchCriteriaTest.test_list_sorts_desc
- neutron_tempest_plugin.api.test_allowed_address_pair.AllowedAddressPairIpV6TestJSON.test_update_port_with_address_pair
- - neutron_tempest_plugin.api.test_security_groups.SecGroupProtocolIPv6Test.test_create_show_delete_security_group_rule_integers
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_tenant_update_subnetpool
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_get_subnetpool
- - neutron_tempest_plugin.api.test_security_groups.SecGroupProtocolTest.test_create_show_delete_security_group_rule_integers
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_admin_create_shared_subnetpool
- neutron_tempest_plugin.api.admin.test_dhcp_agent_scheduler.DHCPAgentSchedulersTestJSON.test_add_remove_network_from_dhcp_agent
- neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleTestJSON.test_rule_update_forbidden_for_regular_tenants_own_policy
@@ -139,13 +140,15 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.admin.test_shared_network_extension.SharedNetworksTest.test_list_shared_networks
- neutron_tempest_plugin.api.test_address_scopes.AddressScopeTest.test_delete_address_scope
- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_update_trunk
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv4Test.test_show_ip_availability_after_port_delete
- neutron_tempest_plugin.api.test_allowed_address_pair.AllowedAddressPairTestJSON.test_update_port_with_multiple_ip_mac_address_pair
- neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_network_presence_prevents_policy_rbac_policy_deletion
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsSearchCriteriaTest.test_list_sorts_asc
- neutron_tempest_plugin.api.test_networks.NetworksTestJSON.test_create_update_network_dns_domain
- neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_update_subnetpool_prefix_intersect
- neutron_tempest_plugin.api.admin.test_dhcp_agent_scheduler.DHCPAgentSchedulersTestJSON.test_dhcp_port_status_active
- - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_show_rule_type_details_as_admin
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv6Test.test_list_ip_availability_before_subnet
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_extra_dhcp_opt_bumps_revision
- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_sorts_desc
- neutron_tempest_plugin.api.test_qos.QosTestJSON.test_create_policy
- neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_tenant_get_not_shared_admin_subnetpool
@@ -156,10 +159,9 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.admin.test_logging_negative.LoggingNegativeTestJSON.test_create_log_with_nonexistent_port
- neutron_tempest_plugin.api.test_qos.QosTestJSON.test_list_admin_rule_types
- neutron_tempest_plugin.api.test_allowed_address_pair.AllowedAddressPairIpV6TestJSON.test_update_port_with_multiple_ip_mac_address_pair
- - neutron_tempest_plugin.api.test_subnets.SubnetsSearchCriteriaTest.test_list_pagination
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv4Test.test_show_ip_availability_after_subnet_and_ports_create
- neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_create_fail_for_the_same_type
- neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_create_sp_associate_address_scope_multiple_prefix_intersect
- - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_admin_create_shared_subnetpool
- neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_update
- neutron_tempest_plugin.api.test_ports.PortsSearchCriteriaTest.test_list_pagination
- neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_port_presence_prevents_network_rbac_policy_deletion
@@ -169,13 +171,15 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_metering_extensions.MeteringTestJSON.test_create_delete_metering_label_with_filters
- neutron_tempest_plugin.api.test_extension_driver_port_security.PortSecTest.test_port_sec_update_pass
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_update_subnetpool_prefixes_append
+ - neutron_tempest_plugin.api.test_subnets.SubnetsSearchCriteriaTest.test_list_pagination
- neutron_tempest_plugin.api.test_routers.RoutersIpV6Test.test_create_router_with_snat_explicit
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_show_subnetpool_has_project_id
+ - neutron_tempest_plugin.api.test_floating_ips.FloatingIPPoolTestJSON.test_create_floatingip_from_specific_pool
- neutron_tempest_plugin.api.admin.test_tag.TagFilterRouterTestJSON.test_filter_router_tags
- neutron_tempest_plugin.api.test_address_scopes_negative.AddressScopeTestNegative.test_tenant_get_not_shared_admin_address_scope
- neutron_tempest_plugin.api.test_ports.PortsSearchCriteriaTest.test_list_sorts_desc
- neutron_tempest_plugin.api.test_subnets.SubnetsSearchCriteriaTest.test_list_pagination_page_reverse_with_href_links
- - neutron_tempest_plugin.api.test_address_scopes_negative.AddressScopeTestNegative.test_get_non_existent_address_scope
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv4Test.test_list_ip_availability_after_port_delete
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_port_id_disabled_trunk
- neutron_tempest_plugin.api.admin.test_tag.TagFilterTrunkTestJSON.test_filter_trunk_tags
- neutron_tempest_plugin.api.admin.test_floating_ips_admin_actions.FloatingIPAdminTestJSON.test_create_floatingip_with_specified_ip_address
@@ -186,8 +190,10 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_page_reverse_with_href_links
- neutron_tempest_plugin.api.test_qos.QosTestJSON.test_qos_policy_delete_with_rules
- neutron_tempest_plugin.api.test_address_scopes.AddressScopeTest.test_tenant_create_list_address_scope
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_add_subports
- neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_duplicate_policy_error
- neutron_tempest_plugin.api.test_routers.RoutersSearchCriteriaTest.test_list_pagination_page_reverse_with_href_links
+ - neutron_tempest_plugin.api.test_security_groups.RbacSharedSecurityGroupTest.test_port_presence_prevents_policy_rbac_policy_deletion
- neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_external_conversion_on_policy_create
- neutron_tempest_plugin.api.test_networks.NetworksSearchCriteriaTest.test_list_pagination_page_reverse_asc
- neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleTestJSON.test_rule_delete
@@ -196,6 +202,7 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.admin.test_tag.TagFilterSubnetTestJSON.test_filter_subnet_tags
- neutron_tempest_plugin.api.admin.test_routers_dvr.RoutersTestDVR.test_centralized_router_creation
- neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_update_subnetpool_multiple_prefix_intersect
+ - neutron_tempest_plugin.api.admin.test_network_segment_range.NetworkSegmentRangeTestJson.test_list_network_segment_ranges
- neutron_tempest_plugin.api.test_qos.QosSearchCriteriaTest.test_list_pagination_page_reverse_desc
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_subport_invalid_inherit_network_segmentation_type
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_delete_trunk_disabled_trunk
@@ -203,19 +210,19 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.admin.test_tag.TagSubnetPoolTestJSON.test_subnetpool_tags
- neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsIpV6TestJSON.test_list_flavors
- neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_delete_self_share_rule
- - neutron_tempest_plugin.api.test_qos.QosSearchCriteriaTest.test_list_pagination_page_reverse_asc
- neutron_tempest_plugin.api.admin.test_tag.TagSecGroupTestJSON.test_security_group_tags
- neutron_tempest_plugin.api.test_qos.QosTestJSON.test_user_create_port_with_admin_qos_policy
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_update_subnetpool_associate_address_scope
- neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_update_forbidden_for_regular_tenants_foreign_policy
- neutron_tempest_plugin.api.test_qos.QosDscpMarkingRuleTestJSON.test_get_rules_by_policy
- neutron_tempest_plugin.api.admin.test_tag.TagSubnetTestJSON.test_subnet_tags
- - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv4Test.test_admin_network_availability_before_subnet
+ - neutron_tempest_plugin.api.test_qos.QosSearchCriteriaTest.test_list_pagination_page_reverse_asc
- neutron_tempest_plugin.api.test_networks.NetworksSearchCriteriaTest.test_list_sorts_asc
- neutron_tempest_plugin.api.test_extension_driver_port_security.PortSecTest.test_allowed_address_pairs
- neutron_tempest_plugin.api.test_routers_negative.HaRoutersNegativeTest.test_router_create_tenant_ha_returns_forbidden
- neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_external_conversion_on_one_policy_delete
- neutron_tempest_plugin.api.admin.test_shared_network_extension.SharedNetworksTest.test_create_port_shared_network_as_non_admin_tenant
+ - neutron_tempest_plugin.api.test_security_groups.SecGroupProtocolIPv6Test.test_security_group_rule_protocol_ints
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsSearchCriteriaTest.test_list_pagination_page_reverse_desc
- neutron_tempest_plugin.api.test_networks.NetworksTestJSON.test_show_network
- neutron_tempest_plugin.api.test_auto_allocated_topology.TestAutoAllocatedTopology.test_get_allocated_net_topology_as_tenant
@@ -224,7 +231,9 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_allowed_address_pair.AllowedAddressPairIpV6TestJSON.test_create_list_port_with_address_pair
- neutron_tempest_plugin.api.test_metering_extensions.MeteringTestJSON.test_create_delete_metering_label_rule_with_filters
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_create_subnetpool_associate_address_scope
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_sg_group_bumps_revision
- neutron_tempest_plugin.api.test_qos.QosDscpMarkingRuleTestJSON.test_invalid_rule_create
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv6Test.test_show_ip_availability_after_port_delete
- neutron_tempest_plugin.api.admin.test_l3_agent_scheduler.L3AgentSchedulerTestJSON.test_add_list_remove_router_on_l3_agent
- neutron_tempest_plugin.api.test_qos.QosTestJSON.test_get_policy_that_is_shared
- neutron_tempest_plugin.api.test_qos_negative.QosNegativeTestJSON.test_add_policy_with_too_long_name
@@ -235,24 +244,27 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_allowed_address_pairs_bumps_revision
- neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_update_forbidden_for_regular_tenants_own_policy
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_update_subnetpool_disassociate_address_scope
+ - neutron_tempest_plugin.api.test_ports.PortsTestJSON.test_create_port_with_propagate_uplink_status
- neutron_tempest_plugin.api.test_qos.QosSearchCriteriaTest.test_list_sorts_asc
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_update_subnetpool_disassociate_address_scope
- neutron_tempest_plugin.api.test_routers_negative.RoutersNegativeTest.test_delete_router_in_use
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsSearchCriteriaTest.test_list_no_pagination_limit_0
- neutron_tempest_plugin.api.test_address_scopes.AddressScopeTest.test_admin_update_shared_address_scope
- neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_get_non_existent_subnetpool
- - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_extra_dhcp_opt_bumps_revision
+ - neutron_tempest_plugin.api.admin.test_ports.PortTestCasesResourceRequest.test_port_resource_request_empty
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_show_rule_type_details_as_admin
- neutron_tempest_plugin.api.test_address_scopes.AddressScopeTest.test_admin_create_shared_address_scope
- neutron_tempest_plugin.api.admin.test_agent_management.AgentManagementTestJSON.test_list_agent
- neutron_tempest_plugin.api.test_routers.RoutersIpV6Test.test_network_attached_with_two_routers
- neutron_tempest_plugin.api.test_timestamp.TestTimeStampWithL3.test_create_floatingip_with_timestamp
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_update_subnetpool_associate_address_scope
- - neutron_tempest_plugin.api.test_qos.QosMinimumBandwidthRuleTestJSON.test_rule_create_fail_for_direction_ingress
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_port_id_uses_parent_port_id
- neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_show_networks_attribute_with_timestamp
- neutron_tempest_plugin.api.test_floating_ips.FloatingIPTestJSON.test_create_update_floatingip_port_details
- neutron_tempest_plugin.api.test_networks.NetworksSearchCriteriaTest.test_list_sorts_desc
- neutron_tempest_plugin.api.test_routers.RoutersTest.test_update_extra_route
- neutron_tempest_plugin.api.test_qos_negative.QosNegativeTestJSON.test_add_policy_with_too_long_description
- - neutron_tempest_plugin.api.test_extensions.ExtensionsTest.test_list_extensions_pagination
+ - neutron_tempest_plugin.api.admin.test_network_segment_range.NetworkSegmentRangeTestJson.test_show_network_segment_range
- neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_policy_sharing_with_wildcard_and_tenant_id
- neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_create_rbac_policy_with_target_tenant_none
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestMtusJSON.test_create_trunk_with_mtu_smaller_than_subport
@@ -265,7 +277,9 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_qos.QosMinimumBandwidthRuleTestJSON.test_get_rules_by_policy
- neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_port_security_bumps_revisions
- neutron_tempest_plugin.api.test_subnets.SubnetsSearchCriteriaTest.test_list_pagination_page_reverse_desc
+ - neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupTest.test_update_security_group_with_boolean_type_name
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_nonexistent_port_id
+ - neutron_tempest_plugin.api.test_security_groups.RbacSharedSecurityGroupTest.test_policy_target_update
- neutron_tempest_plugin.api.admin.test_ports.PortTestCasesAdmin.test_regenerate_mac_address
- neutron_tempest_plugin.api.test_trunk.TrunkTestMtusJSON.test_add_subport_with_mtu_equal_to_trunk
- neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_port_bumps_revision
@@ -275,22 +289,26 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_qos.QosTestJSON.test_show_policy_has_project_id
- neutron_tempest_plugin.api.admin.test_shared_network_extension.SharedNetworksTest.test_show_shared_networks_attribute
- neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_tenant_create_subnetpool_associate_shared_address_scope
+ - neutron_tempest_plugin.api.test_security_groups.RbacSharedSecurityGroupTest.test_regular_client_shares_to_another_regular_client
- neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_update_forbidden_for_regular_tenants_foreign_policy
+ - neutron_tempest_plugin.api.test_address_scopes_negative.AddressScopeTestNegative.test_get_non_existent_address_scope
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_create_subnet_from_pool_with_subnet_cidr
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestMtusJSON.test_add_subport_with_mtu_greater_than_trunk
- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_trunk_empty_subports_list
- neutron_tempest_plugin.api.test_address_scopes_negative.AddressScopeTestNegative.test_delete_address_scope_associated_with_subnetpool
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_update_trunk_with_description
- neutron_tempest_plugin.api.test_extension_driver_port_security.PortSecTest.test_port_sec_specific_value_2
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv6Test.test_list_ip_availability_after_subnet_and_ports
- neutron_tempest_plugin.api.test_address_scopes_negative.AddressScopeTestNegative.test_update_shared_address_scope_to_unshare
- neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_sg_rule_bumps_sg_revision
- neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_update_association_with_port_shared_policy
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_delete_port_in_use_by_subport
- - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_create_rule_nonexistent_policy
+ - neutron_tempest_plugin.api.test_qos.QosMinimumBandwidthRuleTestJSON.test_rule_create_pass_for_direction_ingress
- neutron_tempest_plugin.api.test_routers_negative.RoutersNegativePolicyTest.test_add_interface_wrong_tenant
- neutron_tempest_plugin.api.admin.test_agent_management.AgentManagementTestJSON.test_list_agents_non_admin
- - neutron_tempest_plugin.api.test_security_groups.SecGroupProtocolIPv6Test.test_create_show_delete_security_group_rule_names
- neutron_tempest_plugin.api.admin.test_tag.UpdateTagsTest.test_update_tags_affects_only_updated_resource
- neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_update_subnet_with_timestamp
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_get_subnetpool
- neutron_tempest_plugin.api.test_networks.NetworksSearchCriteriaTest.test_list_pagination_with_href_links
- neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupIPv6Test.test_update_default_security_group_name
- neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_get_rules_by_policy
@@ -300,11 +318,12 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_subnets.SubnetsSearchCriteriaTest.test_list_pagination_page_reverse_asc
- neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_regular_client_blocked_from_creating_external_wild_policies
- neutron_tempest_plugin.api.admin.test_shared_network_extension.AllowedAddressPairSharedNetworkTest.test_update_with_address_pair_blocked_on_other_network
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv6Test.test_list_ip_availability_after_port_delete
- neutron_tempest_plugin.api.test_address_scopes_negative.AddressScopeTestNegative.test_update_non_existent_address_scope
- - neutron_tempest_plugin.api.test_trunk.TrunkTestMtusJSON.test_add_subport_with_mtu_smaller_than_trunk
- neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_policy_allows_tenant_to_attach_ext_gw
- neutron_tempest_plugin.api.test_networks_negative.NetworksNegativeTest.test_delete_network_in_use
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_create_subnet_from_pool_with_default_prefixlen
+ - neutron_tempest_plugin.api.test_security_groups.SecGroupProtocolIPv6Test.test_security_group_rule_protocol_legacy_names
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_duplicate_subport_segmentation_ids
- neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_create_subnetpool_associate_non_exist_address_scope
- neutron_tempest_plugin.api.test_metering_extensions.MeteringIpV6TestJSON.test_create_delete_metering_label_with_filters
@@ -316,6 +335,7 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsJson.test_list_service_profiles
- neutron_tempest_plugin.api.admin.test_tag.TagRouterTestJSON.test_router_tags
- neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_update
+ - neutron_tempest_plugin.api.admin.test_network_segment_range.NetworkSegmentRangeTestJson.test_create_network_with_default_network_segment_range
- neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_update_subnetpool_prefixes_shrink
- neutron_tempest_plugin.api.test_metering_extensions.MeteringIpV6TestJSON.test_show_metering_label_rule
- neutron_tempest_plugin.api.test_routers.RoutersIpV6Test.test_update_router_reset_gateway_without_snat
@@ -334,7 +354,7 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_routers.RoutersSearchCriteriaTest.test_list_pagination_page_reverse_desc
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_subport_nonexistent_trunk
- neutron_tempest_plugin.api.test_routers.RoutersIpV6Test.test_update_router_set_gateway_without_snat
- - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_sg_group_bumps_revision
+ - neutron_tempest_plugin.api.admin.test_ports.PortTestCasesResourceRequest.test_port_resource_request
- neutron_tempest_plugin.api.admin.test_routers_dvr.RoutersTestDVR.test_distributed_router_creation
- neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_update_forbidden_for_regular_tenants_own_policy
- neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_subnet_service_types_bumps_revisions
@@ -352,6 +372,7 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.admin.test_shared_network_extension.SharedNetworksTest.test_filtering_shared_subnets
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_show_subnetpool_has_project_id
- neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_show_subnet_attribute_with_timestamp
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv6Test.test_show_ip_availability_after_subnet_and_ports_create
- neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_association_with_port_shared_policy
- neutron_tempest_plugin.api.test_networks.NetworksSearchCriteriaTest.test_list_validation_filters
- neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_rbac_policy_show
@@ -365,11 +386,12 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsJson.test_show_service_profile
- neutron_tempest_plugin.api.test_timestamp.TestTimeStampWithSecurityGroup.test_update_sg_with_timestamp
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_update_subnetpool_associate_another_address_scope
- - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_add_subport
- neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_net_bound_shared_policy_wildcard_and_tenant_id_wild_remove
- neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_delete_policies_while_tenant_attached_to_net
- neutron_tempest_plugin.api.test_extension_driver_port_security.PortSecTest.test_delete_with_port_sec
+ - neutron_tempest_plugin.api.test_availability_zones.ListAvailableZonesTest.test_list_available_zones
- neutron_tempest_plugin.api.test_routers.RoutersTest.test_create_router_with_default_snat_value
+ - neutron_tempest_plugin.api.test_security_groups.SecGroupProtocolIPv6Test.test_security_group_rule_protocol_names
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_delete_port_in_use_by_trunk
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_create_subnet_from_pool_with_prefixlen
- neutron_tempest_plugin.api.admin.test_shared_network_extension.SharedNetworksTest.test_filtering_shared_networks
@@ -383,6 +405,7 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_regular_client_shares_to_another_regular_client
- neutron_tempest_plugin.api.test_ports.PortsSearchCriteriaTest.test_list_no_pagination_limit_0
- neutron_tempest_plugin.api.test_qos.QosTestJSON.test_list_policy_filter_by_name
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv4Test.test_list_ip_availability_before_subnet
- neutron_tempest_plugin.api.admin.test_tag.TagFilterSecGroupTestJSON.test_filter_security_group_tags
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_create_subnet_from_pool_with_quota
- neutron_tempest_plugin.api.test_networks.NetworksTestJSON.test_show_network_fields_keystone_v3
@@ -398,14 +421,14 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_create_network_with_timestamp
- neutron_tempest_plugin.api.admin.test_logging.LoggingTestJSON.test_list_supported_logging_types
- neutron_tempest_plugin.api.test_address_scopes_negative.AddressScopeTestNegative.test_delete_non_existent_address_scope
- - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsSearchCriteriaTest.test_list_no_pagination_limit_0
+ - neutron_tempest_plugin.api.test_security_groups.RbacSharedSecurityGroupTest.test_regular_client_blocked_from_sharing_anothers_policy
- neutron_tempest_plugin.api.test_qos.QosMinimumBandwidthRuleTestJSON.test_rule_create_fail_for_the_same_type
- neutron_tempest_plugin.api.test_qos.QosTestJSON.test_delete_policy
- neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_policy_allows_tenant_to_allocate_floatingip
- neutron_tempest_plugin.api.test_routers.RoutersIpV6Test.test_update_router_set_gateway_with_snat_explicit
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_subport_missing_segmentation_id
- neutron_tempest_plugin.api.admin.test_routers_ha.RoutersTestHA.test_legacy_router_creation
- - neutron_tempest_plugin.api.test_security_groups.SecGroupProtocolIPv6Test.test_create_security_group_rule_with_ipv6_protocol_names
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_admin_create_shared_subnetpool
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_with_subport_missing_segmentation_type
- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_sorts_asc
- neutron_tempest_plugin.api.test_trunk.TrunkTestInheritJSONBase.test_add_subport
@@ -419,15 +442,14 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.admin.test_dhcp_agent_scheduler.DHCPAgentSchedulersTestJSON.test_list_networks_hosted_by_one_dhcp
- neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleTestJSON.test_rule_update_forbidden_for_regular_tenants_foreign_policy
- neutron_tempest_plugin.api.test_qos.QosTestJSON.test_show_rule_type_details_as_user
- - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv6Test.test_net_ip_availability_after_port_delete
- neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_update_port_with_timestamp
- neutron_tempest_plugin.api.test_qos.QosMinimumBandwidthRuleTestJSON.test_rule_delete
- - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_create_subnet_from_pool_with_prefixlen
+ - neutron_tempest_plugin.api.test_security_groups.SecGroupProtocolTest.test_security_group_rule_protocol_names
- neutron_tempest_plugin.api.test_ports.PortsTestJSON.test_create_update_port_with_dns_name
- neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_update_subnetpool_associate_address_scope_wrong_ip_version
- neutron_tempest_plugin.api.test_networks.NetworksTestJSON.test_create_network_with_project
- neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_rbac_bumps_network_revision
- - neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupTest.test_update_security_group_with_boolean_type_name
+ - neutron_tempest_plugin.api.test_security_groups.SecGroupProtocolTest.test_security_group_rule_protocol_ints
- neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_policy_sharing_with_wildcard
- neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_floatingip_bumps_revision
- neutron_tempest_plugin.api.test_floating_ips_negative.FloatingIPNegativeTestJSON.test_associate_floatingip_with_port_with_floatingip
@@ -447,16 +469,15 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_metering_negative.MeteringNegativeTestJSON.test_create_metering_label_with_too_long_name
- neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_tenant_update_sp_prefix_associated_with_shared_addr_scope
- neutron_tempest_plugin.api.test_qos.QosSearchCriteriaTest.test_list_sorts_desc
+ - neutron_tempest_plugin.api.admin.test_network_segment_range.NetworkSegmentRangeTestJson.test_create_update_delete_network_segment_range
- neutron_tempest_plugin.api.admin.test_security_groups.SecGroupAdminTest.test_security_group_recreated_on_port_update
- neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_create_rbac_policy_with_target_tenant_too_long_id
- neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_create
- neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_subnet_bumps_revision
- neutron_tempest_plugin.api.admin.test_quotas_negative.QuotasAdminNegativeTestJSON.test_create_router_when_quotas_is_full
- neutron_tempest_plugin.api.test_qos.QosMinimumBandwidthRuleTestJSON.test_rule_create_rule_nonexistent_policy
- - neutron_tempest_plugin.api.test_security_groups.SecGroupProtocolIPv6Test.test_create_security_group_rule_with_ipv6_protocol_integers
- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_show_delete_trunk
- neutron_tempest_plugin.api.admin.test_quotas_negative.QuotasAdminNegativeTestJSON.test_create_port_when_quotas_is_full
- - neutron_tempest_plugin.api.test_security_groups.SecGroupProtocolTest.test_create_show_delete_security_group_rule_names
- neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_regular_client_blocked_from_sharing_anothers_network
- neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_rbac_policy_quota
- neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleTestJSON.test_rule_create_rule_nonexistent_policy
@@ -466,13 +487,13 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_create_dual_stack_subnets_from_subnetpools
- neutron_tempest_plugin.api.admin.test_agent_management.AgentManagementTestJSON.test_show_agent
- neutron_tempest_plugin.api.test_extensions.ExtensionsTest.test_list_extensions_sorting
- - neutron_tempest_plugin.api.test_timestamp.TestTimeStampWithSecurityGroup.test_create_sgrule_with_timestamp
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_create_rule_nonexistent_policy
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsSearchCriteriaTest.test_list_validation_filters
- neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_subnet_on_network_only_visible_to_policy_target
- neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsJson.test_list_flavors
- neutron_tempest_plugin.api.test_address_scopes.AddressScopeTest.test_show_address_scope_project_id
- - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv4Test.test_net_ip_availability_after_port_delete
- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_delete_trunk_with_subport_is_allowed
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv4Test.test_list_ip_availability_after_subnet_and_ports
- neutron_tempest_plugin.api.test_trunk.TrunkTestMtusJSON.test_create_trunk_with_mtu_greater_than_subport
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsSearchCriteriaTest.test_list_pagination_with_marker
- neutron_tempest_plugin.api.test_qos.QosTestJSON.test_shared_policy_update
@@ -482,12 +503,12 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_create_subnet_with_timestamp
- neutron_tempest_plugin.api.admin.test_agent_management.AgentManagementTestJSON.test_update_agent_status
- neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_router_extra_routes_bumps_revision
- - neutron_tempest_plugin.api.test_security_groups.SecGroupProtocolIPv6Test.test_create_security_group_rule_with_ipv6_protocol_legacy_names
- neutron_tempest_plugin.api.test_extension_driver_port_security.PortSecTest.test_port_sec_specific_value_1
- - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_port_id_uses_trunk_port_id
- neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleTestJSON.test_rule_update
+ - neutron_tempest_plugin.api.admin.test_network_segment_range.NetworkSegmentRangeTestJson.test_delete_network_segment_range_failed_with_segment_referenced
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_nonexistent_subport_port_id
- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_show_trunk_has_project_id
+ - neutron_tempest_plugin.api.admin.test_ports.PortTestCasesResourceRequest.test_port_resource_request_inherited_policy
- neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupIPv6Test.test_update_security_group_with_boolean_type_name
- neutron_tempest_plugin.api.test_timestamp.TestTimeStampWithL3.test_update_floatingip_with_timestamp
- neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_update_forbidden_for_regular_tenants_foreign_policy
@@ -495,7 +516,7 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.admin.test_routers_flavors.RoutersFlavorTestCase.test_badrequest_on_requesting_flags_and_flavor
- neutron_tempest_plugin.api.admin.test_routers_flavors.RoutersFlavorTestCase.test_create_router_with_flavor
- neutron_tempest_plugin.api.test_routers.RoutersIpV6Test.test_router_interface_status
- - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_update_trunk_with_description
+ - neutron_tempest_plugin.api.test_security_groups.RbacSharedSecurityGroupTest.test_rbac_policy_show
- neutron_tempest_plugin.api.test_ports.PortsSearchCriteriaTest.test_list_sorts_asc
- neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupTest.test_update_default_security_group_name
- neutron_tempest_plugin.api.test_ports.PortsSearchCriteriaTest.test_list_pagination_with_href_links
@@ -507,10 +528,12 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_external_update_policy_from_wildcard_to_specific_tenant
- neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_update_subnetpool_tenant_id
- neutron_tempest_plugin.api.test_timestamp.TestTimeStampWithL3.test_show_floatingip_attribute_with_timestamp
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_create_subnet_from_pool_with_prefixlen
- neutron_tempest_plugin.api.admin.test_quotas_negative.QuotasAdminNegativeTestJSON.test_create_security_group_rule_when_quotas_is_full
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_create_subnet_from_pool_with_quota
- neutron_tempest_plugin.api.test_routers.RoutersSearchCriteriaTest.test_list_sorts_asc
- neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_association_with_port_nonexistent_policy
+ - neutron_tempest_plugin.api.test_security_groups.RbacSharedSecurityGroupTest.test_filter_rbac_policies
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_with_subport_missing_segmentation_id
- neutron_tempest_plugin.api.admin.test_shared_network_extension.SharedNetworksTest.test_create_bulk_shared_network
- neutron_tempest_plugin.api.test_routers.RoutersSearchCriteriaTest.test_list_no_pagination_limit_0
@@ -521,8 +544,9 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_routers.RoutersTest.test_update_router_reset_gateway_without_snat
- neutron_tempest_plugin.api.test_qos.QosTestJSON.test_delete_not_allowed_if_policy_in_use_by_network
- neutron_tempest_plugin.api.test_metering_extensions.MeteringIpV6TestJSON.test_create_delete_metering_label_rule_with_filters
- - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv6Test.test_admin_network_availability_before_subnet
+ - neutron_tempest_plugin.api.test_extensions.ExtensionsTest.test_list_extensions_pagination
- neutron_tempest_plugin.api.test_metering_extensions.MeteringIpV6TestJSON.test_list_metering_labels
+ - neutron_tempest_plugin.api.test_ports.PortsTestJSON.test_create_port_without_propagate_uplink_status
- neutron_tempest_plugin.api.test_extra_dhcp_options.ExtraDHCPOptionsTestJSON.test_update_show_port_with_extra_dhcp_options
- neutron_tempest_plugin.api.test_extensions.ExtensionsTest.test_list_extensions_includes_all
- neutron_tempest_plugin.api.test_qos.QosMinimumBandwidthRuleTestJSON.test_rule_update
@@ -549,7 +573,7 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_external_conversion_on_policy_delete
- neutron_tempest_plugin.api.test_allowed_address_pair.AllowedAddressPairTestJSON.test_create_list_port_with_address_pair
- neutron_tempest_plugin.api.test_extensions.ExtensionsTest.test_list_extensions_project_id
- - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv6Test.test_net_ip_availability_after_subnet_and_ports
+ - neutron_tempest_plugin.api.admin.test_network_segment_range.NetworkSegmentRangeTestJson.test_create_network_with_tenant_specific_network_segment_range
- neutron_tempest_plugin.api.test_subnets.SubnetsSearchCriteriaTest.test_list_no_pagination_limit_0
- neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleTestJSON.test_rule_create_fail_for_the_same_type
- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_with_marker
@@ -570,7 +594,7 @@ functest.tempest.neutron_tempest_plugin_api:
- neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsIpV6TestJSON.test_create_update_delete_flavor
- neutron_tempest_plugin.api.test_extension_driver_port_security.PortSecTest.test_create_port_sec_with_security_group
- neutron_tempest_plugin.api.test_routers.RoutersSearchCriteriaTest.test_list_pagination
- - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_get_subnetpool
+ - neutron_tempest_plugin.api.test_router_interface_fip.RouterInterfaceFip.test_router_interface_fip
- neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_tenant_update_subnetpool
- neutron_tempest_plugin.api.test_qos.QosDscpMarkingRuleTestJSON.test_rule_create_fail_for_the_same_type
- neutron_tempest_plugin.api.test_floating_ips.FloatingIPTestJSON.test_create_update_floatingip_description
diff --git a/etc/userconfig/patrole_blacklist.yaml b/etc/userconfig/patrole_blacklist.yaml
index 7a8741bb..8fb99fee 100644
--- a/etc/userconfig/patrole_blacklist.yaml
+++ b/etc/userconfig/patrole_blacklist.yaml
@@ -1,8 +1,7 @@
---
-
scenarios:
- - unknown
- - os-nosdn-ovs-ha
+ - (.*)
tests:
# need metering extension, otherwise the following 6 sub test cases will skip
- patrole_tempest_plugin.tests.api.network.test_metering_label_rules_rbac.MeteringLabelRulesRbacTest.test_create_metering_label_rule
diff --git a/etc/userconfig/trunk_port_blacklist.yaml b/etc/userconfig/trunk_port_blacklist.yaml
index 0f1d805d..bf4cfe08 100644
--- a/etc/userconfig/trunk_port_blacklist.yaml
+++ b/etc/userconfig/trunk_port_blacklist.yaml
@@ -1,8 +1,7 @@
---
-
scenarios:
- - unknown
- - os-nosdn-ovs-ha
+ - (.*)
tests:
# need VLAN type driver, otherwise the following 1 sub test case will skip
- neutron_tempest_plugin.api.test_trunk.TrunkTestInheritJSONBase.test_add_subport
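Both blacklist files replace their explicit scenario names with the single entry `(.*)`. Treated as a regular expression (an assumption based on the wildcard form used here), that pattern matches every scenario name, so the listed tests are now blacklisted regardless of the deployment scenario. A quick sketch, with the last scenario name hypothetical:

```python
# Sketch: '(.*)' matches any scenario string, unlike the old fixed list.
import re

pattern = re.compile(r'(.*)')
for name in ('unknown', 'os-nosdn-ovs-ha', 'os-odl-sfc-ha'):
    print(name, '->', bool(pattern.match(name)))  # True for every name
```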
diff --git a/setup.cfg b/setup.cfg
index 2ef30db0..1ad83a05 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -9,7 +9,7 @@
[metadata]
name = dovetail
-version = 2018.09.0
+version = 2019.12
home-page = https://wiki.opnfv.org/display/dovetail
[files]
diff --git a/tox.ini b/tox.ini
index b8efce64..9f39a307 100644
--- a/tox.ini
+++ b/tox.ini
@@ -13,7 +13,6 @@ skipsdist = True
envlist =
pep8,
py35,
- docs,
docs-linkcheck
[testenv]
@@ -28,7 +27,7 @@ commands =
--cov=dovetail \
--cov-report term-missing \
--cov-report xml \
- --cov-fail-under=100 \
+ --cov-fail-under=99.8 \
--ignore=cvp \
{posargs}
setenv =
@@ -40,14 +39,17 @@ whitelist_externals = wget
echo
[testenv:pep8]
+basepython = python3.5
commands = flake8 {toxinidir}
[flake8]
+basepython = python3.5
show-source = True
ignore = E123,E125,H803,E722,W503
exclude = .tox,dist,docs,*egg,build,.venv,.git
[testenv:docs]
+basepython = python3.6
deps = -rdocs/requirements.txt
commands =
sphinx-build -b html -n -d {envtmpdir}/doctrees ./docs/ {toxinidir}/docs/_build/html
@@ -55,6 +57,7 @@ commands =
whitelist_externals = echo
[testenv:docs-linkcheck]
+basepython = python3.5
deps = -rdocs/requirements.txt
commands = sphinx-build -b linkcheck -d {envtmpdir}/doctrees ./docs/ {toxinidir}/docs/_build/linkcheck