summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--.coveragerc9
-rw-r--r--INFO.yaml35
-rw-r--r--docker/Dockerfile67
-rw-r--r--docker/Dockerfile.aarch64.patch12
-rw-r--r--docker/Dockerfile.centos717
-rw-r--r--docs/conf.py9
-rw-r--r--docs/conf.yaml9
-rw-r--r--docs/images/dovetail_generic_framework.pngbin0 -> 92053 bytes
-rw-r--r--docs/index.rst2
-rw-r--r--docs/release/release-notes/index.rst206
-rw-r--r--docs/testing/developer/genericframework/index.rst413
-rw-r--r--docs/testing/developer/testcaserequirements/index.rst8
-rw-r--r--docs/testing/developer/testscope/index.rst616
-rw-r--r--docs/testing/user/certificationworkflow/ApplicationForm.rst6
-rw-r--r--docs/testing/user/certificationworkflow/index.rst116
-rw-r--r--docs/testing/user/instructionsperinstaller/testing_instructions_xci.rst220
-rw-r--r--docs/testing/user/ovpaddendum/exemption-strict-API-validation.rst16
-rw-r--r--docs/testing/user/ovpaddendum/index.rst103
-rw-r--r--docs/testing/user/reviewerguide/images/ovp_log_setup.pngbin72844 -> 12385 bytes
-rw-r--r--docs/testing/user/reviewerguide/images/ovp_pass_fraction.pngbin83821 -> 36240 bytes
-rw-r--r--docs/testing/user/reviewerguide/images/ovp_pass_percentage.pngbin22057 -> 8757 bytes
-rw-r--r--docs/testing/user/reviewerguide/images/ovp_result_overview.pngbin71800 -> 0 bytes
-rw-r--r--docs/testing/user/reviewerguide/images/ovp_result_review.pngbin14835 -> 13652 bytes
-rw-r--r--docs/testing/user/reviewerguide/images/ovp_top_nav.pngbin21387 -> 20489 bytes
-rw-r--r--docs/testing/user/reviewerguide/images/review_status.pngbin0 -> 9887 bytes
-rw-r--r--docs/testing/user/reviewerguide/images/sut_info.pngbin17061 -> 12822 bytes
-rw-r--r--docs/testing/user/reviewerguide/index.rst175
-rw-r--r--docs/testing/user/systempreparation/index.rst6
-rw-r--r--docs/testing/user/testspecification/highavailability/index.rst2
-rw-r--r--docs/testing/user/testspecification/index.rst2
-rw-r--r--docs/testing/user/testspecification/security_patrole/index.rst90
-rw-r--r--docs/testing/user/testspecification/security_patrole_vxlan_dependent/index.rst51
-rw-r--r--docs/testing/user/testspecification/snaps_smoke/index.rst232
-rw-r--r--docs/testing/user/testspecification/stress/index.rst4
-rw-r--r--docs/testing/user/testspecification/tempest_compute/index.rst24
-rw-r--r--docs/testing/user/testspecification/tempest_identity_v3/index.rst10
-rw-r--r--docs/testing/user/testspecification/tempest_image/index.rst10
-rw-r--r--docs/testing/user/testspecification/tempest_ipv6/index.rst16
-rw-r--r--docs/testing/user/testspecification/tempest_ipv6/ipv6_api.rst68
-rw-r--r--docs/testing/user/testspecification/tempest_ipv6/ipv6_scenario.rst32
-rw-r--r--docs/testing/user/testspecification/tempest_multi_node_scheduling/index.rst30
-rw-r--r--docs/testing/user/testspecification/tempest_network_api/index.rst22
-rw-r--r--docs/testing/user/testspecification/tempest_network_scenario/index.rst26
-rw-r--r--docs/testing/user/testspecification/tempest_network_security/index.rst26
-rw-r--r--docs/testing/user/testspecification/tempest_osinterop/index.rst8
-rw-r--r--docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_compute.rst60
-rw-r--r--docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_identity.rst67
-rw-r--r--docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_image.rst48
-rw-r--r--docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_network.rst235
-rw-r--r--docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_volume.rst188
-rw-r--r--docs/testing/user/testspecification/tempest_trunk_ports/index.rst123
-rw-r--r--docs/testing/user/testspecification/tempest_vm_lifecycle/index.rst46
-rw-r--r--docs/testing/user/testspecification/tempest_volume/index.rst6
-rw-r--r--docs/testing/user/testspecification/vnf/index.rst22
-rw-r--r--docs/testing/user/testspecification/vping/index.rst103
-rw-r--r--docs/testing/user/testspecification/vpn/index.rst476
-rw-r--r--docs/testing/user/userguide/api_testing_guide.rst373
-rw-r--r--docs/testing/user/userguide/cli_reference.rst71
-rw-r--r--docs/testing/user/userguide/images/tocsa_vnf_test_environment.pngbin0 -> 101795 bytes
-rw-r--r--docs/testing/user/userguide/images/tosca_vnf_test_flow.pngbin0 -> 40614 bytes
-rw-r--r--docs/testing/user/userguide/index.rst2
-rw-r--r--docs/testing/user/userguide/testing_guide.rst240
-rw-r--r--docs/testing/user/userguide/vnf_test_guide.rst714
-rw-r--r--dovetail/api/app/__init__.py0
-rw-r--r--dovetail/api/app/constants.py15
-rw-r--r--dovetail/api/app/routes.py102
-rw-r--r--dovetail/api/app/server.py297
-rw-r--r--dovetail/api/app/utils.py24
-rwxr-xr-xdovetail/api/boot.sh16
-rw-r--r--dovetail/api/swagger.yaml346
-rw-r--r--dovetail/cli/commands/cli_testcase.py15
-rw-r--r--dovetail/container.py199
-rw-r--r--dovetail/parser.py6
-rw-r--r--dovetail/report.py309
-rwxr-xr-xdovetail/run.py65
-rw-r--r--dovetail/test_runner.py143
-rw-r--r--dovetail/testcase.py75
-rw-r--r--dovetail/tests/unit/cli/commands/test_cli_testcase.py6
-rw-r--r--dovetail/tests/unit/cli/test_cli_base.py8
-rw-r--r--dovetail/tests/unit/cmd_config.yml17
-rw-r--r--dovetail/tests/unit/test_container.py387
-rw-r--r--dovetail/tests/unit/test_parser.py10
-rw-r--r--dovetail/tests/unit/test_report.py840
-rw-r--r--dovetail/tests/unit/test_run.py136
-rw-r--r--dovetail/tests/unit/test_test_runner.py233
-rw-r--r--dovetail/tests/unit/test_testcase.py203
-rw-r--r--dovetail/tests/unit/test_testcase.yaml11
-rw-r--r--dovetail/tests/unit/utils/test_dovetail_config.py2
-rw-r--r--dovetail/tests/unit/utils/test_dovetail_logger.py8
-rw-r--r--dovetail/tests/unit/utils/test_dovetail_utils.py219
-rw-r--r--dovetail/utils/dovetail_config.py8
-rw-r--r--dovetail/utils/dovetail_logger.py2
-rw-r--r--dovetail/utils/dovetail_utils.py101
-rw-r--r--etc/compliance/debug.yml9
-rw-r--r--etc/compliance/healthcheck.yml11
-rw-r--r--etc/compliance/onap.1.0.0.yml6
-rw-r--r--etc/compliance/onap.heat.2019.04.yaml17
-rw-r--r--etc/compliance/onap.tosca.2019.04.yaml17
-rw-r--r--etc/compliance/ovp.2019.12.yaml (renamed from etc/compliance/ovp.2018.09.yaml)19
-rw-r--r--etc/compliance/proposed_tests.yml20
-rw-r--r--etc/conf/bottlenecks_config.yml46
-rw-r--r--etc/conf/cmd_config.yml16
-rw-r--r--etc/conf/dovetail_config.yml31
-rw-r--r--etc/conf/functest-k8s_config.yml35
-rw-r--r--etc/conf/functest_config.yml51
-rw-r--r--etc/conf/onap-vtp_config.yml39
-rw-r--r--etc/conf/onap-vvp_config.yml32
-rw-r--r--etc/conf/vnftest_config.yml30
-rw-r--r--etc/conf/yardstick_config.yml45
-rw-r--r--etc/patches/functest/disable-api-validation/0001-Allow-additional-properties-in-API-responses.patch345
-rwxr-xr-xetc/patches/functest/disable-api-validation/apply.sh11
-rw-r--r--etc/testcase/bottlenecks.stress.ping.yml13
-rw-r--r--etc/testcase/functest.bgpvpn.router_association.yml20
-rw-r--r--etc/testcase/functest.bgpvpn.router_association_floating_ip.yml20
-rw-r--r--etc/testcase/functest.bgpvpn.subnet_connectivity.yml20
-rw-r--r--etc/testcase/functest.bgpvpn.tenant_separation.yml20
-rw-r--r--etc/testcase/functest.healthcheck.api_check.yml15
-rw-r--r--etc/testcase/functest.healthcheck.connection_check.yml13
-rw-r--r--etc/testcase/functest.healthcheck.snaps_health_check.yml15
-rw-r--r--etc/testcase/functest.k8s.conformance.yml13
-rw-r--r--etc/testcase/functest.k8s.smoke.yml13
-rw-r--r--etc/testcase/functest.rally.authenticate.yml40
-rw-r--r--etc/testcase/functest.rally.cinder.yml58
-rw-r--r--etc/testcase/functest.rally.glance.yml42
-rw-r--r--etc/testcase/functest.rally.gnocchi.yml50
-rw-r--r--etc/testcase/functest.rally.heat.yml47
-rw-r--r--etc/testcase/functest.rally.keystone.yml45
-rw-r--r--etc/testcase/functest.rally.neutron.yml54
-rw-r--r--etc/testcase/functest.rally.nova.yml61
-rw-r--r--etc/testcase/functest.rally.quotas.yml38
-rw-r--r--etc/testcase/functest.security.patrole.yml37
-rw-r--r--etc/testcase/functest.security.patrole_vxlan_dependent.yml27
-rw-r--r--etc/testcase/functest.snaps.smoke.yml14
-rw-r--r--etc/testcase/functest.tempest.bgpvpn.yml56
-rw-r--r--etc/testcase/functest.tempest.compute.yml25
-rw-r--r--etc/testcase/functest.tempest.identity_v3.yml25
-rw-r--r--etc/testcase/functest.tempest.image.yml23
-rw-r--r--etc/testcase/functest.tempest.ipv6_api.yml23
-rw-r--r--etc/testcase/functest.tempest.ipv6_scenario.yml25
-rw-r--r--etc/testcase/functest.tempest.multi_node_scheduling.yml23
-rw-r--r--etc/testcase/functest.tempest.network_api.yml23
-rw-r--r--etc/testcase/functest.tempest.network_scenario.yml23
-rw-r--r--etc/testcase/functest.tempest.network_security.yml23
-rw-r--r--etc/testcase/functest.tempest.networking_sfc.yml31
-rw-r--r--etc/testcase/functest.tempest.neutron_tempest_plugin_api.yml600
-rw-r--r--etc/testcase/functest.tempest.osinterop.yml436
-rw-r--r--etc/testcase/functest.tempest.trunk-ports.yml22
-rw-r--r--etc/testcase/functest.tempest.vm_lifecycle.yml27
-rw-r--r--etc/testcase/functest.tempest.volume.yml25
-rw-r--r--etc/testcase/functest.vnf.vepc.yml15
-rw-r--r--etc/testcase/functest.vnf.vims.yml15
-rw-r--r--etc/testcase/functest.vping.ssh.yml13
-rw-r--r--etc/testcase/functest.vping.userdata.yml13
-rw-r--r--etc/testcase/onap-vtp.validate.csar.yml26
-rw-r--r--etc/testcase/onap-vvp.validate.heat.yml28
-rw-r--r--etc/testcase/onap.onap.vnf_lifecycle.yml16
-rw-r--r--etc/testcase/yardstick.ha.cinder_api.yml13
-rw-r--r--etc/testcase/yardstick.ha.controller_restart.yml13
-rw-r--r--etc/testcase/yardstick.ha.cpu_load.yml13
-rw-r--r--etc/testcase/yardstick.ha.database.yml13
-rw-r--r--etc/testcase/yardstick.ha.disk_load.yml13
-rw-r--r--etc/testcase/yardstick.ha.glance_api.yml13
-rw-r--r--etc/testcase/yardstick.ha.haproxy.yml13
-rw-r--r--etc/testcase/yardstick.ha.keystone.yml13
-rw-r--r--etc/testcase/yardstick.ha.neutron_l3_agent.yml17
-rw-r--r--etc/testcase/yardstick.ha.neutron_server.yml13
-rw-r--r--etc/testcase/yardstick.ha.nova_api.yml13
-rw-r--r--etc/testcase/yardstick.ha.rabbitmq.yml13
-rw-r--r--etc/userconfig/ansible.cfg9
-rw-r--r--etc/userconfig/bgpvpn_testcases.yaml18
-rw-r--r--etc/userconfig/env_config.sh.onap.sample22
-rw-r--r--etc/userconfig/env_config.sh.sample9
-rw-r--r--etc/userconfig/hosts.yaml.sample9
-rw-r--r--etc/userconfig/patrole_blacklist.yaml37
-rw-r--r--etc/userconfig/rally_authenticate_testcases.yaml31
-rw-r--r--etc/userconfig/rally_cinder_testcases.yaml31
-rw-r--r--etc/userconfig/rally_glance_testcases.yaml31
-rw-r--r--etc/userconfig/rally_gnocchi_testcases.yaml31
-rw-r--r--etc/userconfig/rally_heat_testcases.yaml31
-rw-r--r--etc/userconfig/rally_keystone_testcases.yaml31
-rw-r--r--etc/userconfig/rally_neutron_testcases.yaml31
-rw-r--r--etc/userconfig/rally_nova_testcases.yaml31
-rw-r--r--etc/userconfig/rally_quotas_testcases.yaml31
-rw-r--r--etc/userconfig/sdnvpn_config_tc000.yaml7
-rw-r--r--etc/userconfig/sdnvpn_config_testcase1.yaml27
-rw-r--r--etc/userconfig/sdnvpn_config_testcase2.yaml37
-rw-r--r--etc/userconfig/sdnvpn_config_testcase3.yaml26
-rw-r--r--etc/userconfig/sdnvpn_config_testcase4.yaml27
-rw-r--r--etc/userconfig/sdnvpn_config_testcase8.yaml23
-rw-r--r--etc/userconfig/tempest_conf.yaml.sample9
-rw-r--r--etc/userconfig/tempest_custom_testcases.yaml9
-rw-r--r--etc/userconfig/testcases.yaml9
-rw-r--r--etc/userconfig/trunk_port_blacklist.yaml28
-rw-r--r--etc/userconfig/vnf_descriptor.yaml.sample20
-rw-r--r--etc/userconfig/vnf_testcases.yaml9
-rw-r--r--etc/userconfig/vnftest_conf.yaml36
-rw-r--r--requirements.txt36
-rw-r--r--setup.cfg11
-rw-r--r--test-requirements.txt12
-rw-r--r--tox.ini23
200 files changed, 8727 insertions, 4431 deletions
diff --git a/.coveragerc b/.coveragerc
index 8777f5d0..bbea4282 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
[run]
omit = dovetail/tests/*
[report]
diff --git a/INFO.yaml b/INFO.yaml
index e505815e..79dfa2bc 100644
--- a/INFO.yaml
+++ b/INFO.yaml
@@ -1,14 +1,23 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
project: 'OPNFV qualification testing (dovetail)'
project_creation_date: 'September 1st, 2015'
project_category: 'Testing'
lifecycle_state: 'Incubation'
project_lead: &opnfv_dovetail_ptl
- name: 'Dan Xu'
- email: 'xudan16@huawei.com'
+ name: 'Kanagaraj Manickam'
+ email: 'kanagaraj.manickam@huawei.com'
company: 'huawei.com'
- id: 'xudan'
- timezone: 'Asia/Shanghai'
+ id: 'mkr1481'
+ timezone: 'India/Bangalore'
primary_contact: *opnfv_dovetail_ptl
issue_tracking:
type: 'jira'
@@ -24,12 +33,12 @@ realtime_discussion:
channel: '#opnfv-dovetail'
meetings:
- type: 'gotomeeting+irc'
- agenda: 'https://wiki.opnfv.org/display/meetings/Dovetail'
- url: 'https://global.gotomeeting.com/join/819733085'
+ agenda: 'https://wiki.opnfv.org/display/meetings/Dovetail'
+ url: 'https://global.gotomeeting.com/join/819733085'
server: 'freenode.net'
channel: '#opnfv-dovetail'
repeats: 'weekly'
- time: 'Wednesday, 14:00 UTC'
+ time: 'Wednesday, 14:00 UTC'
repositories:
- 'dovetail'
- 'dovetail-webportal'
@@ -51,15 +60,16 @@ committers:
email: 'pkaralis@intracom-telecom.com'
company: 'intracom-telecom.com'
id: 'pkaralis'
- - name: 'Stamatis Katsaounis'
- email: 'mokats@intracom-telecom.com'
- company: 'intracom-telecom.com'
- id: 'skatsaounis'
- name: 'Georg Kunz'
email: 'georg.kunz@ericsson.com'
company: 'ericsson.com'
id: 'georgkunz'
+ - name: 'Dan Xu'
+ email: 'xudan16@huawei.com'
+ company: 'huawei.com'
+ id: 'xudan'
tsc:
+ # yamllint disable rule:line-length
approval: 'http//meetbot.opnfv.org/meetings/opnfv-meeting/2015/opnfv-meeting.2015-09-01-13.59.html'
changes:
- type: 'removal'
@@ -83,3 +93,6 @@ tsc:
- type: 'removal'
name: 'zenghui shi'
link: 'https://lists.opnfv.org/pipermail/opnfv-tsc/2018-May/004361.html'
+ - type: 'removal'
+ name: 'Stamatis Katsaounis'
+ link: 'https://lists.opnfv.org/g/opnfv-tech-discuss/message/22993'
diff --git a/docker/Dockerfile b/docker/Dockerfile
index c5239dbb..6d6a7db7 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,54 +1,69 @@
-FROM ubuntu:14.04
-MAINTAINER Leo Wang <grakiss.wanglei@huawei.com>
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+FROM ubuntu:18.04
+MAINTAINER Stamatis Katsaounis <mokats@intracom-telecom.com>
LABEL version="0.1" description="OPNFV Dovetail Docker Container"
ARG BRANCH=master
+ARG SWAGGER_UI_TAG=v3.22.3
RUN \
apt-get update \
&& \
apt-get install -y \
- build-essential \
+ apache2 \
gcc \
+ make \
git \
libssl-dev \
libffi-dev \
vim \
- python-dev \
- python-mock \
- python-pip \
- apt-transport-https \
- wget \
+ python3-dev \
+ python3-pip \
--no-install-recommends \
&& \
- apt-get update
-
-RUN easy_install -U setuptools==30.0.0
+ rm -rf /var/lib/apt/lists/*
-RUN wget -qO- https://get.docker.com/ \
-| \
- sed 's/-q docker-ce/-q docker-ce=17.03.0~ce-0~ubuntu-trusty/' \
-| \
- sed 's/edge/stable/' \
-| \
- sh
+RUN pip3 install -U setuptools wheel
+ENV LC_ALL C.UTF-8
+ENV LANG C.UTF-8
ENV HOME /home/opnfv
ENV REPOS_DIR ${HOME}/dovetail
-WORKDIR /home/opnfv
+ENV API_DIR ${REPOS_DIR}/dovetail/api
+WORKDIR $HOME
RUN \
mkdir -p ${REPOS_DIR} \
&& \
- git config --global http.sslVerify false \
+ git init /tmp/dovetail \
+&& \
+ (cd /tmp/dovetail && \
+ git fetch --tags https://gerrit.opnfv.org/gerrit/dovetail $BRANCH && \
+ git checkout FETCH_HEAD && \
+ pip3 install .) \
&& \
- pip install git+https://git.opnfv.org/dovetail@$BRANCH#egg=dovetail \
+ rm -rf /tmp/dovetail \
+&& \
+ ln -s /usr/local/lib/python3.6/dist-packages/dovetail ${REPOS_DIR}/dovetail
+
+RUN \
+ git clone https://github.com/swagger-api/swagger-ui.git \
&& \
- ln -s /usr/local/lib/python2.7/dist-packages/dovetail ${REPOS_DIR}/dovetail
+ cd swagger-ui && git checkout $SWAGGER_UI_TAG
WORKDIR ${REPOS_DIR}/dovetail
-# get db schema from opnfv sites
-# RUN mkdir -p ${REPOS_DIR}/dovetail/utils/local_db
-# ADD get_db_schema.py ${REPOS_DIR}/dovetail/utils/local_db
-# RUN cd ${REPOS_DIR}/dovetail/utils/local_db && python get_db_schema.py
+ENV FLASK_APP ${API_DIR}/app/routes.py
+# This port is for flask API in container
+EXPOSE 5000
+# This port is for Swagger UI in container
+EXPOSE 80
+CMD bash ${API_DIR}/boot.sh
diff --git a/docker/Dockerfile.aarch64.patch b/docker/Dockerfile.aarch64.patch
index 68f002c0..ecde81f1 100644
--- a/docker/Dockerfile.aarch64.patch
+++ b/docker/Dockerfile.aarch64.patch
@@ -32,9 +32,9 @@ index c5239db..b4fd9f7 100644
RUN \
apt-get update \
@@ -17,21 +25,11 @@ RUN \
- python-dev \
- python-mock \
- python-pip \
+ python3-dev \
+ python3-mock \
+ python3-pip \
- apt-transport-https \
wget \
+ sudo \
@@ -59,10 +59,10 @@ index c5239db..b4fd9f7 100644
RUN \
mkdir -p ${REPOS_DIR} \
&& \
-+ pip install -U setuptools --upgrade \
++ pip3 install -U setuptools --upgrade \
+&& \
-+ pip install -U wheel \
++ pip3 install -U wheel \
+&& \
git config --global http.sslVerify false \
&& \
- pip install git+https://git.opnfv.org/dovetail@$BRANCH#egg=dovetail \
+ pip3 install git+https://git.opnfv.org/dovetail@$BRANCH#egg=dovetail \
diff --git a/docker/Dockerfile.centos7 b/docker/Dockerfile.centos7
index c423badd..f9b944fe 100644
--- a/docker/Dockerfile.centos7
+++ b/docker/Dockerfile.centos7
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
FROM centos:centos7
MAINTAINER Tomofumi Hayashi <tohayash@redhat.com>
LABEL version="0.1" description="OPNFV Dovetail Docker Container"
@@ -5,7 +14,7 @@ LABEL version="0.1" description="OPNFV Dovetail Docker Container"
ARG BRANCH=master
RUN yum update -y && yum install -y sudo iproute epel-release && \
- yum install -y python-pip git docker && \
+ yum install -y python3-pip git docker && \
sed -ie 's/requiretty/!requiretty/g' /etc/sudoers
ENV HOME /home/opnfv
@@ -14,9 +23,9 @@ WORKDIR /home/opnfv
RUN git config --global http.sslVerify false && \
git clone --depth 1 -b $BRANCH https://git.opnfv.org/dovetail ${REPOS_DIR} && \
- pip install -U pip && \
- pip install -r ${REPOS_DIR}/requirements.txt && \
- pip install -e . && \
+ pip3 install -U pip3 && \
+ pip3 install -r ${REPOS_DIR}/requirements.txt && \
+ pip3 install -e . && \
mkdir -p ${REPOS_DIR}/results
WORKDIR ${REPOS_DIR}/dovetail
diff --git a/docs/conf.py b/docs/conf.py
index 317a2aec..a3d9ebb7 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
from docs_conf.conf import *
linkcheck_ignore = [
diff --git a/docs/conf.yaml b/docs/conf.yaml
index 5b62ac6e..fa2e75d7 100644
--- a/docs/conf.yaml
+++ b/docs/conf.yaml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
project_cfg: opnfv
project: dovetail
diff --git a/docs/images/dovetail_generic_framework.png b/docs/images/dovetail_generic_framework.png
new file mode 100644
index 00000000..b73a0678
--- /dev/null
+++ b/docs/images/dovetail_generic_framework.png
Binary files differ
diff --git a/docs/index.rst b/docs/index.rst
index 247c6368..5b83a60c 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -23,6 +23,6 @@ Dovetail
testing/user/userguide/index
testing/developer/testcaserequirements/index
- testing/developer/testscope/index
+ testing/developer/genericframework/index
release/release-notes/index
diff --git a/docs/release/release-notes/index.rst b/docs/release/release-notes/index.rst
index 3937e4aa..2de92c55 100644
--- a/docs/release/release-notes/index.rst
+++ b/docs/release/release-notes/index.rst
@@ -3,70 +3,71 @@
.. _dovetail-releasenotes:
-==================================================================
-OPNFV Verified Program (OVP) 2018.09 / Dovetail 2.0.0 Release Note
-==================================================================
+======================================================================
+OPNFV Verification Program (OVP) 2019.12 / Dovetail 3.0.0 Release Note
+======================================================================
-OPNFV 2018.09 Release
+OPNFV 2019.12 Release
=====================
-The OPNFV Verified Program (OVP) allows vendors and operators to obtain 'OPNFV Verified'
+The OPNFV Verification Program (OVP) allows vendors and operators to obtain 'OPNFV Verified'
status based on an agreed upon set of compliance verification test cases that align to OPNFV
-releases. The reference System under Test (SUT) are the NFV components deployed by the OPNFV
-installers for a given release, where OVP 2018.09 is based on the Fraser release. Participants
-of the program can verify commercial or open source offerings against an OVP release. This implies
-that the SUT used for verification has interfaces, components, functions and behaviors that align
-to OPNFV installer integrations.
-
-Dovetail is the overall framework used to execute tests and collect results for OVP. Dovetail does
-not deliver test content directly. Rather, test content is developed in other OPNFV test frameworks
-such as Functest and upstream test communities such as OpenStack's RefStack/Tempest projects.
-Dovetail leverages this upstream test content and provides a common set of test platform services
-for the OVP.
-
-Dovetail works in conjunction with a web portal interface dubbed the 'OVP web portal' to allow
-users to upload test results to a centralized community repository. This facilitates user
-collaboration, result sharing, self-testing and community reviews. It also serves as a hub for
-new participants to learn about the program and access key resources. The link for this portal
-is at: `OPNFV Verified Program <https://verified.opnfv.org>`_.
-
-Use of the OVP web portal is open to all and only requires a valid Linux Foundation or OpenStack
+releases. The reference System under Test (SUT) is either the NFV components deployed by the OPNFV
+installers for a given release, where OVP 2019.12 is based on the OPNFV Hunter release, or a VNF
+being on-boarded and orchestrated by the ONAP El Alto release. Participants of the program can
+verify commercial or open source offerings against an OVP release. This implies that the SUT
+used for verification has interfaces, components, functions and behaviors that align to OPNFV
+installer integrations and ONAP deployments.
+
+Dovetail is the overall framework used to execute tests and collect results for the OVP
+Infrastructure badge. Dovetail does not deliver test content directly. Rather, test content
+is developed in other OPNFV test frameworks such as Functest and upstream test communities such
+as OpenStack's RefStack/Tempest projects. Dovetail leverages this upstream test content and
+provides a common set of test platform services for the OVP.
+
+Approved test tools (OPNFV Dovetail, ONAP VTP, and ONAP VVP) work in conjunction with a web portal
+interface dubbed the 'OVP web portal' to allow users to upload test results to a centralized community
+repository. This facilitates user collaboration, result sharing, self-testing and community reviews.
+It also serves as a hub for new participants to learn about the program and access key resources. The
+link for this portal is at: `OPNFV Verification Program <https://nfvi-verified.lfnetworking.org>`_.
+
+Use of the OVP web portal is open to all and only requires a valid Linux Foundation
ID to login. Users are welcome to use the portal to upload, inspect and share results in a private
manner. In order to submit results for official review, the first step is apply for acceptance
-into the program with the participation form provided in the link: `OPNFV Verified Program
+into the program with the participation form provided in the link: `OPNFV Verification Program
Participation Form <https://na3.docusign.net/Member/PowerFormSigning.aspx?PowerFormId=dc24bf38-ea41-40d4-9e58-9babc6eec778>`_
-Test Suites & Test Areas
-------------------------
+NFVI Test Suites and Test Areas
+-------------------------------
OVP/Dovetail groups test cases into test suites and test areas. Test suites are currently a basic
-categorization around releases for the most part. Executing the test suite 'ovp.2018.09' without
-further specification will run all the test cases in the OVP 2018.09 release. Test suites are
+categorization around releases for the most part. Executing the test suite 'ovp.2019.12' without
+further specification will run all the test cases in the OVP 2019.12 release. Test suites are
divided into test areas that can be executed separately.
-Test areas include a division into **'mandatory'** and **'optional'** in an overarching
+Test cases include a division into **'mandatory'** and **'optional'** in an overarching
categorization.
All the mandatory test cases are required to be executed with passing results for all inclusive
test cases for results to be reviewed and approved by the community made up of peer reviewers.
The optional test cases are not required to be executed for the official compliance verification
-review in the OVP 2018.09 release. However, execution of these cases is encouraged, as some
+review in the OVP 2019.12 release. However, execution of these cases is encouraged, as some
optional test cases may become mandatory in future releases.
-Test Cases and Sub Test Cases
------------------------------
+NFVI Test Cases and Sub Test Cases
+----------------------------------
Each test area consists of multiple test cases where each test case can be a single test or
broken down into sub test cases. A listing of test cases with the number of sub test cases noted
-in parenthesis is shown below for the OVP 2018.09 release.
+in parenthesis is shown below for the OVP 2019.12 release.
**Mandatory**
+- bottlenecks.stress.ping (1)
- functest.vping.userdata (1)
- functest.vping.ssh (1)
-- bottlenecks.stress.ping (1)
-- functest.tempest.osinterop (200)
+- functest.tempest.osinterop (219)
- functest.tempest.compute (12)
- functest.tempest.identity_v3 (11)
- functest.tempest.image (2)
@@ -74,7 +75,7 @@ in parenthesis is shown below for the OVP 2018.09 release.
- functest.tempest.volume (2)
- functest.tempest.neutron_trunk_ports (38)
- functest.tempest.ipv6_api (21)
-- functest.security.patrole (119)
+- functest.security.patrole (124)
- yardstick.ha.nova_api (1)
- yardstick.ha.neutron_server (1)
- yardstick.ha.keystone (1)
@@ -87,55 +88,59 @@ in parenthesis is shown below for the OVP 2018.09 release.
- yardstick.ha.database (1)
-There are a total of 432 mandatory test cases.
+There are a total of 456 mandatory test cases.
**Optional**
- functest.vnf.vims (1)
- functest.vnf.vepc (1)
-- functest.snaps.smoke (1)
-- yardstick.ha.neutron_l3_agent (1)
-- yardstick.ha.controller_restart (1)
- functest.tempest.ipv6_scenario (8)
- functest.tempest.multi_node_scheduling (6)
- functest.tempest.network_security (6)
- functest.tempest.vm_lifecycle (12)
- functest.tempest.network_scenario (5)
-- functest.tempest.bgpvpn (15)
-- functest.bgpvpn.subnet_connectivity (1)
-- functest.bgpvpn.tenant_separation (1)
-- functest.bgpvpn.router_association (1)
-- functest.bgpvpn.router_association_floating_ip (1)
-
+- functest.tempest.bgpvpn (21)
+- functest.security.patrole_vxlan_dependent (2)
+- yardstick.ha.neutron_l3_agent (1)
+- yardstick.ha.controller_restart (1)
-There are a total of 61 optional test cases.
+There are a total of 64 optional test cases.
OPNFV Test Projects and Components
----------------------------------
The OPNFV test frameworks integrated into the Dovetail framework that deliver test content are:
- * Functest (leverages OpenStack RefStack/Tempest projects in addition to supplying native test cases)
- * Yardstick
- * Bottlenecks
+ - Functest (leverages OpenStack RefStack/Tempest projects in addition to supplying native test cases)
+ - Yardstick
+ - Bottlenecks
+
+ONAP Test Projects and Components
+---------------------------------
+
+The ONAP test projects and components used with this OVP release to provide the test requirements
+and test scripting are:
+- VNFRQTS
+- VNFSDK
+- VVP
Acceptence and Marketing
------------------------
-Upon successful community review of results for OVP 2018.09, the Linux Foundation Compliance
+Upon successful community review of results for OVP 2019.12, the Linux Foundation Compliance
Verification Committee (LFN CVC) on behalf of the Board of Directors can award a product 'OPNFV
Verified' status. Use of 'OPNFV Verified' Program Marks shall be awarded to the platform used
for compliance verification. The category label of 'Infrastructure' is used within the Program
Marks logo and limits the scope of this OVP release to a SUT consisting of NFVI and VIM components
using ETSI terminology. It does not provide compliance verification for specific VNFs in any fashion.
-The date '2018.09' corresponds to a reference SUT that aligns to the OPNFV Fraser release and
-currently aligns to the Dovetail framework version 2.0.0.
+The date '2019.12' corresponds to a reference SUT that aligns to the OPNFV Hunter release and
+currently aligns to the Dovetail framework version 3.0.0.
Organizations shall not use the Program Marks in any way that would associate it with any
individual or company logo or brand, beyond the association to the specific platform to which it
was awarded. While OpenStack RefStack interoperability and Tempest integration test cases are
-executed as part of the OVP 2018.09 compliance verification test suites, the OVP does not grant or
+executed as part of the OVP 2019.12 compliance verification test suites, the OVP does not grant or
award OpenStack Marks in any fashion. 'OPNFV Verified' status does not assert readiness for
commercial deployment.
@@ -152,101 +157,118 @@ Release Data
| **Project** | Dovetail |
| | |
+--------------------------------------+---------------------------------------+
-| **Repo tag** | ovp.2.0.0 |
+| **Repo tag** | ovp-3.0.1 |
| | |
+--------------------------------------+---------------------------------------+
-| **Release designation** | OPNFV Verified Program (OVP) |
-| | 2018.09 (Fraser) |
+| **Release designation** | OPNFV Verification Program (OVP) |
+| | 2019.12 (Hunter) |
+--------------------------------------+---------------------------------------+
-| **Release date** | September 2018 |
+| **Release date** | December 2019 |
| | |
+--------------------------------------+---------------------------------------+
-| **Purpose of the delivery** | Support OVP 2018.09 release with |
-| | OPNFV Fraser release as reference SUT |
+| **Purpose of the delivery** | Support OVP 2019.12 release with |
+| | OPNFV Hunter release as reference SUT |
+--------------------------------------+---------------------------------------+
+| **Notes** | Point release ovp-3.0.1 updates, |
+| | changes, and corrects the |
+| | documentation only. |
++--------------------------------------+---------------------------------------+
+
Deliverables
============
Software
--------
+
+OPNFV Software
+""""""""""""""
+
+-------------------------+-----------------------------------+----------------+
| **Docker Container** | **Docker Image** | **Tag** |
+-------------------------+-----------------------------------+----------------+
-| dovetail | opnfv/dovetail | ovp-2.0.0 |
-+-------------------------+-----------------------------------+----------------+
-| functest | opnfv/functest-smoke | opnfv-6.3.0 |
+| dovetail | opnfv/dovetail | ovp-3.0.0 |
+-------------------------+-----------------------------------+----------------+
-| functest | opnfv/functest-healthcheck | opnfv-6.3.0 |
+| functest | opnfv/functest-smoke | hunter |
+-------------------------+-----------------------------------+----------------+
-| functest | opnfv/functest-features | opnfv-6.3.0 |
+| functest | opnfv/functest-healthcheck | hunter |
+-------------------------+-----------------------------------+----------------+
-| functest | opnfv/functest-vnf | opnfv-6.3.0 |
+| functest | opnfv/functest-vnf | hunter |
+-------------------------+-----------------------------------+----------------+
-| yardstick | opnfv/yardstick | ovp-2.0.0 |
+| yardstick | opnfv/yardstick | opnfv-8.0.0 |
+-------------------------+-----------------------------------+----------------+
-| bottlenecks | opnfv/bottlenecks | ovp-2.0.0 |
+| bottlenecks | opnfv/bottlenecks | 8.0.1-latest |
+-------------------------+-----------------------------------+----------------+
-
-Docker images:
+**Docker images:**
- `Dovetail Docker images <https://hub.docker.com/r/opnfv/dovetail>`_
- `Functest-smoke Docker images <https://hub.docker.com/r/opnfv/functest-smoke/>`_
- `Functest-healthcheck Docker images <https://hub.docker.com/r/opnfv/functest-healthcheck/>`_
-- `Functest-features Docker images <https://hub.docker.com/r/opnfv/functest-features/>`_
- `Functest-vnf Docker images <https://hub.docker.com/r/opnfv/functest-vnf/>`_
- `Yardstick Docker images <https://hub.docker.com/r/opnfv/yardstick/>`_
- `Bottlenecks Docker images <https://hub.docker.com/r/opnfv/bottlenecks/>`_
+ONAP Software
+"""""""""""""
++-------------------------+--------------------------------------------------------------+
+| **Item** | **Repo Link** |
++-------------------------+--------------------------------------------------------------+
+| VTP/VNFSDK Test Scripts | `<https://gerrit.onap.org/r/admin/repos/vnfsdk/refrepo>`_ |
++-------------------------+--------------------------------------------------------------+
+| VVP Robot Test Scripts | `<https://gerrit.onap.org/r/admin/repos/oom>`_ |
++-------------------------+--------------------------------------------------------------+
Documents
---------
-- `System Preparation Guide <http://docs.opnfv.org/en/stable-fraser/submodules/dovetail/docs/testing/user/systempreparation/index.html>`_
+- `System Preparation Guide <https://opnfv-dovetail.readthedocs.io/en/stable-hunter/testing/user/systempreparation/index.html>`_
-- `User Guide <http://docs.opnfv.org/en/stable-fraser/submodules/dovetail/docs/testing/user/userguide/testing_guide.html>`_
+- `NFVI User Guide <https://opnfv-dovetail.readthedocs.io/en/stable-hunter/testing/user/userguide/testing_guide.html>`_
-- `OPV Test Specifications <http://docs.opnfv.org/en/stable-fraser/submodules/dovetail/docs/testing/user/testspecification/index.html>`_
+- `VNF User Guide: <https://opnfv-dovetail.readthedocs.io/en/stable-hunter/testing/user/userguide/testing_guide.html>`_
-- `Dovetail CLI Reference <http://docs.opnfv.org/en/stable-fraser/submodules/dovetail/docs/testing/user/userguide/cli_reference.html>`_
+- `OVP NFVI Test Specifications <https://opnfv-dovetail.readthedocs.io/en/stable-hunter/testing/user/testspecification/index.html>`_
-- `OPV Workflow <http://docs.opnfv.org/en/stable-fraser/submodules/dovetail/docs/testing/user/certificationworkflow/index.html>`_
+- `ONAP VNF Test Specifications <https://docs.onap.org/en/elalto/submodules/vnfrqts/testcases.git/docs/index.html>`_
-- `OPV Reviewer Guide <http://docs.opnfv.org/en/stable-fraser/submodules/dovetail/docs/testing/user/reviewerguide/index.html>`_
+- `Dovetail CLI Reference <https://opnfv-dovetail.readthedocs.io/en/stable-hunter/testing/user/userguide/cli_reference.html>`_
+- `Dovetail RESTful API <https://opnfv-dovetail.readthedocs.io/en/stable-hunter/testing/user/userguide/api_testing_guide.html>`_
-Testing with OPNFV Fraser Installers
+- `OVP Workflow <https://opnfv-dovetail.readthedocs.io/en/stable-hunter/testing/user/certificationworkflow/index.html>`_
+
+- `OVP Reviewer Guide <https://opnfv-dovetail.readthedocs.io/en/stable-hunter/testing/user/reviewerguide/index.html>`_
+
+
+Testing with OPNFV Hunter Installers
====================================
-OVP 2018.09 and Dovetail 2.0.0 are known to be have been tested with the following OPNFV
-Fraser installer versions.
+OVP 2019.12 and Dovetail 3.0.0 are known to be have been tested with the following OPNFV
+Hunter installer versions.
+-----------------+----------------------+
| Installer | Version |
+=================+======================+
-| Apex | stable/fraser |
-+-----------------+----------------------+
-| Compass | stable/fraser |
-+-----------------+----------------------+
-| Fuel | stable/fraser |
+| Fuel | stable/hunter |
+-----------------+----------------------+
-Fraser Known Restrictions/Issues
+Hunter Known Restrictions/Issues
================================
-Please refer to the Dovetail project JIRA for known issues with the Dovetail
-Fraser release:
+Please refer to the OPNFV and ONAP JIRA for known issues with each applicable project:
-.. https://jira.opnfv.org/projects/DOVETAIL
+- `<https://jira.opnfv.org/projects/DOVETAIL>`_
+- `<https://jira.onap.org/projects/VVP>`_
+- `<https://jira.onap.org/projects/VNFSDK>`_
Useful Links
============
- - `OVP Web Portal <https://verified.opnfv.org>`_
+ - `OVP Web Portal <https://nfvi-verified.lfnetworking.org>`_
- `Wiki Project Page <https://wiki.opnfv.org/display/dovetail>`_
@@ -258,4 +280,4 @@ Useful Links
- Dovetail IRC Channel: #opnfv-dovetail
- - `Dovetail Test Configuration <https://git.opnfv.org/dovetail/tree/etc/compliance/ovp.2018.09.yaml>`_
+ - `Dovetail Test Configuration <https://git.opnfv.org/dovetail/tree/etc/compliance/ovp.2019.12.yaml>`_
diff --git a/docs/testing/developer/genericframework/index.rst b/docs/testing/developer/genericframework/index.rst
new file mode 100644
index 00000000..75721469
--- /dev/null
+++ b/docs/testing/developer/genericframework/index.rst
@@ -0,0 +1,413 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+.. (c) Huawei Technologies Co.,Ltd, and others
+
+====================================
+Dovetail as a Generic Test Framework
+====================================
+
+.. toctree::
+ :maxdepth: 2
+
+
+Overview
+========
+
+Dovetail is responsible for the technical realization of the OPNFV Verification
+Program (OVP) and other compliance verification projects within the scope of
+the Linux Foundation Networking (LFN) umbrella projects.
+Dovetail provides a generic framework for executing a specific set of test cases
+which define the scope of a given compliance verification program, such as OVP.
+
+This document aims at introducing what Dovetail generic framework looks like and
+how to develop within this framework.
+
+
+Introduction of Dovetail Framework
+==================================
+
+The following diagram illustrates Dovetail generic framework.
+
+.. image:: ../../../images/dovetail_generic_framework.png
+ :align: center
+ :scale: 50%
+
+In this diagram, there are 5 main parts, `TestcaseFactory`, `TestRunnerFactory`,
+`CrawlerFactory`, `CheckerFactory` and `test case groups`.
+
+- **TestcaseFactory**: For each project, there needs to create its own
+ testcase class such as `FunctestTestcase` and `OnapVtpTestcase`. All these
+ classes are based on class `Testcase`. There are already many functions in this
+ base class which are mainly used to parse test case configuration files. If no
+ other special requirements exist, it only needs to initialize these classes with
+ different types. Otherwise, it needs to overload or add some functions.
+
+- **TestRunnerFactory**: Similar to `TestcaseFactory`, each project has its own
+ test runner class. Dovetail supports 2 kinds of test runners, `DockerRunner`
+ and `ShellRunner`. For projects based on Docker, it needs to create
+ their own test runner classes such as `FunctestRunner` which inherit from class
+ `DockerRunner`. For other projects that are based on Shell, it can use `ShellRunner`
+ directly. Test case runners provide many functions to support test cases runs
+ such as preparing test tool of each project, run all the commands defined by
+ each test case and clean the environment.
+
+- **Test case groups**: Each group is composed of one project configuration file
+ and a set of test cases belonging to this project. These groups are used as the
+ input of test runners to provide information of projects and test cases. For
+ `ShellRunner`, it only needs test case configuratons as the input.
+
+- **CrawlerFactory**: This is used to parse the results of test cases and record
+ them with unified format. The original result data report by each project is
+ different. So it needs to create different crawler classes for different projects
+ to parse their results.
+
+- **CheckerFactory**: This is used to check the result data generated by crawler.
+ Each project should have its own checker class due to the different requirements
+ of different projects.
+
+
+Development with Dovetail Framework
+===================================
+
+Everyone who is interested in developing Dovetail framework to integrate new upstream
+test cases will face one of the two following scenarios:
+
+- **Adding test cases that belong to integrated projects**: There are already some
+ projects integrated in Dovetail. These projects are coming from OPNFV (Open Platform
+ for NFV) and ONAP (Open Network Automation Platform) communities. It will be
+ much easier to add new test cases that belong to these projects.
+
+- **Adding test cases that not belong to integrated projects**: The test cases
+ may belong to other projects that haven't been integrated into Dovetail yet.
+ These projects could be in OPNFV, ONAP or other communities. This scenario is a
+ little more complicated.
+
+
+Test cases belonging to integrated projects
+-------------------------------------------
+
+Dovetail framework already includes a large amount of test cases. All these test
+cases are implemented by upstream projects in OPNFV and ONAP. The upstream
+projects already integrated in Dovetail are Functest, Yardstick and Bottlenecks
+from OPNFV and VNF SDK and VVP from ONAP.
+
+In order to add a test case belonging to one of these projects, there
+only need to add one test case configuration file which is in yaml format.
+Following is the introduction about how to use the file to add one new test case.
+Please refer to `Dovetail test case github
+<https://github.com/opnfv/dovetail/tree/master/etc/testcase>`_
+for all configuration files of all test cases.
+
+.. code-block:: bash
+
+ ---
+ Test case name in Dovetail:
+ name: Test case name in Dovetail
+ objective: Test case description
+ validate:
+ type: 'shell' or name of the project already integrated in Dovetail
+ testcase: The original test case name called in the project that it is developed
+ image_name: Name of the Docker image used to run this test
+ pre_condition:
+ - 'Commands needed to be executed before running this test'
+ - 'e.g. cp src_file dest_file'
+ cmds:
+ - 'Commands used to run this test case'
+ post_condition:
+ - 'Commands needed to be executed after running this test case'
+ report:
+ source_archive_files:
+ - test.log
+ dest_archive_files:
+ - path/to/archive/test.log
+ check_results_files:
+ - results.json
+ portal_key_file: path/to/key/logs/xxx.log
+ sub_testcase_list:
+ - sub_test_1
+ - sub_test_2
+ - sub_test_3
+
+This is the complete format of test case configuration file. Here are some
+detailed description for each of the configuration options.
+
+- **Test case name in Dovetail**: All test cases should be named as 'xxx.yyy.zzz'.
+ This is the alias in Dovetail and has no relationship with its name in its own
+ project. The first part is used to identify the project where this test case
+ come from (e.g. functest, onap-vtp). The second part is used to classify this
+ test case according to test area (e.g. healthcheck, ha). Dovetail supports to
+ run whole test cases in one test suite with the same test area. Also the area
+ is used to group all test cases and generate the summary report at the end of
+ the test. The last part is special for this test case itself (e.g. image,
+ haproxy, csar). It's better to keep the file name the same as the test case
+ name to make it easier to find the config file according to this test case
+ alias in Dovetail.
+
+- **validate**: This is the main section to define how to run this test case.
+
+ - **type**: This is the type of this test case. It can be `shell` which means
+ running this test case with Linux bash commands within Dovetail container. Also it
+ can be one of the projects already integrated in Dovetail (functest, yardstick,
+ bottlenecks, onap-vtp and onap-vvp). Then this type is used to map to its project
+ configuration yaml file. For example, in order to add a test case
+ in OPNFV project Functest to Dovetail framework, the type here should be
+ `functest`, and will map to `functest_config.yml` for more configurations
+ in project level. Please refer to `Dovetail project config github
+ <https://github.com/opnfv/dovetail/tree/master/etc/conf>`_ for more details.
+
+ - **testcase**: This is the name defined in its own project. One test case can
+ be uniquely identified by `type` and `testcase`. Take the test case
+ `functest.vping.ssh` as an example. Its `type` is 'functest' and `testcase`
+ is 'vping_ssh'. With these 2 properties, it can be uniquely identified. End users only
+ need to know that there is a test case named `functest.vping.ssh` in OVP
+ compliance test scope. Dovetail Framework will run `vping_ssh` within Functest
+ Docker container.
+
+ - **image_name**: [optional] If the type is `shell`, there is no need to give
+ this. For other types, there are default docker images defined in their project
+ configuration files. If this test case uses a different docker image, it needs
+ to overwrite it by adding `image_name` here. The `image_name` here should only
+ be the docker image name without tag. The tag is defined in project's configuration
+ file for all test cases belonging to this project.
+
+ - **pre_condition**: [optional] A list of all preparations needed by this
+ test case. If the list is the same as the default one in its project configuration
+ file, then there is no need to repeat it here. Otherwise, it's necessary to
+ overwrite it. If its type is `shell`, then all commands in `pre_condition`,
+ `cmds` and `post_condition` should be executable within Dovetail Ubuntu 14.04
+ Docker container. If its type is one of the Docker runner projects, then all
+ commands should be executable within their own containers. For Functest, it's
+ alpine 3.8. For Yardstick and Bottlenecks it's Ubuntu 16.04. For VNF SDK it's
+ Ubuntu 14.04. Also all these commands should not require network connection
+ because some commercial platforms may be offline environments in private labs.
+
+ - **cmds**: [optional] A list of all commands used to run this test case.
+
+ - **post_condition**: [optional] A list of all commands needed after executing
+ this test case such as some clean up operations.
+
+- **report**: This is the section for this test case to archive some log files and
+ provide the result file for reporting PASS or FAIL.
+
+ - **source_archive_files**: [optional] If there is no need to archive any files,
+ this section can be removed. Otherwise, this is a list of all source files
+ needed to be archived. All files generated by all integrated projects will be
+ put under `$DOVETAIL_HOME/results`. In order to classify and avoid overwriting
+ them, it needs to rename some important files or move them to new directories.
+ Navigating directory `$DOVETAIL_HOME/results` to find out all files
+ needed to be archived. The paths here should be relative ones according to
+ `$DOVETAIL_HOME/results`.
+
+ - **dest_archive_files**: [optional] This should be a list corresponding to the
+ list of `source_archive_files`. Also all paths here should be relative ones
+ according to `$DOVETAIL_HOME/results`.
+
+ - **check_results_files**: This should be a list of relative paths of
+ the result files generated by this test case. Dovetail will parse these files
+ to get the result (PASS or FAIL).
+
+ - **portal_key_file**: This should be the key log file of this test case which will
+ be used by the OVP portal for review.
+
+ - **sub_testcase_list**: [optional] This section is almost only for Tempest tests
+ in Functest. Take `functest.tempest.osinterop` as an example. The `sub_testcase_list`
+ here is an check list for this kind of tempest tests. Only when all sub test
+ cases list here are passed, this test case can be taken as PASS. The other kind
+ of tempest tests is `tempest_custom` such as `functest.tempest.image`. Besides
+ taking the `sub_testcase_list` as the check list, it's also used to generate an
+ input file of Functest to define the list of sub test cases to be tested.
+
+
+Test cases not belonging to integrated projects
+-----------------------------------------------
+
+If test cases waiting to be added into Dovetail do not belong to any project
+that is already integrated into Dovetail framework, then besides adding the test
+case configuration files introduced before, there are some other files needed to
+be added or modified.
+
+
+Step 1: Add a project configuration file
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+For a new test case that belongs to a new project, it needs to create a project
+configuration file to define this new project in Dovetail first. Now Dovetail
+only supports integration of projects by using their Docker images. If this test
+case should be run with shell runner, then can only add test case configuration
+files with `type` 'shell' as describing before and skip the following steps. Following is
+the introduction of how to use project configuration file to add one new project
+into Dovetail. Please refer to `Dovetail projects configuration github
+<https://github.com/opnfv/dovetail/tree/master/etc/conf>`_ for all configuration
+files of all integrated projects.
+
+.. code-block:: bash
+
+ ---
+
+ {% set validate_testcase = validate_testcase or '' %}
+ {% set testcase = testcase or '' %}
+ {% set dovetail_home = dovetail_home or '' %}
+ {% set debug = debug or 'false' %}
+ {% set build_tag = build_tag or '' %}
+ {% set userconfig_dir = '/tmp/userconfig' %}
+ {% set patches_dir = '/tmp/patches' %}
+ {% set result_dir = '/tmp/results' %}
+ {% set openrc_file = '/home/conf/env_file' %}
+
+ project name:
+ image_name: name of the docker image
+ docker_tag: tag of the docker image
+ opts:
+ detach: true
+ stdin_open: true
+ privileged: true
+ shell: '/bin/bash'
+ envs:
+ - 'CI_DEBUG={{debug}}'
+ - 'DEPLOY_SCENARIO={{deploy_scenario}}'
+ - 'ENV_NAME=env_value'
+ volumes:
+ - '{{dovetail_home}}/userconfig:{{userconfig_dir}}'
+ - '{{dovetail_home}}/results:{{result_dir}}'
+ - '/path/on/host:/path/in/container'
+ - '/path/of/host/file:/file/path/in/container'
+ mounts:
+ - 'source={{dovetail_home}}/pre_config/env_config.sh,target={{openrc_file}}
+ - 'source={{dovetail_home}}/pre_config,target=/home/opnfv/pre_config'
+ - 'source=/file/or/derectory/on/host,target=/file/or/derectory/in/container'
+ patches_dir: {{patches_dir}}
+ pre_condition:
+ - 'Commands needed to be executed before running this test'
+ cmds:
+ - 'Commands used to run this test case'
+ post_condition:
+ - 'Commands needed to be executed after running this test case'
+ openrc: absolute path of openstack credential files
+ extra_container:
+ - container1_name
+ - container2_name
+
+This is the complete format of project configuration file. Here are some
+detailed description for each of the configuration options.
+
+- **Jinja Template**: At the begining of this yaml file, it uses Jinja template
+ to define some parameters that will be used somewhere in this file (e.g. result_dir
+ and openrc_file). Besides those, there are some other parameters providing by Dovetail
+ framework as input of this file, and other parameters can be defined by using these
+ ones (e.g. testcase and dovetail_home). The whole input parameters which can be used
+ are list below.
+
+ - **attack_host**: This is the attack host name of the test case which calls this
+ project configuration file. It's only for HA test cases and can be given in HA
+ configuration file `pod.yaml`.
+
+ - **attack_process**: This is the attack process name of the test case which calls
+ this project configuration file. It's only for HA test cases and can be given in HA
+ configuration file `pod.yaml`.
+
+ - **build_tag**: This is a string includes the UUID generated by Dovetail.
+
+ - **cacert**: This is also only for OpenStack test cases. It is the absolute
+ path of the OpenStack certificate provided in `env_config.sh` file.
+
+ - **deploy_scenario**: This is the input when running Dovetail with option
+ `--deploy-scenario`.
+
+ - **debug**: This is `True` or `False` according to the command running test
+ cases with or without option `--debug`.
+
+ - **dovetail_home**: This is the `DOVETAIL_HOME` getting from the ENV.
+
+ - **os_insecure**: This is only for test cases aiming at OpenStack. This is
+ `True` or `False` according to `env_config.sh` file.
+
+ - **testcase**: This is the name of the test case which calls this project
+ configuration file. Different from `validate_testcase`, this is the alias
+ defined in Dovetail not in its own project.
+
+ - **validate_testcase**: This is the name of the test case instance which calls this
+ project configuration file. The name is provided by the configuration file
+ of this test case (validate -> testcase).
+
+- **project name**: This is the project name defined in Dovetail. For example
+ OPNFV Functest project is named as 'functest' here in Dovetail. This project
+ name will be used by test case configuration files as well as somewhere in
+ Dovetail source code.
+
+- **image_name**: This is the name of the default Docker image for most test cases
+ within this project. Each test case can overwrite it with its own configuration.
+
+- **docker_tag**: This is the tag of all Docker images for all test cases within
+ this project. For each release, it should use one Docker image with a stable
+ and official release version.
+
+- **opts**: Here are all options used to run Docker containers except 'image',
+ 'command', 'environment', 'volumes', 'mounts' and 'extra_hosts'. For example,
+ the options include 'detach', 'privileged' and 'tty'. The full list of all
+ options can be found in `Docker python SDK docs <https://docker-py.readthedocs.io/en/stable/containers.html>`_.
+
+- **shell**: This is the command used to run in the container.
+
+- **envs**: This is a list of all envs used to run Docker containers.
+
+- **volumes**: A volume mapping list used to run Docker containers. The source volumes
+ list here are allowed to be nonexistent and Docker will create new directories for them
+ on the host. Every project should at least map the `$DOVETAIL_HOME/results`
+ in the test host to containers to collect all result files.
+
+- **mounts**: A mount mapping list used to run Docker containers. More powerful alternative
+ to **volumes**. The source volumes list here are not allowed to be nonexistent.
+ Every project should at least mount the `$DOVETAIL_HOME/pre_config` in the test host to
+ containers to get config files.
+
+- **patches_dir**: [optional] This is an absolute path of the patches applied to
+ the containers.
+
+- **pre_condition**: A list of all default preparations needed by this project.
+ It can be overwritten by configurations of test cases.
+
+- **cmds**: A list of all default commands used to run all test cases within
+ this project. Also it can be overwritten by configurations of test cases.
+
+- **post_condition**: A list of all default cleaning commands needed by this
+ project.
+
+- **openrc**: [optional] If the system under test is OpenStack, then it needs to
+ provide the absolute path here to copy the credential file in the Test Host to
+ containers.
+
+- **extra_container**: [optional] The extra containers needed to be removed at the
+ end of the test. These containers are created by the test cases themselves at
+ runtime rather than created by Dovetail.
+
+
+Step 2: Add related classes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+After adding the project and test case configuration files, there also need to
+add some related classes into the source code.
+
+- **Test Case class**: Each project should have its own test case class in
+ `testcase.py` for `TestcaseFactory`.
+
+- **Test Runner class**: Each project should have its own test runner class in
+ `test_runner.py` for `TestRunnerFactory`.
+
+- **Crawler class**: Each project should have its own test results crawler class
+ in `report.py` for `CrawlerFactory`.
+
+- **Checker class**: Each project should have its own test results checker class
+ in `report.py` for `CheckerFactory`.
+
+
+Step 3: Create related logs
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If the classes added in step2 have function `create_log`, then need to call
+these functions in `run.py` to initial the log instances at the very begining.
+
+
+Step 4: Update unit tests
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+A patch is not going to be verified without 100% coverage when applying acceptance check.
diff --git a/docs/testing/developer/testcaserequirements/index.rst b/docs/testing/developer/testcaserequirements/index.rst
index 6171a07f..b03a033c 100644
--- a/docs/testing/developer/testcaserequirements/index.rst
+++ b/docs/testing/developer/testcaserequirements/index.rst
@@ -23,7 +23,7 @@ portability across NFVI instances. All OVP tests are available in open source
and are executed in open source test frameworks.
-Test case requirements
+Test Case Requirements
======================
The following requirements are mandatory for a test to be submitted for
@@ -101,7 +101,7 @@ consideration in the OVP test suite:
- Use case specification
- Test preconditions
- Basic test flow execution description and test assertions
- - Pass fail criteria
+ - Pass/Fail criteria
- The following things may be documented for the test case:
@@ -120,9 +120,9 @@ Dovetail Test Suite Naming Convention
Test case naming and structuring must comply with the following conventions.
The fully qualified name of a test case must comprise three sections:
-`<testproject>.<test_area>.<test_case_name>`
+`<test_project>.<test_area>.<test_case_name>`
-- **testproject**: The fully qualified test case name must identify the test
+- **test_project**: The fully qualified test case name must identify the test
project which developed and maintains the test case.
- **test_area**: The fully qualified test case name must identify the test case
diff --git a/docs/testing/developer/testscope/index.rst b/docs/testing/developer/testscope/index.rst
deleted file mode 100644
index 7e05a9d2..00000000
--- a/docs/testing/developer/testscope/index.rst
+++ /dev/null
@@ -1,616 +0,0 @@
-.. This work is licensed under a Creative Commons Attribution 4.0 International License.
-.. http://creativecommons.org/licenses/by/4.0
-.. (c) OPNFV
-
-.. _dovetail-test_scope:
-
-=======================================================
-Compliance and Verification program accepted test cases
-=======================================================
-
- .. toctree::
- :maxdepth: 2
-
-
-Mandatory OVP Test Areas
-========================
-
-----------------------------------
-Test Area VIM Operations - Compute
-----------------------------------
-
-Image operations within the Compute API
----------------------------------------
-
-| tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_delete_image
-| tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_image_specify_multibyte_character_image_name
-
-
-Basic support Compute API for server actions such as reboot, rebuild, resize
-----------------------------------------------------------------------------
-
-| tempest.api.compute.servers.test_instance_actions.InstanceActionsTestJSON.test_get_instance_action
-| tempest.api.compute.servers.test_instance_actions.InstanceActionsTestJSON.test_list_instance_actions
-
-
-Generate, import, and delete SSH keys within Compute services
--------------------------------------------------------------
-
-| tempest.api.compute.servers.test_servers.ServersTestJSON.test_create_specify_keypair
-
-
-List supported versions of the Compute API
-------------------------------------------
-
-| tempest.api.compute.test_versions.TestVersions.test_list_api_versions
-
-
-Quotas management in Compute API
---------------------------------
-
-| tempest.api.compute.test_quotas.QuotasTestJSON.test_get_default_quotas
-| tempest.api.compute.test_quotas.QuotasTestJSON.test_get_quotas
-
-
-Basic server operations in the Compute API
-------------------------------------------
-
-| tempest.api.compute.servers.test_servers.ServersTestJSON.test_create_server_with_admin_password
-| tempest.api.compute.servers.test_servers.ServersTestJSON.test_create_with_existing_server_name
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_numeric_server_name
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_server_metadata_exceeds_length_limit
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_server_name_length_exceeds_256
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_with_invalid_flavor
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_with_invalid_image
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_with_invalid_network_uuid
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_delete_server_pass_id_exceeding_length_limit
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_delete_server_pass_negative_id
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_get_non_existent_server
-| tempest.api.compute.servers.test_create_server.ServersTestJSON.test_host_name_is_same_as_server_name
-| tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_host_name_is_same_as_server_name
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_invalid_ip_v6_address
-| tempest.api.compute.servers.test_create_server.ServersTestJSON.test_list_servers
-| tempest.api.compute.servers.test_create_server.ServersTestJSON.test_list_servers_with_detail
-| tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_list_servers
-| tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_list_servers_with_detail
-| tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_filter_by_flavor
-| tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_filter_by_image
-| tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_filter_by_server_name
-| tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_filter_by_server_status
-| tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_limit_results
-| tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_flavor
-| tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_image
-| tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_limit
-| tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_server_name
-| tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_active_status
-| tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filtered_by_name_wildcard
-| tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_changes_since_future_date
-| tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_changes_since_invalid_date
-| tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_limits
-| tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_limits_greater_than_actual_count
-| tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_limits_pass_negative_value
-| tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_limits_pass_string
-| tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_non_existing_flavor
-| tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_non_existing_image
-| tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_non_existing_server_name
-| tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_detail_server_is_deleted
-| tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_status_non_existing
-| tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_with_a_deleted_server
-| tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_lock_unlock_server
-| tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_delete_server_metadata_item
-| tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_get_server_metadata_item
-| tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_list_server_metadata
-| tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_set_server_metadata
-| tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_set_server_metadata_item
-| tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_update_server_metadata
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_server_name_blank
-| tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_reboot_server_hard
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_reboot_non_existent_server
-| tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_rebuild_server
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_rebuild_deleted_server
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_rebuild_non_existent_server
-| tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_stop_start_server
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_stop_non_existent_server
-| tempest.api.compute.servers.test_servers.ServersTestJSON.test_update_access_server_address
-| tempest.api.compute.servers.test_servers.ServersTestJSON.test_update_server_name
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_update_name_of_non_existent_server
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_update_server_name_length_exceeds_256
-| tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_update_server_set_empty_name
-| tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_created_server_vcpus
-| tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_server_details
-| tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_verify_created_server_vcpus
-| tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_verify_server_details
-
-
-Retrieve volume information through the Compute API
----------------------------------------------------
-
-| tempest.api.compute.volumes.test_attach_volume.AttachVolumeTestJSON.test_attach_detach_volume
-| tempest.api.compute.volumes.test_attach_volume.AttachVolumeTestJSON.test_list_get_volume_attachments
-
-
-
------------------------------------
-Test Area VIM Operations - Identity
------------------------------------
-
-API discovery operations within the Identity v3 API
----------------------------------------------------
-
-| tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_media_types
-| tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_version_resources
-| tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_version_statuses
-
-
-Auth operations within the Identity API
----------------------------------------
-
-| tempest.api.identity.v3.test_tokens.TokensV3Test.test_create_token
-
-
---------------------------------
-Test Area VIM Operations - Image
---------------------------------
-
-Image deletion tests using the Glance v2 API
---------------------------------------------
-
-| tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_delete_image
-| tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_delete_image_null_id
-| tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_delete_non_existing_image
-| tempest.api.image.v2.test_images_tags_negative.ImagesTagsNegativeTest.test_delete_non_existing_tag
-
-
-Image get tests using the Glance v2 API
----------------------------------------
-
-| tempest.api.image.v2.test_images.ListUserImagesTest.test_get_image_schema
-| tempest.api.image.v2.test_images.ListUserImagesTest.test_get_images_schema
-| tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_delete_deleted_image
-| tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_image_null_id
-| tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_non_existent_image
-
-
-CRUD image operations in Images API v2
---------------------------------------
-
-| tempest.api.image.v2.test_images.ListUserImagesTest.test_list_no_params
-
-
-Image list tests using the Glance v2 API
-----------------------------------------
-
-| tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_container_format
-| tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_disk_format
-| tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_limit
-| tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_min_max_size
-| tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_size
-| tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_status
-| tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_visibility
-
-
-Image update tests using the Glance v2 API
-------------------------------------------
-
-| tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_update_image
-| tempest.api.image.v2.test_images_tags.ImagesTagsTest.test_update_delete_tags_for_image
-| tempest.api.image.v2.test_images_tags_negative.ImagesTagsNegativeTest.test_update_tags_for_non_existing_image
-
-
-----------------------------------
-Test Area VIM Operations - Network
-----------------------------------
-
-Basic CRUD operations on L2 networks and L2 network ports
----------------------------------------------------------
-
-| tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_allocation_pools
-| tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_dhcp_enabled
-| tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_gw
-| tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_gw_and_allocation_pools
-| tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_host_routes_and_dns_nameservers
-| tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_without_gateway
-| tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_all_attributes
-| tempest.api.network.test_networks.NetworksTest.test_create_update_delete_network_subnet
-| tempest.api.network.test_networks.NetworksTest.test_delete_network_with_subnet
-| tempest.api.network.test_networks.NetworksTest.test_list_networks
-| tempest.api.network.test_networks.NetworksTest.test_list_networks_fields
-| tempest.api.network.test_networks.NetworksTest.test_list_subnets
-| tempest.api.network.test_networks.NetworksTest.test_list_subnets_fields
-| tempest.api.network.test_networks.NetworksTest.test_show_network
-| tempest.api.network.test_networks.NetworksTest.test_show_network_fields
-| tempest.api.network.test_networks.NetworksTest.test_show_subnet
-| tempest.api.network.test_networks.NetworksTest.test_show_subnet_fields
-| tempest.api.network.test_networks.NetworksTest.test_update_subnet_gw_dns_host_routes_dhcp
-| tempest.api.network.test_ports.PortsTestJSON.test_create_bulk_port
-| tempest.api.network.test_ports.PortsTestJSON.test_create_port_in_allowed_allocation_pools
-| tempest.api.network.test_ports.PortsTestJSON.test_create_update_delete_port
-| tempest.api.network.test_ports.PortsTestJSON.test_list_ports
-| tempest.api.network.test_ports.PortsTestJSON.test_list_ports_fields
-| tempest.api.network.test_ports.PortsTestJSON.test_show_port
-| tempest.api.network.test_ports.PortsTestJSON.test_show_port_fields
-| tempest.api.network.test_ports.PortsTestJSON.test_update_port_with_security_group_and_extra_attributes
-| tempest.api.network.test_ports.PortsTestJSON.test_update_port_with_two_security_groups_and_extra_attributes
-
-
-Basic CRUD operations on security groups
-----------------------------------------
-
-| tempest.api.network.test_security_groups.SecGroupTest.test_create_list_update_show_delete_security_group
-| tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_additional_args
-| tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_icmp_type_code
-| tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_protocol_integer_value
-| tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_remote_group_id
-| tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_remote_ip_prefix
-| tempest.api.network.test_security_groups.SecGroupTest.test_create_show_delete_security_group_rule
-| tempest.api.network.test_security_groups.SecGroupTest.test_list_security_groups
-| tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_additional_default_security_group_fails
-| tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_duplicate_security_group_rule_fails
-| tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_bad_ethertype
-| tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_bad_protocol
-| tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_bad_remote_ip_prefix
-| tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_invalid_ports
-| tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_non_existent_remote_groupid
-| tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_non_existent_security_group
-| tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_delete_non_existent_security_group
-| tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_show_non_existent_security_group
-| tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_show_non_existent_security_group_rule
-
-
----------------------------------
-Test Area VIM Operations - Volume
----------------------------------
-
-Volume attach and detach operations with the Cinder v2 API
-----------------------------------------------------------
-
-| tempest.api.volume.test_volumes_actions.VolumesV2ActionsTest.test_attach_detach_volume_to_instance
-| tempest.api.volume.test_volumes_actions.VolumesV2ActionsTest.test_get_volume_attachment
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_attach_volumes_with_nonexistent_volume_id
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_detach_volumes_with_invalid_volume_id
-
-
-Volume service availability zone operations with the Cinder v2 API
-------------------------------------------------------------------
-
-| tempest.api.volume.test_availability_zone.AvailabilityZoneV2TestJSON.test_get_availability_zone_list
-
-
-Volume cloning operations with the Cinder v2 API
-------------------------------------------------
-
-| tempest.api.volume.test_volumes_get.VolumesV2GetTest.test_volume_create_get_update_delete_as_clone
-
-
-Image copy-to-volume operations with the Cinder v2 API
-------------------------------------------------------
-
-| tempest.api.volume.test_volumes_actions.VolumesV2ActionsTest.test_volume_bootable
-| tempest.api.volume.test_volumes_get.VolumesV2GetTest.test_volume_create_get_update_delete_from_image
-
-
-Volume creation and deletion operations with the Cinder v2 API
---------------------------------------------------------------
-
-| tempest.api.volume.test_volumes_get.VolumesV2GetTest.test_volume_create_get_update_delete
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_create_volume_with_invalid_size
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_create_volume_with_nonexistent_source_volid
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_create_volume_with_nonexistent_volume_type
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_create_volume_without_passing_size
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_create_volume_with_size_negative
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_create_volume_with_size_zero
-
-
-Volume service extension listing operations with the Cinder v2 API
-------------------------------------------------------------------
-
-| tempest.api.volume.test_extensions.ExtensionsV2TestJSON.test_list_extensions
-
-
-Volume GET operations with the Cinder v2 API
---------------------------------------------
-
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_get_invalid_volume_id
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_get_volume_without_passing_volume_id
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_volume_get_nonexistent_volume_id
-
-
-Volume listing operations with the Cinder v2 API
-------------------------------------------------
-
-| tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volume_list
-| tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_by_name
-| tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_details_by_name
-| tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_param_display_name_and_status
-| tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_with_detail_param_display_name_and_status
-| tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_with_detail_param_metadata
-| tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_with_details
-| tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_with_param_metadata
-| tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volumes_list_by_availability_zone
-| tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volumes_list_by_status
-| tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volumes_list_details_by_availability_zone
-| tempest.api.volume.test_volumes_list.VolumesV2ListTestJSON.test_volumes_list_details_by_status
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_list_volumes_detail_with_invalid_status
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_list_volumes_detail_with_nonexistent_name
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_list_volumes_with_invalid_status
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_list_volumes_with_nonexistent_name
-| tempest.api.volume.v2.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_details_pagination
-| tempest.api.volume.v2.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_details_with_multiple_params
-| tempest.api.volume.v2.test_volumes_list.VolumesV2ListTestJSON.test_volume_list_pagination
-
-
-Volume metadata operations with the Cinder v2 API
--------------------------------------------------
-
-| tempest.api.volume.test_volume_metadata.VolumesV2MetadataTest.test_crud_volume_metadata
-| tempest.api.volume.test_volume_metadata.VolumesV2MetadataTest.test_update_volume_metadata_item
-
-
-Verification of read-only status on volumes with the Cinder v2 API
-------------------------------------------------------------------
-
-| tempest.api.volume.test_volumes_actions.VolumesV2ActionsTest.test_volume_readonly_update
-
-
-Volume reservation operations with the Cinder v2 API
-----------------------------------------------------
-
-| tempest.api.volume.test_volumes_actions.VolumesV2ActionsTest.test_reserve_unreserve_volume
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_reserve_volume_with_negative_volume_status
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_reserve_volume_with_nonexistent_volume_id
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_unreserve_volume_with_nonexistent_volume_id
-
-
-Volume snapshot creation/deletion operations with the Cinder v2 API
--------------------------------------------------------------------
-
-| tempest.api.volume.test_snapshot_metadata.SnapshotV2MetadataTestJSON.test_crud_snapshot_metadata
-| tempest.api.volume.test_snapshot_metadata.SnapshotV2MetadataTestJSON.test_update_snapshot_metadata_item
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_create_volume_with_nonexistent_snapshot_id
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_delete_invalid_volume_id
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_delete_volume_without_passing_volume_id
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_volume_delete_nonexistent_volume_id
-| tempest.api.volume.test_volumes_snapshots.VolumesV2SnapshotTestJSON.test_snapshot_create_get_list_update_delete
-| tempest.api.volume.test_volumes_snapshots.VolumesV2SnapshotTestJSON.test_volume_from_snapshot
-| tempest.api.volume.test_volumes_snapshots_list.VolumesV2SnapshotListTestJSON.test_snapshots_list_details_with_params
-| tempest.api.volume.test_volumes_snapshots_list.VolumesV2SnapshotListTestJSON.test_snapshots_list_with_params
-| tempest.api.volume.test_volumes_snapshots_negative.VolumesV2SnapshotNegativeTestJSON.test_create_snapshot_with_nonexistent_volume_id
-| tempest.api.volume.test_volumes_snapshots_negative.VolumesV2SnapshotNegativeTestJSON.test_create_snapshot_without_passing_volume_id
-
-
-Volume update operations with the Cinder v2 API
------------------------------------------------
-
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_update_volume_with_empty_volume_id
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_update_volume_with_invalid_volume_id
-| tempest.api.volume.test_volumes_negative.VolumesV2NegativeTest.test_update_volume_with_nonexistent_volume_id
-
-
----------------------------
-Test Area High Availability
----------------------------
-
-Verify high availability of OpenStack controller services
----------------------------------------------------------
-
-| dovetail.ha.tc001.nova-api_service_down
-| dovetail.ha.tc002.neutron-server_service_down
-| dovetail.ha.tc003.keystone_service_down
-| dovetail.ha.tc004.glance-api_service_down
-| dovetail.ha.tc005.cinder-api_service_down
-| dovetail.ha.tc006.cpu_overload
-| dovetail.ha.tc007.disk_I/O_overload
-| dovetail.ha.tc008.load_balance_service_down
-
-----------------------------------------
-Test Area vPing - Basic VNF Connectivity
-----------------------------------------
-
-| dovetail.vping.tc001.userdata
-| dovetail.vping.tc002.ssh
-
-
-
-Optional OVP Test Areas
-========================
-
-
------------------
-Test Area BGP VPN
------------------
-
-Verify association and dissasocitation of node using route targets
-------------------------------------------------------------------
-
-| dovetail.sdnvpn.tc001.subnet_connectivity
-| dovetail.sdnvpn.tc002.tenant_separation
-| dovetail.sdnvpn.tc004.router_association
-| dovetail.sdnvpn.tc008.router_association_floating_ip
-
---------------------------------------------------
-IPv6 Compliance Testing Methodology and Test Cases
---------------------------------------------------
-
-Test Case 1: Create and Delete an IPv6 Network, Port and Subnet
----------------------------------------------------------------
-
-| tempest.api.network.test_networks.BulkNetworkOpsIpV6Test.test_bulk_create_delete_network
-| tempest.api.network.test_networks.BulkNetworkOpsIpV6Test.test_bulk_create_delete_port
-| tempest.api.network.test_networks.BulkNetworkOpsIpV6Test.test_bulk_create_delete_subnet
-
-Test Case 2: Create, Update and Delete an IPv6 Network and Subnet
------------------------------------------------------------------
-
-| tempest.api.network.test_networks.NetworksIpV6Test.test_create_update_delete_network_subnet
-
-Test Case 3: Check External Network Visibility
-----------------------------------------------
-
-| tempest.api.network.test_networks.NetworksIpV6Test.test_external_network_visibility
-
-Test Case 4: List IPv6 Networks and Subnets of a Tenant
--------------------------------------------------------
-
-| tempest.api.network.test_networks.NetworksIpV6Test.test_list_networks
-| tempest.api.network.test_networks.NetworksIpV6Test.test_list_subnets
-
-Test Case 5: Show Information of an IPv6 Network and Subnet
------------------------------------------------------------
-
-| tempest.api.network.test_networks.NetworksIpV6Test.test_show_network
-| tempest.api.network.test_networks.NetworksIpV6Test.test_show_subnet
-
-Test Case 6: Create an IPv6 Port in Allowed Allocation Pools
-------------------------------------------------------------
-
-| tempest.api.network.test_ports.PortsIpV6TestJSON.test_create_port_in_allowed_allocation_pools
-
-Test Case 7: Create an IPv6 Port without Security Groups
---------------------------------------------------------
-
-| tempest.api.network.test_ports.PortsIpV6TestJSON.test_create_port_with_no_securitygroups
-
-Test Case 8: Create, Update and Delete an IPv6 Port
----------------------------------------------------
-
-| tempest.api.network.test_ports.PortsIpV6TestJSON.test_create_update_delete_port
-
-Test Case 9: List IPv6 Ports of a Tenant
-----------------------------------------
-
-| tempest.api.network.test_ports.PortsIpV6TestJSON.test_list_ports
-
-Test Case 10: Show Information of an IPv6 Port
-----------------------------------------------
-
-| tempest.api.network.test_ports.PortsIpV6TestJSON.test_show_port
-
-Test Case 11: Add Multiple Interfaces for an IPv6 Router
---------------------------------------------------------
-
-| tempest.api.network.test_routers.RoutersIpV6Test.test_add_multiple_router_interfaces
-
-Test Case 12: Add and Remove an IPv6 Router Interface with port_id
-------------------------------------------------------------------
-
-| tempest.api.network.test_routers.RoutersIpV6Test.test_add_remove_router_interface_with_port_id
-
-Test Case 13: Add and Remove an IPv6 Router Interface with subnet_id
---------------------------------------------------------------------
-
-| tempest.api.network.test_routers.RoutersIpV6Test.test_add_remove_router_interface_with_subnet_id
-
-Test Case 14: Create, Update, Delete, List and Show an IPv6 Router
-------------------------------------------------------------------
-
-| tempest.api.network.test_routers.RoutersIpV6Test.test_create_show_list_update_delete_router
-
-Test Case 15: Create, Update, Delete, List and Show an IPv6 Security Group
---------------------------------------------------------------------------
-
-| tempest.api.network.test_security_groups.SecGroupIPv6Test.test_create_list_update_show_delete_security_group
-
-Test Case 16: Create, Delete and Show Security Group Rules
-----------------------------------------------------------
-
-| tempest.api.network.test_security_groups.SecGroupIPv6Test.test_create_show_delete_security_group_rule
-
-Test Case 17: List All Security Groups
---------------------------------------
-
-| tempest.api.network.test_security_groups.SecGroupIPv6Test.test_list_security_groups
-
-Test Case 18: IPv6 Address Assignment - Dual Stack, SLAAC, DHCPv6 Stateless
----------------------------------------------------------------------------
-
-| tempest.scenario.test_network_v6.TestGettingAddress.test_dhcp6_stateless_from_os
-
-Test Case 19: IPv6 Address Assignment - Dual Net, Dual Stack, SLAAC, DHCPv6 Stateless
--------------------------------------------------------------------------------------
-
-| tempest.scenario.test_network_v6.TestGettingAddress.test_dualnet_dhcp6_stateless_from_os
-
-Test Case 20: IPv6 Address Assignment - Multiple Prefixes, Dual Stack, SLAAC, DHCPv6 Stateless
-----------------------------------------------------------------------------------------------
-
-| tempest.scenario.test_network_v6.TestGettingAddress.test_multi_prefix_dhcpv6_stateless
-
-Test Case 21: IPv6 Address Assignment - Dual Net, Multiple Prefixes, Dual Stack, SLAAC, DHCPv6 Stateless
---------------------------------------------------------------------------------------------------------
-
-| tempest.scenario.test_network_v6.TestGettingAddress.test_dualnet_multi_prefix_dhcpv6_stateless
-
-Test Case 22: IPv6 Address Assignment - Dual Stack, SLAAC
----------------------------------------------------------
-
-| tempest.scenario.test_network_v6.TestGettingAddress.test_slaac_from_os
-
-Test Case 23: IPv6 Address Assignment - Dual Net, Dual Stack, SLAAC
--------------------------------------------------------------------
-
-| tempest.scenario.test_network_v6.TestGettingAddress.test_dualnet_slaac_from_os
-
-Test Case 24: IPv6 Address Assignment - Multiple Prefixes, Dual Stack, SLAAC
-----------------------------------------------------------------------------
-
-| tempest.scenario.test_network_v6.TestGettingAddress.test_multi_prefix_slaac
-
-Test Case 25: IPv6 Address Assignment - Dual Net, Dual Stack, Multiple Prefixes, SLAAC
---------------------------------------------------------------------------------------
-
-| tempest.scenario.test_network_v6.TestGettingAddress.test_dualnet_multi_prefix_slaac
-
-------------------------------------------------------------------------
-Filtering Packets Based on Security Rules and Port Security in Data Path
-------------------------------------------------------------------------
-
-| tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_port_security_macspoofing_port
-| tempest.scenario.test_security_groups_basic_ops.TestSecurityGroupsBasicOps.test_cross_tenant_traffic
-| tempest.scenario.test_security_groups_basic_ops.TestSecurityGroupsBasicOps.test_in_tenant_traffic
-| tempest.scenario.test_security_groups_basic_ops.TestSecurityGroupsBasicOps.test_multiple_security_groups
-| tempest.scenario.test_security_groups_basic_ops.TestSecurityGroupsBasicOps.test_port_security_disable_security_group
-| tempest.scenario.test_security_groups_basic_ops.TestSecurityGroupsBasicOps.test_port_update_new_security_group
-
-------------------------------------------------------------
-Dynamic Network Runtime Operations Through the Life of a VNF
-------------------------------------------------------------
-
-| tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_network_basic_ops
-| tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_hotplug_nic
-| tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_subnet_details
-| tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_update_instance_port_admin_state
-| tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_update_router_admin_state
-
-----------------------------------------------------------------
-Correct Behavior after Common Virtual Machine Life Cycles Events
-----------------------------------------------------------------
-
-| tempest.scenario.test_minimum_basic.TestMinimumBasicScenario.test_minimum_basic_scenario
-| tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_cold_migration
-| tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_pause_unpause
-| tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_reboot
-| tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_rebuild
-| tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_resize
-| tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_stop_start
-| tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_suspend_resume
-| tempest.scenario.test_server_advanced_ops.TestServerAdvancedOps.test_server_sequence_suspend_resume
-| tempest.scenario.test_server_advanced_ops.TestServerAdvancedOps.test_resize_volume_backed_server_confirm
-| tempest.scenario.test_shelve_instance.TestShelveInstance.test_shelve_instance
-| tempest.scenario.test_shelve_instance.TestShelveInstance.test_shelve_volume_backed_instance
-
-------------------------------------------------------------
-Simple Virtual Machine Resource Scheduling on Multiple Nodes
-------------------------------------------------------------
-
-| tempest.scenario.test_server_multinode.TestServerMultinode.test_schedule_to_all_nodes
-| tempest.api.compute.servers.test_server_group.ServerGroupTestJSON.test_create_delete_multiple_server_groups_with_same_name_policy
-| tempest.api.compute.servers.test_server_group.ServerGroupTestJSON.test_create_delete_server_group_with_affinity_policy
-| tempest.api.compute.servers.test_server_group.ServerGroupTestJSON.test_create_delete_server_group_with_anti_affinity_policy
-| tempest.api.compute.servers.test_server_group.ServerGroupTestJSON.test_list_server_groups
-| tempest.api.compute.servers.test_server_group.ServerGroupTestJSON.test_show_server_group
-
---------------------------------------------------------
-Forwarding Packets Through Virtual Networks in Data Path
---------------------------------------------------------
-
-| tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_mtu_sized_frames
diff --git a/docs/testing/user/certificationworkflow/ApplicationForm.rst b/docs/testing/user/certificationworkflow/ApplicationForm.rst
index aac9a46e..1aa937b0 100644
--- a/docs/testing/user/certificationworkflow/ApplicationForm.rst
+++ b/docs/testing/user/certificationworkflow/ApplicationForm.rst
@@ -2,9 +2,9 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) OPNFV, Intel Corporation and others.
-=======================================
-OPNFV Verified Program Application Form
-=======================================
+===========================================
+OPNFV Verification Program Application Form
+===========================================
+----------------------------------+--------------------------------------------------------------------------------------------+
diff --git a/docs/testing/user/certificationworkflow/index.rst b/docs/testing/user/certificationworkflow/index.rst
index 1dd99954..bc43e775 100644
--- a/docs/testing/user/certificationworkflow/index.rst
+++ b/docs/testing/user/certificationworkflow/index.rst
@@ -11,54 +11,59 @@ OVP Workflow
Introduction
============
-This document provides guidance for prospective participants on how to obtain 'OPNFV Verified'
-status. The OPNFV Verified Program (OVP) is administered by the OPNFV Compliance and Certification
-(C&C) committee.
+This document provides guidance for prospective participants on how to
+obtain "OPNFV Verified" for products and services. The OPNFV
+Verification Program (OVP) is administered by the LF Networking
+Compliance and Verification (C&V) Committee.
-For further information about the workflow and general inquiries about the
-program, please check out the `OVP web portal`_, or contact
-the C&C committee by email address verified@opnfv.org. This email address should be used
-for all communication with the OVP.
+For further information about the workflow and general inquiries about
+the program, please check out the `OVP landing page`_, or contact the
+C&V committee by email address ovp-support@lfnetworking.org. This email address
+should be used for all communication with the OVP.
Step 1: Participation Form Submission
=====================================
-A participant should start the process by submitting an online participation form. The participation
-form can found on the `OVP web portal`_ or directly at `OVP participation form`_ and the
-following information must be provided:
+A participant should start the process by submitting an online
+participation form. The participation form can be found on the `OVP
+landing page`_. Submitters are asked for the following information:
- Organization name
-- Organization website (if public)
-- Product name and/or identifier
-- Product specifications
-- Product public documentation
-- Product categories, choose one: (i) software and hardware (ii) software
- and third party hardware (please specify)
-- Primary contact name, business email, postal address and phone number
- Only the primary contact email address should be used for
- official communication with OPNFV OVP.
-- User ID for OVP web portal
- The OVP web portal supports the Linux Foundation user ID in the current release.
- If a new user ID is needed, visit https://identity.linuxfoundation.org.
-- Location where the verification testing is to be conducted. Choose one:
- (internal vendor lab, third-party lab)
-- If the test is to be conducted by a third-party lab, please specify
- name and contact information of the third-party lab, including email, address and
- phone number.
-- OVP software version for compliance verification
-- Testing date
-
-Once the participation form information is received and in order, an email response will be
-sent to the primary contact with confirmation and information to proceed. The primary contact
-specified in the participation form will be entered into OVP web portal back-end by the program
-administrator and will be permitted to submit results for review on behalf of their organization.
-
-There is no fee at this time for participation in the OVP.
+- Contact name
+- Contact address
+- Contact telephone
+- Contact email
+- Membership status in LF Networking
+- Qualifying Offerings Information
+- Testing Category
+- Product Category
+- User ID
+- Version of Testing Tools
+- Testing Date
+- Testing Location
+- Third-party lab information (if any)
+- Signature
+
+Once the participation form information is received and in order, an
+email response will be sent to the primary contact with confirmation and
+information to proceed. The primary contact specified in the
+participation form will be entered into the corresponding OVP web portal
+back-end by the program administrator and will be permitted to submit
+results for review on behalf of their organization. The OVP NFVI Portal
+can be found here (https://nfvi-verified.lfnetworking.org) and the OVP
+VNF Portal can be found here (https://vnf-verified.lfnetworking.org).
+
+There is no fee at this time for participation in the OVP for LF
+Networking (LFN) members or non-profit organizations. If not a member of
+LFN, please contact `LF Networking`_ for participation fee information.
Step 2: Testing
===============
-The following documents guide testers to prepare the test environment and run tests:
+NFVI Testing
+------------
+
+The following documents guide testers to prepare the NFVI test environment and run tests:
- :ref:`dovetail-system_preparation_guide`
- :ref:`dovetail-test_case_specification`
@@ -67,23 +72,33 @@ The following documents guide testers to prepare the test environment and run te
A unique Test ID is generated by the Dovetail tool for each test run and can only be
submitted to the OVP web portal once.
+VNF Testing
+-----------
+
+The following document guide testers to prepare the environment and run the VNF tests:
+
+- `Deploy ONAP via OOM <https://logs.onap.org/production/vex-yul-ecomp-jenkins-1/doc-elalto-verify-rtd/214/html/submodules/oom.git/docs/oom_quickstart_guide.html>`_
+- `ONAP VNF Test Specifications <https://docs.onap.org/en/elalto/submodules/vnfrqts/testcases.git/docs/index.html>`_
+- :ref:`dovetail-vnf_testers_guide`
+
Step 3: Submitting Test Results
===============================
Users/testers other than the primary contact may use the OVP web portal as a resource to upload,
evaluate and share results in a private manner. Testers can upload the test results to the
-`OVP web portal`_. By default, the results are visible only to the tester who uploaded the data.
+OVP web portal (either NFVI or VNF). By default, the results are visible only to the tester who uploaded the data.
Testers can self-review the test results through the portal until they are ready to ask
for OVP review. They may also add new test results as needed.
Once the tester is satisfied with the test result, the primary contact grants access to the test
-result for OVP review using a 'submit for review' operation via the portal. The test result is
-identified by the unique Test ID and becomes visible to a review group comprised of OPNFV
-community members.
+result for OVP review using a 'submit for review' operation via the portal. During this step,
+a new window may be appeared to ask primary contact to complete the application form online.
+Then the test result is identified by the unique Test ID and becomes visible to a review group
+comprised of OPNFV community members.
When a test result is made visible to the reviewers, the program administrator will ask for
-volunteers from the review group using the verified@opnfv.org email and CC the primary contact
+volunteers from the review group using the ovp-support@lfnetworking.org email and CC the primary contact
email that a review request has been made. The program administrator will supply the Test ID
and owner field (primary contact user ID) to the reviewers to identify the results.
@@ -97,28 +112,27 @@ organizations will not be part of the reviewers.
The primary contact may be asked via email for any missing information or clarification of the
test results. The reviewers will make a determination and recommend compliance or non-compliance
-to the C&C Committee. A positive review requires a minimum of two approvals from two distinct
-organizations without any negative reviews. The program administrator sends an email to OVP/C&C
+to the C&V Committee. A positive review requires a minimum of two approvals from two distinct
+organizations without any negative reviews. The program administrator sends an email to OVP/C&V
emails announcing a positive review. A one week limit is given for issues to be raised. If no
-issue is raised, the C&C Committee approves the result and the program administrator sends an
-email to OVP/C&C emails stating the result is approved.
+issue is raised, the C&V Committee approves the result and the program administrator sends an
+email to OVP/C&V emails stating the result is approved.
Normally, the outcome of the review should be communicated to the primary contact within 10
business days after all required information is in order.
-If a test result is denied, an appeal can be made to the C&C Committee for arbitration.
+If a test result is denied, an appeal can be made to the C&V Committee for arbitration.
Step 5: Grant of Use of Program Marks
=====================================
If an application is approved, further information will be communicated to the primary contact
-on the guidelines of using OVP Program Marks (including OVP logo) and the status of compliance
-for promotional purposes.
+that includes badges and on the guidelines of using OVP Program Marks for marketing and promotional purposes.
.. toctree::
:maxdepth: 2
.. References
-.. _`OVP web portal`: https://verified.opnfv.org
-.. _`OVP participation form`: https://na3.docusign.net/Member/PowerFormSigning.aspx?PowerFormId=dc24bf38-ea41-40d4-9e58-9babc6eec778
+.. _`OVP landing page`: https://www.lfnetworking.org/ovp/
+.. _`LF Networking`: https://www.lfnetworking.org/membership/join/
diff --git a/docs/testing/user/instructionsperinstaller/testing_instructions_xci.rst b/docs/testing/user/instructionsperinstaller/testing_instructions_xci.rst
new file mode 100644
index 00000000..017441f2
--- /dev/null
+++ b/docs/testing/user/instructionsperinstaller/testing_instructions_xci.rst
@@ -0,0 +1,220 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+.. (c) OPNFV, Huawei Technologies Co.,Ltd and others.
+
+===========================================================
+Conducting OVP Testing with Dovetail using XCI installer
+===========================================================
+
+Overview
+------------------------------
+The purpose of this document is to give tips for the dovetail deployment
+on XCI installer.
+The general structure of the document is remaining according to the user guide
+document and the XCI related tips will be added under of the respective
+chapter's name.
+
+In order to deploy properly the XCI installer the below steps should be followed:
+
+1- The prerequisites of chapter 2.4.1 of XCI User Guide [1] should be applied.
+
+2- If you don’t have one already, generate an SSH key in $HOME/.ssh
+ ssh-keygen -t rsa
+
+3- Clone OPNFV releng-xci repository
+
+4- Change into directory where the sandbox script is located:
+ cd releng-xci/xci
+
+5- Set the sandbox flavor, OPNFV scenario, openstack version, VM size and releng_xci and bifrost versions:
+
+ export INSTALLER_TYPE=osa
+ export XCI_FLAVOR=xxx ,chapter 2.3 of XCI User Guide [1]
+ (e.g. export XCI_FLAVOR=mini)
+ export DEPLOY_SCENARIO=yyy
+ (e.g. export DEPLOY_SCENARIO=os-nosdn-nofeature)
+
+6- Execute the sandbox script
+ ./xci-deploy.sh
+
+Once the deployement is successfully completed, the instructions below should be completed:
+
+1- You should access the OPNFV VM using ssh (ssh root@192.168.122.2)
+
+2- export DEPLOY_SCENARIO=yyy
+
+3- export PDF=/root/releng-xci/xci/var/pdf.yml
+ export IDF=/root/releng-xci/xci/var/idf.yml
+ source openrc
+
+4- Run the following ansible playbook script:
+ ansible-playbook -i releng-xci/xci/playbooks/dynamic_inventory.py releng-xci/xci/playbooks/prepare-tests.yml
+
+5- Run the following bash script:
+ ./prepare-tests.sh
+
+
+
+[1] https://docs.opnfv.org/en/latest/infrastructure/xci.html
+
+
+Installing Dovetail
+--------------------
+
+
+Checking the Test Host Readiness
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+
+Installing Prerequisite Packages on the Test Host
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+
+Configuring the Test Host Environment
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In order to run the test scenarios properly and having access to all OS components
+that each scenario needs, the undercloud credentials should be used and copied in the
+docker container along with ssh key.
+
+The environment preparation should be applied on the Test Host environment.
+Therefore, the containers which are going to be used as part of this configuration,
+fetch the information, the files and the rest input from Test Host environment directly
+as part of the Docker command.
+
+Setting up Primary Configuration File
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Two new environment variables could be introduced in the ``env_config.sh`` file.
+
+
+.. code-block:: bash
+
+ # For XCI installer the following environment parameters should be added in
+ # this file. Otherwise, those parameters could be ignored.
+ export INSTALLER_TYPE=osa
+ export DEPLOY_SCENARIO=os-nosdn-nofeature
+ export XCI_FLAVOR=noha
+
+The OS_PASSWORD and the rest credential details could be retrieved directly by openrc file in the OPNFV VM.
+
+Configuration for Running Tempest Test Cases (Mandatory)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In order for Tempest Test cases to run properly, the ``$DOVETAIL_HOME/pre_config/tempest_conf.yaml``
+file should be updated, introducing the following configuration.
+
+service_available:
+ cinder: True
+
+Configuration for Running HA Test Cases (Mandatory)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Below is a sample of ``${DOVETAIL_HOME}/pre_config/pod.yaml`` file with
+the required syntax when key_filename is used instead of password is employed
+by the controller.
+Moreover, the 'heat-admin' should be used as user.
+
+.. code-block:: bash
+
+ nodes:
+ -
+ # This can not be changed and must be node0.
+ name: node0
+
+ # This must be Jumpserver.
+ role: Jumpserver
+
+ # This is the instance IP of a node which has ipmitool installed.
+ ip: xx.xx.xx.xx
+
+ # User name of the user of this node. This user **must** have sudo privileges.
+ user: root
+
+ # Password of the user.
+ #password: root
+ key_filename: /root/.ssh/id_rsa
+
+ -
+ # This can not be changed and must be node1.
+ name: node1
+
+ # This must be controller.
+ role: Controller
+
+ # This is the instance IP of a controller node, which is the haproxy primary node
+ ip: xx.xx.xx.xx
+
+ # User name of the user of this node. This user **must** have sudo privileges.
+ user: root
+
+ # Password of the user.
+ #password: root
+ key_filename: /root/.ssh/id_rsa
+
+ -
+ # This can not be changed and must be node2.
+ name: node2
+
+ # This must be Compute.
+ role: Compute
+
+ # This is the instance IP of a controller node, which is the haproxy primary node
+ ip: xx.xx.xx.xx
+
+ # User name of the user of this node. This user **must** have sudo privileges.
+ user: root
+
+ # Password of the user.
+ #password: root
+ key_filename: /root/.ssh/id_rsa
+
+ process_info:
+ -
+ # The default attack process of yardstick.ha.rabbitmq is 'rabbitmq-server'.
+ # Here can be reset to 'rabbitmq'.
+ testcase_name: yardstick.ha.rabbitmq
+ attack_process: rabbitmq
+
+ -
+ # The default attack host for all HA test cases is 'node1'.
+ # Here can be reset to any other node given in the section 'nodes'.
+ testcase_name: yardstick.ha.glance_api
+ attack_host: node2
+
+Note: The definition for each active controller and computer should be done in this file.
+
+Configuration of Hosts File (Optional)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+
+Installing Dovetail on the Test Host
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+
+Online Test Host
+""""""""""""""""
+
+
+Offline Test Host
+"""""""""""""""""
+
+
+Starting Dovetail Docker
+------------------------
+
+
+Running the OVP Test Suite
+----------------------------
+
+
+Making Sense of OVP Test Results
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+
+OVP Portal Web Interface
+------------------------
+
+
+Updating Dovetail or a Test Suite
+---------------------------------
diff --git a/docs/testing/user/ovpaddendum/exemption-strict-API-validation.rst b/docs/testing/user/ovpaddendum/exemption-strict-API-validation.rst
index aaac6c4c..27173ed9 100644
--- a/docs/testing/user/ovpaddendum/exemption-strict-API-validation.rst
+++ b/docs/testing/user/ovpaddendum/exemption-strict-API-validation.rst
@@ -30,7 +30,7 @@ Consequently, such cloud implementations do not pass Tempest tests which
validate API responses despite actually implementing and providing the tested
functionality.
-This document describes an exemption process for use within the OPNFV Verified
+This document describes an exemption process for use within the OPNFV Verification
Program which
i) allows vendors to pass Tempest tests if the tested functionality is
@@ -63,7 +63,7 @@ is actually available. As a result, a Tempest test failing due to extended API
responses does not provide information about whether the tested functionality
is available or not.
-The OPNFV Verified Program has inherited the policy to strictly validate API
+The OPNFV Verification Program has inherited the policy to strictly validate API
responses from OpenStack by including a selection of Tempest tests in its
compliance test suite. However, it was never discussed if OVP should adopt this
policy as well. It turns out that this policy causes challenges for vendors of
@@ -168,18 +168,18 @@ responses is as follows:
not.
#. The exemption will be made available to participants of OVP as part of a
- service release of OVP 2018.01 and 2018.09.
+ service release of OVP 2018.01, 2018.09 and 2019.12.
#. The C&C committee will monitor the situation around exemptions and may
decide changes to the above process at any time, including the possibility
to stop issuing exemptions.
-.. [1] https://review.openstack.org/#/c/156130/
+.. [1] https://review.opendev.org/gitweb?p=openstack%2Ftempest.git;a=commitdiff;h=f0c30bc241e5160e3fe7402e738ea8f56a8b1315
.. [2] https://github.com/openstack/tempest/tree/master/tempest/lib/api_schema/response/compute
-.. [3] https://developer.openstack.org/api-ref/compute/#show-server-details
+.. [3] https://docs.openstack.org/api-ref/compute/#show-server-details
.. [4] https://wiki.openstack.org/wiki/Governance/InteropWG
-.. [5] https://refstack.openstack.org/
+.. [5] https://www.openstack.org/brand/interop/
.. [6] http://lists.openstack.org/pipermail/openstack-dev/2016-June/097349.html
-.. [7] https://review.openstack.org/#/c/333067/
-.. [8] https://review.openstack.org/#/c/512447/
+.. [7] https://review.opendev.org/gitweb?p=openstack%2Finterop.git;a=commitdiff;h=c38e18b343505f16a74a97b748362fa7f1a01e57
+.. [8] https://review.opendev.org/gitweb?p=openstack%2Finterop.git;a=commitdiff;h=5748c296a658cf5efebc16ad9d7644ca1125b073
diff --git a/docs/testing/user/ovpaddendum/index.rst b/docs/testing/user/ovpaddendum/index.rst
index 5e4c0a67..811c2bcd 100644
--- a/docs/testing/user/ovpaddendum/index.rst
+++ b/docs/testing/user/ovpaddendum/index.rst
@@ -3,8 +3,10 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) Intel and others
+.. _dovetail-ovp-addendum:
+
=======================================
-Guidelines Addendum for 2018.09 release
+Guidelines Addendum for 2019.12 release
=======================================
.. toctree::
@@ -15,12 +17,12 @@ Introduction
============
This addendum provides a high-level description of the testing scope and
-pass/fail criteria used in the OPNFV Verified Program (OVP) for the 2018.09
+pass/fail criteria used in the OPNFV Verification Program (OVP) for the 2019.12
release. This information is intended as an overview for OVP testers and for
the Dovetail Project to help guide test-tool and test-case development for the
-OVP 2018.09 release. The Dovetail project is responsible for documenting
+OVP 2019.12 release. The Dovetail project is responsible for documenting
test-case specifications as well as implementing the OVP tool-chain through
-collaboration with the OPNFV testing community. OVP testing focuses on
+collaboration with the OPNFV and ONAP testing communities. OVP testing focuses on
establishing the ability of the System Under Test (SUT) to perform NFVI and VIM
operations and support Service Provider oriented features that ensure
manageable, resilient and secure networks.
@@ -29,15 +31,15 @@ manageable, resilient and secure networks.
Meaning of Compliance
=====================
-OPNFV Compliance indicates adherence of an NFV platform to behaviors defined
-through specific platform capabilities, allowing to prepare, instantiate,
-operate and remove VNFs running on the NFVI. OVP 2018.09 compliance evaluates
+OPNFV Compliance indicates adherence of an NFV platform and VNF to behaviors
+defined through specific platform capabilities, allowing to prepare, instantiate,
+operate and remove VNFs running on the NFVI. OVP 2019.12 compliance evaluates
the ability of a platform to support Service Provider network capabilities and
-workloads that are supported in the OPNFV platform as of this release.
-Compliance test cases are designated as compulsory or optional based on the
-maturity of OPNFV capabilities as well as industry expectations. Compulsory
-test cases may for example include NFVI management capabilities whereas tests
-for certain high-availability features may be deemed as optional.
+workloads that are supported in the OPNFV and ONAP platforms as of this release.
+Test cases are designated as compulsory or optional based on the maturity
+of capabilities as well as industry expectations. Compulsory test cases may for
+example include NFVI management capabilities whereas tests for certain
+high-availability features may be deemed as optional.
Test coverage and pass/fail criteria are designed to ensure an acceptable level
of compliance but not be so restrictive as to disqualify variations in platform
@@ -47,14 +49,24 @@ implementations, capabilities and features.
SUT Assumptions
===============
-Assumptions about the System Under Test (SUT) include ...
+Assumptions about the NFVI System Under Test (SUT) for the OVP Infrastructure
+badge include ...
- The minimal specification of physical infrastructure, including controller
- nodes, compute nodes and networks, is defined by the `Pharos specification`_.
+ nodes, compute nodes and networks, is defined for the NFVI by the
+ `Pharos specification`_.
- The SUT is fully deployed and operational, i.e. SUT deployment tools are
out of scope of testing.
+Assumptions about the VNF System Under Test (SUT) for the OVP VNF
+badge include ...
+
+- The VNF templates and disk image(s) file are available, and the disk image(s)
+ have been deployed to the ONAP Cloud Site.
+
+- The required value for the VNF pre-load files are available for the selected
+ ONAP Cloud Site.
Scope of Testing
================
@@ -64,7 +76,7 @@ outlines the key objectives of the OVP as follows:
- Help build the market for
- - OPNFV based infrastructure
+ - LFN based infrastructure
- applications designed to run on that infrastructure
@@ -76,8 +88,8 @@ outlines the key objectives of the OVP as follows:
- Enhance interoperability
The guidelines further directs the scope to be constrained to "features,
-capabilities, components, and interfaces included in an OPNFV release that are
-generally available in the industry (e.g., through adoption by an upstream
+capabilities, components, and interfaces included in an OPNFV and ONAP releases
+that are generally available in the industry (e.g., through adoption by an upstream
community)", and that compliance verification is evaluated using "functional
tests that focus on defined interfaces and/or behaviors without regard to the
implementation of the underlying system under test".
@@ -92,6 +104,13 @@ also out of scope or for further study. Newer functional areas such as MANO
(outside of APIs in the NFVI and VIM) are still developing and are for future
considerations.
+ONAP provides a comprehensive platform for real-time, policy-driven orchestration
+and automation of physical and virtual network functions that will enable software,
+network, IT and cloud providers and developers to rapidly automate new services and
+support complete lifecycle management. By unifying member resources, ONAP is
+accelerating the development of a vibrant ecosystem around a globally shared
+architecture and implementation for network automation–with an open standards focus–
+faster than any one product could on its own.
General Approach
----------------
@@ -137,7 +156,7 @@ test scope.
Analysis of Scope
-----------------
-In order to define the scope of the 2018.09 release of the compliance and
+In order to define the scope of the 2019.12 release of the compliance and
verification program, this section analyzes NFV-focused platform capabilities
with respect to the high-level objectives and the general approach outlined in
the previous section. The analysis determines which capabilities are suitable
@@ -169,8 +188,8 @@ including:
suspend/resume, reboot, migrate)
- simple virtual machine resource scheduling on multiple nodes
-OPNFV mainly supports OpenStack as the VIM up to the 2018.09 release. The VNFs
-used in the OVP program, and features in scope for the program which are
+OPNFV mainly supports OpenStack as the VIM up to the 2019.12 release. The VNFs
+used in the OVP NFVI program, and features in scope for the program which are
considered to be basic to all VNFs, require commercial OpenStack distributions
to support a common basic level of cloud capabilities, and to be compliant to a
common specification for these capabilities. This requirement significantly
@@ -198,7 +217,7 @@ feature requirements expand beyond common OpenStack (or other VIM)
requirements. OPNFV OVP will incorporate test cases to verify compliance in
these areas as they become mature. Because these extensions may impose new API
demands, maturity and industry adoption is a prerequisite for making them a
-mandatory requirement for OPNFV compliance. At the time of the 2018.09 release,
+mandatory requirement for OPNFV compliance. At the time of the 2019.12 release,
we have promoted tests of the OpenStack IPv6 API from optional to mandatory
while keeping BGPVPN as optional test area. Passing optional tests will not be
required to pass OPNFV compliance verification.
@@ -207,7 +226,7 @@ BGPVPNs are relevant due to the wide adoption of MPLS/BGP based VPNs in wide
area networks, which makes it necessary for data centers hosting VNFs to be
able to seamlessly interconnect with such networks. SFC is also an important
NFV requirement, however its implementation has not yet been accepted or
-adopted in the upstream at the time of the 2018.09 release.
+adopted in the upstream at the time of the 2019.12 release.
3. High availability
@@ -233,7 +252,7 @@ Resiliency testing involves stressing the SUT and verifying its ability to
absorb stress conditions and still provide an acceptable level of service.
Resiliency is an important requirement for end-users.
-The 2018.09 release of OVP includes a load test which spins up a number of VMs
+The 2019.12 release of OVP includes a load test which spins up a number of VMs
pairs in parallel to assert that the system under test can process the workload
spike in a stable and deterministic fashion.
@@ -248,12 +267,12 @@ capabilities expected of an end-user deployment. It is an area that we should
address in the near future, to define a common set of requirements and develop
test cases for verifying those requirements.
-The 2018.09 release includes new test cases which verify that the role-based
+The 2019.12 release includes new test cases which verify that the role-based
access control (RBAC) functionality of the VIM is behaving as expected.
Another common requirement is security vulnerability scanning. While the OPNFV
security project integrated tools for security vulnerability scanning, this has
-not been fully analyzed or exercised in 2018.09 release. This area needs
+not been fully analyzed or exercised in 2019.12 release. This area needs
further work to identify the required level of security for the purpose of
OPNFV in order to be integrated into the OVP. End-user inputs on specific
requirements in security is needed.
@@ -266,7 +285,7 @@ essential information and control mechanisms. These subsystems include
telemetry, fault management (e.g. alarms), performance management, audits, and
control mechanisms such as security and configuration policies.
-The current 2018.09 release implements some enabling capabilities in NFVI/VIM
+The current 2019.12 release implements some enabling capabilities in NFVI/VIM
such as telemetry, policy, and fault management. However, the specification of
expected system components, behavior and the test cases to verify them have not
yet been adequately developed. We will therefore not be testing this area at
@@ -285,10 +304,10 @@ compliance because it validates design patterns and support for the types of
NFVI features that users care about.
There are a lot of projects in OPNFV developing use cases and sample VNFs. The
-2018.09 release of OVP features two such use-case tests, spawning and verifying
+2019.12 release of OVP features two such use-case tests, spawning and verifying
a vIMS and a vEPC, correspondingly.
-8. Additional capabilities
+8. Additional NFVI capabilities
In addition to the capabilities analyzed above, there are further system
aspects which are of importance for the OVP. These comprise operational and
@@ -305,15 +324,24 @@ considered widely available in commercial systems in order to include them in
the OVP. Hence, these aspects are left for inclusion in future releases of the
OVP.
+9. VNF Compliance
+
+VNF Compliance verifies the VNF template files conform to the requirements documented
+in by ONAP VNFRQTS project.
+10. VNF Validation
-Scope of the 2018.09 release of the OVP
+VNF Validation verifies the VNF is able to onbroad within ONAP and ONAP is able to
+perform basic orchestration operations with the VNF, including instantiating the
+VNF on the Cloud Site.
+
+Scope of the 2019.12 release of the OVP
---------------------------------------
-Summarizing the results of the analysis above, the scope of the 2018.09 release
+Summarizing the results of the analysis above, the scope of the 2019.12 release
of OVP is as follows:
-- Mandatory test scope:
+- Mandatory NFVI test scope:
- functest.vping.userdata
- functest.vping.ssh
@@ -338,7 +366,7 @@ of OVP is as follows:
- yardstick.ha.database
- bottlenecks.stress.ping
-- Optional test scope:
+- Optional NFVI test scope:
- functest.tempest.ipv6_scenario
- functest.tempest.multi_node_scheduling
@@ -346,15 +374,14 @@ of OVP is as follows:
- functest.tempest.vm_lifecycle
- functest.tempest.network_scenario
- functest.tempest.bgpvpn
- - functest.bgpvpn.subnet_connectivity
- - functest.bgpvpn.tenant_separation
- - functest.bgpvpn.router_association
- - functest.bgpvpn.router_association_floating_ip
- yardstick.ha.neutron_l3_agent
- yardstick.ha.controller_restart
- functest.vnf.vims
- functest.vnf.vepc
- - functest.snaps.smoke
+
+- Mandatory VNF test scope:
+
+ - Refer to `ONAP VNF Test Case Descriptions <https://docs.onap.org/en/elalto/submodules/vnfrqts/testcases.git/docs/index.html>`_
\* The OPNFV OVP utilizes the same set of test cases as the OpenStack
interoperability program *OpenStack Powered Compute*. Passing the OPNFV OVP
@@ -374,7 +401,7 @@ Scope considerations for future OVP releases
--------------------------------------------
Based on the previous analysis, the following items are outside the scope of
-the 2018.09 release of OVP but are being considered for inclusion in future
+the 2019.12 release of OVP but are being considered for inclusion in future
releases:
- service assurance
diff --git a/docs/testing/user/reviewerguide/images/ovp_log_setup.png b/docs/testing/user/reviewerguide/images/ovp_log_setup.png
index 4a68d9b6..f53b94d9 100644
--- a/docs/testing/user/reviewerguide/images/ovp_log_setup.png
+++ b/docs/testing/user/reviewerguide/images/ovp_log_setup.png
Binary files differ
diff --git a/docs/testing/user/reviewerguide/images/ovp_pass_fraction.png b/docs/testing/user/reviewerguide/images/ovp_pass_fraction.png
index 94dcd45a..30672e02 100644
--- a/docs/testing/user/reviewerguide/images/ovp_pass_fraction.png
+++ b/docs/testing/user/reviewerguide/images/ovp_pass_fraction.png
Binary files differ
diff --git a/docs/testing/user/reviewerguide/images/ovp_pass_percentage.png b/docs/testing/user/reviewerguide/images/ovp_pass_percentage.png
index 0d477a78..1a61f7b4 100644
--- a/docs/testing/user/reviewerguide/images/ovp_pass_percentage.png
+++ b/docs/testing/user/reviewerguide/images/ovp_pass_percentage.png
Binary files differ
diff --git a/docs/testing/user/reviewerguide/images/ovp_result_overview.png b/docs/testing/user/reviewerguide/images/ovp_result_overview.png
deleted file mode 100644
index 1f66a69c..00000000
--- a/docs/testing/user/reviewerguide/images/ovp_result_overview.png
+++ /dev/null
Binary files differ
diff --git a/docs/testing/user/reviewerguide/images/ovp_result_review.png b/docs/testing/user/reviewerguide/images/ovp_result_review.png
index 427127e0..56633447 100644
--- a/docs/testing/user/reviewerguide/images/ovp_result_review.png
+++ b/docs/testing/user/reviewerguide/images/ovp_result_review.png
Binary files differ
diff --git a/docs/testing/user/reviewerguide/images/ovp_top_nav.png b/docs/testing/user/reviewerguide/images/ovp_top_nav.png
index 3dfc0b09..a1c261f8 100644
--- a/docs/testing/user/reviewerguide/images/ovp_top_nav.png
+++ b/docs/testing/user/reviewerguide/images/ovp_top_nav.png
Binary files differ
diff --git a/docs/testing/user/reviewerguide/images/review_status.png b/docs/testing/user/reviewerguide/images/review_status.png
new file mode 100644
index 00000000..911b06fd
--- /dev/null
+++ b/docs/testing/user/reviewerguide/images/review_status.png
Binary files differ
diff --git a/docs/testing/user/reviewerguide/images/sut_info.png b/docs/testing/user/reviewerguide/images/sut_info.png
index 53c3d51a..29c249b2 100644
--- a/docs/testing/user/reviewerguide/images/sut_info.png
+++ b/docs/testing/user/reviewerguide/images/sut_info.png
Binary files differ
diff --git a/docs/testing/user/reviewerguide/index.rst b/docs/testing/user/reviewerguide/index.rst
index 99585581..f08ae784 100644
--- a/docs/testing/user/reviewerguide/index.rst
+++ b/docs/testing/user/reviewerguide/index.rst
@@ -2,9 +2,9 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) Ericsson AB
-=============================================
+==================
OVP Reviewer Guide
-=============================================
+==================
.. toctree::
:maxdepth: 2
@@ -16,120 +16,110 @@ Introduction
This document provides detailed guidance for reviewers on how to handle the result review
process.
-The OPNFV Verified program (OVP) provides the ability for users to upload test results in
-`OVP portal <https://verified.opnfv.org>`_ and request from OVP community to review them.
-After the user submit for review the test results **Status** is changed from 'private' to 'review'
-(as shown in figure 2).
+The OPNFV Verification Program (OVP) provides the ability for users to upload test results in
+`OVP portal <https://nfvi-verified.lfnetworking.org>`_ and request from OVP community to review them.
-OVP administrator will ask for review volunteers using the verified@opnfv.org email alias.
+OVP administrator will ask for review volunteers using the ovp-support@lfnetworking.org email alias.
The incoming results for review will be identified by the administrator with particular **Test ID**
and **Owner** values.
Volunteers that will accept the review request can access the test results by login to the
-`OVP portal <https://verified.opnfv.org>`_ and the click on the **My Results** tab in top-level
-navigation bar.
+`OVP portal <https://nfvi-verified.lfnetworking.org>`_ and then click on the **Incoming Reviews**
+tab in top-level navigation bar.
.. image:: images/ovp_top_nav.png
:align: center
:scale: 100%
-Figure 1
-The corresponding OVP portal result will have a status of 'review'.
+After the user submit for review the test results **Status** is changed from 'private' to 'review'.
+Reviewers can find that the corresponding OVP portal result will have a status of 'review'.
+Also there are **Application** information list here for review. All the application information
+is submitted by users at the same time they submit their results for review. Reviewers can also
+find who has already approve/not approve the test results by clicking on the **View Reviews**.
.. image:: images/ovp_result_review.png
:align: center
:scale: 100%
-Figure 2
Reviewers must follow the checklist below to ensure review consistency for the OPNFV
-Verified Program (OVP) 2018.09 (Fraser) release at a minimum.
+Verification Program (OVP) 2019.12 (Hunter) release at a minimum.
-#. **Mandatory Test Area Results** - Validate that results for all mandatory test areas are present.
-#. **Test-Case Pass Percentage** - Ensure all tests have passed (100% pass rate).
-#. **Log File Verification** - Inspect the log file for each test area.
+#. **Test Case Pass Percentage** - Ensure all mandatory tests have passed (100% pass rate).
+#. **Mandatory Test Case Results** - Validate that results for all mandatory test cases are present.
+#. **Log File Verification** - Inspect the log file for each test case.
#. **SUT Info Verification** - Validate the system under test (SUT) hardware and software endpoint info is present.
+Test Case Pass Percentage
+=========================
-1. Mandatory Test Area Results
-==============================
+All mandatory test cases have to run successfully. The below figure of the **Test Run Results**
+is one method and shows that 96.71% of the mandatory test cases have passed.
+This value must not be lower than 100%.
+
+.. image:: images/ovp_pass_percentage.png
+ :align: center
+ :width: 350 px
-Test results can be displayed by clicking on the hyperlink under the 'Test ID' column.
-User should validate that results for all mandatory test areas are included in the overall test suite. The required
-mandatory test cases are:
-- functest.vping.userdata
-- functest.vping.ssh
+Mandatory Test Case Results
+===========================
+
+Test results can be displayed by clicking on the hyperlink under the **Test ID** column.
+Reviewers should validate that results for all mandatory test cases are included in the overall
+test suite. The required mandatory test cases are:
+
- bottlenecks.stress.ping
-- functest.tempest.osinterop
+- functest.security.patrole
- functest.tempest.compute
- functest.tempest.identity_v3
- functest.tempest.image
+- functest.tempest.ipv6_api
- functest.tempest.network_api
-- functest.tempest.volume
- functest.tempest.neutron_trunk_ports
-- functest.tempest.ipv6_api
-- functest.security.patrole
-- yardstick.ha.nova_api
-- yardstick.ha.neutron_server
-- yardstick.ha.keystone
-- yardstick.ha.glance_api
+- functest.tempest.osinterop
+- functest.tempest.volume
+- functest.vping.ssh
+- functest.vping.userdata
- yardstick.ha.cinder_api
- yardstick.ha.cpu_load
+- yardstick.ha.database
- yardstick.ha.disk_load
+- yardstick.ha.glance_api
- yardstick.ha.haproxy
+- yardstick.ha.keystone
+- yardstick.ha.neutron_server
+- yardstick.ha.nova_api
- yardstick.ha.rabbitmq
-- yardstick.ha.database
*Note, that the 'Test ID' column in this view condenses the UUID used for 'Test ID' to
eight characters even though the 'Test ID' is a longer UUID in the back-end.*
-.. image:: images/ovp_result_overview.png
- :align: center
- :scale: 100%
-
-Figure 3
-
-2. Test-Case Pass Percentage
-============================
+Failed test cases can be easy identified by the color of pass/total number:
-All mandatory test-cases have to run successfully. The below diagram of the 'Test Run Results' is one method and
-shows that 98.15% of the mandatory test-cases have passed.
-This value must not be lower than 100%.
-
-.. image:: images/ovp_pass_percentage.png
- :align: center
- :width: 350 px
-
-Figure 4
-
-Failed test cases can also be easy identified by the color of pass/total number. :
-
-- Green when all test-cases pass
-- Orange when at least one fails
-- Red when all test-cases fail
+- **Green** when all test cases pass
+- **Orange** when at least one fails/skips
+- **Red** when all test cases fail/skip
.. image:: images/ovp_pass_fraction.png
:align: center
:width: 350 px
-Figure 5
-3. Log File Verification
-========================
+Log File Verification
+=====================
Each log file of the mandatory test cases have to be verified for content.
Log files can be displayed by clicking on the setup icon to the right of the results,
-as shown in figure below.
+as shown in the figure below.
.. image:: images/ovp_log_setup.png
:align: center
:scale: 100%
-Figure 6
*Note, all log files can be found at results/ directory as shown at the following table.*
@@ -148,37 +138,46 @@ Figure 6
+------------------------+--------------------------+
-The bottlenecks log must contain the 'SUCCESS' result as shown in following example:
+Bottlenecks Logs
+----------------
- 2018-08-22 14:11:21,815 [INFO] yardstick.benchmark.core.task task.py:127 Testcase: "ping_bottlenecks" **SUCCESS**!!!
+It must contain the 'SUCCESS' result at the end of Bottlenecks log as shown in following example:
-Functest logs opens an html page that lists all test cases as shown in figure 7. All test cases must have run
-successfuly.
+ 2019-12-03 07:35:14,630 [INFO] yardstick.benchmark.core.task task.py:129 Testcase: "ping_bottlenecks" SUCCESS!!!
-.. image:: images/ovp_log_files_functest_image.png
- :align: center
- :scale: 100%
-Figure 7
+Functest Logs
+-------------
-For the vping test area log file (functest.log). The two entries displayed in the tables below must be present in
-this log file.
+There are 2 different types of Functest logs, one is plain text for **vping** test cases and the other
+is html file for **tempest** and **security** test cases.
-**functest.vping_userdata**
+For **vping** test cases, two entries displayed in the tables below must be present in log files.
+
+**functest.vping.ssh**
.. image:: images/ovp_vping_ssh.png
:align: center
:scale: 100%
-Figure 8
-**functest.vping_ssh**
+**functest.vping.userdata**
.. image:: images/ovp_vping_user.png
:align: center
:scale: 100%
-Figure 9
+
+For **tempest** and **security** test cases, it opens an html page that lists all test cases as shown
+below. All test cases must have run successfully.
+
+.. image:: images/ovp_log_files_functest_image.png
+ :align: center
+ :scale: 100%
+
+
+Yardstick Logs
+--------------
The yardstick log must contain the 'SUCCESS' result for each of the test-cases within this
test area. This can be verified by searching the log for the keyword 'SUCCESS'.
@@ -190,29 +189,39 @@ An example of a FAILED and a SUCCESS test case are listed below:
2018-08-28 10:23:41,907 [INFO] yardstick.benchmark.core.task task.py:127 Testcase: "opnfv_yardstick_tc052" **SUCCESS**!!!
-4. SUT Info Verification
-========================
+SUT Info Verification
+=====================
SUT information must be present in the results to validate that all required endpoint services
and at least two controllers were present during test execution. For the results shown below,
-click the '**info**' hyperlink in the **SUT** column to navigate to the SUT information page.
+click the **info** hyperlink in the **SUT** column to navigate to the SUT information page.
.. image:: images/sut_info.png
:align: center
:scale: 100%
-Figure 10
-In the '**Endpoints**' listing shown below for the SUT VIM component, ensure that services are
+In the **Endpoints** listing shown below for the SUT VIM component, ensure that services are
present for identify, compute, image, volume and network at a minimum by inspecting the
-'**Service Type**' column.
+**Service Type** column.
.. image:: images/sut_endpoints.png
:align: center
:scale: 100%
-Figure 11
-Inspect the '**Hosts**' listing found below the Endpoints secion of the SUT info page and ensure
+Inspect the **Hosts** listing found below the Endpoints secion of the SUT info page and ensure
at least two hosts are present, as two controllers are required the for the mandatory HA
-test-cases.
+test cases.
+
+
+Approve or Not Approve Results
+==============================
+
+When you decide to approve or not approve this test, you can click the **Operation** and choose
+**approve** or **not approve**. Once you have approved or not approved the test, you can click
+**View Reviews** to find the review status as shown below.
+
+.. image:: images/review_status.png
+ :align: center
+ :scale: 100%
diff --git a/docs/testing/user/systempreparation/index.rst b/docs/testing/user/systempreparation/index.rst
index 5bc150a3..71916736 100644
--- a/docs/testing/user/systempreparation/index.rst
+++ b/docs/testing/user/systempreparation/index.rst
@@ -5,9 +5,9 @@
.. _dovetail-system_preparation_guide:
-============================
-OVP System Preparation Guide
-============================
+=================================
+OVP NFVI System Preparation Guide
+=================================
This document provides a general guide to hardware system prerequisites
and expectations for running OPNFV OVP testing. For detailed guide of
diff --git a/docs/testing/user/testspecification/highavailability/index.rst b/docs/testing/user/testspecification/highavailability/index.rst
index dd98ba94..e489894f 100644
--- a/docs/testing/user/testspecification/highavailability/index.rst
+++ b/docs/testing/user/testspecification/highavailability/index.rst
@@ -31,7 +31,7 @@ This test area references the following specifications:
- ETSI GS NFV-REL 001
- - http://www.etsi.org/deliver/etsi_gs/NFV-REL/001_099/001/01.01.01_60/gs_nfv-rel001v010101p.pdf
+ - https://www.etsi.org/deliver/etsi_gs/NFV-REL/001_099/001/01.01.01_60/gs_nfv-rel001v010101p.pdf
- OpenStack High Availability Guide
diff --git a/docs/testing/user/testspecification/index.rst b/docs/testing/user/testspecification/index.rst
index 9fff4689..69df2a01 100644
--- a/docs/testing/user/testspecification/index.rst
+++ b/docs/testing/user/testspecification/index.rst
@@ -29,7 +29,7 @@ specification documents.
highavailability/index
security_patrole/index
- snaps_smoke/index
+ security_patrole_vxlan_dependent/index
stress/index
tempest_compute/index
tempest_identity_v3/index
diff --git a/docs/testing/user/testspecification/security_patrole/index.rst b/docs/testing/user/testspecification/security_patrole/index.rst
index 189c248d..250d8bbd 100644
--- a/docs/testing/user/testspecification/security_patrole/index.rst
+++ b/docs/testing/user/testspecification/security_patrole/index.rst
@@ -17,13 +17,12 @@ The test area specifically validates services image and networking.
References
================
-- `OpenStack image service API reference <https://developer.openstack.org/api-ref/image/v2/index.html>`_
-- `OpenStack metadata definitions service API reference <https://developer.openstack.org/api-ref/image/v2/metadefs-index.html>`_
-- `OpenStack layer 2 networking service API reference <https://developer.openstack.org/api-ref/network/v2/index.html#layer-2-networking>`_
-- `OpenStack layer 3 networking service API reference <https://developer.openstack.org/api-ref/network/v2/index.html#layer-3-networking>`_
-- `OpenStack network security API reference <https://developer.openstack.org/api-ref/network/v2/index.html#security>`_
-- `OpenStack resource management API reference <https://developer.openstack.org/api-ref/network/v2/index.html#resource-management>`_
-- `OpenStack networking agents API reference <https://developer.openstack.org/api-ref/network/v2/index.html#networking-agents>`_
+- `OpenStack image service API reference <https://docs.openstack.org/api-ref/image/v2/index.html>`_
+- `OpenStack metadata definitions service API reference <https://docs.openstack.org/api-ref/image/v2/metadefs-index.html>`_
+- `OpenStack layer 2 networking service API reference <https://docs.openstack.org/api-ref/network/v2/index.html#layer-2-networking>`_
+- `OpenStack layer 3 networking service API reference <https://docs.openstack.org/api-ref/network/v2/index.html#layer-3-networking>`_
+- `OpenStack network security API reference <https://docs.openstack.org/api-ref/network/v2/index.html#security>`_
+- `OpenStack resource management API reference <https://docs.openstack.org/api-ref/network/v2/index.html#resource-management>`_
System Under Test (SUT)
@@ -46,7 +45,7 @@ by the tests, review the Python source code accessible via the following links.
These tests cover the RBAC tests of image basic operations.
Implementation:
-`BasicOperationsImagesRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/image/test_images_rbac.py>`_
+`BasicOperationsImagesRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/image/test_images_rbac.py>`_
- patrole_tempest_plugin.tests.api.image.test_images_rbac.BasicOperationsImagesRbacTest.test_create_image
- patrole_tempest_plugin.tests.api.image.test_images_rbac.BasicOperationsImagesRbacTest.test_create_image_tag
@@ -67,10 +66,9 @@ Implementation:
These tests cover the RBAC tests of image namespaces.
Implementation:
-`ImageNamespacesRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/image/test_image_namespace_rbac.py>`_
+`ImageNamespacesRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/image/test_image_namespace_rbac.py>`_
- patrole_tempest_plugin.tests.api.image.test_image_namespace_rbac.ImageNamespacesRbacTest.test_create_metadef_namespace
-- patrole_tempest_plugin.tests.api.image.test_image_namespace_rbac.ImageNamespacesRbacTest.test_list_metadef_namespaces
- patrole_tempest_plugin.tests.api.image.test_image_namespace_rbac.ImageNamespacesRbacTest.test_modify_metadef_namespace
@@ -79,7 +77,7 @@ Implementation:
These tests cover the RBAC tests of image namespaces objects.
Implementation:
-`ImageNamespacesObjectsRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/image/test_image_namespace_objects_rbac.py>`_
+`ImageNamespacesObjectsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/image/test_image_namespace_objects_rbac.py>`_
- patrole_tempest_plugin.tests.api.image.test_image_namespace_objects_rbac.ImageNamespacesObjectsRbacTest.test_create_metadef_object_in_namespace
- patrole_tempest_plugin.tests.api.image.test_image_namespace_objects_rbac.ImageNamespacesObjectsRbacTest.test_list_metadef_objects_in_namespace
@@ -92,7 +90,7 @@ Implementation:
These tests cover the RBAC tests of image namespaces property.
Implementation:
-`NamespacesPropertyRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/image/test_image_namespace_property_rbac.py>`_
+`NamespacesPropertyRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/image/test_image_namespace_property_rbac.py>`_
- patrole_tempest_plugin.tests.api.image.test_image_namespace_property_rbac.NamespacesPropertyRbacTest.test_add_md_properties
- patrole_tempest_plugin.tests.api.image.test_image_namespace_property_rbac.NamespacesPropertyRbacTest.test_get_md_properties
@@ -105,7 +103,7 @@ Implementation:
These tests cover the RBAC tests of image namespaces tags.
Implementation:
-`NamespaceTagsRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/image/test_image_namespace_tags_rbac.py>`_
+`NamespaceTagsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/image/test_image_namespace_tags_rbac.py>`_
- patrole_tempest_plugin.tests.api.image.test_image_namespace_tags_rbac.NamespaceTagsRbacTest.test_create_namespace_tag
- patrole_tempest_plugin.tests.api.image.test_image_namespace_tags_rbac.NamespaceTagsRbacTest.test_create_namespace_tags
@@ -119,7 +117,7 @@ Implementation:
These tests cover the RBAC tests of image resource types.
Implementation:
-`ImageResourceTypesRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/image/test_image_resource_types_rbac.py>`_
+`ImageResourceTypesRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/image/test_image_resource_types_rbac.py>`_
- patrole_tempest_plugin.tests.api.image.test_image_resource_types_rbac.ImageResourceTypesRbacTest.test_add_metadef_resource_type
- patrole_tempest_plugin.tests.api.image.test_image_resource_types_rbac.ImageResourceTypesRbacTest.test_get_metadef_resource_type
@@ -131,7 +129,7 @@ Implementation:
These tests cover the RBAC tests of image member.
Implementation:
-`ImagesMemberRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/image/test_images_member_rbac.py>`_
+`ImagesMemberRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/image/test_images_member_rbac.py>`_
- patrole_tempest_plugin.tests.api.image.test_images_member_rbac.ImagesMemberRbacTest.test_add_image_member
- patrole_tempest_plugin.tests.api.image.test_images_member_rbac.ImagesMemberRbacTest.test_delete_image_member
@@ -144,8 +142,8 @@ Implementation:
These tests cover the RBAC tests of network agents.
Implementation:
-`AgentsRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_agents_rbac.py#L24>`_ and
-`DHCPAgentSchedulersRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_agents_rbac.py#L147>`_.
+`AgentsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_agents_rbac.py#L24>`_ and
+`DHCPAgentSchedulersRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_agents_rbac.py#L147>`_.
- patrole_tempest_plugin.tests.api.network.test_agents_rbac.AgentsRbacTest.test_show_agent
- patrole_tempest_plugin.tests.api.network.test_agents_rbac.AgentsRbacTest.test_update_agent
@@ -159,7 +157,7 @@ Implementation:
These tests cover the RBAC tests of network floating ips.
Implementation:
-`FloatingIpsRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_floating_ips_rbac.py>`_
+`FloatingIpsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_floating_ips_rbac.py>`_
- patrole_tempest_plugin.tests.api.network.test_floating_ips_rbac.FloatingIpsRbacTest.test_create_floating_ip
- patrole_tempest_plugin.tests.api.network.test_floating_ips_rbac.FloatingIpsRbacTest.test_create_floating_ip_floatingip_address
@@ -173,27 +171,24 @@ Implementation:
These tests cover the RBAC tests of network basic operations.
Implementation:
-`NetworksRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_networks_rbac.py>`_
+`NetworksRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_networks_rbac.py>`_
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network
-- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_provider_network_type
-- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_provider_segmentation_id
+- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_is_default
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_router_external
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_shared
-- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_subnet
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_delete_network
-- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_delete_subnet
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_list_dhcp_agents_on_hosting_network
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_network
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_network_provider_network_type
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_network_provider_physical_network
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_network_provider_segmentation_id
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_network_router_external
-- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_subnet
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_network
+- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_network_provider_physical_network
+- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_network_provider_segmentation_id
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_network_router_external
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_network_shared
-- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_subnet
**Network ports RBAC test:**
@@ -201,14 +196,14 @@ Implementation:
These tests cover the RBAC tests of network ports.
Implementation:
-`PortsRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_ports_rbac.py>`_
+`PortsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_ports_rbac.py>`_
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_allowed_address_pairs
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_binding_host_id
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_binding_profile
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_device_owner
-- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_fixed_ips
+- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_fixed_ips_ip_address
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_mac_address
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_security_enabled
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_delete_port
@@ -222,7 +217,7 @@ Implementation:
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_update_port_binding_host_id
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_update_port_binding_profile
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_update_port_device_owner
-- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_update_port_fixed_ips
+- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_update_port_fixed_ips_ip_address
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_update_port_mac_address
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_update_port_security_enabled
@@ -232,7 +227,7 @@ Implementation:
These tests cover the RBAC tests of network routers.
Implementation:
-`RouterRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_routers_rbac.py>`_
+`RouterRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_routers_rbac.py>`_
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_add_router_interface
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_create_router
@@ -240,6 +235,7 @@ Implementation:
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_create_router_external_fixed_ips
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_delete_router
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_remove_router_interface
+- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_show_high_availability_router
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_show_router
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_update_router
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_update_router_enable_snat
@@ -253,7 +249,7 @@ Implementation:
These tests cover the RBAC tests of network security groups.
Implementation:
-`SecGroupRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_security_groups_rbac.py>`_
+`SecGroupRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_security_groups_rbac.py>`_
- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_create_security_group
- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_create_security_group_rule
@@ -262,7 +258,7 @@ Implementation:
- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_list_security_group_rules
- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_list_security_groups
- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_show_security_group_rule
-- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_show_security_groups
+- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_show_security_group
- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_update_security_group
@@ -271,7 +267,7 @@ Implementation:
These tests cover the RBAC tests of network service providers.
Implementation:
-`ServiceProvidersRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_service_providers_rbac.py>`_
+`ServiceProvidersRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_service_providers_rbac.py>`_
- patrole_tempest_plugin.tests.api.network.test_service_providers_rbac.ServiceProvidersRbacTest.test_list_service_providers
@@ -281,7 +277,7 @@ Implementation:
These tests cover the RBAC tests of network subnetpools.
Implementation:
-`SubnetPoolsRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_subnetpools_rbac.py>`_
+`SubnetPoolsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_subnetpools_rbac.py>`_
- patrole_tempest_plugin.tests.api.network.test_subnetpools_rbac.SubnetPoolsRbacTest.test_create_subnetpool
- patrole_tempest_plugin.tests.api.network.test_subnetpools_rbac.SubnetPoolsRbacTest.test_create_subnetpool_shared
@@ -296,10 +292,36 @@ Implementation:
These tests cover the RBAC tests of network subnets.
Implementation:
-`SubnetsRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_subnets_rbac.py>`_
+`SubnetsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_subnets_rbac.py>`_
- patrole_tempest_plugin.tests.api.network.test_subnets_rbac.SubnetsRbacTest.test_create_subnet
- patrole_tempest_plugin.tests.api.network.test_subnets_rbac.SubnetsRbacTest.test_delete_subnet
- patrole_tempest_plugin.tests.api.network.test_subnets_rbac.SubnetsRbacTest.test_list_subnets
- patrole_tempest_plugin.tests.api.network.test_subnets_rbac.SubnetsRbacTest.test_show_subnet
- patrole_tempest_plugin.tests.api.network.test_subnets_rbac.SubnetsRbacTest.test_update_subnet
+
+
+**Network flavors RBAC test:**
+
+These tests cover the RBAC tests of network flavors.
+
+Implementation:
+`FlavorsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_flavors_rbac.py>`_
+
+- patrole_tempest_plugin.tests.api.network.test_flavors_rbac.FlavorsPluginRbacTest.test_create_flavor
+- patrole_tempest_plugin.tests.api.network.test_flavors_rbac.FlavorsPluginRbacTest.test_delete_flavor
+- patrole_tempest_plugin.tests.api.network.test_flavors_rbac.FlavorsPluginRbacTest.test_list_flavors
+- patrole_tempest_plugin.tests.api.network.test_flavors_rbac.FlavorsPluginRbacTest.test_show_flavor
+- patrole_tempest_plugin.tests.api.network.test_flavors_rbac.FlavorsPluginRbacTest.test_update_flavor
+
+
+**Network segments RBAC test:**
+
+These tests cover the RBAC tests of network segments.
+
+Implementation:
+`SegmentsRbacTest <https://github.com/openstack/patrole/blob/0.4.0/patrole_tempest_plugin/tests/api/network/test_network_segments_rbac.py>`_
+
+- patrole_tempest_plugin.tests.api.network.test_network_segments_rbac.NetworkSegmentsRbacTest.test_create_network_segments
+- patrole_tempest_plugin.tests.api.network.test_network_segments_rbac.NetworkSegmentsRbacTest.test_show_network_segments
+- patrole_tempest_plugin.tests.api.network.test_network_segments_rbac.NetworkSegmentsRbacTest.test_update_network_segments
diff --git a/docs/testing/user/testspecification/security_patrole_vxlan_dependent/index.rst b/docs/testing/user/testspecification/security_patrole_vxlan_dependent/index.rst
new file mode 100644
index 00000000..646cb8b6
--- /dev/null
+++ b/docs/testing/user/testspecification/security_patrole_vxlan_dependent/index.rst
@@ -0,0 +1,51 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+.. (c) OPNFV
+
+=====================================
+Patrole Tempest Tests Depend on Vxlan
+=====================================
+
+Scope
+=====
+
+This test area includes some tempest role-based access control (RBAC) tests
+which depend on vxlan physical networks.
+
+
+References
+================
+
+- `OpenStack image service API reference <https://docs.openstack.org/api-ref/image/v2/index.html>`_
+- `OpenStack metadata definitions service API reference <https://docs.openstack.org/api-ref/image/v2/metadefs-index.html>`_
+- `OpenStack layer 2 networking service API reference <https://docs.openstack.org/api-ref/network/v2/index.html#layer-2-networking>`_
+- `OpenStack layer 3 networking service API reference <https://docs.openstack.org/api-ref/network/v2/index.html#layer-3-networking>`_
+- `OpenStack network security API reference <https://docs.openstack.org/api-ref/network/v2/index.html#security>`_
+- `OpenStack resource management API reference <https://docs.openstack.org/api-ref/network/v2/index.html#resource-management>`_
+
+
+System Under Test (SUT)
+=======================
+
+The system under test is assumed to be the NFVI and VIM deployed on a Pharos
+compliant infrastructure.
+
+
+Test Area Structure
+====================
+
+The test area is structured in individual tests as listed below. Each test case
+is able to run independently, i.e. irrelevant of the state created by a previous
+test. For detailed information on the individual steps and assertions performed
+by the tests, review the Python source code accessible via the following links.
+
+
+**Network basic RBAC test:**
+
+These tests cover the RBAC tests of network basic operations by creating a vxlan provider network.
+
+Implementation:
+`NetworksRbacTest <https://github.com/openstack/patrole/blob/0.2.0/patrole_tempest_plugin/tests/api/network/test_networks_rbac.py>`_
+
+- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_provider_network_type
+- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_provider_segmentation_id
diff --git a/docs/testing/user/testspecification/snaps_smoke/index.rst b/docs/testing/user/testspecification/snaps_smoke/index.rst
deleted file mode 100644
index dff2b2b5..00000000
--- a/docs/testing/user/testspecification/snaps_smoke/index.rst
+++ /dev/null
@@ -1,232 +0,0 @@
-.. This work is licensed under a Creative Commons Attribution 4.0 International License.
-.. http://creativecommons.org/licenses/by/4.0
-.. (c) Ericsson AB
-
-==============================
-SNAPS smoke test specification
-==============================
-
-.. toctree::
- :maxdepth: 2
-
-Scope
-=====
-
-The SNAPS smoke test case contains tests that setup and destroy environments
-with VMs with and without Floating IPs with a newly created user and project.
-
-References
-==========
-
-This smoke test executes the Python Tests included with the SNAPS libraries
-that exercise many of the OpenStack APIs within Keystone, Glance, Neutron,
-and Nova.
-
-- https://wiki.opnfv.org/display/PROJ/SNAPS-OO
-
-System Under Test (SUT)
-=======================
-
-The SUT is assumed to be the NFVi and VIM in operation on a Pharos compliant infrastructure.
-
-
-Test Area Structure
-===================
-
-The test area is structured in individual tests as listed below.
-For detailed information on the individual steps and assertions performed
-by the tests, review the Python source code accessible via the following links:
-
-**Dynamic creation of User/Project objects to be leveraged for the integration tests:**
-
-- `Create Image Success tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_image_tests.py#L254>`_
- - snaps.openstack.tests.create_image_tests.CreateImageSuccessTests.test_create_delete_image
- - snaps.openstack.tests.create_image_tests.CreateImageSuccessTests.test_create_image_clean_file
- - snaps.openstack.tests.create_image_tests.CreateImageSuccessTests.test_create_image_clean_url
- - snaps.openstack.tests.create_image_tests.CreateImageSuccessTests.test_create_image_clean_url_properties
- - snaps.openstack.tests.create_image_tests.CreateImageSuccessTests.test_create_same_image
- - snaps.openstack.tests.create_image_tests.CreateImageSuccessTests.test_create_same_image_new_settings
-
-- `Create Image Negative tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_image_tests.py#L463>`_
- - snaps.openstack.tests.create_image_tests.CreateImageNegativeTests.test_bad_image_file
- - snaps.openstack.tests.create_image_tests.CreateImageNegativeTests.test_bad_image_image_type
- - snaps.openstack.tests.create_image_tests.CreateImageNegativeTests.test_bad_image_name
- - snaps.openstack.tests.create_image_tests.CreateImageNegativeTests.test_bad_image_url
-
-- `Create Image Multi Part tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_image_tests.py#L551>`_
- - snaps.openstack.tests.create_image_tests.CreateMultiPartImageTests.test_create_three_part_image_from_file_3_creators
- - snaps.openstack.tests.create_image_tests.CreateMultiPartImageTests.test_create_three_part_image_from_url
- - snaps.openstack.tests.create_image_tests.CreateMultiPartImageTests.test_create_three_part_image_from_url_3_creators
-
-- `Create Keypairs tests <https://github.com/opnfv/snaps/blob/stable%2Ffraser/snaps/openstack/tests/create_keypairs_tests.py#L192>`_
- - snaps.openstack.tests.create_keypairs_tests.CreateKeypairsTests.test_create_delete_keypair
- - snaps.openstack.tests.create_keypairs_tests.CreateKeypairsTests.test_create_keypair_from_file
- - snaps.openstack.tests.create_keypairs_tests.CreateKeypairsTests.test_create_keypair_large_key
- - snaps.openstack.tests.create_keypairs_tests.CreateKeypairsTests.test_create_keypair_only
- - snaps.openstack.tests.create_keypairs_tests.CreateKeypairsTests.test_create_keypair_save_both
- - snaps.openstack.tests.create_keypairs_tests.CreateKeypairsTests.test_create_keypair_save_pub_only
-
-- `Create Keypairs Cleanup tests <https://github.com/opnfv/snaps/blob/stable%2Ffraser/snaps/openstack/tests/create_keypairs_tests.py#L361>`_
- - snaps.openstack.tests.create_keypairs_tests.CreateKeypairsCleanupTests.test_create_keypair_exist_files_delete
- - snaps.openstack.tests.create_keypairs_tests.CreateKeypairsCleanupTests.test_create_keypair_exist_files_keep
- - snaps.openstack.tests.create_keypairs_tests.CreateKeypairsCleanupTests.test_create_keypair_gen_files_delete_1
- - snaps.openstack.tests.create_keypairs_tests.CreateKeypairsCleanupTests.test_create_keypair_gen_files_delete_2
- - snaps.openstack.tests.create_keypairs_tests.CreateKeypairsCleanupTests.test_create_keypair_gen_files_keep
-
-- `Create Network Success tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_network_tests.py#L355>`_
- - snaps.openstack.tests.create_network_tests.CreateNetworkSuccessTests.test_create_delete_network
- - snaps.openstack.tests.create_network_tests.CreateNetworkSuccessTests.test_create_network_router_admin_user_to_new_project
- - snaps.openstack.tests.create_network_tests.CreateNetworkSuccessTests.test_create_network_router_new_user_to_admin_project
- - snaps.openstack.tests.create_network_tests.CreateNetworkSuccessTests.test_create_network_with_router
- - snaps.openstack.tests.create_network_tests.CreateNetworkSuccessTests.test_create_network_without_router
- - snaps.openstack.tests.create_network_tests.CreateNetworkSuccessTests.test_create_networks_same_name
-
-- `Create Router Success tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_router_tests.py#L118>`_
- - snaps.openstack.tests.create_router_tests.CreateRouterSuccessTests.test_create_delete_router
- - snaps.openstack.tests.create_router_tests.CreateRouterSuccessTests.test_create_router_admin_state_True
- - snaps.openstack.tests.create_router_tests.CreateRouterSuccessTests.test_create_router_admin_state_false
- - snaps.openstack.tests.create_router_tests.CreateRouterSuccessTests.test_create_router_admin_user_to_new_project
- - snaps.openstack.tests.create_router_tests.CreateRouterSuccessTests.test_create_router_external_network
- - snaps.openstack.tests.create_router_tests.CreateRouterSuccessTests.test_create_router_new_user_as_admin_project
- - snaps.openstack.tests.create_router_tests.CreateRouterSuccessTests.test_create_router_private_network
- - snaps.openstack.tests.create_router_tests.CreateRouterSuccessTests.test_create_router_vanilla
- - snaps.openstack.tests.create_router_tests.CreateRouterSuccessTests.test_create_router_with_ext_port
- - snaps.openstack.tests.create_router_tests.CreateRouterSuccessTests.test_create_with_internal_sub
- - snaps.openstack.tests.create_router_tests.CreateRouterSuccessTests.test_create_with_invalid_internal_sub
-
-- `Create Router Negative tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_router_tests.py#L514>`_
- - snaps.openstack.tests.create_router_tests.CreateRouterNegativeTests.test_create_router_admin_ports
- - snaps.openstack.tests.create_router_tests.CreateRouterNegativeTests.test_create_router_invalid_gateway_name
- - snaps.openstack.tests.create_router_tests.CreateRouterNegativeTests.test_create_router_noname
-
-
-- `Create QoS tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_qos_tests.py#L112>`_
- - snaps.openstack.tests.create_qos_tests.CreateQoSTests.test_create_delete_qos
- - snaps.openstack.tests.create_qos_tests.CreateQoSTests.test_create_qos
- - snaps.openstack.tests.create_qos_tests.CreateQoSTests.test_create_same_qos
-
-- `Create Simple Volume Success tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_volume_tests.py#L116>`_
- - snaps.openstack.tests.create_volume_tests.CreateSimpleVolumeSuccessTests.test_create_delete_volume
- - snaps.openstack.tests.create_volume_tests.CreateSimpleVolumeSuccessTests.test_create_same_volume
- - snaps.openstack.tests.create_volume_tests.CreateSimpleVolumeSuccessTests.test_create_volume_simple
-
-- `Create Simple Volume Failure tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_volume_tests.py#L116>`_
- - snaps.openstack.tests.create_volume_tests.CreateSimpleVolumeFailureTests.test_create_volume_bad_image
- - snaps.openstack.tests.create_volume_tests.CreateSimpleVolumeFailureTests.test_create_volume_bad_size
- - snaps.openstack.tests.create_volume_tests.CreateSimpleVolumeFailureTests.test_create_volume_bad_type
-
-- `Create Volume With Type tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_volume_tests.py#L286>`_
- - snaps.openstack.tests.create_volume_tests.CreateVolumeWithTypeTests.test_bad_volume_type
- - snaps.openstack.tests.create_volume_tests.CreateVolumeWithTypeTests.test_valid_volume_type
-
-- `Create Volume With Image tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_volume_tests.py#L336>`_
- - snaps.openstack.tests.create_volume_tests.CreateVolumeWithImageTests.test_bad_image_name
- - snaps.openstack.tests.create_volume_tests.CreateVolumeWithImageTests.test_valid_volume_image
-
-- `Create Simple Volume Type Success tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_volume_type_tests.py#L113>`_
- - snaps.openstack.tests.create_volume_type_tests.CreateSimpleVolumeTypeSuccessTests.test_create_delete_volume_type
- - snaps.openstack.tests.create_volume_type_tests.CreateSimpleVolumeTypeSuccessTests.test_create_same_volume_type
- - snaps.openstack.tests.create_volume_type_tests.CreateSimpleVolumeTypeSuccessTests.test_create_volume_type
-
-- `Create Volume Type Complex tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_volume_type_tests.py#L206>`_
- - snaps.openstack.tests.create_volume_type_tests.CreateVolumeTypeComplexTests.test_volume_type_with_encryption
- - snaps.openstack.tests.create_volume_type_tests.CreateVolumeTypeComplexTests.test_volume_type_with_qos
- - snaps.openstack.tests.create_volume_type_tests.CreateVolumeTypeComplexTests.test_volume_type_with_qos_and_encryption
-
-- `Simple Health Check <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_instance_tests.py#L283>`_
- - snaps.openstack.tests.create_instance_tests.SimpleHealthCheck.test_check_vm_ip_dhcp
-
-- `Create Instance Two Net tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_instance_tests.py#L2835>`_
- - snaps.openstack.tests.create_instance_tests.CreateInstanceTwoNetTests.test_ping_via_router
-
-- `Create Instance Simple tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_instance_tests.py#L408>`_
- - snaps.openstack.tests.create_instance_tests.CreateInstanceSimpleTests.test_create_admin_instance
- - snaps.openstack.tests.create_instance_tests.CreateInstanceSimpleTests.test_create_delete_instance
-
-- `Create Instance Port Manipulation tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_instance_tests.py#L1343>`_
- - snaps.openstack.tests.create_instance_tests.CreateInstancePortManipulationTests.test_set_allowed_address_pairs
- - snaps.openstack.tests.create_instance_tests.CreateInstancePortManipulationTests.test_set_allowed_address_pairs_bad_ip
- - snaps.openstack.tests.create_instance_tests.CreateInstancePortManipulationTests.test_set_allowed_address_pairs_bad_mac
- - snaps.openstack.tests.create_instance_tests.CreateInstancePortManipulationTests.test_set_custom_invalid_ip_one_subnet
- - snaps.openstack.tests.create_instance_tests.CreateInstancePortManipulationTests.test_set_custom_invalid_mac
- - snaps.openstack.tests.create_instance_tests.CreateInstancePortManipulationTests.test_set_custom_mac_and_ip
- - snaps.openstack.tests.create_instance_tests.CreateInstancePortManipulationTests.test_set_custom_valid_ip_one_subnet
- - snaps.openstack.tests.create_instance_tests.CreateInstancePortManipulationTests.test_set_custom_valid_mac
- - snaps.openstack.tests.create_instance_tests.CreateInstancePortManipulationTests.test_set_one_port_two_ip_one_subnet
- - snaps.openstack.tests.create_instance_tests.CreateInstancePortManipulationTests.test_set_one_port_two_ip_two_subnets
-
-- `Instance Security Group tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_instance_tests.py#L283>`_
- - snaps.openstack.tests.create_instance_tests.InstanceSecurityGroupTests.test_add_invalid_security_group
- - snaps.openstack.tests.create_instance_tests.InstanceSecurityGroupTests.test_add_same_security_group
- - snaps.openstack.tests.create_instance_tests.InstanceSecurityGroupTests.test_add_security_group
- - snaps.openstack.tests.create_instance_tests.InstanceSecurityGroupTests.test_remove_security_group
- - snaps.openstack.tests.create_instance_tests.InstanceSecurityGroupTests.test_remove_security_group_never_added
-
-- `Create Instance On Compute Host <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_instance_tests.py#L1738>`_
- - snaps.openstack.tests.create_instance_tests.CreateInstanceOnComputeHost.test_deploy_vm_to_each_compute_node
-
-- `Create Instance From Three Part Image <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_instance_tests.py#L2205>`_
- - snaps.openstack.tests.create_instance_tests.CreateInstanceFromThreePartImage.test_create_instance_from_three_part_image
-
-- `Create Instance Volume tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_instance_tests.py#L3062>`_
- - snaps.openstack.tests.create_instance_tests.CreateInstanceVolumeTests.test_create_instance_with_one_volume
- - snaps.openstack.tests.create_instance_tests.CreateInstanceVolumeTests.test_create_instance_with_two_volumes
-
-- `Create Instance Single Network tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_instance_tests.py#L687>`_
- - snaps.openstack.tests.create_instance_tests.CreateInstanceSingleNetworkTests.test_single_port_static
- - snaps.openstack.tests.create_instance_tests.CreateInstanceSingleNetworkTests.test_ssh_client_fip_after_active
- - snaps.openstack.tests.create_instance_tests.CreateInstanceSingleNetworkTests.test_ssh_client_fip_after_init
- - snaps.openstack.tests.create_instance_tests.CreateInstanceSingleNetworkTests.test_ssh_client_fip_after_reboot
- - snaps.openstack.tests.create_instance_tests.CreateInstanceSingleNetworkTests.test_ssh_client_fip_before_active
- - snaps.openstack.tests.create_instance_tests.CreateInstanceSingleNetworkTests.test_ssh_client_fip_reverse_engineer
- - snaps.openstack.tests.create_instance_tests.CreateInstanceSingleNetworkTests.test_ssh_client_fip_second_creator
-
-
-- `Create Stack Success tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_stack_tests.py#L131>`_
- - snaps.openstack.tests.create_stack_tests.CreateStackSuccessTests.test_create_delete_stack
- - snaps.openstack.tests.create_stack_tests.CreateStackSuccessTests.test_create_same_stack
- - snaps.openstack.tests.create_stack_tests.CreateStackSuccessTests.test_create_stack_short_timeout
- - snaps.openstack.tests.create_stack_tests.CreateStackSuccessTests.test_create_stack_template_dict
- - snaps.openstack.tests.create_stack_tests.CreateStackSuccessTests.test_create_stack_template_file
- - snaps.openstack.tests.create_stack_tests.CreateStackSuccessTests.test_retrieve_network_creators
- - snaps.openstack.tests.create_stack_tests.CreateStackSuccessTests.test_retrieve_vm_inst_creators
-
-- `Create Stack Volume tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_stack_tests.py#L735>`_
- - snaps.openstack.tests.create_stack_tests.CreateStackVolumeTests.test_retrieve_volume_creator
- - snaps.openstack.tests.create_stack_tests.CreateStackVolumeTests.test_retrieve_volume_type_creator
-
-- `Create Stack Flavor tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_stack_tests.py#L829>`_
- - snaps.openstack.tests.create_stack_tests.CreateStackFlavorTests.test_retrieve_flavor_creator
-
-- `Create Stack Keypair tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_stack_tests.py#L888>`_
- - snaps.openstack.tests.create_stack_tests.CreateStackKeypairTests.test_retrieve_keypair_creator
-
-- `Create Stack Security Group tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_stack_tests.py#L969>`_
- - snaps.openstack.tests.create_stack_tests.CreateStackSecurityGroupTests.test_retrieve_security_group_creatorl
-
-- `Create Stack Negative tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_stack_tests.py#L1062>`_
- - snaps.openstack.tests.create_stack_tests.CreateStackNegativeTests.test_bad_stack_file
- - snaps.openstack.tests.create_stack_tests.CreateStackNegativeTest.test_missing_dependencies
-
-- `Create Security Group tests <https://github.com/opnfv/snaps/blob/stable%2Ffraser/snaps/openstack/tests/create_security_group_tests.py#L199>`_
- - snaps.openstack.tests.create_security_group_tests.CreateSecurityGroupTests.test_add_rule
- - snaps.openstack.tests.create_security_group_tests.CreateSecurityGroupTests.test_create_delete_group
- - snaps.openstack.tests.create_security_group_tests.CreateSecurityGroupTests.test_create_group_admin_user_to_new_project
- - snaps.openstack.tests.create_security_group_tests.CreateSecurityGroupTests.test_create_group_new_user_to_admin_project
- - snaps.openstack.tests.create_security_group_tests.CreateSecurityGroupTests.test_create_group_with_one_complex_rule
- - snaps.openstack.tests.create_security_group_tests.CreateSecurityGroupTests.test_create_group_with_one_simple_rule
- - snaps.openstack.tests.create_security_group_tests.CreateSecurityGroupTests.test_create_group_with_several_rules
- - snaps.openstack.tests.create_security_group_tests.CreateSecurityGroupTests.test_create_group_without_rules
- - snaps.openstack.tests.create_security_group_tests.CreateSecurityGroupTests.test_remove_rule_by_id
- - snaps.openstack.tests.create_security_group_tests.CreateSecurityGroupTests.test_remove_rule_by_setting
-
-**Floating IP and Ansible provisioning:**
-
-- `Create Stack Floating tests <https://github.com/opnfv/snaps/blob/stable/fraser/snaps/openstack/tests/create_stack_tests.py#L414>`_
- - snaps.openstack.tests.create_stack_tests.CreateStackFloatingIpTests.test_connect_via_ssh_heat_vm
- - snaps.openstack.tests.create_stack_tests.CreateStackFloatingIpTests.test_connect_via_ssh_heat_vm_derived
-
-
-- `Ansible Provisioning tests <https://github.com/opnfv/snaps/blob/stable%2Ffraser/snaps/provisioning/tests/ansible_utils_tests.py#L48>`_
- - snaps.provisioning.tests.ansible_utils_tests.AnsibleProvisioningTests.test_apply_simple_playbook
- - snaps.provisioning.tests.ansible_utils_tests.AnsibleProvisioningTests.test_apply_template_playbook
diff --git a/docs/testing/user/testspecification/stress/index.rst b/docs/testing/user/testspecification/stress/index.rst
index 74961fd1..5483fc93 100644
--- a/docs/testing/user/testspecification/stress/index.rst
+++ b/docs/testing/user/testspecification/stress/index.rst
@@ -71,7 +71,7 @@ Test Case 1 - Concurrent capacity based on life-cycle ping test
Short name
----------
-dovetail.stress.ping
+bottlenecks.stress.ping
Use case specification
----------------------
@@ -133,7 +133,7 @@ Test execution
* Test action 8: Go to *Test action 3* and do the test again to create *N2* VM pairs with PASS VM pairs counted as *S2*
* Test action 9: If *S2<N3*, the SUT is marked with FAIL. Otherwise marked with PASS.
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
Typical setting of *(N1, N2, N3, P1)* is *(5, 5, 5, 10)*.
diff --git a/docs/testing/user/testspecification/tempest_compute/index.rst b/docs/testing/user/testspecification/tempest_compute/index.rst
index 18a0019a..ee86ae77 100644
--- a/docs/testing/user/testspecification/tempest_compute/index.rst
+++ b/docs/testing/user/testspecification/tempest_compute/index.rst
@@ -2,9 +2,9 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) Ericsson AB
-===========================================
+==================================
Tempest Compute test specification
-===========================================
+==================================
Scope
@@ -29,30 +29,30 @@ These runtime operations includes:
References
==========
-`Security Groups: <https://developer.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups>`_
+`Security Groups: <https://docs.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups>`_
- create security group
- delete security group
-`Networks: <https://developer.openstack.org/api-ref/networking/v2/index.html#networks>`_
+`Networks: <https://docs.openstack.org/api-ref/network/v2/index.html#networks>`_
- create network
- delete network
-`Routers and interface: <https://developer.openstack.org/api-ref/networking/v2/index.html#routers-routers>`_
+`Routers and interface: <https://docs.openstack.org/api-ref/network/v2/index.html#routers-routers>`_
- create router
- update router
- delete router
- add interface to router
-`Subnets: <https://developer.openstack.org/api-ref/networking/v2/index.html#subnets>`_
+`Subnets: <https://docs.openstack.org/api-ref/network/v2/index.html#subnets>`_
- create subnet
- update subnet
- delete subnet
-`Servers: <https://developer.openstack.org/api-ref/compute/>`_
+`Servers: <https://docs.openstack.org/api-ref/compute/>`_
- create keypair
- create server
@@ -60,13 +60,13 @@ References
- add/assign floating IP
- disassociate floating IP
-`Ports: <https://developer.openstack.org/api-ref/networking/v2/index.html#ports>`_
+`Ports: <https://docs.openstack.org/api-ref/network/v2/index.html#ports>`_
- create port
- update port
- delete port
-`Floating IPs: <https://developer.openstack.org/api-ref/networking/v2/index.html#floating-ips-floatingips>`_
+`Floating IPs: <https://docs.openstack.org/api-ref/network/v2/index.html#floating-ips-floatingips>`_
- create floating IP
- delete floating IP
@@ -86,7 +86,7 @@ The test area is structured in individual tests as listed below.
For detailed information on the individual steps and assertions performed
by the tests, review the Python source code accessible via the following links:
-All these test cases are included in the test case dovetail.tempest.compute of
+All these test cases are included in the test case functest.tempest.compute of
OVP test suite.
@@ -110,8 +110,8 @@ by the tests, review the Python source code accessible via the following links:
- `Security Groups test <https://github.com/openstack/tempest/blob/master/tempest/api/compute/security_groups/test_security_groups.py#L23>`_
- tempest.api.compute.security_groups.test_security_groups.SecurityGroupsTestJSON.test_security_groups_create_list_delete
-- `Attach Interfaces test <https://github.com/openstack/tempest/blob/master/tempest/api/compute/servers/test_attach_interfaces.py#L32>`_
- - tempest.api.compute.servers.test_attach_interfaces.AttachInterfacesTestJSON.test_add_remove_fixed_ip
+- `Attach Interfaces test <https://github.com/openstack/tempest/blob/master/tempest/api/compute/servers/test_attach_interfaces.py#L347>`_
+ - tempest.api.compute.servers.test_attach_interfaces.AttachInterfacesUnderV243Test.test_add_remove_fixed_ip
- `Server Addresses test <https://github.com/openstack/tempest/blob/master/tempest/api/compute/servers/test_server_addresses.py#L21>`_
diff --git a/docs/testing/user/testspecification/tempest_identity_v3/index.rst b/docs/testing/user/testspecification/tempest_identity_v3/index.rst
index bb60b204..599427a3 100644
--- a/docs/testing/user/testspecification/tempest_identity_v3/index.rst
+++ b/docs/testing/user/testspecification/tempest_identity_v3/index.rst
@@ -2,9 +2,9 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) Ericsson AB
-===========================================
+======================================
Tempest Identity v3 test specification
-===========================================
+======================================
Scope
@@ -31,7 +31,7 @@ These runtime operations may include that create, list, verify and delete:
References
==========
-`Identity API v3.0 <https://developer.openstack.org/api-ref/identity/v3/index.html>`_
+`Identity API v3.0 <https://docs.openstack.org/api-ref/identity/v3/index.html>`_
System Under Test (SUT)
=======================
@@ -46,14 +46,14 @@ The test area is structured in individual tests as listed below.
For detailed information on the individual steps and assertions performed
by the tests, review the Python source code accessible via the following links:
-All these test cases are included in the test case dovetail.tempest.identity_v3 of
+All these test cases are included in the test case functest.tempest.identity_v3 of
OVP test suite.
- `Create, Get, Update and Delete Credentials <https://github.com/openstack/tempest/blob/12.2.0/tempest/api/identity/admin/v3/test_credentials.py#L21>`_
- tempest.api.identity.admin.v3.test_credentials.CredentialsTestJSON.test_credentials_create_get_update_delete
- `Create and Verify Domain <https://github.com/openstack/tempest/blob/12.2.0/tempest/api/identity/admin/v3/test_domains.py#L159>`_
- - tempest.api.identity.admin.v3.test_domains.DefaultDomainTestJSON.test_default_domain_exists
+ - tempest.api.identity.v3.test_domains.DefaultDomainTestJSON.test_default_domain_exists
- `Create, Update and Delete Domain <https://github.com/openstack/tempest/blob/12.2.0/tempest/api/identity/admin/v3/test_domains.py>`_
- tempest.api.identity.admin.v3.test_domains.DomainsTestJSON.test_create_update_delete_domain
diff --git a/docs/testing/user/testspecification/tempest_image/index.rst b/docs/testing/user/testspecification/tempest_image/index.rst
index da438e52..121b38e6 100644
--- a/docs/testing/user/testspecification/tempest_image/index.rst
+++ b/docs/testing/user/testspecification/tempest_image/index.rst
@@ -2,9 +2,9 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) Ericsson AB
-===========================================
+================================
Tempest Image test specification
-===========================================
+================================
Scope
@@ -17,7 +17,7 @@ network runtime operations functionality.
References
==========
-`Image Service API v2 <https://developer.openstack.org/api-ref/image/v2/index.html#images>`_
+`Image Service API v2 <https://docs.openstack.org/api-ref/image/v2/index.html>`_
System Under Test (SUT)
=======================
@@ -32,11 +32,11 @@ The test area is structured in individual tests as listed below.
For detailed information on the individual steps and assertions performed
by the tests, review the Python source code accessible via the following links:
-All these test cases are included in the test case dovetail.tempest.image of
+All these test cases are included in the test case functest.tempest.image of
OVP test suite.
- `Register, Upload, Get Image and Get Image File API's <https://github.com/openstack/tempest/blob/18.0.0/tempest/api/image/v2/test_images.py#L32>`_
- tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_register_upload_get_image_file
- `List Versions <https://github.com/openstack/tempest/blob/18.0.0/tempest/api/image/v2/test_versions.py>`_
- - tempest.api.image.v2.test_versions.VersionsTest.test_list_versions \ No newline at end of file
+ - tempest.api.image.v2.test_versions.VersionsTest.test_list_versions
diff --git a/docs/testing/user/testspecification/tempest_ipv6/index.rst b/docs/testing/user/testspecification/tempest_ipv6/index.rst
index d78370c8..3da23c99 100644
--- a/docs/testing/user/testspecification/tempest_ipv6/index.rst
+++ b/docs/testing/user/testspecification/tempest_ipv6/index.rst
@@ -23,7 +23,7 @@ References
- upstream openstack API reference
- - http://developer.openstack.org/api-ref
+ - https://docs.openstack.org/api-ref/
- upstream openstack IPv6 reference
@@ -68,7 +68,7 @@ Test Descriptions
API Used and Reference
----------------------
-Networks: https://developer.openstack.org/api-ref/networking/v2/index.html#networks
+Networks: https://docs.openstack.org/api-ref/network/v2/index.html#networks
- show network details
- update network
@@ -77,7 +77,7 @@ Networks: https://developer.openstack.org/api-ref/networking/v2/index.html#netwo
- create netowrk
- bulk create networks
-Subnets: https://developer.openstack.org/api-ref/networking/v2/index.html#subnets
+Subnets: https://docs.openstack.org/api-ref/network/v2/index.html#subnets
- list subnets
- create subnet
@@ -86,7 +86,7 @@ Subnets: https://developer.openstack.org/api-ref/networking/v2/index.html#subnet
- update subnet
- delete subnet
-Routers and interface: https://developer.openstack.org/api-ref/networking/v2/index.html#routers-routers
+Routers and interface: https://docs.openstack.org/api-ref/network/v2/index.html#routers-routers
- list routers
- create router
@@ -96,7 +96,7 @@ Routers and interface: https://developer.openstack.org/api-ref/networking/v2/ind
- add interface to router
- remove interface from router
-Ports: https://developer.openstack.org/api-ref/networking/v2/index.html#ports
+Ports: https://docs.openstack.org/api-ref/network/v2/index.html#ports
- show port details
- update port
@@ -105,7 +105,7 @@ Ports: https://developer.openstack.org/api-ref/networking/v2/index.html#ports
- create port
- bulk create ports
-Security groups: https://developer.openstack.org/api-ref/networking/v2/index.html#security-groups-security-groups
+Security groups: https://docs.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
- list security groups
- create security groups
@@ -113,14 +113,14 @@ Security groups: https://developer.openstack.org/api-ref/networking/v2/index.htm
- update security group
- delete security group
-Security groups rules: https://developer.openstack.org/api-ref/networking/v2/index.html#security-group-rules-security-group-rules
+Security groups rules: https://docs.openstack.org/api-ref/network/v2/index.html#security-group-rules-security-group-rules
- list security group rules
- create security group rule
- show security group rule
- delete security group rule
-Servers: https://developer.openstack.org/api-ref/compute/
+Servers: https://docs.openstack.org/api-ref/compute/
- list servers
- create server
diff --git a/docs/testing/user/testspecification/tempest_ipv6/ipv6_api.rst b/docs/testing/user/testspecification/tempest_ipv6/ipv6_api.rst
index 60a5633e..b3d2c9dc 100644
--- a/docs/testing/user/testspecification/tempest_ipv6/ipv6_api.rst
+++ b/docs/testing/user/testspecification/tempest_ipv6/ipv6_api.rst
@@ -9,7 +9,7 @@ Test Case 1 - Create and Delete Bulk Network, IPv6 Subnet and Port
Short name
----------
-dovetail.tempest.ipv6_api.bulk_network_subnet_port_create_delete
+functest.tempest.ipv6_api.bulk_network_subnet_port_create_delete
Use case specification
----------------------
@@ -61,7 +61,7 @@ Test execution
* Test action 18: List all networks, verifying the network ids are no longer present
* **Test assertion 8:** The two "id" parameters are not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use bulk create commands to create networks, IPv6 subnets and ports on
@@ -84,7 +84,7 @@ Test Case 2 - Create, Update and Delete an IPv6 Network and Subnet
Short name
-----------
-dovetail.tempest.ipv6_api.network_subnet_create_update_delete
+functest.tempest.ipv6_api.network_subnet_create_update_delete
Use case specification
----------------------
@@ -125,7 +125,7 @@ Test execution
* **Test assertion 5:** The network "id" is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to create, update, delete network, IPv6 subnet on the
@@ -148,7 +148,7 @@ Test Case 3 - Check External Network Visibility
Short name
-----------
-dovetail.tempest.ipv6_api.external_network_visibility
+functest.tempest.ipv6_api.external_network_visibility
Use case specification
----------------------
@@ -189,7 +189,7 @@ Test execution
* **Test assertion 4:** There is no subnet of the external network with the configured
public network id
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use list commands to list external networks, pre-configured
@@ -211,7 +211,7 @@ Test Case 4 - List IPv6 Networks and Subnets
Short name
-----------
-dovetail.tempest.ipv6_api.network_subnet_list
+functest.tempest.ipv6_api.network_subnet_list
Use case specification
----------------------
@@ -248,7 +248,7 @@ Test execution
* Test action 8: List all networks, verifying the network id is no longer present
* **Test assertion 4:** The network "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to use create commands to create network, IPv6 subnet, list
@@ -270,7 +270,7 @@ Test Case 5 - Show Details of an IPv6 Network and Subnet
Short name
----------
-dovetail.tempest.ipv6_api.network_subnet_show
+functest.tempest.ipv6_api.network_subnet_show
Use case specification
----------------------
@@ -308,7 +308,7 @@ Test execution
* Test action 8: List all networks, verifying the network id is no longer present
* **Test assertion 4:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use create commands to create network, IPv6 subnet and show
@@ -330,7 +330,7 @@ Test Case 6 - Create an IPv6 Port in Allowed Allocation Pools
Short name
----------
-dovetail.tempest.ipv6_api.port_create_in_allocation_pool
+functest.tempest.ipv6_api.port_create_in_allocation_pool
Use case specification
----------------------
@@ -373,7 +373,7 @@ Test execution
* Test action 12: List all networks, verifying the network id is no longer present
* **Test assertion 5:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use create commands to create an IPv6 subnet within allowed
@@ -395,7 +395,7 @@ Test Case 7 - Create an IPv6 Port with Empty Security Groups
Short name
-----------
-dovetail.tempest.ipv6_api.port_create_empty_security_group
+functest.tempest.ipv6_api.port_create_empty_security_group
Use case specification
----------------------
@@ -431,7 +431,7 @@ Test execution
* Test action 10: List all networks, verifying the network id is no longer present
* **Test assertion 4:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use create commands to create port with
@@ -452,7 +452,7 @@ Test Case 8 - Create, Update and Delete an IPv6 Port
Short name
----------
-dovetail.tempest.ipv6_api.port_create_update_delete
+functest.tempest.ipv6_api.port_create_update_delete
Use case specification
----------------------
@@ -489,7 +489,7 @@ Test execution
* Test action 9: List all networks, verifying the network id is no longer present
* **Test assertion 4:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to use create/update/delete commands to create/update/delete port
@@ -511,7 +511,7 @@ Test Case 9 - List IPv6 Ports
Short name
----------
-dovetail.tempest.ipv6_api.port_list
+functest.tempest.ipv6_api.port_list
Use case specification
----------------------
@@ -543,7 +543,7 @@ Test execution
* Test action 7: List all networks, verifying the network id is no longer present
* **Test assertion 3:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use list commands to list the networks and ports on
@@ -564,7 +564,7 @@ Test Case 10 - Show Key/Valus Details of an IPv6 Port
Short name
----------
-dovetail.tempest.ipv6_api.port_show_details
+functest.tempest.ipv6_api.port_show_details
Use case specification
----------------------
@@ -602,7 +602,7 @@ Test execution
* Test action 8: List all networks, verifying the network id is no longer present
* **Test assertion 4:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use show commands to show port details on the SUT API.
@@ -625,7 +625,7 @@ Test Case 11 - Add Multiple Interfaces for an IPv6 Router
Short name
-----------
-dovetail.tempest.ipv6_api.router_add_multiple_interface
+functest.tempest.ipv6_api.router_add_multiple_interface
Use case specification
----------------------
@@ -667,7 +667,7 @@ Test execution
* **Test assertion 3:** The interfaces, router, IPv6 subnets and networks ids are not present in the lists
after deleting
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use bulk create commands to create networks, IPv6 subnets and ports on
@@ -690,7 +690,7 @@ Test Case 12 - Add and Remove an IPv6 Router Interface with port_id
Short name
----------
-dovetail.tempest.ipv6_api.router_interface_add_remove_with_port
+functest.tempest.ipv6_api.router_interface_add_remove_with_port
Use case specification
----------------------
@@ -728,7 +728,7 @@ Test execution
ones are not found in the list.
* **Test assertion 3:** interfaces, ports, routers, subnets and networks are not found in the lists after deleting
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to use add/remove commands to add/remove router interface to the port,
@@ -750,7 +750,7 @@ Test Case 13 - Add and Remove an IPv6 Router Interface with subnet_id
Short name
----------
-dovetail.tempest.ipv6_api.router_interface_add_remove
+functest.tempest.ipv6_api.router_interface_add_remove
Use case specification
----------------------
@@ -794,7 +794,7 @@ Test execution
* Test action 13: List all networks, verifying the network id is no longer present
* **Test assertion 7:** The network "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to add and remove router interface with the subnet id on the
@@ -818,7 +818,7 @@ Test Case 14 - Create, Show, List, Update and Delete an IPv6 router
Short name
----------
-dovetail.tempest.ipv6_api.router_create_show_list_update_delete
+functest.tempest.ipv6_api.router_create_show_list_update_delete
Use case specification
----------------------
@@ -856,7 +856,7 @@ Test execution
* Test action 7: List all routers, verifying the router id is no longer present
* **Test assertion 8:** The "id" parameter is not present in the router list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to create, show, list, update and delete router on
@@ -882,7 +882,7 @@ Test Case 15 - Create, List, Update, Show and Delete an IPv6 security group
Short name
----------
-dovetail.tempest.ipv6_api.security_group_create_list_update_show_delete
+functest.tempest.ipv6_api.security_group_create_list_update_show_delete
Use case specification
----------------------
@@ -917,7 +917,7 @@ Test execution
* Test action 7: List all security groups, verifying the security group's id is no longer present
* **Test assertion 6:** The "id" parameter is not present in the security group list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to create list, update, show and delete security group on
@@ -940,7 +940,7 @@ Test Case 16 - Create, Show and Delete IPv6 security group rule
Short name
----------
-dovetail.tempest.ipv6_api.security_group_rule_create_show_delete
+functest.tempest.ipv6_api.security_group_rule_create_show_delete
Use case specification
----------------------
@@ -977,7 +977,7 @@ Test execution
* Test action 8: List all security groups, verifying the security group's id is no longer present
* **Test assertion 4:** The security group "id" parameter is not present in the list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to create, show, list and delete security group rules on
@@ -998,7 +998,7 @@ Test Case 17 - List IPv6 Security Groups
Short name
----------
-dovetail.tempest.ipv6_api.security_group_list
+functest.tempest.ipv6_api.security_group_list
Use case specification
----------------------
@@ -1023,7 +1023,7 @@ Test execution
if the default security group exists
* **Test assertion 1:** The default security group is in the list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to list security groups on the SUT API.
diff --git a/docs/testing/user/testspecification/tempest_ipv6/ipv6_scenario.rst b/docs/testing/user/testspecification/tempest_ipv6/ipv6_scenario.rst
index f3a279f0..5871321d 100644
--- a/docs/testing/user/testspecification/tempest_ipv6/ipv6_scenario.rst
+++ b/docs/testing/user/testspecification/tempest_ipv6/ipv6_scenario.rst
@@ -9,7 +9,7 @@ Test Case 1 - IPv6 Address Assignment - Dual Stack, SLAAC, DHCPv6 Stateless
Short name
----------
-dovetail.tempest.ipv6_scenario.dhcpv6_stateless
+functest.tempest.ipv6_scenario.dhcpv6_stateless
Use case specification
----------------------
@@ -60,7 +60,7 @@ Test execution
* Test action 14: List all networks, verifying the id is no longer present
* **Test assertion 6:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to assign IPv6 addresses in ipv6_ra_mode
@@ -84,7 +84,7 @@ Test Case 2 - IPv6 Address Assignment - Dual Net, Dual Stack, SLAAC, DHCPv6 Stat
Short name
----------
-dovetail.tempest.ipv6_scenario.dualnet_dhcpv6_stateless
+functest.tempest.ipv6_scenario.dualnet_dhcpv6_stateless
Use case specification
----------------------
@@ -138,7 +138,7 @@ Test execution
* Test action 16: List all networks, verifying the ids are no longer present
* **Test assertion 6:** The two "id" parameters are not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to assign IPv6 addresses in ipv6_ra_mode 'dhcpv6_stateless'
@@ -163,7 +163,7 @@ Test Case 3 - IPv6 Address Assignment - Multiple Prefixes, Dual Stack, SLAAC, DH
Short name
----------
-dovetail.tempest.ipv6_scenario.multiple_prefixes_dhcpv6_stateless
+functest.tempest.ipv6_scenario.multiple_prefixes_dhcpv6_stateless
Use case specification
----------------------
@@ -216,7 +216,7 @@ Test execution
* Test action 14: List all networks, verifying the id is no longer present
* **Test assertion 6:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to assign IPv6 addresses in ipv6_ra_mode 'dhcpv6_stateless'
@@ -241,7 +241,7 @@ Test Case 4 - IPv6 Address Assignment - Dual Net, Multiple Prefixes, Dual Stack,
Short name
----------
-dovetail.tempest.ipv6_scenario.dualnet_multiple_prefixes_dhcpv6_stateless
+functest.tempest.ipv6_scenario.dualnet_multiple_prefixes_dhcpv6_stateless
Use case specification
----------------------
@@ -296,7 +296,7 @@ Test execution
* Test action 16: List all networks, verifying the ids are no longer present
* **Test assertion 6:** The two "id" parameters are not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to assign IPv6 addresses in ipv6_ra_mode 'dhcpv6_stateless'
@@ -321,7 +321,7 @@ Test Case 5 - IPv6 Address Assignment - Dual Stack, SLAAC
Short name
----------
-dovetail.tempest.ipv6_scenario.slaac
+functest.tempest.ipv6_scenario.slaac
Use case specification
----------------------
@@ -371,7 +371,7 @@ Test execution
* Test action 14: List all networks, verifying the id is no longer present
* **Test assertion 6:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to assign IPv6 addresses in ipv6_ra_mode 'slaac'
@@ -395,7 +395,7 @@ Test Case 6 - IPv6 Address Assignment - Dual Net, Dual Stack, SLAAC
Short name
----------
-dovetail.tempest.ipv6_scenario.dualnet_slaac
+functest.tempest.ipv6_scenario.dualnet_slaac
Use case specification
----------------------
@@ -448,7 +448,7 @@ Test execution
* Test action 16: List all networks, verifying the ids are no longer present
* **Test assertion 6:** The two "id" parameters are not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to assign IPv6 addresses in ipv6_ra_mode 'slaac'
@@ -473,7 +473,7 @@ Test Case 7 - IPv6 Address Assignment - Multiple Prefixes, Dual Stack, SLAAC
Short name
----------
-dovetail.tempest.ipv6_scenario.multiple_prefixes_slaac
+functest.tempest.ipv6_scenario.multiple_prefixes_slaac
Use case specification
----------------------
@@ -524,7 +524,7 @@ Test execution
* Test action 14: List all networks, verifying the id is no longer present
* **Test assertion 6:** The "id" parameter is not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to assign IPv6 addresses in ipv6_ra_mode 'slaac'
@@ -549,7 +549,7 @@ Test Case 8 - IPv6 Address Assignment - Dual Net, Dual Stack, Multiple Prefixes,
Short name
----------
-dovetail.tempest.ipv6_scenario.dualnet_multiple_prefixes_slaac
+functest.tempest.ipv6_scenario.dualnet_multiple_prefixes_slaac
Use case specification
----------------------
@@ -602,7 +602,7 @@ Test execution
* Test action 16: List all networks, verifying the ids are no longer present
* **Test assertion 6:** The two "id" parameters are not present in the network list
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test evaluates the ability to assign IPv6 addresses in ipv6_ra_mode 'slaac'
diff --git a/docs/testing/user/testspecification/tempest_multi_node_scheduling/index.rst b/docs/testing/user/testspecification/tempest_multi_node_scheduling/index.rst
index 92c7e856..f414de61 100644
--- a/docs/testing/user/testspecification/tempest_multi_node_scheduling/index.rst
+++ b/docs/testing/user/testspecification/tempest_multi_node_scheduling/index.rst
@@ -53,7 +53,7 @@ on multiple nodes. Each test case is able to run independently, i.e. irrelevant
the state created by a previous test. Specifically, every test performs clean-up
operations which return the system to the same state as before the test.
-All these test cases are included in the test case dovetail.tempest.multi_node_scheduling of
+All these test cases are included in the test case functest.tempest.multi_node_scheduling of
OVP test suite.
Test Descriptions
@@ -63,28 +63,28 @@ Test Descriptions
API Used and Reference
----------------------
-Security Groups: https://developer.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
+Security Groups: https://docs.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
- create security group
- delete security group
-Networks: https://developer.openstack.org/api-ref/networking/v2/index.html#networks
+Networks: https://docs.openstack.org/api-ref/network/v2/index.html#networks
- create network
- delete network
-Routers and interface: https://developer.openstack.org/api-ref/networking/v2/index.html#routers-routers
+Routers and interface: https://docs.openstack.org/api-ref/network/v2/index.html#routers-routers
- create router
- delete router
- add interface to router
-Subnets: https://developer.openstack.org/api-ref/networking/v2/index.html#subnets
+Subnets: https://docs.openstack.org/api-ref/network/v2/index.html#subnets
- create subnet
- delete subnet
-Servers: https://developer.openstack.org/api-ref/compute/
+Servers: https://docs.openstack.org/api-ref/compute/
- create keypair
- create server
@@ -96,17 +96,17 @@ Servers: https://developer.openstack.org/api-ref/compute/
- list server groups
- show server group details
-Ports: https://developer.openstack.org/api-ref/networking/v2/index.html#ports
+Ports: https://docs.openstack.org/api-ref/network/v2/index.html#ports
- create port
- delete port
-Floating IPs: https://developer.openstack.org/api-ref/networking/v2/index.html#floating-ips-floatingips
+Floating IPs: https://docs.openstack.org/api-ref/network/v2/index.html#floating-ips-floatingips
- create floating IP
- delete floating IP
-Availability zone: https://developer.openstack.org/api-ref/compute/
+Availability zone: https://docs.openstack.org/api-ref/compute/
- get availability zone
@@ -145,7 +145,7 @@ Test execution
that every server ended up on a different host
* Test action 6: Delete the created servers
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of VM resource scheduling.
@@ -190,7 +190,7 @@ Test execution
* Test action 5: List all server groups
* **Test assertion 4:** SERG1 and SERG2 are not in the list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of creating and deleting server groups with the same name and policy.
@@ -233,7 +233,7 @@ Test execution
* Test action 3: Delete SERG1 and list all server groups
* **Test assertion 3:** SERG1 is not in the list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of creating and deleting server group with affinity policy.
@@ -275,7 +275,7 @@ Test execution
* Test action 3: Delete SERG1 and list all server groups
* **Test assertion 3:** SERG1 is not in the list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of creating and deleting server group with anti-affinity policy.
@@ -316,7 +316,7 @@ Test execution
* **Test assertion 1:** SERG1 is in the list
* Test action 4: Delete SERG1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of listing server groups.
@@ -358,7 +358,7 @@ Test execution
* **Test assertion 1:** All values in D1 are the same as the values in D2
* Test action 4: Delete SERG1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of showing server group details.
diff --git a/docs/testing/user/testspecification/tempest_network_api/index.rst b/docs/testing/user/testspecification/tempest_network_api/index.rst
index ccbe8e0a..4fc47208 100644
--- a/docs/testing/user/testspecification/tempest_network_api/index.rst
+++ b/docs/testing/user/testspecification/tempest_network_api/index.rst
@@ -2,9 +2,9 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) Ericsson AB
-===========================================
+======================================
Tempest Network API test specification
-===========================================
+======================================
Scope
@@ -29,42 +29,42 @@ These runtime operations may include that create, list, verify or delete:
References
==========
-`Networks: <https://developer.openstack.org/api-ref/network/v2/#networks>`_
+`Networks: <https://docs.openstack.org/api-ref/network/v2/#networks>`_
- create network
- delete network
-`Routers and interface: <https://developer.openstack.org/api-ref/networking/v2/index.html#routers-routers>`_
+`Routers and interface: <https://docs.openstack.org/api-ref/network/v2/index.html#routers-routers>`_
- create router
- update router
- delete router
- add interface to router
-`Subnets: <https://developer.openstack.org/api-ref/networking/v2/index.html#subnets>`_
+`Subnets: <https://docs.openstack.org/api-ref/network/v2/index.html#subnets>`_
- create subnet
- update subnet
- delete subnet
-`Subnetpools: <https://developer.openstack.org/api-ref/network/v2/#subnet-pools-extension-subnetpools>`_
+`Subnetpools: <https://docs.openstack.org/api-ref/network/v2/#subnet-pools-extension-subnetpools>`_
- create subnetpool
- update subnetpool
- delete subnetpool
-`Ports: <https://developer.openstack.org/api-ref/networking/v2/index.html#ports>`_
+`Ports: <https://docs.openstack.org/api-ref/network/v2/index.html#ports>`_
- create port
- update port
- delete port
-`Floating IPs: <https://developer.openstack.org/api-ref/networking/v2/index.html#floating-ips-floatingips>`_
+`Floating IPs: <https://docs.openstack.org/api-ref/network/v2/index.html#floating-ips-floatingips>`_
- create floating IP
- delete floating IP
-`Api Versions <https://developer.openstack.org/api-ref/network/v2/#api-versions>`_
+`Api Versions <https://docs.openstack.org/api-ref/network/v2/#api-versions>`_
- list version
- show version
@@ -82,7 +82,7 @@ The test area is structured in individual tests as listed below.
For detailed information on the individual steps and assertions performed
by the tests, review the Python source code accessible via the following links:
-All these test cases are included in the test case dovetail.tempest.network of
+All these test cases are included in the test case functest.tempest.network of
OVP test suite.
@@ -127,4 +127,4 @@ OVP test suite.
- tempest.api.network.test_subnetpools_extensions.SubnetPoolsTestJSON.test_create_list_show_update_delete_subnetpools
`API Version Resources test <https://github.com/openstack/tempest/blob/master/tempest/api/network/test_versions.py>`_
- - tempest.api.network.test_versions.NetworksApiDiscovery.test_api_version_resources \ No newline at end of file
+ - tempest.api.network.test_versions.NetworksApiDiscovery.test_api_version_resources
diff --git a/docs/testing/user/testspecification/tempest_network_scenario/index.rst b/docs/testing/user/testspecification/tempest_network_scenario/index.rst
index 6c172474..feee105d 100644
--- a/docs/testing/user/testspecification/tempest_network_scenario/index.rst
+++ b/docs/testing/user/testspecification/tempest_network_scenario/index.rst
@@ -58,7 +58,7 @@ test case is able to run independently, i.e. irrelevant of the state created by
a previous test. Specifically, every test performs clean-up operations which
return the system to the same state as before the test.
-All these test cases are included in the test case dovetail.tempest.network_scenario of
+All these test cases are included in the test case functest.tempest.network_scenario of
OVP test suite.
Test Descriptions
@@ -68,30 +68,30 @@ Test Descriptions
API Used and Reference
----------------------
-Security Groups: https://developer.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
+Security Groups: https://docs.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
- create security group
- delete security group
-Networks: https://developer.openstack.org/api-ref/networking/v2/index.html#networks
+Networks: https://docs.openstack.org/api-ref/network/v2/index.html#networks
- create network
- delete network
-Routers and interface: https://developer.openstack.org/api-ref/networking/v2/index.html#routers-routers
+Routers and interface: https://docs.openstack.org/api-ref/network/v2/index.html#routers-routers
- create router
- update router
- delete router
- add interface to router
-Subnets: https://developer.openstack.org/api-ref/networking/v2/index.html#subnets
+Subnets: https://docs.openstack.org/api-ref/network/v2/index.html#subnets
- create subnet
- update subnet
- delete subnet
-Servers: https://developer.openstack.org/api-ref/compute/
+Servers: https://docs.openstack.org/api-ref/compute/
- create keypair
- create server
@@ -99,13 +99,13 @@ Servers: https://developer.openstack.org/api-ref/compute/
- add/assign floating IP
- disassociate floating IP
-Ports: https://developer.openstack.org/api-ref/networking/v2/index.html#ports
+Ports: https://docs.openstack.org/api-ref/network/v2/index.html#ports
- create port
- update port
- delete port
-Floating IPs: https://developer.openstack.org/api-ref/networking/v2/index.html#floating-ips-floatingips
+Floating IPs: https://docs.openstack.org/api-ref/network/v2/index.html#floating-ips-floatingips
- create floating IP
- delete floating IP
@@ -149,7 +149,7 @@ Test execution
* **Test assertion 5:** Ping FIP1 and SSH to VM2 via FIP1 successfully
* Test action 8: Delete SG1, NET1, SUBNET1, R1, VM1, VM2 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of basic network operations.
@@ -220,7 +220,7 @@ Test execution
* **Test assertion 2:** Ping NET2's internal gateway successfully
* Test action 8: Delete SG1, NET1, NET2, SUBNET1, SUBNET2, R1, NIC2, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of adding network to an active VM.
@@ -278,7 +278,7 @@ Test execution
retrieve the VM1's configured dns and verify it has been successfully updated
* Test action 7: Delete SG1, NET1, SUBNET1, R1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the functionality of updating subnet's configurations.
@@ -336,7 +336,7 @@ Test execution
* **Test assertion 6:** Ping FIP1 via SSHCLNT1 successfully
* Test action 10: Delete SG1, NET1, SUBNET1, R1, SSHCLNT1, VM1, VM2 and FIP1, FIP2
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the VM public and project connectivity status by changing VM port
@@ -395,7 +395,7 @@ Test execution
* **Test assertion 3:** Ping FIP1 and SSH to VM1 with FIP1 successfully
* Test action 8: Delete SG1, NET1, SUBNET1, R1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the router public connectivity status by changing
diff --git a/docs/testing/user/testspecification/tempest_network_security/index.rst b/docs/testing/user/testspecification/tempest_network_security/index.rst
index 2a785289..6934be1f 100644
--- a/docs/testing/user/testspecification/tempest_network_security/index.rst
+++ b/docs/testing/user/testspecification/tempest_network_security/index.rst
@@ -53,7 +53,7 @@ port security. Each test case is able to run independently, i.e. irrelevant of
the state created by a previous test. Specifically, every test performs clean-up
operations which return the system to the same state as before the test.
-All these test cases are included in the test case dovetail.tempest.network_security of
+All these test cases are included in the test case functest.tempest.network_security of
OVP test suite.
Test Descriptions
@@ -63,12 +63,12 @@ Test Descriptions
API Used and Reference
----------------------
-Security Groups: https://developer.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
+Security Groups: https://docs.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
- create security group
- delete security group
-Networks: https://developer.openstack.org/api-ref/networking/v2/index.html#networks
+Networks: https://docs.openstack.org/api-ref/network/v2/index.html#networks
- create network
- delete network
@@ -76,27 +76,27 @@ Networks: https://developer.openstack.org/api-ref/networking/v2/index.html#netwo
- create floating ip
- delete floating ip
-Routers and interface: https://developer.openstack.org/api-ref/networking/v2/index.html#routers-routers
+Routers and interface: https://docs.openstack.org/api-ref/network/v2/index.html#routers-routers
- create router
- delete router
- list routers
- add interface to router
-Subnets: https://developer.openstack.org/api-ref/networking/v2/index.html#subnets
+Subnets: https://docs.openstack.org/api-ref/network/v2/index.html#subnets
- create subnet
- list subnets
- delete subnet
-Servers: https://developer.openstack.org/api-ref/compute/
+Servers: https://docs.openstack.org/api-ref/compute/
- create keypair
- create server
- delete server
- add/assign floating ip
-Ports: https://developer.openstack.org/api-ref/networking/v2/index.html#ports
+Ports: https://docs.openstack.org/api-ref/network/v2/index.html#ports
- update port
- list ports
@@ -145,7 +145,7 @@ Test execution
* **Test assertion 2:** The ping operation is successful
* Test action 16: Delete SG1, NET1, NET2, SUBNET1, SUBNET2, R1, VM1, VM2 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to prevent MAC spoofing by using port security.
@@ -206,7 +206,7 @@ Test execution
* Test action 14: Delete SG1, SG2, SG3, SG4, NET1, NET2, SUBNET1, SUBNET2, R1, R2,
VM1, VM2, FIP1 and FIP2
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability of the security group to filter packets cross tenant.
@@ -263,7 +263,7 @@ Test execution
* **Test assertion 2:** The ping operation is successful
* Test action 11: Delete SG1, SG2, NET1, SUBNET1, R1, VM1, VM2 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability of the security group to filter packets in one tenant.
@@ -319,7 +319,7 @@ Test execution
* **Test assertion 3:** Can SSH to VM1 successfully
* Test action 11: Delete SG1, SG2, NET1, SUBNET1, R1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability of multiple security groups to filter packets.
@@ -379,7 +379,7 @@ Test execution
* **Test assertion 2:** The ping operation is successful
* Test action 12: Delete SG1, SG2, NET1, SUBNET1, R1, VM1, VM2 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability of port security to disable security group.
@@ -436,7 +436,7 @@ Test execution
* **Test assertion 2:** The ping operation is successful
* Test action 13: Delete SG1, SG2, SG3, NET1, SUBNET1, R1, VM1, VM2 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to update port with a new security group.
diff --git a/docs/testing/user/testspecification/tempest_osinterop/index.rst b/docs/testing/user/testspecification/tempest_osinterop/index.rst
index 6773275e..d2a54e86 100644
--- a/docs/testing/user/testspecification/tempest_osinterop/index.rst
+++ b/docs/testing/user/testspecification/tempest_osinterop/index.rst
@@ -3,11 +3,11 @@
.. (c) Huawei Technologies Co.,Ltd and others
=============================================
-OpenStack Interoperability test specification
+OpenStack Interoperability Test Specification
=============================================
The test cases documented here are the API test cases in the OpenStack
-Interop guideline 2017.09 as implemented by the RefStack client.
+Interop guideline 2018.11 as implemented by the RefStack client.
References
================
@@ -16,9 +16,9 @@ References
- https://wiki.openstack.org/wiki/Governance/InteropWG
-- OpenStack Interoperability guidelines (version 2017.09)
+- OpenStack Interoperability guidelines (version 2018.11)
- - https://github.com/openstack/interop/blob/master/2017.09.json
+ - https://github.com/openstack/interop/blob/master/2018.11.json
- Refstack client
diff --git a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_compute.rst b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_compute.rst
index 601d1054..3e663d98 100644
--- a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_compute.rst
+++ b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_compute.rst
@@ -11,7 +11,7 @@ Scope
The VIM compute operations test area evaluates the ability of the system under
test to support VIM compute operations. The test cases documented here are the
-compute API test cases in the OpenStack Interop guideline 2017.09 as implemented
+compute API test cases in the OpenStack Interop guideline 2018.11 as implemented
by the RefStack client. These test cases will evaluate basic OpenStack (as a VIM)
compute operations, including:
@@ -22,7 +22,7 @@ compute operations, including:
- Basic server operations
- Volume management operations
-Definitions and abbreviations
+Definitions and Abbreviations
=============================
The following terms and abbreviations are used in conjunction with this test area
@@ -30,14 +30,15 @@ The following terms and abbreviations are used in conjunction with this test are
- API - Application Programming Interface
- NFVi - Network Functions Virtualization infrastructure
- SUT - System Under Test
-- UUID - Universally Unique Identifier
+- UUID - Universally Unique IDentifier
- VIM - Virtual Infrastructure Manager
- VM - Virtual Machine
System Under Test (SUT)
=======================
-The system under test is assumed to be the NFVi and VIM deployed with a Pharos compliant infrastructure.
+The system under test is assumed to be the NFVi and VIM deployed with a Pharos
+compliant infrastructure.
Test Area Structure
====================
@@ -50,7 +51,7 @@ the same state as before the test.
For brevity, the test cases in this test area are summarized together based on
the operations they are testing.
-All these test cases are included in the test case dovetail.tempest.osinterop of
+All these test cases are included in the test case functest.tempest.osinterop of
OVP test suite.
Test Descriptions
@@ -60,7 +61,7 @@ Test Descriptions
API Used and Reference
----------------------
-Servers: https://developer.openstack.org/api-ref/compute/
+Servers: https://docs.openstack.org/api-ref/compute/
- create server
- delete server
@@ -79,7 +80,7 @@ Servers: https://developer.openstack.org/api-ref/compute/
- create keypair
- delete keypair
-Block storage: https://developer.openstack.org/api-ref/block-storage
+Block storage: https://docs.openstack.org/api-ref/block-storage/
- create volume
- delete volume
@@ -759,3 +760,48 @@ Post conditions
---------------
N/A
+
+--------------------------------------------------------
+Test Case 10 - Keypair operations within the Compute API
+--------------------------------------------------------
+
+Test case specification
+-----------------------
+
+This test case evaluates the Compute API ability of creating keypair with type,
+the reference is,
+
+tempest.api.compute.keypairs.test_keypairs_v22.KeyPairsV22TestJSON.test_keypairsv22_create_list_show_with_type
+
+Test preconditions
+------------------
+
+* Compute server extension API
+
+Basic test flow execution description and pass/fail criteria
+------------------------------------------------------------
+
+Test execution
+''''''''''''''
+
+* Test action 1: Create a keypair with type 'x509' and a random name
+* **Test assertion 1:** The keypair type received in the response body is equal to 'x509'
+* Test action 2: Show the details of this created keypair
+* **Test assertion 2:** The keypair type received in the response body is equal to 'x509'
+* Test action 3: List all keypairs and find the one with the same name as given in test action 1
+* **Test assertion 3:** The keypair type of this keypair is equal to 'x509'
+
+Pass / fail criteria
+''''''''''''''''''''
+
+This test evaluates the functionality of keypair operations within the Compute API.
+Specifically, the test verifies that:
+
+* Can create keypair by specifying keypair type.
+
+In order to pass this test, all test assertions listed in the test execution above need to pass.
+
+Post conditions
+---------------
+
+N/A
diff --git a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_identity.rst b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_identity.rst
index 6c0d23b7..34a71168 100644
--- a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_identity.rst
+++ b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_identity.rst
@@ -14,7 +14,7 @@ support VIM identity operations. The tests in this area will evaluate
API discovery operations within the Identity v3 API, auth operations within
the Identity API.
-Definitions and abbreviations
+Definitions and Abbreviations
=============================
The following terms and abbreviations are used in conjunction with this test area
@@ -34,7 +34,7 @@ Test Area Structure
The test area is structured based on VIM identity operations. Each test case
is able to run independently, i.e. irrelevant of the state created by a previous test.
-All these test cases are included in the test case dovetail.tempest.osinterop of
+All these test cases are included in the test case functest.tempest.osinterop of
OVP test suite.
Dependency Description
@@ -42,15 +42,15 @@ Dependency Description
The VIM identity operations test cases are a part of the OpenStack
interoperability tempest test cases. For Fraser based dovetail release, the
-OpenStack interoperability guidelines (version 2017.09) is adopted, which is
+OpenStack interoperability guidelines (version 2018.11) is adopted, which is
valid for Mitaka, Newton, Ocata and Pike releases of Openstack.
Test Descriptions
=================
-----------------------------------------------------
-API discovery operations within the Identity v3 API
-----------------------------------------------------
+-----------------------------------------------------------------
+Test Case 1 - API discovery operations within the Identity v3 API
+-----------------------------------------------------------------
Use case specification
-----------------------
@@ -79,7 +79,7 @@ Test execution
'stable', 'experimental', 'supported', 'deprecated' are all of the identity api 'status'
values.
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test case passes if all test action steps execute successfully and all assertions
@@ -91,14 +91,15 @@ Post conditions
None
-------------------------------------------
-Auth operations within the Identity API
-------------------------------------------
+-----------------------------------------------------
+Test Case 2 - Auth operations within the Identity API
+-----------------------------------------------------
Use case specification
-----------------------
tempest.api.identity.v3.test_tokens.TokensV3Test.test_create_token
+tempest.api.identity.v3.test_tokens.TokensV3Test.test_validate_token
Test preconditions
-------------------
@@ -119,8 +120,16 @@ Test execution
passes if it is equal to the user_name which is used to get token.
* Test action 4: Get the method in getting token response message, the test
passes if it is equal to the password which is used to get token.
+* Test action 5: Get the token by system credentials and show the token,
+ the test passes if the response bodies of the get and show operations are the same.
+* Test action 6: Get the user_id in showing token response message, the test
+ passes if it is equal to the user_id which is used to get token.
+* Test action 7: Get the username in showing token response message, the test
+ passes if it is equal to the username which is used to get token.
+* Test action 8: Delete this token by non-admin compute client, the test passes
+ if it raises a NotFound exception.
-Pass / fail criteria
+Pass / Fail criteria
'''''''''''''''''''''
This test case passes if all test action steps execute successfully and all assertions
@@ -132,3 +141,39 @@ Post conditions
None
+--------------------------------------------------------
+Test Case 3 - Catalog operations within the Identity API
+--------------------------------------------------------
+
+Use case specification
+-----------------------
+
+tempest.api.identity.v3.test_catalog.IdentityCatalogTest.test_catalog_standardization
+
+Test preconditions
+-------------------
+
+None
+
+Basic test flow execution description and pass/fail criteria
+------------------------------------------------------------
+
+Test execution
+'''''''''''''''
+
+* Test action 1: Show all catalogs by non-admin catalog client, the test passes
+ if the catalog types getting in the show response message equal to the
+ standard service values. Standard catalog types of 'keystone', 'nova', 'glance' and
+ 'swift' should be 'identity', 'compute', 'image' and 'object-store' respectively.
+
+Pass / Fail criteria
+'''''''''''''''''''''
+
+This test case passes if all test action steps execute successfully and all assertions
+are affirmed. If any test steps fails to execute successfully or any of the assertions
+is not met, the test case fails.
+
+Post conditions
+---------------
+
+None
diff --git a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_image.rst b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_image.rst
index 96a98631..b6cdb77f 100644
--- a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_image.rst
+++ b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_image.rst
@@ -11,11 +11,11 @@ Scope
The VIM image test area evaluates the ability of the system under test to support
VIM image operations. The test cases documented here are the Image API test cases
-in the Openstack Interop guideline 2017.09 as implemented by the Refstack client.
+in the Openstack Interop guideline 2018.11 as implemented by the Refstack client.
These test cases will evaluate basic Openstack (as a VIM) image operations including
image creation, image list, image update and image deletion capabilities using Glance v2 API.
-Definitions and abbreviations
+Definitions and Abbreviations
=============================
The following terms and abbreviations are used in conjunction with this test area
@@ -40,7 +40,7 @@ to run independently, i.e. irrelevant of the state created by a previous test.
For brevity, the test cases in this test area are summarized together based on
the operations they are testing.
-All these test cases are included in the test case dovetail.tempest.osinterop of
+All these test cases are included in the test case functest.tempest.osinterop of
OVP test suite.
Test Descriptions
@@ -50,7 +50,7 @@ Test Descriptions
API Used and Reference
----------------------
-Images: https://developer.openstack.org/api-ref/image/v2/
+Images: https://docs.openstack.org/api-ref/image/v2/
- create image
- delete image
@@ -62,9 +62,9 @@ Images: https://developer.openstack.org/api-ref/image/v2/
- add image tag
- delete image tag
----------------------------------------
-Image get tests using the Glance v2 API
----------------------------------------
+-----------------------------------------------------
+Test Case 1 - Image get tests using the Glance v2 API
+-----------------------------------------------------
Test case specification
-----------------------
@@ -104,7 +104,7 @@ Test execution
whether the 6 images' ids are not in the show list.
* **Test assertion 6:** The 6 images' ids are not found in the show list.
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
The first two test cases evaluate the ability to use Glance v2 API to show image
@@ -124,9 +124,9 @@ Post conditions
None
---------------------------------------
-CRUD image operations in Images API v2
---------------------------------------
+----------------------------------------------------
+Test Case 2 - CRUD image operations in Images API v2
+----------------------------------------------------
Test case specification
-----------------------
@@ -148,7 +148,7 @@ Test execution
* Test action 2: List all images and check whether the ids listed are in the created images list.
* **Test assertion 1:** The ids get from the list images API are in the created images list.
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the ability to use Glance v2 API to list images.
@@ -163,9 +163,9 @@ Post conditions
None
-----------------------------------------
-Image list tests using the Glance v2 API
-----------------------------------------
+------------------------------------------------------
+Test Case 3 - Image list tests using the Glance v2 API
+------------------------------------------------------
Test case specification
-----------------------
@@ -224,7 +224,7 @@ Test execution
the 6 ids are not in the show list.
* **Test assertion 8:** The stored 6 ids are not found in the show list.
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the ability to use Glance v2 API to list images with
@@ -245,9 +245,9 @@ Post conditions
None
-------------------------------------------
-Image update tests using the Glance v2 API
-------------------------------------------
+--------------------------------------------------------
+Test Case 4 - Image update tests using the Glance v2 API
+--------------------------------------------------------
Test case specification
-----------------------
@@ -291,7 +291,7 @@ Test execution
and check whether the ids are not in the show list.
* **Test assertion 6:** The two ids are not found in the show list.
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the ability to use Glance v2 API to update images with
@@ -308,9 +308,9 @@ Post conditions
None
---------------------------------------------
-Image deletion tests using the Glance v2 API
---------------------------------------------
+----------------------------------------------------------
+Test Case 5 - Image deletion tests using the Glance v2 API
+----------------------------------------------------------
Test case specification
-----------------------
@@ -349,7 +349,7 @@ Test execution
and check whether the ids are in the list.
* **Test assertion 5:** The two ids are not found in the list.
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
The first three test cases evaluate the ability to use Glance v2 API to delete images
diff --git a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_network.rst b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_network.rst
index a21b303c..d8e7413b 100644
--- a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_network.rst
+++ b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_network.rst
@@ -11,11 +11,11 @@ Scope
The VIM network test area evaluates the ability of the system under test to support
VIM network operations. The test cases documented here are the network API test cases
-in the Openstack Interop guideline 2017.09 as implemented by the Refstack client.
+in the Openstack Interop guideline 2018.11 as implemented by the Refstack client.
These test cases will evaluate basic Openstack (as a VIM) network operations including
basic CRUD operations on L2 networks, L2 network ports and security groups.
-Definitions and abbreviations
+Definitions and Abbreviations
=============================
The following terms and abbreviations are used in conjunction with this test area
@@ -42,7 +42,7 @@ the same state as before the test.
For brevity, the test cases in this test area are summarized together based on
the operations they are testing.
-All these test cases are included in the test case dovetail.tempest.osinterop of
+All these test cases are included in the test case functest.tempest.osinterop of
OVP test suite.
Test Descriptions
@@ -52,7 +52,7 @@ Test Descriptions
API Used and Reference
----------------------
-Network: http://developer.openstack.org/api-ref/networking/v2/index.html
+Network: https://docs.openstack.org/api-ref/network/v2/index.html
- create network
- update network
@@ -84,9 +84,9 @@ Network: http://developer.openstack.org/api-ref/networking/v2/index.html
- show security group rule
- delete security group rule
----------------------------------------------------------
-Basic CRUD operations on L2 networks and L2 network ports
----------------------------------------------------------
+-----------------------------------------------------------------------
+Test Case 1 - Basic CRUD operations on L2 networks and L2 network ports
+-----------------------------------------------------------------------
Test case specification
-----------------------
@@ -100,6 +100,7 @@ tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_without
tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_all_attributes
tempest.api.network.test_networks.NetworksTest.test_create_update_delete_network_subnet
tempest.api.network.test_networks.NetworksTest.test_delete_network_with_subnet
+tempest.api.network.test_networks.NetworksTest.test_external_network_visibility
tempest.api.network.test_networks.NetworksTest.test_list_networks
tempest.api.network.test_networks.NetworksTest.test_list_networks_fields
tempest.api.network.test_networks.NetworksTest.test_list_subnets
@@ -114,6 +115,7 @@ tempest.api.network.test_ports.PortsTestJSON.test_create_port_in_allowed_allocat
tempest.api.network.test_ports.PortsTestJSON.test_create_update_delete_port
tempest.api.network.test_ports.PortsTestJSON.test_list_ports
tempest.api.network.test_ports.PortsTestJSON.test_list_ports_fields
+tempest.api.network.test_ports.PortsTestJSON.test_port_list_filter_by_router_id
tempest.api.network.test_ports.PortsTestJSON.test_show_port
tempest.api.network.test_ports.PortsTestJSON.test_show_port_fields
@@ -163,55 +165,64 @@ Test execution
found after deletion
* Test action 9: Create a network and create a subnet of this network, then delete this network
* **Test assertion 9:** The subnet has also been deleted after deleting the network
-* Test action 10: Create a network and list all networks
-* **Test assertion 10:** The network created is found in the list
-* Test action 11: Create a network and list networks with the id and name of the created network
-* **Test assertion 11:** The id and name of the list network equal to the created network's id and name
-* Test action 12: Create a network and create a subnet of this network, then list all subnets
-* **Test assertion 12:** The subnet created is found in the list
-* Test action 13: Create a network and create a subnet of this network, then list subnets with
+* Test action 10: List all external networks, find the one with the same public_network_id
+ as defined in tempest.conf and list its subnets
+* **Test assertion 10:** The external network can be found, no internal network got by the
+ list operation, if this external network is shared, the subnets list is not empty,
+ otherwise, it should be empty
+* Test action 11: Create a network and list all networks
+* **Test assertion 11:** The network created is found in the list
+* Test action 12: Create a network and list networks with the id and name of the created network
+* **Test assertion 12:** The id and name of the list network equal to the created network's id and name
+* Test action 13: Create a network and create a subnet of this network, then list all subnets
+* **Test assertion 13:** The subnet created is found in the list
+* Test action 14: Create a network and create a subnet of this network, then list subnets with
the id and network_id of the created subnet
-* **Test assertion 13:** The id and network_id of the list subnet equal to the created subnet
-* Test action 14: Create a network and show network's details with the id of the created network
-* **Test assertion 14:** The id and name returned in the response equal to the created network's id and name
-* Test action 15: Create a network and just show network's id and name info with the id of the created network
-* **Test assertion 15:** The keys returned in the response are only id and name, and the values
+* **Test assertion 14:** The id and network_id of the list subnet equal to the created subnet
+* Test action 15: Create a network and show network's details with the id of the created network
+* **Test assertion 15:** The id and name returned in the response equal to the created network's id and name
+* Test action 16: Create a network and just show network's id and name info with the id of the created network
+* **Test assertion 16:** The keys returned in the response are only id and name, and the values
of all the keys equal to network's id and name
-* Test action 16: Create a network and create a subnet of this network, then show subnet's details
+* Test action 17: Create a network and create a subnet of this network, then show subnet's details
with the id of the created subnet
-* **Test assertion 16:** The id and cidr info returned in the response equal to the created
+* **Test assertion 17:** The id and cidr info returned in the response equal to the created
subnet's id and cidr
-* Test action 17: Create a network and create a subnet of this network, then show subnet's id and
+* Test action 18: Create a network and create a subnet of this network, then show subnet's id and
network_id info with the id of the created subnet
-* **Test assertion 17:** The keys returned in the response are just id and network_id, and the values
+* **Test assertion 18:** The keys returned in the response are just id and network_id, and the values
of all the keys equal to subnet's id and network_id
-* Test action 18: Create a network and create a subnet of this network, then update subnet's
+* Test action 19: Create a network and create a subnet of this network, then update subnet's
name, host_routes, dns_nameservers and gateway_ip
-* **Test assertion 18:** The name, host_routes, dns_nameservers and gateway_ip returned in the
+* **Test assertion 19:** The name, host_routes, dns_nameservers and gateway_ip returned in the
response equal to the values used to update the subnet
-* Test action 19: Create 2 networks and bulk create 2 ports with the ids of the created networks
-* **Test assertion 19:** The network_id of each port equals to the one used to create the port and
+* Test action 20: Create 2 networks and bulk create 2 ports with the ids of the created networks
+* **Test assertion 20:** The network_id of each port equals to the one used to create the port and
the admin_state_up of each port is True
-* Test action 20: Create a network and create a subnet of this network by setting allocation_pools,
+* Test action 21: Create a network and create a subnet of this network by setting allocation_pools,
then create a port with the created network's id
-* **Test assertion 20:** The ip_address of the created port is in the range of the allocation_pools
-* Test action 21: Create a network and create a port with its id, then update the port's name and
+* **Test assertion 21:** The ip_address of the created port is in the range of the allocation_pools
+* Test action 22: Create a network and create a port with its id, then update the port's name and
set its admin_state_up to be False
-* **Test assertion 21:** The name returned in the response equals to the name used to update
+* **Test assertion 22:** The name returned in the response equals to the name used to update
the port and the port's admin_state_up is False
-* Test action 22: Create a network and create a port with its id, then list all ports
-* **Test assertion 22:** The created port is found in the list
-* Test action 23: Create a network and create a port with its id, then list ports with the id
- and mac_address of the created port
+* Test action 23: Create a network and create a port with its id, then list all ports
* **Test assertion 23:** The created port is found in the list
-* Test action 24: Create a network and create a port with its id, then show the port's details
-* **Test assertion 24:** The key 'id' is in the details
-* Test action 25: Create a network and create a port with its id, then show the port's id
+* Test action 24: Create a network and create a port with its id, then list ports with the id
+ and mac_address of the created port
+* **Test assertion 24:** The created port is found in the list
+* Test action 25: Create a network and create a subnet, port with its id, create a router
+ and add this port as this router's interface, then list ports with this router id
+* **Test assertion 25:** The number of the ports list is 1, the port id and device_id
+ getting with the list operation are the same as the ones got when creating them
+* Test action 26: Create a network and create a port with its id, then show the port's details
+* **Test assertion 26:** The key 'id' is in the details
+* Test action 27: Create a network and create a port with its id, then show the port's id
and mac_address info with the port's id
-* **Test assertion 25:** The keys returned in the response are just id and mac_address,
+* **Test assertion 27:** The keys returned in the response are just id and mac_address,
and the values of all the keys equal to port's id and mac_address
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
These test cases evaluate the ability of basic CRUD operations on L2 networks and L2 network ports.
@@ -232,9 +243,9 @@ Post conditions
N/A
-----------------------------------------
-Basic CRUD operations on security groups
-----------------------------------------
+------------------------------------------------------
+Test Case 2 - Basic CRUD operations on security groups
+------------------------------------------------------
Test case specification
-----------------------
@@ -321,7 +332,7 @@ Test execution
* Test action 19: Generate a random uuid and use this id to show security group rule
* **Test assertion 19:** Failed to show security group rule because of nonexistent id of security group rule
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
These test cases evaluate the ability of Basic CRUD operations on security groups and security group rules.
@@ -342,9 +353,9 @@ Post conditions
N/A
--------------------------------
-CRUD operations on subnet pools
--------------------------------
+---------------------------------------------
+Test Case 3 - CRUD operations on subnet pools
+---------------------------------------------
Test case specification
-----------------------
@@ -371,7 +382,7 @@ Test execution
* Test action 4: Delete SNP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
These test cases evaluate the ability of Basic CRUD operations on subnetpools.
@@ -385,3 +396,131 @@ Post conditions
---------------
N/A
+
+----------------------------------------
+Test Case 4 - CRUD operations on routers
+----------------------------------------
+
+Test case specification
+-----------------------
+
+tempest.api.network.test_routers.RoutersTest.test_add_multiple_router_interfaces
+tempest.api.network.test_routers.RoutersTest.test_add_remove_router_interface_with_port_id
+tempest.api.network.test_routers.RoutersTest.test_add_remove_router_interface_with_subnet_id
+tempest.api.network.test_routers.RoutersTest.test_create_show_list_update_delete_router
+tempest.api.network.test_routers.RoutersTest.test_update_delete_extra_route
+tempest.api.network.test_routers.RoutersTest.test_update_router_admin_state
+tempest.api.network.test_routers_negative.RoutersNegativeTest.test_add_router_interfaces_on_overlapping_subnets_returns_400
+tempest.api.network.test_routers_negative.RoutersNegativeTest.test_delete_non_existent_router_returns_404
+tempest.api.network.test_routers_negative.RoutersNegativeTest.test_router_add_gateway_invalid_network_returns_404
+tempest.api.network.test_routers_negative.RoutersNegativeTest.test_router_add_gateway_net_not_external_returns_400
+tempest.api.network.test_routers_negative.RoutersNegativeTest.test_router_remove_interface_in_use_returns_409
+tempest.api.network.test_routers_negative.RoutersNegativeTest.test_show_non_existent_router_returns_404
+tempest.api.network.test_routers_negative.RoutersNegativeTest.test_update_non_existent_router_returns_404
+
+Test preconditions
+------------------
+
+Neutron is available.
+
+Basic test flow execution description and pass/fail criteria
+------------------------------------------------------------
+
+Test execution
+''''''''''''''
+
+* Test action 1: Create 2 networks NET1 and NET2, create SubNet1 of NET1 and SubNet2 with cidr of NET2,
+ create a router R1, add SubNet1 and SubNet2 to be R1's interfaces and get port details
+* **Test assertion 1:** The device_id and subnet_id of the port equals to the router id
+ and subnet id getting from the creating response body
+* Test action 2: Create a network NET1, create SubNet1 of NET1, create P1 of NET1, create a router R1,
+ add P1 to be its interface, store the response body B1, show P1 and store the response body B2 and
+ remove the interface with port id
+* **Test assertion 2:** B1 includes keys 'subnet_id' and 'port_id', 'device_id' of B2 equals to router id
+* Test action 3: Create a network NET1, create SubNet1 of NET1, create a router R1, add SubNet1 to be its
+ interface, store the response body B1, show port details with 'port_id' in B1
+* **Test assertion 3:** B1 includes keys 'subnet_id' and 'port_id', 'device_id' equals to router id
+* Test action 4: Create a router R1 with name, admin_state_up False and external_network_id Ext-Net,
+ store the request body B1, show R1 with 'id' in B1, list all routers, update R1's name
+* **Test assertion 4:** 'name', 'admin_state_up' and 'network_id' in B1 equal to the name, False and Ext-Net,
+ 'name' in show details equals to 'name' in B1, 'network_id' in show details equals to Ext-Net,
+ R1 is in the router list, the 'name' has been updated
+* Test action 5: Create a router R1 with admin_state_up=True, create 4 networks and 4 subnets with different
+ cidr, add these 4 subnets to R1 as its interfaces, update R1 by given routes and show R1, delete extra
+ route of R1 and then show R1
+* **Test assertion 5:** The number of routes queals to 4 and routes is empty after deletion
+* Test action 6: Create a router R1, update 'admin_state_up' of R1 to be True
+* **Test assertion 6:** 'admin_state_up' of R1 is False, 'admin_state_up' of R1 is True after updating
+* Test action 7: Create 2 networks NET1 and NET2, create SubNet1 of NET1 and SubNet2 of NET2,
+ create a router R1, add SubNet1 to be R1's interface and then trying to add SubNet2 to be R1's interface
+* **Test assertion 7:** It raises a BadRequest exception when trying to add SubNet2
+* Test action 8: Try to delete router with a random name
+* **Test assertion 8:** It raises a NotFound exception when trying delete operation
+* Test action 9: Create a router R1 and try to update it with wrong external_gateway_info
+* **Test assertion 9:** It raises a NotFound exception when trying to update R1
+* Test action 10: Create an internal network NET1, create SubNet1 with cidr of NET1,
+ create a router R1 and try to update it with NET1 as external_gateway_info
+* **Test assertion 10:** It raises a BadRequest exception when trying to update R1
+* Test action 11: Create a network NET1, create SubNet1 of NET1, create a router R1, add SubNet1 to
+ the interface of R1 and try to delete R1
+* **Test assertion 11:** It raises a Conflict exception when trying to delete R1
+* Test action 12: Try to show router with a random name
+* **Test assertion 12:** It raises a NotFound exception when trying to show router
+* Test action 13: Try to update router with a random name
+* **Test assertion 13:** It raises a NotFound exception when trying to update router
+
+
+Pass / Fail criteria
+''''''''''''''''''''
+
+These test cases evaluate the ability of Basic CRUD operations on routers.
+Specifically it verifies that:
+
+* Routers can be created, updated, shown and deleted.
+* Can not show, update and delete non existent router.
+* Can not remove interface in use.
+
+In order to pass this test, all test assertions listed in the test execution above need to pass.
+
+Post conditions
+---------------
+
+N/A
+
+----------------------------------------------
+Test Case 5 - List versions within Network API
+----------------------------------------------
+
+Test case specification
+-----------------------
+
+tempest.api.network.test_versions.NetworksApiDiscovery.test_api_version_resources
+
+Test preconditions
+------------------
+
+Neutron is available.
+
+Basic test flow execution description and pass/fail criteria
+------------------------------------------------------------
+
+Test execution
+''''''''''''''
+
+* Test action 1: List network API versions
+* **Test assertion 1:** The network API version is 'v2.0'
+
+Pass / Fail criteria
+''''''''''''''''''''
+
+This test case evaluates the ability of listing network API versions.
+Specifically it verifies that:
+
+* The network API version is 'v2.0'.
+
+In order to pass this test, all test assertions listed in the test execution above need to pass.
+
+Post conditions
+---------------
+
+N/A
diff --git a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_volume.rst b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_volume.rst
index 097123aa..263e87cc 100644
--- a/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_volume.rst
+++ b/docs/testing/user/testspecification/tempest_osinterop/tempest_osinterop_volume.rst
@@ -11,7 +11,7 @@ Scope
The VIM volume operations test area evaluates the ability of the system under
test to support VIM volume operations. The test cases documented here are the
-volume API test cases in the OpenStack Interop guideline 2017.09 as implemented
+volume API test cases in the OpenStack Interop guideline 2018.11 as implemented
by the RefStack client. These test cases will evaluate basic OpenStack (as a VIM)
volume operations, including:
@@ -24,7 +24,7 @@ volume operations, including:
- Volume metadata operations
- Volume snapshot operations
-Definitions and abbreviations
+Definitions and Abbreviations
=============================
The following terms and abbreviations are used in conjunction with this test area
@@ -51,7 +51,7 @@ the same state as before the test.
For brevity, the test cases in this test area are summarized together based on
the operations they are testing.
-All these test cases are included in the test case dovetail.tempest.osinterop of
+All these test cases are included in the test case functest.tempest.osinterop of
OVP test suite.
Test Descriptions
@@ -61,7 +61,7 @@ Test Descriptions
API Used and Reference
----------------------
-Block storage: https://developer.openstack.org/api-ref/block-storage
+Block storage: https://docs.openstack.org/api-ref/block-storage/
- create volume
- delete volume
@@ -77,49 +77,8 @@ Block storage: https://developer.openstack.org/api-ref/block-storage
- update snapshot
- delete snapshot
------------------------------------------------------
-Test Case 1 - Upload volumes with Cinder v2 or v3 API
------------------------------------------------------
-
-Test case specification
------------------------
-
-tempest.api.volume.test_volumes_actions.VolumesActionsTest.test_volume_upload
-
-Test preconditions
-------------------
-
-* Volume extension API
-
-Basic test flow execution description and pass/fail criteria
-------------------------------------------------------------
-
-Test execution
-''''''''''''''
-* Test action 1: Create a volume VOL1
-* Test action 2: Convert VOL1 and upload image IMG1 to the Glance
-* Test action 3: Wait until the status of IMG1 is 'ACTIVE' and VOL1 is 'available'
-* Test action 4: Show the details of IMG1
-* **Test assertion 1:** The name of IMG1 shown is the same as the name used to upload it
-* **Test assertion 2:** The disk_format of IMG1 is the same as the disk_format of VOL1
-
-Pass / fail criteria
-''''''''''''''''''''
-
-This test case evaluates the volume API ability of uploading images.
-Specifically, the test verifies that:
-
-* The Volume can convert volumes and upload images.
-
-In order to pass this test, all test assertions listed in the test execution above need to pass.
-
-Post conditions
----------------
-
-N/A
-
--------------------------------------------------------------------------------------
-Test Case 2 - Volume service availability zone operations with the Cinder v2 or v3 API
+Test Case 1 - Volume service availability zone operations with the Cinder v2 or v3 API
--------------------------------------------------------------------------------------
Test case specification
@@ -140,7 +99,7 @@ Test execution
* Test action 1: List all existent availability zones
* **Test assertion 1:** Verify the availability zone list length is greater than 0
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of listing availability zones.
@@ -156,7 +115,7 @@ Post conditions
N/A
--------------------------------------------------------------------
-Test Case 3 - Volume cloning operations with the Cinder v2 or v3 API
+Test Case 2 - Volume cloning operations with the Cinder v2 or v3 API
--------------------------------------------------------------------
Test case specification
@@ -190,7 +149,7 @@ Test execution
* Test action 7: Update the name of VOL3 and description with the original value
* **Test assertion 6:** Verify VOL3's bootable flag is 'False'
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of creating a cloned volume from a source volume,
@@ -210,7 +169,7 @@ Post conditions
N/A
--------------------------------------------------------------------------
-Test Case 4 - Image copy-to-volume operations with the Cinder v2 or v3 API
+Test Case 3 - Image copy-to-volume operations with the Cinder v2 or v3 API
--------------------------------------------------------------------------
Test case specification
@@ -249,7 +208,7 @@ Test execution
* Test action 11: Update the name of VOL3 and description with the original value
* **Test assertion 8:** Verify VOL3's bootable flag is 'True'
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of updating volume's bootable flag and creating
@@ -270,7 +229,7 @@ Post conditions
N/A
----------------------------------------------------------------------------------
-Test Case 5 - Volume creation and deletion operations with the Cinder v2 or v3 API
+Test Case 4 - Volume creation and deletion operations with the Cinder v2 or v3 API
----------------------------------------------------------------------------------
Test case specification
@@ -320,7 +279,7 @@ Test execution
* Test action 13: Create a volume with volume size '0'
* **Test assertion 12:** Verify create volume failed, a bad request error is returned in the response
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of creating a volume, getting volume
@@ -343,7 +302,7 @@ Post conditions
N/A
--------------------------------------------------------------------------------------
-Test Case 6 - Volume service extension listing operations with the Cinder v2 or v3 API
+Test Case 5 - Volume service extension listing operations with the Cinder v2 or v3 API
--------------------------------------------------------------------------------------
Test case specification
@@ -366,7 +325,7 @@ Test execution
* Test action 1: List all cinder service extensions
* **Test assertion 1:** Verify all extensions are list in the extension list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of listing all existent volume service extensions.
@@ -381,7 +340,7 @@ Post conditions
N/A
----------------------------------------------------------------
-Test Case 7 - Volume GET operations with the Cinder v2 or v3 API
+Test Case 6 - Volume GET operations with the Cinder v2 or v3 API
----------------------------------------------------------------
Test case specification
@@ -409,7 +368,7 @@ Test execution
* Test action 3: Retrieve a volume with a nonexistent volume ID
* **Test assertion 3:** Verify retrieve volume failed, a 'Not Found' error is returned in the response
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of getting volumes.
@@ -425,7 +384,7 @@ Post conditions
N/A
--------------------------------------------------------------------
-Test Case 8 - Volume listing operations with the Cinder v2 or v3 API
+Test Case 7 - Volume listing operations with the Cinder v2 or v3 API
--------------------------------------------------------------------
Test case specification
@@ -510,7 +469,7 @@ Test execution
* Test action 22: List all existent volumes and paginate the volume list by desired volume IDs
* **Test assertion 24:** Verify only the desired volumes are listed in the filtered volume list
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of getting a list of volumes and filtering the volume list.
@@ -529,7 +488,7 @@ Post conditions
N/A
---------------------------------------------------------------------
-Test Case 9 - Volume metadata operations with the Cinder v2 or v3 API
+Test Case 8 - Volume metadata operations with the Cinder v2 or v3 API
---------------------------------------------------------------------
Test case specification
@@ -561,7 +520,7 @@ Test execution
* Test action 6: Update one metadata item 'key3' of VOL2
* **Test assertion 5:** Verify the metadata of VOL2 is updated
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of creating metadata for a volume, getting the
@@ -580,9 +539,9 @@ Post conditions
N/A
----------------------------------------------------------------------------------------
-Test Case 10 - Verification of read-only status on volumes with the Cinder v2 or v3 API
----------------------------------------------------------------------------------------
+--------------------------------------------------------------------------------------
+Test Case 9 - Verification of read-only status on volumes with the Cinder v2 or v3 API
+--------------------------------------------------------------------------------------
Test case specification
-----------------------
@@ -605,7 +564,7 @@ Test execution
* Test action 2: Update a provided volume VOL1's read-only access mode to 'False'
* **Test assertion 2:** Verify VOL1 is not in read-only access mode
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of setting and updating volume read-only access mode.
@@ -620,61 +579,8 @@ Post conditions
N/A
--------------------------------------------------------------------------
-Test Case 11 - Volume reservation operations with the Cinder v2 or v3 API
--------------------------------------------------------------------------
-
-Test case specification
------------------------
-
-tempest.api.volume.test_volumes_actions.VolumesActionsTest.test_reserve_unreserve_volume
-tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_reserve_volume_with_negative_volume_status
-tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_reserve_volume_with_nonexistent_volume_id
-tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_unreserve_volume_with_nonexistent_volume_id
-
-Test preconditions
-------------------
-
-* Volume extension API
-
-Basic test flow execution description and pass/fail criteria
-------------------------------------------------------------
-
-Test execution
-''''''''''''''
-
-* Test action 1: Update a provided volume VOL1 as reserved
-* **Test assertion 1:** Verify VOL1 is in 'attaching' status
-* Test action 2: Update VOL1 as un-reserved
-* **Test assertion 2:** Verify VOL1 is in 'available' status
-* Test action 3: Update a provided volume VOL2 as reserved
-* Test action 4: Update VOL2 as reserved again
-* **Test assertion 3:** Verify update VOL2 status failed, a bad request error is returned in the response
-* Test action 5: Update VOL2 as un-reserved
-* Test action 6: Update a non-existent volume as reserved by using an invalid volume ID
-* **Test assertion 4:** Verify update non-existent volume as reserved failed, a 'Not Found' error is returned in the response
-* Test action 7: Update a non-existent volume as un-reserved by using an invalid volume ID
-* **Test assertion 5:** Verify update non-existent volume as un-reserved failed, a 'Not Found' error is returned in the response
-
-Pass / fail criteria
-''''''''''''''''''''
-
-This test case evaluates the volume API ability of reserving and un-reserving volumes.
-Specifically, the test verifies that:
-
-* Volume can be reserved and un-reserved.
-* Update a non-existent volume as reserved is not allowed.
-* Update a non-existent volume as un-reserved is not allowed.
-
-In order to pass this test, all test assertions listed in the test execution above need to pass.
-
-Post conditions
----------------
-
-N/A
-
----------------------------------------------------------------------------------------
-Test Case 12 - Volume snapshot creation/deletion operations with the Cinder v2 or v3 API
+Test Case 10 - Volume snapshot creation/deletion operations with the Cinder v2 or v3 API
----------------------------------------------------------------------------------------
Test case specification
@@ -753,7 +659,7 @@ Test execution
* Test action 27: Create a snapshot from a volume by using an empty volume ID
* **Test assertion 21:** Verify create snapshot failed, a 'Not Found' error is returned in the response
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of managing snapshot and snapshot metadata.
@@ -784,7 +690,7 @@ Post conditions
N/A
--------------------------------------------------------------------
-Test Case 13 - Volume update operations with the Cinder v2 or v3 API
+Test Case 11 - Volume update operations with the Cinder v2 or v3 API
--------------------------------------------------------------------
Test case specification
@@ -812,7 +718,7 @@ Test execution
* Test action 3: Update a non-existent volume by using a random generated volume ID
* **Test assertion 3:** Verify update volume failed, a 'Not Found' error is returned in the response
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test case evaluates the volume API ability of updating volume attributes.
@@ -828,3 +734,41 @@ Post conditions
---------------
N/A
+
+-------------------------------------------------------------------
+Test Case 12 - Volume list version operation with the Cinder v3 API
+-------------------------------------------------------------------
+
+Test case specification
+-----------------------
+
+tempest.api.volume.test_versions.VersionsTest.test_list_versions
+
+Test preconditions
+------------------
+
+* Volume API
+
+Basic test flow execution description and pass/fail criteria
+------------------------------------------------------------
+
+Test execution
+''''''''''''''
+
+* Test action 1: List volume API versions
+* **Test assertion 1:** The list versions operation is successful executed
+
+Pass / Fail criteria
+''''''''''''''''''''
+
+This test case evaluates the volume API ability of listing volume API versions.
+Specifically, the test verifies that:
+
+* Successfully listing volume API versions.
+
+In order to pass this test, all test assertions listed in the test execution above need to pass.
+
+Post conditions
+---------------
+
+N/A
diff --git a/docs/testing/user/testspecification/tempest_trunk_ports/index.rst b/docs/testing/user/testspecification/tempest_trunk_ports/index.rst
index fd60a32e..f82fc468 100644
--- a/docs/testing/user/testspecification/tempest_trunk_ports/index.rst
+++ b/docs/testing/user/testspecification/tempest_trunk_ports/index.rst
@@ -17,7 +17,7 @@ operations, by means of both positive and negative tests.
References
================
-- `OpenStack API reference <https://developer.openstack.org/api-ref/network/v2/#trunk-networking>`_
+- `OpenStack API reference <https://docs.openstack.org/api-ref/network/v2/#trunk-networking>`_
System Under Test (SUT)
@@ -35,9 +35,9 @@ is able to run independently, i.e. irrelevant of the state created by a previous
test. For detailed information on the individual steps and assertions performed
by the tests, review the Python source code accessible via the following links:
-- `Neutron Trunk API tests <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk.py>`_
-- `Neutron Trunk API trunk details <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk_details.py>`_
-- `Neutron Trunk API negative tests <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk_negative.py>`_
+- `Neutron Trunk API tests <https://github.com/openstack/neutron-tempest-plugin/blob/0.3.0/neutron_tempest_plugin/api/test_trunk.py>`_
+- `Neutron Trunk API trunk details <https://github.com/openstack/neutron-tempest-plugin/blob/0.3.0/neutron_tempest_plugin/api/test_trunk_details.py>`_
+- `Neutron Trunk API negative tests <https://github.com/openstack/neutron-tempest-plugin/blob/0.3.0/neutron_tempest_plugin/api/test_trunk_negative.py>`_
**Trunk port and sub-port CRUD operations:**
@@ -46,38 +46,17 @@ These tests cover the CRUD (Create, Read, Update, Delete) life-cycle operations
of trunk ports and subports.
Implementation:
-`TrunkTestInheritJSONBase <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk.py#L228>`_
-and
-`TrunkTestJSON <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk.py#L83>`_.
+`TrunkTestJSON <https://github.com/openstack/neutron-tempest-plugin/blob/0.3.0/neutron_tempest_plugin/api/test_trunk.py#L71>`_
-- neutron.tests.tempest.api.test_trunk.TrunkTestInheritJSONBase.test_add_subport
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_add_subport
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_create_show_delete_trunk
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_create_trunk_empty_subports_list
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_create_trunk_subports_not_specified
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_create_update_trunk
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_create_update_trunk_with_description
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_delete_trunk_with_subport_is_allowed
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_get_subports
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_list_trunks
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_remove_subport
-- neutron.tests.tempest.api.test_trunk.TrunkTestJSON.test_show_trunk_has_project_id
-
-
-**MTU-related operations:**
-
-These tests validate that trunk ports and subports can be created and added
-when specifying valid MTU sizes. These tests do not include negative tests
-covering invalid MTU sizes.
-
-Implementation:
-`TrunkTestMtusJSON <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk.py#L302>`_
-
-- neutron.tests.tempest.api.test_trunk.TrunkTestMtusJSON.test_add_subport_with_mtu_equal_to_trunk
-- neutron.tests.tempest.api.test_trunk.TrunkTestMtusJSON.test_add_subport_with_mtu_smaller_than_trunk
-- neutron.tests.tempest.api.test_trunk.TrunkTestMtusJSON.test_create_trunk_with_mtu_equal_to_subport
-- neutron.tests.tempest.api.test_trunk.TrunkTestMtusJSON.test_create_trunk_with_mtu_greater_than_subport
+- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_add_subports
+- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_show_delete_trunk
+- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_trunk_empty_subports_list
+- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_trunk_subports_not_specified
+- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_delete_trunk_with_subport_is_allowed
+- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_get_subports
+- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_list_trunks
+- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_remove_subport
**API for listing query results:**
@@ -86,17 +65,15 @@ These tests verify that listing operations of trunk port objects work. This
functionality is required for CLI and UI operations.
Implementation:
-`TrunksSearchCriteriaTest <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk.py#L346>`_
+`TrunksSearchCriteriaTest <https://github.com/openstack/neutron-tempest-plugin/blob/0.3.0/neutron_tempest_plugin/api/test_trunk.py#L306>`_
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_no_pagination_limit_0
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_page_reverse_asc
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_page_reverse_desc
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_page_reverse_with_href_links
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_with_href_links
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_with_marker
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_sorts_asc
-- neutron.tests.tempest.api.test_trunk.TrunksSearchCriteriaTest.test_list_sorts_desc
+- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_no_pagination_limit_0
+- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination
+- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_page_reverse_asc
+- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_page_reverse_desc
+- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_with_marker
+- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_sorts_asc
+- neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_sorts_desc
**Query trunk port details:**
@@ -104,11 +81,11 @@ Implementation:
These tests validate that all attributes of trunk port objects can be queried.
Implementation:
-`TestTrunkDetailsJSON <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk_details.py#L20>`_
+`TestTrunkDetailsJSON <https://github.com/openstack/neutron-tempest-plugin/blob/0.3.0/neutron_tempest_plugin/api/test_trunk_details.py#L20>`_
-- neutron.tests.tempest.api.test_trunk_details.TestTrunkDetailsJSON.test_port_resource_empty_trunk_details
-- neutron.tests.tempest.api.test_trunk_details.TestTrunkDetailsJSON.test_port_resource_trunk_details_no_subports
-- neutron.tests.tempest.api.test_trunk_details.TestTrunkDetailsJSON.test_port_resource_trunk_details_with_subport
+- neutron_tempest_plugin.api.test_trunk_details.TestTrunkDetailsJSON.test_port_resource_empty_trunk_details
+- neutron_tempest_plugin.api.test_trunk_details.TestTrunkDetailsJSON.test_port_resource_trunk_details_no_subports
+- neutron_tempest_plugin.api.test_trunk_details.TestTrunkDetailsJSON.test_port_resource_trunk_details_with_subport
**Negative tests:**
@@ -117,30 +94,27 @@ These group of tests comprise negative tests which verify that invalid operation
are handled correctly by the system under test.
Implementation:
-`TrunkTestNegative <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/api/test_trunk_negative.py#L24>`_
-
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_add_subport_duplicate_segmentation_details
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_add_subport_passing_dict
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_add_subport_port_id_disabled_trunk
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_add_subport_port_id_uses_trunk_port_id
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_subport_invalid_inherit_network_segmentation_type
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_subport_missing_segmentation_id
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_subport_nonexistent_port_id
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_subport_nonexistent_trunk
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_duplicate_subport_segmentation_ids
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_nonexistent_port_id
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_nonexistent_subport_port_id
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_with_subport_missing_port_id
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_with_subport_missing_segmentation_id
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_with_subport_missing_segmentation_type
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_delete_port_in_use_by_subport
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_delete_port_in_use_by_trunk
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_delete_trunk_disabled_trunk
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_remove_subport_not_found
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_remove_subport_passing_dict
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestJSON.test_remove_subport_port_id_disabled_trunk
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestMtusJSON.test_add_subport_with_mtu_greater_than_trunk
-- neutron.tests.tempest.api.test_trunk_negative.TrunkTestMtusJSON.test_create_trunk_with_mtu_smaller_than_subport
+`TrunkTestNegative <https://github.com/openstack/neutron-tempest-plugin/blob/0.3.0/neutron_tempest_plugin/api/test_trunk_negative.py#L27>`_
+
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_duplicate_segmentation_details
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_passing_dict
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_port_id_disabled_trunk
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_port_id_uses_parent_port_id
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_subport_missing_segmentation_id
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_subport_nonexistent_port_id
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_subport_nonexistent_trunk
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_duplicate_subport_segmentation_ids
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_nonexistent_port_id
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_nonexistent_subport_port_id
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_with_subport_missing_port_id
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_with_subport_missing_segmentation_id
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_with_subport_missing_segmentation_type
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_delete_port_in_use_by_subport
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_delete_port_in_use_by_trunk
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_delete_trunk_disabled_trunk
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_remove_subport_not_found
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_remove_subport_passing_dict
+- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_remove_subport_port_id_disabled_trunk
**Scenario tests (tests covering more than one functionality):**
@@ -151,7 +125,6 @@ functions as intended. To this end, they boot up two VMs with trunk ports and
sub ports and verify connectivity between those VMs.
Implementation:
-`TrunkTest <https://github.com/openstack/neutron/blob/stable/pike/neutron/tests/tempest/scenario/test_trunk.py#L45>`_
+`TrunkTest <https://github.com/openstack/neutron-tempest-plugin/blob/0.3.0/neutron_tempest_plugin/scenario/test_trunk.py#L41>`_
-- neutron.tests.tempest.scenario.test_trunk.TrunkTest.test_subport_connectivity
-- neutron.tests.tempest.scenario.test_trunk.TrunkTest.test_trunk_subport_lifecycle
+- neutron_tempest_plugin.scenario.test_trunk.TrunkTest.test_trunk_subport_lifecycle
diff --git a/docs/testing/user/testspecification/tempest_vm_lifecycle/index.rst b/docs/testing/user/testspecification/tempest_vm_lifecycle/index.rst
index 7091929a..1621d8f5 100644
--- a/docs/testing/user/testspecification/tempest_vm_lifecycle/index.rst
+++ b/docs/testing/user/testspecification/tempest_vm_lifecycle/index.rst
@@ -59,7 +59,7 @@ Each test case is able to run independently, i.e. irrelevant of the state
created by a previous test. Specifically, every test performs clean-up
operations which return the system to the same state as before the test.
-All these test cases are included in the test case dovetail.tempest.vm_lifecycle of
+All these test cases are included in the test case functest.tempest.vm_lifecycle of
OVP test suite.
Test Descriptions
@@ -69,35 +69,35 @@ Test Descriptions
API Used and Reference
----------------------
-Block storage: https://developer.openstack.org/api-ref/block-storage
+Block storage: https://docs.openstack.org/api-ref/block-storage/
- create volume
- delete volume
- attach volume to server
- detach volume from server
-Security Groups: https://developer.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
+Security Groups: https://docs.openstack.org/api-ref/network/v2/index.html#security-groups-security-groups
- create security group
- delete security group
-Networks: https://developer.openstack.org/api-ref/networking/v2/index.html#networks
+Networks: https://docs.openstack.org/api-ref/network/v2/index.html#networks
- create network
- delete network
-Routers and interface: https://developer.openstack.org/api-ref/networking/v2/index.html#routers-routers
+Routers and interface: https://docs.openstack.org/api-ref/network/v2/index.html#routers-routers
- create router
- delete router
- add interface to router
-Subnets: https://developer.openstack.org/api-ref/networking/v2/index.html#subnets
+Subnets: https://docs.openstack.org/api-ref/network/v2/index.html#subnets
- create subnet
- delete subnet
-Servers: https://developer.openstack.org/api-ref/compute/
+Servers: https://docs.openstack.org/api-ref/compute/
- create keypair
- create server
@@ -120,17 +120,17 @@ Servers: https://developer.openstack.org/api-ref/compute/
- migrate server
- live-migrate server
-Ports: https://developer.openstack.org/api-ref/networking/v2/index.html#ports
+Ports: https://docs.openstack.org/api-ref/network/v2/index.html#ports
- create port
- delete port
-Floating IPs: https://developer.openstack.org/api-ref/networking/v2/index.html#floating-ips-floatingips
+Floating IPs: https://docs.openstack.org/api-ref/network/v2/index.html#floating-ips-floatingips
- create floating IP
- delete floating IP
-Availability zone: https://developer.openstack.org/api-ref/compute/
+Availability zone: https://docs.openstack.org/api-ref/compute/
- get availability zone
@@ -174,7 +174,7 @@ Test execution
* **Test assertion 8:** Verify VM1's addresses have been refreshed after disassociating FIP1
* Test action 10: Delete SG1, IMG1, KEYP1, VOL1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates a minimum basic scenario. Specifically, the test verifies that:
@@ -227,7 +227,7 @@ Test execution
* **Test assertion 3:** Verify SRC_HOST does not equal to DST_HOST
* Test action 11: Delete KEYP1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to cold migrate VMs. Specifically, the test verifies that:
@@ -274,7 +274,7 @@ Test execution
* **Test assertion 3:** Verify can ping FIP1 successfully and can SSH to VM1 via FIP1
* Test action 8: Delete KEYP1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to pause and unpause VMs. Specifically, the test verifies that:
@@ -319,7 +319,7 @@ Test execution
* **Test assertion 1:** Verify can ping FIP1 successfully and can SSH to VM1 via FIP1
* Test action 6: Delete KEYP1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to reboot servers. Specifically, the test verifies that:
@@ -362,7 +362,7 @@ Test execution
* **Test assertion 1:** Verify can ping FIP1 successfully and can SSH to VM1 via FIP1
* Test action 6: Delete KEYP1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to rebuild servers. Specifically, the test verifies that:
@@ -407,7 +407,7 @@ Test execution
* **Test assertion 1:** Verify can ping FIP1 successfully and can SSH to VM1 via FIP1
* Test action 8: Delete KEYP1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to resize servers. Specifically, the test verifies that:
@@ -453,7 +453,7 @@ Test execution
* **Test assertion 2:** Verify can ping FIP1 successfully and can SSH to VM1 via FIP1
* Test action 8: Delete KEYP1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to stop and start servers. Specifically, the test verifies that:
@@ -501,7 +501,7 @@ Test execution
* **Test assertion 2:** Verify can ping FIP1 successfully and can SSH to VM1 via FIP1
* Test action 8: Delete KEYP1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to suspend and resume servers. Specifically, the test verifies that:
@@ -553,7 +553,7 @@ Test execution
* **Test assertion 4:** Verify VM1 status is 'ACTIVE'
* Test action 10: Delete KEYP1, VM1 and FIP1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to suspend and resume servers in sequence.
@@ -575,7 +575,7 @@ Test Case 10 - Resize volume backed server
Test case specification
-----------------------
-tempest.scenario.test_server_advanced_ops.TestServerAdvancedOps.test_resize_volume_backed_server_confirm
+tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_resize_volume_backed_server_confirm
Test preconditions
------------------
@@ -597,7 +597,7 @@ Test execution
* **Test assertion 1:** VM1's status is 'ACTIVE'
* Test action 6: Delete VM1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to resize volume backed servers.
@@ -646,7 +646,7 @@ Test execution
* **Test assertion 1:** Verify the values written and read are the same
* Test action 9: Delete SG1, KEYP1 and VM1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to shelve and unshelve servers.
@@ -694,7 +694,7 @@ Test execution
* **Test assertion 1:** Verify T_STAMP1 equals to T_STAMP2
* Test action 9: Delete SG1, KEYP1 and VM1
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates the ability to shelve and unshelve volume backed servers.
diff --git a/docs/testing/user/testspecification/tempest_volume/index.rst b/docs/testing/user/testspecification/tempest_volume/index.rst
index 7672e4de..64888c99 100644
--- a/docs/testing/user/testspecification/tempest_volume/index.rst
+++ b/docs/testing/user/testspecification/tempest_volume/index.rst
@@ -2,9 +2,9 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) Ericsson AB
-===========================================
+=================================
Tempest Volume test specification
-===========================================
+=================================
Scope
@@ -35,7 +35,7 @@ The test area is structured in individual tests as listed below.
For detailed information on the individual steps and assertions performed
by the tests, review the Python source code accessible via the following links:
-All these test cases are included in the test case dovetail.tempest.volume of
+All these test cases are included in the test case functest.tempest.volume of
OVP test suite.
diff --git a/docs/testing/user/testspecification/vnf/index.rst b/docs/testing/user/testspecification/vnf/index.rst
index 4b43e7d3..d13634a1 100644
--- a/docs/testing/user/testspecification/vnf/index.rst
+++ b/docs/testing/user/testspecification/vnf/index.rst
@@ -33,16 +33,12 @@ This test area references the following specifications and guides:
- 3GPP LTE
- - http://www.3gpp.org/technologies/keywords-acronyms/98-lte
+ - https://www.3gpp.org/technologies/keywords-acronyms/98-lte
- ETSI - TS 24.301
- https://www.etsi.org/deliver/etsi_ts/124300_124399/124301/10.03.00_60/ts_124301v100300p.pdf
-- ABoT : Test Orchestration Solution
-
- - https://www.rebaca.com/abot-test-orchestration-tool/
-
- Cloudify clearwater: opnfv-cloudify-clearwater [1]
- https://github.com/Orange-OpenSource/opnfv-cloudify-clearwater
@@ -86,14 +82,14 @@ return the system to the same state as before the test.
Test Descriptions
=================
-----------------------------------------------------------------
+------------------
Test Case 1 - vEPC
-----------------------------------------------------------------
+------------------
Short name
----------
-dovetail.vnf.vepc
+functest.vnf.vepc
Use case specification
@@ -140,7 +136,7 @@ Test execution
* Test action 5: The deployed VMs are deleted.
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
The VNF Manager (juju) should be deployed successfully
@@ -157,14 +153,14 @@ Post conditions
The clean-up operations are run.
-----------------------------------------------------------------
+------------------
Test Case 2 - vIMS
-----------------------------------------------------------------
+------------------
Short name
----------
-dovetail.vnf.vims
+functest.vnf.vims
Use case specification
----------------------
@@ -206,7 +202,7 @@ Test execution
* Test action 5: The deployed VMs are deleted.
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
The VNF orchestrator (Cloudify) should be deployed successfully.
diff --git a/docs/testing/user/testspecification/vping/index.rst b/docs/testing/user/testspecification/vping/index.rst
index 666b732f..355018c6 100644
--- a/docs/testing/user/testspecification/vping/index.rst
+++ b/docs/testing/user/testspecification/vping/index.rst
@@ -3,7 +3,7 @@
.. (c) Ericsson AB
========================
-Vping test specification
+Vping Test Specification
========================
.. toctree::
@@ -31,10 +31,10 @@ References
- SCPClient
- - https://pypi.python.org/pypi/scp
+ - https://pypi.org/project/scp/
-Definitions and abbreviations
+Definitions and Abbreviations
=============================
The following terms and abbreviations are used in conjunction with this test
@@ -42,10 +42,10 @@ area
- ICMP - Internet Control Message Protocol
- L3 - Layer 3
-- NFVi - Network functions virtualization infrastructure
-- SCP - Secure Copy
-- SSH - Secure Shell
-- VM - Virtual machine
+- NFVi - Network Functions Virtualization infrastructure
+- SCP - Secure CoPy
+- SSH - Secure SHell
+- VM - Virtual Machine
System Under Test (SUT)
@@ -73,7 +73,7 @@ Test Case 1 - vPing using userdata provided by nova metadata service
Short name
----------
-dovetail.vping.userdata
+functest.vping.userdata
Use case specification
@@ -106,39 +106,46 @@ checking the console output of the source VMs.
Test execution
''''''''''''''
-* Test action 1:
+* **Test action 1:**
* Create a private tenant network by using neutron client
* Create one subnet and one router in the network by neutron client
* Add one interface between the subnet and router
* Add one gateway route to the router by neutron client
* Store the network id in the response
* **Test assertion 1:** The network id, subnet id and router id can be found in the response
-* Test action 2:
+* **Test action 2:**
* Create an security group by using neutron client
* Store the security group id parameter in the response
* **Test assertion 2:** The security group id can be found in the response
-* Test action 3: boot VM1 by using nova client with configured name, image, flavor, private tenant
- network created in test action 1, security group created in test action 2
+* **Test action 3:**
+ * Boot VM1 by using nova client with configured name, image, flavor, private tenant
+ network created in test action 1 and security group created in test action 2
* **Test assertion 3:** The VM1 object can be found in the response
-* Test action 4: Generate ping script with the IP of VM1 to be passed as userdata provided by
- the **nova metadata service**.
-* Test action 5: Boot VM2 by using nova client with configured name, image, flavor, private tenant
- network created in test action 1, security group created in test action 2, userdata created
- in test action 4
+* **Test action 4:**
+ * Generate ping script with the IP of VM1 to be passed as userdata provided by
+ the **nova metadata service**
+* **Test action 5:**
+ * Boot VM2 by using nova client with configured name, image, flavor, private tenant
+ network created in test action 1, security group created in test action 2, and
+ userdata created in test action 4
* **Test assertion 4:** The VM2 object can be found in the response
-* Test action 6: Inside VM2, the ping script is executed automatically when booted and it contains a
- loop doing the ping until the return code is 0 or timeout reached. For each ping, when the return
- code is 0, "vPing OK" is printed in the VM2 console-log, otherwise, "vPing KO" is printed.
- Monitoring the console-log of VM2 to see the response generated by the script.
+* **Test action 6:**
+ * Inside VM2, the ping script is executed automatically when booted and it contains a
+ loop doing the ping until the return code is 0 or timeout reached
+ * For each ping, when the return code is 0, "vPing OK" is printed in the VM2 console-log,
+ otherwise, "vPing KO" is printed
+ * Monitoring the console-log of VM2 to see the response generated by the script
* **Test assertion 5:** "vPing OK" is detected, when monitoring the console-log in VM2
-* Test action 7: delete VM1, VM2
+* **Test action 7:**
+ * Delete VM1, VM2
* **Test assertion 6:** VM1 and VM2 are not present in the VM list
-* Test action 8: delete security group, gateway, interface, router, subnet and network
+* **Test action 8:**
+ * Delete security group, gateway, interface, router, subnet and network
* **Test assertion 7:** The security group, gateway, interface, router, subnet and network are
no longer present in the lists after deleting
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates basic NFVi capabilities of the system under test.
@@ -173,7 +180,7 @@ Test Case 2 - vPing using SSH to a floating IP
Short name
----------
-dovetail.vping.ssh
+functest.vping.ssh
Use case specification
@@ -207,45 +214,55 @@ Test execution
''''''''''''''
-* Test action 1:
+* **Test action 1:**
* Create a private tenant network by neutron client
* Create one subnet and one router are created in the network by using neutron client
* Create one interface between the subnet and router
* Add one gateway route to the router by neutron client
* Store the network id in the response
* **Test assertion 1:** The network id, subnet id and router id can be found in the response
-* Test action 2:
+* **Test action 2:**
* Create an security group by using neutron client
* Store the security group id parameter in the response
* **Test assertion 2:** The security group id can be found in the response
-* Test action 3: Boot VM1 by using nova client with configured name, image, flavor, private tenant
- network created in test action 1, security group created in test action 2
+* **Test action 3:**
+ * Boot VM1 by using nova client with configured name, image, flavor, private tenant
+ network created in test action 1 and security group created in test action 2
* **Test assertion 3:** The VM1 object can be found in the response
-* Test action 4: Boot VM2 by using nova client with configured name, image, flavor, private tenant
- network created in test action 1, security group created in test action 2
+* **Test action 4:**
+ * Boot VM2 by using nova client with configured name, image, flavor, private tenant
+ network created in test action 1 and security group created in test action 2
* **Test assertion 4:** The VM2 object can be found in the response
-* Test action 5: create one floating IP by using neutron client, storing the floating IP address
- returned in the response
+* **Test action 5:**
+ * Create one floating IP by using neutron client, storing the floating IP address
+ returned in the response
* **Test assertion 5:** Floating IP address can be found in the response
-* Test action 6: Assign the floating IP address created in test action 5 to VM2 by using nova client
+* **Test action 6:**
+ * Assign the floating IP address created in test action 5 to VM2 by using nova client
* **Test assertion 6:** The assigned floating IP can be found in the VM2 console log file
-* Test action 7: Establish SSH connection between the test host and VM2 through the floating IP
+* **Test action 7:**
+ * Establish SSH connection between the test host and VM2 through the floating IP
* **Test assertion 7:** SSH connection between the test host and VM2 is established within
300 seconds
-* Test action 8: Copy the Ping script from the test host to VM2 by using SCPClient
+* **Test action 8:**
+ * Copy the Ping script from the test host to VM2 by using SCPClient
* **Test assertion 8:** The Ping script can be found inside VM2
-* Test action 9: Inside VM2, to execute the Ping script to ping VM1, the Ping script contains a
- loop doing the ping until the return code is 0 or timeout reached, for each ping, when the return
- code is 0, "vPing OK" is printed in the VM2 console-log, otherwise, "vPing KO" is printed.
- Monitoring the console-log of VM2 to see the response generated by the script.
+* **Test action 9:**
+ * Inside VM2, to execute the Ping script to ping VM1, the Ping script contains a
+ loop doing the ping until the return code is 0 or timeout reached
+ * For each ping, when the return code is 0, "vPing OK" is printed in the VM2 console-log,
+ otherwise, "vPing KO" is printed
+ * Monitoring the console-log of VM2 to see the response generated by the script
* **Test assertion 9:** "vPing OK" is detected, when monitoring the console-log in VM2
-* Test action 10: delete VM1, VM2
+* **Test action 10:**
+ * Delete VM1, VM2
* **Test assertion 10:** VM1 and VM2 are not present in the VM list
-* Test action 11: delete floating IP, security group, gateway, interface, router, subnet and network
+* **Test action 11:**
+ * Delete floating IP, security group, gateway, interface, router, subnet and network
* **Test assertion 11:** The security group, gateway, interface, router, subnet and network are
no longer present in the lists after deleting
-Pass / fail criteria
+Pass / Fail criteria
''''''''''''''''''''
This test evaluates basic NFVi capabilities of the system under test.
diff --git a/docs/testing/user/testspecification/vpn/index.rst b/docs/testing/user/testspecification/vpn/index.rst
index f6778435..9d0a4ad3 100644
--- a/docs/testing/user/testspecification/vpn/index.rst
+++ b/docs/testing/user/testspecification/vpn/index.rst
@@ -13,9 +13,8 @@ Scope
=====
The VPN test area evaluates the ability of the system under test to support VPN
-networking for virtual workloads. The tests in this test area will evaluate
-establishing VPN networks, publishing and communication between endpoints using
-BGP and tear down of the networks.
+networking for virtual workloads. This test area tests CRUD (Create, Read, Update,
+Delete) operations of BGPVPN API.
References
==========
@@ -44,14 +43,9 @@ The following terms and abbreviations are used in conjunction with this test
area
- BGP - Border gateway protocol
-- eRT - Export route target
-- IETF - Internet Engineering Task Force
-- iRT - Import route target
- NFVi - Network functions virtualization infrastructure
-- Tenant - An isolated set of virtualized infrastructures
- VM - Virtual machine
- VPN - Virtual private network
-- VLAN - Virtual local area network
System Under Test (SUT)
@@ -64,483 +58,23 @@ Pharos compliant infrastructure.
Test Area Structure
===================
-The test area is structured in four separate tests which are executed
+The test area is structured in several tempest tests which are executed
sequentially. The order of the tests is arbitrary as there are no dependencies
across the tests. Specifially, every test performs clean-up operations which
return the system to the same state as before the test.
-The test area evaluates the ability of the SUT to establish connectivity
-between Virtual Machines using an appropriate route target configuration,
-reconfigure the route targets to remove connectivity between the VMs, then
-reestablish connectivity by re-association.
-
Test Descriptions
=================
-----------------------------------------------------------------
-Test Case 1 - VPN provides connectivity between Neutron subnets
-----------------------------------------------------------------
-
-Short name
-----------
-
-dovetail.sdnvpn.subnet_connectivity
-
-
-Use case specification
-----------------------
-
-This test evaluates the use case where an NFVi tenant uses a BGPVPN to provide
-connectivity between VMs on different Neutron networks and subnets that reside
-on different hosts.
-
-
-Test preconditions
-------------------
-
-2 compute nodes are available, denoted Node1 and Node2 in the following.
-
-
-Basic test flow execution description and pass/fail criteria
-------------------------------------------------------------
-
-Methodology for verifying connectivity
-''''''''''''''''''''''''''''''''''''''
-
-Connectivity between VMs is tested by sending ICMP ping packets between
-selected VMs. The target IPs are passed to the VMs sending pings by means of a
-custom user data script. Whether or not a ping was successful is determined by
-checking the console output of the source VMs.
-
-
-Test execution
-''''''''''''''
-
-* Create Neutron network N1 and subnet SN1 with IP range 10.10.10.0/24
-* Create Neutron network N2 and subnet SN2 with IP range 10.10.11.0/24
-
-* Create VM1 on Node1 with a port in network N1
-* Create VM2 on Node1 with a port in network N1
-* Create VM3 on Node2 with a port in network N1
-* Create VM4 on Node1 with a port in network N2
-* Create VM5 on Node2 with a port in network N2
-
-* Create VPN1 with eRT<>iRT
-* Create network association between network N1 and VPN1
-
-* VM1 sends ICMP packets to VM2 using ``ping``
-
-* **Test assertion 1:** Ping from VM1 to VM2 succeeds: ``ping`` exits with return code 0
-
-* VM1 sends ICMP packets to VM3 using ``ping``
-
-* **Test assertion 2:** Ping from VM1 to VM3 succeeds: ``ping`` exits with return code 0
-
-* VM1 sends ICMP packets to VM4 using ``ping``
-
-* **Test assertion 3:** Ping from VM1 to VM4 fails: ``ping`` exits with a non-zero return code
-
-* Create network association between network N2 and VPN1
-
-* VM4 sends ICMP packets to VM5 using ``ping``
-
-* **Test assertion 4:** Ping from VM4 to VM5 succeeds: ``ping`` exits with return code 0
-
-* Configure iRT=eRT in VPN1
-
-* VM1 sends ICMP packets to VM4 using ``ping``
-
-* **Test assertion 5:** Ping from VM1 to VM4 succeeds: ``ping`` exits with return code 0
-
-* VM1 sends ICMP packets to VM5 using ``ping``
-
-* **Test assertion 6:** Ping from VM1 to VM5 succeeds: ``ping`` exits with return code 0
-
-* Delete all instances: VM1, VM2, VM3, VM4 and VM5
-
-* Delete all networks and subnets: networks N1 and N2 including subnets SN1 and SN2
-
-* Delete all network associations and VPN1
-
-
-Pass / fail criteria
-''''''''''''''''''''
-
-This test evaluates the capability of the NFVi and VIM to provide routed IP
-connectivity between VMs by means of BGP/MPLS VPNs. Specifically, the test
-verifies that:
-
-* VMs in the same Neutron subnet have IP connectivity regardless of BGP/MPLS
- VPNs (test assertion 1, 2, 4)
-
-* VMs in different Neutron subnets do not have IP connectivity by default - in
- this case without associating VPNs with the same import and export route
- targets to the Neutron networks (test assertion 3)
-
-* VMs in different Neutron subnets have routed IP connectivity after
- associating both networks with BGP/MPLS VPNs which have been configured with
- the same import and export route targets (test assertion 5, 6). Hence,
- adjusting the ingress and egress route targets enables as well as prohibits
- routing.
-
-In order to pass this test, all test assertions listed in the test execution
-above need to pass.
-
-
-Post conditions
----------------
-
-N/A
-
-------------------------------------------------------------
-Test Case 2 - VPNs ensure traffic separation between tenants
-------------------------------------------------------------
-
-Short Name
-----------
-
-dovetail.sdnvpn.tenant_separation
-
-
-Use case specification
-----------------------
-
-This test evaluates if VPNs provide separation of traffic such that overlapping
-IP ranges can be used.
-
-
-Test preconditions
-------------------
-
-2 compute nodes are available, denoted Node1 and Node2 in the following.
-
-
-Basic test flow execution description and pass/fail criteria
-------------------------------------------------------------
-
-Methodology for verifying connectivity
-''''''''''''''''''''''''''''''''''''''
-
-Connectivity between VMs is tested by establishing an SSH connection. Moreover,
-the command "hostname" is executed at the remote VM in order to retrieve the
-hostname of the remote VM. The retrieved hostname is furthermore compared
-against an expected value. This is used to verify tenant traffic separation,
-i.e., despite overlapping IPs, a connection is made to the correct VM as
-determined by means of the hostname of the target VM.
-
-
-
-Test execution
-''''''''''''''
-
-* Create Neutron network N1
-* Create subnet SN1a of network N1 with IP range 10.10.10.0/24
-* Create subnet SN1b of network N1 with IP range 10.10.11.0/24
-
-* Create Neutron network N2
-* Create subnet SN2a of network N2 with IP range 10.10.10.0/24
-* Create subnet SN2b of network N2 with IP range 10.10.11.0/24
-
-* Create VM1 on Node1 with a port in network N1 and IP 10.10.10.11.
-* Create VM2 on Node1 with a port in network N1 and IP 10.10.10.12.
-* Create VM3 on Node2 with a port in network N1 and IP 10.10.11.13.
-* Create VM4 on Node1 with a port in network N2 and IP 10.10.10.12.
-* Create VM5 on Node2 with a port in network N2 and IP 10.10.11.13.
-
-* Create VPN1 with iRT=eRT=RT1
-* Create network association between network N1 and VPN1
-
-* VM1 attempts to execute the command ``hostname`` on the VM with IP 10.10.10.12 via SSH.
-
-* **Test assertion 1:** VM1 can successfully connect to the VM with IP
- 10.10.10.12. via SSH and execute the remote command ``hostname``. The
- retrieved hostname equals the hostname of VM2.
-
-* VM1 attempts to execute the command ``hostname`` on the VM with IP 10.10.11.13 via SSH.
-
-* **Test assertion 2:** VM1 can successfully connect to the VM with IP
- 10.10.11.13 via SSH and execute the remote command ``hostname``. The
- retrieved hostname equals the hostname of VM3.
-
-* Create VPN2 with iRT=eRT=RT2
-* Create network association between network N2 and VPN2
-
-* VM4 attempts to execute the command ``hostname`` on the VM with IP 10.10.11.13 via SSH.
-
-* **Test assertion 3:** VM4 can successfully connect to the VM with IP
- 10.10.11.13 via SSH and execute the remote command ``hostname``. The
- retrieved hostname equals the hostname of VM5.
-
-* VM4 attempts to execute the command ``hostname`` on the VM with IP 10.10.11.11 via SSH.
-
-* **Test assertion 4:** VM4 cannot connect to the VM with IP 10.10.11.11 via SSH.
-
-* Delete all instances: VM1, VM2, VM3, VM4 and VM5
-
-* Delete all networks and subnets: networks N1 and N2 including subnets SN1a, SN1b, SN2a and SN2b
-
-* Delete all network associations, VPN1 and VPN2
-
-
-Pass / fail criteria
-''''''''''''''''''''
-
-This test evaluates the capability of the NFVi and VIM to provide routed IP
-connectivity between VMs by means of BGP/MPLS VPNs. Specifically, the test
-verifies that:
-
-* VMs in the same Neutron subnet (still) have IP connectivity between each
- other when a BGP/MPLS VPN is associated with the network (test assertion 1).
-
-* VMs in different Neutron subnets have routed IP connectivity between each
- other when BGP/MPLS VPNs with the same import and expert route targets are
- associated with both networks (assertion 2).
-
-* VMs in different Neutron networks and BGP/MPLS VPNs with different import and
- export route targets can have overlapping IP ranges. The BGP/MPLS VPNs
- provide traffic separation (assertion 3 and 4).
-
-In order to pass this test, all test assertions listed in the test execution
-above need to pass.
-
-
-Post conditions
----------------
-
-N/A
-
---------------------------------------------------------------------------------
-Test Case 3 - VPN provides connectivity between subnets using router association
---------------------------------------------------------------------------------
-
-Short Name
-----------
-
-dovetail.sdnvpn.router_association
-
-
-Use case specification
-----------------------
-
-This test evaluates if a VPN provides connectivity between two subnets by
-utilizing two different VPN association mechanisms: a router association and a
-network association.
-
-Specifically, the test network topology comprises two networks N1 and N2 with
-corresponding subnets. Additionally, network N1 is connected to a router R1.
-This test verifies that a VPN V1 provides connectivity between both networks
-when applying a router association to router R1 and a network association to
-network N2.
-
-
-Test preconditions
-------------------
-
-2 compute nodes are available, denoted Node1 and Node2 in the following.
-
-Basic test flow execution description and pass/fail criteria
-------------------------------------------------------------
-
-Methodology for verifying connectivity
-''''''''''''''''''''''''''''''''''''''
-
-Connectivity between VMs is tested by sending ICMP ping packets between
-selected VMs. The target IPs are passed to the VMs sending pings by means of a
-custom user data script. Whether or not a ping was successful is determined by
-checking the console output of the source VMs.
-
-
-Test execution
-''''''''''''''
-
-* Create a network N1, a subnet SN1 with IP range 10.10.10.0/24 and a connected router R1
-* Create a network N2, a subnet SN2 with IP range 10.10.11.0/24
-
-* Create VM1 on Node1 with a port in network N1
-* Create VM2 on Node1 with a port in network N1
-* Create VM3 on Node2 with a port in network N1
-* Create VM4 on Node1 with a port in network N2
-* Create VM5 on Node2 with a port in network N2
-
-* Create VPN1 with eRT<>iRT so that connected subnets should not reach each other
-
-* Create route association between router R1 and VPN1
-
-* VM1 sends ICMP packets to VM2 using ``ping``
-
-* **Test assertion 1:** Ping from VM1 to VM2 succeeds: ``ping`` exits with return code 0
-
-* VM1 sends ICMP packets to VM3 using ``ping``
-
-* **Test assertion 2:** Ping from VM1 to VM3 succeeds: ``ping`` exits with return code 0
-
-* VM1 sends ICMP packets to VM4 using ``ping``
-
-* **Test assertion 3:** Ping from VM1 to VM4 fails: ``ping`` exits with a non-zero return code
-
-* Create network association between network N2 and VPN1
-
-* VM4 sends ICMP packets to VM5 using ``ping``
-
-* **Test assertion 4:** Ping from VM4 to VM5 succeeds: ``ping`` exits with return code 0
-
-* Change VPN1 so that iRT=eRT
-
-* VM1 sends ICMP packets to VM4 using ``ping``
-
-* **Test assertion 5:** Ping from VM1 to VM4 succeeds: ``ping`` exits with return code 0
-
-* VM1 sends ICMP packets to VM5 using ``ping``
-
-* **Test assertion 6:** Ping from VM1 to VM5 succeeds: ``ping`` exits with return code 0
-
-* Delete all instances: VM1, VM2, VM3, VM4 and VM5
-
-* Delete all networks, subnets and routers: networks N1 and N2 including subnets SN1 and SN2, router R1
-
-* Delete all network and router associations and VPN1
-
-
-Pass / fail criteria
-''''''''''''''''''''
-
-This test evaluates the capability of the NFVi and VIM to provide routed IP
-connectivity between VMs by means of BGP/MPLS VPNs. Specifically, the test
-verifies that:
-
-* VMs in the same Neutron subnet have IP connectivity regardless of the import
- and export route target configuration of BGP/MPLS VPNs (test assertion 1, 2, 4)
-
-* VMs in different Neutron subnets do not have IP connectivity by default - in
- this case without associating VPNs with the same import and export route
- targets to the Neutron networks or connected Neutron routers (test assertion 3).
-
-* VMs in two different Neutron subnets have routed IP connectivity after
- associating the first network and a router connected to the second network
- with BGP/MPLS VPNs which have been configured with the same import and export
- route targets (test assertion 5, 6). Hence, adjusting the ingress and egress
- route targets enables as well as prohibits routing.
-
-* Network and router associations are equivalent methods for binding Neutron networks
- to VPN.
-
-In order to pass this test, all test assertions listed in the test execution
-above need to pass.
-
-
-Post conditions
----------------
-
-N/A
-
----------------------------------------------------------------------------------------------------
-Test Case 4 - Verify interworking of router and network associations with floating IP functionality
----------------------------------------------------------------------------------------------------
-
-Short Name
-----------
-
-dovetail.sdnvpn.router_association_floating_ip
-
-
-Use case specification
-----------------------
-
-This test evaluates if both the router association and network association
-mechanisms interwork with floating IP functionality.
-
-Specifically, the test network topology comprises two networks N1 and N2 with
-corresponding subnets. Additionally, network N1 is connected to a router R1.
-This test verifies that i) a VPN V1 provides connectivity between both networks
-when applying a router association to router R1 and a network association to
-network N2 and ii) a VM in network N1 is reachable externally by means of a
-floating IP.
-
-
-Test preconditions
-------------------
-
-At least one compute node is available.
-
-Basic test flow execution description and pass/fail criteria
-------------------------------------------------------------
-
-Methodology for verifying connectivity
-''''''''''''''''''''''''''''''''''''''
-
-Connectivity between VMs is tested by sending ICMP ping packets between
-selected VMs. The target IPs are passed to the VMs sending pings by means of a
-custom user data script. Whether or not a ping was successful is determined by
-checking the console output of the source VMs.
-
-
-Test execution
-''''''''''''''
-
-* Create a network N1, a subnet SN1 with IP range 10.10.10.0/24 and a connected router R1
-* Create a network N2 with IP range 10.10.20.0/24
-
-* Create VM1 with a port in network N1
-* Create VM2 with a port in network N2
-
-* Create VPN1
-* Create a router association between router R1 and VPN1
-* Create a network association between network N2 and VPN1
-
-
-* VM1 sends ICMP packets to VM2 using ``ping``
-
-* **Test assertion 1:** Ping from VM1 to VM2 succeeds: ``ping`` exits with return code 0
-
-* Assign a floating IP to VM1
-
-* The host running the test framework sends ICMP packets to VM1 using ``ping``
-
-* **Test assertion 2:** Ping from the host running the test framework to the
- floating IP of VM1 succeeds: ``ping`` exits with return code 0
-
-* Delete floating IP assigned to VM1
-
-* Delete all instances: VM1, VM2
-
-* Delete all networks, subnets and routers: networks N1 and N2 including subnets SN1 and SN2, router R1
-
-* Delete all network and router associations as well as VPN1
-
-
-Pass / fail criteria
-''''''''''''''''''''
-
-This test evaluates the capability of the NFVi and VIM to provide routed IP
-connectivity between VMs by means of BGP/MPLS VPNs. Specifically, the test
-verifies that:
-
-* VMs in the same Neutron subnet have IP connectivity regardless of the import
- and export route target configuration of BGP/MPLS VPNs (test assertion 1)
-
-* VMs connected to a network which has been associated with a BGP/MPLS VPN are
- reachable through floating IPs.
-
-In order to pass this test, all test assertions listed in the test execution
-above need to pass.
-
-
-Post conditions
----------------
-
-N/A
-
-
-
------------------------------------
-Test Case 5 - Tempest API CRUD Tests
+Test Case 1 - Tempest API CRUD Tests
------------------------------------
Short Name
----------
-dovetail.tempest.bgpvpn
+functest.tempest.bgpvpn
Use case specification
diff --git a/docs/testing/user/userguide/api_testing_guide.rst b/docs/testing/user/userguide/api_testing_guide.rst
new file mode 100644
index 00000000..119beff7
--- /dev/null
+++ b/docs/testing/user/userguide/api_testing_guide.rst
@@ -0,0 +1,373 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+.. (c) OPNFV, Huawei Technologies Co.,Ltd and others.
+
+===============================
+Running Dovetail by RESTful API
+===============================
+
+Overview
+--------
+
+Dovetail framework provides RESTful APIs for end users to run all OVP test cases.
+Also it provides a Swagger UI for users to find out all APIs and try them out.
+
+
+Definitions and abbreviations
+-----------------------------
+
+- REST - Representational State Transfer
+- API - Application Programming Interface
+- OVP - OPNFV Verification Program
+- UI - User Interface
+
+
+Environment Preparation
+-----------------------
+
+
+Install Docker
+^^^^^^^^^^^^^^
+
+The main prerequisite software for Dovetail is Docker. Please refer to official
+Docker installation guide that is relevant to your Test Host's operating system.
+
+
+Configuring the Test Host Environment
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+For convenience and as a convention, we will create a home directory for storing
+all Dovetail related config items and results files:
+
+.. code-block:: bash
+
+ $ mkdir -p ${HOME}/dovetail
+ $ export DOVETAIL_HOME=${HOME}/dovetail
+
+
+Installing Dovetail API
+-----------------------
+
+The Dovetail project maintains a Docker image that has both Dovetail API and
+Dovetail CLI preinstalled. This Docker image is tagged with versions.
+Before pulling the Dovetail image, check the OPNFV's OVP web page first to
+determine the right tag for OVP testing.
+
+
+Downloading Dovetail Docker Image
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The first version of Dovetail API is ovp-3.0.0.
+
+.. code-block:: bash
+
+ $ sudo docker pull opnfv/dovetail:ovp-3.0.0
+ ovp-3.0.0: Pulling from opnfv/dovetail
+ 6abc03819f3e: Pull complete
+ 05731e63f211: Pull complete
+ 0bd67c50d6be: Pull complete
+ 3f737f5d00b2: Pull complete
+ c93fd0792ebd: Pull complete
+ 77d9a9603ec6: Pull complete
+ 9463cdd9c628: Pull complete
+ Digest: sha256:45e2ffdbe217a4e6723536afb5b6a3785d318deff535da275f34cf8393af458d
+ Status: Downloaded newer image for opnfv/dovetail:ovp-3.0.0
+
+
+Deploying Dovetail API
+^^^^^^^^^^^^^^^^^^^^^^
+
+The Dovetail API can be deployed by running a Dovetail container with the Docker
+image downloaded before.
+
+.. code-block:: bash
+
+ $ docker run -itd -p <swagger_port>:80 -p <api_port>:5000 --privileged=true \
+ -e SWAGGER_HOST=<host_ip>:<api_port> -e DOVETAIL_HOME=/home/ovp \
+ -v /home/ovp:/home/ovp -v /var/run/docker.sock:/var/run/docker.sock \
+ opnfv/dovetail:<version>
+
+
+In the container, it uses 2 ports for Swagger UI (port 80) and API (port 5000)
+respectively. So in order to access to these 2 services outside the container,
+it needs to map them to the host ports. It can be any available ports in the host.
+
+The env SWAGGER_HOST is optional. If you will access the Swagger UI webpage with
+the same host deploying this container, there is no need to set SWAGGER_HOST.
+Otherwise, if you will access the Swagger UI webpage from other machines, then
+it needs to set SWAGGER_HOST.
+
+
+Using Dovetail API
+------------------
+
+Here give the guide of where to find out all APIs and how to use them.
+
+
+Swagger UI Webpage
+^^^^^^^^^^^^^^^^^^
+
+After deploying Dovetail container, the Swagger UI webpage can be accessed with
+any browser. The url is ``http://localhost:<swagger_port>/dovetail-api/index.html``
+if accessing from the same host as deploying this container. Otherwise, the url
+is ``http://<host_ip>:<swagger_port>/dovetail-api/index.html``.
+
+
+Calling APIs
+^^^^^^^^^^^^
+
+There are totally 5 APIs provided by Dovetail.
+
+ * Get all test suites
+
+ * Get all test cases
+
+ * Run test cases
+
+ * Run test cases with execution ID
+
+ * Get status of test cases
+
+Here give some easy guide of how to call these APIs. For more detailed infomation,
+please refer to the Swagger UI page.
+
+
+Getting All Test Suites
+=======================
+
+ * This is a **GET** function with no parameter to get all test suites defined
+ in Dovetail container.
+
+ * The request URL is ``http://<host_ip>:<api_port>/api/v1/scenario/nfvi/testsuites``.
+
+ * The response body is structured as:
+
+ .. code-block:: bash
+
+ {
+ "testsuites": {
+ "debug": {
+ "name": "debug",
+ "testcases_list": {
+ "optional": [
+ "functest.vping.userdata"
+ ]
+ }
+ },
+ "healthcheck": {
+ "name": "healthcheck",
+ "testcases_list": {
+ "optional": [
+ "functest.healthcheck.connection_check"
+ ]
+ }
+ }
+ }
+ }
+
+
+Getting All Test Cases
+======================
+
+ * This is a **GET** function without no parameter to get all test cases integrated
+ in Dovetail container.
+
+ * The request URL is ``http://<host_ip>:<api_port>/api/v1/scenario/nfvi/testcases``.
+
+ * The response body is structured as:
+
+ .. code-block:: bash
+
+ {
+ "testcases": [
+ {
+ "description": "This test case will verify the high availability of the user service provided by OpenStack (keystone) on control node.",
+ "scenario": "nfvi",
+ "subTestCase": null,
+ "testCaseName": "yardstick.ha.keystone"
+ },
+ {
+ "description": "testing for vping using userdata",
+ "scenario": "nfvi",
+ "subTestCase": null,
+ "testCaseName": "functest.vping.userdata"
+ },
+ {
+ "description": "tempest smoke test cases about volume",
+ "scenario": "nfvi",
+ "subTestCase": [
+ "tempest.api.volume.test_volumes_actions.VolumesActionsTest.test_attach_detach_volume_to_instance[compute,id-fff42874-7db5-4487-a8e1-ddda5fb5288d,smoke]",
+ "tempest.scenario.test_volume_boot_pattern.TestVolumeBootPattern.test_volume_boot_pattern[compute,id-557cd2c2-4eb8-4dce-98be-f86765ff311b,image,slow,volume]"
+ ],
+ "testCaseName": "functest.tempest.volume"
+ }
+ ]
+ }
+
+
+Running Test Cases
+==================
+
+ * This is a **POST** function with some parameters to run a subset of the whole test cases.
+
+ * The request URL is ``http://<host_ip>:<api_port>/api/v1/scenario/nfvi/execution``.
+
+ * The request body is structured as following. The ``conf`` section is used to
+ give all configuration items those are required to run test cases. They are
+ the same as all configuration files provided under ``$DOVETAIL_HOME/pre_config/``.
+ If you already have these files under this directory, the whole ``conf`` section
+ can be ignored. If you provide these configuration items with the request body,
+ then the corresponding files under ``$DOVETAIL_HOME/pre_config/`` will be ignored
+ by Dovetail. The ``testcase``, ``testsuite``, ``testarea`` and ``deploy_scenario``
+ correspond to ``--testcase``, ``--testsuite``, ``--testarea`` and ``--deploy-scenario``
+ defined with Dovetail CLI. The ``options`` section support to set all options
+ which have already been implemented by Dovetail CLI including ``--optional``,
+ ``--mandatory``, ``--no-clean``, ``--no-api-validation``, ``--offline``,
+ ``--report``, ``--stop`` and ``--debug``. For options list in ``options`` section,
+ they are set to be ``True``, otherwise, they are set to be ``False``.
+
+ .. code-block:: bash
+
+ {
+ "conf": {
+ "vm_images": "/home/ovp/images",
+ "pods": {
+ "nodes": [
+ {
+ "name": "node1",
+ "role": "Controller",
+ "ip": "192.168.117.222",
+ "user": "root",
+ "password": "root",
+ }
+ ],
+ "process_info": [
+ {
+ "testcase_name": "yardstick.ha.rabbitmq",
+ "attack_host": "node1",
+ "attack_process": "rabbitmq"
+ }
+ ]
+ },
+ "tempest_conf": {
+ "compute": {
+ "min_compute_nodes": "2",
+ "volume_device_name": "vdb",
+ "max_microversion": "2.65"
+ }
+ },
+ "hosts": {
+ "192.168.141.101": [
+ "volume.os.com",
+ "compute.os.com"
+ ]
+ },
+ "envs": {
+ "OS_USERNAME": "admin",
+ "OS_PASSWORD": "admin",
+ "OS_AUTH_URL": "https://192.168.117.222:5000/v3",
+ "EXTERNAL_NETWORK": "ext-net"
+ }
+ },
+ "testcase": [
+ "functest.vping.ssh",
+ "yardstick.ha.rabbitmq"
+ ],
+ "testsuite": "ovp.2019.12",
+ "testarea": [
+ "vping",
+ "ha"
+ ],
+ "deploy_scenario": "os-nosdn-ovs-ha",
+ "options": [
+ "debug",
+ "report"
+ ]
+ }
+
+
+ * The response body is structured as:
+
+ .. code-block:: bash
+
+ {
+ "result": [
+ {
+ "endTime": null,
+ "executionId": "a65e24c0-1803-11ea-84f4-0242ac110004",
+ "results": null,
+ "scenario": "nfvi",
+ "status": "IN_PROGRESS",
+ "testCaseName": "functest.vping.ssh",
+ "testSuiteName": "ovp.2019.12",
+ "timestart": null
+ }
+ ]
+ }
+
+
+Running Test Cases with Execution ID
+====================================
+
+ * This is a **POST** function with some parameters to run a subset of
+ whole test cases and set the execution ID instead of using the random one.
+
+ * The request URL is ``http://<host_ip>:<api_port>/api/v1/scenario/nfvi/execution/{exec_id}``.
+
+ * It's almost the same as the above running test cases API except the execution ID.
+
+
+Getting Status of Test Cases
+============================
+
+ * This is a **POST** function to get the status of some test cases by using
+ the execution ID received in the response body of `Running Test Cases`_ or
+ `Running Test Cases with Execution ID`_ APIs.
+
+ * The request URL is ``http://<host_ip>:<api_port>/api/v1/scenario/nfvi/execution/status/{exec_id}``.
+
+ * The request body is structured as:
+
+ .. code-block:: bash
+
+ {
+ "testcase": [
+ "functest.vping.ssh"
+ ]
+ }
+
+ * The response body is structured as:
+
+ .. code-block:: bash
+
+ {
+ "result": [
+ {
+ "endTime": "2019-12-06 08:39:23",
+ "executionId": "a65e24c0-1803-11ea-84f4-0242ac110004",
+ "results": {
+ "criteria": "PASS",
+ "sub_testcase": [],
+ "timestart": "2019-12-06 08:38:40",
+ "timestop":"2019-12-06 08:39:23"
+ },
+ "scenario": "nfvi",
+ "status": "COMPLETED",
+ "testCaseName": "functest.vping.ssh",
+ "testSuiteName": "ovp.2019.12",
+ "timestart":"2019-12-06 08:38:40"
+ }
+ ]
+ }
+
+
+
+
+Getting Test Results
+^^^^^^^^^^^^^^^^^^^^
+
+Each time you call the running test case API, Dovetail creates a directory with the
+execution ID as the name under ``$DOVETAIL_HOME`` to store results on the host.
+You can find all result files under ``$DOVETAIL_HOME/<executionId>/results``.
+If you run test cases with ``report`` option, then there will be a tarball file
+under ``$DOVETAIL_HOME/<executionId>`` which can be upload to OVP portal.
diff --git a/docs/testing/user/userguide/cli_reference.rst b/docs/testing/user/userguide/cli_reference.rst
index 97eccffc..7dd5c8e4 100644
--- a/docs/testing/user/userguide/cli_reference.rst
+++ b/docs/testing/user/userguide/cli_reference.rst
@@ -135,7 +135,7 @@ Dovetail List Commands
.. code-block:: bash
- root@1f230e719e44:~/dovetail/dovetail# dovetail list ovp.2018.09
+ root@1f230e719e44:~/dovetail/dovetail# dovetail list ovp.2019.12
- mandatory
functest.vping.userdata
functest.vping.ssh
@@ -166,15 +166,11 @@ Dovetail List Commands
functest.tempest.vm_lifecycle
functest.tempest.network_scenario
functest.tempest.bgpvpn
- functest.bgpvpn.subnet_connectivity
- functest.bgpvpn.tenant_separation
- functest.bgpvpn.router_association
- functest.bgpvpn.router_association_floating_ip
+ functest.security.patrole_vxlan_dependent
yardstick.ha.neutron_l3_agent
yardstick.ha.controller_restart
functest.vnf.vims
functest.vnf.vepc
- functest.snaps.smoke
Dovetail Show Commands
----------------------
@@ -199,12 +195,15 @@ Dovetail Show Commands
validate:
type: functest
testcase: vping_ssh
+ image_name: opnfv/functest-healthcheck
report:
source_archive_files:
- functest.log
dest_archive_files:
- vping_logs/functest.vping.ssh.log
- check_results_file: 'functest_results.txt'
+ check_results_file:
+ - 'functest_results.txt'
+ portal_key_file: vping_logs/functest.vping.ssh.log
sub_testcase_list:
.. code-block:: bash
@@ -219,20 +218,20 @@ Dovetail Show Commands
testcase: tempest_custom
pre_condition:
- 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
- - 'cp /home/opnfv/userconfig/pre_config/testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- pre_copy:
- src_file: tempest_custom.txt
- dest_path: /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt
+ - 'cp /home/opnfv/userconfig/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ - 'cp /home/opnfv/functest/results/tempest_custom.txt /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt'
report:
source_archive_files:
- functest.log
- - tempest_custom/tempest.log
+ - tempest_custom/rally.log
- tempest_custom/tempest-report.html
dest_archive_files:
- tempest_logs/functest.tempest.image.functest.log
- tempest_logs/functest.tempest.image.log
- tempest_logs/functest.tempest.image.html
- check_results_file: 'functest_results.txt'
+ check_results_file:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.image.html
sub_testcase_list:
- tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_register_upload_get_image_file[id-139b765e-7f3d-4b3d-8b37-3ca3876ee318,smoke]
- tempest.api.image.v2.test_versions.VersionsTest.test_list_versions[id-659ea30a-a17c-4317-832c-0f68ed23c31d,smoke]
@@ -248,41 +247,43 @@ Dovetail Run Commands
Dovetail compliance test entry!
Options:
- --deploy-scenario TEXT Specify the DEPLOY_SCENARIO which will be used as input by each testcase respectively
+ --opnfv-ci Only enabled when running with OPNFV CI jobs and pushing results to TestAPI DB
--optional Run all optional test cases.
+ --mandatory Run all mandatory test cases.
+ --deploy-scenario TEXT Specify the DEPLOY_SCENARIO which will be used as input by each testcase respectively
+ -n, --no-clean Keep all Containers created for debuging.
+ --no-api-validation disable strict API response validation
--offline run in offline method, which means not to update the docker upstream images, functest, yardstick, etc.
-r, --report Create a tarball file to upload to OVP web portal
+ -s, --stop Flag for stopping on test case failure.
-d, --debug Flag for showing debug log on screen.
--testcase TEXT Compliance testcase. Specify option multiple times to include multiple test cases.
--testarea TEXT Compliance testarea within testsuite. Specify option multiple times to include multiple test areas.
- -s, --stop Flag for stopping on test case failure.
- -n, --no-clean Keep all Containers created for debuging.
- --no-api-validation disable strict API response validation
- --mandatory Run all mandatory test cases.
--testsuite TEXT compliance testsuite.
-h, --help Show this message and exit.
.. code-block:: bash
root@1f230e719e44:~/dovetail/dovetail# dovetail run --testcase functest.vping.ssh --offline -r --deploy-scenario os-nosdn-ovs-ha
- 2017-10-12 14:57:51,278 - run - INFO - ================================================
- 2017-10-12 14:57:51,278 - run - INFO - Dovetail compliance: ovp.2018.09!
- 2017-10-12 14:57:51,278 - run - INFO - ================================================
- 2017-10-12 14:57:51,278 - run - INFO - Build tag: daily-master-b80bca76-af5d-11e7-879a-0242ac110002
- 2017-10-12 14:57:51,278 - run - INFO - DEPLOY_SCENARIO : os-nosdn-ovs-ha
- 2017-10-12 14:57:51,336 - run - WARNING - There is no hosts file /home/dovetail/pre_config/hosts.yaml, may be some issues with domain name resolution.
- 2017-10-12 14:57:51,336 - run - INFO - Get hardware info of all nodes list in file /home/cvp/pre_config/pod.yaml ...
- 2017-10-12 14:57:51,336 - run - INFO - Hardware info of all nodes are stored in file /home/cvp/results/all_hosts_info.json.
- 2017-10-12 14:57:51,517 - run - INFO - >>[testcase]: functest.vping.ssh
- 2017-10-12 14:58:21,325 - report.Report - INFO - Results have been stored with file /home/cvp/results/functest_results.txt.
- 2017-10-12 14:58:21,325 - report.Report - INFO -
+ 2019-12-06 02:51:52,634 - run - INFO - ================================================
+ 2019-12-06 02:51:52,634 - run - INFO - Dovetail compliance: ovp.2019.12!
+ 2019-12-06 02:51:52,634 - run - INFO - ================================================
+ 2019-12-06 02:51:52,634 - run - INFO - Build tag: daily-master-5b58584a-17d3-11ea-878a-0242ac110002
+ 2019-12-06 02:51:52,634 - run - INFO - DEPLOY_SCENARIO : os-nosdn-ovs-ha
+ 2019-12-06 02:51:53,077 - run - INFO - >>[testcase]: functest.vping.ssh
+ 2019-12-06 02:51:53,078 - dovetail.test_runner.DockerRunner - WARNING - There is no hosts file /home/ovp/pre_config/hosts.yaml. This may cause some issues with domain name resolution.
+ 2019-12-06 02:51:54,048 - dovetail.test_runner.DockerRunner - INFO - Get hardware info of all nodes list in file /home/ovp/pre_config/pod.yaml ...
+ 2019-12-06 02:51:54,049 - dovetail.test_runner.DockerRunner - INFO - Hardware info of all nodes are stored in file /home/dovetail/results/all_hosts_info.json.
+ 2019-12-06 02:51:54,073 - dovetail.container.Container - WARNING - There is no hosts file /home/ovp/pre_config/hosts.yaml. This may cause some issues with domain name resolution.
+ 2019-12-06 02:52:57,982 - dovetail.report.Report - INFO - Results have been stored with files: ['/home/ovp/results/functest_results.txt'].
+ 2019-12-06 02:52:57,986 - dovetail.report.Report - INFO -
Dovetail Report
- Version: 2018.09
- Build Tag: daily-master-b80bca76-af5d-11e7-879a-0242ac110002
- Test Date: 2018-08-13 03:23:56 UTC
- Duration: 291.92 s
+ Version: 2019.12
+ Build Tag: daily-master-5b58584a-17d3-11ea-878a-0242ac110002
+ Test Date: 2019-12-06 02:52:57 UTC
+ Duration: 64.91 s
- Pass Rate: 0.00% (1/1)
- vping: pass rate 100%
+ Pass Rate: 100.00% (1/1)
+ vping: pass rate 100.00%
-functest.vping.ssh PASS
diff --git a/docs/testing/user/userguide/images/tocsa_vnf_test_environment.png b/docs/testing/user/userguide/images/tocsa_vnf_test_environment.png
new file mode 100644
index 00000000..78b3f74a
--- /dev/null
+++ b/docs/testing/user/userguide/images/tocsa_vnf_test_environment.png
Binary files differ
diff --git a/docs/testing/user/userguide/images/tosca_vnf_test_flow.png b/docs/testing/user/userguide/images/tosca_vnf_test_flow.png
new file mode 100644
index 00000000..87dc8ec4
--- /dev/null
+++ b/docs/testing/user/userguide/images/tosca_vnf_test_flow.png
Binary files differ
diff --git a/docs/testing/user/userguide/index.rst b/docs/testing/user/userguide/index.rst
index 355817df..98ca56e0 100644
--- a/docs/testing/user/userguide/index.rst
+++ b/docs/testing/user/userguide/index.rst
@@ -13,3 +13,5 @@ OVP Testing User Guide
testing_guide.rst
cli_reference.rst
+ api_testing_guide.rst
+ vnf_test_guide.rst
diff --git a/docs/testing/user/userguide/testing_guide.rst b/docs/testing/user/userguide/testing_guide.rst
index 2a6a46a6..d1c31683 100644
--- a/docs/testing/user/userguide/testing_guide.rst
+++ b/docs/testing/user/userguide/testing_guide.rst
@@ -2,12 +2,15 @@
.. http://creativecommons.org/licenses/by/4.0
.. (c) OPNFV, Huawei Technologies Co.,Ltd and others.
-==========================================
-Conducting OVP Testing with Dovetail
-==========================================
+=========================================
+Conducting OVP NFVI Testing with Dovetail
+=========================================
Overview
-------------------------------
+--------
+
+This guide provides the instructions for the OVP Infrastructure testing. For the
+OVP VNF testing, please refer to the next section.
The Dovetail testing framework for OVP consists of two major parts: the testing client which
executes all test cases in a lab (vendor self-testing or a third party lab),
@@ -160,8 +163,8 @@ results files:
$ mkdir -p ${HOME}/dovetail
$ export DOVETAIL_HOME=${HOME}/dovetail
-For example, Here we set dovetail home directory to be ``${HOME}/dovetail``.
-Then create two directories named ``pre_config`` and ``images`` inside this directory
+For example, here we set dovetail home directory to be ``${HOME}/dovetail``.
+Then create two directories named ``pre_config`` and ``images`` under this directory
to store all Dovetail related config files and all test images respectively:
.. code-block:: bash
@@ -231,6 +234,13 @@ this file should contain.
# Otherwise, it will create a role 'Member' to do that.
export NEW_USER_ROLE=xxx
+ # For XCI installer the following environment parameters should be added in
+ # this file. Otherwise, those parameters could be ignored.
+ export INSTALLER_TYPE=osa
+ export DEPLOY_SCENARIO=os-nosdn-nofeature
+ export XCI_FLAVOR=noha
+
+
The OS_AUTH_URL variable is key to configure correctly, as the other admin services
are collected from the identity service. HTTPS should be configured in the SUT so
@@ -268,7 +278,12 @@ Here is an example of what this file should contain.
# Expected device name when a volume is attached to an instance.
volume_device_name: vdb
-Use the listing above as a minimum to execute the mandatory test areas.
+ # One sub test case of functest.tempest.osinterop will be skipped if not provide this version.
+ # The default range of microversion for tempest is [None - None].
+ # Test case functest.tempest.osinterop required the range to be [2.2 - latest].
+ max_microversion: 2.65
+
+Use the listing above as a minimum to execute the mandatory test cases.
If the optional BGPVPN Tempest API tests shall be run, Tempest needs to be told
that the BGPVPN service is available. To do that, add the following to the
@@ -302,10 +317,12 @@ Below is a sample with the required syntax when password is employed by the cont
nodes:
-
- # This can not be changed and must be node0.
+ # This info of node0 is used only for one optional test case 'yardstick.ha.controller_restart'.
+ # If you don't plan to test it, this Jumpserver node can be ignored.
+ # This can not be changed and **must** be node0.
name: node0
- # This must be Jumpserver.
+ # This **must** be Jumpserver.
role: Jumpserver
# This is the instance IP of a node which has ipmitool installed.
@@ -318,10 +335,13 @@ Below is a sample with the required syntax when password is employed by the cont
password: root
-
- # This can not be changed and must be node1.
+ # Almost all HA test cases are trying to login to a controller node named 'node1'
+ # and then kill some processes running on it.
+ # If you don't want to reset the attack node name for each test case, this
+ # name can not be changed and **must** be node1.
name: node1
- # This must be controller.
+ # This **must** be controller.
role: Controller
# This is the instance IP of a controller node, which is the haproxy primary node
@@ -335,6 +355,13 @@ Below is a sample with the required syntax when password is employed by the cont
process_info:
-
+ # For all HA test cases, there are 2 parameters, 'attack_process' and 'attack_host',
+ # which support to be set by users instead of using the default values.
+ # The 'attack_process' is the process name of one HA test case which it try to kill.
+ # The 'attack_host' is the host name which the test case try to login and then kill
+ # the process running on it.
+ # Fllowing is 2 samples.
+
# The default attack process of yardstick.ha.rabbitmq is 'rabbitmq-server'.
# Here can be reset to 'rabbitmq'.
testcase_name: yardstick.ha.rabbitmq
@@ -360,10 +387,9 @@ A sample is provided below to show the required syntax when using a key file.
user: root
# Private ssh key for accessing the controller nodes. If a keyfile is
- # being used, the path specified **must** be as shown below as this
- # is the location of the user-provided private ssh key inside the
- # Yardstick container.
- key_filename: /home/opnfv/userconfig/pre_config/id_rsa
+ # being used instead of password, it **must** be put under
+ # $DOVETAIL_HOME/pre_config/ and named 'id_rsa'.
+ key_filename: /home/dovetail/pre_config/id_rsa
Under nodes, repeat entries for name, role, ip, user and password or key file for each of the
controller/compute nodes that comprise the SUT. Use a '-' to separate each of the entries.
@@ -432,20 +458,26 @@ OPNFV's OVP web page first to determine the right tag for OVP testing.
Online Test Host
""""""""""""""""
-If the Test Host is online, you can directly pull Dovetail Docker image and download Ubuntu
-and Cirros images. All other dependent docker images will automatically be downloaded. The
-Ubuntu and Cirros images are used by Dovetail for image creation and VM instantiation within
-the SUT.
+If the Test Host is online, you can directly pull Dovetail Docker image, then all
+other dependent docker images will automatically be downloaded. Also you can download
+other related VM images such as Ubuntu and Cirros images which are used by Dovetail
+for image creation and VM instantiation within the SUT.
+
+Following given the download url for each VM images. Cirros-0.4.0 and Ubuntu-16.04
+are used by mandatory test cases, so they are the only 2 images **must** be downloaded
+before doing the test. There are also 2 other optional VM images, Ubuntu-14.04 and
+Cloudify-manager, which are used by optional test cases functest.vnf.vepc and functest.vnf.vims.
+If you don't plan to test these 2 test cases, you can skip downloading these 2 images.
.. code-block:: bash
$ wget -nc http://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img -P ${DOVETAIL_HOME}/images
- $ wget -nc https://cloud-images.ubuntu.com/releases/14.04/release/ubuntu-14.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_HOME}/images
$ wget -nc https://cloud-images.ubuntu.com/releases/16.04/release/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_HOME}/images
- $ wget -nc http://repository.cloudifysource.org/cloudify/4.0.1/sp-release/cloudify-manager-premium-4.0.1.qcow2 -P ${DOVETAIL_HOME}/images
+ $ wget -nc https://cloud-images.ubuntu.com/releases/14.04/release/ubuntu-14.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_HOME}/images
+ $ wget -nc http://repository.cloudifysource.org/cloudify/19.01.24/community-release/cloudify-docker-manager-community-19.01.24.tar -P ${DOVETAIL_HOME}/images
- $ sudo docker pull opnfv/dovetail:ovp-2.0.0
- ovp-2.0.0: Pulling from opnfv/dovetail
+ $ sudo docker pull opnfv/dovetail:ovp-3.0.0
+ ovp-3.0.0: Pulling from opnfv/dovetail
324d088ce065: Pull complete
2ab951b6c615: Pull complete
9b01635313e2: Pull complete
@@ -457,7 +489,7 @@ the SUT.
0ad9f4168266: Pull complete
d949894f87f6: Pull complete
Digest: sha256:7449601108ebc5c40f76a5cd9065ca5e18053be643a0eeac778f537719336c29
- Status: Downloaded newer image for opnfv/dovetail:ovp-2.0.0
+ Status: Downloaded newer image for opnfv/dovetail:ovp-3.0.0
Offline Test Host
"""""""""""""""""
@@ -468,26 +500,25 @@ to pull all dependent images is because Dovetail normally does dependency checki
and automatically pulls images as needed, if the Test Host is online. If the Test Host is
offline, then all these dependencies will need to be manually copied.
-The Docker images and Cirros image below are necessary for all mandatory test cases.
+The Docker images, Ubuntu and Cirros image below are necessary for all mandatory test cases.
.. code-block:: bash
- $ sudo docker pull opnfv/dovetail:ovp-2.0.0
- $ sudo docker pull opnfv/functest-smoke:opnfv-6.3.0
- $ sudo docker pull opnfv/yardstick:ovp-2.0.0
- $ sudo docker pull opnfv/bottlenecks:ovp-2.0.0
+ $ sudo docker pull opnfv/dovetail:ovp-3.0.0
+ $ sudo docker pull opnfv/functest-smoke:hunter
+ $ sudo docker pull opnfv/functest-healthcheck:hunter
+ $ sudo docker pull opnfv/yardstick:opnfv-8.0.0
+ $ sudo docker pull opnfv/bottlenecks:8.0.1-latest
$ wget -nc http://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img -P {ANY_DIR}
+ $ wget -nc https://cloud-images.ubuntu.com/releases/16.04/release/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_HOME}/images
The other Docker images and test images below are only used by optional test cases.
.. code-block:: bash
- $ sudo docker pull opnfv/functest-healthcheck:opnfv-6.3.0
- $ sudo docker pull opnfv/functest-features:opnfv-6.3.0
- $ sudo docker pull opnfv/functest-vnf:opnfv-6.3.0
+ $ sudo docker pull opnfv/functest-vnf:hunter
$ wget -nc https://cloud-images.ubuntu.com/releases/14.04/release/ubuntu-14.04-server-cloudimg-amd64-disk1.img -P {ANY_DIR}
- $ wget -nc https://cloud-images.ubuntu.com/releases/16.04/release/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P {ANY_DIR}
- $ wget -nc http://repository.cloudifysource.org/cloudify/4.0.1/sp-release/cloudify-manager-premium-4.0.1.qcow2 -P {ANY_DIR}
+ $ wget -nc http://repository.cloudifysource.org/cloudify/19.01.24/community-release/cloudify-docker-manager-community-19.01.24.tar -P ${DOVETAIL_HOME}/images
Once all these images are pulled, save the images, copy them to the Test Host, and then load
the Dovetail image and all dependent images at the Test Host.
@@ -496,10 +527,10 @@ At the online host, save the images with the command below.
.. code-block:: bash
- $ sudo docker save -o dovetail.tar opnfv/dovetail:ovp-2.0.0 \
- opnfv/functest-smoke:opnfv-6.3.0 opnfv/functest-healthcheck:opnfv-6.3.0 \
- opnfv/functest-features:opnfv-6.3.0 opnfv/functest-vnf:opnfv-6.3.0 \
- opnfv/yardstick:ovp-2.0.0 opnfv/bottlenecks:ovp-2.0.0
+ $ sudo docker save -o dovetail.tar opnfv/dovetail:ovp-3.0.0 \
+ opnfv/functest-smoke:hunter opnfv/functest-healthcheck:hunter \
+ opnfv/functest-vnf:hunter \
+ opnfv/yardstick:opnfv-8.0.0 opnfv/bottlenecks:8.0.1-latest
The command above creates a dovetail.tar file with all the images, which can then be copied
to the Test Host. To load the Dovetail images on the Test Host execute the command below.
@@ -513,14 +544,13 @@ Now check to see that all Docker images have been pulled or loaded properly.
.. code-block:: bash
$ sudo docker images
- REPOSITORY TAG IMAGE ID CREATED SIZE
- opnfv/dovetail ovp-2.0.0 ac3b2d12b1b0 24 hours ago 784 MB
- opnfv/functest-smoke opnfv-6.3.0 010aacb7c1ee 17 hours ago 594.2 MB
- opnfv/functest-healthcheck opnfv-6.3.0 2cfd4523f797 17 hours ago 234 MB
- opnfv/functest-features opnfv-6.3.0 b61d4abd56fd 17 hours ago 530.5 MB
- opnfv/functest-vnf opnfv-6.3.0 929e847a22c3 17 hours ago 1.87 GB
- opnfv/yardstick ovp-2.0.0 84b4edebfc44 17 hours ago 2.052 GB
- opnfv/bottlenecks ovp-2.0.0 3d4ed98a6c9a 21 hours ago 638 MB
+ REPOSITORY TAG IMAGE ID CREATED SIZE
+ opnfv/dovetail ovp-3.0.0 4b68659da24d 22 hours ago 825MB
+ opnfv/functest-smoke hunter c0253f6de153 3 weeks ago 556MB
+ opnfv/functest-healthcheck hunter fb6d766e38e0 3 weeks ago 379MB
+ opnfv/functest-vnf hunter 31466d52d155 21 hours ago 1.1GB
+ opnfv/yardstick opnfv-8.0.0 189d7d9fbcb2 7 months ago 2.54GB
+ opnfv/bottlenecks 8.0.1-latest 44c1b9fb25aa 5 hours ago 837MB
After copying and loading the Dovetail images at the Test Host, also copy the test images
(Ubuntu, Cirros and cloudify-manager) to the Test Host.
@@ -528,7 +558,7 @@ After copying and loading the Dovetail images at the Test Host, also copy the te
- Copy image ``cirros-0.4.0-x86_64-disk.img`` to ``${DOVETAIL_HOME}/images/``.
- Copy image ``ubuntu-14.04-server-cloudimg-amd64-disk1.img`` to ``${DOVETAIL_HOME}/images/``.
- Copy image ``ubuntu-16.04-server-cloudimg-amd64-disk1.img`` to ``${DOVETAIL_HOME}/images/``.
-- Copy image ``cloudify-manager-premium-4.0.1.qcow2`` to ``${DOVETAIL_HOME}/images/``.
+- Copy image ``cloudify-docker-manager-community-19.01.24.tar`` to ``${DOVETAIL_HOME}/images/``.
Starting Dovetail Docker
------------------------
@@ -565,10 +595,10 @@ for the details of the CLI.
$ dovetail run --testsuite <test-suite-name>
-The '--testsuite' option is used to control the set of tests intended for execution
+The ``--testsuite`` option is used to control the set of tests intended for execution
at a high level. For the purposes of running the OVP test suite, the test suite name follows
-the following format, ``ovp.<major>.<minor>.<patch>``. The latest and default test suite is
-ovp.2018.09.
+the following format, ``ovp.<release-version>``. The latest and default test suite is
+ovp.2019.12.
.. code-block:: bash
@@ -578,18 +608,18 @@ This command is equal to
.. code-block:: bash
- $ dovetail run --testsuite ovp.2018.09
+ $ dovetail run --testsuite ovp.2019.12
Without any additional options, the above command will attempt to execute all mandatory and
-optional test cases with test suite ovp.2018.09.
+optional test cases with test suite ovp.2019.12.
To restrict the breadth of the test scope, it can also be specified using options
-'--mandatory' or '--optional'.
+``--mandatory`` or ``--optional``.
.. code-block:: bash
$ dovetail run --mandatory
-Also there is a '--testcase' option provided to run a specified test case.
+Also there is a ``--testcase`` option provided to run a specified test case.
.. code-block:: bash
@@ -622,7 +652,7 @@ the DEPLOY_SCENARIO environment parameter having as substring the feature name "
(e.g. os-nosdn-ovs-ha).
Note for the users:
- - if their system uses DPDK, they should run with --deploy-scenario <xx-yy-ovs-zz>
+ - if their system uses DPDK, they should run with ``--deploy-scenario <xx-yy-ovs-zz>``
(e.g. os-nosdn-ovs-ha)
- this is an experimental feature
@@ -633,14 +663,14 @@ Note for the users:
By default, results are stored in local files on the Test Host at ``$DOVETAIL_HOME/results``.
Each time the 'dovetail run' command is executed, the results in the aforementioned directory
are overwritten. To create a singular compressed result file for upload to the OVP portal or
-for archival purposes, the tool provides an option '--report'.
+for archival purposes, the tool provides an option ``--report``.
.. code-block:: bash
$ dovetail run --report
If the Test Host is offline, ``--offline`` should be added to support running with
-local resources.
+local resources. Otherwise, it will try to download resources online during the run time.
.. code-block:: bash
@@ -652,22 +682,23 @@ result file on the Test Host.
.. code-block:: bash
$ dovetail run --offline --testcase functest.vping.userdata --report
- 2018-05-22 08:16:16,353 - run - INFO - ================================================
- 2018-05-22 08:16:16,353 - run - INFO - Dovetail compliance: ovp.2018.09!
- 2018-05-22 08:16:16,353 - run - INFO - ================================================
- 2018-05-22 08:16:16,353 - run - INFO - Build tag: daily-master-660de986-5d98-11e8-b635-0242ac110001
- 2018-05-22 08:19:31,595 - run - WARNING - There is no hosts file /home/dovetail/pre_config/hosts.yaml, may be some issues with domain name resolution.
- 2018-05-22 08:19:31,595 - run - INFO - Get hardware info of all nodes list in file /home/dovetail/pre_config/pod.yaml ...
- 2018-05-22 08:19:39,778 - run - INFO - Hardware info of all nodes are stored in file /home/dovetail/results/all_hosts_info.json.
- 2018-05-22 08:19:39,961 - run - INFO - >>[testcase]: functest.vping.userdata
- 2018-05-22 08:31:17,961 - run - INFO - Results have been stored with file /home/dovetail/results/functest_results.txt.
- 2018-05-22 08:31:17,969 - report.Report - INFO -
+ 2019-12-04 07:31:13,156 - run - INFO - ================================================
+ 2019-12-04 07:31:13,157 - run - INFO - Dovetail compliance: ovp.2019.12!
+ 2019-12-04 07:31:13,157 - run - INFO - ================================================
+ 2019-12-04 07:31:13,157 - run - INFO - Build tag: daily-master-0c9184e6-1668-11ea-b1cd-0242ac110002
+ 2019-12-04 07:31:13,610 - run - INFO - >>[testcase]: functest.vping.userdata
+ 2019-12-04 07:31:13,612 - dovetail.test_runner.DockerRunner - WARNING - There is no hosts file /home/ovp/pre_config/hosts.yaml. This may cause some issues with domain name resolution.
+ 2019-12-04 07:31:14,587 - dovetail.test_runner.DockerRunner - INFO - Get hardware info of all nodes list in file /home/ovp/pre_config/pod.yaml ...
+ 2019-12-04 07:31:14,587 - dovetail.test_runner.DockerRunner - INFO - Hardware info of all nodes are stored in file /home/dovetail/results/all_hosts_info.json.
+ 2019-12-04 07:31:14,612 - dovetail.container.Container - WARNING - There is no hosts file /home/ovp/pre_config/hosts.yaml. This may cause some issues with domain name resolution.
+ 2019-12-04 07:32:13,804 - dovetail.report.Report - INFO - Results have been stored with files: ['/home/ovp/results/functest_results.txt'].
+ 2019-12-04 07:32:13,808 - dovetail.report.Report - INFO -
Dovetail Report
- Version: 1.0.0
- Build Tag: daily-master-660de986-5d98-11e8-b635-0242ac110001
- Upload Date: 2018-05-22 08:31:17 UTC
- Duration: 698.01 s
+ Version: 2019.12
+ Build Tag: daily-master-0c9184e6-1668-11ea-b1cd-0242ac110002
+ Test Date: 2019-12-04 07:32:13 UTC
+ Duration: 60.20 s
Pass Rate: 100.00% (1/1)
vping: pass rate 100.00%
@@ -676,28 +707,27 @@ result file on the Test Host.
When test execution is complete, a tar file with all result and log files is written in
``$DOVETAIL_HOME`` on the Test Host. An example filename is
-``${DOVETAIL_HOME}/logs_20180105_0858.tar.gz``. The file is named using a
-timestamp that follows the convention 'YearMonthDay-HourMinute'. In this case, it was generated
-at 08:58 on January 5th, 2018. This tar file is used for uploading the logs to the OVP portal.
+``${DOVETAIL_HOME}/logs_20191204_0732.tar.gz``. The file is named using a timestamp
+that follows the convention 'YearMonthDay_HourMinute'. In this case, it was generated
+at 07:32 on December 4th, 2019. This tar file is used for uploading the logs and
+results to the OVP portal.
Making Sense of OVP Test Results
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
When a tester is performing trial runs, Dovetail stores results in local files on the Test
-Host by default within the directory specified below.
+Host by default within directory ``$DOVETAIL_HOME/results``.
+ * Log file: dovetail.log
-.. code-block:: bash
-
- cd $DOVETAIL_HOME/results
+ * Review the dovetail.log to see if all important information has been captured
-#. Local file
+ * In default mode without DEBUG.
- * Log file: dovetail.log
+ * Adding option ``-d/--debug`` to change the mode to be DEBUG.
- * Review the dovetail.log to see if all important information has been captured
- - in default mode without DEBUG.
+ * Result file: results.json
* Review the results.json to see all results data including criteria for PASS or FAIL.
@@ -707,11 +737,11 @@ Host by default within the directory specified below.
``security_logs/functest.security.XXX.html`` respectively,
which has the passed, skipped and failed test cases results.
- * This kind of files need to be opened with a web browser.
+ * This kind of files need to be opened with a web browser.
- * The skipped test cases are accompanied with the reason tag for the users to see why these test cases skipped.
+ * The skipped test cases are accompanied with the reason tag for the users to see why these test cases skipped.
- * The failed test cases have rich debug information for the users to see why these test cases failed.
+ * The failed test cases have rich debug information for the users to see why these test cases failed.
* Vping test cases
@@ -725,39 +755,31 @@ Host by default within the directory specified below.
* Its log is stored in ``stress_logs/bottlenecks.stress.XXX.log``.
- * Snaps test cases
-
- * Its log is stored in ``snaps_logs/functest.snaps.smoke.log``.
-
* VNF test cases
* Its log is stored in ``vnf_logs/functest.vnf.XXX.log``.
- * Bgpvpn test cases
-
- * Can see the log details in ``bgpvpn_logs/functest.bgpvpn.XXX.log``.
-
OVP Portal Web Interface
------------------------
The OVP portal is a public web interface for the community to collaborate on results
and to submit results for official OPNFV compliance verification. The portal can be used as a
-resource by users and testers to navigate and inspect results more easily than by manually
+resource by users to navigate and inspect results more easily than by manually
inspecting the log files. The portal also allows users to share results in a private manner
until they are ready to submit results for peer community review.
* Web Site URL
- * https://verified.opnfv.org
+ * https://nfvi-verified.lfnetworking.org
* Sign In / Sign Up Links
- * Accounts are exposed through Linux Foundation or OpenStack account credentials.
+ * Accounts are exposed through Linux Foundation.
* If you already have a Linux Foundation ID, you can sign in directly with your ID.
- * If you do not have a Linux Foundation ID, you can sign up for a new one using 'Sign Up'
+ * If you do not have a Linux Foundation ID, you can sign up for a new one using 'Sign Up'.
* My Results Tab
@@ -765,20 +787,25 @@ until they are ready to submit results for peer community review.
* This page lists all results uploaded by you after signing in.
- * Following the two steps below, the results are status uploaded and in status 'private'.
+ * Following the two steps below, the results are uploaded and in status 'private'.
- * Obtain results tar file located at ``${DOVETAIL_HOME}/``, e.g. ``logs_20180105_0858.tar.gz``
+ * Obtain results tar file located at ``${DOVETAIL_HOME}/``, e.g. ``logs_20180105_0858.tar.gz``.
- * Use the *Choose File* button where a file selection dialog allows you to choose your result
- file from the hard-disk. Then click the *Upload* button and see a results ID once your
- upload succeeds.
+ * Use the *Choose File* button where a file selection dialog allows you to choose your result file from the hard-disk. Then click the *Upload result* button and see a results ID once your upload succeeds.
* Results are remaining in status 'private' until they are submitted for review.
- * Use the *Operation* column drop-down option 'submit to review', to expose results to
- OPNFV community peer reviewers. Use the 'withdraw submit' option to reverse this action.
+ * Use the *Operation* column drop-down option *submit to review*, to expose results to
+ OPNFV community peer reviewers. Use the *withdraw submit* option to reverse this action.
+
+ * Results status are changed to be 'review' after submit to review.
+
+ * Use the *View Reviews* to find the review status including reviewers' names and the outcome.
- * Use the *Operation* column drop-down option 'share with' to share results with other
+ * The administrator will approve the results which have got 2 positive outcome from 2 reviewers.
+ Then the status will be changed to be 'verified'.
+
+ * Use the *Operation* column drop-down option *share with* to share results with other
users by supplying either the login user ID or the email address associated with
the share target account. The result is exposed to the share target but remains private
otherwise.
@@ -787,6 +814,8 @@ until they are ready to submit results for peer community review.
* This page shows your account info after you sign in.
+ * There are 3 different roles: administrator, user and reviewer.
+
Updating Dovetail or a Test Suite
---------------------------------
@@ -798,5 +827,6 @@ Follow the instructions in section `Installing Dovetail on the Test Host`_ and
sudo docker pull opnfv/dovetail:<dovetail_new_tag>
sudo docker pull opnfv/functest:<functest_new_tag>
sudo docker pull opnfv/yardstick:<yardstick_new_tag>
+ sudo docker pull opnfv/bottlenecks:<bottlenecks_new_tag>
This step is necessary if dovetail software or the OVP test suite have updates.
diff --git a/docs/testing/user/userguide/vnf_test_guide.rst b/docs/testing/user/userguide/vnf_test_guide.rst
new file mode 100644
index 00000000..00c4e4ef
--- /dev/null
+++ b/docs/testing/user/userguide/vnf_test_guide.rst
@@ -0,0 +1,714 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+.. (c) OPNFV, ONAP, and others.
+
+.. _dovetail-vnf_testers_guide:
+
+===================================
+Conducting ONAP VNF Testing for OVP
+===================================
+
+Overview
+--------
+
+As the LFN verification framework, the Dovetail team has worked with the ONAP VVP, and VTP
+projects to enable VNF testing, results submission, and results review to be completed
+throught the same web portal and processes used for the NFVI testing.
+For more details about VNF SDK and VVP, please refer to `ONAP VNF SDK Compliance Verification Program
+<https://docs.onap.org/en/elalto/submodules/vnfsdk/model.git/docs/files/VNFSDK-LFN-CVC.html>`_
+and `ONAP VVP <https://docs.onap.org/en/elalto/submodules/vvp/documentation.git/docs/index.html>`_.
+
+Testing is available for both HEAT and TOSCA defined VNFs, but the process is different depending
+on the template language. This userguide covers the testing process for both VNF types in the
+two sections below.
+
+
+Definitions and abbreviations
+-----------------------------
+
+- LFN - Linux Foundation Networking
+- ONAP - Open Network Automation Platform
+- OVP - OPNFV Verification Program
+- VNF - Virtual Network Function
+- VNF SDK - VNF Software Development Kit
+- VTP - VNF Test Platform
+- VVP - VNF Validation Program
+
+Testing of HEAT based VNFs
+--------------------------
+
+Environment Preparation
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Prerequisites
+"""""""""""""
+
+- `ONAP ElAlto Release deployed via OOM <https://onap.readthedocs.io/en/latest/submodules/oom.git/docs/oom_quickstart_guide.html>`_
+- An OpenStack deployment is available and privisioned as ONAP's Cloud Site
+- kubectl is installed on the system used to start the testing
+- bash
+- VNF Heat Templates
+- Preload json files
+
+After deploying ONAP, you need to configure ONAP with:
+
+- A cloud owner
+- A cloud region
+- A subscriber
+- A service type
+- A project name
+- An owning entity
+- A platform
+- A line of business
+- A cloud site
+
+If you're not familiar with how to configure ONAP, there are guides that use
+`robot <https://onap.readthedocs.io/en/elalto/submodules/integration.git/docs/docs_robot.html>`_
+or `direct api <https://wiki.onap.org/pages/viewpage.action?pageId=25431491>`_ requests available
+to help, as well as a guide for adding a new OpenStack site to ONAP.
+
+VVP Test Tool Setup
+"""""""""""""""""""
+
+On your local machine, or the system from which you will run the tests, you will need to clone the
+ONAP OOM project repo:
+
+.. code-block:: bash
+
+ git clone --branch 5.0.1-ONAP ssh://<username>@gerrit.onap.org:29418/oom --recurse-submodules
+
+VNF Preparation
+^^^^^^^^^^^^^^^
+
+The vnf lifecycle validation testsuite requires the VNF to be packaged into a specific directory
+hierarchy, shown below.
+
+.. code-block::
+
+ vnf_folder
+ ├── /templates
+ | └── base.yaml
+ | └── base.env
+ | └── incremental_0.yaml
+ | └── incremental_0.env
+ | └── ...
+ ├── /preloads
+ | └── base_preload.json
+ | └── incremental_0_preload.json
+ | └── ...
+ └── vnf-details.json
+
+- The name for vnf_folder is free-form, and can be located anywhere on your computer. The path to this folder will be passed to the testsuite as an argument.
+- /templates should contain your VVP-compliant VNF heat templates.
+- /preloads should contain a preload file for each VNF module (TODO: add link to preload documentation).
+ - For a VNF-API preload: vnf-name, vnf-type, generic-vnf-type, and generic-vnf-name should be empty strings.
+ - For a GR-API preload: vnf-name, vnf-type, vf-module-type, and vf-module-name should be empty strings.
+ - This information will be populated at runtime by the testsuite.
+- vnf-details should be a json file with the information that will be used by ONAP to instantiate the VNF. The structure of vnf-details is shown below.
+- VNF disk image must be uploaded and available in the OpenStack project being managed by ONAP
+- Modules must contain an entry for each module of the VNF. Only one module can be a base module.
+- api_type should match the format of the preloads that are provided in the package.
+- The other information should match what was used to configure ONAP during the pre-requisite section of this guide.
+
+.. code-block:: json
+
+ {
+ "vnf_name": "The Vnf Name",
+ "description": "Description of the VNF",
+ "modules": [
+ {
+ "filename": "base.yaml",
+ "isBase": "true",
+ "preload": "base_preload.json"
+ },
+ {
+ "filename": "incremental_0.yaml",
+ "isBase": "false",
+ "preload": "incremental_0.json"
+ },
+ ...
+ ],
+ "api_type": "[gr_api] or [vnf_api]",
+ "subscriber": "<subscriber name>",
+ "service_type": "<service type>",
+ "tenant_name": "<name of tenant>",
+ "region_id": "<name of region>",
+ "cloud_owner": "<name of cloud owner>",
+ "project_name": "<name of project>",
+ "owning_entity": "<name of owning entity>",
+ "platform": "<name of platform>",
+ "line_of_business": "<name of line of business>",
+ "os_password": "<openstack password>"
+ }
+
+Runnign the HEAT VNF Test
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The ONAP OOM Robot framework will run the test, using kubectl to manage the execution. The framework
+will copy your VNF template files to the robot container required to execute the test.
+
+.. code-block:: bash
+
+ cd oom/kubernetes/robot
+ $ ./instantiate-k8s.sh --help
+ ./instantiate-k8s.sh [options]
+
+ required:
+ -n, --namespace <namespace> namespace that robot pod is running under.
+ -f, --folder <folder> path to folder containing heat templates, preloads, and vnf-details.json.
+
+ additional options:
+ -p, --poll some cloud environments (like azure) have a short time out value when executing
+ kubectl. If your shell exits before the testsuite finishes, using this option
+ will poll the testsuite logs every 30 seconds until the test finishes.
+ -t, --tag <tag> robot testcase tag to execute (default is instantiate_vnf).
+
+ This script executes the VNF instantiation robot testsuite.
+ - It copies the VNF folder to the robot container that is part of the ONAP deployment.
+ - It models, distributes, and instantiates a heat-based VNF.
+ - It copies the logs to an output directory, and creates a tarball for upload to the OVP portal.
+
+
+**Sample execution:**
+
+.. code-block:: bash
+
+ $ ./instantiate-k8s.sh --namespace onap --folder /tmp/vnf-instantiation/examples/VNF_API/pass/multi_module/ --poll
+ ...
+ ...
+ ...
+ ...
+ ------------------------------------------------------------------------------
+ Testsuites.Vnf Instantiation :: The main driver for instantiating ... | PASS |
+ 1 critical test, 1 passed, 0 failed
+ 1 test total, 1 passed, 0 failed
+ ==============================================================================
+ Testsuites | PASS |
+ 1 critical test, 1 passed, 0 failed
+ 1 test total, 1 passed, 0 failed
+ ==============================================================================
+ Output: /share/logs/0003_ete_instantiate_vnf/output.xml
+ + set +x
+ testsuite has finished
+ Copying Results from pod...
+ /tmp/vnf-instantiation /tmp/vnf-instantiation
+ a log.html
+ a results.json
+ a stack_report.json
+ a validation-scripts.json
+ /tmp/vnf-instantiation
+ VNF test results: /tmp/vnfdata.46749/vnf_heat_results.tar.gz
+
+The testsuite takes about 10-15 minutes for a simple VNF, and will take longer for a more complicated VNF.
+
+Reporting Results
+"""""""""""""""""
+Once the testsuite is finished, it will create a directory and tarball in /tmp (the name of the directory
+and file is shown at the end of the stdout of the script). There will be a results.json in that directory
+that has the ultimate outcome of the test, in the structure shown below.
+
+**Log Files**
+
+The output tar file will have 4 log files in it.
+
+- results.json: This is high-level results file of all of the test steps, and is consumed by the OVP portal.
+- report.json: This is the output of the vvp validation scripts.
+- stack_report.json: This is the output from querying openstack to validate the heat modules.
+- log.html: This is the robot log, and contains each execution step of the testcase.
+
+If the result is "PASS", that means the testsuite was successful and the tarball is ready for submission
+to the OVP portal.
+
+**results.json**
+
+.. code-block:: json
+
+ {
+ "vnf_checksum": "afc57604a3b3b7401d5b8648328807b594d7711355a2315095ac57db4c334a50",
+ "build_tag": "vnf-validation-53270",
+ "version": "2019.09",
+ "test_date": "2019-09-04 17:50:10.575",
+ "duration": 437.002,
+ "vnf_type": "heat",
+ "testcases_list": [
+ {
+ "mandatory": "true",
+ "name": "onap-vvp.validate.heat",
+ "result": "PASS",
+ "objective": "onap heat template validation",
+ "sub_testcase": [],
+ "portal_key_file": "report.json"
+ },
+ {
+ "mandatory": "true",
+ "name": "onap-vvp.lifecycle_validate.heat",
+ "result": "PASS",
+ "objective": "onap vnf lifecycle validation",
+ "sub_testcase": [
+ {
+ "name": "model-and-distribute",
+ "result": "PASS"
+ },
+ {
+ "name": "instantiation",
+ "result": "PASS"
+ }
+ ],
+ "portal_key_file": "log.html"
+ },
+ {
+ "mandatory": "true",
+ "name": "stack_validation",
+ "result": "PASS",
+ "objective": "onap vnf openstack validation",
+ "sub_testcase": [],
+ "portal_key_file": "stack_report.json"
+ }
+ ]
+ }
+
+
+Additional Resources
+^^^^^^^^^^^^^^^^^^^^
+
+- `ONAP VVP Project <https://wiki.onap.org/display/DW/VNF+Validation+Program+Project>`_
+- `VVP Wiki Users Guide (this will track current ONAP master) <https://wiki.onap.org/pages/viewpage.action?pageId=68546123>`_
+
+Sample VNF templates are available on the VVP Wiki Users Guide page.
+
+Testing of TOSCA based VNFs
+---------------------------
+
+VNF Test Platform (VTP) provides an platform to on-board different test cases required for
+OVP for various VNF testing provided by VNFSDK (for TOSCA) and VVP(for HEAT) projects in
+ONAP. And it generates the test case outputs which would be uploaded into OVP portal for
+VNF badging.
+
+TOSCA VNF Test Environment
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+As pre-requestiests steps, Its assumed that, successful ONAP, Vendor VNFM and OpenStack
+cloud are already available. Below installation steps help to setup VTP components and CLI.
+
+.. image:: images/tocsa_vnf_test_environment.png
+ :align: center
+ :scale: 100%
+
+Installation
+^^^^^^^^^^^^
+
+Clone the VNFSDK repo.
+
+.. code-block:: bash
+
+ git clone --branch elalto https://git.onap.org/vnfsdk/refrepo
+
+Install the VTP by using script *refrepo/vnfmarket-be/deployment/install/vtp_install.sh*
+
+Follow the steps as below (in sequence):
+
+- vtp_install.sh --download : It will download all required artifacts into /opt/vtp_stage
+- vtp_install.sh --install : It will install VTP (/opt/controller) and CLI (/opt/oclip)
+- vtp_install.sh --start : It will start VTP controller as tomcat service and CLI as oclip service
+- vtp_install.sh --verify : It will verify the setup is done properly by running some test cases.
+
+Last step (verify) would check the health of VTP components and TOSCA VNF compliance and validation test cases.
+
+Check Available Test Cases
+""""""""""""""""""""""""""
+
+VTP supports to check the compliance of VNF and PNF based on ONAP VNFREQS.
+
+To check:
+
+- Go to command console
+- Run command oclip
+- Now it will provide a command prompt:
+
+*oclip:open-cli>*
+
+Now run command as below and check the supported compliance testcases for VNFREQS.
+
+- csar-validate - Helps to validate given VNF CSAR for all configured VNFREQS.
+- csar-validate-rxxx - Helps to validate given VNF CSAR for a given VNFREQS xxx.
+
+.. code-block:: bash
+
+ oclip:open-cli>schema-list --product onap-dublin --service vnf-compliance
+ +--------------+----------------+------------------------+--------------+----------+------+
+ |product |service |command |ocs-version |enabled |rpc |
+ +--------------+----------------+------------------------+--------------+----------+------+
+ |onap-dublin |vnf-compliance |csar-validate-r10087 |1.0 |true | |
+ +--------------+----------------+------------------------+--------------+----------+------+
+ |onap-dublin |vnf-compliance |csar-validate |1.0 |true | |
+ +--------------+----------------+------------------------+--------------+----------+------+
+ |onap-dublin |vnf-compliance |csar-validate-r26885 |1.0 |true | |
+ +--------------+----------------+------------------------+--------------+----------+------+
+ |onap-dublin |vnf-compliance |csar-validate-r54356 |1.0 |true | |
+ ...
+
+To know the details of each VNFREQS, run as below.
+
+.. code-block:: bash
+
+ oclip:open-cli>use onap-dublin
+ oclip:onap-dublin>csar-validate-r54356 --help
+ usage: oclip csar-validate-r54356
+
+ Data types used by NFV node and is based on TOSCA/YAML constructs specified in draft GS NFV-SOL 001.
+ The node data definitions/attributes used in VNFD MUST comply.
+
+Now run command as below and check the supported validation testcases
+
+.. code-block:: bash
+
+ oclip:onap-dublin>use open-cli
+ oclip:open-cli>schema-list --product onap-dublin --service vnf-validation
+ +--------------+----------------+----------------------+--------------+----------+------+
+ |product |service |command |ocs-version |enabled |rpc |
+ +--------------+----------------+----------------------+--------------+----------+------+
+ |onap-dublin |vnf-validation |vnf-tosca-provision |1.0 |true | |
+ +--------------+----------------+----------------------+--------------+----------+------+
+
+Configure ONAP with required VNFM and cloud details
+"""""""""""""""""""""""""""""""""""""""""""""""""""
+
+**1. Setup the OCOMP profile onap-dublin**
+
+Run following command to configure the ONAP service URL and creadentials as given below, which will be
+used by VTP while executing the test cases
+
+.. code-block:: bash
+
+ oclip:open-cli>use onap-dublin
+ oclip:onap-dublin>profile onap-dublin
+ oclip:onap-dublin>set sdc.onboarding:host-url=http://159.138.8.8:30280
+ oclip:onap-dublin>set sdc.onboarding:host-username=cs0008
+ oclip:onap-dublin>set sdc.onboarding:host-password=demo123456!
+ oclip:onap-dublin>set sdc.catalog:host-url=http://159.138.8.8:30205
+ oclip:onap-dublin>set sdc.catalog:host-password=demo123456\!
+ oclip:onap-dublin>set sdc.catalog:host-username=cs0008
+ oclip:onap-dublin>set sdc.catalog:service-model-approve:host-username=gv0001
+ oclip:onap-dublin>set sdc.catalog:service-model-distribute:host-username=op0001
+ oclip:onap-dublin>set sdc.catalog:service-model-test-start:host-username=jm0007
+ oclip:onap-dublin>set sdc.catalog:service-model-test-accept:host-username=jm0007
+ oclip:onap-dublin>set sdc.catalog:service-model-add-artifact:host-username=ocomp
+ oclip:onap-dublin>set sdc.catalog:vf-model-add-artifact:host-username=ocomp
+ oclip:onap-dublin>set aai:host-url=https://159.138.8.8:30233
+ oclip:onap-dublin>set aai:host-username=AAI
+ oclip:onap-dublin>set aai:host-password=AAI
+ oclip:onap-dublin>set vfc:host-url=http://159.138.8.8:30280
+ oclip:onap-dublin>set multicloud:host-url=http://159.138.8.8:30280
+
+NOTE: Mostly all above entries value would be same execept the IP address used in the
+URL, which would be ONAP k8s cluser IP.
+
+By default, SDC onboarding service does not provide node port, which is available to
+access from external ONAP network. so to enable for external access, register the SDC
+onboarding service into MSB and use MSB url for sdc.onboarding:host-url.
+
+.. code-block:: bash
+
+ oclip:onap-dublin> microservice-create --service-name sdcob --service-version v1.0 --service-url /onboarding-api/v1.0 --path /onboarding-api/v1.0 --node-ip 172.16.1.0 --node-port 8081
+
+NOTE: To find the node-ip and node-port, use the following steps.
+
+Find out SDC onboarding service IP and port details as given here:
+
+.. code-block:: bash
+
+ [root@onap-dublin-vfw-93996-50c1z ~]# kubectl get pods -n onap -o wide | grep sdc-onboarding-be
+ dev-sdc-sdc-onboarding-be-5564b877c8-vpwr5 2/2 Running 0 29d 172.16.1.0 192.168.2.163 <none> <none>
+ dev-sdc-sdc-onboarding-be-cassandra-init-mtvz6 0/1 Completed 0 29d 172.16.0.220 192.168.2.163 <none> <none>
+ [root@onap-dublin-vfw-93996-50c1z ~]#
+
+Note down the IP address for sdc-onboarding-be 172.16.1.0
+
+.. code-block:: bash
+
+ [root@onap-dublin-vfw-93996-50c1z ~]# kubectl get services -n onap -o wide | grep sdc-onboarding-be
+ sdc-onboarding-be ClusterIP 10.247.198.92 <none> 8445/TCP,8081/TCP 29d app=sdc-onboarding-be,release=dev-sdc
+ [root@onap-dublin-vfw-93996-50c1z ~]#
+
+Note down the port for sdc-onboarding-be 8445 8081
+
+Similarly, other service IP and Port could be discovered like above, in case not know earlier :)
+
+Verify these details once by typing 'set'
+
+.. code-block:: bash
+
+ oclip:onap-dublin> set
+
+This profile would be used by user while running the test cases with ONAP setup configured in it, as below
+oclip --profile onap-dublin vnf-tosca-provision ....
+
+**2. Setup SDC consumer**
+
+SDC uses consumer concept to configure required VN model and service model artifacts. So
+following commands required to run, which will create consumer named ocomp, which is
+already configured in onap-dublin profile created in above steps.
+
+.. code-block:: bash
+
+ oclip --product onap-dublin --profile onap-dublin sdc-consumer-create --consumer-name ocomp
+
+NOTE: command oclip could be used in scripting mode as above or in interactive mode as used
+in earlier steps
+
+**3. Update the cloud and vnfm driver details**
+
+In the configuration file /opt/oclip/conf/vnf-tosca-provision.json, update the cloud
+and VNFM details.
+
+.. code-block:: json
+
+ "cloud": {
+ "identity-url": "http://10.12.11.1:5000/v3",
+ "username": "admin",
+ "password": "password",
+ "region": "RegionOVP",
+ "version": "ocata",
+ "tenant": "ocomp"
+ },
+ "vnfm":{
+ "hwvnfmdriver":{
+ "version": "v1.0",
+ "url": "http://159.138.8.8:38088",
+ "username": "admin",
+ "password": "xxxx"
+ },
+ "gvnfmdriver":{
+ "version": "v1.0",
+ "url": "http://159.138.8.8:30280"
+ }
+ }
+
+**4.Configure the decided VNFRES (optional)**
+VTP allows to configure the set of VNFREQS to be considered while running the VNF
+compliance test cases in the configuration file /opt/oclip/conf/vnfreqs.properties.
+
+If not available, please create this file with following entries:
+
+.. code-block:: bash
+
+ vnfreqs.enabled=r02454,r04298,r07879,r09467,r13390,r23823,r26881,r27310,r35851,r40293,r43958,r66070,r77707,r77786,r87234,r10087,r21322,r26885,r40820,r35854,r65486,r17852,r46527,r15837,r54356,r67895,r95321,r32155,r01123,r51347,r787965,r130206
+ pnfreqs.enabled=r10087,r87234,r35854,r15837,r17852,r293901,r146092,r57019,r787965,r130206
+ # ignored all chef and ansible related tests
+ vnferrors.ignored=
+ pnferrors.ignored=
+
+Runnign the TOSCA VNF Test
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Every test provided in VTP is given with guidelines on how to use it. On every execution of test cases, use the following additional arguments based on requirements
+
+- --product onap-dublin - It helps VTP choose the test cases written for onap-dublin version
+- --profile onap-dublin - It helps VTP to use the profile settings provided by admin (optional)
+- --request-id - It helps VTP to track the progress of the test cases execution and user could use this id for same. (optional)
+
+So, final test case execution would be as below. To find the test case arguments details, run second command below.
+
+.. code-block:: bash
+
+ oclip --product onap-dublin --profile onap-dublin --request-id req-1 <test case name> <test case arguments>
+ oclip --product onap-dublin <test case name> --help
+
+Running TOSCA VNF Compliance Testing
+""""""""""""""""""""""""""""""""""""
+
+To run compliance test as below with given CSAR file
+
+.. clode-block:: bash
+
+ oclip --product onap-dublin csar-validate --csar <csar file complete path>
+
+It will produce the result format as below:
+
+.. code-block:: json
+
+ {
+ "date": "Fri Sep 20 17:34:24 CST 2019",
+ "criteria": "PASS",
+ "contact": "ONAP VTP Team onap-discuss@lists.onap.org",
+ "results": [
+ {
+ "description": "V2.4.1 (2018-02)",
+ "passed": true,
+ "vnfreqName": "SOL004",
+ "errors": []
+ },
+ {
+ "description": "If the VNF or PNF CSAR Package utilizes Option 2 for package security, then the complete CSAR file MUST be digitally signed with the VNF or PNF provider private key. The VNF or PNF provider delivers one zip file consisting of the CSAR file, a signature file and a certificate file that includes the VNF or PNF provider public key. The certificate may also be included in the signature container, if the signature format allows that. The VNF or PNF provider creates a zip file consisting of the CSAR file with .csar extension, signature and certificate files. The signature and certificate files must be siblings of the CSAR file with extensions .cms and .cert respectively.\n",
+ "passed": true,
+ "vnfreqName": "r787965",
+ "errors": []
+ }
+ ],
+ "platform": "VNFSDK - VNF Test Platform (VTP) 1.0",
+ "vnf": {
+ "mode": "WITH_TOSCA_META_DIR",
+ "vendor": "ONAP",
+ "name": null,
+ "type": "TOSCA",
+ "version": null
+ }
+ }
+
+In case of errors, the errors section will have list of details as below. Each error block, will be
+given with error code and error details. Error code would be very useful to provide the troubleshooting
+guide in future. Note, to generate the test result in OVP archieve format, its recommended to run this compliance
+test with request-id similar to running validation test as below.
+
+.. code-block:: bash
+
+ [
+ {
+ "vnfreqNo": "R66070",
+ "code": "0x1000",
+ "message": "MissinEntry-Definitions file",
+ "lineNumber": -1
+ }
+ ]
+
+Running TOSCA VNF Validation Testing
+""""""""""""""""""""""""""""""""""""
+VTP provides validation test case with following modes:
+
+.. image:: images/tosca_vnf_test_flow.png
+ :align: left
+ :scale: 100%
+
+
+setup: Create requires Vendor, Service Subscription and VNF cloud in ONAP
+standup: From the given VSP csar, VNF csar and NS csar, it creates VF Model, NS Model and NS service
+cleanup: Remove those entries created during provision
+provision: Runs setup -> standup
+validate: Runs setup -> standup -> cleanup
+checkup: mode helps to verify automation is deployed properly.
+
+For OVP badging, validate mode would be used as below:
+
+.. code-block:: bash
+
+ oclip --request-id WkVVu9fD--product onap-dublin --profile onap-dublin vnf-tosca-provision --vsp <vsp csar> --vnf-csar <v
+
+Validation testing would take for a while to complete the test execution, so user could use the above
+given request-id, to tracking the progress as below:
+
+.. code-block:: bash
+
+ oclip execution-list --request-id WkVVu9fD
+ +------------+------------------------+--------------+------------------+------------------------------+--------------+------------+--------------------------+--------------------------+
+ |request-id |execution-id |product |service |command |profile |status |start-time |end-time |
+ +------------+------------------------+--------------+------------------+------------------------------+--------------+------------+--------------------------+--------------------------+
+ |WkVVu9fD |WkVVu9fD-1568731678753 |onap-dublin |vnf-validation |vnf-tosca-provision | |in-progress |2019-09-17T14:47:58.000 | |
+ +------------+------------------------+--------------+------------------+------------------------------+--------------+------------+--------------------------+--------------------------+
+ |WkVVu9fD |WkVVu9fD-1568731876397 |onap-dublin |sdc.catalog |service-model-test-request |onap-dublin |in-progress |2019-09-17T14:51:16.000 | |
+ +------------+------------------------+--------------+------------------+------------------------------+--------------+------------+--------------------------+--------------------------+
+ |WkVVu9fD |WkVVu9fD-1568731966966 |onap-dublin |sdc.onboarding |vsp-archive |onap-dublin |completed |2019-09-17T14:52:46.000 |2019-09-17T14:52:47.000 |
+ +------------+------------------------+--------------+------------------+------------------------------+--------------+------------+--------------------------+--------------------------+
+ |WkVVu9fD |WkVVu9fD-1568731976982 |onap-dublin |aai |subscription-delete |onap-dublin |completed |2019-09-17T14:52:56.000 |2019-09-17T14:52:57.000 |
+ +------------+------------------------+--------------+------------------+------------------------------+--------------+------------+--------------------------+--------------------------+
+ |WkVVu9fD |WkVVu9fD-1568731785780 |onap-dublin |aai |vnfm-create |onap-dublin |completed |2019-09-17T14:49:45.000 |2019-09-17T14:49:46.000 |
+ ......
+
+While executing the test cases, VTP provides unique execution-id (2nd column) for each step. As you note
+in the example above, some steps are in-progress, while others are completed already. If there is error
+then status will be set to failed.
+
+To find out the foot-print of each step, following commands are available
+
+.. code-block:: bash
+
+ oclip execution-show-out --execution-id WkVVu9fD-1568731785780 - Reports the standard output logs
+ oclip execution-show-err --execution-id WkVVu9fD-1568731785780 - Reports the standard error logs
+ oclip execution-show-debug --execution-id WkVVu9fD-1568731785780 - Reports the debug details like HTTP request and responseoclip execution-show --execution-id WkVVu9fD-1568731785780 - Reports the complete foot-print of inputs, outputs of steps
+
+Track the progress of the vnf-tosca-provision test cases until its completed. Then the out of the validation
+test cases could be retrieved as below:
+
+.. code-block:: bash
+
+ oclip execution-show --execution-id WkVVu9fD-1568731678753 - use vnf tosca test case execution id here
+
+It will provides the output format as below:
+
+.. code-block:: json
+
+ {
+ "output": {
+ "ns-id": null,
+ "vnf-id": "",
+ "vnfm-driver": "hwvnfmdriver",
+ "vnf-vendor-name": "huawei",
+ "onap-objects": {
+ "ns_instance_id": null,
+ "tenant_version": null,
+ "service_type_id": null,
+ "tenant_id": null,
+ "subscription_version": null,
+ "esr_vnfm_id": null,
+ "location_id": null,
+ "ns_version": null,
+ "vnf_status": "active",
+ "entitlement_id": null,
+ "ns_id": null,
+ "cloud_version": null,
+ "cloud_id": null,
+ "vlm_version": null,
+ "esr_vnfm_version": null,
+ "vlm_id": null,
+ "vsp_id": null,
+ "vf_id": null,
+ "ns_instance_status": "active",
+ "service_type_version": null,
+ "ns_uuid": null,
+ "location_version": null,
+ "feature_group_id": null,
+ "vf_version": null,
+ "vsp_version": null,
+ "agreement_id": null,
+ "vf_uuid": null,
+ "ns_vf_resource_id": null,
+ "vsp_version_id": null,
+ "customer_version": null,
+ "vf_inputs": null,
+ "customer_id": null,
+ "key_group_id": null,
+ },
+ "vnf-status": "active",
+ "vnf-name": "vgw",
+ "ns-status": "active"
+ },
+ "input": {
+ "mode": "validate",
+ "vsp": "/tmp/data/vtp-tmp-files/1568731645518.csar",
+ "vnfm-driver": "hwvnfmdriver",
+ "config-json": "/opt/oclip/conf/vnf-tosca-provision.json",
+ "vnf-vendor-name": "huawei",
+ "ns-csar": "/tmp/data/vtp-tmp-files/1568731660745.csar",
+ "onap-objects": "{}",
+ "timeout": "600000",
+ "vnf-name": "vgw",
+ "vnf-csar": "/tmp/data/vtp-tmp-files/1568731655310.csar"
+ },
+ "product": "onap-dublin",
+ "start-time": "2019-09-17T14:47:58.000",
+ "service": "vnf-validation",
+ "end-time": "2019-09-17T14:53:46.000",
+ "request-id": "WkVVu9fD-1568731678753",
+ "command": "vnf-tosca-provision",
+ "status": "completed"
+ }
+
+Reporting Results
+"""""""""""""""""
+
+VTP provides translation tool to migrate the VTP result into OVP portal format and generates the tar file
+for the given test case execution. Please refer `<https://github.com/onap/vnfsdk-refrepo/tree/master/vnfmarket-be/deployment/vtp2ovp>`_ for more details.
+
+Once tar is generated, it can be used to submit into OVP portal `<https://vnf-verified.lfnetworking.org/>`_
+
+.. References
+.. _`OVP VNF portal`: https://vnf-verified.lfnetworking.org
diff --git a/dovetail/api/app/__init__.py b/dovetail/api/app/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/dovetail/api/app/__init__.py
diff --git a/dovetail/api/app/constants.py b/dovetail/api/app/constants.py
new file mode 100644
index 00000000..f6ffd1ba
--- /dev/null
+++ b/dovetail/api/app/constants.py
@@ -0,0 +1,15 @@
+NFVI_PROJECT = ['bottlenecks', 'functest', 'yardstick']
+VNF_PROJECT = ['onap-vtp', 'onap-vvp']
+RUN_TEST_ITEMS = {
+ 'arguments': {
+ 'no_multiple': ['testsuite', 'deploy_scenario'],
+ 'multiple': ['testarea', 'testcase']
+ },
+ 'options': ['mandatory', 'no_api_validation', 'no_clean', 'stop', 'debug',
+ 'opnfv_ci', 'report', 'offline', 'optional']
+}
+CONFIG_YAML_FILES = {
+ 'hosts': 'hosts.yaml',
+ 'pods': 'pod.yaml',
+ 'tempest_conf': 'tempest_conf.yaml'
+}
diff --git a/dovetail/api/app/routes.py b/dovetail/api/app/routes.py
new file mode 100644
index 00000000..352d69f3
--- /dev/null
+++ b/dovetail/api/app/routes.py
@@ -0,0 +1,102 @@
+#!flask/bin/python
+
+import json
+import os
+import subprocess
+import time
+import uuid
+
+from flask import Flask, jsonify, request
+from flask_cors import CORS
+
+from app.server import Server
+
+app = Flask(__name__)
+CORS(app)
+
+
+@app.route('/api/v1/scenario/nfvi/testsuites', methods=['GET'])
+def get_all_testsuites():
+ testsuites = Server.list_testsuites()
+ return jsonify({'testsuites': testsuites}), 200
+
+
+@app.route('/api/v1/scenario/nfvi/testcases', methods=['GET'])
+def get_testcases():
+ testcases = Server.list_testcases()
+ return jsonify({'testcases': testcases}), 200
+
+
+@app.route('/api/v1/scenario/nfvi/execution', methods=['POST'])
+def run_testcases():
+ requestId = request.args.get('requestId')
+ if not requestId:
+ requestId = uuid.uuid1()
+ if os.getenv('DOVETAIL_HOME'):
+ dovetail_home = os.getenv('DOVETAIL_HOME')
+ else:
+ return 'No DOVETAIL_HOME found in env.\n', 500
+
+ server = Server(dovetail_home, requestId, request.json)
+
+ msg, ret = server.set_conf_files()
+ if not ret:
+ return msg, 500
+
+ msg, ret = server.set_vm_images()
+ if not ret:
+ return msg, 500
+
+ input_str = server.parse_request()
+
+ repo_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),
+ os.pardir, os.pardir))
+ run_script = os.path.join(repo_dir, 'run.py')
+
+ cmd = 'python3 {} {}'.format(run_script, input_str)
+ api_home = os.path.join(dovetail_home, str(requestId))
+ subprocess.Popen(cmd, shell=True, env={'DOVETAIL_HOME': api_home,
+ 'LC_ALL': 'C.UTF-8', 'LANG': 'C.UTF-8'})
+
+ testcases_file = os.path.join(dovetail_home, str(requestId),
+ 'results', 'testcases.json')
+ for loop in range(60):
+ if not os.path.isfile(testcases_file):
+ time.sleep(1)
+ else:
+ break
+ else:
+ return 'Can not get file testcases.json.\n', 500
+
+ with open(testcases_file, "r") as f:
+ for jsonfile in f:
+ data = json.loads(jsonfile)
+ testcases = data['testcases']
+ testsuite = data['testsuite']
+
+ result = server.get_execution_status(testsuite, testcases, testcases)
+
+ return jsonify({'result': result}), 200
+
+
+@app.route('/api/v1/scenario/nfvi/execution/status/<exec_id>',
+ methods=['POST'])
+def get_testcases_status(exec_id):
+ if 'testcase' not in request.json:
+ return 'Need testcases list as input.\n', 400
+
+ testcases = request.json['testcase']
+ dovetail_home = os.getenv('DOVETAIL_HOME')
+
+ server = Server(dovetail_home, exec_id, request.json)
+ testcases_file = os.path.join(dovetail_home, str(exec_id),
+ 'results', 'testcases.json')
+ with open(testcases_file, "r") as f:
+ for jsonfile in f:
+ data = json.loads(jsonfile)
+ testsuite = data['testsuite']
+
+ result = server.get_execution_status(testsuite, testcases,
+ data['testcases'])
+
+ return jsonify({'result': result}), 200
diff --git a/dovetail/api/app/server.py b/dovetail/api/app/server.py
new file mode 100644
index 00000000..d44e2ee5
--- /dev/null
+++ b/dovetail/api/app/server.py
@@ -0,0 +1,297 @@
+import json
+import os
+import shutil
+
+import app.constants as constants
+from app.utils import Utils
+
+from dovetail.testcase import Testsuite, Testcase
+
+
+class Server(object):
+
+ def __init__(self, dovetail_home=None, requestId=None, requestData=None):
+ self.dovetail_home = dovetail_home
+ self.requestId = requestId
+ self.requestData = requestData
+
+ @staticmethod
+ def list_testsuites():
+ return Testsuite.load()
+
+ @staticmethod
+ def list_testcases():
+ testcases = Testcase.load()
+ testcase_list = []
+ for key, value in testcases.items():
+ testcase = {'testCaseName': key,
+ 'description': value.objective(),
+ 'subTestCase': value.sub_testcase()}
+ if value.validate_type() in constants.NFVI_PROJECT:
+ testcase['scenario'] = 'nfvi'
+ elif value.validate_type() in constants.VNF_PROJECT:
+ testcase['scenario'] = 'vnf'
+ else:
+ testcase['scenario'] = 'unknown'
+ testcase_list.append(testcase)
+ return testcase_list
+
+ def set_vm_images(self):
+ image_path = os.path.join(self.dovetail_home, str(self.requestId),
+ 'images')
+ try:
+ origin_image_path = self.requestData['conf']['vm_images']
+ except KeyError:
+ origin_image_path = os.path.join(self.dovetail_home, 'images')
+ if os.path.exists(origin_image_path):
+ try:
+ shutil.copytree(origin_image_path, image_path)
+ except Exception as e:
+ return str(e), False
+ return "Success to set vm images.\n", True
+ else:
+ return "Could not find vm images.\n", False
+
+ def set_conf_files(self):
+ config_path = os.path.join(self.dovetail_home, str(self.requestId),
+ 'pre_config')
+ origin_config_path = os.path.join(self.dovetail_home, 'pre_config')
+ if os.path.exists(origin_config_path):
+ try:
+ shutil.copytree(origin_config_path, config_path)
+ except Exception as e:
+ return str(e), False
+
+ # check and prepare mandatory env_config.sh file
+ # if there are envs in request body, use it
+ # otherwise, use the file in pre_config
+ # if don't have this file, return False with error message
+ env_file = os.path.join(config_path, 'env_config.sh')
+ try:
+ Utils.write_env_file(self.requestData['conf']['envs'], env_file)
+ except KeyError:
+ if not os.path.isfile(env_file):
+ return "No 'envs' found in the request body.\n", False
+ else:
+ pass
+ except Exception as e:
+ return str(e), False
+
+ # check and prepare other optional yaml files
+ for key, value in constants.CONFIG_YAML_FILES.items():
+ config_file = os.path.join(config_path, value)
+ try:
+ Utils.write_yaml_file(self.requestData['conf'][key],
+ config_file)
+ except KeyError:
+ pass
+ except Exception as e:
+ return str(e), False
+
+ return 'Success to prepare all config files.\n', True
+
+ def parse_request(self):
+ output = ''
+ default_args = constants.RUN_TEST_ITEMS['arguments']
+ default_options = constants.RUN_TEST_ITEMS['options']
+
+ for arg in default_args['no_multiple']:
+ if arg in self.requestData.keys():
+ output = output + ' --{} {}'.format(arg, self.requestData[arg])
+ for arg in default_args['multiple']:
+ if arg in self.requestData.keys() and self.requestData[arg]:
+ for item in self.requestData[arg]:
+ output = output + ' --{} {}'.format(arg, item)
+
+ if 'options' not in self.requestData.keys():
+ return output
+
+ for option in default_options:
+ if option in self.requestData['options']:
+ output = output + ' --{}'.format(option)
+
+ return output
+
+ def get_execution_status(self, testsuite, request_testcases,
+ exec_testcases):
+ results_dir = os.path.join(self.dovetail_home, str(self.requestId),
+ 'results')
+ results = []
+ for tc in request_testcases:
+ if tc not in exec_testcases:
+ res = {'testCaseName': tc, 'status': 'NOT_EXECUTED'}
+ results.append(res)
+ continue
+
+ tc_type = tc.split('.')[0]
+ checker = CheckerFactory.create(tc_type)
+ status, result = checker.get_status(results_dir, tc)
+
+ res = {'testCaseName': tc, 'testSuiteName': testsuite,
+ 'scenario': 'nfvi', 'executionId': self.requestId,
+ 'results': result, 'status': status, 'timestart': None,
+ 'endTime': None}
+ try:
+ res['timestart'] = result['timestart']
+ res['endTime'] = result['timestop']
+ except Exception:
+ pass
+
+ results.append(res)
+
+ return results
+
+
+class Checker(object):
+
+ def __init__(self):
+ pass
+
+ @staticmethod
+ def get_status_from_total_file(total_file, testcase):
+ with open(total_file, 'r') as f:
+ for jsonfile in f:
+ try:
+ data = json.loads(jsonfile)
+ for item in data['testcases_list']:
+ if item['name'] == testcase:
+ return item['result'], item['sub_testcase']
+ except KeyError as e:
+ return 'FAILED', None
+ except ValueError:
+ continue
+ return 'FAILED', None
+
+
+class FunctestChecker(Checker):
+
+ def get_status(self, results_dir, testcase):
+ functest_file = os.path.join(results_dir, 'functest_results.txt')
+ total_file = os.path.join(results_dir, 'results.json')
+ if not os.path.isfile(functest_file):
+ if not os.path.isfile(total_file):
+ return 'IN_PROGRESS', None
+ return 'FAILED', None
+ criteria = None
+ sub_testcase = []
+ timestart = None
+ timestop = None
+
+ # get criteria and sub_testcase when all tests completed
+ if os.path.isfile(total_file):
+ criteria, sub_testcase = self.get_status_from_total_file(
+ total_file, testcase)
+ if criteria == 'FAILED':
+ return 'FAILED', None
+
+ # get detailed results from functest_results.txt
+ with open(functest_file, 'r') as f:
+ for jsonfile in f:
+ try:
+ data = json.loads(jsonfile)
+ if data['build_tag'].endswith(testcase):
+ criteria = data['criteria'] if not criteria \
+ else criteria
+ timestart = data['start_date']
+ timestop = data['stop_date']
+ break
+ except KeyError:
+ return 'FAILED', None
+ except ValueError:
+ continue
+ else:
+ if not criteria:
+ return 'IN_PROGRESS', None
+
+ status = 'COMPLETED' if criteria == 'PASS' else 'FAILED'
+ results = {'criteria': criteria, 'sub_testcase': sub_testcase,
+ 'timestart': timestart, 'timestop': timestop}
+ return status, results
+
+
+class YardstickChecker(Checker):
+
+ def get_status(self, results_dir, testcase):
+ yardstick_file = os.path.join(results_dir, 'ha_logs',
+ '{}.out'.format(testcase))
+ total_file = os.path.join(results_dir, 'results.json')
+ if not os.path.isfile(yardstick_file):
+ if not os.path.isfile(total_file):
+ return 'IN_PROGRESS', None
+ return 'FAILED', None
+
+ criteria = None
+
+ # get criteria and sub_testcase when all tests completed
+ if os.path.isfile(total_file):
+ criteria, _ = self.get_status_from_total_file(total_file, testcase)
+ if criteria == 'FAILED':
+ return 'FAILED', None
+
+ with open(yardstick_file, 'r') as f:
+ for jsonfile in f:
+ data = json.loads(jsonfile)
+ try:
+ if not criteria:
+ criteria = data['result']['criteria']
+ if criteria == 'PASS':
+ details = data['result']['testcases']
+ for key, value in details.items():
+ sla_pass = value['tc_data'][0]['data']['sla_pass']
+ if not 1 == sla_pass:
+ criteria = 'FAIL'
+ except KeyError:
+ return 'FAILED', None
+
+ status = 'COMPLETED' if criteria == 'PASS' else 'FAILED'
+ results = {'criteria': criteria, 'timestart': None, 'timestop': None}
+ return status, results
+
+
+class BottlenecksChecker(Checker):
+
+ def get_status(self, results_dir, testcase):
+ bottlenecks_file = os.path.join(results_dir, 'stress_logs',
+ '{}.out'.format(testcase))
+ total_file = os.path.join(results_dir, 'results.json')
+ if not os.path.isfile(bottlenecks_file):
+ if not os.path.isfile(total_file):
+ return 'IN_PROGRESS', None
+ return 'FAILED', None
+
+ criteria = None
+
+ # get criteria and sub_testcase when all tests completed
+ if os.path.isfile(total_file):
+ criteria, _ = self.get_status_from_total_file(total_file, testcase)
+ if criteria == 'FAILED':
+ return 'FAILED', None
+
+ with open(bottlenecks_file, 'r') as f:
+ for jsonfile in f:
+ data = json.loads(jsonfile)
+ try:
+ if not criteria:
+ criteria = data['data_body']['result']
+ except KeyError:
+ return 'FAILED', None
+
+ status = 'COMPLETED' if criteria == 'PASS' else 'FAILED'
+ results = {'criteria': criteria, 'timestart': None, 'timestop': None}
+ return status, results
+
+
+class CheckerFactory(object):
+
+ CHECKER_MAP = {
+ 'functest': FunctestChecker,
+ 'yardstick': YardstickChecker,
+ 'bottlenecks': BottlenecksChecker
+ }
+
+ @classmethod
+ def create(cls, tc_type):
+ try:
+ return cls.CHECKER_MAP[tc_type]()
+ except KeyError:
+ return None
diff --git a/dovetail/api/app/utils.py b/dovetail/api/app/utils.py
new file mode 100644
index 00000000..9f35ee03
--- /dev/null
+++ b/dovetail/api/app/utils.py
@@ -0,0 +1,24 @@
+import json
+import os
+
+
+class Utils(object):
+
+ @staticmethod
+ def write_env_file(envs, file_path):
+ file_dir = os.path.dirname(file_path)
+ if not os.path.exists(file_dir):
+ os.makedirs(file_dir)
+ with open(file_path, "w") as f:
+ for key, value in envs.items():
+ f.write("export {}={}\n".format(key, value))
+ return True
+
+ @staticmethod
+ def write_yaml_file(data, file_path):
+ file_dir = os.path.dirname(file_path)
+ if not os.path.exists(file_dir):
+ os.makedirs(file_dir)
+ with open(file_path, "w") as f:
+ f.write(json.dumps(data) + '\n')
+ return True
diff --git a/dovetail/api/boot.sh b/dovetail/api/boot.sh
new file mode 100755
index 00000000..9fbb5484
--- /dev/null
+++ b/dovetail/api/boot.sh
@@ -0,0 +1,16 @@
+#!/bin/sh
+
+mkdir -p /var/www/html/dovetail-api
+cp -r /home/opnfv/swagger-ui/dist/* /var/www/html/dovetail-api
+cp /home/opnfv/dovetail/dovetail/api/swagger.yaml /var/www/html/dovetail-api
+sed -i 's#url: "https://petstore.swagger.io/v2/swagger.json"#url: "swagger.yaml"#g' /var/www/html/dovetail-api/index.html
+sed -i '/deepLinking: true,/a\ validatorUrl: null,' /var/www/html/dovetail-api/index.html
+
+if [[ -n ${SWAGGER_HOST} ]]; then
+ sed -i "s/host: localhost:8888/host: ${SWAGGER_HOST}/g" /var/www/html/dovetail-api/swagger.yaml
+fi
+
+/etc/init.d/apache2 start
+
+cd $(dirname $(readlink -f $0))
+exec gunicorn -b :5000 --access-logfile - --error-logfile - app.routes:app
diff --git a/dovetail/api/swagger.yaml b/dovetail/api/swagger.yaml
new file mode 100644
index 00000000..54695d7e
--- /dev/null
+++ b/dovetail/api/swagger.yaml
@@ -0,0 +1,346 @@
+swagger: "2.0"
+info:
+ description: "This is the dovetail API."
+ version: "1.0.0"
+ title: "Dovetail API"
+ contact:
+ email: "xudan16@huawei.com"
+ license:
+ name: "Apache 2.0"
+ url: "http://www.apache.org/licenses/LICENSE-2.0.html"
+host: localhost:8888
+basePath: "/api/v1/scenario/nfvi"
+tags:
+- name: "testsuites"
+ description: "Operations about testsuites"
+- name: "testcases"
+ description: "Operations about test cases"
+- name: "execution"
+ description: "Operations about running test cases"
+schemes:
+- "http"
+paths:
+ /testsuites:
+ get:
+ tags:
+ - "testsuites"
+ summary: "Get all testsuites"
+ description: ""
+ operationId: "getTestsuites"
+ consumes:
+ - "application/json"
+ produces:
+ - "application/json"
+ parameters: []
+ responses:
+ 200:
+ description: "successful operation"
+ default:
+ description: Unexpected error
+ /testcases:
+ get:
+ tags:
+ - "testcases"
+ summary: "Get all test cases"
+ description: ""
+ operationId: "getTestcases"
+ consumes:
+ - "application/json"
+ produces:
+ - "application/json"
+ parameters: []
+ responses:
+ 200:
+ description: "successful operation"
+ default:
+ description: Unexpected error
+ /execution:
+ post:
+ tags:
+ - "execution"
+ summary: "Run test cases"
+ description: ""
+ operationId: "runTestCases"
+ consumes:
+ - "application/json"
+ produces:
+ - "application/json"
+ parameters:
+ - name: "body"
+ in: "body"
+ description: "All info used to run tests"
+ required: false
+ schema:
+ $ref: "#/definitions/RunInfo"
+ responses:
+ 200:
+ description: "successful operation"
+ schema:
+ $ref: "#/definitions/StatusResponse"
+ 500:
+ description: "internal error"
+ default:
+ description: Unexpected error
+ /execution/{exec_id}:
+ post:
+ tags:
+ - "execution"
+ summary: "Run test cases with exec_id"
+ description: ""
+ operationId: "runTestCasesWithID"
+ consumes:
+ - "application/json"
+ produces:
+ - "application/json"
+ parameters:
+ - name: "exec_id"
+ in: "path"
+ description: "ID of this run, will generate randomly if not given"
+ required: true
+ schema:
+ type: "integer"
+ format: "uuid"
+ - name: "body"
+ in: "body"
+ description: "All info used to run tests"
+ required: false
+ schema:
+ $ref: "#/definitions/RunInfo"
+ responses:
+ 200:
+ description: "successful operation"
+ schema:
+ $ref: "#/definitions/StatusResponse"
+ 500:
+ description: "internal error"
+ default:
+ description: Unexpected error
+ /execution/status/{exec_id}:
+ post:
+ tags:
+ - "execution/status"
+ summary: "Get status of running test cases"
+ description: ""
+ operationId: "getTestCasesStatus"
+ consumes:
+ - "application/json"
+ produces:
+ - "application/json"
+ parameters:
+ - name: "exec_id"
+ in: "path"
+ description: "exec_id used to get the status of test cases"
+ required: true
+ schema:
+ type: "integer"
+ format: "uuid"
+ - name: "body"
+ in: "body"
+ description: "Test case list used to get status"
+ required: true
+ schema:
+ $ref: "#/definitions/TestCaseList"
+ responses:
+ 200:
+ description: "successful operation"
+ schema:
+ $ref: "#/definitions/StatusResponse"
+ 500:
+ description: "internal error"
+ default:
+ description: Unexpected error
+definitions:
+ TestCaseList:
+ type: "object"
+ properties:
+ testcase:
+ type: "array"
+ items:
+ type: "string"
+ example:
+ - "functest.vping.ssh"
+ - "yardstick.ha.rabbitmq"
+ Node:
+ type: "object"
+ required:
+ - "name"
+ - "role"
+ - "ip"
+ - "user"
+ properties:
+ name:
+ type: "string"
+ example: "node1"
+ role:
+ type: "string"
+ enum:
+ - "Controller"
+ - "Compute"
+ - "Jumpserver"
+ ip:
+ type: "string"
+ example: "192.168.117.222"
+ user:
+ type: "string"
+ example: "root"
+ password:
+ type: "string"
+ example: "root"
+ key_filename:
+ type: "string"
+ example: "/home/ovp/pre_config/id_rsa"
+ ProcessInfo:
+ type: "object"
+ required:
+ - "testcase_name"
+ properties:
+ testcase_name:
+ type: "string"
+ example: "yardstick.ha.rabbitmq"
+ attack_host:
+ type: "string"
+ example: "node1"
+ attack_process:
+ type: "string"
+ example: "rabbitmq"
+ Pods:
+ type: "object"
+ properties:
+ nodes:
+ type: "array"
+ items:
+ $ref: '#/definitions/Node'
+ process_info:
+ type: "array"
+ items:
+ $ref: "#/definitions/ProcessInfo"
+ tempestconf:
+ type: "object"
+ additionalProperties:
+ type: string
+ TempestConf:
+ type: "object"
+ additionalProperties:
+ $ref: "#/definitions/tempestconf"
+ Hosts:
+ type: "object"
+ additionalProperties:
+ type: "array"
+ items:
+ type: "string"
+ Envs:
+ type: "object"
+ additionalProperties:
+ type: string
+ example:
+ OS_USERNAME: "admin"
+ OS_PASSWORD: "admin"
+ OS_AUTH_URL: "https://192.168.117.222:5000/v3"
+ EXTERNAL_NETWORK: "ext-net"
+ Conf:
+ type: "object"
+ properties:
+ vm_images:
+ type: "string"
+ example: "/home/ovp/images"
+ pods:
+ $ref: "#/definitions/Pods"
+ tempest_conf:
+ $ref: "#/definitions/TempestConf"
+ hosts:
+ $ref: "#/definitions/Hosts"
+ envs:
+ $ref: "#/definitions/Envs"
+ RunInfo:
+ type: "object"
+ properties:
+ conf:
+ $ref: "#/definitions/Conf"
+ testcase:
+ type: "array"
+ items:
+ type: "string"
+ example:
+ - "functest.vping.ssh"
+ - "yardstick.ha.rabbitmq"
+ testsuite:
+ type: "string"
+ example: "ovp.2019.12"
+ testarea:
+ type: "array"
+ items:
+ type: "string"
+ example:
+ - "vping"
+ - "ha"
+ deploy_scenario:
+ type: "string"
+ example: "os-nosdn-ovs-ha"
+ options:
+ type: "array"
+ items:
+ type: "string"
+ enum:
+ - "opnfv-ci"
+ - "optional"
+ - "offline"
+ - "report"
+ - "debug"
+ - "stop"
+ - "no-clean"
+ - "no-api-validation"
+ - "mandatory"
+ example:
+ - "debug"
+ - "report"
+ Results:
+ type: "object"
+ properties:
+ criteria:
+ type: "string"
+ enum:
+ - "PASS"
+ - "FAIL"
+ timestart:
+ type: "string"
+ format: "date-time"
+ timestop:
+ type: "string"
+ format: "date-time"
+ TestCaseStatus:
+ type: "object"
+ properties:
+ endTime:
+ type: "string"
+ format: "date-time"
+ executionId:
+ type: "string"
+ format: "uuid"
+ results:
+ $ref: "#/definitions/Results"
+ scenario:
+ type: "string"
+ example: "nfvi"
+ status:
+ type: "string"
+ enum:
+ - "IN_PROGRESS"
+ - "COMPLETED"
+ - "FAILED"
+ - "NOT_EXECUTED"
+ testCaseName:
+ type: "string"
+ example: "functest.vping.ssh"
+ testSuiteName:
+ type: "string"
+ example: "ovp.2019.12"
+ timestart:
+ type: "string"
+ format: "date-time"
+ StatusResponse:
+ type: "object"
+ properties:
+ result:
+ type: "array"
+ items:
+ $ref: "#/definitions/TestCaseStatus"
diff --git a/dovetail/cli/commands/cli_testcase.py b/dovetail/cli/commands/cli_testcase.py
index e91d88eb..6711381c 100644
--- a/dovetail/cli/commands/cli_testcase.py
+++ b/dovetail/cli/commands/cli_testcase.py
@@ -19,12 +19,13 @@ import dovetail.utils.dovetail_utils as dt_utils
class CliTestcase(object):
- @classmethod
- def testsuite_load(cls):
+ @staticmethod
+ def testsuite_load():
dt_cfg.load_config_files(constants.CONF_PATH)
Testsuite.load()
- def list_one_testsuite(self, testsuite):
+ @staticmethod
+ def list_one_testsuite(testsuite):
testsuite_stream = Testsuite.get(testsuite)
if testsuite_stream:
mandatory = dt_utils.get_value_from_dict(
@@ -59,7 +60,8 @@ class CliTestcase(object):
else:
click.echo("No testsuite defined yet in dovetail!!!")
- def show_testcase(self, name):
+ @staticmethod
+ def show_testcase(name):
tc_path = os.path.join(constants.TESTCASE_PATH, "{}.yml".format(name))
if os.path.isfile(tc_path):
with open(tc_path, 'r') as stream:
@@ -70,7 +72,8 @@ class CliTestcase(object):
else:
click.echo("testcase %s not exist or not supported" % name)
- def run(self, args_str):
+ @staticmethod
+ def run(args_str):
options = ''
if args_str:
options = options + args_str
@@ -79,7 +82,7 @@ class CliTestcase(object):
os.path.join(os.path.dirname(__file__),
os.pardir, os.pardir))
- cmd = ("python %s/run.py"
+ cmd = ("python3 %s/run.py"
" %s" % (repo_dir, options))
dt_utils.exec_cmd(cmd, exit_on_error=True,
exec_msg_on=False, info=True)
diff --git a/dovetail/container.py b/dovetail/container.py
index ed006dc9..b2a9428f 100644
--- a/dovetail/container.py
+++ b/dovetail/container.py
@@ -9,11 +9,12 @@
# http://www.apache.org/licenses/LICENSE-2.0
#
-import os
+import docker
+import sys
-import utils.dovetail_logger as dt_logger
-import utils.dovetail_utils as dt_utils
-from utils.dovetail_config import DovetailConfig as dt_cfg
+import dovetail.utils.dovetail_logger as dt_logger
+import dovetail.utils.dovetail_utils as dt_utils
+from dovetail.utils.dovetail_config import DovetailConfig as dt_cfg
class Container(object):
@@ -21,9 +22,10 @@ class Container(object):
logger = None
def __init__(self, testcase):
- self.container_id = None
+ self.container = None
self.testcase = testcase
self.valid_type = self.testcase.validate_type()
+ self.client = docker.from_env(timeout=None)
def __str__(self):
pass
@@ -47,96 +49,76 @@ class Container(object):
name = self._get_config('image_name', project_cfg, testcase_cfg)
tag = self._get_config('docker_tag', project_cfg, testcase_cfg)
- return "{}:{}".format(name, tag) if name and tag else None
-
- def set_vnftest_config(self):
- dovetail_config = dt_cfg.dovetail_config
-
- log_vol = '-v %s:%s ' % (dovetail_config['result_dir'],
- dovetail_config['vnftest']['result']['log'])
-
- key_file = os.path.join(dovetail_config['config_dir'],
- dovetail_config['pri_key'])
- key_container_path = dovetail_config['vnftest']['result']['key_path']
- if not os.path.isfile(key_file):
- self.logger.debug("Key file {} is not found".format(key_file))
- key_vol = ''
- else:
- key_vol = '-v %s:%s ' % (key_file, key_container_path)
- return "%s %s" % (log_vol, key_vol)
+ return '{}:{}'.format(name, tag) if name and tag else None
def create(self, docker_image):
dovetail_config = dt_cfg.dovetail_config
project_cfg = dovetail_config[self.valid_type]
- opts = dt_utils.get_value_from_dict('opts', project_cfg)
- envs = dt_utils.get_value_from_dict('envs', project_cfg)
- volumes_list = dt_utils.get_value_from_dict('volumes', project_cfg)
- opts = ' ' if not opts else opts
- envs = ' ' if not envs else envs
- volumes = ' '.join(volume for volume in volumes_list if volume) \
- if volumes_list else ' '
-
- hosts_config = dt_utils.get_hosts_info(self.logger)
-
- # This part will be totally removed after remove the 4 functions
- # set_functest_config has been removed
- # set_yardstick_config has been removed
- # set_bottlenecks_config has been removed
- # set_vnftest_config
- config = " "
- if self.valid_type.lower() == "vnftest":
- config = self.set_vnftest_config()
- if not config:
- return None
+ kwargs = dt_utils.get_value_from_dict('opts', project_cfg)
+ shell = dt_utils.get_value_from_dict('shell', project_cfg)
+ if not shell:
+ return None, "Lacking of key word 'shell' in config file."
+ env_list = dt_utils.get_value_from_dict('envs', project_cfg)
+ if env_list:
+ kwargs['environment'] = \
+ [env for env in env_list if env is not None]
+ volume_list = dt_utils.get_value_from_dict('volumes', project_cfg)
+ kwargs['volumes'] = [vol for vol in volume_list if vol is not None]
- cmd = 'sudo docker run {opts} {envs} {volumes} {config} ' \
- '{hosts_config} {docker_image} /bin/bash'.format(**locals())
- ret, container_id = dt_utils.exec_cmd(cmd, self.logger)
- if ret != 0:
- return None
- if self.valid_type.lower() == 'vnftest':
- self.set_vnftest_conf_file(container_id)
+ kwargs['mounts'], msg = dt_utils.get_mount_list(project_cfg)
+ if not kwargs['mounts']:
+ return None, msg
+
+ kwargs['extra_hosts'] = dt_utils.get_hosts_info(self.logger)
+
+ try:
+ self.container = self.client.containers.run(
+ docker_image, shell, **kwargs)
+ except (docker.errors.ContainerError, docker.errors.ImageNotFound,
+ docker.errors.APIError) as e:
+ return None, e
- self.container_id = container_id
- return container_id
+ return self.container.id, 'Successfully to create container.'
def get_image_id(self, image_name):
- cmd = 'sudo docker images -q %s' % (image_name)
- ret, image_id = dt_utils.exec_cmd(cmd, self.logger)
- if ret == 0:
- return image_id
- else:
- return None
+ try:
+ image_id = self.client.images.get(image_name).id
+ except (docker.errors.ImageNotFound, docker.errors.APIError):
+ image_id = None
+ return image_id
# remove the image according to the image_id
# if there exists containers using this image, then skip
def remove_image(self, image_id):
- cmd = "sudo docker ps -aq -f 'ancestor=%s'" % (image_id)
- ret, msg = dt_utils.exec_cmd(cmd, self.logger)
- if msg and ret == 0:
+ try:
+ containers = self.client.containers.list(
+ filters={'ancestor': image_id})
+ except docker.errors.APIError:
+ containers = []
+ if containers:
self.logger.debug('Image {} has containers, skip.'
.format(image_id))
return True
- cmd = 'sudo docker rmi %s' % (image_id)
self.logger.debug('Remove image {}.'.format(image_id))
- ret, msg = dt_utils.exec_cmd(cmd, self.logger)
- if ret == 0:
+ try:
+ self.client.images.remove(image_id)
self.logger.debug('Remove image {} successfully.'.format(image_id))
return True
- self.logger.error('Failed to remove image {}.'.format(image_id))
- return False
+ except (docker.errors.ImageNotFound, docker.errors.APIError):
+ self.logger.error('Failed to remove image {}.'.format(image_id))
+ return False
def pull_image_only(self, image_name):
- cmd = 'sudo docker pull %s' % (image_name)
- ret, _ = dt_utils.exec_cmd(cmd, self.logger)
- if ret != 0:
+ try:
+ self.client.images.pull(image_name)
+ self.logger.debug(
+ 'Success to pull docker image {}!'.format(image_name))
+ return True
+ except docker.errors.APIError:
self.logger.error(
'Failed to pull docker image {}!'.format(image_name))
return False
- self.logger.debug('Success to pull docker image {}!'
- .format(image_name))
- return True
def pull_image(self, docker_image):
if not docker_image:
@@ -147,7 +129,7 @@ class Container(object):
new_image_id = self.get_image_id(docker_image)
if not new_image_id:
self.logger.error(
- "Failed to get the id of image {}.".format(docker_image))
+ 'Failed to get the id of image {}.'.format(docker_image))
return None
if not old_image_id:
return docker_image
@@ -158,30 +140,50 @@ class Container(object):
self.remove_image(old_image_id)
return docker_image
- def check_container_exist(self, container_name):
- cmd = ('sudo docker ps -aq -f name={}'.format(container_name))
- ret, msg = dt_utils.exec_cmd(cmd, self.logger)
- if ret == 0 and msg:
- return True
- return False
+ def get_container(self, container_name):
+ try:
+ container = self.client.containers.get(container_name)
+ except (docker.errors.NotFound, docker.errors.APIError):
+ container = None
+ return container
def clean(self):
- cmd = ('sudo docker rm -f {}'.format(self.container_id))
- dt_utils.exec_cmd(cmd, self.logger)
- if self.valid_type.lower() == 'bottlenecks':
- containers = dt_utils.get_value_from_dict(
- 'extra_container', dt_cfg.dovetail_config[self.valid_type])
- for container in containers:
- if self.check_container_exist(container):
- cmd = ('sudo docker rm -f {}'.format(container))
- dt_utils.exec_cmd(cmd, self.logger)
+ try:
+ self.container.remove(force=True)
+ self.logger.debug(
+ 'container: {} was removed'.format(self.container.name))
+ except docker.errors.APIError as e:
+ self.logger.error(e)
+ extra_containers = dt_utils.get_value_from_dict(
+ 'extra_container', dt_cfg.dovetail_config[self.valid_type])
+ if extra_containers:
+ for container_name in extra_containers:
+ container = self.get_container(container_name)
+ if container:
+ try:
+ container.remove(force=True)
+ self.logger.debug(
+ 'container: {} was removed'.format(container_name))
+ except docker.errors.APIError as e:
+ self.logger.error(e)
def exec_cmd(self, sub_cmd, exit_on_error=False):
- if sub_cmd == "":
+ if not sub_cmd:
return (1, 'sub_cmd is empty')
- cmd = 'sudo docker exec {} /bin/bash -c "{}"'.format(self.container_id,
- sub_cmd)
- return dt_utils.exec_cmd(cmd, self.logger, exit_on_error)
+ shell = dt_utils.get_value_from_dict(
+ 'shell', dt_cfg.dovetail_config[self.valid_type])
+ if not shell:
+ return (1, 'shell is empty')
+ cmd = '{} -c "{}"'.format(shell, sub_cmd)
+ try:
+ result = self.container.exec_run(cmd)
+ except docker.errors.APIError as e:
+ result = (e.response.status_code, str(e))
+ self.logger.error(e)
+ if exit_on_error:
+ sys.exit(1)
+
+ return result
def copy_file(self, src_path, dest_path, exit_on_error=False):
if not src_path or not dest_path:
@@ -189,14 +191,6 @@ class Container(object):
cmd = 'cp %s %s' % (src_path, dest_path)
return self.exec_cmd(cmd, exit_on_error)
- def docker_copy(self, src_path, dest_path):
- if not src_path or not dest_path:
- return (1, 'src_path or dest_path is empty')
- cmd = 'docker cp {} {}:{}'.format(src_path,
- self.container_id,
- dest_path)
- return dt_utils.exec_cmd(cmd, self.logger)
-
def copy_files_in_container(self):
project_config = dt_cfg.dovetail_config[self.valid_type]
if 'copy_file_in_container' not in project_config.keys():
@@ -205,10 +199,3 @@ class Container(object):
return
for item in project_config['copy_file_in_container']:
self.copy_file(item['src_file'], item['dest_file'])
-
- def set_vnftest_conf_file(self):
- valid_type = 'vnftest'
- for conf_file in dt_cfg.dovetail_config[valid_type]['vnftest_conf']:
- src = conf_file['src_file']
- dest = conf_file['dest_file']
- self.docker_copy(src, dest)
diff --git a/dovetail/parser.py b/dovetail/parser.py
index 55ba6b80..e0935ad3 100644
--- a/dovetail/parser.py
+++ b/dovetail/parser.py
@@ -11,9 +11,9 @@
import jinja2
-import utils.dovetail_logger as dt_logger
-import utils.dovetail_utils as dt_utils
-from utils.dovetail_config import DovetailConfig as dt_cfg
+import dovetail.utils.dovetail_logger as dt_logger
+import dovetail.utils.dovetail_utils as dt_utils
+from dovetail.utils.dovetail_config import DovetailConfig as dt_cfg
class Parser(object):
diff --git a/dovetail/report.py b/dovetail/report.py
index 26cd6c52..ed3f942b 100644
--- a/dovetail/report.py
+++ b/dovetail/report.py
@@ -12,6 +12,7 @@
from __future__ import division
import collections
+import hashlib
import json
import re
import os
@@ -19,17 +20,18 @@ import datetime
import tarfile
import time
-import utils.dovetail_logger as dt_logger
+import dovetail.utils.dovetail_logger as dt_logger
-from utils.dovetail_config import DovetailConfig as dt_cfg
-import utils.dovetail_utils as dt_utils
-from testcase import Testcase
+from dovetail.utils.dovetail_config import DovetailConfig as dt_cfg
+import dovetail.utils.dovetail_utils as dt_utils
+from dovetail.testcase import Testcase
class Report(object):
results = {'functest': {}, 'yardstick': {}, 'functest-k8s': {},
- 'bottlenecks': {}, 'shell': {}, 'vnftest': {}}
+ 'bottlenecks': {}, 'shell': {}, 'onap-vtp': {},
+ 'onap-vvp': {}}
logger = None
@@ -39,25 +41,29 @@ class Report(object):
def check_tc_result(self, testcase):
result_path = dt_cfg.dovetail_config['result_dir']
- check_results_file = dt_utils.get_value_from_dict(
- 'report.check_results_file', testcase.testcase)
- if not check_results_file:
- self.logger.error("Failed to get 'check_results_file' from config "
- "file of test case {}".format(testcase.name()))
- self.check_result(testcase)
- return None
- result_file = os.path.join(result_path, check_results_file)
- if os.path.isfile(result_file):
- self.logger.info(
- 'Results have been stored with file {}.'.format(result_file))
- result = self.get_result(testcase, result_file)
- self.check_result(testcase, result)
- return result
- else:
- self.logger.error(
- 'Failed to store results with file {}.'.format(result_file))
+ check_results_files = dt_utils.get_value_from_dict(
+ 'report.check_results_files', testcase.testcase)
+ if not check_results_files:
+ self.logger.error("Failed to get 'check_results_files' from config"
+ " file of test case {}".format(testcase.name()))
self.check_result(testcase)
return None
+ result_files = []
+ for check_results_file in check_results_files:
+ result_file = os.path.join(result_path, check_results_file)
+ if not os.path.isfile(result_file):
+ self.logger.error(
+ 'Failed to store results with file {}.'.
+ format(result_file))
+ self.check_result(testcase)
+ return None
+ else:
+ result_files.append(result_file)
+ self.logger.info(
+ 'Results have been stored with files: {}.'.format(result_files))
+ result = self.get_result(testcase, result_files)
+ self.check_result(testcase, result)
+ return result
@staticmethod
def check_result(testcase, db_result=None):
@@ -65,16 +71,41 @@ class Report(object):
if checker is not None:
checker.check(testcase, db_result)
+ @staticmethod
+ def get_checksum(vnf_type):
+ if vnf_type == 'tosca':
+ path = os.path.join(dt_cfg.dovetail_config['config_dir'],
+ os.getenv('CSAR_FILE'))
+ elif vnf_type == 'heat':
+ path = os.path.join(
+ dt_cfg.dovetail_config['config_dir'],
+ '{}.zip'.format(os.getenv('VNF_ARCHIVE_NAME')))
+
+ checksum = hashlib.sha256()
+
+ if os.path.isfile(path):
+ with open(path, 'rb') as f:
+ for chunk in iter(lambda: f.read(4096), b''):
+ checksum.update(chunk)
+
+ return checksum.hexdigest()
+
def generate_json(self, testcase_list, duration):
report_obj = {}
# egeokun: using a hardcoded string instead of pbr version for
# versioning the result file. The version of the results.json is
# logically independent of the release of Dovetail.
- report_obj['version'] = '2018.09'
+ report_obj['version'] = dt_cfg.dovetail_config.get('version')
report_obj['build_tag'] = dt_cfg.dovetail_config['build_tag']
report_obj['test_date'] =\
datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S UTC')
report_obj['duration'] = duration
+ vnf_type = dt_cfg.dovetail_config.get('vnf_type')
+ if vnf_type:
+ report_obj['vnf_type'] = vnf_type
+ report_obj['vnf_checksum'] = self.get_checksum(vnf_type)
+ else:
+ report_obj['validation'] = os.getenv('validation')
report_obj['testcases_list'] = []
if not testcase_list:
@@ -88,12 +119,21 @@ class Report(object):
testcase_inreport['objective'] = ''
testcase_inreport['sub_testcase'] = []
testcase_inreport['mandatory'] = False
+ testcase_inreport['portal_key_file'] = ''
report_obj['testcases_list'].append(testcase_inreport)
continue
testcase_inreport['result'] = testcase.passed()
testcase_inreport['objective'] = testcase.objective()
+ try:
+ vnf_type = testcase.vnf_type()
+ except Exception:
+ vnf_type = None
+ if vnf_type:
+ report_obj['vnf_type'] = vnf_type
+ report_obj['vnf_checksum'] = self.get_checksum(vnf_type)
testcase_inreport['mandatory'] = testcase.is_mandatory
+ testcase_inreport['portal_key_file'] = testcase.portal_key_file()
testcase_inreport['sub_testcase'] = []
if testcase.sub_testcase() is not None:
for sub_test in testcase.sub_testcase():
@@ -102,7 +142,6 @@ class Report(object):
'result': testcase.sub_testcase_passed(sub_test)
})
report_obj['testcases_list'].append(testcase_inreport)
- self.logger.debug(json.dumps(report_obj))
return report_obj
def generate(self, testcase_list, duration):
@@ -121,22 +160,15 @@ class Report(object):
sub_report = collections.OrderedDict()
testcase_num = {}
testcase_passnum = {}
- for area in dt_cfg.dovetail_config['testarea_supported']:
- sub_report[area] = ''
- testcase_num[area] = 0
- testcase_passnum[area] = 0
testarea_scope = []
for testcase in report_data['testcases_list']:
- supported_areas = dt_cfg.dovetail_config['testarea_supported']
- pattern = re.compile('|'.join(supported_areas))
- area = pattern.findall(testcase['name'])
- if not supported_areas or not area:
- self.logger.error('Test case {} not in supported testarea.'
- .format(testcase['name']))
- return None
- area = area[0]
- testarea_scope.append(area)
+ area = testcase['name'].split('.')[1]
+ if area not in testarea_scope:
+ testarea_scope.append(area)
+ sub_report[area] = ''
+ testcase_num[area] = 0
+ testcase_passnum[area] = 0
sub_report[area] += '-%-25s %s\n' %\
(testcase['name'], testcase['result'])
if 'sub_testcase' in testcase:
@@ -200,7 +232,7 @@ class Report(object):
f_out.add(os.path.join('results', f))
os.chdir(cwd)
- def get_result(self, testcase, check_results_file):
+ def get_result(self, testcase, check_results_files):
validate_testcase = testcase.validate_testcase()
type = testcase.validate_type()
crawler = CrawlerFactory.create(type)
@@ -208,7 +240,7 @@ class Report(object):
self.logger.error('Crawler is None: {}'.format(testcase.name()))
return None
- result = crawler.crawl(testcase, check_results_file)
+ result = crawler.crawl(testcase, check_results_files)
if result is not None:
self.results[type][validate_testcase] = result
@@ -238,8 +270,8 @@ class FunctestCrawler(Crawler):
cls.logger = \
dt_logger.Logger(__name__ + '.FunctestCrawler').getLogger()
- def crawl(self, testcase, file_path):
- return self.crawl_from_file(testcase, file_path)
+ def crawl(self, testcase, file_paths):
+ return self.crawl_from_file(testcase, file_paths[0])
def crawl_from_file(self, testcase, file_path):
dovetail_config = dt_cfg.dovetail_config
@@ -269,16 +301,10 @@ class FunctestCrawler(Crawler):
duration = dt_utils.get_duration(timestart, timestop,
self.logger)
if complex_testcase:
- tests = data['details']['tests_number']
- failed_num = data['details']['failures_number']
- success_case = data['details']['success']
- error_case = data['details']['failures']
- skipped_case = data['details']['skipped']
- details = {'tests': tests,
- 'failures': failed_num,
- 'success': success_case,
- 'errors': error_case,
- 'skipped': skipped_case}
+ if testcase_name == 'rally_full':
+ details = self.get_rally_details(data)
+ else:
+ details = self.get_details(data)
except KeyError as e:
self.logger.exception(
"Result data don't have key {}.".format(e))
@@ -293,6 +319,37 @@ class FunctestCrawler(Crawler):
testcase.set_results(json_results)
return json_results
+ def get_details(self, data):
+ try:
+ t_details = data['details']
+ details = {
+ 'tests': t_details['tests_number'],
+ 'failures': t_details['failures_number'],
+ 'success': t_details['success'],
+ 'errors': t_details['failures'],
+ 'skipped': t_details['skipped']
+ }
+ return details
+ except Exception as e:
+ self.logger.exception("Failed to get details, {}.".format(e))
+ return None
+
+ def get_rally_details(self, data):
+ try:
+ t_details = data['details']['modules'][0]['details']
+ tests = len(t_details['success']) + len(t_details['failures'])
+ details = {
+ 'tests': tests,
+ 'failures': len(t_details['failures']),
+ 'success': t_details['success'],
+ 'errors': t_details['failures'],
+ 'skipped': []
+ }
+ return details
+ except Exception as e:
+ self.logger.exception("Failed to get details, {}.".format(e))
+ return None
+
class FunctestK8sCrawler(FunctestCrawler):
@@ -321,8 +378,8 @@ class YardstickCrawler(Crawler):
cls.logger = \
dt_logger.Logger(__name__ + '.YardstickCrawler').getLogger()
- def crawl(self, testcase, file_path):
- return self.crawl_from_file(testcase, file_path)
+ def crawl(self, testcase, file_paths):
+ return self.crawl_from_file(testcase, file_paths[0])
def crawl_from_file(self, testcase, file_path):
if not os.path.exists(file_path):
@@ -362,8 +419,8 @@ class BottlenecksCrawler(Crawler):
cls.logger = \
dt_logger.Logger(__name__ + '.BottlenecksCrawler').getLogger()
- def crawl(self, testcase, file_path):
- return self.crawl_from_file(testcase, file_path)
+ def crawl(self, testcase, file_paths):
+ return self.crawl_from_file(testcase, file_paths[0])
def crawl_from_file(self, testcase, file_path):
if not os.path.exists(file_path):
@@ -392,8 +449,8 @@ class ShellCrawler(Crawler):
def __init__(self):
self.type = 'shell'
- def crawl(self, testcase, file_path):
- return self.crawl_from_file(testcase, file_path)
+ def crawl(self, testcase, file_paths):
+ return self.crawl_from_file(testcase, file_paths[0])
def crawl_from_file(self, testcase, file_path):
if not os.path.exists(file_path):
@@ -406,22 +463,37 @@ class ShellCrawler(Crawler):
return None
-class VnftestCrawler(Crawler):
+class OnapVtpCrawler(Crawler):
logger = None
def __init__(self):
- self.type = 'vnftest'
+ self.type = 'onap-vtp'
self.logger.debug('Create crawler: {}'.format(self.type))
@classmethod
def create_log(cls):
- cls.logger = \
- dt_logger.Logger(__name__ + '.VnftestCrawler').getLogger()
-
- def crawl(self, testcase, file_path):
- return self.crawl_from_file(testcase, file_path)
-
+ cls.logger = dt_logger.Logger(__name__ + '.OnapVtpCrawler').getLogger()
+
+ def crawl(self, testcase, file_paths):
+ return self.crawl_from_file(testcase, file_paths[0])
+
+ # The pass result looks like
+ # {
+ # "results": [
+ # {"property": "results", "value": "{value=SUCCESS}"},
+ # {"property": "build_tag", "value": "test"},
+ # {"property": "criteria", "value": "PASS"}
+ # ]
+ # }
+ # The fail result looks like
+ # {
+ # "results": [
+ # {"property": "results", "value": "{value=file doesn't exists}"},
+ # {"property": "build_tag", "value": "test"},
+ # {"property": "criteria", "value": "FAILED"}
+ # ]
+ # }
def crawl_from_file(self, testcase, file_path):
if not os.path.exists(file_path):
self.logger.error('Result file not found: {}'.format(file_path))
@@ -429,23 +501,70 @@ class VnftestCrawler(Crawler):
criteria = 'FAIL'
with open(file_path, 'r') as f:
for jsonfile in f:
- data = json.loads(jsonfile)
try:
- criteria = data['result']['criteria']
+ data = json.loads(jsonfile)
+ for item in data['results']:
+ if 'criteria' == item['property']:
+ if 'PASS' == item['value']:
+ criteria = 'PASS'
+ break
+ else:
+ self.logger.error('There is no property criteria.')
except KeyError as e:
self.logger.exception('Pass flag not found {}'.format(e))
+ except ValueError:
+ continue
+ json_results = {'criteria': criteria}
+
+ testcase.set_results(json_results)
+ return json_results
+
+
+class OnapVvpCrawler(Crawler):
+
+ logger = None
+
+ def __init__(self):
+ self.type = 'onap-vvp'
+ self.logger.debug('Create crawler: {}'.format(self.type))
+
+ @classmethod
+ def create_log(cls):
+ cls.logger = dt_logger.Logger(__name__ + '.OnapVvpCrawler').getLogger()
+
+ def crawl(self, testcase, file_paths):
+ return self.crawl_from_file(testcase, file_paths[0])
+
+ def crawl_from_file(self, testcase, file_path):
+ if not os.path.exists(file_path):
+ self.logger.error('Result file not found: {}'.format(file_path))
+ return None
+ criteria = 'FAIL'
+ with open(file_path, 'r') as f:
+ try:
+ data = json.load(f)
+ criteria = data['outcome']
+ except KeyError as e:
+ self.logger.exception('Outcome field not found {}'.format(e))
+ except ValueError:
+ self.logger.exception('Result file has invalid format')
json_results = {'criteria': criteria}
+
+ testcase.set_results(json_results)
return json_results
class CrawlerFactory(object):
- CRAWLER_MAP = {'functest': FunctestCrawler,
- 'yardstick': YardstickCrawler,
- 'bottlenecks': BottlenecksCrawler,
- 'vnftest': VnftestCrawler,
- 'shell': ShellCrawler,
- 'functest-k8s': FunctestK8sCrawler}
+ CRAWLER_MAP = {
+ 'functest': FunctestCrawler,
+ 'yardstick': YardstickCrawler,
+ 'bottlenecks': BottlenecksCrawler,
+ 'shell': ShellCrawler,
+ 'functest-k8s': FunctestK8sCrawler,
+ 'onap-vtp': OnapVtpCrawler,
+ 'onap-vvp': OnapVvpCrawler
+ }
@classmethod
def create(cls, type):
@@ -488,6 +607,12 @@ class FunctestChecker(object):
match = find_reg.findall(tc)
if match:
return True
+ reg = sub_testcase.rsplit('.', 1)[0] + '$'
+ find_reg = re.compile(reg)
+ for tc in result:
+ match = find_reg.findall(tc)
+ if match:
+ return True
return False
def check(self, testcase, db_result):
@@ -506,7 +631,6 @@ class FunctestChecker(object):
testcase_passed = 'PASS'
for sub_testcase in sub_testcase_list:
- self.logger.debug('Check sub_testcase: {}'.format(sub_testcase))
try:
if self.get_sub_testcase(sub_testcase,
db_result['details']['success']):
@@ -581,14 +705,30 @@ class ShellChecker(object):
testcase.passed(False)
-class VnftestChecker(object):
+class OnapVtpChecker(object):
logger = None
@classmethod
def create_log(cls):
- cls.logger = \
- dt_logger.Logger(__name__ + '.VnftestCheckers').getLogger()
+ cls.logger = dt_logger.Logger(__name__ + '.OnapVtpChecker').getLogger()
+
+ @staticmethod
+ def check(testcase, result):
+ if not result:
+ testcase.passed('FAIL')
+ else:
+ testcase.passed(result['criteria'])
+ return
+
+
+class OnapVvpChecker(object):
+
+ logger = None
+
+ @classmethod
+ def create_log(cls):
+ cls.logger = dt_logger.Logger(__name__ + '.OnapVvpChecker').getLogger()
@staticmethod
def check(testcase, result):
@@ -601,12 +741,15 @@ class VnftestChecker(object):
class CheckerFactory(object):
- CHECKER_MAP = {'functest': FunctestChecker,
- 'yardstick': YardstickChecker,
- 'bottlenecks': BottlenecksChecker,
- 'shell': ShellChecker,
- 'vnftest': VnftestChecker,
- 'functest-k8s': FunctestK8sChecker}
+ CHECKER_MAP = {
+ 'functest': FunctestChecker,
+ 'yardstick': YardstickChecker,
+ 'bottlenecks': BottlenecksChecker,
+ 'shell': ShellChecker,
+ 'functest-k8s': FunctestK8sChecker,
+ 'onap-vtp': OnapVtpChecker,
+ 'onap-vvp': OnapVvpChecker
+ }
@classmethod
def create(cls, type):
diff --git a/dovetail/run.py b/dovetail/run.py
index 6d2bcf66..c5281918 100755
--- a/dovetail/run.py
+++ b/dovetail/run.py
@@ -11,21 +11,23 @@
import copy
+from datetime import datetime
+import json
import os
import time
import uuid
import click
-from container import Container
+from dovetail.container import Container
from dovetail import constants
-from parser import Parser
-import report as dt_report
-import test_runner as dt_test_runner
-import testcase as dt_testcase
-from utils.dovetail_config import DovetailConfig as dt_cfg
-import utils.dovetail_logger as dt_logger
-import utils.dovetail_utils as dt_utils
+from dovetail.parser import Parser
+import dovetail.report as dt_report
+import dovetail.test_runner as dt_test_runner
+import dovetail.testcase as dt_testcase
+from dovetail.utils.dovetail_config import DovetailConfig as dt_cfg
+import dovetail.utils.dovetail_logger as dt_logger
+import dovetail.utils.dovetail_utils as dt_utils
EXIT_RUN_FAILED = 2
@@ -48,10 +50,20 @@ def run_test(testcase_list, report_flag, logger):
testcase = dt_testcase.Testcase.get(testcase_name)
run_testcase = True
+ tc_start_time = datetime.fromtimestamp(
+ time.time()).strftime('%Y-%m-%d %H:%M:%S')
if run_testcase:
testcase.run()
+ tc_stop_time = datetime.fromtimestamp(
+ time.time()).strftime('%Y-%m-%d %H:%M:%S')
result = report.check_tc_result(testcase)
+ if os.getenv('OPNFV_CI') == 'true':
+ dt_utils.push_results_to_db(case_name=testcase_name,
+ start_date=tc_start_time,
+ stop_date=tc_stop_time,
+ details=result,
+ logger=logger)
if dt_cfg.dovetail_config['stop']:
try:
if (not result or result['criteria'] == 'FAIL'):
@@ -109,13 +121,15 @@ def create_logs():
dt_report.FunctestCrawler.create_log()
dt_report.FunctestK8sCrawler.create_log()
dt_report.YardstickCrawler.create_log()
- dt_report.VnftestCrawler.create_log()
dt_report.BottlenecksCrawler.create_log()
+ dt_report.OnapVtpCrawler.create_log()
+ dt_report.OnapVvpCrawler.create_log()
dt_report.FunctestChecker.create_log()
dt_report.FunctestK8sChecker.create_log()
dt_report.YardstickChecker.create_log()
- dt_report.VnftestChecker.create_log()
dt_report.BottlenecksChecker.create_log()
+ dt_report.OnapVtpChecker.create_log()
+ dt_report.OnapVvpChecker.create_log()
dt_testcase.Testcase.create_log()
dt_testcase.Testsuite.create_log()
dt_test_runner.DockerRunner.create_log()
@@ -126,7 +140,7 @@ def clean_results_dir():
result_path = dt_cfg.dovetail_config['result_dir']
if os.path.exists(result_path):
if os.path.isdir(result_path):
- cmd = 'sudo rm -rf %s/*' % (result_path)
+ cmd = 'rm -rf %s/*' % (result_path)
dt_utils.exec_cmd(cmd, exit_on_error=False, exec_msg_on=False)
else:
print('result_dir in dovetail_config.yml is not a directory.')
@@ -149,14 +163,16 @@ def get_result_path():
'pre_config')
dt_cfg.dovetail_config['patch_dir'] = os.path.join(dovetail_home,
'patches')
+ dt_cfg.dovetail_config['userconfig_dir'] = os.path.join(dovetail_home,
+ 'userconfig')
return dovetail_home
def copy_userconfig_files(logger):
- pre_config_path = dt_cfg.dovetail_config['config_dir']
- if not os.path.isdir(pre_config_path):
- os.makedirs(pre_config_path)
- cmd = 'sudo cp -r %s/* %s' % (constants.USERCONF_PATH, pre_config_path)
+ userconfig_path = dt_cfg.dovetail_config['userconfig_dir']
+ if not os.path.isdir(userconfig_path):
+ os.makedirs(userconfig_path)
+ cmd = 'cp -r %s/* %s' % (constants.USERCONF_PATH, userconfig_path)
dt_utils.exec_cmd(cmd, logger, exit_on_error=False)
@@ -164,7 +180,7 @@ def copy_patch_files(logger):
patch_set_path = dt_cfg.dovetail_config['patch_dir']
if not os.path.isdir(patch_set_path):
os.makedirs(patch_set_path)
- cmd = 'sudo cp -a -r %s/* %s' % (constants.PATCH_PATH, patch_set_path)
+ cmd = 'cp -a -r %s/* %s' % (constants.PATCH_PATH, patch_set_path)
dt_utils.exec_cmd(cmd, logger, exit_on_error=False)
@@ -223,6 +239,10 @@ def get_testcase_list(logger=None, **kwargs):
if testsuite_validation and testarea_validation:
testsuite_yaml = load_testsuite(testsuite)
+ dt_cfg.dovetail_config['version'] = dt_utils.get_value_from_dict(
+ 'version', testsuite_yaml)
+ dt_cfg.dovetail_config['vnf_type'] = dt_utils.get_value_from_dict(
+ 'vnf_type', testsuite_yaml)
testcase_list = dt_testcase.Testcase.get_testcases_for_testsuite(
testsuite_yaml, testarea)
return check_testcase_list(testcase_list, logger)
@@ -240,8 +260,10 @@ def main(*args, **kwargs):
if not get_result_path():
return
clean_results_dir()
- if kwargs['debug']:
- os.environ['DEBUG'] = 'true'
+ os.environ['DEBUG'] = 'true' if kwargs['debug'] else 'false'
+ os.environ['OPNFV_CI'] = 'true' if kwargs['opnfv_ci'] else 'false'
+ os.environ['validation'] = 'disabled' \
+ if kwargs['no_api_validation'] else 'enabled'
create_logs()
logger = dt_logger.Logger('run').getLogger()
@@ -256,6 +278,13 @@ def main(*args, **kwargs):
dt_utils.check_docker_version(logger)
testcase_list = get_testcase_list(logger, **kwargs)
+
+ dovetail_home = os.environ['DOVETAIL_HOME']
+ testcases_file = os.path.join(dovetail_home, 'results', 'testcases.json')
+ with open(testcases_file, "w") as f:
+ data = {'testsuite': kwargs['testsuite'], 'testcases': testcase_list}
+ f.write(json.dumps(data) + '\n')
+
if not testcase_list:
raise SystemExit(EXIT_RUN_FAILED)
diff --git a/dovetail/test_runner.py b/dovetail/test_runner.py
index d77c3c65..266bdc20 100644
--- a/dovetail/test_runner.py
+++ b/dovetail/test_runner.py
@@ -14,14 +14,14 @@ import jinja2
import jinja2.meta
import yaml
-from container import Container
+from dovetail.container import Container
from dovetail import constants
-from utils.dovetail_config import DovetailConfig as dt_cfg
-import utils.dovetail_utils as dt_utils
-import utils.dovetail_logger as dt_logger
+from dovetail.utils.dovetail_config import DovetailConfig as dt_cfg
+import dovetail.utils.dovetail_utils as dt_utils
+import dovetail.utils.dovetail_logger as dt_logger
-class DockerRunner(object):
+class Runner(object):
logger = None
@@ -29,30 +29,40 @@ class DockerRunner(object):
self.testcase = testcase
self.logger.debug('Create runner: {}'.format(self.type))
+ def archive_logs(self):
+ result_path = os.path.join(os.environ['DOVETAIL_HOME'], 'results')
+ src_files = dt_utils.get_value_from_dict(
+ 'report.source_archive_files', self.testcase.testcase)
+ dest_files = dt_utils.get_value_from_dict(
+ 'report.dest_archive_files', self.testcase.testcase)
+ if not src_files and not dest_files:
+ return True
+ if not (src_files and dest_files) or len(src_files) != len(dest_files):
+ self.logger.error("Can't find corresponding 'result_dest_files' "
+ "for 'result_source_files' with testcase {}"
+ .format(self.testcase.name()))
+ return False
+ res = True
+ for index in range(0, len(src_files)):
+ src_file_path = os.path.join(result_path, src_files[index])
+ dest_file_path = os.path.join(result_path, dest_files[index])
+ if os.path.isfile(src_file_path):
+ os.renames(src_file_path, dest_file_path)
+ else:
+ self.logger.error("Can't find file {}.".format(src_file_path))
+ res = False
+ return res
+
+
+class DockerRunner(Runner):
+
+ def __init__(self, testcase):
+ super(DockerRunner, self).__init__(testcase)
+
@classmethod
def create_log(cls):
cls.logger = dt_logger.Logger(__name__ + '.DockerRunner').getLogger()
- def pre_copy(self, container=None, dest_path=None,
- src_file=None, exist_file=None):
- if not container:
- self.logger.error("Container instance is None.")
- return None
- if not dest_path:
- self.logger.error("There has no dest_path in {} config file."
- .format(self.testcase.name()))
- return None
- if src_file:
- self.testcase.mk_src_file()
- file_path = dt_cfg.dovetail_config[self.type]['result']['dir']
- src_path = os.path.join(file_path, src_file)
- if exist_file:
- file_path = dt_cfg.dovetail_config[self.type]['config']['dir']
- src_path = os.path.join(file_path, 'pre_config', exist_file)
-
- container.copy_file(src_path, dest_path)
- return dest_path
-
def run(self):
container = Container(self.testcase)
docker_image = container.get_docker_image()
@@ -67,21 +77,15 @@ class DockerRunner(object):
self.logger.error("Failed to pull the image.")
return
- container_id = container.create(docker_image)
+ container_id, msg = container.create(docker_image)
if not container_id:
self.logger.error('Failed to create container.')
+ self.logger.error(msg)
return
self.logger.debug('container id: {}'.format(container_id))
- dest_path = self.testcase.pre_copy_path('dest_path')
- src_file_name = self.testcase.pre_copy_path('src_file')
- exist_file_name = self.testcase.pre_copy_path('exist_src_file')
-
- if src_file_name or exist_file_name:
- if not self.pre_copy(container, dest_path, src_file_name,
- exist_file_name):
- return
+ self.testcase.mk_src_file()
cmds = self.testcase.pre_condition()
if cmds:
@@ -110,30 +114,6 @@ class DockerRunner(object):
if not dt_cfg.dovetail_config['noclean']:
container.clean()
- def archive_logs(self):
- result_path = os.path.join(os.environ['DOVETAIL_HOME'], 'results')
- src_files = dt_utils.get_value_from_dict(
- 'report.source_archive_files', self.testcase.testcase)
- dest_files = dt_utils.get_value_from_dict(
- 'report.dest_archive_files', self.testcase.testcase)
- if not src_files and not dest_files:
- return True
- if not (src_files and dest_files) or len(src_files) != len(dest_files):
- self.logger.error("Can't find corresponding 'result_dest_files' "
- "for 'result_source_files' with testcase {}"
- .format(self.testcase.name()))
- return False
- res = True
- for index in range(0, len(src_files)):
- src_file_path = os.path.join(result_path, src_files[index])
- dest_file_path = os.path.join(result_path, dest_files[index])
- if os.path.isfile(src_file_path):
- os.renames(src_file_path, dest_file_path)
- else:
- self.logger.error("Can't find file {}.".format(src_file_path))
- res = False
- return res
-
@staticmethod
def _render(task_template, **kwargs):
return jinja2.Template(task_template).render(**kwargs)
@@ -151,6 +131,9 @@ class DockerRunner(object):
config_item['debug'] = os.getenv('DEBUG')
config_item['build_tag'] = dt_cfg.dovetail_config['build_tag']
config_item['cacert'] = os.getenv('OS_CACERT')
+ config_item['host_url'] = os.getenv('HOST_URL')
+ config_item['csar_file'] = os.getenv('CSAR_FILE')
+ config_item['heat_templates_archive'] = os.getenv('VNF_ARCHIVE_NAME')
return config_item
def _update_config(self, testcase, update_pod=True):
@@ -239,19 +222,15 @@ class BottlenecksRunner(DockerRunner):
self._update_config(testcase)
-class ShellRunner(object):
-
- logger = None
+class ShellRunner(Runner):
@classmethod
def create_log(cls):
cls.logger = dt_logger.Logger(__name__ + '.ShellRunner').getLogger()
def __init__(self, testcase):
- super(ShellRunner, self).__init__()
- self.testcase = testcase
self.type = 'shell'
- self.logger.debug('Create runner: {}'.format(self.type))
+ super(ShellRunner, self).__init__(testcase)
def run(self):
testcase_passed = 'PASS'
@@ -292,11 +271,36 @@ class ShellRunner(object):
'exception: {}'.format(result_filename, e))
-class VnftestRunner(DockerRunner):
+class OnapVtpRunner(DockerRunner):
+
+ config_file_name = 'onap-vtp_config.yml'
def __init__(self, testcase):
- self.type = 'vnftest'
- super(VnftestRunner, self).__init__(testcase)
+ self.type = 'onap-vtp'
+ super(OnapVtpRunner, self).__init__(testcase)
+ env_file = os.path.join(dt_cfg.dovetail_config['config_dir'],
+ dt_cfg.dovetail_config['env_file'])
+ if not os.path.isfile(env_file):
+ self.logger.error('File {} does not exist.'.format(env_file))
+ return
+ dt_utils.source_env(env_file)
+ self._update_config(testcase, update_pod=False)
+
+
+class OnapVvpRunner(DockerRunner):
+
+ config_file_name = 'onap-vvp_config.yml'
+
+ def __init__(self, testcase):
+ self.type = 'onap-vvp'
+ super(OnapVvpRunner, self).__init__(testcase)
+ env_file = os.path.join(dt_cfg.dovetail_config['config_dir'],
+ dt_cfg.dovetail_config['env_file'])
+ if not os.path.isfile(env_file):
+ self.logger.error('File {} does not exist.'.format(env_file))
+ return
+ dt_utils.source_env(env_file)
+ self._update_config(testcase, update_pod=False)
class TestRunnerFactory(object):
@@ -306,8 +310,9 @@ class TestRunnerFactory(object):
"yardstick": YardstickRunner,
"bottlenecks": BottlenecksRunner,
"shell": ShellRunner,
- "vnftest": VnftestRunner,
- "functest-k8s": FunctestK8sRunner
+ "functest-k8s": FunctestK8sRunner,
+ "onap-vtp": OnapVtpRunner,
+ "onap-vvp": OnapVvpRunner
}
@classmethod
diff --git a/dovetail/testcase.py b/dovetail/testcase.py
index 3be1cb02..b07a878b 100644
--- a/dovetail/testcase.py
+++ b/dovetail/testcase.py
@@ -13,11 +13,11 @@ import os
import yaml
from dovetail import constants
-from parser import Parser
-from test_runner import TestRunnerFactory
-from utils.dovetail_config import DovetailConfig as dt_cfg
-import utils.dovetail_logger as dt_logger
-import utils.dovetail_utils as dt_utils
+from dovetail.parser import Parser
+from dovetail.test_runner import TestRunnerFactory
+from dovetail.utils.dovetail_config import DovetailConfig as dt_cfg
+import dovetail.utils.dovetail_logger as dt_logger
+import dovetail.utils.dovetail_utils as dt_utils
class Testcase(object):
@@ -25,7 +25,7 @@ class Testcase(object):
logger = None
def __init__(self, testcase_yaml):
- self.testcase = testcase_yaml.values()[0]
+ self.testcase = list(testcase_yaml.values())[0]
self.testcase['passed'] = 'FAIL'
self.cmds = []
self.sub_testcase_status = {}
@@ -77,17 +77,21 @@ class Testcase(object):
def sub_testcase_passed(self, name, passed=None):
if passed is not None:
- self.logger.debug(
- 'sub_testcase_passed: {} {}'.format(name, passed))
self.sub_testcase_status[name] = passed
return self.sub_testcase_status[name]
def validate_type(self):
return self.testcase['validate']['type']
+ def vnf_type(self):
+ return self.testcase['vnf_type']
+
def validate_testcase(self):
return self.testcase['validate']['testcase']
+ def portal_key_file(self):
+ return self.testcase['report']['portal_key_file']
+
def increase_retry(self):
return self._increase_retry(self.validate_testcase())
@@ -136,21 +140,18 @@ class Testcase(object):
return post_condition
def mk_src_file(self):
- testcase_src_file = self.pre_copy_path('src_file')
- try:
- file_path = os.path.join(dt_cfg.dovetail_config['result_dir'],
- testcase_src_file)
- with open(file_path, 'w+') as src_file:
- if self.sub_testcase() is not None:
+ test_list = os.path.join(dt_cfg.dovetail_config['result_dir'],
+ 'tempest_custom.txt')
+ if self.sub_testcase() is not None:
+ try:
+ with open(test_list, 'w+') as src_file:
for sub_test in self.sub_testcase():
- self.logger.debug(
- 'Save test cases {}'.format(sub_test))
src_file.write(sub_test + '\n')
- self.logger.debug('Save test cases to {}'.format(file_path))
- return file_path
- except Exception:
- self.logger.exception('Failed to save: {}'.format(file_path))
- return None
+ self.logger.debug('Save test cases to {}'.format(test_list))
+ return test_list
+ except Exception:
+ self.logger.exception('Failed to save: {}'.format(test_list))
+ return None
def run(self):
runner = TestRunnerFactory.create(self)
@@ -196,14 +197,16 @@ class Testcase(object):
for testcase_file in files:
with open(os.path.join(root, testcase_file)) as f:
testcase_yaml = yaml.safe_load(f)
- case_type = testcase_yaml.values()[0]['validate']['type']
+ case_type = \
+ list(testcase_yaml.values())[0]['validate']['type']
testcase = TestcaseFactory.create(case_type, testcase_yaml)
if testcase is not None:
- cls.testcase_list[next(testcase_yaml.iterkeys())] = \
+ cls.testcase_list[next(iter(testcase_yaml.keys()))] = \
testcase
else:
cls.logger.error('Failed to create test case: {}'
.format(testcase_file))
+ return cls.testcase_list
@classmethod
def get(cls, testcase_name):
@@ -221,8 +224,6 @@ class Testcase(object):
return True, area_full
for area in testarea:
- if area not in dt_cfg.dovetail_config['testarea_supported']:
- return False, None
if area == 'full':
return True, area_full
area_no_duplicate.append(area)
@@ -296,8 +297,7 @@ class FunctestTestcase(Testcase):
# patch inside the functest container
if dt_cfg.dovetail_config['no_api_validation']:
patch_cmd = os.path.join(
- dt_cfg.dovetail_config['functest']['config']['dir'],
- 'patches',
+ dt_cfg.dovetail_config['functest']['patches_dir'],
'functest',
'disable-api-validation',
'apply.sh')
@@ -344,13 +344,22 @@ class ShellTestcase(Testcase):
self.type = 'shell'
-class VnftestTestcase(Testcase):
+class OnapVtpTestcase(Testcase):
+
+ validate_testcase_list = {}
+
+ def __init__(self, testcase_yaml):
+ super(OnapVtpTestcase, self).__init__(testcase_yaml)
+ self.type = 'onap-vtp'
+
+
+class OnapVvpTestcase(Testcase):
validate_testcase_list = {}
def __init__(self, testcase_yaml):
- super(VnftestTestcase, self).__init__(testcase_yaml)
- self.type = 'vnftest'
+ super(OnapVvpTestcase, self).__init__(testcase_yaml)
+ self.type = 'onap-vvp'
class TestcaseFactory(object):
@@ -359,8 +368,9 @@ class TestcaseFactory(object):
'yardstick': YardstickTestcase,
'bottlenecks': BottlenecksTestcase,
'shell': ShellTestcase,
- 'vnftest': VnftestTestcase,
- 'functest-k8s': FunctestK8sTestcase
+ 'functest-k8s': FunctestK8sTestcase,
+ 'onap-vtp': OnapVtpTestcase,
+ 'onap-vvp': OnapVvpTestcase
}
@classmethod
@@ -398,6 +408,7 @@ class Testsuite(object):
with open(os.path.join(root, testsuite_yaml)) as f:
testsuite_yaml = yaml.safe_load(f)
cls.testsuite_list.update(testsuite_yaml)
+ return cls.testsuite_list
@classmethod
def get(cls, testsuite_name):
diff --git a/dovetail/tests/unit/cli/commands/test_cli_testcase.py b/dovetail/tests/unit/cli/commands/test_cli_testcase.py
index 2a1feb64..324db640 100644
--- a/dovetail/tests/unit/cli/commands/test_cli_testcase.py
+++ b/dovetail/tests/unit/cli/commands/test_cli_testcase.py
@@ -34,7 +34,7 @@ class CliTestcaseTesting(unittest.TestCase):
testcase.run(options)
mock_path.dirname.assert_called_once()
- cmd = 'python %s/run.py %s' % (repo_dir, options)
+ cmd = 'python3 %s/run.py %s' % (repo_dir, options)
mock_utils.exec_cmd.assert_called_once_with(
cmd, exit_on_error=True, exec_msg_on=False, info=True)
@@ -60,7 +60,7 @@ class CliTestcaseTesting(unittest.TestCase):
mock_click.echo.assert_called_once_with(
'testcase %s not exist or not supported' % testcase_name)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.cli.commands.cli_testcase.constants')
@patch('os.path')
@patch('dovetail.cli.commands.cli_testcase.click')
@@ -85,7 +85,7 @@ class CliTestcaseTesting(unittest.TestCase):
mock_path.isfile.assert_called_once_with(testcase_whole_path)
mock_click.echo.assert_called_once_with(file_data)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.cli.commands.cli_testcase.constants')
@patch('os.path')
@patch('dovetail.cli.commands.cli_testcase.click')
diff --git a/dovetail/tests/unit/cli/test_cli_base.py b/dovetail/tests/unit/cli/test_cli_base.py
index 80b7e7b3..9b06426c 100644
--- a/dovetail/tests/unit/cli/test_cli_base.py
+++ b/dovetail/tests/unit/cli/test_cli_base.py
@@ -24,7 +24,7 @@ class CliBaseTesting(unittest.TestCase):
runner = CliRunner()
result = runner.invoke(cli_base.cli, [])
- self.assertEquals(result.exit_code, 0)
+ self.assertEqual(result.exit_code, 0)
def test_testcase_list(self, mock_testcase):
testsuite = 'suite'
@@ -33,7 +33,7 @@ class CliBaseTesting(unittest.TestCase):
result = runner.invoke(cli_base.testcase_list, [testsuite])
mock_testcase.list_testsuites.assert_called_once_with(testsuite)
- self.assertEquals(result.exit_code, 0)
+ self.assertEqual(result.exit_code, 0)
def test_testcase_show(self, mock_testcase):
testcase = 'case'
@@ -42,7 +42,7 @@ class CliBaseTesting(unittest.TestCase):
result = runner.invoke(cli_base.testcase_show, [testcase])
mock_testcase.show_testcase.assert_called_once_with(testcase)
- self.assertEquals(result.exit_code, 0)
+ self.assertEqual(result.exit_code, 0)
def test_testcase_run(self, mock_testcase):
run_args = ('arga', 'argb')
@@ -52,4 +52,4 @@ class CliBaseTesting(unittest.TestCase):
expected = ' '.join(run_args)
mock_testcase.run.assert_called_once_with(expected)
- self.assertEquals(result.exit_code, 0)
+ self.assertEqual(result.exit_code, 0)
diff --git a/dovetail/tests/unit/cmd_config.yml b/dovetail/tests/unit/cmd_config.yml
index 4a1439f6..405aabce 100644
--- a/dovetail/tests/unit/cmd_config.yml
+++ b/dovetail/tests/unit/cmd_config.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
cli:
arguments:
@@ -22,3 +31,11 @@ cli:
flags:
- '--report'
is_flag: 'True'
+ opnfvci:
+ flags:
+ - '--opnfv-ci'
+ is_flag: 'True'
+ noapivalidation:
+ flags:
+ - '--no-api-validation'
+ is_flag: 'True'
diff --git a/dovetail/tests/unit/test_container.py b/dovetail/tests/unit/test_container.py
index 70e01d8e..86da9d3c 100644
--- a/dovetail/tests/unit/test_container.py
+++ b/dovetail/tests/unit/test_container.py
@@ -10,6 +10,7 @@
import unittest
from mock import patch, call, Mock
+import docker
from dovetail.container import Container
@@ -19,6 +20,7 @@ __author__ = 'Stamatis Katsaounis <mokats@intracom-telecom.com>'
class ContainerTesting(unittest.TestCase):
def setUp(self):
+ self.patcher1 = patch.object(docker, 'from_env')
testcase = patch.object(Container, 'testcase')
testcase.testcase = {'validate': {
'type': 'bottlenecks'}}
@@ -28,28 +30,13 @@ class ContainerTesting(unittest.TestCase):
val_type_obj = Mock()
val_type_obj.return_value = 'bottlenecks'
testcase.validate_type = val_type_obj
+ self.client = self.patcher1.start().return_value
self.container = Container(testcase)
self.logger = Mock()
self.container.logger = self.logger
def tearDown(self):
- pass
-
- @patch('dovetail.container.dt_cfg')
- @patch.object(Container, 'docker_copy')
- def test_set_vnftest_conf_file(self, mock_copy, mock_config):
- source_file = 'source'
- destination_file = 'destination_file'
- mock_config.dovetail_config = {
- 'vnftest': {
- 'vnftest_conf': [{
- 'src_file': source_file,
- 'dest_file': destination_file}]}}
-
- self.container.set_vnftest_conf_file()
-
- mock_copy.assert_called_once_with(
- source_file, destination_file)
+ self.patcher1.stop()
@patch('dovetail.container.dt_cfg')
@patch.object(Container, 'copy_file')
@@ -88,22 +75,6 @@ class ContainerTesting(unittest.TestCase):
mock_copy.assert_not_called()
- def test_docker_copy_error(self):
- expected = (1, 'src_path or dest_path is empty')
- result = self.container.docker_copy(None, None)
-
- self.assertEqual(expected, result)
-
- @patch('dovetail.container.dt_utils')
- def test_docker_copy(self, mock_utils):
- expected = (0, 'success')
- mock_utils.exec_cmd.return_value = expected
- result = self.container.docker_copy('source', 'dest')
-
- mock_utils.exec_cmd.assert_called_once_with(
- 'docker cp source None:dest', self.logger)
- self.assertEqual(expected, result)
-
def test_copy_file_error(self):
expected = (1, 'src_path or dest_path is empty')
result = self.container.copy_file(None, None)
@@ -126,59 +97,115 @@ class ContainerTesting(unittest.TestCase):
self.assertEqual(expected, result)
+ @patch('dovetail.container.dt_cfg')
@patch('dovetail.container.dt_utils')
- def test_exec_cmd(self, mock_utils):
+ def test_exec_cmd(self, mock_utils, mock_config):
expected = (0, 'success')
+ mock_utils.get_value_from_dict.return_value = 'shell'
+ mock_config.dovetail_config = {'bottlenecks': 'value'}
+ container_obj = Mock()
+ container_obj.exec_run.return_value = expected
+ self.container.container = container_obj
+
+ result = self.container.exec_cmd('command')
+
+ self.assertEqual(expected, result)
+
+ @patch('dovetail.container.dt_cfg')
+ @patch('dovetail.container.dt_utils')
+ @patch('sys.exit')
+ def test_exec_cmd_exception(self, mock_exit, mock_utils, mock_config):
+ mock_utils.get_value_from_dict.return_value = 'shell'
+ mock_config.dovetail_config = {'bottlenecks': 'value'}
+ container_obj = Mock()
+ response_obj = Mock()
+ response_obj.status_code = 1
+ container_obj.exec_run.side_effect = \
+ docker.errors.APIError('error', response=response_obj)
+ self.container.container = container_obj
+
+ expected = (1, 'error')
+ result = self.container.exec_cmd('command', exit_on_error=True)
+
+ self.assertEqual(expected, result)
+ mock_exit.assert_called_once_with(1)
+
+ @patch('dovetail.container.dt_cfg')
+ @patch('dovetail.container.dt_utils')
+ def test_exec_cmd_no_shell(self, mock_utils, mock_config):
+ expected = (1, 'shell is empty')
mock_utils.exec_cmd.return_value = expected
+ mock_utils.get_value_from_dict.return_value = None
+ mock_config.dovetail_config = {'bottlenecks': 'value'}
result = self.container.exec_cmd('command')
- mock_utils.exec_cmd.assert_called_once_with(
- 'sudo docker exec None /bin/bash -c "command"', self.logger, False)
self.assertEqual(expected, result)
@patch('dovetail.container.dt_cfg')
@patch('dovetail.container.dt_utils')
- @patch.object(Container, 'check_container_exist')
+ @patch.object(Container, 'get_container')
def test_clean(self, mock_check, mock_utils, mock_config):
container_name = 'container'
mock_config.dovetail_config = {'bottlenecks': 'value'}
mock_utils.get_value_from_dict.return_value = [container_name]
- mock_check.return_value = True
+ self.container.container = Mock()
+ mock_check.return_value = Mock()
self.container.clean()
mock_utils.get_value_from_dict.assert_called_once_with(
'extra_container', 'value')
mock_check.assert_called_once_with(container_name)
- mock_utils.exec_cmd.assert_has_calls([
- call('sudo docker rm -f None', self.logger),
- call('sudo docker rm -f container', self.logger)])
+ @patch('dovetail.container.dt_cfg')
@patch('dovetail.container.dt_utils')
- def test_check_container_exist_true(self, mock_utils):
+ @patch.object(Container, 'get_container')
+ def test_clean_extra_error(self, mock_check, mock_utils, mock_config):
container_name = 'container'
- cmd = ('sudo docker ps -aq -f name={}'.format(container_name))
- mock_utils.exec_cmd.return_value = (0, 'msg')
+ mock_config.dovetail_config = {'bottlenecks': 'value'}
+ mock_utils.get_value_from_dict.return_value = [container_name]
+ container_obj = Mock()
+ container_obj.remove.side_effect = docker.errors.APIError('error')
+ self.container.container = Mock()
+ mock_check.return_value = container_obj
- result = self.container.check_container_exist(container_name)
+ self.container.clean()
- mock_utils.exec_cmd.assert_called_once_with(cmd, self.logger)
- self.assertEquals(True, result)
+ mock_utils.get_value_from_dict.assert_called_once_with(
+ 'extra_container', 'value')
+ mock_check.assert_called_once_with(container_name)
+ @patch('dovetail.container.dt_cfg')
@patch('dovetail.container.dt_utils')
- def test_check_container_exist_false(self, mock_utils):
+ def test_clean_no_extra_container(self, mock_utils, mock_config):
+ mock_utils.get_value_from_dict.return_value = None
+ container_obj = Mock()
+ container_obj.remove.side_effect = docker.errors.APIError('error')
+ self.container.container = container_obj
+ self.container.clean()
+ mock_utils.get_value_from_dict.assert_called_once()
+
+ def test_get_container_exist_true(self):
+ container_name = 'container'
+ expected = Mock()
+ self.client.containers.get.return_value = expected
+
+ result = self.container.get_container(container_name)
+
+ self.assertEqual(expected, result)
+
+ def test_get_container_none(self):
container_name = 'container'
- cmd = ('sudo docker ps -aq -f name={}'.format(container_name))
- mock_utils.exec_cmd.return_value = (1, 'msg')
+ self.client.containers.get.side_effect = \
+ docker.errors.APIError('error')
- result = self.container.check_container_exist(container_name)
+ result = self.container.get_container(container_name)
- mock_utils.exec_cmd.assert_called_once_with(cmd, self.logger)
- self.assertEquals(False, result)
+ self.assertEqual(None, result)
def test_pull_image_none(self):
result = self.container.pull_image(None)
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch.object(Container, 'pull_image_only')
@patch.object(Container, 'get_image_id')
@@ -190,7 +217,7 @@ class ContainerTesting(unittest.TestCase):
mock_get.assert_called_once_with(docker_image)
mock_pull.assert_called_once_with(docker_image)
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch.object(Container, 'pull_image_only')
@patch.object(Container, 'get_image_id')
@@ -206,7 +233,7 @@ class ContainerTesting(unittest.TestCase):
call(docker_image), call(docker_image)])
self.logger.error.assert_called_once_with(
'Failed to get the id of image {}.'.format(docker_image))
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch.object(Container, 'pull_image_only')
@patch.object(Container, 'get_image_id')
@@ -220,7 +247,7 @@ class ContainerTesting(unittest.TestCase):
mock_pull.assert_called_once_with(docker_image)
mock_get.assert_has_calls([
call(docker_image), call(docker_image)])
- self.assertEquals(docker_image, result)
+ self.assertEqual(docker_image, result)
@patch.object(Container, 'pull_image_only')
@patch.object(Container, 'get_image_id')
@@ -236,7 +263,7 @@ class ContainerTesting(unittest.TestCase):
call(docker_image), call(docker_image)])
self.logger.debug.assert_called_once_with(
'Image {} has no changes, no need to remove.'.format(docker_image))
- self.assertEquals(docker_image, result)
+ self.assertEqual(docker_image, result)
@patch.object(Container, 'remove_image')
@patch.object(Container, 'pull_image_only')
@@ -253,101 +280,81 @@ class ContainerTesting(unittest.TestCase):
mock_get.assert_has_calls([
call(docker_image), call(docker_image)])
mock_remove.assert_called_once_with(old_obj)
- self.assertEquals(docker_image, result)
+ self.assertEqual(docker_image, result)
- @patch('dovetail.container.dt_utils')
- def test_pull_image_only(self, mock_utils):
+ def test_pull_image_only(self):
docker_image = 'image'
- mock_utils.exec_cmd.return_value = (0, 'msg')
result = self.container.pull_image_only(docker_image)
- cmd = 'sudo docker pull %s' % (docker_image)
- mock_utils.exec_cmd.assert_called_once_with(cmd, self.logger)
self.logger.debug.assert_called_once_with(
'Success to pull docker image {}!'.format(docker_image))
- self.assertEquals(True, result)
+ self.assertEqual(True, result)
- @patch('dovetail.container.dt_utils')
- def test_pull_image_only_error(self, mock_utils):
+ def test_pull_image_only_error(self):
docker_image = 'image'
- mock_utils.exec_cmd.return_value = (1, 'error')
+ self.client.images.pull.side_effect = docker.errors.APIError('error')
result = self.container.pull_image_only(docker_image)
- cmd = 'sudo docker pull %s' % (docker_image)
- mock_utils.exec_cmd.assert_called_once_with(cmd, self.logger)
self.logger.error.assert_called_once_with(
'Failed to pull docker image {}!'.format(docker_image))
- self.assertEquals(False, result)
+ self.assertEqual(False, result)
- @patch('dovetail.container.dt_utils')
- def test_remove_image(self, mock_utils):
+ def test_remove_image(self):
image_id = 'image_id'
- mock_utils.exec_cmd.side_effect = [(1, 'error'), (0, 'msg')]
+ self.client.containers.list.side_effect = \
+ docker.errors.APIError('error')
result = self.container.remove_image(image_id)
- mock_utils.exec_cmd.assert_has_calls([
- call("sudo docker ps -aq -f 'ancestor=%s'" % (image_id),
- self.logger),
- call('sudo docker rmi %s' % (image_id), self.logger)])
self.logger.debug.assert_has_calls([
call('Remove image {}.'.format(image_id)),
call('Remove image {} successfully.'.format(image_id))])
- self.assertEquals(True, result)
+ self.assertEqual(True, result)
- @patch('dovetail.container.dt_utils')
- def test_remove_image_ancestors(self, mock_utils):
+ def test_remove_image_ancestors(self):
image_id = 'image_id'
- mock_utils.exec_cmd.return_value = (0, 'msg')
+ self.client.containers.list.return_value = ['cont_a']
result = self.container.remove_image(image_id)
- cmd = "sudo docker ps -aq -f 'ancestor=%s'" % (image_id)
- mock_utils.exec_cmd.assert_called_once_with(cmd, self.logger)
self.logger.debug.assert_called_once_with(
'Image {} has containers, skip.'.format(image_id))
- self.assertEquals(True, result)
+ self.assertEqual(True, result)
- @patch('dovetail.container.dt_utils')
- def test_remove_image_error(self, mock_utils):
+ def test_remove_image_error(self):
image_id = 'image_id'
- mock_utils.exec_cmd.return_value = (1, 'error')
+ self.client.containers.list.return_value = []
+ self.client.images.remove.side_effect = \
+ docker.errors.ImageNotFound('error')
result = self.container.remove_image(image_id)
- mock_utils.exec_cmd.assert_has_calls([
- call("sudo docker ps -aq -f 'ancestor=%s'" % (image_id),
- self.logger),
- call('sudo docker rmi %s' % (image_id), self.logger)])
self.logger.debug.assert_called_once_with(
'Remove image {}.'.format(image_id))
self.logger.error.assert_called_once_with(
'Failed to remove image {}.'.format(image_id))
- self.assertEquals(False, result)
+ self.assertEqual(False, result)
- @patch('dovetail.container.dt_utils')
- def test_get_image_id(self, mock_utils):
+ def test_get_image_id(self):
image_name = 'image_id'
- mock_utils.exec_cmd.return_value = (0, image_name)
+ mock_img = Mock()
+ mock_img.id = image_name
+ self.client.images.get.return_value = mock_img
result = self.container.get_image_id(image_name)
- cmd = 'sudo docker images -q %s' % (image_name)
- mock_utils.exec_cmd.assert_called_once_with(cmd, self.logger)
- self.assertEquals(image_name, result)
+ self.assertEqual(image_name, result)
- @patch('dovetail.container.dt_utils')
- def test_get_image_id_error(self, mock_utils):
+ def test_get_image_id_error(self):
image_name = 'image_id'
- mock_utils.exec_cmd.return_value = (1, 'error')
+ self.client.images.get.side_effect = \
+ docker.errors.ImageNotFound('error')
result = self.container.get_image_id(image_name)
- cmd = 'sudo docker images -q %s' % (image_name)
- mock_utils.exec_cmd.assert_called_once_with(cmd, self.logger)
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.container.dt_utils')
def test_get_config(self, mock_utils):
@@ -357,7 +364,7 @@ class ContainerTesting(unittest.TestCase):
result = self.container._get_config('a', 'b', 'c')
mock_utils.get_value_from_dict.assert_called_once_with('a', 'c')
- self.assertEquals(expected, result)
+ self.assertEqual(expected, result)
@patch('dovetail.container.dt_utils')
def test_get_config_none(self, mock_utils):
@@ -369,7 +376,7 @@ class ContainerTesting(unittest.TestCase):
call('a', 'c'), call('a', 'b')])
self.logger.error.assert_called_once_with(
"Couldn't find key {}.".format('a'))
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.container.dt_cfg')
@patch.object(Container, '_get_config')
@@ -383,7 +390,7 @@ class ContainerTesting(unittest.TestCase):
mock_get_conf.assert_has_calls([
call('image_name', 'value', {'type': 'bottlenecks'}),
call('docker_tag', 'value', {'type': 'bottlenecks'})])
- self.assertEquals(expected, result)
+ self.assertEqual(expected, result)
@patch('dovetail.container.dt_logger')
def test_create_log(self, mock_logger):
@@ -393,54 +400,7 @@ class ContainerTesting(unittest.TestCase):
self.container.create_log()
- self.assertEquals(self.logger, self.container.logger)
-
- @patch('dovetail.container.dt_cfg')
- @patch('dovetail.container.os.path')
- def test_set_vnftest_config_no_file(self, mock_path, mock_config):
- v_one = 'v_one'
- v_two = 'v_two'
- v_three = 'v_three'
- v_four = 'v_four'
- v_five = 'v_five'
- mock_path.join.return_value = '/'.join([v_two, v_three])
- mock_path.isfile.return_value = False
- mock_config.dovetail_config = {
- 'result_dir': v_one, 'config_dir': v_two,
- 'pri_key': v_three, 'vnftest': {
- 'result': {'log': v_four, 'key_path': v_five}}}
-
- expected = '-v {}:{} '.format(v_one, v_four)
- result = self.container.set_vnftest_config()
-
- mock_path.join.assert_called_once_with(v_two, v_three)
- mock_path.isfile.assert_called_once_with('/'.join([v_two, v_three]))
- self.logger.debug.assert_called_once_with(
- 'Key file {} is not found'.format('/'.join([v_two, v_three])))
- self.assertEquals(expected, result)
-
- @patch('dovetail.container.dt_cfg')
- @patch('dovetail.container.os.path')
- def test_set_vnftest_config(self, mock_path, mock_config):
- v_one = 'v_one'
- v_two = 'v_two'
- v_three = 'v_three'
- v_four = 'v_four'
- v_five = 'v_five'
- mock_path.join.return_value = '/'.join([v_two, v_three])
- mock_path.isfile.return_value = True
- mock_config.dovetail_config = {
- 'result_dir': v_one, 'config_dir': v_two,
- 'pri_key': v_three, 'vnftest': {
- 'result': {'log': v_four, 'key_path': v_five}}}
-
- expected = '-v {}:{} -v {}/{}:{} '.format(v_one, v_four, v_two,
- v_three, v_five)
- result = self.container.set_vnftest_config()
-
- mock_path.join.assert_called_once_with(v_two, v_three)
- mock_path.isfile.assert_called_once_with('/'.join([v_two, v_three]))
- self.assertEquals(expected, result)
+ self.assertEqual(self.logger, self.container.logger)
@patch('dovetail.container.dt_utils')
@patch('dovetail.container.dt_cfg')
@@ -448,109 +408,82 @@ class ContainerTesting(unittest.TestCase):
docker_image = 'docker_image'
container_id = 'container_id'
mock_utils.get_value_from_dict.side_effect = [
- 'opts', 'envs', ['volume_one', 'volume_two']]
+ {'key': 'value'}, 'shell', 'envs', ['volume_one', 'volume_two']]
+ mock_utils.get_mount_list.side_effect = [['mount', 'list'], 'success']
mock_utils.get_hosts_info.return_value = 'host_info'
- mock_utils.exec_cmd.return_value = (0, container_id)
+ container_obj = Mock()
+ container_obj.id = container_id
+ self.client.containers.run.return_value = container_obj
project_config = {}
mock_config.dovetail_config = {'bottlenecks': project_config}
expected = container_id
- result = self.container.create(docker_image)
+ result, msg = self.container.create(docker_image)
mock_utils.get_value_from_dict.assert_has_calls([
call('opts', project_config),
+ call('shell', project_config),
call('envs', project_config),
call('volumes', project_config)])
mock_utils.get_hosts_info.assert_called_once_with(self.logger)
- mock_utils.exec_cmd.assert_called_once_with(
- 'sudo docker run opts envs volume_one volume_two host_info '
- 'docker_image /bin/bash', self.logger)
- self.assertEquals(expected, result)
+ self.assertEqual(expected, result)
+ self.assertEqual('Successfully to create container.', msg)
@patch('dovetail.container.dt_utils')
@patch('dovetail.container.dt_cfg')
- @patch('dovetail.container.os.getenv')
- def test_create_error(self, mock_getenv, mock_config, mock_utils):
+ def test_create_no_shell(self, mock_config, mock_utils):
docker_image = 'docker_image'
- mock_utils.get_value_from_dict.side_effect = [
- 'opts', 'envs', ['volume_one']]
- mock_getenv.side_effect = ['True', 'dovetail_home', None, 'True']
+ mock_config.dovetail_config = {'bottlenecks': 'value'}
+ mock_utils.get_value_from_dict.side_effect = ['opts', None]
mock_utils.get_hosts_info.return_value = 'host_info'
- mock_utils.check_https_enabled.return_value = True
- mock_utils.exec_cmd.return_value = (1, 'error')
- project_config = {}
- mock_config.dovetail_config = {'bottlenecks': project_config}
- result = self.container.create(docker_image)
+
+ result, msg = self.container.create(docker_image)
mock_utils.get_value_from_dict.assert_has_calls([
- call('opts', project_config),
- call('envs', project_config),
- call('volumes', project_config)])
- mock_utils.get_hosts_info.assert_called_once_with(self.logger)
- mock_utils.exec_cmd.assert_called_once_with(
- 'sudo docker run opts envs volume_one host_info '
- 'docker_image /bin/bash', self.logger)
- self.assertEquals(None, result)
+ call('opts', 'value'),
+ call('shell', 'value')])
+ self.assertEqual(None, result)
+ self.assertEqual("Lacking of key word 'shell' in config file.", msg)
@patch('dovetail.container.dt_utils')
@patch('dovetail.container.dt_cfg')
- @patch('dovetail.container.os.getenv')
- @patch.object(Container, 'set_vnftest_config')
- @patch.object(Container, 'set_vnftest_conf_file')
- def test_create_vnftest(self, mock_setvnffile, mock_setvnfconf,
- mock_getenv, mock_config, mock_utils):
+ def test_create_mounts_none(self, mock_config, mock_utils):
docker_image = 'docker_image'
- container_id = 'container_id'
+ project_config = {}
+ mock_config.dovetail_config = {'bottlenecks': project_config}
mock_utils.get_value_from_dict.side_effect = [
- 'opts', 'envs', ['volume_one']]
- mock_getenv.side_effect = ['False', 'dovetail_home', 'cacert', 'True']
- mock_setvnfconf.return_value = 'vnftest_config'
+ {'key': 'value'}, 'shell', ['envs'], ['volume_one']]
+ mock_utils.get_mount_list.side_effect = [[None, 'error']]
mock_utils.get_hosts_info.return_value = 'host_info'
- mock_utils.exec_cmd.return_value = (0, container_id)
- project_config = {}
- mock_config.dovetail_config = {'vnftest': project_config}
- expected = container_id
- self.container.valid_type = 'vnftest'
- result = self.container.create(docker_image)
- self.container.valid_type = 'bottlenecks'
+ result, msg = self.container.create(docker_image)
mock_utils.get_value_from_dict.assert_has_calls([
- call('opts', project_config),
- call('envs', project_config),
- call('volumes', project_config)])
- mock_utils.get_hosts_info.assert_called_once_with(self.logger)
- mock_setvnfconf.assert_called_once_with()
- mock_setvnffile.assert_called_once_with(container_id)
- mock_utils.exec_cmd.assert_called_once_with(
- 'sudo docker run opts envs volume_one vnftest_config host_info '
- 'docker_image /bin/bash',
- self.logger)
- self.assertEquals(expected, result)
+ call('opts', project_config), call('shell', project_config),
+ call('envs', project_config), call('volumes', project_config)])
+ self.assertEqual(None, result)
+ self.assertEqual('error', msg)
@patch('dovetail.container.dt_utils')
@patch('dovetail.container.dt_cfg')
- @patch('dovetail.container.os.getenv')
- @patch.object(Container, 'set_vnftest_config')
- def test_create_vnftest_error(self, mock_setvnfconf,
- mock_getenv, mock_config, mock_utils):
+ def test_create_error(self, mock_config, mock_utils):
docker_image = 'docker_image'
mock_utils.get_value_from_dict.side_effect = [
- 'opts', 'envs', ['volume_one']]
- mock_getenv.return_value = 'True'
- mock_setvnfconf.return_value = None
- mock_config.dovetail_config = {
- 'vnftest': 'value',
- 'build_tag': 'v_one'}
-
- self.container.valid_type = 'vnftest'
- result = self.container.create(docker_image)
- self.container.valid_type = 'bottlenecks'
+ {'key': 'value'}, 'shell', ['envs'], ['volume_one']]
+ mock_utils.get_mount_list.side_effect = [['mount', 'list'], 'success']
+ mock_utils.get_hosts_info.return_value = 'host_info'
+ mock_utils.check_https_enabled.return_value = True
+ self.client.containers.run.side_effect = \
+ docker.errors.ImageNotFound('error')
+ project_config = {}
+ mock_config.dovetail_config = {'bottlenecks': project_config}
+ result, msg = self.container.create(docker_image)
mock_utils.get_value_from_dict.assert_has_calls([
- call('opts', 'value'),
- call('envs', 'value'),
- call('volumes', 'value')])
+ call('opts', project_config),
+ call('shell', project_config),
+ call('envs', project_config),
+ call('volumes', project_config)])
mock_utils.get_hosts_info.assert_called_once_with(self.logger)
- mock_setvnfconf.assert_called_once_with()
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
+ self.assertEqual('error', str(docker.errors.ImageNotFound('error')))
diff --git a/dovetail/tests/unit/test_parser.py b/dovetail/tests/unit/test_parser.py
index acfd25cf..4f164772 100644
--- a/dovetail/tests/unit/test_parser.py
+++ b/dovetail/tests/unit/test_parser.py
@@ -39,28 +39,28 @@ class TestParser(unittest.TestCase):
def test_parser_cmd(self, mock_logger):
"""Test whether the command is correctly parsed."""
- mock_cmd = "python /functest/ci/run_tests.py "\
+ mock_cmd = "python3 /functest/ci/run_tests.py "\
"-t {{validate_testcase}} -r"
with open(os.path.join(self.test_path, 'test_testcase.yaml')) as f:
mock_testcase_yaml = yaml.safe_load(f)
MockTestcase = type('Testcase', (object,), {})
mock_testcase = MockTestcase()
- mock_testcase.testcase = mock_testcase_yaml.values()[0]
+ mock_testcase.testcase = list(mock_testcase_yaml.values())[0]
output = parser.Parser.parse_cmd(mock_cmd, mock_testcase)
- expected_output = ("python /functest/ci/run_tests.py -t "
+ expected_output = ("python3 /functest/ci/run_tests.py -t "
"tempest_smoke_serial -r")
self.assertEqual(expected_output, output)
def test_parser_cmd_fail(self, mock_logger):
"""Test whether the command is correctly parsed."""
- mock_cmd = "python /functest/ci/run_tests.py "\
+ mock_cmd = "python3 /functest/ci/run_tests.py "\
"-t {{validate_testcase}} -r"
mock_testcase_yaml = {}
MockTestcase = type('Testcase', (object,), {})
mock_testcase = MockTestcase()
mock_testcase.testcase = mock_testcase_yaml.values()
output = parser.Parser.parse_cmd(mock_cmd, mock_testcase)
- expected_output = ("python /functest/ci/run_tests.py -t "
+ expected_output = ("python3 /functest/ci/run_tests.py -t "
"None -r")
self.assertEqual(expected_output, output)
diff --git a/dovetail/tests/unit/test_report.py b/dovetail/tests/unit/test_report.py
index fa5a02e0..41d70d2f 100644
--- a/dovetail/tests/unit/test_report.py
+++ b/dovetail/tests/unit/test_report.py
@@ -8,6 +8,7 @@
# http://www.apache.org/licenses/LICENSE-2.0
##
+import json
import os
import unittest
import yaml
@@ -33,16 +34,19 @@ class ReportTesting(unittest.TestCase):
dt_report.FunctestK8sCrawler.logger = None
dt_report.YardstickCrawler.logger = None
dt_report.BottlenecksCrawler.logger = None
- dt_report.VnftestCrawler.logger = None
+ dt_report.OnapVtpCrawler.logger = None
+ dt_report.OnapVvpCrawler.logger = None
dt_report.FunctestChecker.logger = None
dt_report.FunctestK8sChecker.logger = None
dt_report.YardstickChecker.logger = None
dt_report.BottlenecksChecker.logger = None
- dt_report.VnftestChecker.logger = None
+ dt_report.OnapVtpChecker.logger = None
+ dt_report.OnapVvpChecker.logger = None
dt_report.Report.logger = None
dt_report.Report.results = {
'functest': {}, 'yardstick': {}, 'functest-k8s': {},
- 'bottlenecks': {}, 'shell': {}, 'vnftest': {}}
+ 'bottlenecks': {}, 'shell': {}, 'onap-vtp': {},
+ 'onap-vvp': {}}
def _produce_report_initial_text(self, report_data):
report_txt = ''
@@ -62,7 +66,7 @@ class ReportTesting(unittest.TestCase):
dt_report.Report.create_log()
- self.assertEquals(getlogger_obj, dt_report.Report.logger)
+ self.assertEqual(getlogger_obj, dt_report.Report.logger)
@patch('dovetail.report.os.path')
@patch('dovetail.report.dt_cfg')
@@ -78,7 +82,7 @@ class ReportTesting(unittest.TestCase):
inner_testcase_obj = Mock()
testcase_obj.testcase = inner_testcase_obj
mock_config.dovetail_config = {'result_dir': 'result_dir'}
- mock_utils.get_value_from_dict.return_value = 'check_results_file'
+ mock_utils.get_value_from_dict.return_value = ['check_results_file']
mock_path.join.return_value = 'results_file'
mock_path.isfile.return_value = True
mock_get.return_value = 'result'
@@ -86,15 +90,15 @@ class ReportTesting(unittest.TestCase):
result = report.check_tc_result(testcase_obj)
mock_utils.get_value_from_dict.assert_called_once_with(
- 'report.check_results_file', inner_testcase_obj)
+ 'report.check_results_files', inner_testcase_obj)
mock_path.join.assert_called_once_with(
'result_dir', 'check_results_file')
mock_path.isfile.assert_called_once_with('results_file')
logger_obj.info.assert_called_once_with(
- 'Results have been stored with file results_file.')
- mock_get.assert_called_once_with(testcase_obj, 'results_file')
+ 'Results have been stored with files: [\'results_file\'].')
+ mock_get.assert_called_once_with(testcase_obj, ['results_file'])
mock_check.assert_called_once_with(testcase_obj, 'result')
- self.assertEquals('result', result)
+ self.assertEqual('result', result)
@patch('dovetail.report.os.path')
@patch('dovetail.report.dt_cfg')
@@ -110,21 +114,21 @@ class ReportTesting(unittest.TestCase):
inner_testcase_obj = Mock()
testcase_obj.testcase = inner_testcase_obj
mock_config.dovetail_config = {'result_dir': 'result_dir'}
- mock_utils.get_value_from_dict.return_value = 'check_results_file'
+ mock_utils.get_value_from_dict.return_value = ['check_results_file']
mock_path.join.return_value = 'results_file'
mock_path.isfile.return_value = False
result = report.check_tc_result(testcase_obj)
mock_utils.get_value_from_dict.assert_called_once_with(
- 'report.check_results_file', inner_testcase_obj)
+ 'report.check_results_files', inner_testcase_obj)
mock_path.join.assert_called_once_with(
'result_dir', 'check_results_file')
mock_path.isfile.assert_called_once_with('results_file')
logger_obj.error.assert_called_once_with(
'Failed to store results with file results_file.')
mock_check.assert_called_once_with(testcase_obj)
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.report.os.path')
@patch('dovetail.report.dt_cfg')
@@ -146,12 +150,12 @@ class ReportTesting(unittest.TestCase):
result = report.check_tc_result(testcase_obj)
mock_utils.get_value_from_dict.assert_called_once_with(
- 'report.check_results_file', inner_testcase_obj)
+ 'report.check_results_files', inner_testcase_obj)
logger_obj.error.assert_called_once_with(
- "Failed to get 'check_results_file' from config "
+ "Failed to get 'check_results_files' from config "
"file of test case name")
mock_check.assert_called_once_with(testcase_obj)
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.report.CheckerFactory')
def test_check_result(self, mock_factory):
@@ -166,25 +170,33 @@ class ReportTesting(unittest.TestCase):
mock_factory.create.assert_called_once_with('type')
checker_obj.check.assert_called_once_with(testcase_obj, None)
+ @patch('dovetail.report.os.getenv')
+ @patch.object(dt_report.Report, 'get_checksum')
@patch('dovetail.report.Testcase')
@patch('dovetail.report.datetime.datetime')
@patch('dovetail.report.dt_cfg')
- def test_generate_json(self, mock_config, mock_datetime, mock_testcase):
+ def test_generate_json(self, mock_config, mock_datetime, mock_testcase,
+ mock_checksum, mock_env):
logger_obj = Mock()
report = dt_report.Report()
report.logger = logger_obj
- testcase_list = ['t_a', 't_b']
+ testcase_list = ['ta.tb.tc', 'td.te.tf']
duration = 42
mock_config.dovetail_config = {
- 'build_tag': 'build_tag'
+ 'build_tag': 'build_tag',
+ 'version': '2018.09'
}
+ mock_env.return_value = 'enabled'
utc_obj = Mock()
utc_obj.strftime.return_value = '2018-01-13 13:13:13 UTC'
mock_datetime.utcnow.return_value = utc_obj
testcase_obj = Mock()
testcase_obj.passed.return_value = 'PASS'
testcase_obj.objective.return_value = 'objective'
+ mock_checksum.return_value = 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
testcase_obj.is_mandatory = True
+ testcase_obj.portal_key_file.return_value = 'a/b.log'
+ testcase_obj.vnf_type.return_value = 'tosca'
testcase_obj.sub_testcase.return_value = ['subt_a']
testcase_obj.sub_testcase_passed.return_value = 'PASS'
mock_testcase.get.side_effect = [testcase_obj, None]
@@ -193,137 +205,234 @@ class ReportTesting(unittest.TestCase):
expected = {
'version': '2018.09',
'build_tag': 'build_tag',
+ 'vnf_type': 'tosca',
+ 'vnf_checksum': 'da39a3ee5e6b4b0d3255bfef95601890afd80709',
'test_date': '2018-01-13 13:13:13 UTC',
'duration': duration,
+ 'validation': 'enabled',
'testcases_list': [
{
- 'name': 't_a',
+ 'name': 'ta.tb.tc',
'result': 'PASS',
'objective': 'objective',
'mandatory': True,
+ 'portal_key_file': 'a/b.log',
'sub_testcase': [{
'name': 'subt_a',
'result': 'PASS'
}]
},
{
- 'name': 't_b',
+ 'name': 'td.te.tf',
'result': 'Undefined',
'objective': '',
'mandatory': False,
+ 'portal_key_file': '',
'sub_testcase': []
}
]
}
- self.assertEquals(expected, result)
+ self.assertEqual(expected, result)
+ @patch('dovetail.report.os.getenv')
+ @patch('dovetail.report.Testcase')
@patch('dovetail.report.datetime.datetime')
@patch('dovetail.report.dt_cfg')
- def test_generate_json_no_list(self, mock_config, mock_datetime):
+ def test_generate_json_noVNF(self, mock_config, mock_datetime,
+ mock_testcase, mock_env):
logger_obj = Mock()
report = dt_report.Report()
report.logger = logger_obj
+ testcase_list = ['ta.tb.tc', 'td.te.tf']
duration = 42
mock_config.dovetail_config = {
- 'build_tag': 'build_tag'
+ 'build_tag': 'build_tag',
+ 'version': '2018.09'
}
+ mock_env.return_value = 'disabled'
utc_obj = Mock()
utc_obj.strftime.return_value = '2018-01-13 13:13:13 UTC'
mock_datetime.utcnow.return_value = utc_obj
+ testcase_obj = Mock()
+ testcase_obj.passed.return_value = 'PASS'
+ testcase_obj.objective.return_value = 'objective'
+ testcase_obj.is_mandatory = True
+ testcase_obj.vnf_type.return_value = None
+ testcase_obj.portal_key_file.return_value = 'a/b.log'
+ testcase_obj.sub_testcase.return_value = ['subt_a']
+ testcase_obj.sub_testcase_passed.return_value = 'PASS'
+ mock_testcase.get.side_effect = [testcase_obj, None]
- result = report.generate_json([], duration)
+ result = report.generate_json(testcase_list, duration)
expected = {
'version': '2018.09',
'build_tag': 'build_tag',
'test_date': '2018-01-13 13:13:13 UTC',
'duration': duration,
- 'testcases_list': []
+ 'validation': 'disabled',
+ 'testcases_list': [
+ {
+ 'name': 'ta.tb.tc',
+ 'result': 'PASS',
+ 'objective': 'objective',
+ 'mandatory': True,
+ 'portal_key_file': 'a/b.log',
+ 'sub_testcase': [{
+ 'name': 'subt_a',
+ 'result': 'PASS'
+ }]
+ },
+ {
+ 'name': 'td.te.tf',
+ 'result': 'Undefined',
+ 'objective': '',
+ 'mandatory': False,
+ 'portal_key_file': '',
+ 'sub_testcase': []
+ }
+ ]
}
- self.assertEquals(expected, result)
+ self.assertEqual(expected, result)
+ @patch('dovetail.report.os.getenv')
+ @patch('dovetail.report.Testcase')
+ @patch('dovetail.report.datetime.datetime')
@patch('dovetail.report.dt_cfg')
- @patch.object(dt_report.Report, 'generate_json')
- @patch.object(dt_report.Report, 'save_json_results')
- def test_generate(self, mock_save, mock_generate, mock_config):
+ def test_generate_json_noVNF_inTestCase(self, mock_config, mock_datetime,
+ mock_testcase, mock_env):
logger_obj = Mock()
report = dt_report.Report()
report.logger = logger_obj
- testcase_list = ['t_a', 't_b']
+ testcase_list = ['ta.tb.tc', 'td.te.tf']
+ duration = 42
mock_config.dovetail_config = {
- 'testarea_supported': testcase_list
+ 'build_tag': 'build_tag',
+ 'version': '2018.09'
}
- duration = 42
- report_data = {
- 'version': 'v2',
- 'build_tag': '2.0.0',
+ mock_env.return_value = 'enabled'
+ utc_obj = Mock()
+ utc_obj.strftime.return_value = '2018-01-13 13:13:13 UTC'
+ mock_datetime.utcnow.return_value = utc_obj
+ testcase_obj = Mock()
+ testcase_obj.passed.return_value = 'PASS'
+ testcase_obj.objective.return_value = 'objective'
+ testcase_obj.is_mandatory = True
+ testcase_obj.portal_key_file.return_value = 'a/b.log'
+ testcase_obj.vnf_type.side_effect = Exception()
+ testcase_obj.sub_testcase.return_value = ['subt_a']
+ testcase_obj.sub_testcase_passed.return_value = 'PASS'
+ mock_testcase.get.side_effect = [testcase_obj, None]
+
+ result = report.generate_json(testcase_list, duration)
+ expected = {
+ 'version': '2018.09',
+ 'build_tag': 'build_tag',
'test_date': '2018-01-13 13:13:13 UTC',
- 'duration': 42.42,
+ 'duration': duration,
+ 'validation': 'enabled',
'testcases_list': [
{
- 'name': 't_a',
+ 'name': 'ta.tb.tc',
'result': 'PASS',
+ 'objective': 'objective',
+ 'mandatory': True,
+ 'portal_key_file': 'a/b.log',
'sub_testcase': [{
'name': 'subt_a',
'result': 'PASS'
}]
},
{
- 'name': 't_b',
- 'result': 'SKIP'
+ 'name': 'td.te.tf',
+ 'result': 'Undefined',
+ 'objective': '',
+ 'mandatory': False,
+ 'portal_key_file': '',
+ 'sub_testcase': []
}
]
}
- mock_generate.return_value = report_data
- result = report.generate(testcase_list, duration)
- expected = self._produce_report_initial_text(report_data)
- expected += 'Pass Rate: 100.00% (1/1)\n'
- expected += '%-25s pass rate %.2f%%\n' % ('t_a:', 100)
- expected += '-%-25s %s\n' % ('t_a', 'PASS')
- expected += '\t%-110s %s\n' % ('subt_a', 'PASS')
- expected += '%-25s all skipped\n' % 't_b'
- expected += '-%-25s %s\n' % ('t_b', 'SKIP')
+ self.assertEqual(expected, result)
- mock_generate.assert_called_once_with(testcase_list, duration)
- mock_save.assert_called_once_with(report_data)
- report.logger.info.assert_called_once_with(expected)
- self.assertEquals(expected, result)
+ @patch('dovetail.report.datetime.datetime')
+ @patch('dovetail.report.dt_cfg')
+ @patch.object(dt_report.Report, 'get_checksum')
+ def test_generate_json_no_list(self, mock_checksum, mock_config,
+ mock_datetime):
+ logger_obj = Mock()
+ report = dt_report.Report()
+ report.logger = logger_obj
+ duration = 42
+ mock_config.dovetail_config = {
+ 'build_tag': 'build_tag',
+ 'version': '2018.09',
+ 'vnf_type': 'tosca'
+ }
+ utc_obj = Mock()
+ utc_obj.strftime.return_value = '2018-01-13 13:13:13 UTC'
+ mock_datetime.utcnow.return_value = utc_obj
+ mock_checksum.return_value = 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
+
+ result = report.generate_json([], duration)
+ expected = {
+ 'version': '2018.09',
+ 'build_tag': 'build_tag',
+ 'vnf_type': 'tosca',
+ 'vnf_checksum': 'da39a3ee5e6b4b0d3255bfef95601890afd80709',
+ 'test_date': '2018-01-13 13:13:13 UTC',
+ 'duration': duration,
+ 'testcases_list': []
+ }
+
+ self.assertEqual(expected, result)
@patch('dovetail.report.dt_cfg')
@patch.object(dt_report.Report, 'generate_json')
@patch.object(dt_report.Report, 'save_json_results')
- def test_generate_error(self, mock_save, mock_generate, mock_config):
+ def test_generate(self, mock_save, mock_generate, mock_config):
logger_obj = Mock()
report = dt_report.Report()
report.logger = logger_obj
- mock_config.dovetail_config = {
- 'testarea_supported': []
- }
- testcase_list = ['t_a']
+ testcase_list = ['ta.tb.tc', 'td.te.tf']
duration = 42
report_data = {
'version': 'v2',
'build_tag': '2.0.0',
'test_date': '2018-01-13 13:13:13 UTC',
'duration': 42.42,
- 'testcases_list': [{
- 'name': 't_a',
- 'result': 'PASS'
- }]
+ 'testcases_list': [
+ {
+ 'name': 'ta.tb.tc',
+ 'result': 'PASS',
+ 'sub_testcase': [{
+ 'name': 'subt_a',
+ 'result': 'PASS'
+ }]
+ },
+ {
+ 'name': 'td.te.tf',
+ 'result': 'SKIP'
+ }
+ ]
}
mock_generate.return_value = report_data
result = report.generate(testcase_list, duration)
- expected = None
+ expected = self._produce_report_initial_text(report_data)
+ expected += 'Pass Rate: 100.00% (1/1)\n'
+ expected += '%-25s pass rate %.2f%%\n' % ('tb:', 100)
+ expected += '-%-25s %s\n' % ('ta.tb.tc', 'PASS')
+ expected += '\t%-110s %s\n' % ('subt_a', 'PASS')
+ expected += '%-25s all skipped\n' % 'te'
+ expected += '-%-25s %s\n' % ('td.te.tf', 'SKIP')
mock_generate.assert_called_once_with(testcase_list, duration)
mock_save.assert_called_once_with(report_data)
- report.logger.error.assert_called_once_with(
- 'Test case {} not in supported testarea.'
- .format(report_data['testcases_list'][0]['name']))
- self.assertEquals(expected, result)
+ report.logger.info.assert_called_once_with(expected)
+ self.assertEqual(expected, result)
@patch('dovetail.report.dt_cfg')
@patch.object(dt_report.Report, 'generate_json')
@@ -332,9 +441,6 @@ class ReportTesting(unittest.TestCase):
logger_obj = Mock()
report = dt_report.Report()
report.logger = logger_obj
- mock_config.dovetail_config = {
- 'testarea_supported': []
- }
duration = 42
report_data = {
'version': 'v2',
@@ -352,9 +458,9 @@ class ReportTesting(unittest.TestCase):
mock_generate.assert_called_once_with([], duration)
mock_save.assert_called_once_with(report_data)
report.logger.info.assert_called_once_with(expected)
- self.assertEquals(expected, result)
+ self.assertEqual(expected, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.report.json')
@patch('dovetail.report.os.path')
@patch('dovetail.report.dt_cfg')
@@ -377,7 +483,7 @@ class ReportTesting(unittest.TestCase):
mock_json.dumps.assert_called_once_with('results')
file_obj.write.assert_called_once_with('results text\n')
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.report.json')
@patch('dovetail.report.os.path')
@patch('dovetail.report.dt_cfg')
@@ -443,18 +549,18 @@ class ReportTesting(unittest.TestCase):
mock_crawler.create.return_value = crawler_obj
crawler_obj.crawl.return_value = 'result'
- result = report.get_result(testcase_obj, 'check_results_file')
+ result = report.get_result(testcase_obj, 'check_results_files')
testcase_obj.validate_testcase.assert_called_once_with()
testcase_obj.validate_type.assert_called_once_with()
mock_crawler.create.assert_called_once_with('functest')
crawler_obj.crawl.assert_called_once_with(
- testcase_obj, 'check_results_file')
+ testcase_obj, 'check_results_files')
logger_obj.debug.assert_called_once_with(
'Test case: validate -> result acquired')
- self.assertEquals({'validate': 'result'},
- dt_report.Report.results['functest'])
- self.assertEquals('result', result)
+ self.assertEqual({'validate': 'result'},
+ dt_report.Report.results['functest'])
+ self.assertEqual('result', result)
@patch('dovetail.report.CrawlerFactory')
def test_get_result_no_result(self, mock_crawler):
@@ -469,17 +575,17 @@ class ReportTesting(unittest.TestCase):
mock_crawler.create.return_value = crawler_obj
crawler_obj.crawl.return_value = None
- result = report.get_result(testcase_obj, 'check_results_file')
+ result = report.get_result(testcase_obj, 'check_results_files')
testcase_obj.validate_testcase.assert_called_once_with()
testcase_obj.validate_type.assert_called_once_with()
mock_crawler.create.assert_called_once_with('functest')
crawler_obj.crawl.assert_called_once_with(
- testcase_obj, 'check_results_file')
+ testcase_obj, 'check_results_files')
testcase_obj.increase_retry.assert_called_once_with()
logger_obj.debug.assert_called_once_with(
'Test case: validate -> result acquired retry: retry')
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.report.CrawlerFactory')
def test_get_result_no_crawler(self, mock_crawler):
@@ -492,14 +598,14 @@ class ReportTesting(unittest.TestCase):
testcase_obj.validate_type.return_value = 'functest'
mock_crawler.create.return_value = None
- result = report.get_result(testcase_obj, 'check_results_file')
+ result = report.get_result(testcase_obj, 'check_results_files')
testcase_obj.validate_testcase.assert_called_once_with()
testcase_obj.validate_type.assert_called_once_with()
mock_crawler.create.assert_called_once_with('functest')
logger_obj.error.assert_called_once_with(
'Crawler is None: name')
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.report.dt_logger')
def test_functest_crawler_create_log(self, mock_logger):
@@ -510,7 +616,7 @@ class ReportTesting(unittest.TestCase):
dt_report.FunctestCrawler.create_log()
- self.assertEquals(getlogger_obj, dt_report.FunctestCrawler.logger)
+ self.assertEqual(getlogger_obj, dt_report.FunctestCrawler.logger)
@patch('dovetail.report.dt_cfg')
@patch('dovetail.report.os.path')
@@ -525,16 +631,38 @@ class ReportTesting(unittest.TestCase):
testcase_obj.name.return_value = 'name'
crawler = dt_report.FunctestCrawler()
- result = crawler.crawl(testcase_obj, file_path)
+ result = crawler.crawl(testcase_obj, [file_path])
mock_path.exists.assert_called_once_with(file_path)
testcase_obj.validate_testcase.assert_called_once_with()
testcase_obj.name.assert_called_once_with()
logger_obj.error.assert_called_once_with(
'Result file not found: {}'.format(file_path))
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
- @patch('__builtin__.open')
+ def test_functest_crawler_get_details_exception(self):
+ logger_obj = Mock()
+ dt_report.FunctestCrawler.logger = logger_obj
+ data = None
+ crawler = dt_report.FunctestCrawler()
+
+ excepted = None
+ result = crawler.get_details(data)
+ logger_obj.exception.assert_called_once()
+ self.assertEqual(excepted, result)
+
+ def test_functest_crawler_get_rally_details_exception(self):
+ logger_obj = Mock()
+ dt_report.FunctestCrawler.logger = logger_obj
+ data = None
+ crawler = dt_report.FunctestCrawler()
+
+ excepted = None
+ result = crawler.get_rally_details(data)
+ logger_obj.exception.assert_called_once()
+ self.assertEqual(excepted, result)
+
+ @patch('builtins.open')
@patch('dovetail.report.json')
@patch('dovetail.report.dt_cfg')
@patch('dovetail.report.dt_utils')
@@ -570,7 +698,7 @@ class ReportTesting(unittest.TestCase):
mock_utils.get_duration.return_value = 'duration'
crawler = dt_report.FunctestCrawler()
- result = crawler.crawl(testcase_obj, file_path)
+ result = crawler.crawl(testcase_obj, [file_path])
expected = {'criteria': 'criteria', 'timestart': 'start_date',
'timestop': 'stop_date', 'duration': 'duration',
'details': {
@@ -587,9 +715,69 @@ class ReportTesting(unittest.TestCase):
testcase_obj.validate_testcase.assert_called_once_with()
testcase_obj.sub_testcase.assert_called_once_with()
testcase_obj.name.assert_called_once_with()
- self.assertEquals(expected, result)
+ self.assertEqual(expected, result)
+
+ @patch('builtins.open')
+ @patch('dovetail.report.json')
+ @patch('dovetail.report.dt_cfg')
+ @patch('dovetail.report.dt_utils')
+ @patch('dovetail.report.os.path')
+ def test_functest_rally_crawler_crawl(self, mock_path, mock_utils,
+ mock_config, mock_json, mock_open):
+ logger_obj = Mock()
+ mock_config.dovetail_config = {'build_tag': 'tag'}
+ dt_report.FunctestCrawler.logger = logger_obj
+ mock_path.exists.return_value = True
+ file_path = 'file_path'
+ testcase_obj = Mock()
+ testcase_obj.validate_testcase.return_value = 'rally_full'
+ testcase_obj.name.return_value = 'name'
+ testcase_obj.sub_testcase.return_value = ['subt_a', 'subt_b', 'subt_c']
+ file_obj = Mock()
+ mock_open.return_value.__enter__.side_effect = [[file_obj], file_obj]
+ data_dict = {
+ 'case_name': 'rally_full',
+ 'build_tag': 'tag-name',
+ 'criteria': 'criteria',
+ 'start_date': 'start_date',
+ 'stop_date': 'stop_date',
+ 'details': {
+ 'modules': [
+ {
+ 'details': {
+ 'success': ['subt_a'],
+ 'failures': ['subt_b', 'subt_c']
+ },
+ 'module': 'module'
+ }
+ ]
+ }
+ }
+
+ mock_json.loads.return_value = data_dict
+ mock_utils.get_duration.return_value = 'duration'
+
+ crawler = dt_report.FunctestCrawler()
+ result = crawler.crawl(testcase_obj, [file_path, file_path])
+ expected = {'criteria': 'criteria', 'timestart': 'start_date',
+ 'timestop': 'stop_date', 'duration': 'duration',
+ 'details': {
+ 'tests': 3, 'failures': 2,
+ 'success': ['subt_a'], 'errors': ['subt_b', 'subt_c'],
+ 'skipped': []}}
+
+ mock_path.exists.assert_called_once_with(file_path)
+ mock_open.assert_called_with(file_path, 'r')
+ mock_json.loads.assert_called_with(file_obj)
+ mock_utils.get_duration.assert_called_once_with(
+ 'start_date', 'stop_date', logger_obj)
+ testcase_obj.set_results.assert_called_with(expected)
+ testcase_obj.validate_testcase.assert_called_once_with()
+ testcase_obj.sub_testcase.assert_called_once_with()
+ testcase_obj.name.assert_called_once_with()
+ self.assertEqual(expected, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.report.json.loads')
@patch('dovetail.report.dt_cfg')
@patch('dovetail.report.dt_utils')
@@ -612,7 +800,7 @@ class ReportTesting(unittest.TestCase):
mock_utils.get_duration.return_value = 'duration'
crawler = dt_report.FunctestCrawler()
- result = crawler.crawl(testcase_obj, file_path)
+ result = crawler.crawl(testcase_obj, [file_path])
mock_path.exists.assert_called_once_with(file_path)
mock_open.assert_called_once_with(file_path, 'r')
@@ -622,7 +810,7 @@ class ReportTesting(unittest.TestCase):
testcase_obj.name.assert_called_once_with()
logger_obj.exception.assert_called_once_with(
"Result data don't have key 'case_name'.")
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.report.dt_logger')
def test_functestk8s_crawler_create_log(self, mock_logger):
@@ -633,7 +821,7 @@ class ReportTesting(unittest.TestCase):
dt_report.FunctestK8sCrawler.create_log()
- self.assertEquals(getlogger_obj, dt_report.FunctestK8sCrawler.logger)
+ self.assertEqual(getlogger_obj, dt_report.FunctestK8sCrawler.logger)
@patch('dovetail.report.FunctestK8sCrawler.crawl_from_file')
@patch('dovetail.report.dt_cfg')
@@ -648,11 +836,11 @@ class ReportTesting(unittest.TestCase):
crawler = dt_report.FunctestK8sCrawler()
- result = crawler.crawl(testcase, file_path)
+ result = crawler.crawl(testcase, [file_path])
dt_report.FunctestK8sCrawler.crawl_from_file.assert_called_once_with(
'testcase', 'file_path')
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.report.dt_logger')
def test_yardstick_crawler_create_log(self, mock_logger):
@@ -663,7 +851,7 @@ class ReportTesting(unittest.TestCase):
dt_report.YardstickCrawler.create_log()
- self.assertEquals(getlogger_obj, dt_report.YardstickCrawler.logger)
+ self.assertEqual(getlogger_obj, dt_report.YardstickCrawler.logger)
@patch('dovetail.report.os.path')
def test_yardstick_crawler_crawl_not_exists(self, mock_path):
@@ -673,14 +861,14 @@ class ReportTesting(unittest.TestCase):
file_path = 'file_path'
crawler = dt_report.YardstickCrawler()
- result = crawler.crawl(None, file_path)
+ result = crawler.crawl(None, [file_path])
mock_path.exists.assert_called_once_with(file_path)
logger_obj.error.assert_called_once_with(
'Result file not found: {}'.format(file_path))
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.report.json.loads')
@patch('dovetail.report.dt_utils')
@patch('dovetail.report.os.path')
@@ -710,7 +898,7 @@ class ReportTesting(unittest.TestCase):
mock_utils.get_value_from_dict.return_value = 'PASS'
crawler = dt_report.YardstickCrawler()
- result = crawler.crawl(testcase_obj, file_path)
+ result = crawler.crawl(testcase_obj, [file_path])
expected = {'criteria': 'FAIL'}
mock_path.exists.assert_called_once_with(file_path)
@@ -720,9 +908,9 @@ class ReportTesting(unittest.TestCase):
'result.criteria', data_dict)
testcase_obj.validate_testcase.assert_called_once_with()
testcase_obj.set_results.assert_called_once_with(expected)
- self.assertEquals(expected, result)
+ self.assertEqual(expected, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.report.json.loads')
@patch('dovetail.report.dt_utils')
@patch('dovetail.report.os.path')
@@ -741,7 +929,7 @@ class ReportTesting(unittest.TestCase):
mock_utils.get_value_from_dict.return_value = 'PASS'
crawler = dt_report.YardstickCrawler()
- result = crawler.crawl(testcase_obj, file_path)
+ result = crawler.crawl(testcase_obj, [file_path])
expected = {'criteria': 'PASS'}
mock_path.exists.assert_called_once_with(file_path)
@@ -753,7 +941,7 @@ class ReportTesting(unittest.TestCase):
testcase_obj.set_results.assert_called_once_with(expected)
logger_obj.exception.assert_called_once_with(
"Pass flag not found 'result'")
- self.assertEquals(expected, result)
+ self.assertEqual(expected, result)
@patch('dovetail.report.dt_logger')
def test_bottlenecks_crawler_create_log(self, mock_logger):
@@ -764,7 +952,7 @@ class ReportTesting(unittest.TestCase):
dt_report.BottlenecksCrawler.create_log()
- self.assertEquals(getlogger_obj, dt_report.BottlenecksCrawler.logger)
+ self.assertEqual(getlogger_obj, dt_report.BottlenecksCrawler.logger)
@patch('dovetail.report.os.path')
def test_bottlenecks_crawler_crawl_not_exists(self, mock_path):
@@ -774,14 +962,14 @@ class ReportTesting(unittest.TestCase):
file_path = 'file_path'
crawler = dt_report.BottlenecksCrawler()
- result = crawler.crawl(None, file_path)
+ result = crawler.crawl(None, [file_path])
mock_path.exists.assert_called_once_with(file_path)
logger_obj.error.assert_called_once_with(
'Result file not found: {}'.format(file_path))
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.report.json.loads')
@patch('dovetail.report.os.path')
def test_bottlenecks_crawler_crawl_pass(self, mock_path, mock_loads,
@@ -800,16 +988,16 @@ class ReportTesting(unittest.TestCase):
mock_loads.return_value = data_dict
crawler = dt_report.BottlenecksCrawler()
- result = crawler.crawl(testcase_obj, file_path)
+ result = crawler.crawl(testcase_obj, [file_path])
expected = {'criteria': 'PASS'}
mock_path.exists.assert_called_once_with(file_path)
mock_open.assert_called_once_with(file_path, 'r')
mock_loads.assert_called_once_with(file_obj)
testcase_obj.set_results.assert_called_once_with(expected)
- self.assertEquals(expected, result)
+ self.assertEqual(expected, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.report.json.loads')
@patch('dovetail.report.os.path')
def test_bottlenecks_crawler_crawl_fail(self, mock_path, mock_loads,
@@ -828,16 +1016,16 @@ class ReportTesting(unittest.TestCase):
mock_loads.return_value = data_dict
crawler = dt_report.BottlenecksCrawler()
- result = crawler.crawl(testcase_obj, file_path)
+ result = crawler.crawl(testcase_obj, [file_path])
expected = {'criteria': 'FAIL'}
mock_path.exists.assert_called_once_with(file_path)
mock_open.assert_called_once_with(file_path, 'r')
mock_loads.assert_called_once_with(file_obj)
testcase_obj.set_results.assert_called_once_with(expected)
- self.assertEquals(expected, result)
+ self.assertEqual(expected, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.report.json.loads')
@patch('dovetail.report.os.path')
def test_bottlenecks_crawler_crawl_key_error(self, mock_path, mock_loads,
@@ -853,7 +1041,7 @@ class ReportTesting(unittest.TestCase):
mock_loads.return_value = {}
crawler = dt_report.BottlenecksCrawler()
- result = crawler.crawl(testcase_obj, file_path)
+ result = crawler.crawl(testcase_obj, [file_path])
expected = {'criteria': 'FAIL'}
mock_path.exists.assert_called_once_with(file_path)
@@ -862,7 +1050,7 @@ class ReportTesting(unittest.TestCase):
testcase_obj.set_results.assert_called_once_with(expected)
logger_obj.exception.assert_called_once_with(
"Pass flag not found 'data_body'")
- self.assertEquals(expected, result)
+ self.assertEqual(expected, result)
@patch('dovetail.report.os.path')
def test_shell_crawler_crawl_not_exists(self, mock_path):
@@ -870,12 +1058,12 @@ class ReportTesting(unittest.TestCase):
file_path = 'file_path'
crawler = dt_report.ShellCrawler()
- result = crawler.crawl(None, file_path)
+ result = crawler.crawl(None, [file_path])
mock_path.exists.assert_called_once_with(file_path)
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.report.os.path')
def test_shell_crawler_crawl_exception(self, mock_path, mock_open):
mock_path.exists.return_value = True
@@ -883,13 +1071,13 @@ class ReportTesting(unittest.TestCase):
mock_open.return_value.__enter__.return_value = Exception()
crawler = dt_report.ShellCrawler()
- result = crawler.crawl(None, file_path)
+ result = crawler.crawl(None, [file_path])
mock_path.exists.assert_called_once_with(file_path)
mock_open.assert_called_once_with(file_path, 'r')
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.report.json.load')
@patch('dovetail.report.os.path')
def test_shell_crawler_crawl(self, mock_path, mock_load,
@@ -901,101 +1089,313 @@ class ReportTesting(unittest.TestCase):
mock_load.return_value = 'result'
crawler = dt_report.ShellCrawler()
- result = crawler.crawl(None, file_path)
+ result = crawler.crawl(None, [file_path])
mock_path.exists.assert_called_once_with(file_path)
mock_open.assert_called_once_with(file_path, 'r')
mock_load.assert_called_once_with(file_obj)
- self.assertEquals('result', result)
+ self.assertEqual('result', result)
+
+ @patch('dovetail.report.dt_logger')
+ def test_onapvtp_crawler_create_log(self, mock_logger):
+ getlogger_obj = Mock()
+ logger_obj = Mock()
+ logger_obj.getLogger.return_value = getlogger_obj
+ mock_logger.Logger.return_value = logger_obj
+
+ dt_report.OnapVtpCrawler.create_log()
+
+ self.assertEqual(getlogger_obj, dt_report.OnapVtpCrawler.logger)
@patch('dovetail.report.dt_logger')
- def test_vnftest_crawler_create_log(self, mock_logger):
+ def test_onapvvp_crawler_create_log(self, mock_logger):
getlogger_obj = Mock()
logger_obj = Mock()
logger_obj.getLogger.return_value = getlogger_obj
mock_logger.Logger.return_value = logger_obj
- dt_report.VnftestCrawler.create_log()
+ dt_report.OnapVvpCrawler.create_log()
- self.assertEquals(getlogger_obj, dt_report.VnftestCrawler.logger)
+ self.assertEqual(getlogger_obj, dt_report.OnapVvpCrawler.logger)
@patch('dovetail.report.os.path')
- def test_vnftest_crawler_crawl_not_exists(self, mock_path):
+ def test_onapvtp_crawler_crawl_not_exists(self, mock_path):
logger_obj = Mock()
- dt_report.VnftestCrawler.logger = logger_obj
+ dt_report.OnapVtpCrawler.logger = logger_obj
mock_path.exists.return_value = False
file_path = 'file_path'
- crawler = dt_report.VnftestCrawler()
- result = crawler.crawl(None, file_path)
+ crawler = dt_report.OnapVtpCrawler()
+ result = crawler.crawl(None, [file_path])
mock_path.exists.assert_called_once_with(file_path)
logger_obj.error.assert_called_once_with(
'Result file not found: {}'.format(file_path))
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
- @patch('__builtin__.open')
+ @patch('dovetail.report.os.path')
+ def test_onapvvp_crawler_crawl_not_exists(self, mock_path):
+ logger_obj = Mock()
+ dt_report.OnapVvpCrawler.logger = logger_obj
+ mock_path.exists.return_value = False
+ file_path = 'file_path'
+
+ crawler = dt_report.OnapVvpCrawler()
+ result = crawler.crawl(None, [file_path])
+
+ mock_path.exists.assert_called_once_with(file_path)
+ logger_obj.error.assert_called_once_with(
+ 'Result file not found: {}'.format(file_path))
+ self.assertEqual(None, result)
+
+ @patch('builtins.open')
+ @patch('dovetail.report.os.path')
+ def test_onapvvp_crawler_crawl_pass(self, mock_path,
+ mock_open):
+ dt_report.OnapVvpCrawler.logger = Mock()
+ mock_path.exists.return_value = True
+ file_path = 'file_path'
+ testcase_obj = Mock()
+ file_obj = Mock()
+ file_obj.read.return_value = json.dumps({'outcome': 'PASS'})
+ mock_open.return_value.__enter__.return_value = file_obj
+
+ crawler = dt_report.OnapVvpCrawler()
+ result = crawler.crawl(testcase_obj, [file_path])
+ expected = {'criteria': 'PASS'}
+
+ mock_path.exists.assert_called_once_with(file_path)
+ mock_open.assert_called_once_with(file_path, 'r')
+ file_obj.read.assert_called_once_with()
+ testcase_obj.set_results.assert_called_once_with(expected)
+ self.assertEqual(expected, result)
+
+ @patch('builtins.open')
+ @patch('dovetail.report.os.path')
+ def test_onapvvp_crawler_crawl_fail(self, mock_path,
+ mock_open):
+ dt_report.OnapVvpCrawler.logger = Mock()
+ mock_path.exists.return_value = True
+ file_path = 'file_path'
+ testcase_obj = Mock()
+ file_obj = Mock()
+ file_obj.read.return_value = json.dumps({'outcome': 'FAIL'})
+ mock_open.return_value.__enter__.return_value = file_obj
+
+ crawler = dt_report.OnapVvpCrawler()
+ result = crawler.crawl(testcase_obj, [file_path])
+ expected = {'criteria': 'FAIL'}
+
+ mock_path.exists.assert_called_once_with(file_path)
+ mock_open.assert_called_once_with(file_path, 'r')
+ file_obj.read.assert_called_once_with()
+ testcase_obj.set_results.assert_called_once_with(expected)
+ self.assertEqual(expected, result)
+
+ @patch('builtins.open')
+ @patch('dovetail.report.os.path')
+ def test_onapvvp_crawler_crawl_value_exception(self, mock_path,
+ mock_open):
+ dt_report.OnapVvpCrawler.logger = Mock()
+ mock_path.exists.return_value = True
+ file_path = 'file_path'
+ testcase_obj = Mock()
+ file_obj = Mock()
+ file_obj.read.return_value = 'error'
+ mock_open.return_value.__enter__.return_value = file_obj
+
+ crawler = dt_report.OnapVvpCrawler()
+ result = crawler.crawl(testcase_obj, [file_path])
+ expected = {'criteria': 'FAIL'}
+
+ mock_path.exists.assert_called_once_with(file_path)
+ mock_open.assert_called_once_with(file_path, 'r')
+ file_obj.read.assert_called_once_with()
+ dt_report.OnapVvpCrawler.logger.exception.assert_called_once_with(
+ 'Result file has invalid format')
+ testcase_obj.set_results.assert_called_once_with(expected)
+ self.assertEqual(expected, result)
+
+ @patch('builtins.open')
+ @patch('dovetail.report.os.path')
+ def test_onapvvp_crawler_crawl_key_exception(self, mock_path,
+ mock_open):
+ dt_report.OnapVvpCrawler.logger = Mock()
+ mock_path.exists.return_value = True
+ file_path = 'file_path'
+ testcase_obj = Mock()
+ file_obj = Mock()
+ file_obj.read.return_value = json.dumps({'key': 'value'})
+ mock_open.return_value.__enter__.return_value = file_obj
+
+ crawler = dt_report.OnapVvpCrawler()
+ result = crawler.crawl(testcase_obj, [file_path])
+ expected = {'criteria': 'FAIL'}
+
+ mock_path.exists.assert_called_once_with(file_path)
+ mock_open.assert_called_once_with(file_path, 'r')
+ file_obj.read.assert_called_once_with()
+ dt_report.OnapVvpCrawler.logger.exception.assert_called_once_with(
+ "Outcome field not found 'outcome'")
+ testcase_obj.set_results.assert_called_once_with(expected)
+ self.assertEqual(expected, result)
+
+ @patch('builtins.open')
@patch('dovetail.report.json.loads')
@patch('dovetail.report.os.path')
- def test_vnftest_crawler_crawl(self, mock_path, mock_loads,
- mock_open):
- dt_report.VnftestCrawler.logger = Mock()
+ def test_onapvtp_crawler_crawl_pass(self, mock_path, mock_loads,
+ mock_open):
+ dt_report.OnapVtpCrawler.logger = Mock()
mock_path.exists.return_value = True
file_path = 'file_path'
testcase_obj = Mock()
file_obj = Mock()
mock_open.return_value.__enter__.return_value = [file_obj]
data_dict = {
- 'result': {
- 'criteria': 'PASS'
- }
+ 'results': [
+ {"property": "results", "value": "{value=SUCCESS}"},
+ {"property": "build_tag", "value": "test-name"},
+ {"property": "criteria", "value": "PASS"}
+ ]
}
mock_loads.return_value = data_dict
- crawler = dt_report.VnftestCrawler()
- result = crawler.crawl(testcase_obj, file_path)
+ crawler = dt_report.OnapVtpCrawler()
+ result = crawler.crawl(testcase_obj, [file_path])
expected = {'criteria': 'PASS'}
mock_path.exists.assert_called_once_with(file_path)
mock_open.assert_called_once_with(file_path, 'r')
mock_loads.assert_called_once_with(file_obj)
- self.assertEquals(expected, result)
+ testcase_obj.set_results.assert_called_once_with(expected)
+ self.assertEqual(expected, result)
+
+ @patch('builtins.open')
+ @patch('dovetail.report.json.loads')
+ @patch('dovetail.report.os.path')
+ def test_onapvtp_crawler_crawl_fail(self, mock_path, mock_loads,
+ mock_open):
+ dt_report.OnapVtpCrawler.logger = Mock()
+ mock_path.exists.return_value = True
+ file_path = 'file_path'
+ testcase_obj = Mock()
+ file_obj = Mock()
+ mock_open.return_value.__enter__.return_value = [file_obj]
+ data_dict = {
+ 'results': [
+ {"property": "results", "value": "{value=file doesn't exist}"},
+ {"property": "build_tag", "value": "test-name"},
+ {"property": "criteria", "value": "FAILED"}
+ ]
+ }
+ mock_loads.return_value = data_dict
+
+ crawler = dt_report.OnapVtpCrawler()
+ result = crawler.crawl(testcase_obj, [file_path])
+ expected = {'criteria': 'FAIL'}
+
+ mock_path.exists.assert_called_once_with(file_path)
+ mock_open.assert_called_once_with(file_path, 'r')
+ mock_loads.assert_called_once_with(file_obj)
+ testcase_obj.set_results.assert_called_once_with(expected)
+ self.assertEqual(expected, result)
+
+ @patch('builtins.open')
+ @patch('dovetail.report.json.loads')
+ @patch('dovetail.report.os.path')
+ def test_onapvtp_crawler_crawl_no_criteria(self, mock_path, mock_loads,
+ mock_open):
+ dt_report.OnapVtpCrawler.logger = Mock()
+ mock_path.exists.return_value = True
+ file_path = 'file_path'
+ testcase_obj = Mock()
+ file_obj = Mock()
+ mock_open.return_value.__enter__.return_value = [file_obj]
+ data_dict = {
+ 'results': [
+ {"property": "results", "value": "{value=file doesn't exist}"},
+ {"property": "build_tag", "value": "test-name"},
+ {"property": "error_criteria", "value": "FAILED"}
+ ]
+ }
+ mock_loads.return_value = data_dict
+
+ crawler = dt_report.OnapVtpCrawler()
+ result = crawler.crawl(testcase_obj, [file_path])
+ expected = {'criteria': 'FAIL'}
+
+ mock_path.exists.assert_called_once_with(file_path)
+ mock_open.assert_called_once_with(file_path, 'r')
+ mock_loads.assert_called_once_with(file_obj)
+ dt_report.OnapVtpCrawler.logger.error.assert_called_once_with(
+ 'There is no property criteria.')
+ testcase_obj.set_results.assert_called_once_with(expected)
+ self.assertEqual(expected, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.report.json.loads')
@patch('dovetail.report.os.path')
- def test_vnftest_crawler_crawl_key_error(self, mock_path, mock_loads,
+ def test_onapvtp_crawler_crawl_exception(self, mock_path, mock_loads,
mock_open):
- logger_obj = Mock()
- dt_report.VnftestCrawler.logger = logger_obj
+ dt_report.OnapVtpCrawler.logger = Mock()
mock_path.exists.return_value = True
file_path = 'file_path'
testcase_obj = Mock()
file_obj = Mock()
mock_open.return_value.__enter__.return_value = [file_obj]
+ data_dict = {
+ 'error_results': [
+ {"property": "results", "value": "{value=file doesn't exist}"},
+ {"property": "build_tag", "value": "test-name"},
+ {"property": "error_criteria", "value": "FAILED"}
+ ]
+ }
+ mock_loads.return_value = data_dict
- mock_loads.return_value = {}
+ crawler = dt_report.OnapVtpCrawler()
+ result = crawler.crawl(testcase_obj, [file_path])
+ expected = {'criteria': 'FAIL'}
- crawler = dt_report.VnftestCrawler()
- result = crawler.crawl(testcase_obj, file_path)
+ mock_path.exists.assert_called_once_with(file_path)
+ mock_open.assert_called_once_with(file_path, 'r')
+ mock_loads.assert_called_once_with(file_obj)
+ dt_report.OnapVtpCrawler.logger.exception.assert_called_once_with(
+ "Pass flag not found 'results'")
+ testcase_obj.set_results.assert_called_once_with(expected)
+ self.assertEqual(expected, result)
+
+ @patch('builtins.open')
+ @patch('dovetail.report.json.loads')
+ @patch('dovetail.report.os.path')
+ def test_onapvtp_crawler_crawl_value_error(self, mock_path, mock_loads,
+ mock_open):
+ dt_report.OnapVtpCrawler.logger = Mock()
+ mock_path.exists.return_value = True
+ file_path = 'file_path'
+ testcase_obj = Mock()
+ file_obj = Mock()
+ mock_open.return_value.__enter__.return_value = [file_obj]
+ mock_loads.side_effect = ValueError('No JSON object could be decoded')
+
+ crawler = dt_report.OnapVtpCrawler()
+ result = crawler.crawl(testcase_obj, [file_path])
expected = {'criteria': 'FAIL'}
mock_path.exists.assert_called_once_with(file_path)
mock_open.assert_called_once_with(file_path, 'r')
mock_loads.assert_called_once_with(file_obj)
- logger_obj.exception.assert_called_once_with(
- "Pass flag not found 'result'")
- self.assertEquals(expected, result)
+ testcase_obj.set_results.assert_called_once_with(expected)
+ self.assertEqual(expected, result)
def test_crawler_factory(self):
result = dt_report.CrawlerFactory.create('shell')
- self.assertEquals(dt_report.ShellCrawler, result.__class__)
+ self.assertEqual(dt_report.ShellCrawler, result.__class__)
def test_crawler_factory_none(self):
- self.assertEquals(None, dt_report.CrawlerFactory.create('other'))
+ self.assertEqual(None, dt_report.CrawlerFactory.create('other'))
def test_result_checker(self):
- self.assertEquals('PASS', dt_report.ResultChecker.check())
+ self.assertEqual('PASS', dt_report.ResultChecker.check())
@patch('dovetail.report.dt_logger')
def test_functest_checker_create_log(self, mock_logger):
@@ -1006,26 +1406,40 @@ class ReportTesting(unittest.TestCase):
dt_report.FunctestChecker.create_log()
- self.assertEquals(getlogger_obj, dt_report.FunctestChecker.logger)
+ self.assertEqual(getlogger_obj, dt_report.FunctestChecker.logger)
def test_functest_get_sub_testcase_no_result(self):
- self.assertEquals(
+ self.assertEqual(
False, dt_report.FunctestChecker.get_sub_testcase(None, None))
def test_functest_get_sub_testcase_simple_match(self):
- self.assertEquals(
+ self.assertEqual(
True,
dt_report.FunctestChecker.get_sub_testcase('subt_a',
['subt_b', 'subt_a']))
def test_functest_get_sub_testcase_extended_match(self):
- self.assertEquals(
+ self.assertEqual(
True,
dt_report.FunctestChecker.get_sub_testcase('subt_a',
['subt_b', 'subt_a+']))
+ def test_functest_get_sub_testcase_class_match(self):
+ self.assertEqual(
+ True,
+ dt_report.FunctestChecker.get_sub_testcase('subclass_a.subt_a',
+ ['subclass_a',
+ 'subclass_b.subt_b']))
+
+ def test_functest_get_sub_testcase_class_no_match(self):
+ self.assertEqual(
+ False,
+ dt_report.FunctestChecker.get_sub_testcase('subclass_a.subt_a',
+ ['subclass_a.subt_a_a',
+ 'subclass_b.subt_b']))
+
def test_functest_get_sub_no_match(self):
- self.assertEquals(
+ self.assertEqual(
False,
dt_report.FunctestChecker.get_sub_testcase('subt_a',
['subt_b']))
@@ -1071,16 +1485,6 @@ class ReportTesting(unittest.TestCase):
checker.check(testcase_obj, db_result)
testcase_obj.sub_testcase.assert_called_once_with()
- logger_obj.debug.assert_has_calls([
- call('Check sub_testcase: subt_a'),
- call('Check sub_testcase: subt_b'),
- call('Check sub_testcase: subt_c'),
- call('Check sub_testcase: subt_d')])
- testcase_obj.sub_testcase_passed.assert_has_calls([
- call('subt_a', 'PASS'),
- call('subt_b', 'SKIP'),
- call('subt_c', 'FAIL'),
- call('subt_d', 'FAIL')])
testcase_obj.passed.assert_has_calls([call('PASS'), call('FAIL')])
@patch('dovetail.report.dt_logger')
@@ -1092,7 +1496,7 @@ class ReportTesting(unittest.TestCase):
dt_report.FunctestK8sChecker.create_log()
- self.assertEquals(getlogger_obj, dt_report.FunctestK8sChecker.logger)
+ self.assertEqual(getlogger_obj, dt_report.FunctestK8sChecker.logger)
@patch('dovetail.report.dt_logger')
def test_yardstick_checker_create_log(self, mock_logger):
@@ -1103,7 +1507,7 @@ class ReportTesting(unittest.TestCase):
dt_report.YardstickChecker.create_log()
- self.assertEquals(getlogger_obj, dt_report.YardstickChecker.logger)
+ self.assertEqual(getlogger_obj, dt_report.YardstickChecker.logger)
def test_yardstick_check_result(self):
testcase_obj = Mock()
@@ -1130,7 +1534,7 @@ class ReportTesting(unittest.TestCase):
dt_report.BottlenecksChecker.create_log()
- self.assertEquals(getlogger_obj, dt_report.BottlenecksChecker.logger)
+ self.assertEqual(getlogger_obj, dt_report.BottlenecksChecker.logger)
def test_bottlenecks_check_result(self):
testcase_obj = Mock()
@@ -1164,36 +1568,104 @@ class ReportTesting(unittest.TestCase):
testcase_obj.passed.assert_called_once_with(False)
+ def test_checker_factory(self):
+ result = dt_report.CheckerFactory.create('shell')
+ self.assertEqual(dt_report.ShellChecker, result.__class__)
+
+ def test_checker_factory_none(self):
+ self.assertEqual(None, dt_report.CheckerFactory.create('other'))
+
@patch('dovetail.report.dt_logger')
- def test_vnftest_checker_create_log(self, mock_logger):
+ def test_onapvtp_checker_create_log(self, mock_logger):
getlogger_obj = Mock()
logger_obj = Mock()
logger_obj.getLogger.return_value = getlogger_obj
mock_logger.Logger.return_value = logger_obj
- dt_report.VnftestChecker.create_log()
+ dt_report.OnapVtpChecker.create_log()
+
+ self.assertEqual(getlogger_obj, dt_report.OnapVtpChecker.logger)
+
+ def test_onapvtp_check_result_none(self):
+ testcase_obj = Mock()
+ result = {}
+
+ dt_report.OnapVtpChecker.check(testcase_obj, result)
- self.assertEquals(getlogger_obj, dt_report.VnftestChecker.logger)
+ testcase_obj.passed.assert_called_once_with('FAIL')
- def test_vnftest_check_result(self):
+ def test_onapvtp_check_result(self):
testcase_obj = Mock()
result = {'criteria': 'PASS'}
- dt_report.VnftestChecker.check(testcase_obj, result)
+ dt_report.OnapVtpChecker.check(testcase_obj, result)
testcase_obj.passed.assert_called_once_with('PASS')
- def test_vnftest_check_result_none(self):
+ @patch('dovetail.report.dt_logger')
+ def test_onapvvp_checker_create_log(self, mock_logger):
+ getlogger_obj = Mock()
+ logger_obj = Mock()
+ logger_obj.getLogger.return_value = getlogger_obj
+ mock_logger.Logger.return_value = logger_obj
+
+ dt_report.OnapVvpChecker.create_log()
+
+ self.assertEqual(getlogger_obj, dt_report.OnapVvpChecker.logger)
+
+ def test_onapvvp_check_result_none(self):
testcase_obj = Mock()
result = {}
- dt_report.VnftestChecker.check(testcase_obj, result)
+ dt_report.OnapVvpChecker.check(testcase_obj, result)
testcase_obj.passed.assert_called_once_with('FAIL')
- def test_checker_factory(self):
- result = dt_report.CheckerFactory.create('shell')
- self.assertEquals(dt_report.ShellChecker, result.__class__)
+ def test_onapvvp_check_result(self):
+ testcase_obj = Mock()
+ result = {'criteria': 'PASS'}
- def test_checker_factory_none(self):
- self.assertEquals(None, dt_report.CheckerFactory.create('other'))
+ dt_report.OnapVvpChecker.check(testcase_obj, result)
+
+ testcase_obj.passed.assert_called_once_with('PASS')
+
+ @patch('dovetail.report.dt_cfg')
+ @patch('dovetail.report.os.path')
+ @patch('builtins.open')
+ @patch('dovetail.report.os.getenv')
+ def test_get_checksum_tosca(self, mock_env, mock_open, mock_path,
+ mock_config):
+ mock_config.dovetail_config = {
+ 'config_dir': 'config_dir'
+ }
+ mock_env.return_value = 'csar_file'
+ file_obj = Mock()
+ file_obj.read.return_value = 'info'
+ file_obj.__exit__ = Mock()
+ file_obj.__enter__ = Mock()
+ mock_open.return_value = file_obj
+ mock_path.isdir.return_value = False
+ mock_path.isfile.return_value = True
+
+ dt_report.Report.get_checksum('tosca')
+
+ @patch('dovetail.report.dt_cfg')
+ @patch('dovetail.report.os.path')
+ @patch('dovetail.report.os.walk')
+ @patch('builtins.open')
+ @patch('dovetail.report.os.getenv')
+ def test_get_checksum_heat(self, mock_env, mock_open, mock_walk, mock_path,
+ mock_config):
+ mock_config.dovetail_config = {
+ 'config_dir': 'config_dir'
+ }
+ mock_env.return_value = 'heat_templates_archive'
+ file_obj = Mock()
+ file_obj.read.return_value = 'info'
+ file_obj.__exit__ = Mock()
+ file_obj.__enter__ = Mock()
+ mock_open.return_value = file_obj
+ mock_path.isdir.return_value = True
+ mock_walk.return_value = [('root', ['dir'], ['file'])]
+
+ dt_report.Report.get_checksum('heat')
diff --git a/dovetail/tests/unit/test_run.py b/dovetail/tests/unit/test_run.py
index c7fe4d6d..c1e37116 100644
--- a/dovetail/tests/unit/test_run.py
+++ b/dovetail/tests/unit/test_run.py
@@ -46,7 +46,7 @@ class RunTesting(unittest.TestCase):
mock_testsuite.load.assert_called_once_with()
mock_testsuite.get.assert_called_once_with('testsuite')
- self.assertEquals('suite_a', result)
+ self.assertEqual('suite_a', result)
@patch('dovetail.run.dt_report.Report')
def test_run_test_no_list(self, mock_report):
@@ -57,30 +57,45 @@ class RunTesting(unittest.TestCase):
logger.warning.assert_called_once_with(
"No test case will be executed.")
+ @patch('dovetail.run.datetime')
+ @patch('dovetail.run.dt_utils')
@patch('dovetail.run.dt_cfg')
@patch('dovetail.run.dt_report.Report')
@patch('dovetail.run.dt_testcase.Testcase')
@patch('dovetail.run.time')
- def test_run_test(self, mock_time, mock_testcase, mock_report,
- mock_config):
+ @patch('os.getenv')
+ def test_run_test(self, mock_getenv, mock_time, mock_testcase, mock_report,
+ mock_config, mock_utils, mock_datetime):
logger = Mock()
report_obj = Mock()
mock_report.return_value = report_obj
- mock_time.time.side_effect = [42, 84]
+ mock_time.time.side_effect = [42, 43, 83, 84]
+ datetime_obj = Mock()
+ mock_datetime.fromtimestamp.return_value = datetime_obj
+ datetime_obj.strftime.side_effect = ['1969-12-31 19:00:43',
+ '1969-12-31 19:01:23']
testcase_name = 'testcase'
testcase_obj = Mock()
mock_testcase.get.return_value = testcase_obj
mock_config.dovetail_config = {'stop': True}
+ mock_getenv.return_value = 'true'
report_obj.check_tc_result.return_value = {'criteria': 'PASS'}
+ mock_utils.push_results_to_db.return_value = True
dt_run.run_test([testcase_name], True, logger)
- mock_time.time.assert_has_calls([call(), call()])
+ mock_time.time.assert_has_calls([call(), call(), call(), call()])
logger.info.assert_called_once_with(
'>>[testcase]: {}'.format(testcase_name))
mock_testcase.get.assert_called_once_with(testcase_name)
testcase_obj.run.assert_called_once_with()
report_obj.check_tc_result.assert_called_once_with(testcase_obj)
+ mock_utils.push_results_to_db.assert_called_once_with(
+ case_name=testcase_name,
+ start_date='1969-12-31 19:00:43',
+ stop_date='1969-12-31 19:01:23',
+ details={'criteria': 'PASS'},
+ logger=logger)
report_obj.generate.assert_called_once_with([testcase_name], 42)
report_obj.save_logs.assert_called_once_with()
@@ -101,7 +116,8 @@ class RunTesting(unittest.TestCase):
dt_run.run_test([testcase_name], True, logger)
- mock_time.time.assert_called_once_with()
+ mock_time.time.assert_has_calls([call(), call(), call().__float__(),
+ call(), call().__float__()])
logger.info.assert_has_calls([
call('>>[testcase]: {}'.format(testcase_name)),
call('Stop because {} failed'.format(testcase_name))])
@@ -127,7 +143,7 @@ class RunTesting(unittest.TestCase):
dt_run.run_test([testcase_name], True, logger)
- mock_time.time.assert_called_once_with()
+ mock_time.time.assert_has_calls([call(), call(), call()])
logger.info.assert_has_calls([
call('>>[testcase]: {}'.format(testcase_name)),
call('Stop because {} failed'.format(testcase_name))])
@@ -166,14 +182,14 @@ class RunTesting(unittest.TestCase):
result = dt_run.filter_config(input_dict, Mock())
- self.assertEquals(expected_dict, result)
+ self.assertEqual(expected_dict, result)
@patch('dovetail.run.dt_cfg')
def test_filter_config_none(self, mock_config):
mock_config.dovetail_config = {'cli': {}}
result = dt_run.filter_config({}, Mock())
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.run.dt_cfg')
def test_filter_config_keyerror(self, mock_config):
@@ -211,11 +227,9 @@ class RunTesting(unittest.TestCase):
mock_report.Report.create_log.assert_called_once_with()
mock_report.FunctestCrawler.create_log.assert_called_once_with()
mock_report.YardstickCrawler.create_log.assert_called_once_with()
- mock_report.VnftestCrawler.create_log.assert_called_once_with()
mock_report.BottlenecksCrawler.create_log.assert_called_once_with()
mock_report.FunctestChecker.create_log.assert_called_once_with()
mock_report.YardstickChecker.create_log.assert_called_once_with()
- mock_report.VnftestChecker.create_log.assert_called_once_with()
mock_report.BottlenecksChecker.create_log.assert_called_once_with()
mock_testcase.Testcase.create_log.assert_called_once_with()
mock_testcase.Testsuite.create_log.assert_called_once_with()
@@ -235,7 +249,7 @@ class RunTesting(unittest.TestCase):
mock_os.path.exists.assert_called_once_with('value')
mock_os.path.isdir.assert_called_once_with('value')
mock_utils.exec_cmd.assert_called_once_with(
- 'sudo rm -rf value/*', exit_on_error=False, exec_msg_on=False)
+ 'rm -rf value/*', exit_on_error=False, exec_msg_on=False)
@patch('dovetail.run.dt_utils')
@patch('dovetail.run.dt_cfg')
@@ -259,7 +273,8 @@ class RunTesting(unittest.TestCase):
dovetail_home = 'dovetail_home'
mock_os.environ = {'DOVETAIL_HOME': dovetail_home}
mock_os.path.join.side_effect = [
- 'result_path', 'images_dir', 'pre_config_path', 'patch_set_path']
+ 'result_path', 'images_dir', 'pre_config_path', 'patch_set_path',
+ 'userconfig_dir']
mock_config.dovetail_config = {}
result = dt_run.get_result_path()
@@ -268,14 +283,16 @@ class RunTesting(unittest.TestCase):
call(dovetail_home, 'results'),
call(dovetail_home, 'images'),
call(dovetail_home, 'pre_config'),
- call(dovetail_home, 'patches')])
+ call(dovetail_home, 'patches'),
+ call(dovetail_home, 'userconfig')])
expected_dict = {
'result_dir': 'result_path',
'images_dir': 'images_dir',
'config_dir': 'pre_config_path',
- 'patch_dir': 'patch_set_path'}
- self.assertEquals(expected_dict, mock_config.dovetail_config)
- self.assertEquals(dovetail_home, result)
+ 'patch_dir': 'patch_set_path',
+ 'userconfig_dir': 'userconfig_dir'}
+ self.assertEqual(expected_dict, mock_config.dovetail_config)
+ self.assertEqual(dovetail_home, result)
@patch('dovetail.run.os')
def test_get_result_path_exception(self, mock_os):
@@ -283,7 +300,7 @@ class RunTesting(unittest.TestCase):
result = dt_run.get_result_path()
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.run.constants')
@patch('dovetail.run.dt_cfg')
@@ -291,7 +308,7 @@ class RunTesting(unittest.TestCase):
@patch('dovetail.run.os')
def test_copy_userconfig_files(self, mock_os, mock_utils, mock_config,
mock_constants):
- mock_config.dovetail_config = {'config_dir': 'value'}
+ mock_config.dovetail_config = {'userconfig_dir': 'value'}
mock_os.path.isdir.return_value = False
mock_constants.USERCONF_PATH = 'value'
logger = Mock()
@@ -301,7 +318,7 @@ class RunTesting(unittest.TestCase):
mock_os.path.isdir.assert_called_once_with('value')
mock_os.makedirs.assert_called_once_with('value')
mock_utils.exec_cmd.assert_called_once_with(
- 'sudo cp -r value/* value', logger, exit_on_error=False)
+ 'cp -r value/* value', logger, exit_on_error=False)
@patch('dovetail.run.constants')
@patch('dovetail.run.dt_cfg')
@@ -319,7 +336,7 @@ class RunTesting(unittest.TestCase):
mock_os.path.isdir.assert_called_once_with('value')
mock_os.makedirs.assert_called_once_with('value')
mock_utils.exec_cmd.assert_called_once_with(
- 'sudo cp -a -r value/* value', logger, exit_on_error=False)
+ 'cp -a -r value/* value', logger, exit_on_error=False)
@patch('dovetail.run.os')
def test_update_deploy_scenario(self, mock_os):
@@ -329,7 +346,7 @@ class RunTesting(unittest.TestCase):
dt_run.update_deploy_scenario(logger, deploy_scenario='a')
logger.info.assert_called_once_with('DEPLOY_SCENARIO : %s', 'a')
- self.assertEquals({'DEPLOY_SCENARIO': 'a'}, mock_os.environ)
+ self.assertEqual({'DEPLOY_SCENARIO': 'a'}, mock_os.environ)
@patch('dovetail.run.dt_cfg')
@patch.object(dt_run, 'filter_config')
@@ -355,7 +372,7 @@ class RunTesting(unittest.TestCase):
logger.warning.assert_called_once_with(
'Strict API response validation DISABLED.')
- self.assertEquals(expected_dict, mock_config.dovetail_config)
+ self.assertEqual(expected_dict, mock_config.dovetail_config)
@patch('dovetail.run.dt_cfg')
@patch.object(dt_run, 'filter_config')
@@ -377,7 +394,7 @@ class RunTesting(unittest.TestCase):
'no_api_validation': False
}
- self.assertEquals(expected_dict, mock_config.dovetail_config)
+ self.assertEqual(expected_dict, mock_config.dovetail_config)
def test_check_testcase_list_not_in_list(self):
logger = Mock()
@@ -386,7 +403,7 @@ class RunTesting(unittest.TestCase):
logger.error.assert_called_once_with(
'Test case testcase is not defined.')
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
def test_check_testcase_list_none(self):
logger = Mock()
@@ -394,7 +411,7 @@ class RunTesting(unittest.TestCase):
logger.error.assert_called_once_with(
'There is no test case to be executed.')
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.run.dt_testcase.Testcase')
def test_check_testcase_list(self, mock_testcase):
@@ -403,7 +420,7 @@ class RunTesting(unittest.TestCase):
result = dt_run.check_testcase_list(testcase_list)
- self.assertEquals(testcase_list, result)
+ self.assertEqual(testcase_list, result)
@patch('dovetail.run.dt_testcase.Testcase')
@patch.object(dt_run, 'check_testcase_list')
@@ -415,7 +432,7 @@ class RunTesting(unittest.TestCase):
mock_check.assert_called_once_with(testcase_list, None)
mock_testcase.load.assert_called_once_with()
- self.assertEquals(testcase_list, result)
+ self.assertEqual(testcase_list, result)
@patch('dovetail.run.dt_cfg')
@patch('dovetail.run.dt_testcase.Testcase')
@@ -439,7 +456,7 @@ class RunTesting(unittest.TestCase):
mock_testcase.get_testcases_for_testsuite.assert_called_once_with(
'testsuite_yaml', 'area')
mock_check.assert_called_once_with(testcase_list, None)
- self.assertEquals(testcase_list, result)
+ self.assertEqual(testcase_list, result)
@patch('dovetail.run.dt_cfg')
@patch('dovetail.run.dt_testcase.Testcase')
@@ -455,7 +472,7 @@ class RunTesting(unittest.TestCase):
mock_testcase.check_testarea.assert_called_once_with('area')
logger.error.assert_called_once_with(
'Test suite suite is not defined.')
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.run.dt_cfg')
@patch('dovetail.run.dt_testcase.Testcase')
@@ -471,9 +488,11 @@ class RunTesting(unittest.TestCase):
mock_testcase.check_testarea.assert_called_once_with('area')
logger.error.assert_called_once_with(
'Test area area is not defined.')
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
+ @patch('builtins.open')
@patch('dovetail.run.os')
+ @patch('dovetail.run.json')
@patch('dovetail.run.uuid')
@patch('dovetail.run.dt_logger')
@patch('dovetail.run.dt_cfg')
@@ -490,37 +509,44 @@ class RunTesting(unittest.TestCase):
def test_main(self, mock_create_logs, mock_run, mock_get_list,
mock_copy_patch, mock_copy_userconf, mock_update, mock_parse,
mock_clean, mock_get_result, mock_utils, mock_config,
- mock_logger, mock_uuid, mock_os):
+ mock_logger, mock_uuid, mock_json, mock_os, mock_open):
mock_config.dovetail_config = {}
- mock_os.environ = {}
+ mock_os.environ = {'DOVETAIL_HOME': 'dovetail_home'}
logger_obj = Mock()
logger_temp_obj = Mock()
+ file_obj = Mock()
logger_temp_obj.getLogger.return_value = logger_obj
mock_logger.Logger.return_value = logger_temp_obj
+ mock_open.return_value.__enter__.return_value = file_obj
+ mock_json.dumps.return_value = 'results text'
mock_uuid.uuid1.return_value = 42
mock_get_result.return_value = True
testcase_list = ['testcase']
mock_get_list.return_value = testcase_list
kwargs_dict = {
'debug': True,
+ 'opnfv_ci': True,
'report': True,
'testsuite': 'testsuite',
- 'docker_tag': '2.0.0'
+ 'docker_tag': '2.0.0',
+ 'no_api_validation': False
}
with self.assertRaises(SystemExit) as cm:
- dt_run.main([
- '--testsuite=testsuite', '--debug', '--report', '2.0.0'])
+ dt_run.main(['--testsuite=testsuite', '--debug', '--report',
+ '2.0.0', '--opnfv-ci'])
expected = cm.exception
logger_temp_obj.getLogger.assert_called_once_with()
mock_logger.Logger.assert_called_once_with('run')
mock_uuid.uuid1.assert_called_once_with()
- self.assertEquals({'build_tag': 'daily-master-42'},
- mock_config.dovetail_config)
+ self.assertEqual({'build_tag': 'daily-master-42'},
+ mock_config.dovetail_config)
mock_get_result.assert_called_once_with()
mock_clean.assert_called_once_with()
- self.assertEquals({'DEBUG': 'true'}, mock_os.environ)
+ self.assertEqual({'DOVETAIL_HOME': 'dovetail_home', 'DEBUG': 'true',
+ 'OPNFV_CI': 'true', 'validation': 'enabled'},
+ mock_os.environ)
mock_create_logs.assert_called_once_with()
logger_obj.info.assert_has_calls([
call('================================================'),
@@ -535,7 +561,7 @@ class RunTesting(unittest.TestCase):
mock_get_list.assert_called_once_with(logger_obj, **kwargs_dict)
mock_run.assert_called_once_with(
testcase_list, kwargs_dict['report'], logger_obj)
- self.assertEquals(expected.code, 0)
+ self.assertEqual(expected.code, 0)
@patch('dovetail.run.uuid')
@patch('dovetail.run.dt_cfg')
@@ -551,11 +577,13 @@ class RunTesting(unittest.TestCase):
expected = cm.exception
mock_uuid.uuid1.assert_called_once_with()
- self.assertEquals({'build_tag': 'daily-master-42'},
- mock_config.dovetail_config)
+ self.assertEqual({'build_tag': 'daily-master-42'},
+ mock_config.dovetail_config)
mock_get_result.assert_called_once_with()
- self.assertEquals(expected.code, 0)
+ self.assertEqual(expected.code, 0)
+ @patch('builtins.open')
+ @patch('dovetail.run.json')
@patch('dovetail.run.os')
@patch('dovetail.run.uuid')
@patch('dovetail.run.dt_logger')
@@ -574,37 +602,45 @@ class RunTesting(unittest.TestCase):
mock_get_list, mock_copy_patch,
mock_copy_userconf, mock_update, mock_clean,
mock_get_result, mock_utils, mock_config,
- mock_logger, mock_uuid, mock_os):
+ mock_logger, mock_uuid, mock_os, mock_json,
+ mock_open):
mock_config.dovetail_config = {}
- mock_os.environ = {}
+ mock_os.environ = {'DOVETAIL_HOME': 'dovetail_home'}
logger_obj = Mock()
logger_temp_obj = Mock()
+ file_obj = Mock()
logger_temp_obj.getLogger.return_value = logger_obj
mock_logger.Logger.return_value = logger_temp_obj
+ mock_open.return_value.__enter__.return_value = file_obj
+ mock_json.dumps.return_value = 'results text'
mock_uuid.uuid1.return_value = 42
mock_get_result.return_value = True
mock_get_list.return_value = None
kwargs_dict = {
'debug': True,
+ 'opnfv_ci': False,
'report': True,
'testsuite': 'testsuite',
- 'docker_tag': '2.0.0'
+ 'docker_tag': '2.0.0',
+ 'no_api_validation': False
}
with self.assertRaises(SystemExit) as cm:
dt_run.main([
'--testsuite=testsuite', '--debug', '--report', '2.0.0'])
expected = cm.exception
- self.assertEquals(expected.code, 2)
+ self.assertEqual(expected.code, 2)
logger_temp_obj.getLogger.assert_called_once_with()
mock_logger.Logger.assert_called_once_with('run')
mock_uuid.uuid1.assert_called_once_with()
- self.assertEquals({'build_tag': 'daily-master-42'},
- mock_config.dovetail_config)
+ self.assertEqual({'build_tag': 'daily-master-42'},
+ mock_config.dovetail_config)
mock_get_result.assert_called_once_with()
mock_clean.assert_called_once_with()
- self.assertEquals({'DEBUG': 'true'}, mock_os.environ)
+ self.assertEqual({'DOVETAIL_HOME': 'dovetail_home', 'DEBUG': 'true',
+ 'OPNFV_CI': 'false', 'validation': 'enabled'},
+ mock_os.environ)
mock_create_logs.assert_called_once_with()
logger_obj.info.assert_has_calls([
call('================================================'),
diff --git a/dovetail/tests/unit/test_test_runner.py b/dovetail/tests/unit/test_test_runner.py
index 08dbde69..232de7b1 100644
--- a/dovetail/tests/unit/test_test_runner.py
+++ b/dovetail/tests/unit/test_test_runner.py
@@ -43,65 +43,6 @@ class TestRunnerTesting(unittest.TestCase):
@patch('dovetail.test_runner.dt_utils')
@patch('dovetail.test_runner.dt_cfg')
- def test_pre_copy_no_container(self, mock_config, mock_utils):
- t_runner.FunctestRunner.create_log()
- mock_config.dovetail_config = {'result_dir': 'result_dir'}
- docker_runner = t_runner.FunctestRunner(self.testcase)
-
- result = docker_runner.pre_copy(
- container=None, dest_path=None,
- src_file=None, exist_file=None)
-
- docker_runner.logger.error.assert_called_once_with(
- 'Container instance is None.')
- self.assertEquals(None, result)
-
- @patch('dovetail.test_runner.dt_utils')
- @patch('dovetail.test_runner.dt_cfg')
- def test_pre_copy_no_dest_path(self, mock_config, mock_utils):
- t_runner.FunctestRunner.create_log()
- mock_config.dovetail_config = {'result_dir': 'result_dir'}
- docker_runner = t_runner.FunctestRunner(self.testcase)
-
- result = docker_runner.pre_copy(
- container='container', dest_path=None,
- src_file=None, exist_file=None)
-
- docker_runner.logger.error.assert_called_once_with(
- 'There has no dest_path in {} config file.'.format(
- self.testcase_name))
- self.assertEquals(None, result)
-
- @patch('dovetail.test_runner.dt_cfg')
- @patch('dovetail.test_runner.os.path')
- def test_pre_copy(self, mock_path, mock_config):
- t_runner.FunctestRunner.create_log()
- docker_runner = t_runner.FunctestRunner(self.testcase)
- mock_config.dovetail_config = {
- 'functest': {
- 'result': {
- 'dir': 'result_dir'
- },
- 'config': {
- 'dir': 'config_dir'
- }
- }
- }
- container_obj = Mock()
- mock_path.join.return_value = 'join'
-
- result = docker_runner.pre_copy(
- container=container_obj, dest_path='dest_path',
- src_file='src_file', exist_file='exist_file')
-
- mock_path.join.assert_has_calls([
- call('result_dir', 'src_file'),
- call('config_dir', 'pre_config', 'exist_file')])
- container_obj.copy_file.assert_called_once_with('join', 'dest_path')
- self.assertEquals('dest_path', result)
-
- @patch('dovetail.test_runner.dt_utils')
- @patch('dovetail.test_runner.dt_cfg')
@patch('dovetail.test_runner.Container')
def test_run_offline_not_exist(self, mock_container, mock_config,
mock_utils):
@@ -166,41 +107,8 @@ class TestRunnerTesting(unittest.TestCase):
docker_img_obj = Mock()
container_obj.get_docker_image.return_value = docker_img_obj
container_obj.pull_image.return_value = True
- container_obj.create.return_value = False
- mock_container.return_value = container_obj
-
- docker_runner.run()
-
- mock_container.assert_called_once_with(self.testcase)
- container_obj.get_docker_image.assert_called_once_with()
- container_obj.pull_image.assert_called_once_with(docker_img_obj)
- container_obj.create.assert_called_once_with(docker_img_obj)
- docker_runner.logger.error.assert_called_once_with(
- 'Failed to create container.')
-
- @patch('dovetail.test_runner.dt_cfg')
- @patch('dovetail.test_runner.Container')
- @patch.object(t_runner.DockerRunner, 'pre_copy')
- def test_run__not_offline_src_file_no_precopy(self, mock_precopy,
- mock_container, mock_config):
- t_runner.VnftestRunner.create_log()
- docker_runner = t_runner.VnftestRunner(self.testcase)
- mock_config.dovetail_config = {
- 'offline': False
- }
- container_obj = Mock()
- docker_img_obj = Mock()
- container_obj.get_docker_image.return_value = docker_img_obj
- container_obj.pull_image.return_value = True
- container_id = '12345'
- container_obj.create.return_value = container_id
+ container_obj.create.return_value = [None, 'error']
mock_container.return_value = container_obj
- dest_path = 'dest_path'
- src_file_name = 'src_file'
- exist_file_name = 'exist_src_file'
- self.testcase.pre_copy_path.side_effect = [
- dest_path, src_file_name, exist_file_name]
- mock_precopy.return_value = False
docker_runner.run()
@@ -208,20 +116,13 @@ class TestRunnerTesting(unittest.TestCase):
container_obj.get_docker_image.assert_called_once_with()
container_obj.pull_image.assert_called_once_with(docker_img_obj)
container_obj.create.assert_called_once_with(docker_img_obj)
- docker_runner.logger.debug.assert_called_with(
- 'container id: {}'.format(container_id))
- self.testcase.pre_copy_path.assert_has_calls([
- call(dest_path),
- call(src_file_name),
- call(exist_file_name)])
- mock_precopy.assert_called_once_with(
- container_obj, dest_path, src_file_name, exist_file_name)
+ docker_runner.logger.error.assert_has_calls([
+ call('Failed to create container.'), call('error')])
@patch('dovetail.test_runner.dt_utils')
@patch('dovetail.test_runner.dt_cfg')
@patch('dovetail.test_runner.Container')
- @patch.object(t_runner.DockerRunner, 'pre_copy')
- def test_run__not_offline_no_prepare(self, mock_precopy, mock_container,
+ def test_run__not_offline_no_prepare(self, mock_container,
mock_config, mock_utils):
t_runner.FunctestRunner.create_log()
mock_config.dovetail_config = {
@@ -236,14 +137,13 @@ class TestRunnerTesting(unittest.TestCase):
container_obj.get_docker_image.return_value = docker_img_obj
container_obj.pull_image.return_value = True
container_id = '12345'
- container_obj.create.return_value = container_id
+ container_msg = 'Successfully to create container.'
+ container_obj.create.return_value = [container_id, container_msg]
mock_container.return_value = container_obj
- self.testcase.pre_copy_path.return_value = None
self.testcase.pre_condition.return_value = ['cmd']
self.testcase.prepare_cmd.return_value = False
self.testcase.post_condition.return_value = ['cmd']
container_obj.exec_cmd.return_value = (1, 'error')
- mock_precopy.return_value = False
docker_runner.run()
@@ -253,10 +153,6 @@ class TestRunnerTesting(unittest.TestCase):
container_obj.create.assert_called_once_with(docker_img_obj)
docker_runner.logger.debug.assert_called_with(
'container id: {}'.format(container_id))
- self.testcase.pre_copy_path.assert_has_calls([
- call('dest_path'),
- call('src_file'),
- call('exist_src_file')])
self.testcase.pre_condition.assert_called_once_with()
container_obj.exec_cmd.assert_has_calls([
call('cmd'), call('cmd')])
@@ -271,8 +167,7 @@ class TestRunnerTesting(unittest.TestCase):
@patch('dovetail.test_runner.dt_utils')
@patch('dovetail.test_runner.dt_cfg')
@patch('dovetail.test_runner.Container')
- @patch.object(t_runner.DockerRunner, 'pre_copy')
- def test_run__not_offline_prepare(self, mock_precopy, mock_container,
+ def test_run__not_offline_prepare(self, mock_container,
mock_config, mock_utils):
t_runner.FunctestRunner.create_log()
mock_config.dovetail_config = {
@@ -286,15 +181,14 @@ class TestRunnerTesting(unittest.TestCase):
container_obj.get_docker_image.return_value = docker_img_obj
container_obj.pull_image.return_value = True
container_id = '12345'
- container_obj.create.return_value = container_id
+ container_msg = 'Successfully to create container.'
+ container_obj.create.return_value = [container_id, container_msg]
mock_container.return_value = container_obj
- self.testcase.pre_copy_path.return_value = None
self.testcase.pre_condition.return_value = ['cmd']
self.testcase.prepare_cmd.return_value = True
self.testcase.post_condition.return_value = ['cmd']
self.testcase.cmds = ['cmd']
container_obj.exec_cmd.return_value = (1, 'error')
- mock_precopy.return_value = False
docker_runner.run()
@@ -304,10 +198,6 @@ class TestRunnerTesting(unittest.TestCase):
container_obj.create.assert_called_once_with(docker_img_obj)
docker_runner.logger.debug.assert_called_with(
'container id: {}'.format(container_id))
- self.testcase.pre_copy_path.assert_has_calls([
- call('dest_path'),
- call('src_file'),
- call('exist_src_file')])
self.testcase.pre_condition.assert_called_once_with()
container_obj.exec_cmd.assert_has_calls([
call('cmd'), call('cmd'), call('cmd')])
@@ -335,7 +225,7 @@ class TestRunnerTesting(unittest.TestCase):
mock_utils.get_value_from_dict.assert_has_calls([
call('report.source_archive_files', self.testcase_dict),
call('report.dest_archive_files', self.testcase_dict)])
- self.assertEquals(True, result)
+ self.assertEqual(True, result)
@patch('dovetail.test_runner.dt_cfg')
@patch('dovetail.test_runner.dt_utils')
@@ -358,7 +248,7 @@ class TestRunnerTesting(unittest.TestCase):
"Can't find corresponding 'result_dest_files' "
"for 'result_source_files' with testcase {}"
.format(self.testcase_name))
- self.assertEquals(False, result)
+ self.assertEqual(False, result)
@patch('dovetail.test_runner.dt_cfg')
@patch('dovetail.test_runner.dt_utils')
@@ -387,7 +277,7 @@ class TestRunnerTesting(unittest.TestCase):
mock_os.path.isfile.assert_has_calls([call('src_file_path')])
docker_runner.logger.error.assert_called_once_with(
"Can't find file {}.".format('src_file_path'))
- self.assertEquals(False, result)
+ self.assertEqual(False, result)
@patch('dovetail.test_runner.dt_cfg')
@patch('dovetail.test_runner.dt_utils')
@@ -416,7 +306,7 @@ class TestRunnerTesting(unittest.TestCase):
mock_os.path.isfile.assert_has_calls([call('src_file_path')])
mock_os.renames.assert_called_once_with(
'src_file_path', 'dest_file_path')
- self.assertEquals(True, result)
+ self.assertEqual(True, result)
@patch('dovetail.test_runner.jinja2')
def test_render(self, mock_jinja):
@@ -429,13 +319,14 @@ class TestRunnerTesting(unittest.TestCase):
mock_jinja.Template.assert_called_once_with('task_template')
template_obj.render.assert_called_with()
- self.assertEquals(render_obj, result)
+ self.assertEqual(render_obj, result)
@patch('dovetail.test_runner.dt_cfg')
@patch('dovetail.test_runner.os')
def test_add_testcase_info(self, mock_os, mock_config):
mock_os.getenv.side_effect = ['os_insecure', 'dovetail_home', 'debug',
- 'os_cacert']
+ 'os_cacert', 'host_url', 'csar_file',
+ 'heat_templates_archive']
mock_os.environ = {'DEPLOY_SCENARIO': 'deploy_scenario'}
mock_config.dovetail_config = {'build_tag': 'build_tag'}
@@ -444,15 +335,19 @@ class TestRunnerTesting(unittest.TestCase):
'testcase': 'testcase_name', 'os_insecure': 'os_insecure',
'deploy_scenario': 'deploy_scenario',
'dovetail_home': 'dovetail_home', 'debug': 'debug',
- 'build_tag': 'build_tag', 'cacert': 'os_cacert'}
+ 'build_tag': 'build_tag', 'cacert': 'os_cacert',
+ 'host_url': 'host_url', 'csar_file': 'csar_file',
+ 'heat_templates_archive': 'heat_templates_archive'
+ }
result = t_runner.FunctestRunner._add_testcase_info(self.testcase)
self.testcase.validate_testcase.assert_called_once_with()
self.testcase.name.assert_called_once_with()
mock_os.getenv.assert_has_calls([
call('OS_INSECURE'), call('DOVETAIL_HOME'), call('DEBUG'),
- call('OS_CACERT')])
- self.assertEquals(expected, result)
+ call('OS_CACERT'), call('HOST_URL'), call('CSAR_FILE'),
+ call('VNF_ARCHIVE_NAME')])
+ self.assertEqual(expected, result)
@patch('dovetail.test_runner.dt_utils')
@patch('dovetail.test_runner.dt_cfg')
@@ -478,7 +373,7 @@ class TestRunnerTesting(unittest.TestCase):
call('conf_path', docker_runner.config_file_name)])
mock_utils.read_plain_file.assert_called_once_with(
'config_file', docker_runner.logger)
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.test_runner.yaml.safe_load')
@patch('dovetail.test_runner.dt_utils')
@@ -513,7 +408,7 @@ class TestRunnerTesting(unittest.TestCase):
mock_add_info.assert_called_once_with(self.testcase)
mock_render.assert_called_once_with(True, config_item='item')
mock_load.assert_called_once_with('full_task')
- self.assertEquals(
+ self.assertEqual(
{'config_dir': 'one',
'pod_file': 'two',
'full_task_yaml': 'full_value',
@@ -553,7 +448,7 @@ class TestRunnerTesting(unittest.TestCase):
mock_add_info.assert_called_once_with(self.testcase)
mock_render.assert_called_once_with(True, config_item='item')
mock_load.assert_called_once_with('full_task')
- self.assertEquals(
+ self.assertEqual(
{'config_dir': 'one',
'pod_file': 'two',
'full_task_yaml': 'full_value',
@@ -599,14 +494,14 @@ class TestRunnerTesting(unittest.TestCase):
"Need key '{}' in {}".format('testcase_name', {'key': 'value'}))
mock_render.assert_called_once_with(True, config_item='item')
mock_load.assert_called_once_with('full_task')
- self.assertEquals(
+ self.assertEqual(
{'config_dir': 'one',
'pod_file': 'two',
'full_task_yaml': 'full_value',
'result_dir': 'three'},
result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.test_runner.json')
@patch('dovetail.test_runner.dt_cfg')
@patch('dovetail.test_runner.dt_utils')
@@ -651,7 +546,7 @@ class TestRunnerTesting(unittest.TestCase):
'pass': 'FAIL'})
file_obj.write.assert_called_once_with(dump_obj)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.test_runner.dt_cfg')
@patch('dovetail.test_runner.dt_utils')
@patch('dovetail.test_runner.os.path')
@@ -692,7 +587,7 @@ class TestRunnerTesting(unittest.TestCase):
result = docker_runner.create(self.testcase)
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.test_runner.constants')
@patch('dovetail.test_runner.dt_utils')
@@ -713,7 +608,7 @@ class TestRunnerTesting(unittest.TestCase):
call('conf_path', docker_runner.config_file_name)])
mock_utils.read_plain_file.assert_has_calls([
call('config_file', docker_runner.logger)])
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.test_runner.yaml.safe_load')
@patch('dovetail.test_runner.constants')
@@ -746,8 +641,72 @@ class TestRunnerTesting(unittest.TestCase):
mock_add_info.assert_has_calls([call(self.testcase)])
mock_render.assert_has_calls([call(True, config_item='item')])
mock_load.assert_has_calls([call('full_task')])
- self.assertEquals(
+ self.assertEqual(
{'config_dir': 'one',
'pod_file': 'two',
'full_task_yaml': 'full_value'},
result)
+
+ @patch('dovetail.test_runner.dt_utils')
+ @patch('dovetail.test_runner.os.path')
+ @patch('dovetail.test_runner.dt_cfg')
+ def test_init_onapvtprunner_no_env_file(self, mock_config, mock_path,
+ mock_utils):
+ t_runner.OnapVtpRunner.create_log()
+ mock_path.join.side_effect = ['env_file']
+ mock_config.dovetail_config = {'config_dir': 'one', 'env_file': 'two'}
+ mock_path.isfile.return_value = False
+
+ docker_runner = t_runner.OnapVtpRunner(self.testcase)
+
+ mock_path.join.assert_has_calls([call('one', 'two')])
+ mock_path.isfile.assert_called_once()
+ docker_runner.logger.error.assert_called_once_with(
+ 'File env_file does not exist.')
+
+ @patch('dovetail.test_runner.dt_utils')
+ @patch('dovetail.test_runner.os.path')
+ @patch('dovetail.test_runner.dt_cfg')
+ def test_init_onapvtprunner(self, mock_config, mock_path, mock_utils):
+ t_runner.OnapVtpRunner.create_log()
+ mock_path.join.side_effect = ['env_file']
+ mock_config.dovetail_config = {'config_dir': 'one', 'env_file': 'two'}
+ mock_path.isfile.return_value = True
+
+ t_runner.OnapVtpRunner(self.testcase)
+
+ mock_path.join.assert_has_calls([call('one', 'two')])
+ mock_path.isfile.assert_called_once()
+ mock_utils.source_env.assert_called_once_with('env_file')
+
+ @patch('dovetail.test_runner.dt_utils')
+ @patch('dovetail.test_runner.os.path')
+ @patch('dovetail.test_runner.dt_cfg')
+ def test_init_onapvvprunner_no_env_file(self, mock_config, mock_path,
+ mock_utils):
+ t_runner.OnapVvpRunner.create_log()
+ mock_path.join.side_effect = ['env_file']
+ mock_config.dovetail_config = {'config_dir': 'one', 'env_file': 'two'}
+ mock_path.isfile.return_value = False
+
+ docker_runner = t_runner.OnapVvpRunner(self.testcase)
+
+ mock_path.join.assert_has_calls([call('one', 'two')])
+ mock_path.isfile.assert_called_once()
+ docker_runner.logger.error.assert_called_once_with(
+ 'File env_file does not exist.')
+
+ @patch('dovetail.test_runner.dt_utils')
+ @patch('dovetail.test_runner.os.path')
+ @patch('dovetail.test_runner.dt_cfg')
+ def test_init_onapvvprunner(self, mock_config, mock_path, mock_utils):
+ t_runner.OnapVvpRunner.create_log()
+ mock_path.join.side_effect = ['env_file']
+ mock_config.dovetail_config = {'config_dir': 'one', 'env_file': 'two'}
+ mock_path.isfile.return_value = True
+
+ t_runner.OnapVvpRunner(self.testcase)
+
+ mock_path.join.assert_has_calls([call('one', 'two')])
+ mock_path.isfile.assert_called_once()
+ mock_utils.source_env.assert_called_once_with('env_file')
diff --git a/dovetail/tests/unit/test_testcase.py b/dovetail/tests/unit/test_testcase.py
index 0d303206..81a8de39 100644
--- a/dovetail/tests/unit/test_testcase.py
+++ b/dovetail/tests/unit/test_testcase.py
@@ -43,7 +43,7 @@ class TestcaseTesting(unittest.TestCase):
tcase.Testcase.create_log()
- self.assertEquals(getlogger_obj, tcase.Testcase.logger)
+ self.assertEqual(getlogger_obj, tcase.Testcase.logger)
@patch('dovetail.testcase.Parser')
def test_parse_cmd_no_lines(self, mock_parser):
@@ -55,7 +55,7 @@ class TestcaseTesting(unittest.TestCase):
mock_parser.parse_cmd.assert_called_once_with(
'cmd', testcase)
- self.assertEquals(False, result)
+ self.assertEqual(False, result)
@patch('dovetail.testcase.Parser')
def test_parse_cmd(self, mock_parser):
@@ -70,8 +70,8 @@ class TestcaseTesting(unittest.TestCase):
mock_parser.parse_cmd.assert_called_once_with(
'cmd', testcase)
logger_obj.debug.assert_called_once_with("cmds: ['cmd_lines']")
- self.assertEquals(['cmd_lines'], testcase.cmds)
- self.assertEquals(True, result)
+ self.assertEqual(['cmd_lines'], testcase.cmds)
+ self.assertEqual(True, result)
@patch('dovetail.testcase.dt_cfg')
def test_prepare_cmd_no_cmds(self, mock_config):
@@ -84,12 +84,12 @@ class TestcaseTesting(unittest.TestCase):
logger_obj.error.assert_called_once_with(
'Test case {} has no cmds.'.format(testcase.name()))
- self.assertEquals(False, result)
+ self.assertEqual(False, result)
@patch('dovetail.testcase.dt_cfg')
@patch.object(tcase.Testcase, 'parse_cmd')
def test_prepare_cmd_testcase_cmd(self, mock_parse, mock_config):
- testcase = tcase.VnftestTestcase(self.testcase_yaml)
+ testcase = tcase.ShellTestcase(self.testcase_yaml)
testcase.testcase['validate']['cmds'] = ['cmd']
mock_config.dovetail_config = {}
mock_parse.return_value = True
@@ -97,7 +97,7 @@ class TestcaseTesting(unittest.TestCase):
result = testcase.prepare_cmd('type')
mock_parse.assert_called_once_with(['cmd'])
- self.assertEquals(True, result)
+ self.assertEqual(True, result)
@patch('dovetail.testcase.dt_cfg')
@patch.object(tcase.Testcase, 'parse_cmd')
@@ -110,22 +110,22 @@ class TestcaseTesting(unittest.TestCase):
result = testcase.prepare_cmd('type')
mock_parse.assert_called_once_with(['cmd'])
- self.assertEquals(True, result)
+ self.assertEqual(True, result)
def test_str(self):
- testcase = tcase.Testcase(self.testcase_yaml)
+ testcase = tcase.OnapVtpTestcase(self.testcase_yaml)
result = testcase.__str__()
- self.assertEquals(testcase.testcase, result)
+ self.assertEqual(testcase.testcase, result)
def test_objective(self):
- testcase = tcase.Testcase(self.testcase_yaml)
+ testcase = tcase.OnapVvpTestcase(self.testcase_yaml)
testcase.testcase['objective'] = 'objective'
result = testcase.objective()
- self.assertEquals('objective', result)
+ self.assertEqual('objective', result)
@patch('dovetail.testcase.dt_utils')
def test_sub_testcase(self, mock_utils):
@@ -136,7 +136,7 @@ class TestcaseTesting(unittest.TestCase):
mock_utils.get_value_from_dict.assert_called_once_with(
'report.sub_testcase_list', testcase.testcase)
- self.assertEquals('value', result)
+ self.assertEqual('value', result)
def test_sub_testcase_passed(self):
testcase = tcase.Testcase(self.testcase_yaml)
@@ -145,37 +145,49 @@ class TestcaseTesting(unittest.TestCase):
result = testcase.sub_testcase_passed('name', 'passed')
- logger_obj.debug.assert_called_once_with(
- 'sub_testcase_passed: name passed')
- self.assertEquals('passed', result)
+ self.assertEqual('passed', result)
def test_validate_type(self):
testcase = tcase.Testcase(self.testcase_yaml)
result = testcase.validate_type()
- self.assertEquals('functest', result)
+ self.assertEqual('functest', result)
def test_validate_testcase(self):
testcase = tcase.Testcase(self.testcase_yaml)
result = testcase.validate_testcase()
- self.assertEquals('tempest_smoke_serial', result)
+ self.assertEqual('tempest_smoke_serial', result)
+
+ def test_portal_key_file(self):
+ testcase = tcase.Testcase(self.testcase_yaml)
+
+ result = testcase.portal_key_file()
+
+ self.assertEqual('tempest_logs/tempest_smoke_serial.html', result)
+
+ def test_vnf_type(self):
+ testcase = tcase.OnapVtpTestcase(self.testcase_yaml)
+
+ result = testcase.vnf_type()
+
+ self.assertEqual('tosca', result)
def test_passed(self):
testcase = tcase.Testcase(self.testcase_yaml)
result = testcase.passed('passed')
- self.assertEquals('passed', result)
+ self.assertEqual('passed', result)
def test_set_get_results(self):
testcase = tcase.Testcase(self.testcase_yaml)
testcase.set_results('results')
- self.assertEquals('results', testcase.get_results())
+ self.assertEqual('results', testcase.get_results())
def test_pre_condition_exists(self):
testcase = tcase.Testcase(self.testcase_yaml)
@@ -183,7 +195,7 @@ class TestcaseTesting(unittest.TestCase):
result = testcase.pre_condition()
- self.assertEquals('pre_condition', result)
+ self.assertEqual('pre_condition', result)
@patch.object(tcase.Testcase, 'pre_condition_cls')
def test_pre_condition_not_exists(self, mock_pre_condition):
@@ -197,7 +209,7 @@ class TestcaseTesting(unittest.TestCase):
mock_pre_condition.assert_called_once_with('functest')
logger_obj.debug.assert_called_once_with(
'Test case: {} pre_condition is empty.'.format(testcase.name()))
- self.assertEquals(False, result)
+ self.assertEqual(False, result)
def test_pre_copy_path(self):
testcase = tcase.Testcase(self.testcase_yaml)
@@ -205,14 +217,14 @@ class TestcaseTesting(unittest.TestCase):
result = testcase.pre_copy_path('key')
- self.assertEquals('value', result)
+ self.assertEqual('value', result)
def test_pre_copy_path_error(self):
testcase = tcase.Testcase(self.testcase_yaml)
result = testcase.pre_copy_path('key')
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
def test_post_condition_exists(self):
testcase = tcase.Testcase(self.testcase_yaml)
@@ -220,7 +232,7 @@ class TestcaseTesting(unittest.TestCase):
result = testcase.post_condition()
- self.assertEquals('post_condition', result)
+ self.assertEqual('post_condition', result)
@patch.object(tcase.Testcase, 'post_condition_cls')
def test_post_condition_not_exists(self, mock_post_condition):
@@ -234,14 +246,13 @@ class TestcaseTesting(unittest.TestCase):
mock_post_condition.assert_called_once_with('functest')
logger_obj.debug.assert_called_once_with(
'Test case: {} post_condition is empty.'.format(testcase.name()))
- self.assertEquals(False, result)
+ self.assertEqual(False, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.testcase.os.path')
@patch('dovetail.testcase.dt_cfg')
- @patch.object(tcase.Testcase, 'pre_copy_path')
@patch.object(tcase.Testcase, 'sub_testcase')
- def test_mk_src_file(self, mock_sub_testcase, mock_pre_copy, mock_config,
+ def test_mk_src_file(self, mock_sub_testcase, mock_config,
mock_path, mock_open):
testcase = tcase.Testcase(self.testcase_yaml)
logger_obj = Mock()
@@ -249,8 +260,6 @@ class TestcaseTesting(unittest.TestCase):
mock_config.dovetail_config = {'result_dir': 'value'}
sub_test = 'sub_test'
file_path = 'file_path'
- testcase_src_file = 'testcase_src_file'
- mock_pre_copy.return_value = testcase_src_file
mock_path.join.return_value = file_path
mock_sub_testcase.return_value = [sub_test]
file_obj = Mock()
@@ -258,21 +267,18 @@ class TestcaseTesting(unittest.TestCase):
result = testcase.mk_src_file()
- mock_pre_copy.assert_called_once_with('src_file')
- mock_path.join.assert_called_once_with('value', testcase_src_file)
+ mock_path.join.assert_called_once_with('value', 'tempest_custom.txt')
mock_open.assert_called_once_with(file_path, 'w+')
file_obj.write.assert_called_once_with(sub_test + '\n')
- logger_obj.debug.assert_has_calls([
- call('Save test cases {}'.format(sub_test)),
- call('Save test cases to {}'.format(file_path))])
- self.assertEquals(file_path, result)
+ logger_obj.debug.assert_called_once_with(
+ 'Save test cases to {}'.format(file_path))
+ self.assertEqual(file_path, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.testcase.os.path')
@patch('dovetail.testcase.dt_cfg')
- @patch.object(tcase.Testcase, 'pre_copy_path')
@patch.object(tcase.Testcase, 'sub_testcase')
- def test_mk_src_file_exception(self, mock_sub_testcase, mock_pre_copy,
+ def test_mk_src_file_exception(self, mock_sub_testcase,
mock_config, mock_path, mock_open):
testcase = tcase.Testcase(self.testcase_yaml)
logger_obj = Mock()
@@ -280,19 +286,16 @@ class TestcaseTesting(unittest.TestCase):
mock_config.dovetail_config = {'result_dir': 'value'}
sub_test = 'sub_test'
file_path = 'file_path'
- testcase_src_file = 'testcase_src_file'
- mock_pre_copy.return_value = testcase_src_file
mock_path.join.return_value = file_path
mock_sub_testcase.return_value = [sub_test]
mock_open.return_value.__enter__.side_effect = Exception()
result = testcase.mk_src_file()
- mock_pre_copy.assert_called_once_with('src_file')
- mock_path.join.assert_called_once_with('value', testcase_src_file)
+ mock_path.join.assert_called_once_with('value', 'tempest_custom.txt')
mock_open.assert_called_once_with(file_path, 'w+')
logger_obj.exception('Failed to save: {}'.format(file_path))
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.testcase.TestRunnerFactory')
def test_run(self, mock_factory):
@@ -316,28 +319,28 @@ class TestcaseTesting(unittest.TestCase):
mock_config.dovetail_config = {'type': {'pre_condition': 'value'}}
result = tcase.Testcase.pre_condition_cls('type')
- self.assertEquals('value', result)
+ self.assertEqual('value', result)
@patch('dovetail.testcase.dt_cfg')
def test_pre_condition_cls_key_error(self, mock_config):
mock_config.dovetail_config = {}
result = tcase.Testcase.pre_condition_cls('type')
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.testcase.dt_cfg')
def test_post_condition_cls(self, mock_config):
mock_config.dovetail_config = {'type': {'post_condition': 'value'}}
result = tcase.Testcase.post_condition_cls('type')
- self.assertEquals('value', result)
+ self.assertEqual('value', result)
@patch('dovetail.testcase.dt_cfg')
def test_post_condition_cls_key_error(self, mock_config):
mock_config.dovetail_config = {}
result = tcase.Testcase.post_condition_cls('type')
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
def test_increase_retry(self):
testcase = tcase.Testcase(self.testcase_yaml)
@@ -346,9 +349,9 @@ class TestcaseTesting(unittest.TestCase):
for _ in range(0, 42):
result = testcase.increase_retry()
- self.assertEquals(42, result)
+ self.assertEqual(42, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.testcase.yaml')
@patch('dovetail.testcase.os')
@patch('dovetail.testcase.TestcaseFactory')
@@ -373,9 +376,9 @@ class TestcaseTesting(unittest.TestCase):
mock_open.assert_called_once_with('testcase_path')
mock_yaml.safe_load.assert_called_once_with(file_obj)
mock_factory.create.assert_called_once_with('value', yaml_dict)
- self.assertEquals(runner_obj, tcase.Testcase.get('key'))
+ self.assertEqual(runner_obj, tcase.Testcase.get('key'))
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.testcase.yaml')
@patch('dovetail.testcase.os')
@patch('dovetail.testcase.TestcaseFactory')
@@ -404,41 +407,33 @@ class TestcaseTesting(unittest.TestCase):
'Failed to create test case: file')
def test_get_none(self):
- self.assertEquals(None, tcase.Testcase.get('unknown'))
+ self.assertEqual(None, tcase.Testcase.get('unknown'))
def test_check_testarea_none(self):
- self.assertEquals((True, ['full']),
- tcase.Testcase.check_testarea(None))
-
- @patch('dovetail.testcase.dt_cfg')
- def test_check_testarea_not_in_config(self, mock_config):
- mock_config.dovetail_config = {'testarea_supported': []}
- self.assertEquals((False, None),
- tcase.Testcase.check_testarea(['area']))
+ self.assertEqual((True, ['full']),
+ tcase.Testcase.check_testarea(None))
@patch('dovetail.testcase.dt_cfg')
def test_check_testarea_full(self, mock_config):
- mock_config.dovetail_config = {'testarea_supported': ['full']}
- self.assertEquals((True, ['full']),
- tcase.Testcase.check_testarea(['full']))
+ self.assertEqual((True, ['full']),
+ tcase.Testcase.check_testarea(['full']))
@patch('dovetail.testcase.dt_cfg')
def test_check_testarea(self, mock_config):
- mock_config.dovetail_config = {'testarea_supported': ['area']}
- self.assertEquals((True, ['area']),
- tcase.Testcase.check_testarea(['area']))
+ self.assertEqual((True, ['area']),
+ tcase.Testcase.check_testarea(['area']))
def test_check_testcase_area(self):
- self.assertEquals(False,
- tcase.Testcase.check_testcase_area(None, None))
+ self.assertEqual(False,
+ tcase.Testcase.check_testcase_area(None, None))
def test_check_testcase_area_full_or_in_testcase(self):
- self.assertEquals(True,
- tcase.Testcase.check_testcase_area(['full'], 'full'))
+ self.assertEqual(True,
+ tcase.Testcase.check_testcase_area(['full'], 'full'))
def test_check_testcase_area_not_in_testcase_or_full(self):
- self.assertEquals(False,
- tcase.Testcase.check_testcase_area(['full'], 'half'))
+ self.assertEqual(False,
+ tcase.Testcase.check_testcase_area(['full'], 'half'))
@patch('dovetail.testcase.dt_utils')
def test_get_testcases_for_testsuite_no_testcases(self, mock_utils):
@@ -450,7 +445,7 @@ class TestcaseTesting(unittest.TestCase):
call('testcases_list', 'suite'),
call('mandatory', None),
call('optional', None)])
- self.assertEquals([], result)
+ self.assertEqual([], result)
@patch('dovetail.testcase.dt_cfg')
@patch('dovetail.testcase.dt_utils')
@@ -479,7 +474,7 @@ class TestcaseTesting(unittest.TestCase):
.format(testsuite['name'])),
call('There is no optional test case in test suite {}'
.format(testsuite['name']))])
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
@patch('dovetail.testcase.dt_cfg')
@patch('dovetail.testcase.dt_utils')
@@ -513,10 +508,10 @@ class TestcaseTesting(unittest.TestCase):
mock_check.assert_has_calls([
call('mandatory', 'area'),
call('optional', 'area')])
- self.assertEquals(['mandatory', 'optional'], result)
- self.assertEquals(
+ self.assertEqual(['mandatory', 'optional'], result)
+ self.assertEqual(
True, tcase.Testcase.testcase_list['mandatory'].is_mandatory)
- self.assertEquals(
+ self.assertEqual(
False, tcase.Testcase.testcase_list['optional'].is_mandatory)
@patch('dovetail.testcase.dt_cfg')
@@ -551,13 +546,12 @@ class TestcaseTesting(unittest.TestCase):
mock_check.assert_has_calls([
call('mandatory', 'area'),
call('optional', 'area')])
- self.assertEquals(['mandatory', 'optional'], result)
- self.assertEquals(True,
- tcase.Testcase.testcase_list['mandatory']
- .is_mandatory)
- self.assertEquals(False,
- tcase.Testcase.testcase_list['optional']
- .is_mandatory)
+ self.assertEqual(['mandatory', 'optional'], result)
+ self.assertEqual(True,
+ tcase.Testcase.testcase_list['mandatory']
+ .is_mandatory)
+ self.assertEqual(False,
+ tcase.Testcase.testcase_list['optional'].is_mandatory)
@patch.object(tcase.Testcase, 'prepare_cmd')
def test_functest_case_prepare_cmd_false(self, mock_prepare):
@@ -567,7 +561,7 @@ class TestcaseTesting(unittest.TestCase):
result = testcase.prepare_cmd('type')
mock_prepare.assert_called_once_with('type')
- self.assertEquals(False, result)
+ self.assertEqual(False, result)
@patch('dovetail.testcase.os.path')
@patch('dovetail.testcase.dt_cfg')
@@ -580,30 +574,29 @@ class TestcaseTesting(unittest.TestCase):
mock_prepare.return_value = True
mock_config.dovetail_config = {
'no_api_validation': True,
- 'functest': {'config': {'dir': 'value'}}}
+ 'functest': {'patches_dir': 'value'}}
mock_path.join.return_value = 'patch_cmd'
result = testcase.prepare_cmd('type')
mock_path.join.assert_called_once_with(
- 'value', 'patches', 'functest', 'disable-api-validation',
- 'apply.sh')
+ 'value', 'functest', 'disable-api-validation', 'apply.sh')
logger_obj.debug.assert_called_once_with(
'Updated list of commands for test run with '
'disabled API response validation: {}'
.format(testcase.cmds))
- self.assertEquals(['patch_cmd'], testcase.cmds)
- self.assertEquals(True, result)
+ self.assertEqual(['patch_cmd'], testcase.cmds)
+ self.assertEqual(True, result)
def test_testfactory_error(self):
- self.assertEquals(None,
- tcase.TestcaseFactory.create('unknown',
- self.testcase_yaml))
+ self.assertEqual(None,
+ tcase.TestcaseFactory.create('unknown',
+ self.testcase_yaml))
def test_testfactory_k8s(self):
k8s_testcase = tcase.TestcaseFactory.create('functest-k8s',
self.testcase_yaml)
- self.assertEquals('functest-k8s', k8s_testcase.type)
+ self.assertEqual('functest-k8s', k8s_testcase.type)
@patch('dovetail.testcase.dt_logger')
def test_testsuite_create_log(self, mock_logger):
@@ -614,7 +607,7 @@ class TestcaseTesting(unittest.TestCase):
tcase.Testsuite.create_log()
- self.assertEquals(getlogger_obj, tcase.Testsuite.logger)
+ self.assertEqual(getlogger_obj, tcase.Testsuite.logger)
def test_testsuite_get_test(self):
suite = tcase.Testsuite('suite')
@@ -622,16 +615,16 @@ class TestcaseTesting(unittest.TestCase):
result = suite.get_test('testcase')
- self.assertEquals('value', result)
+ self.assertEqual('value', result)
def test_testsuite_get_test_not_exists(self):
suite = tcase.Testsuite('suite')
result = suite.get_test('testcase')
- self.assertEquals(None, result)
+ self.assertEqual(None, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('dovetail.testcase.yaml')
@patch('dovetail.testcase.os')
@patch('dovetail.testcase.constants')
@@ -650,16 +643,16 @@ class TestcaseTesting(unittest.TestCase):
mock_os.path.join.assert_called_with('root', 'file')
mock_open.assert_called_once_with('file_path')
mock_yaml.safe_load.assert_called_once_with(file_obj)
- self.assertEquals({'testsuite': 'value'},
- tcase.Testsuite.testsuite_list)
+ self.assertEqual({'testsuite': 'value'},
+ tcase.Testsuite.testsuite_list)
def test_testsuite_get_none(self):
- self.assertEquals(None, tcase.Testsuite.get('unknown'))
+ self.assertEqual(None, tcase.Testsuite.get('unknown'))
def test_testsuite_get(self):
tcase.Testsuite.testsuite_list.update({'key': 'value'})
- self.assertEquals('value', tcase.Testsuite.get('key'))
+ self.assertEqual('value', tcase.Testsuite.get('key'))
def test_testsuite_get_all(self):
tcase.Testsuite.testsuite_list.update({'key': 'value'})
- self.assertEquals({'key': 'value'}, tcase.Testsuite.get_all())
+ self.assertEqual({'key': 'value'}, tcase.Testsuite.get_all())
diff --git a/dovetail/tests/unit/test_testcase.yaml b/dovetail/tests/unit/test_testcase.yaml
index cb947cd9..b4cd3b1d 100644
--- a/dovetail/tests/unit/test_testcase.yaml
+++ b/dovetail/tests/unit/test_testcase.yaml
@@ -1,11 +1,22 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
dovetail.ipv6.tc001:
name: dovetail.ipv6.tc001
objective: VIM ipv6 operations, to create/delete network, port and subnet in bulk operation
+ vnf_type: tosca
validate:
type: functest
testcase: tempest_smoke_serial
report:
+ portal_key_file: tempest_logs/tempest_smoke_serial.html
sub_testcase_list:
- tempest.api.network.test_networks.BulkNetworkOpsIpV6Test.test_bulk_create_delete_network
- tempest.api.network.test_networks.BulkNetworkOpsIpV7Test.test_bulk_create_delete_port
diff --git a/dovetail/tests/unit/utils/test_dovetail_config.py b/dovetail/tests/unit/utils/test_dovetail_config.py
index c7ac5b96..8e835f49 100644
--- a/dovetail/tests/unit/utils/test_dovetail_config.py
+++ b/dovetail/tests/unit/utils/test_dovetail_config.py
@@ -39,7 +39,7 @@ class DovetailConfigTesting(unittest.TestCase):
dovetail_cfg.set_leaf_dict(dict_to_test, ['aa', 'bb', 'cc'], 'val')
- self.assertEquals({'aa': {'bb': {'cc': 'val'}}}, dict_to_test)
+ self.assertEqual({'aa': {'bb': {'cc': 'val'}}}, dict_to_test)
@patch.object(DovetailConfig, 'set_leaf_dict')
@patch.object(DovetailConfig, 'dovetail_config')
diff --git a/dovetail/tests/unit/utils/test_dovetail_logger.py b/dovetail/tests/unit/utils/test_dovetail_logger.py
index a8867890..a70c0503 100644
--- a/dovetail/tests/unit/utils/test_dovetail_logger.py
+++ b/dovetail/tests/unit/utils/test_dovetail_logger.py
@@ -49,7 +49,7 @@ class DovetailLoggerTesting(unittest.TestCase):
logger_name = 'name'
dovetail_logger = Logger(logger_name)
mock_logging.getLogger.assert_called_once_with(logger_name)
- self.assertEquals(dovetail_logger.logger.propagate, 0)
+ self.assertEqual(dovetail_logger.logger.propagate, 0)
logger.setLevel.assert_called_once_with(level_const_debug)
mock_os.path.exists.assert_called_once_with(file_path)
# mock_os.makedirs.assert_called_once_with(file_path)
@@ -60,7 +60,7 @@ class DovetailLoggerTesting(unittest.TestCase):
file_handler_obj.setLevel.assert_called_once_with(level_const_info)
logger.addHandler.assert_has_calls([
call(stream_handler_obj), call(file_handler_obj)])
- self.assertEquals(dovetail_logger.getLogger(), logger)
+ self.assertEqual(dovetail_logger.getLogger(), logger)
@patch('sys.stdout')
@patch('dovetail.utils.dovetail_logger.os')
@@ -84,7 +84,7 @@ class DovetailLoggerTesting(unittest.TestCase):
logger_name = 'name'
dovetail_logger = Logger(logger_name)
mock_logging.getLogger.assert_called_once_with(logger_name)
- self.assertEquals(dovetail_logger.logger.propagate, 0)
+ self.assertEqual(dovetail_logger.logger.propagate, 0)
logger.setLevel.assert_called_once_with(level_const_debug)
mock_os.path.exists.assert_called_once_with(file_path)
# mock_os.makedirs.assert_called_once_with(file_path)
@@ -95,4 +95,4 @@ class DovetailLoggerTesting(unittest.TestCase):
file_handler_obj.setLevel.assert_called_once_with(level_const_debug)
logger.addHandler.assert_has_calls([
call(stream_handler_obj), call(file_handler_obj)])
- self.assertEquals(dovetail_logger.getLogger(), logger)
+ self.assertEqual(dovetail_logger.getLogger(), logger)
diff --git a/dovetail/tests/unit/utils/test_dovetail_utils.py b/dovetail/tests/unit/utils/test_dovetail_utils.py
index 0f0e14f3..7d1fddc1 100644
--- a/dovetail/tests/unit/utils/test_dovetail_utils.py
+++ b/dovetail/tests/unit/utils/test_dovetail_utils.py
@@ -27,7 +27,7 @@ class DovetailUtilsTesting(unittest.TestCase):
pass
@patch('sys.stdout')
- @patch('__builtin__.print')
+ @patch('builtins.print')
def test_exec_log_no_verbose(self, mock_print, mock_stdout):
dovetail_utils.exec_log(verbose=False, logger=None, msg='',
level='info', flush=True)
@@ -36,7 +36,7 @@ class DovetailUtilsTesting(unittest.TestCase):
mock_stdout.flush.assert_not_called()
@patch('sys.stdout')
- @patch('__builtin__.print')
+ @patch('builtins.print')
def test_exec_log_no_logger_flush(self, mock_print, mock_stdout):
message = 'message'
@@ -47,7 +47,7 @@ class DovetailUtilsTesting(unittest.TestCase):
mock_stdout.flush.assert_called_once()
@patch('sys.stdout')
- @patch('__builtin__.print')
+ @patch('builtins.print')
def test_exec_log_no_logger_no_flush(self, mock_print, mock_stdout):
message = 'message'
@@ -134,7 +134,7 @@ class DovetailUtilsTesting(unittest.TestCase):
.format(file_path))
self.assertEqual(expected, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('os.path', autospec=True)
def test_read_plain_file(self, mock_path, mock_open):
file_path = 'known_file'
@@ -149,7 +149,7 @@ class DovetailUtilsTesting(unittest.TestCase):
mock_open.assert_called_once_with(file_path, 'r')
self.assertEqual(expected, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('os.path', autospec=True)
def test_read_plain_file_raised_exception(self, mock_path, mock_open):
logger = Mock()
@@ -182,7 +182,7 @@ class DovetailUtilsTesting(unittest.TestCase):
self.assertEqual(expected, result)
@patch('yaml.safe_load')
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('os.path', autospec=True)
def test_read_yaml_file(self, mock_path, mock_open, mock_load):
file_obj = Mock()
@@ -200,7 +200,7 @@ class DovetailUtilsTesting(unittest.TestCase):
mock_load.assert_called_once_with(file_obj)
self.assertEqual(expected, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('os.path', autospec=True)
def test_read_yaml_file_raised_exception(self, mock_path, mock_open):
logger = Mock()
@@ -227,7 +227,7 @@ class DovetailUtilsTesting(unittest.TestCase):
mock_path.isfile.return_value = False
logger = Mock()
- expected = ''
+ expected = {}
result = dovetail_utils.get_hosts_info(logger)
mock_path.join.assert_called_once_with(file_path, 'hosts.yaml')
@@ -235,7 +235,7 @@ class DovetailUtilsTesting(unittest.TestCase):
self.assertEqual(expected, result)
@patch('yaml.safe_load')
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('os.path', autospec=True)
def test_get_hosts_info_not_yaml(self, mock_path, mock_open, mock_load):
file_path = 'file_path'
@@ -248,7 +248,7 @@ class DovetailUtilsTesting(unittest.TestCase):
mock_load.return_value = None
logger = Mock()
- expected = ''
+ expected = {}
result = dovetail_utils.get_hosts_info(logger)
mock_path.join.assert_called_once_with(file_path, 'hosts.yaml')
@@ -260,7 +260,7 @@ class DovetailUtilsTesting(unittest.TestCase):
self.assertEqual(expected, result)
@patch('yaml.safe_load')
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('os.path', autospec=True)
def test_get_hosts_info_no_hosts_info(self, mock_path, mock_open,
mock_load):
@@ -274,7 +274,7 @@ class DovetailUtilsTesting(unittest.TestCase):
mock_load.return_value = {'a': 'b'}
logger = Mock()
- expected = ''
+ expected = {}
result = dovetail_utils.get_hosts_info(logger)
mock_path.join.assert_called_once_with(file_path, 'hosts.yaml')
@@ -287,7 +287,7 @@ class DovetailUtilsTesting(unittest.TestCase):
self.assertEqual(expected, result)
@patch('yaml.safe_load')
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('os.path', autospec=True)
def test_get_hosts_info_no_hostname(self, mock_path, mock_open, mock_load):
file_path = 'file_path'
@@ -299,7 +299,7 @@ class DovetailUtilsTesting(unittest.TestCase):
mock_open.return_value.__enter__.return_value = file_obj
mock_load.return_value = {'hosts_info': {'127.0.0.1': []}}
- expected = ''
+ expected = {}
result = dovetail_utils.get_hosts_info()
mock_path.join.assert_called_once_with(file_path, 'hosts.yaml')
@@ -310,7 +310,7 @@ class DovetailUtilsTesting(unittest.TestCase):
@patch('dovetail.utils.dovetail_utils.add_hosts_info')
@patch('yaml.safe_load')
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('os.path', autospec=True)
def test_get_hosts_info_no_valid_hostname(self, mock_path, mock_open,
mock_load, mock_fn):
@@ -324,20 +324,20 @@ class DovetailUtilsTesting(unittest.TestCase):
hosts_info = {'127.0.0.1': [None]}
mock_load.return_value = {'hosts_info': hosts_info}
- expected = ''
+ expected = {}
result = dovetail_utils.get_hosts_info()
mock_path.join.assert_called_once_with(file_path, 'hosts.yaml')
mock_path.isfile.assert_called_once_with(file_complete_name)
mock_open.assert_called_once_with(file_complete_name)
mock_load.assert_called_once_with(file_obj)
- mock_fn.assert_called_once_with(hosts_info.keys()[0],
- hosts_info.values()[0])
+ mock_fn.assert_called_once_with(list(hosts_info.keys())[0],
+ list(hosts_info.values())[0])
self.assertEqual(expected, result)
@patch('dovetail.utils.dovetail_utils.add_hosts_info')
@patch('yaml.safe_load')
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('os.path', autospec=True)
def test_get_hosts_info(self, mock_path, mock_open, mock_load, mock_fn):
file_path = 'file_path'
@@ -353,7 +353,7 @@ class DovetailUtilsTesting(unittest.TestCase):
logger = Mock()
names_str = ' '.join(hostnames)
- expected = ' --add-host=\'{}\':{} '.format(names_str, hosts_ip)
+ expected = {names_str: hosts_ip}
result = dovetail_utils.get_hosts_info(logger)
mock_path.join.assert_called_once_with(file_path, 'hosts.yaml')
@@ -430,7 +430,7 @@ class DovetailUtilsTesting(unittest.TestCase):
date = '2018-08-10 05:12:27'
logger = Mock()
- expected = '0m0s'
+ expected = '0.0m0s'
result = dovetail_utils.get_duration(date, date, logger)
self.assertEqual(expected, result)
@@ -510,15 +510,12 @@ class DovetailUtilsTesting(unittest.TestCase):
hosts_obj.add.assert_called_once_with([entry_obj])
hosts_obj.write.assert_called_once()
- @patch('dovetail.utils.dovetail_utils.objwalk')
- def test_get_obj_by_path(self, mock_walk):
- path = dist_path = 'path'
- obj = 'obj'
- mock_walk.return_value = [(path, obj)]
-
- expected = obj
- result = dovetail_utils.get_obj_by_path(obj, dist_path)
+ def test_get_obj_by_path(self):
+ obj = {'list': ['a', 'b'], 'name': 'name'}
+ dst_path = ('name',)
+ expected = 'name'
+ result = dovetail_utils.get_obj_by_path(obj, dst_path)
self.assertEqual(expected, result)
@patch('dovetail.utils.dovetail_utils.objwalk')
@@ -533,7 +530,7 @@ class DovetailUtilsTesting(unittest.TestCase):
self.assertEqual(expected, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('os.environ')
def test_source_env(self, mock_env, mock_open):
file_path = 'file_path'
@@ -547,68 +544,47 @@ class DovetailUtilsTesting(unittest.TestCase):
mock_open.assert_called_once_with(file_path, 'r')
mock_env.update.assert_called_once_with({env_name: env_value})
- @patch('dovetail.utils.dovetail_utils.exec_cmd')
- def test_check_docker_version(self, mock_exec):
- server_version = client_version = '1.12.3'
- server_ret = client_ret = 0
- mock_exec.side_effect = [(server_ret, server_version),
- (client_ret, client_version)]
+ @patch('dovetail.utils.dovetail_utils.docker')
+ def test_check_docker_version(self, mock_docker):
+ server_version = '1.12.3'
+ client_obj = Mock()
+ mock_docker.from_env.return_value = client_obj
+ client_obj.version.return_value = {'Version': server_version}
logger = Mock()
dovetail_utils.check_docker_version(logger)
- mock_exec.assert_has_calls(
- [call("sudo docker version -f'{{.Server.Version}}'",
- logger=logger),
- call("sudo docker version -f'{{.Client.Version}}'",
- logger=logger)])
logger.debug.assert_has_calls(
- [call('docker server version: {}'.format(server_version)),
- call('docker client version: {}'.format(client_version))])
+ [call('Docker server version: {}'.format(server_version))])
- @patch('dovetail.utils.dovetail_utils.exec_cmd')
- def test_check_docker_version_error(self, mock_exec):
- server_version = client_version = '1.12.3'
- server_ret = client_ret = 1
- mock_exec.side_effect = [(server_ret, server_version),
- (client_ret, client_version)]
+ @patch('dovetail.utils.dovetail_utils.docker')
+ def test_check_docker_version_error(self, mock_docker):
+ client_obj = Mock()
+ mock_docker.from_env.return_value = client_obj
+ client_obj.version.return_value = {}
logger = Mock()
dovetail_utils.check_docker_version(logger)
- mock_exec.assert_has_calls(
- [call("sudo docker version -f'{{.Server.Version}}'",
- logger=logger),
- call("sudo docker version -f'{{.Client.Version}}'",
- logger=logger)])
logger.error.assert_has_calls(
[call("Don't support this Docker server version. "
- "Docker server should be updated to at least 1.12.3."),
- call("Don't support this Docker client version. "
- "Docker client should be updated to at least 1.12.3.")])
-
- @patch('dovetail.utils.dovetail_utils.exec_cmd')
- def test_check_docker_version_less_than(self, mock_exec):
- server_version = client_version = '1.12.1'
- server_ret = client_ret = 0
- mock_exec.side_effect = [(server_ret, server_version),
- (client_ret, client_version)]
+ "Docker server should be updated to at least 1.12.3.")])
+
+ @patch('dovetail.utils.dovetail_utils.docker')
+ def test_check_docker_version_less_than(self, mock_docker):
+ server_version = '1.12.1'
+ client_obj = Mock()
+ mock_docker.from_env.return_value = client_obj
+ client_obj.version.return_value = {'Version': server_version}
logger = Mock()
dovetail_utils.check_docker_version(logger)
- mock_exec.assert_has_calls(
- [call("sudo docker version -f'{{.Server.Version}}'",
- logger=logger),
- call("sudo docker version -f'{{.Client.Version}}'",
- logger=logger)])
logger.error.assert_has_calls(
[call("Don't support this Docker server version. "
- "Docker server should be updated to at least 1.12.3."),
- call("Don't support this Docker client version. "
- "Docker client should be updated to at least 1.12.3.")])
+ "Docker server should be updated to at least 1.12.3.")])
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('os.path')
@patch('os.listdir')
@patch('json.load')
@@ -640,7 +616,7 @@ class DovetailUtilsTesting(unittest.TestCase):
file_obj.write.assert_called_once_with(file_content_str)
self.assertEqual(expected, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('os.path')
@patch('os.listdir')
def test_combine_files_read_exception(self, mock_listdir, mock_path,
@@ -663,7 +639,7 @@ class DovetailUtilsTesting(unittest.TestCase):
'Failed to read file {}.'.format(file_complete_name))
self.assertEqual(expected, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('os.path')
@patch('os.listdir')
@patch('json.load')
@@ -699,7 +675,7 @@ class DovetailUtilsTesting(unittest.TestCase):
self.assertEqual(expected, result)
@patch('json.dump')
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('os.path')
@patch('dovetail.utils.dovetail_utils.check_https_enabled')
@patch('os.getenv')
@@ -818,7 +794,7 @@ class DovetailUtilsTesting(unittest.TestCase):
.format(services_exception_msg))
self.assertEqual(expected, result)
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('os.path')
@patch('dovetail.utils.dovetail_utils.check_https_enabled')
@patch('os.getenv')
@@ -1050,7 +1026,7 @@ class DovetailUtilsTesting(unittest.TestCase):
self.assertEqual(expected, result)
@patch('os.path')
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('yaml.safe_load')
def test_get_inventory_password(self, mock_load, mock_open, mock_path):
name = 'name'
@@ -1089,7 +1065,7 @@ class DovetailUtilsTesting(unittest.TestCase):
self.assertEqual(expected, result)
@patch('os.path')
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('yaml.safe_load')
def test_get_inventory_key_filename(self, mock_load, mock_open, mock_path):
name = 'name'
@@ -1133,7 +1109,7 @@ class DovetailUtilsTesting(unittest.TestCase):
self.assertEqual(expected, result)
@patch('os.path')
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('yaml.safe_load')
def test_get_inventory_other(self, mock_load, mock_open, mock_path):
name = 'name'
@@ -1166,7 +1142,7 @@ class DovetailUtilsTesting(unittest.TestCase):
self.assertEqual(expected, result)
@patch('os.path')
- @patch('__builtin__.open')
+ @patch('builtins.open')
@patch('yaml.safe_load')
def test_get_inventory_keyerror(self, mock_load, mock_open, mock_path):
name = 'name'
@@ -1197,7 +1173,7 @@ class DovetailUtilsTesting(unittest.TestCase):
self.assertEqual(expected, result)
@patch('os.path')
- @patch('__builtin__.open')
+ @patch('builtins.open')
def test_get_inventory_exception(self, mock_open, mock_path):
inventory_file_name = 'inventory'
pod_file_name = 'pod'
@@ -1254,9 +1230,9 @@ class DovetailUtilsTesting(unittest.TestCase):
subp_stdout = Mock()
subprocess_obj.stdout = subp_stdout
subprocess_obj.wait.return_value = 0
- subp_stdout.readline.side_effect = [cmd_output, '']
+ subp_stdout.readline.side_effect = [cmd_output.encode()]
- expected = (0, 'line')
+ expected = (0, "b'line'")
result = dovetail_utils.exec_cmd(
cmd, logger=logger, exit_on_error=True, info=False,
exec_msg_on=True, err_msg='', verbose=verbose,
@@ -1272,7 +1248,7 @@ class DovetailUtilsTesting(unittest.TestCase):
subp_stdout.close.assert_called_once_with()
subprocess_obj.wait.assert_called_once_with()
mock_getenv.assert_called_once_with('DEBUG')
- mock_bar.assert_called_once_with(1)
+ # mock_bar.assert_called_once_with(1)
self.assertEqual(expected, result)
@patch('sys.exit')
@@ -1297,7 +1273,7 @@ class DovetailUtilsTesting(unittest.TestCase):
subp_stdout = Mock()
subprocess_obj.stdout = subp_stdout
subprocess_obj.wait.return_value = 1
- subp_stdout.readline.side_effect = [cmd_output, '']
+ subp_stdout.readline.side_effect = [cmd_output.encode()]
dovetail_utils.exec_cmd(
cmd, logger=logger, exit_on_error=True, info=False,
@@ -1315,7 +1291,7 @@ class DovetailUtilsTesting(unittest.TestCase):
subp_stdout.close.assert_called_once_with()
subprocess_obj.wait.assert_called_once_with()
mock_getenv.assert_called_once_with('DEBUG')
- mock_bar.assert_called_once_with(1)
+ # mock_bar.assert_called_once_with(1)
mock_exit.assert_called_once_with(1)
@patch('os.path', autospec=True)
@@ -1355,3 +1331,72 @@ class DovetailUtilsTesting(unittest.TestCase):
mock_host.assert_called_once()
mock_endpoint.assert_called_once()
mock_hardware.assert_called_once()
+
+ @patch('json.dumps')
+ @patch('dovetail.utils.dovetail_utils.requests')
+ @patch('os.getenv')
+ def test_push_results_to_db(self, mock_getenv, mock_requests, mock_dumps):
+ logger = Mock()
+ case_name = 'case_name'
+ details = {'criteria': 'PASS'}
+ start_date = 'start_date'
+ stop_date = 'stop_date'
+ mock_getenv.side_effect = [
+ 'url', 'installer', 'scenario', 'pod_name', 'build_tag', 'version']
+ post_req = Mock()
+ post_req.raise_for_status.return_value = None
+ mock_requests.post.return_value = post_req
+ mock_dumps.return_value = {"project_name": "dovetail"}
+
+ dovetail_utils.push_results_to_db(
+ case_name, details, start_date, stop_date, logger)
+
+ mock_requests.post.assert_called_once_with(
+ 'url',
+ data={"project_name": "dovetail"},
+ headers={"Content-Type": "application/json"})
+ logger.debug.assert_called_once_with(
+ "The results were successfully pushed to DB.")
+
+ @patch('json.dumps')
+ @patch('dovetail.utils.dovetail_utils.requests')
+ @patch('os.getenv')
+ def test_push_results_to_db_exception(self, mock_getenv, mock_requests,
+ mock_dumps):
+ logger = Mock()
+ case_name = 'case_name'
+ details = {'criteria': 'PASS'}
+ start_date = 'start_date'
+ stop_date = 'stop_date'
+ mock_getenv.side_effect = [
+ 'url', 'installer', 'scenario', 'pod_name', 'build_tag', 'version']
+ post_req = Mock()
+ post_req.raise_for_status.side_effect = Exception()
+ mock_requests.post.return_value = post_req
+ mock_dumps.return_value = {"project_name": "dovetail"}
+ dovetail_utils.push_results_to_db(
+ case_name, details, start_date, stop_date, logger)
+
+ logger.debug.assert_not_called()
+ logger.exception.assert_called_once_with(
+ "The results cannot be pushed to DB.")
+
+ def test_get_mount_list_error_mount(self):
+ project_cfg = {'mounts': ['aaa']}
+ res, msg = dovetail_utils.get_mount_list(project_cfg)
+ self.assertEqual(None, res)
+ self.assertEqual('Error mount aaa.', msg)
+
+ def test_get_mount_list_keyerror_exception(self):
+ project_cfg = {'mounts': ['aaa=a,bbb=b', '']}
+ res, msg = dovetail_utils.get_mount_list(project_cfg)
+ self.assertEqual(None, res)
+ self.assertEqual("'target'", str(msg))
+
+ def test_get_mount_list(self):
+ project_cfg = {'mounts': ['target=a,source=b', '']}
+ res, msg = dovetail_utils.get_mount_list(project_cfg)
+ expected = [{'Source': 'b', 'Type': 'bind', 'ReadOnly': False,
+ 'Target': 'a'}]
+ self.assertEqual(expected, res)
+ self.assertEqual('Successfully to get mount list.', msg)
diff --git a/dovetail/utils/dovetail_config.py b/dovetail/utils/dovetail_config.py
index e448888f..98be8e62 100644
--- a/dovetail/utils/dovetail_config.py
+++ b/dovetail/utils/dovetail_config.py
@@ -22,16 +22,10 @@ class DovetailConfig(object):
with open(os.path.join(conf_path, 'dovetail_config.yml')) as f:
cls.dovetail_config = yaml.safe_load(f)
- for extra_config_file in cls.dovetail_config['include_config']:
- file_path = os.path.join(conf_path, extra_config_file)
- with open(file_path) as f:
- extra_config = yaml.safe_load(f)
- cls.dovetail_config.update(extra_config)
-
path = os.path.join(conf_path, cls.dovetail_config['cli_file_name'])
with open(path) as f:
cmd_yml = yaml.safe_load(f)
- cls.dovetail_config['cli'] = cmd_yml[cmd_yml.keys()[0]]
+ cls.dovetail_config['cli'] = cmd_yml[list(cmd_yml.keys())[0]]
# update dovetail_config dict with the giving path.
# if path is in the dovetail_config dict, its value will be replaced.
diff --git a/dovetail/utils/dovetail_logger.py b/dovetail/utils/dovetail_logger.py
index e22ef4d6..7c00a252 100644
--- a/dovetail/utils/dovetail_logger.py
+++ b/dovetail/utils/dovetail_logger.py
@@ -26,7 +26,7 @@ import logging
import os
import sys
-from dovetail_config import DovetailConfig as dt_cfg
+from dovetail.utils.dovetail_config import DovetailConfig as dt_cfg
class Logger(object):
diff --git a/dovetail/utils/dovetail_utils.py b/dovetail/utils/dovetail_utils.py
index 2e7b6102..1c4aca9d 100644
--- a/dovetail/utils/dovetail_utils.py
+++ b/dovetail/utils/dovetail_utils.py
@@ -12,6 +12,7 @@ from __future__ import print_function
import sys
import os
import re
+import requests
import subprocess
from collections import Mapping, Set, Sequence
import json
@@ -19,10 +20,12 @@ from datetime import datetime
from distutils.version import LooseVersion
import yaml
import python_hosts
+import docker
+from docker.types import Mount
from dovetail import constants
-from dovetail_config import DovetailConfig as dt_cfg
-from openstack_utils import OS_Utils
+from dovetail.utils.dovetail_config import DovetailConfig as dt_cfg
+from dovetail.utils.openstack_utils import OS_Utils
def exec_log(verbose, logger, msg, level, flush=False):
@@ -58,8 +61,9 @@ def exec_cmd(cmd, logger=None, exit_on_error=False, info=False,
count = 1
DEBUG = os.getenv('DEBUG')
for line in iter(p.stdout.readline, b''):
- exec_log(verbose, logger, line.strip(), level, True)
- stdout += line
+ exec_log(verbose, logger, line.strip().decode('unicode-escape'),
+ level, True)
+ stdout += str(line)
if progress_bar and (DEBUG is None or DEBUG.lower() != 'true'):
show_progress_bar(count)
count += 1
@@ -77,13 +81,13 @@ def exec_cmd(cmd, logger=None, exit_on_error=False, info=False,
# walkthrough the object, yield path and value
-# dual python 2/3 compatability, inspired by the "six" library
-string_types = (str, unicode) if str is bytes else (str, bytes)
-# iteritems = lambda mapping: getattr(mapping, 'iteritems', mapping.items)()
+# dual python 2/3 compatibility, inspired by the "six" library
+string_types = (str, 'unicode') if str is bytes else (str, bytes)
+# items = lambda mapping: getattr(mapping, 'items', mapping.items)()
-def iteritems(mapping):
- return getattr(mapping, 'iteritems', mapping.items)()
+def items(mapping):
+ return getattr(mapping, 'items', mapping.items)()
def objwalk(obj, path=(), memo=None):
@@ -91,7 +95,7 @@ def objwalk(obj, path=(), memo=None):
memo = set()
iterator = None
if isinstance(obj, Mapping):
- iterator = iteritems
+ iterator = items
elif isinstance(obj, (Sequence, Set)) and not isinstance(obj,
string_types):
iterator = enumerate
@@ -157,20 +161,17 @@ def show_progress_bar(length):
def check_docker_version(logger=None):
- server_ret, server_ver = \
- exec_cmd("sudo docker version -f'{{.Server.Version}}'", logger=logger)
- client_ret, client_ver = \
- exec_cmd("sudo docker version -f'{{.Client.Version}}'", logger=logger)
- if server_ret == 0:
- logger.debug('docker server version: {}'.format(server_ver))
- if server_ret != 0 or (LooseVersion(server_ver) < LooseVersion('1.12.3')):
+ client = docker.from_env()
+ server_ver = None
+ try:
+ server_ver = client.version()['Version']
+ except Exception:
+ logger.error('Failed to get Docker server version')
+ if server_ver and (LooseVersion(server_ver) >= LooseVersion('1.12.3')):
+ logger.debug('Docker server version: {}'.format(server_ver))
+ else:
logger.error("Don't support this Docker server version. "
"Docker server should be updated to at least 1.12.3.")
- if client_ret == 0:
- logger.debug('docker client version: {}'.format(client_ver))
- if client_ret != 0 or (LooseVersion(client_ver) < LooseVersion('1.12.3')):
- logger.error("Don't support this Docker client version. "
- "Docker client should be updated to at least 1.12.3.")
def add_hosts_info(ip, hostnames):
@@ -316,7 +317,7 @@ def check_cacert_file(cacert, logger=None):
def get_hosts_info(logger=None):
- hosts_config = ''
+ hosts_config = {}
hosts_config_file = os.path.join(dt_cfg.dovetail_config['config_dir'],
'hosts.yaml')
if not os.path.isfile(hosts_config_file):
@@ -333,7 +334,7 @@ def get_hosts_info(logger=None):
logger.error('There is no key hosts_info in file {}'
.format(hosts_config_file))
return hosts_config
- for ip, hostnames in hosts_info.iteritems():
+ for ip, hostnames in hosts_info.items():
if not hostnames:
continue
add_hosts_info(ip, hostnames)
@@ -341,7 +342,7 @@ def get_hosts_info(logger=None):
if hostname)
if not names_str:
continue
- hosts_config += ' --add-host=\'{}\':{} '.format(names_str, ip)
+ hosts_config[names_str] = ip
logger.debug('Get hosts info {}:{}.'.format(ip, names_str))
return hosts_config
@@ -405,3 +406,53 @@ def get_openstack_info(logger):
get_hosts_info(logger)
get_openstack_endpoint(logger)
get_hardware_info(logger)
+
+
+def push_results_to_db(case_name, details, start_date, stop_date, logger):
+ """
+ Push results to OPNFV TestAPI DB when running with OPNFV CI jobs.
+ All results can be filtered with TestAPI.
+ http://testresults.opnfv.org/test/#/results
+ """
+ try:
+ url = os.getenv('TEST_DB_URL')
+ data = {'project_name': 'dovetail', 'case_name': case_name,
+ 'details': details, 'start_date': start_date,
+ 'stop_date': stop_date}
+ data['criteria'] = details['criteria'] if details else 'FAIL'
+ data['installer'] = os.getenv('INSTALLER_TYPE')
+ data['scenario'] = os.getenv('DEPLOY_SCENARIO')
+ data['pod_name'] = os.getenv('NODE_NAME')
+ data['build_tag'] = os.getenv('BUILD_TAG')
+ data['version'] = os.getenv('VERSION')
+ req = requests.post(url, data=json.dumps(data, sort_keys=True),
+ headers={'Content-Type': 'application/json'})
+ req.raise_for_status()
+ logger.debug('The results were successfully pushed to DB.')
+ return True
+ except Exception:
+ logger.exception('The results cannot be pushed to DB.')
+ return False
+
+
+def get_mount_list(project_cfg):
+ mount_list = []
+ mounts = get_value_from_dict('mounts', project_cfg)
+ for mount in mounts:
+ if mount:
+ param_dict = {}
+ for param in mount.split(','):
+ key_word = param.split('=')
+
+ if len(key_word) != 2:
+ return None, 'Error mount {}.'.format(mount)
+
+ param_dict[key_word[0]] = key_word[1]
+ try:
+ mount_list.append(Mount(target=param_dict['target'],
+ source=param_dict['source'],
+ type='bind'))
+ except Exception as e:
+ return None, e
+
+ return mount_list, 'Successfully to get mount list.'
diff --git a/etc/compliance/debug.yml b/etc/compliance/debug.yml
index e74cff25..b15c3cec 100644
--- a/etc/compliance/debug.yml
+++ b/etc/compliance/debug.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
# only used for dovetail tool development and the integration testing
# of new test cases. New test case candidates for the next release,
diff --git a/etc/compliance/healthcheck.yml b/etc/compliance/healthcheck.yml
index 01b670d0..ad461cad 100644
--- a/etc/compliance/healthcheck.yml
+++ b/etc/compliance/healthcheck.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
# Pre-flight check before running more complex tests
# Will be used for running on CI
@@ -5,6 +14,4 @@ healthcheck:
name: healthcheck
testcases_list:
optional:
- - functest.healthcheck.snaps_health_check
- functest.healthcheck.connection_check
- - functest.healthcheck.api_check
diff --git a/etc/compliance/onap.1.0.0.yml b/etc/compliance/onap.1.0.0.yml
deleted file mode 100644
index e9e805c4..00000000
--- a/etc/compliance/onap.1.0.0.yml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-onap.1.0.0:
- name: onap.1.0.0
- testcases_list:
- optional:
- - onap.lifecycle.tc001
diff --git a/etc/compliance/onap.heat.2019.04.yaml b/etc/compliance/onap.heat.2019.04.yaml
new file mode 100644
index 00000000..1777bda2
--- /dev/null
+++ b/etc/compliance/onap.heat.2019.04.yaml
@@ -0,0 +1,17 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+onap.heat.2019.04:
+ name: onap.heat.2019.04
+ version: '2019.04'
+ vnf_type: heat
+ testcases_list:
+ mandatory:
+ - onap-vvp.validate.heat
diff --git a/etc/compliance/onap.tosca.2019.04.yaml b/etc/compliance/onap.tosca.2019.04.yaml
new file mode 100644
index 00000000..15072010
--- /dev/null
+++ b/etc/compliance/onap.tosca.2019.04.yaml
@@ -0,0 +1,17 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+onap.tosca.2019.04:
+ name: onap.tosca.2019.04
+ version: '2019.04'
+ vnf_type: tosca
+ testcases_list:
+ mandatory:
+ - onap-vtp.validate.csar
diff --git a/etc/compliance/ovp.2018.09.yaml b/etc/compliance/ovp.2019.12.yaml
index 9821b4cb..a1f7a50b 100644
--- a/etc/compliance/ovp.2018.09.yaml
+++ b/etc/compliance/ovp.2019.12.yaml
@@ -1,6 +1,16 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
-ovp.2018.09:
- name: ovp.2018.09
+ovp.2019.12:
+ name: ovp.2019.12
+ version: '2019.12'
testcases_list:
mandatory:
- functest.vping.userdata
@@ -33,12 +43,7 @@ ovp.2018.09:
- functest.tempest.network_scenario
- functest.tempest.bgpvpn
- functest.security.patrole_vxlan_dependent
- - functest.bgpvpn.subnet_connectivity
- - functest.bgpvpn.tenant_separation
- - functest.bgpvpn.router_association
- - functest.bgpvpn.router_association_floating_ip
- yardstick.ha.neutron_l3_agent
- yardstick.ha.controller_restart
- functest.vnf.vims
- functest.vnf.vepc
- - functest.snaps.smoke
diff --git a/etc/compliance/proposed_tests.yml b/etc/compliance/proposed_tests.yml
index 639a0309..17eadff8 100644
--- a/etc/compliance/proposed_tests.yml
+++ b/etc/compliance/proposed_tests.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
proposed_tests:
# proposed new test cases for next release
@@ -7,3 +16,14 @@ proposed_tests:
optional:
- functest.k8s.conformance
- functest.k8s.smoke
+ - functest.rally.authenticate
+ - functest.rally.cinder
+ - functest.rally.glance
+ - functest.rally.gnocchi
+ - functest.rally.heat
+ - functest.rally.keystone
+ - functest.rally.neutron
+ - functest.rally.nova
+ - functest.rally.quotas
+ - functest.tempest.neutron_tempest_plugin_api
+ - functest.tempest.networking_sfc
diff --git a/etc/conf/bottlenecks_config.yml b/etc/conf/bottlenecks_config.yml
index 5866aa3a..c23ad9cc 100644
--- a/etc/conf/bottlenecks_config.yml
+++ b/etc/conf/bottlenecks_config.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
{% set validate_testcase = validate_testcase or '' %}
@@ -8,31 +17,38 @@
{% set build_tag = build_tag or '' %}
{% set cacert_volume = '' %}
{% if cacert %}
- {% set cacert_volume = ' -v ' + cacert + ':' + cacert %}
+ {% set cacert_volume = 'source=' + cacert + ',target=' + cacert %}
{% endif %}
{% set openrc_file = '/tmp/admin_rc.sh' %}
{% set result_dir = '/home/opnfv/bottlenecks/results' %}
+{% set images_dir = '/home/opnfv/images' %}
{% set config_dir = '/home/opnfv/userconfig' %}
{% set image_file = '/tmp/yardstick.img' %}
bottlenecks:
image_name: opnfv/bottlenecks
- docker_tag: latest
- opts: '-id --privileged=true'
- envs: '-e DEPLOY_SCENARIO={{deploy_scenario}} -e Yardstick_TAG=stable
- -e OUTPUT_FILE={{testcase}}.out -e CI_DEBUG={{debug}}
- -e BUILD_TAG={{build_tag}}-{{testcase}}'
+ docker_tag: 8.0.1-latest
+ opts:
+ detach: true
+ stdin_open: true
+ privileged: true
+ shell: '/bin/bash'
+ envs:
+ - 'DEPLOY_SCENARIO={{deploy_scenario}}'
+ - 'Yardstick_TAG=opnfv-8.0.0'
+ - 'OUTPUT_FILE={{testcase}}.out'
+ - 'CI_DEBUG={{debug}}'
+ - 'BUILD_TAG={{build_tag}}-{{testcase}}'
volumes:
- - '-v /var/run/docker.sock:/var/run/docker.sock'
- - '-v {{dovetail_home}}/results/bottlenecks:/tmp'
- - '-v {{dovetail_home}}/pre_config/env_config.sh:{{openrc_file}}'
+ - '{{dovetail_home}}/results/bottlenecks:/tmp'
+ - '{{dovetail_home}}/images:{{images_dir}}'
+ - '{{dovetail_home}}/results:{{result_dir}}'
+ mounts:
+ - 'source=/var/run/docker.sock,target=/var/run/docker.sock'
+ - 'source={{dovetail_home}}/pre_config/env_config.sh,target={{openrc_file}}'
- {{cacert_volume}}
- - '-v {{dovetail_home}}:{{config_dir}}'
- - '-v {{dovetail_home}}/results:{{result_dir}}'
- config:
- dir: {{config_dir}}
pre_condition:
- - 'cp {{config_dir}}/images/ubuntu-16.04-server-cloudimg-amd64-disk1.img {{image_file}}'
+ - 'cp {{images_dir}}/ubuntu-16.04-server-cloudimg-amd64-disk1.img {{image_file}}'
cmds:
- 'python /home/opnfv/bottlenecks/testsuites/run_testsuite.py testcase {{validate_testcase}} False'
post_condition:
@@ -40,8 +56,6 @@ bottlenecks:
- 'cp /tmp/bottlenecks.log {{result_dir}}'
- 'cp /tmp/bottlenecks.stress.ping.out {{result_dir}}'
- 'rm {{image_file}}'
- result:
- dir: {{result_dir}}
openrc: {{openrc_file}}
extra_container:
- 'Bottlenecks-Yardstick'
diff --git a/etc/conf/cmd_config.yml b/etc/conf/cmd_config.yml
index f02c864e..63c87343 100644
--- a/etc/conf/cmd_config.yml
+++ b/etc/conf/cmd_config.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
cli:
arguments:
@@ -19,7 +28,7 @@ cli:
testsuite:
flags:
- '--testsuite'
- default: 'ovp.2018.09'
+ default: 'ovp.2019.12'
help: 'compliance testsuite.'
testarea:
flags:
@@ -79,3 +88,8 @@ cli:
- '--optional'
is_flag: 'True'
help: 'Run all optional test cases.'
+ opnfvci:
+ flags:
+ - '--opnfv-ci'
+ is_flag: 'True'
+ help: 'Only enabled when running with OPNFV CI jobs and pushing results to TestAPI DB'
diff --git a/etc/conf/dovetail_config.yml b/etc/conf/dovetail_config.yml
index ed6433b9..10b81fbb 100644
--- a/etc/conf/dovetail_config.yml
+++ b/etc/conf/dovetail_config.yml
@@ -1,4 +1,14 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
+version: '2019.12'
cli_file_name: 'cmd_config.yml'
result_file: 'results.json'
@@ -17,21 +27,9 @@ testsuite_supported:
- proposed_tests
- debug
- healthcheck
- - ovp.2018.09
- - onap.1.0.0
-# testarea supported, should adjust accordingly
-testarea_supported:
- - ha
- - healthcheck
- - onap
- - bgpvpn
- - security
- - snaps
- - stress
- - tempest
- - vnf
- - vping
- - k8s
+ - ovp.2019.12
+ - onap.tosca.2019.04
+ - onap.heat.2019.04
# used for testcase cmd template in jinja2 format
# we have two variables available now
@@ -42,6 +40,3 @@ parameters:
path: '("name",)'
- name: validate_testcase
path: '("validate", "testcase")'
-
-include_config:
- - vnftest_config.yml
diff --git a/etc/conf/functest-k8s_config.yml b/etc/conf/functest-k8s_config.yml
index e717ef94..a01f5715 100644
--- a/etc/conf/functest-k8s_config.yml
+++ b/etc/conf/functest-k8s_config.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
{% set validate_testcase = validate_testcase or '' %}
@@ -11,21 +20,27 @@
functest-k8s:
image_name: opnfv/functest-kubernetes-healthcheck
- docker_tag: gambia
- opts: '-id'
- envs: '-e INSTALLER_TYPE=unknown -e DEPLOY_SCENARIO=k8-deploy -e NODE_NAME=unknown
- -e TEST_DB_URL=file:///home/opnfv/functest/results/functest_results.txt
- -e CI_DEBUG={{debug}} -e BUILD_TAG={{build_tag}}-{{testcase}}'
+ docker_tag: hunter
+ opts:
+ detach: true
+ stdin_open: true
+ shell: '/bin/bash'
+ envs:
+ - 'INSTALLER_TYPE=unknown'
+ - 'DEPLOY_SCENARIO=k8-deploy'
+ - 'NODE_NAME=unknown'
+ - 'TEST_DB_URL=file:///home/opnfv/functest/results/functest_results.txt'
+ - 'CI_DEBUG={{debug}}'
+ - 'BUILD_TAG={{build_tag}}-{{testcase}}'
volumes:
- - '-v {{dovetail_home}}/pre_config/k8.creds:{{openrc_file}}'
- - '-v {{dovetail_home}}/pre_config/admin.conf:{{kube_file}}'
- - '-v {{dovetail_home}}/results/:{{result_dir}}'
+ - '{{dovetail_home}}/results/:{{result_dir}}'
+ mounts:
+ - 'source={{dovetail_home}}/pre_config/k8.creds,target={{openrc_file}}'
+ - 'source={{dovetail_home}}/pre_config/admin.conf,target={{kube_file}}'
pre_condition:
- 'echo test for precondition in functest'
cmds:
- 'run_tests -t {{validate_testcase}} -r'
post_condition:
- 'echo test for postcondition in functest'
- result:
- dir: {{result_dir}}
openrc: {{openrc_file}}
diff --git a/etc/conf/functest_config.yml b/etc/conf/functest_config.yml
index 451b1675..91fd68e2 100644
--- a/etc/conf/functest_config.yml
+++ b/etc/conf/functest_config.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
{% set validate_testcase = validate_testcase or '' %}
@@ -6,41 +15,51 @@
{% set os_insecure = os_insecure or 'False' %}
{% set os_verify = '' %}
{% if os_insecure == 'True' %}
- {% set os_verify = ' -e OS_VERIFY= ' %}
+ {% set os_verify = 'OS_VERIFY=' %}
{% endif %}
{% set dovetail_home = dovetail_home or '' %}
{% set debug = debug or 'false' %}
{% set build_tag = build_tag or '' %}
{% set cacert_volume = '' %}
{% if cacert %}
- {% set cacert_volume = ' -v ' + cacert + ':' + cacert %}
+ {% set cacert_volume = 'source=' + cacert + ',target=' + cacert %}
{% endif %}
{% set openrc_file = '/home/opnfv/functest/conf/env_file' %}
{% set result_dir = '/home/opnfv/functest/results' %}
-{% set config_dir = '/home/opnfv/userconfig' %}
+{% set userconfig_dir = '/home/opnfv/userconfig' %}
+{% set patches_dir = '/home/opnfv/patches' %}
{% set images_dir = '/home/opnfv/functest/images' %}
functest:
image_name: opnfv/functest-smoke
- docker_tag: gambia
- opts: '-id --privileged=true'
- envs: '{{os_verify}} -e INSTALLER_TYPE=unknown -e DEPLOY_SCENARIO={{deploy_scenario}} -e NODE_NAME=unknown
- -e TEST_DB_URL=file://{{result_dir}}/functest_results.txt
- -e CI_DEBUG={{debug}} -e BUILD_TAG={{build_tag}}-{{testcase}}'
+ docker_tag: hunter
+ opts:
+ detach: true
+ stdin_open: true
+ privileged: true
+ shell: '/bin/bash'
+ envs:
+ - {{os_verify}}
+ - 'INSTALLER_TYPE=unknown'
+ - 'DEPLOY_SCENARIO={{deploy_scenario}}'
+ - 'NODE_NAME=unknown'
+ - 'TEST_DB_URL=file://{{result_dir}}/functest_results.txt'
+ - 'CI_DEBUG={{debug}}'
+ - 'BUILD_TAG={{build_tag}}-{{testcase}}'
volumes:
- - '-v {{dovetail_home}}/pre_config/env_config.sh:{{openrc_file}}'
+ - '{{dovetail_home}}/userconfig:{{userconfig_dir}}'
+ - '{{dovetail_home}}/patches:{{patches_dir}}'
+ - '{{dovetail_home}}/results:{{result_dir}}'
+ - '{{dovetail_home}}/images:{{images_dir}}'
+ mounts:
+ - 'source={{dovetail_home}}/pre_config/env_config.sh,target={{openrc_file}}'
+ - 'source={{dovetail_home}}/pre_config,target=/home/opnfv/pre_config'
- {{cacert_volume}}
- - '-v {{dovetail_home}}:{{config_dir}}'
- - '-v {{dovetail_home}}/results:{{result_dir}}'
- - '-v {{dovetail_home}}/images:{{images_dir}}'
- config:
- dir: {{config_dir}}
+ patches_dir: {{patches_dir}}
pre_condition:
- 'echo test for precondition in functest'
cmds:
- 'run_tests -t {{validate_testcase}} -r'
post_condition:
- 'echo test for postcondition in functest'
- result:
- dir: {{result_dir}}
openrc: {{openrc_file}}
diff --git a/etc/conf/onap-vtp_config.yml b/etc/conf/onap-vtp_config.yml
new file mode 100644
index 00000000..2cfc96fc
--- /dev/null
+++ b/etc/conf/onap-vtp_config.yml
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+
+{% set validate_testcase = validate_testcase or '' %}
+{% set testcase = testcase or '' %}
+{% set build_tag = build_tag or '' %}
+{% set csar_file = csar_file or '' %}
+{% set host_url = host_url or '' %}
+{% set result_dir = '/tmp/onap' %}
+
+onap-vtp:
+ image_name: nexus3.onap.org:10001/onap/cli
+ docker_tag: 2.0.5
+ opts:
+ detach: true
+ tty: true
+ shell: '/bin/bash'
+ envs:
+ - 'OPEN_CLI_MODE=daemon'
+ - 'BUILD_TAG={{build_tag}}-{{testcase}}'
+ - 'OPEN_CLI_PRODUCT_IN_USE=onap-vtp'
+ volumes:
+ - '{{dovetail_home}}/pre_config/{{csar_file}}:/{{csar_file}}'
+ - '{{dovetail_home}}/results:{{result_dir}}'
+ pre_condition:
+ - 'echo this is pre_condition'
+ cmds:
+ - "oclip vnftest-run --name {{validate_testcase}} --param csar=/{{csar_file}}
+ --host-url {{host_url}} --format json > {{result_dir}}/{{testcase}}.out"
+ post_condition:
+ - 'echo this is post_condition'
diff --git a/etc/conf/onap-vvp_config.yml b/etc/conf/onap-vvp_config.yml
new file mode 100644
index 00000000..ee09b711
--- /dev/null
+++ b/etc/conf/onap-vvp_config.yml
@@ -0,0 +1,32 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+
+{% set build_tag = build_tag or '' %}
+{% set heat_templates_archive = heat_templates_archive or '' %}
+{% set result_dir = '/vvp/reports' %}
+
+onap-vvp:
+ image_name: nexus3.onap.org:10001/onap/vvp/validation-scripts
+ docker_tag: latest
+ opts:
+ detach: true
+ tty: true
+ entrypoint: ''
+ shell: '/bin/ash'
+ volumes:
+ - '{{dovetail_home}}/pre_config/{{heat_templates_archive}}.zip:/tmp/{{heat_templates_archive}}.zip'
+ - '{{dovetail_home}}/results:{{result_dir}}'
+ pre_condition:
+ - 'unzip -o -d /vvp/{{heat_templates_archive}} /tmp/{{heat_templates_archive}}.zip'
+ cmds:
+ - 'pytest tests --template-directory=/vvp/{{heat_templates_archive}} --output-directory={{result_dir}} --report-format=json --continue-on-failure'
+ post_condition:
+ - 'echo this is post_condition'
diff --git a/etc/conf/vnftest_config.yml b/etc/conf/vnftest_config.yml
deleted file mode 100644
index 82e068b5..00000000
--- a/etc/conf/vnftest_config.yml
+++ /dev/null
@@ -1,30 +0,0 @@
----
-vnftest:
- image_name: onap/vnfsdk/vnftest
- docker_tag: latest
- opts: '-id --privileged=true'
- config:
- dir: '/home/onap/userconfig'
- pre_condition:
- - 'echo this is pre_condition'
- cmds:
- - 'mkdir -p /home/onap/vnftest/results/'
- - "cd /home/onap/repos/vnftest && source /etc/vnftest/openstack.creds &&
- export CONF_FILE=/etc/vnftest/vnftest.yaml &&
- vnftest task start --output-file /home/onap/vnftest/results/{{testcase}}.out
- /etc/vnftest/vnf_descriptor.yaml
- tests/onap/test_cases/{{validate_testcase}}.yaml"
- post_condition:
- - 'echo this is post_condition'
- result:
- dir: '/home/onap/vnftest/results'
- log: '/tmp/vnftest'
- key_path: '/root/.ssh/id_rsa'
- openrc: '/etc/vnftest/openstack.creds'
- vnftest_conf:
- -
- src_file: '/home/opnfv/dovetail/pre_config/vnftest_conf.yaml'
- dest_file: '/etc/vnftest/vnftest.yaml'
- -
- src_file: '/home/opnfv/dovetail/pre_config/vnf_descriptor.yaml'
- dest_file: '/etc/vnftest/vnf_descriptor.yaml'
diff --git a/etc/conf/yardstick_config.yml b/etc/conf/yardstick_config.yml
index 764ca94b..3c4273f8 100644
--- a/etc/conf/yardstick_config.yml
+++ b/etc/conf/yardstick_config.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
{% set attack_host = attack_host or '' %}
@@ -7,43 +16,49 @@
{% set os_insecure = os_insecure or 'False' %}
{% set os_verify = '' %}
{% if os_insecure == 'True' %}
- {% set os_verify = ' -e OS_VERIFY= ' %}
+ {% set os_verify = 'OS_VERIFY=' %}
{% endif %}
{% set dovetail_home = dovetail_home or '' %}
{% set debug = debug or 'false' %}
{% set build_tag = build_tag or '' %}
{% set cacert_volume = '' %}
{% if cacert %}
- {% set cacert_volume = ' -v ' + cacert + ':' + cacert %}
+ {% set cacert_volume = 'source=' + cacert + ',target=' + cacert %}
{% endif %}
{% set openrc_file = '/etc/yardstick/openstack.creds' %}
+{% set pod_file = '/etc/yardstick/pod.yaml' %}
{% set result_dir = '/tmp/yardstick' %}
-{% set config_dir = '/home/opnfv/userconfig' %}
yardstick:
image_name: opnfv/yardstick
- docker_tag: latest
- opts: '-id --privileged=true'
- envs: "{{os_verify}} -e YARDSTICK_BRANCH=fraser -e CI_DEBUG={{debug}}
- -e BUILD_TAG={{build_tag}}-{{testcase}}"
+ docker_tag: opnfv-8.0.0
+ opts:
+ detach: true
+ stdin_open: true
+ privileged: true
+ shell: '/bin/bash'
+ envs:
+ - {{os_verify}}
+ - 'YARDSTICK_BRANCH=fraser'
+ - 'CI_DEBUG={{debug}}'
+ - 'BUILD_TAG={{build_tag}}-{{testcase}}"'
volumes:
- - '-v {{dovetail_home}}/pre_config/env_config.sh:{{openrc_file}}'
+ - '{{dovetail_home}}/images:/home/opnfv/images'
+ - '{{dovetail_home}}/results:{{result_dir}}'
+ mounts:
+ - 'source={{dovetail_home}}/pre_config,target={{dovetail_home}}/pre_config'
+ - 'source={{dovetail_home}}/pre_config/env_config.sh,target={{openrc_file}}'
+ - 'source={{dovetail_home}}/pre_config/pod.yaml,target={{pod_file}}'
- {{cacert_volume}}
- - '-v {{dovetail_home}}:{{config_dir}}'
- - '-v {{dovetail_home}}/results:{{result_dir}}'
- config:
- dir: {{config_dir}}
pre_condition:
- 'echo this is pre_condition'
cmds:
- "cd /home/opnfv/repos/yardstick && source {{openrc_file}} &&
yardstick task start tests/opnfv/test_cases/{{validate_testcase}}.yaml
--output-file {{result_dir}}/{{testcase}}.out
- --task-args '{'file': '{{config_dir}}/pre_config/pod.yaml',
+ --task-args '{'file': '{{pod_file}}',
'attack_host': {{attack_host}},
'attack_process': {{attack_process}}}'"
post_condition:
- 'echo this is post_condition'
- result:
- dir: {{result_dir}}
openrc: {{openrc_file}}
diff --git a/etc/patches/functest/disable-api-validation/0001-Allow-additional-properties-in-API-responses.patch b/etc/patches/functest/disable-api-validation/0001-Allow-additional-properties-in-API-responses.patch
index 7e81f49b..51136813 100644
--- a/etc/patches/functest/disable-api-validation/0001-Allow-additional-properties-in-API-responses.patch
+++ b/etc/patches/functest/disable-api-validation/0001-Allow-additional-properties-in-API-responses.patch
@@ -1,47 +1,54 @@
-From 90e0e23ce73a4459d981acca588f3873eb29c770 Mon Sep 17 00:00:00 2001
-From: Georg Kunz <georg.kunz@ericsson.com>
-Date: Wed, 6 Jun 2018 15:07:43 +0200
+From 56c7c31129ca7942e7d658b249e32a4b05a08c94 Mon Sep 17 00:00:00 2001
+From: Dovetail <ovp-support@lfnetworking.org>
+Date: Fri, 8 Nov 2019 07:56:32 +0000
Subject: [PATCH] Allow additional properties in API responses
+Signed-off-by: Dovetail <ovp-support@lfnetworking.org>
---
- .../response/compute/v2_1/agents.py | 10 +--
+ .../response/compute/v2_1/agents.py | 10 ++--
.../response/compute/v2_1/aggregates.py | 8 +--
.../compute/v2_1/availability_zone.py | 8 +--
.../response/compute/v2_1/baremetal_nodes.py | 6 +-
.../response/compute/v2_1/certificates.py | 4 +-
.../response/compute/v2_1/extensions.py | 4 +-
.../response/compute/v2_1/fixed_ips.py | 4 +-
- .../response/compute/v2_1/flavors.py | 10 +--
+ .../response/compute/v2_1/flavors.py | 10 ++--
.../response/compute/v2_1/flavors_access.py | 4 +-
.../compute/v2_1/flavors_extra_specs.py | 2 +-
- .../response/compute/v2_1/floating_ips.py | 20 +++---
- .../api_schema/response/compute/v2_1/hosts.py | 14 ++--
+ .../response/compute/v2_1/floating_ips.py | 20 +++----
+ .../api_schema/response/compute/v2_1/hosts.py | 14 ++---
.../response/compute/v2_1/hypervisors.py | 22 +++----
.../response/compute/v2_1/images.py | 16 ++---
.../compute/v2_1/instance_usage_audit_logs.py | 8 +--
.../response/compute/v2_1/interfaces.py | 8 +--
- .../response/compute/v2_1/keypairs.py | 14 ++--
- .../response/compute/v2_1/limits.py | 10 +--
+ .../response/compute/v2_1/keypairs.py | 14 ++---
+ .../response/compute/v2_1/limits.py | 10 ++--
.../response/compute/v2_1/migrations.py | 4 +-
.../response/compute/v2_1/parameter_types.py | 4 +-
.../response/compute/v2_1/quotas.py | 4 +-
.../v2_1/security_group_default_rule.py | 8 +--
.../response/compute/v2_1/security_groups.py | 16 ++---
- .../response/compute/v2_1/servers.py | 66 +++++++++----------
+ .../response/compute/v2_1/server_groups.py | 6 +-
+ .../response/compute/v2_1/servers.py | 60 +++++++++----------
.../response/compute/v2_1/services.py | 12 ++--
.../response/compute/v2_1/snapshots.py | 6 +-
.../response/compute/v2_1/tenant_networks.py | 6 +-
- .../response/compute/v2_1/versions.py | 10 +--
+ .../response/compute/v2_1/versions.py | 10 ++--
.../response/compute/v2_1/volumes.py | 12 ++--
.../response/compute/v2_11/services.py | 4 +-
- .../response/compute/v2_16/servers.py | 14 ++--
+ .../response/compute/v2_16/servers.py | 14 ++---
.../response/compute/v2_23/migrations.py | 4 +-
.../response/compute/v2_26/servers.py | 2 +-
- .../response/compute/v2_3/servers.py | 14 ++--
+ .../response/compute/v2_3/servers.py | 14 ++---
+ .../response/compute/v2_45/images.py | 2 +-
.../response/compute/v2_47/servers.py | 2 +-
- .../response/compute/v2_48/servers.py | 10 +--
+ .../response/compute/v2_48/servers.py | 10 ++--
+ .../response/compute/v2_53/services.py | 4 +-
+ .../response/compute/v2_55/flavors.py | 10 ++--
.../response/compute/v2_6/servers.py | 4 +-
- 37 files changed, 187 insertions(+), 187 deletions(-)
+ .../response/compute/v2_61/flavors.py | 6 +-
+ tempest/lib/api_schema/response/volume/qos.py | 16 ++---
+ 43 files changed, 206 insertions(+), 206 deletions(-)
diff --git a/tempest/lib/api_schema/response/compute/v2_1/agents.py b/tempest/lib/api_schema/response/compute/v2_1/agents.py
index 6f712b41e..09feb73df 100644
@@ -252,7 +259,7 @@ index a653213f0..b53565aab 100644
}
}
diff --git a/tempest/lib/api_schema/response/compute/v2_1/flavors.py b/tempest/lib/api_schema/response/compute/v2_1/flavors.py
-index 547d94d57..76cbb8a55 100644
+index bd5e3d636..27948a784 100644
--- a/tempest/lib/api_schema/response/compute/v2_1/flavors.py
+++ b/tempest/lib/api_schema/response/compute/v2_1/flavors.py
@@ -28,13 +28,13 @@ list_flavors = {
@@ -278,10 +285,10 @@ index 547d94d57..76cbb8a55 100644
- 'additionalProperties': False,
+ 'additionalProperties': True,
# 'OS-FLV-DISABLED', 'os-flavor-access', 'rxtx_factor' and
- # 'OS-FLV-EXT-DATA' are API extensions. So they are not 'required'.
+ # 'OS-FLV-EXT-DATA' are API extensions, so they are not 'required'.
'required': ['name', 'links', 'ram', 'vcpus', 'swap', 'disk', 'id']
@@ -77,7 +77,7 @@ list_flavors_details = {
- # to be present always So it is not 'required'.
+ # to be present always so it is not 'required'.
'flavors_links': parameter_types.links
},
- 'additionalProperties': False,
@@ -289,7 +296,7 @@ index 547d94d57..76cbb8a55 100644
'required': ['flavors']
}
}
-@@ -93,7 +93,7 @@ create_get_flavor_details = {
+@@ -89,7 +89,7 @@ create_update_get_flavor_details = {
'properties': {
'flavor': common_flavor_info
},
@@ -318,7 +325,7 @@ index a4d6af0d7..958ed02b5 100644
}
}
diff --git a/tempest/lib/api_schema/response/compute/v2_1/flavors_extra_specs.py b/tempest/lib/api_schema/response/compute/v2_1/flavors_extra_specs.py
-index a438d4869..c8988b1de 100644
+index 3aa1edac4..081d21a92 100644
--- a/tempest/lib/api_schema/response/compute/v2_1/flavors_extra_specs.py
+++ b/tempest/lib/api_schema/response/compute/v2_1/flavors_extra_specs.py
@@ -24,7 +24,7 @@ set_get_flavor_extra_specs = {
@@ -996,8 +1003,39 @@ index 5ed5a5c80..d9f1794c6 100644
'required': ['security_group_rule']
}
}
+diff --git a/tempest/lib/api_schema/response/compute/v2_1/server_groups.py b/tempest/lib/api_schema/response/compute/v2_1/server_groups.py
+index 01db20b88..49a8f0d95 100644
+--- a/tempest/lib/api_schema/response/compute/v2_1/server_groups.py
++++ b/tempest/lib/api_schema/response/compute/v2_1/server_groups.py
+@@ -29,7 +29,7 @@ common_server_group = {
+ },
+ 'metadata': {'type': 'object'}
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['id', 'name', 'policies', 'members', 'metadata']
+ }
+
+@@ -40,7 +40,7 @@ create_show_server_group = {
+ 'properties': {
+ 'server_group': common_server_group
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['server_group']
+ }
+ }
+@@ -59,7 +59,7 @@ list_server_groups = {
+ 'items': common_server_group
+ }
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['server_groups']
+ }
+ }
diff --git a/tempest/lib/api_schema/response/compute/v2_1/servers.py b/tempest/lib/api_schema/response/compute/v2_1/servers.py
-index 2954de005..e22fba32c 100644
+index 3300298d1..ac050eb43 100644
--- a/tempest/lib/api_schema/response/compute/v2_1/servers.py
+++ b/tempest/lib/api_schema/response/compute/v2_1/servers.py
@@ -29,14 +29,14 @@ create_server = {
@@ -1167,34 +1205,7 @@ index 2954de005..e22fba32c 100644
'required': ['addresses']
}
}
-@@ -362,7 +362,7 @@ common_server_group = {
- },
- 'metadata': {'type': 'object'}
- },
-- 'additionalProperties': False,
-+ 'additionalProperties': True,
- 'required': ['id', 'name', 'policies', 'members', 'metadata']
- }
-
-@@ -373,7 +373,7 @@ create_show_server_group = {
- 'properties': {
- 'server_group': common_server_group
- },
-- 'additionalProperties': False,
-+ 'additionalProperties': True,
- 'required': ['server_group']
- }
- }
-@@ -392,7 +392,7 @@ list_server_groups = {
- 'items': common_server_group
- }
- },
-- 'additionalProperties': False,
-+ 'additionalProperties': True,
- 'required': ['server_groups']
- }
- }
-@@ -408,7 +408,7 @@ instance_actions = {
+@@ -356,7 +356,7 @@ instance_actions = {
'message': {'type': ['string', 'null']},
'instance_uuid': {'type': 'string'}
},
@@ -1203,16 +1214,16 @@ index 2954de005..e22fba32c 100644
'required': ['action', 'request_id', 'user_id', 'project_id',
'start_time', 'message', 'instance_uuid']
}
-@@ -429,7 +429,7 @@ instance_action_events = {
+@@ -377,7 +377,7 @@ instance_action_events = {
'result': {'type': ['string', 'null']},
'traceback': {'type': ['string', 'null']}
},
- 'additionalProperties': False,
+ 'additionalProperties': True,
- 'required': ['event', 'start_time', 'finish_time', 'result',
- 'traceback']
- }
-@@ -445,7 +445,7 @@ list_instance_actions = {
+ # NOTE(zhufl): events.traceback can only be seen by admin users
+ # with default policy.json, so it shouldn't be a required field.
+ 'required': ['event', 'start_time', 'finish_time', 'result']
+@@ -394,7 +394,7 @@ list_instance_actions = {
'items': instance_actions
}
},
@@ -1221,7 +1232,7 @@ index 2954de005..e22fba32c 100644
'required': ['instanceActions']
}
}
-@@ -463,7 +463,7 @@ show_instance_action = {
+@@ -412,7 +412,7 @@ show_instance_action = {
'properties': {
'instanceAction': instance_actions_with_events
},
@@ -1230,7 +1241,7 @@ index 2954de005..e22fba32c 100644
'required': ['instanceAction']
}
}
-@@ -475,7 +475,7 @@ show_password = {
+@@ -424,7 +424,7 @@ show_password = {
'properties': {
'password': {'type': 'string'}
},
@@ -1239,7 +1250,7 @@ index 2954de005..e22fba32c 100644
'required': ['password']
}
}
-@@ -494,11 +494,11 @@ get_vnc_console = {
+@@ -443,11 +443,11 @@ get_vnc_console = {
'format': 'uri'
}
},
@@ -1253,7 +1264,7 @@ index 2954de005..e22fba32c 100644
'required': ['console']
}
}
-@@ -510,7 +510,7 @@ get_console_output = {
+@@ -459,7 +459,7 @@ get_console_output = {
'properties': {
'output': {'type': 'string'}
},
@@ -1262,7 +1273,7 @@ index 2954de005..e22fba32c 100644
'required': ['output']
}
}
-@@ -527,7 +527,7 @@ set_server_metadata = {
+@@ -476,7 +476,7 @@ set_server_metadata = {
}
}
},
@@ -1271,7 +1282,7 @@ index 2954de005..e22fba32c 100644
'required': ['metadata']
}
}
-@@ -552,7 +552,7 @@ set_show_server_metadata_item = {
+@@ -501,7 +501,7 @@ set_show_server_metadata_item = {
}
}
},
@@ -1280,7 +1291,7 @@ index 2954de005..e22fba32c 100644
'required': ['meta']
}
}
-@@ -583,7 +583,7 @@ evacuate_server_with_admin_pass = {
+@@ -532,7 +532,7 @@ evacuate_server_with_admin_pass = {
'properties': {
'adminPass': {'type': 'string'}
},
@@ -1448,7 +1459,7 @@ index 7f5623928..b57d1b8cb 100644
}
}
diff --git a/tempest/lib/api_schema/response/compute/v2_1/volumes.py b/tempest/lib/api_schema/response/compute/v2_1/volumes.py
-index c35dae981..c487aa5a4 100644
+index d367f2adb..1125cbfa4 100644
--- a/tempest/lib/api_schema/response/compute/v2_1/volumes.py
+++ b/tempest/lib/api_schema/response/compute/v2_1/volumes.py
@@ -42,7 +42,7 @@ create_get_volume = {
@@ -1460,9 +1471,9 @@ index c35dae981..c487aa5a4 100644
# NOTE- If volume is not attached to any server
# then, 'attachments' attributes comes as array
# with empty objects "[{}]" due to that elements
-@@ -52,13 +52,13 @@ create_get_volume = {
- }
- }
+@@ -53,13 +53,13 @@ create_get_volume = {
+ },
+ 'os-vol-host-attr:host': {'type': 'string'},
},
- 'additionalProperties': False,
+ 'additionalProperties': True,
@@ -1476,7 +1487,7 @@ index c35dae981..c487aa5a4 100644
'required': ['volume']
}
}
-@@ -93,7 +93,7 @@ list_volumes = {
+@@ -94,7 +94,7 @@ list_volumes = {
'volumeId': {'type': 'string'},
'serverId': {'type': 'string'}
},
@@ -1485,7 +1496,7 @@ index c35dae981..c487aa5a4 100644
# NOTE- If volume is not attached to any server
# then, 'attachments' attributes comes as array
# with empty object "[{}]" due to that elements
-@@ -103,7 +103,7 @@ list_volumes = {
+@@ -104,7 +104,7 @@ list_volumes = {
}
}
},
@@ -1494,7 +1505,7 @@ index c35dae981..c487aa5a4 100644
'required': ['id', 'status', 'displayName',
'availabilityZone', 'createdAt',
'displayDescription', 'volumeType',
-@@ -112,7 +112,7 @@ list_volumes = {
+@@ -113,7 +113,7 @@ list_volumes = {
}
}
},
@@ -1504,7 +1515,7 @@ index c35dae981..c487aa5a4 100644
}
}
diff --git a/tempest/lib/api_schema/response/compute/v2_11/services.py b/tempest/lib/api_schema/response/compute/v2_11/services.py
-index 18b833bd2..885e8cdac 100644
+index 9ece1f9b3..b17d180e3 100644
--- a/tempest/lib/api_schema/response/compute/v2_11/services.py
+++ b/tempest/lib/api_schema/response/compute/v2_11/services.py
@@ -36,11 +36,11 @@ update_forced_down = {
@@ -1522,7 +1533,7 @@ index 18b833bd2..885e8cdac 100644
}
}
diff --git a/tempest/lib/api_schema/response/compute/v2_16/servers.py b/tempest/lib/api_schema/response/compute/v2_16/servers.py
-index 3eb658f4e..d0a30e3b0 100644
+index fc81ff70a..495ed3ec9 100644
--- a/tempest/lib/api_schema/response/compute/v2_16/servers.py
+++ b/tempest/lib/api_schema/response/compute/v2_16/servers.py
@@ -32,7 +32,7 @@ server_detail = {
@@ -1611,10 +1622,10 @@ index 3cd0f6ec1..af6fd8ade 100644
}
}
diff --git a/tempest/lib/api_schema/response/compute/v2_26/servers.py b/tempest/lib/api_schema/response/compute/v2_26/servers.py
-index b03bdf6fa..6b3936b3c 100644
+index 5a0f98732..248605b29 100644
--- a/tempest/lib/api_schema/response/compute/v2_26/servers.py
+++ b/tempest/lib/api_schema/response/compute/v2_26/servers.py
-@@ -54,7 +54,7 @@ list_tags = {
+@@ -68,7 +68,7 @@ list_tags = {
'properties': {
'tags': tag_items,
},
@@ -1624,7 +1635,7 @@ index b03bdf6fa..6b3936b3c 100644
}
}
diff --git a/tempest/lib/api_schema/response/compute/v2_3/servers.py b/tempest/lib/api_schema/response/compute/v2_3/servers.py
-index f24103ea2..5b5c9c197 100644
+index 1674c1b11..4dcfad49c 100644
--- a/tempest/lib/api_schema/response/compute/v2_3/servers.py
+++ b/tempest/lib/api_schema/response/compute/v2_3/servers.py
@@ -40,7 +40,7 @@ server_detail = {
@@ -1690,8 +1701,21 @@ index f24103ea2..5b5c9c197 100644
# NOTE(gmann): servers_links attribute is not necessary to be
# present always So it is not 'required'.
'required': ['servers']
+diff --git a/tempest/lib/api_schema/response/compute/v2_45/images.py b/tempest/lib/api_schema/response/compute/v2_45/images.py
+index 8a48f363e..395dd177d 100644
+--- a/tempest/lib/api_schema/response/compute/v2_45/images.py
++++ b/tempest/lib/api_schema/response/compute/v2_45/images.py
+@@ -19,7 +19,7 @@ create_image = {
+ 'properties': {
+ 'image_id': {'type': 'string'}
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['image_id']
+ }
+ }
diff --git a/tempest/lib/api_schema/response/compute/v2_47/servers.py b/tempest/lib/api_schema/response/compute/v2_47/servers.py
-index 37a084f1c..aa2f312a4 100644
+index d580f2c63..52ac89370 100644
--- a/tempest/lib/api_schema/response/compute/v2_47/servers.py
+++ b/tempest/lib/api_schema/response/compute/v2_47/servers.py
@@ -30,7 +30,7 @@ flavor = {
@@ -1704,7 +1728,7 @@ index 37a084f1c..aa2f312a4 100644
}
diff --git a/tempest/lib/api_schema/response/compute/v2_48/servers.py b/tempest/lib/api_schema/response/compute/v2_48/servers.py
-index 59047583a..e688db305 100644
+index e2e45bc29..3310783ed 100644
--- a/tempest/lib/api_schema/response/compute/v2_48/servers.py
+++ b/tempest/lib/api_schema/response/compute/v2_48/servers.py
@@ -45,7 +45,7 @@ show_server_diagnostics = {
@@ -1750,11 +1774,78 @@ index 59047583a..e688db305 100644
'required': [
'state', 'driver', 'hypervisor', 'hypervisor_os', 'uptime',
'config_drive', 'num_cpus', 'num_nics', 'num_disks',
+diff --git a/tempest/lib/api_schema/response/compute/v2_53/services.py b/tempest/lib/api_schema/response/compute/v2_53/services.py
+index 97b0c7260..b526e638d 100644
+--- a/tempest/lib/api_schema/response/compute/v2_53/services.py
++++ b/tempest/lib/api_schema/response/compute/v2_53/services.py
+@@ -51,13 +51,13 @@ update_service = {
+ 'zone': {'type': 'string'},
+ 'forced_down': {'type': 'boolean'}
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['id', 'binary', 'disabled_reason', 'host',
+ 'state', 'status', 'updated_at', 'zone',
+ 'forced_down']
+ }
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['service']
+ }
+ }
+diff --git a/tempest/lib/api_schema/response/compute/v2_55/flavors.py b/tempest/lib/api_schema/response/compute/v2_55/flavors.py
+index 554f43b4c..07adf6270 100644
+--- a/tempest/lib/api_schema/response/compute/v2_55/flavors.py
++++ b/tempest/lib/api_schema/response/compute/v2_55/flavors.py
+@@ -45,13 +45,13 @@ list_flavors = {
+ 'id': {'type': 'string'},
+ 'description': flavor_description
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['name', 'links', 'id', 'description']
+ }
+ },
+ 'flavors_links': parameter_types.links
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ # NOTE(gmann): flavors_links attribute is not necessary
+ # to be present always So it is not 'required'.
+ 'required': ['flavors']
+@@ -76,7 +76,7 @@ common_flavor_info = {
+ 'OS-FLV-EXT-DATA:ephemeral': {'type': 'integer'},
+ 'description': flavor_description
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ # 'OS-FLV-DISABLED', 'os-flavor-access', 'rxtx_factor' and
+ # 'OS-FLV-EXT-DATA' are API extensions. So they are not 'required'.
+ 'required': ['name', 'links', 'ram', 'vcpus', 'swap', 'disk', 'id',
+@@ -96,7 +96,7 @@ list_flavors_details = {
+ # to be present always So it is not 'required'.
+ 'flavors_links': parameter_types.links
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['flavors']
+ }
+ }
+@@ -108,7 +108,7 @@ create_update_get_flavor_details = {
+ 'properties': {
+ 'flavor': common_flavor_info
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['flavor']
+ }
+ }
diff --git a/tempest/lib/api_schema/response/compute/v2_6/servers.py b/tempest/lib/api_schema/response/compute/v2_6/servers.py
-index 29b3e8600..4caf107a4 100644
+index 922bf7923..b94050d86 100644
--- a/tempest/lib/api_schema/response/compute/v2_6/servers.py
+++ b/tempest/lib/api_schema/response/compute/v2_6/servers.py
-@@ -38,11 +38,11 @@ get_remote_consoles = {
+@@ -50,11 +50,11 @@ get_remote_consoles = {
'format': 'uri'
}
},
@@ -1768,6 +1859,104 @@ index 29b3e8600..4caf107a4 100644
'required': ['remote_console']
}
}
+diff --git a/tempest/lib/api_schema/response/compute/v2_61/flavors.py b/tempest/lib/api_schema/response/compute/v2_61/flavors.py
+index 5119466ba..6d1d8ddb5 100644
+--- a/tempest/lib/api_schema/response/compute/v2_61/flavors.py
++++ b/tempest/lib/api_schema/response/compute/v2_61/flavors.py
+@@ -58,7 +58,7 @@ common_flavor_info = {
+ 'description': flavor_description,
+ 'extra_specs': flavor_extra_specs
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ # 'OS-FLV-DISABLED', 'os-flavor-access', 'rxtx_factor' and
+ # 'OS-FLV-EXT-DATA' are API extensions. so they are not 'required'.
+ 'required': ['name', 'links', 'ram', 'vcpus', 'swap', 'disk', 'id',
+@@ -78,7 +78,7 @@ list_flavors_details = {
+ # to be present always so it is not 'required'.
+ 'flavors_links': parameter_types.links
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['flavors']
+ }
+ }
+@@ -90,7 +90,7 @@ create_update_get_flavor_details = {
+ 'properties': {
+ 'flavor': common_flavor_info
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['flavor']
+ }
+ }
+diff --git a/tempest/lib/api_schema/response/volume/qos.py b/tempest/lib/api_schema/response/volume/qos.py
+index d1b391062..3594c3f4e 100644
+--- a/tempest/lib/api_schema/response/volume/qos.py
++++ b/tempest/lib/api_schema/response/volume/qos.py
+@@ -25,7 +25,7 @@ show_qos = {
+ 'consumer': {'type': 'string'},
+ 'specs': {'type': ['object', 'null']},
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['name', 'id', 'specs']
+ },
+ 'links': {
+@@ -37,12 +37,12 @@ show_qos = {
+ 'format': 'uri'},
+ 'rel': {'type': 'string'},
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['href', 'rel']
+ }
+ }
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['qos_specs', 'links']
+ }
+ }
+@@ -67,12 +67,12 @@ list_qos = {
+ 'id': {'type': 'string', 'format': 'uuid'},
+ 'name': {'type': 'string'}
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['specs', 'id', 'name']
+ }
+ }
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['qos_specs']
+ }
+ }
+@@ -87,7 +87,7 @@ set_qos_key = {
+ 'patternProperties': {'^.+$': {'type': 'string'}}
+ },
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['qos_specs']
+ }
+ }
+@@ -109,12 +109,12 @@ show_association_qos = {
+ 'id': {'type': 'string', 'format': 'uuid'},
+ 'name': {'type': 'string'}
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['association_type', 'id', 'name']
+ }
+ },
+ },
+- 'additionalProperties': False,
++ 'additionalProperties': True,
+ 'required': ['qos_associations']
+ }
+ }
--
-2.17.1
+2.20.1
diff --git a/etc/patches/functest/disable-api-validation/apply.sh b/etc/patches/functest/disable-api-validation/apply.sh
index 915bce43..09ff912e 100755
--- a/etc/patches/functest/disable-api-validation/apply.sh
+++ b/etc/patches/functest/disable-api-validation/apply.sh
@@ -1,9 +1,18 @@
#!/bin/bash
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
set -e
set -u
# without setting the user, git does not allow to create a commit
-git config --global user.email "verified@opnfv.org"
+git config --global user.email "ovp-support@lfnetworking.org"
git config --global user.name "Dovetail"
cd /src/tempest
diff --git a/etc/testcase/bottlenecks.stress.ping.yml b/etc/testcase/bottlenecks.stress.ping.yml
index 63717047..e8c2acb9 100644
--- a/etc/testcase/bottlenecks.stress.ping.yml
+++ b/etc/testcase/bottlenecks.stress.ping.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
bottlenecks.stress.ping:
name: bottlenecks.stress.ping
@@ -14,5 +23,7 @@ bottlenecks.stress.ping:
dest_archive_files:
- stress_logs/bottlenecks.stress.ping.log
- stress_logs/bottlenecks.stress.ping.out
- check_results_file: stress_logs/bottlenecks.stress.ping.out
+ check_results_files:
+ - stress_logs/bottlenecks.stress.ping.out
+ portal_key_file: stress_logs/bottlenecks.stress.ping.log
sub_testcase_list:
diff --git a/etc/testcase/functest.bgpvpn.router_association.yml b/etc/testcase/functest.bgpvpn.router_association.yml
deleted file mode 100644
index 0922fb01..00000000
--- a/etc/testcase/functest.bgpvpn.router_association.yml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-functest.bgpvpn.router_association:
- name: functest.bgpvpn.router_association
- objective: VPN provides connectivity between subnets using association of Neutron Router to VPNs
- validate:
- type: functest
- testcase: bgpvpn
- image_name: opnfv/functest-features
- pre_copy:
- exist_src_file: sdnvpn_config_testcase4.yaml
- dest_path: /usr/lib/python2.7/site-packages/sdnvpn/test/functest/config.yaml
- pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/bgpvpn_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- report:
- source_archive_files:
- - functest.log
- dest_archive_files:
- - bgpvpn_logs/functest.bgpvpn.router_association.log
- check_results_file: 'functest_results.txt'
- sub_testcase_list:
diff --git a/etc/testcase/functest.bgpvpn.router_association_floating_ip.yml b/etc/testcase/functest.bgpvpn.router_association_floating_ip.yml
deleted file mode 100644
index d27400fb..00000000
--- a/etc/testcase/functest.bgpvpn.router_association_floating_ip.yml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-functest.bgpvpn.router_association_floating_ip:
- name: functest.bgpvpn.router_association_floating_ip
- objective: associate Neutron Router with an attached subnet to a VPN and verify reachability of the Floating IP
- validate:
- type: functest
- testcase: bgpvpn
- image_name: opnfv/functest-features
- pre_copy:
- exist_src_file: sdnvpn_config_testcase8.yaml
- dest_path: /usr/lib/python2.7/site-packages/sdnvpn/test/functest/config.yaml
- pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/bgpvpn_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- report:
- source_archive_files:
- - functest.log
- dest_archive_files:
- - bgpvpn_logs/functest.bgpvpn.router_association_floating_ip.log
- check_results_file: 'functest_results.txt'
- sub_testcase_list:
diff --git a/etc/testcase/functest.bgpvpn.subnet_connectivity.yml b/etc/testcase/functest.bgpvpn.subnet_connectivity.yml
deleted file mode 100644
index 7260ccd8..00000000
--- a/etc/testcase/functest.bgpvpn.subnet_connectivity.yml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-functest.bgpvpn.subnet_connectivity:
- name: functest.bgpvpn.subnet_connectivity
- objective: Connectivity between Neutron subnets through association of Neutron Networks to VPNs
- validate:
- type: functest
- testcase: bgpvpn
- image_name: opnfv/functest-features
- pre_copy:
- exist_src_file: sdnvpn_config_testcase1.yaml
- dest_path: /usr/lib/python2.7/site-packages/sdnvpn/test/functest/config.yaml
- pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/bgpvpn_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- report:
- source_archive_files:
- - functest.log
- dest_archive_files:
- - bgpvpn_logs/functest.bgpvpn.subnet_connectivity.log
- check_results_file: 'functest_results.txt'
- sub_testcase_list:
diff --git a/etc/testcase/functest.bgpvpn.tenant_separation.yml b/etc/testcase/functest.bgpvpn.tenant_separation.yml
deleted file mode 100644
index efc34fb8..00000000
--- a/etc/testcase/functest.bgpvpn.tenant_separation.yml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-functest.bgpvpn.tenant_separation:
- name: functest.bgpvpn.tenant_separation
- objective: Separation of tenant networks through association to different VPNs
- validate:
- type: functest
- testcase: bgpvpn
- image_name: opnfv/functest-features
- pre_copy:
- exist_src_file: sdnvpn_config_testcase2.yaml
- dest_path: /usr/lib/python2.7/site-packages/sdnvpn/test/functest/config.yaml
- pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/bgpvpn_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- report:
- source_archive_files:
- - functest.log
- dest_archive_files:
- - bgpvpn_logs/functest.bgpvpn.tenant_separation.log
- check_results_file: 'functest_results.txt'
- sub_testcase_list:
diff --git a/etc/testcase/functest.healthcheck.api_check.yml b/etc/testcase/functest.healthcheck.api_check.yml
deleted file mode 100644
index ed586689..00000000
--- a/etc/testcase/functest.healthcheck.api_check.yml
+++ /dev/null
@@ -1,15 +0,0 @@
----
-functest.healthcheck.api_check:
- name: functest.healthcheck.api_check
- objective: verify OpenStack API with simple queries
- validate:
- type: functest
- testcase: api_check
- image_name: opnfv/functest-healthcheck
- report:
- source_archive_files:
- - functest.log
- dest_archive_files:
- - healthcheck_logs/functest.healthcheck.api_check.log
- check_results_file: 'functest_results.txt'
- sub_testcase_list:
diff --git a/etc/testcase/functest.healthcheck.connection_check.yml b/etc/testcase/functest.healthcheck.connection_check.yml
index e9ddd40b..b3a232b5 100644
--- a/etc/testcase/functest.healthcheck.connection_check.yml
+++ b/etc/testcase/functest.healthcheck.connection_check.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.healthcheck.connection_check:
name: functest.healthcheck.connection_check
@@ -11,5 +20,7 @@ functest.healthcheck.connection_check:
- functest.log
dest_archive_files:
- healthcheck_logs/functest.healthcheck.connection_check.log
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: healthcheck_logs/functest.healthcheck.connection_check.log
sub_testcase_list:
diff --git a/etc/testcase/functest.healthcheck.snaps_health_check.yml b/etc/testcase/functest.healthcheck.snaps_health_check.yml
deleted file mode 100644
index 014ccfd4..00000000
--- a/etc/testcase/functest.healthcheck.snaps_health_check.yml
+++ /dev/null
@@ -1,15 +0,0 @@
----
-functest.healthcheck.snaps_health_check:
- name: functest.healthcheck.snaps_health_check
- objective: basic VM instance creation with port and IPv4 address through DHCP
- validate:
- type: functest
- testcase: snaps_health_check
- image_name: opnfv/functest-healthcheck
- report:
- source_archive_files:
- - functest.log
- dest_archive_files:
- - healthcheck_logs/functest.healthcheck.snaps_health_check.log
- check_results_file: 'functest_results.txt'
- sub_testcase_list:
diff --git a/etc/testcase/functest.k8s.conformance.yml b/etc/testcase/functest.k8s.conformance.yml
index db85342c..d203c89e 100644
--- a/etc/testcase/functest.k8s.conformance.yml
+++ b/etc/testcase/functest.k8s.conformance.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.k8s.conformance:
name: functest.k8s.conformance
@@ -11,5 +20,7 @@ functest.k8s.conformance:
- functest-kubernetes.log
dest_archive_files:
- k8s_logs/functest.k8s.conformance.log
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: k8s_logs/functest.k8s.conformance.log
sub_testcase_list:
diff --git a/etc/testcase/functest.k8s.smoke.yml b/etc/testcase/functest.k8s.smoke.yml
index cedc0223..a674dfef 100644
--- a/etc/testcase/functest.k8s.smoke.yml
+++ b/etc/testcase/functest.k8s.smoke.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.k8s.smoke:
name: functest.k8s.smoke
@@ -11,5 +20,7 @@ functest.k8s.smoke:
- functest-kubernetes.log
dest_archive_files:
- k8s_logs/functest.k8s.smoke.log
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: k8s_logs/functest.k8s.smoke.log
sub_testcase_list:
diff --git a/etc/testcase/functest.rally.authenticate.yml b/etc/testcase/functest.rally.authenticate.yml
new file mode 100644
index 00000000..d327f0ee
--- /dev/null
+++ b/etc/testcase/functest.rally.authenticate.yml
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+functest.rally.authenticate:
+ name: functest.rally.authenticate
+ objective: Run all Functest Rally Authenticate test cases
+ validate:
+ type: functest
+ testcase: rally_full
+ image_name: opnfv/functest-benchmarking
+ pre_condition:
+ - 'cp /home/opnfv/userconfig/rally_authenticate_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ report:
+ source_archive_files:
+ - functest.log
+ - rally_full/authenticate.json
+ - rally_full/rally_full.html
+ - rally_full/rally_full.xml
+ dest_archive_files:
+ - rally_authenticate_logs/functest.rally.authenticate.functest.log
+ - rally_authenticate_logs/authenticate.json
+ - rally_authenticate_logs/authenticate.html
+ - rally_authenticate_logs/authenticate.xml
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: rally_authenticate_logs/functest.rally.authenticate.functest.log
+ sub_testcase_list:
+ - 'Authenticate.keystone'
+ - 'Authenticate.validate_cinder'
+ - 'Authenticate.validate_glance'
+ - 'Authenticate.validate_heat'
+ - 'Authenticate.validate_neutron'
+ - 'Authenticate.validate_nova'
diff --git a/etc/testcase/functest.rally.cinder.yml b/etc/testcase/functest.rally.cinder.yml
new file mode 100644
index 00000000..955a0026
--- /dev/null
+++ b/etc/testcase/functest.rally.cinder.yml
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+functest.rally.cinder:
+ name: functest.rally.cinder
+ objective: Run all Functest Rally Cinder test cases
+ validate:
+ type: functest
+ testcase: rally_full
+ image_name: opnfv/functest-benchmarking
+ pre_condition:
+ - 'cp /home/opnfv/userconfig/rally_cinder_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ report:
+ source_archive_files:
+ - functest.log
+ - rally_full/cinder.json
+ - rally_full/rally_full.html
+ - rally_full/rally_full.xml
+ dest_archive_files:
+ - rally_cinder_logs/functest.rally.cinder.functest.log
+ - rally_cinder_logs/cinder.json
+ - rally_cinder_logs/cinder.html
+ - rally_cinder_logs/cinder.xml
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: rally_cinder_logs/functest.rally.cinder.functest.log
+ sub_testcase_list:
+ - 'CinderVolumes.create_and_attach_volume'
+ - 'CinderVolumes.create_and_list_snapshots'
+ - 'CinderVolumes.create_and_list_volume'
+ - 'CinderVolumes.create_and_list_volume-2'
+ - 'CinderVolumes.create_and_upload_volume_to_image'
+ - 'CinderVolumes.create_nested_snapshots_and_attach_volume'
+ - 'CinderVolumes.create_snapshot_and_attach_volume'
+ - 'CinderVolumes.create_volume'
+ - 'CinderVolumes.create_volume-2'
+ - 'CinderVolumes.list_volumes'
+ - 'CinderVolumes.create_and_delete_snapshot'
+ - 'CinderVolumes.create_and_delete_volume'
+ - 'CinderVolumes.create_and_delete_volume-2'
+ - 'CinderVolumes.create_and_delete_volume-3'
+ - 'CinderVolumes.create_and_extend_volume'
+ - 'CinderVolumes.create_from_volume_and_delete_volume'
+ - 'CinderQos.create_and_get_qos'
+ - 'CinderQos.create_and_list_qos'
+ - 'CinderQos.create_and_set_qos'
+ - 'CinderVolumeTypes.create_and_get_volume_type'
+ - 'CinderVolumeTypes.create_and_list_volume_types'
+ - 'CinderVolumeTypes.create_and_update_volume_type'
+ - 'CinderVolumeTypes.create_volume_type_and_encryption_type'
+ - 'CinderVolumeTypes.create_volume_type_add_and_list_type_access'
diff --git a/etc/testcase/functest.rally.glance.yml b/etc/testcase/functest.rally.glance.yml
new file mode 100644
index 00000000..589c861b
--- /dev/null
+++ b/etc/testcase/functest.rally.glance.yml
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+functest.rally.glance:
+ name: functest.rally.glance
+ objective: Run all Functest Rally Glance test cases
+ validate:
+ type: functest
+ testcase: rally_full
+ image_name: opnfv/functest-benchmarking
+ pre_condition:
+ - 'cp /home/opnfv/userconfig/rally_glance_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ report:
+ source_archive_files:
+ - functest.log
+ - rally_full/glance.json
+ - rally_full/rally_full.html
+ - rally_full/rally_full.xml
+ dest_archive_files:
+ - rally_glance_logs/functest.rally.glance.functest.log
+ - rally_glance_logs/glance.json
+ - rally_glance_logs/glance.html
+ - rally_glance_logs/glance.xml
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: rally_glance_logs/functest.rally.glance.functest.log
+ sub_testcase_list:
+ - 'GlanceImages.create_and_delete_image'
+ - 'GlanceImages.create_and_list_image'
+ - 'GlanceImages.list_images'
+ - 'GlanceImages.create_image_and_boot_instances'
+ - 'GlanceImages.create_and_deactivate_image'
+ - 'GlanceImages.create_and_download_image'
+ - 'GlanceImages.create_and_get_image'
+ - 'GlanceImages.create_and_update_image'
diff --git a/etc/testcase/functest.rally.gnocchi.yml b/etc/testcase/functest.rally.gnocchi.yml
new file mode 100644
index 00000000..7ce9c117
--- /dev/null
+++ b/etc/testcase/functest.rally.gnocchi.yml
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+functest.rally.gnocchi:
+ name: functest.rally.gnocchi
+ objective: Run all Functest Rally Gnocchi test cases
+ validate:
+ type: functest
+ testcase: rally_full
+ image_name: opnfv/functest-benchmarking
+ pre_condition:
+ - 'cp /home/opnfv/userconfig/rally_gnocchi_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ report:
+ source_archive_files:
+ - functest.log
+ - rally_full/gnocchi.json
+ - rally_full/rally_full.html
+ - rally_full/rally_full.xml
+ dest_archive_files:
+ - rally_gnocchi_logs/functest.rally.gnocchi.functest.log
+ - rally_gnocchi_logs/gnocchi.json
+ - rally_gnocchi_logs/gnocchi.html
+ - rally_gnocchi_logs/gnocchi.xml
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: rally_gnocchi_logs/functest.rally.gnocchi.functest.log
+ sub_testcase_list:
+ - 'Gnocchi.list_capabilities'
+ - 'Gnocchi.get_status'
+ - 'GnocchiArchivePolicyRule.list_archive_policy_rule'
+ - 'GnocchiArchivePolicyRule.create_archive_policy_rule'
+ - 'GnocchiArchivePolicyRule.create_delete_archive_policy_rule'
+ - 'GnocchiArchivePolicy.list_archive_policy'
+ - 'GnocchiArchivePolicy.create_archive_policy'
+ - 'GnocchiArchivePolicy.create_delete_archive_policy'
+ - 'GnocchiResourceType.list_resource_type'
+ - 'GnocchiResourceType.create_resource_type'
+ - 'GnocchiResourceType.create_delete_resource_type'
+ - 'GnocchiMetric.list_metric'
+ - 'GnocchiMetric.create_metric'
+ - 'GnocchiMetric.create_delete_metric'
+ - 'GnocchiResource.create_resource'
+ - 'GnocchiResource.create_delete_resource'
diff --git a/etc/testcase/functest.rally.heat.yml b/etc/testcase/functest.rally.heat.yml
new file mode 100644
index 00000000..a6a632a2
--- /dev/null
+++ b/etc/testcase/functest.rally.heat.yml
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+functest.rally.heat:
+ name: functest.rally.heat
+ objective: Run all Functest Rally Heat test cases
+ validate:
+ type: functest
+ testcase: rally_full
+ image_name: opnfv/functest-benchmarking
+ pre_condition:
+ - 'cp /home/opnfv/userconfig/rally_heat_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ report:
+ source_archive_files:
+ - functest.log
+ - rally_full/heat.json
+ - rally_full/rally_full.html
+ - rally_full/rally_full.xml
+ dest_archive_files:
+ - rally_heat_logs/functest.rally.heat.functest.log
+ - rally_heat_logs/heat.json
+ - rally_heat_logs/heat.html
+ - rally_heat_logs/heat.xml
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: rally_heat_logs/functest.rally.heat.functest.log
+ sub_testcase_list:
+ - 'HeatStacks.create_and_delete_stack'
+ - 'HeatStacks.create_and_delete_stack-2'
+ - 'HeatStacks.create_and_delete_stack-3'
+ - 'HeatStacks.create_and_list_stack'
+ - 'HeatStacks.create_update_delete_stack'
+ - 'HeatStacks.create_update_delete_stack-2'
+ - 'HeatStacks.create_update_delete_stack-3'
+ - 'HeatStacks.create_update_delete_stack-4'
+ - 'HeatStacks.create_update_delete_stack-5'
+ - 'HeatStacks.create_update_delete_stack-6'
+ - 'HeatStacks.create_check_delete_stack'
+ - 'HeatStacks.create_suspend_resume_delete_stack'
+ - 'HeatStacks.list_stacks_and_resources'
diff --git a/etc/testcase/functest.rally.keystone.yml b/etc/testcase/functest.rally.keystone.yml
new file mode 100644
index 00000000..01b5751f
--- /dev/null
+++ b/etc/testcase/functest.rally.keystone.yml
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+functest.rally.keystone:
+ name: functest.rally.keystone
+ objective: Run all Functest Rally Keystone test cases
+ validate:
+ type: functest
+ testcase: rally_full
+ image_name: opnfv/functest-benchmarking
+ pre_condition:
+ - 'cp /home/opnfv/userconfig/rally_keystone_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ report:
+ source_archive_files:
+ - functest.log
+ - rally_full/keystone.json
+ - rally_full/rally_full.html
+ - rally_full/rally_full.xml
+ dest_archive_files:
+ - rally_keystone_logs/functest.rally.keystone.functest.log
+ - rally_keystone_logs/keystone.json
+ - rally_keystone_logs/keystone.html
+ - rally_keystone_logs/keystone.xml
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: rally_keystone_logs/functest.rally.keystone.functest.log
+ sub_testcase_list:
+ - 'KeystoneBasic.add_and_remove_user_role'
+ - 'KeystoneBasic.create_add_and_list_user_roles'
+ - 'KeystoneBasic.create_and_list_tenants'
+ - 'KeystoneBasic.create_and_delete_role'
+ - 'KeystoneBasic.create_and_delete_service'
+ - 'KeystoneBasic.get_entities'
+ - 'KeystoneBasic.create_update_and_delete_tenant'
+ - 'KeystoneBasic.create_user'
+ - 'KeystoneBasic.create_tenant'
+ - 'KeystoneBasic.create_and_list_users'
+ - 'KeystoneBasic.create_tenant_with_users'
diff --git a/etc/testcase/functest.rally.neutron.yml b/etc/testcase/functest.rally.neutron.yml
new file mode 100644
index 00000000..233a1ff6
--- /dev/null
+++ b/etc/testcase/functest.rally.neutron.yml
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+functest.rally.neutron:
+ name: functest.rally.neutron
+ objective: Run all Functest Rally Neutron test cases
+ validate:
+ type: functest
+ testcase: rally_full
+ image_name: opnfv/functest-benchmarking
+ pre_condition:
+ - 'cp /home/opnfv/userconfig/rally_neutron_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ report:
+ source_archive_files:
+ - functest.log
+ - rally_full/neutron.json
+ - rally_full/rally_full.html
+ - rally_full/rally_full.xml
+ dest_archive_files:
+ - rally_neutron_logs/functest.rally.neutron.functest.log
+ - rally_neutron_logs/neutron.json
+ - rally_neutron_logs/neutron.html
+ - rally_neutron_logs/neutron.xml
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: rally_neutron_logs/functest.rally.neutron.functest.log
+ sub_testcase_list:
+ - 'NeutronNetworks.create_and_update_networks'
+ - 'NeutronNetworks.create_and_update_ports'
+ - 'NeutronNetworks.create_and_update_routers'
+ - 'NeutronNetworks.create_and_update_subnets'
+ - 'NeutronNetworks.create_and_delete_networks'
+ - 'NeutronNetworks.create_and_delete_ports'
+ - 'NeutronNetworks.create_and_delete_routers'
+ - 'NeutronNetworks.create_and_delete_subnets'
+ - 'NeutronNetworks.create_and_list_networks'
+ - 'NeutronNetworks.create_and_list_ports'
+ - 'NeutronNetworks.create_and_list_routers'
+ - 'NeutronNetworks.create_and_list_subnets'
+ - 'NeutronSecurityGroup.create_and_delete_security_groups'
+ - 'NeutronSecurityGroup.create_and_delete_security_group_rule'
+ - 'NeutronSecurityGroup.create_and_list_security_group_rules'
+ - 'NeutronSecurityGroup.create_and_show_security_group'
+ - 'NeutronNetworks.set_and_clear_router_gateway'
+ - 'NeutronNetworks.create_and_show_ports'
+ - 'NeutronNetworks.create_and_show_routers'
+ - 'NeutronNetworks.create_and_show_subnets'
diff --git a/etc/testcase/functest.rally.nova.yml b/etc/testcase/functest.rally.nova.yml
new file mode 100644
index 00000000..2cc07264
--- /dev/null
+++ b/etc/testcase/functest.rally.nova.yml
@@ -0,0 +1,61 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+functest.rally.nova:
+ name: functest.rally.nova
+ objective: Run all Functest Rally Nova test cases
+ validate:
+ type: functest
+ testcase: rally_full
+ image_name: opnfv/functest-benchmarking
+ pre_condition:
+ - 'cp /home/opnfv/userconfig/rally_nova_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ report:
+ source_archive_files:
+ - functest.log
+ - rally_full/nova.json
+ - rally_full/rally_full.html
+ - rally_full/rally_full.xml
+ dest_archive_files:
+ - rally_nova_logs/functest.rally.nova.functest.log
+ - rally_nova_logs/nova.json
+ - rally_nova_logs/nova.html
+ - rally_nova_logs/nova.xml
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: rally_nova_logs/functest.rally.nova.functest.log
+ sub_testcase_list:
+ - 'NovaKeypair.create_and_delete_keypair'
+ - 'NovaKeypair.create_and_list_keypairs'
+ - 'NovaServers.boot_and_bounce_server'
+ - 'NovaServers.boot_and_delete_server'
+ - 'NovaServers.boot_and_list_server'
+ - 'NovaServers.boot_and_rebuild_server'
+ - 'NovaServers.snapshot_server'
+ - 'NovaServers.boot_server_from_volume'
+ - 'NovaServers.boot_server'
+ - 'NovaServers.list_servers'
+ - 'NovaServers.resize_server'
+ - 'NovaServers.boot_and_live_migrate_server'
+ - 'NovaServers.boot_server_attach_created_volume_and_live_migrate'
+ - 'NovaServers.boot_server_from_volume_and_live_migrate'
+ - 'NovaKeypair.boot_and_delete_server_with_keypair'
+ - 'NovaServers.boot_server_from_volume_and_delete'
+ - 'NovaServers.pause_and_unpause_server'
+ - 'NovaServers.boot_and_migrate_server'
+ - 'NovaServers.boot_server_and_list_interfaces'
+ - 'NovaServers.boot_and_get_console_url'
+ - 'NovaServers.boot_server_and_attach_interface'
+ - 'NovaServers.boot_server_attach_volume_and_list_attachments'
+ - 'NovaServers.boot_server_associate_and_dissociate_floating_ip'
+ - 'NovaServers.boot_and_associate_floating_ip'
+ - 'NovaServerGroups.create_and_delete_server_group'
+ - 'NovaServerGroups.create_and_get_server_group'
+ - 'NovaServerGroups.create_and_list_server_groups'
diff --git a/etc/testcase/functest.rally.quotas.yml b/etc/testcase/functest.rally.quotas.yml
new file mode 100644
index 00000000..44449e9c
--- /dev/null
+++ b/etc/testcase/functest.rally.quotas.yml
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+functest.rally.quotas:
+ name: functest.rally.quotas
+ objective: Run all Functest Rally Quotas test cases
+ validate:
+ type: functest
+ testcase: rally_full
+ image_name: opnfv/functest-benchmarking
+ pre_condition:
+ - 'cp /home/opnfv/userconfig/rally_quotas_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ report:
+ source_archive_files:
+ - functest.log
+ - rally_full/quotas.json
+ - rally_full/rally_full.html
+ - rally_full/rally_full.xml
+ dest_archive_files:
+ - rally_quotas_logs/functest.rally.quotas.functest.log
+ - rally_quotas_logs/quotas.json
+ - rally_quotas_logs/quotas.html
+ - rally_quotas_logs/quotas.xml
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: rally_quotas_logs/functest.rally.quotas.functest.log
+ sub_testcase_list:
+ - 'Quotas.cinder_update_and_delete'
+ - 'Quotas.cinder_update'
+ - 'Quotas.neutron_update'
+ - 'Quotas.nova_update'
diff --git a/etc/testcase/functest.security.patrole.yml b/etc/testcase/functest.security.patrole.yml
index db1d9e4b..6dd8c67d 100644
--- a/etc/testcase/functest.security.patrole.yml
+++ b/etc/testcase/functest.security.patrole.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.security.patrole:
name: functest.security.patrole
@@ -6,17 +15,20 @@ functest.security.patrole:
type: functest
testcase: patrole
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ - 'cp /home/opnfv/userconfig/patrole_blacklist.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/blacklist.yaml'
report:
source_archive_files:
- functest.log
- - patrole/tempest.log
+ - patrole/rally.log
- patrole/tempest-report.html
dest_archive_files:
- security_logs/functest.security.patrole.functest.log
- security_logs/functest.security.patrole.log
- security_logs/functest.security.patrole.html
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: security_logs/functest.security.patrole.html
sub_testcase_list:
- patrole_tempest_plugin.tests.api.image.test_image_namespace_objects_rbac.ImageNamespacesObjectsRbacTest.test_create_metadef_object_in_namespace
- patrole_tempest_plugin.tests.api.image.test_image_namespace_objects_rbac.ImageNamespacesObjectsRbacTest.test_list_metadef_objects_in_namespace
@@ -27,7 +39,6 @@ functest.security.patrole:
- patrole_tempest_plugin.tests.api.image.test_image_namespace_property_rbac.NamespacesPropertyRbacTest.test_get_md_property
- patrole_tempest_plugin.tests.api.image.test_image_namespace_property_rbac.NamespacesPropertyRbacTest.test_modify_md_properties
- patrole_tempest_plugin.tests.api.image.test_image_namespace_rbac.ImageNamespacesRbacTest.test_create_metadef_namespace
- - patrole_tempest_plugin.tests.api.image.test_image_namespace_rbac.ImageNamespacesRbacTest.test_list_metadef_namespaces
- patrole_tempest_plugin.tests.api.image.test_image_namespace_rbac.ImageNamespacesRbacTest.test_modify_metadef_namespace
- patrole_tempest_plugin.tests.api.image.test_image_namespace_tags_rbac.NamespaceTagsRbacTest.test_create_namespace_tag
- patrole_tempest_plugin.tests.api.image.test_image_namespace_tags_rbac.NamespaceTagsRbacTest.test_create_namespace_tags
@@ -58,28 +69,35 @@ functest.security.patrole:
- patrole_tempest_plugin.tests.api.network.test_agents_rbac.DHCPAgentSchedulersRbacTest.test_add_dhcp_agent_to_network
- patrole_tempest_plugin.tests.api.network.test_agents_rbac.DHCPAgentSchedulersRbacTest.test_delete_network_from_dhcp_agent
- patrole_tempest_plugin.tests.api.network.test_agents_rbac.DHCPAgentSchedulersRbacTest.test_list_networks_hosted_by_one_dhcp_agent
+ - patrole_tempest_plugin.tests.api.network.test_flavors_rbac.FlavorsPluginRbacTest.test_create_flavor
+ - patrole_tempest_plugin.tests.api.network.test_flavors_rbac.FlavorsPluginRbacTest.test_delete_flavor
+ - patrole_tempest_plugin.tests.api.network.test_flavors_rbac.FlavorsPluginRbacTest.test_list_flavors
+ - patrole_tempest_plugin.tests.api.network.test_flavors_rbac.FlavorsPluginRbacTest.test_show_flavor
+ - patrole_tempest_plugin.tests.api.network.test_flavors_rbac.FlavorsPluginRbacTest.test_update_flavor
- patrole_tempest_plugin.tests.api.network.test_floating_ips_rbac.FloatingIpsRbacTest.test_create_floating_ip
- patrole_tempest_plugin.tests.api.network.test_floating_ips_rbac.FloatingIpsRbacTest.test_create_floating_ip_floatingip_address
- patrole_tempest_plugin.tests.api.network.test_floating_ips_rbac.FloatingIpsRbacTest.test_delete_floating_ip
- patrole_tempest_plugin.tests.api.network.test_floating_ips_rbac.FloatingIpsRbacTest.test_show_floating_ip
- patrole_tempest_plugin.tests.api.network.test_floating_ips_rbac.FloatingIpsRbacTest.test_update_floating_ip
+ - patrole_tempest_plugin.tests.api.network.test_network_segments_rbac.NetworkSegmentsRbacTest.test_create_network_segments
+ - patrole_tempest_plugin.tests.api.network.test_network_segments_rbac.NetworkSegmentsRbacTest.test_show_network_segments
+ - patrole_tempest_plugin.tests.api.network.test_network_segments_rbac.NetworkSegmentsRbacTest.test_update_network_segments
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network
+ - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_is_default
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_router_external
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_shared
- - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_subnet
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_delete_network
- - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_delete_subnet
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_list_dhcp_agents_on_hosting_network
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_network
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_network_provider_network_type
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_network_provider_physical_network
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_network_provider_segmentation_id
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_network_router_external
- - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_show_subnet
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_network
+ - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_network_provider_physical_network
+ - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_network_provider_segmentation_id
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_network_router_external
- patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_network_shared
- - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_subnet
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_allowed_address_pairs
- patrole_tempest_plugin.tests.api.network.test_ports_rbac.PortsRbacTest.test_create_port_binding_host_id
@@ -108,6 +126,7 @@ functest.security.patrole:
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_create_router_external_fixed_ips
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_delete_router
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_remove_router_interface
+ - patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_show_high_availability_router
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_show_router
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_update_router
- patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_update_router_enable_snat
@@ -121,7 +140,7 @@ functest.security.patrole:
- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_list_security_group_rules
- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_list_security_groups
- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_show_security_group_rule
- - patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_show_security_groups
+ - patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_show_security_group
- patrole_tempest_plugin.tests.api.network.test_security_groups_rbac.SecGroupRbacTest.test_update_security_group
- patrole_tempest_plugin.tests.api.network.test_service_providers_rbac.ServiceProvidersRbacTest.test_list_service_providers
- patrole_tempest_plugin.tests.api.network.test_subnetpools_rbac.SubnetPoolsRbacTest.test_create_subnetpool
diff --git a/etc/testcase/functest.security.patrole_vxlan_dependent.yml b/etc/testcase/functest.security.patrole_vxlan_dependent.yml
index f5afa3b4..252a3e8c 100644
--- a/etc/testcase/functest.security.patrole_vxlan_dependent.yml
+++ b/etc/testcase/functest.security.patrole_vxlan_dependent.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.security.patrole_vxlan_dependent:
name: functest.security.patrole_vxlan_dependent
@@ -6,21 +15,21 @@ functest.security.patrole_vxlan_dependent:
type: functest
testcase: tempest_custom
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
- - 'cp /home/opnfv/userconfig/pre_config/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- pre_copy:
- src_file: tempest_custom.txt
- dest_path: /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ - 'cp /home/opnfv/userconfig/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ - 'cp /home/opnfv/functest/results/tempest_custom.txt /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt'
report:
source_archive_files:
- functest.log
- - tempest_custom/tempest.log
+ - tempest_custom/rally.log
- tempest_custom/tempest-report.html
dest_archive_files:
- security_logs/functest.security.patrole_vxlan_dependent.functest.log
- security_logs/functest.security.patrole_vxlan_dependent.log
- security_logs/functest.security.patrole_vxlan_dependent.html
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: security_logs/functest.security.patrole_vxlan_dependent.html
sub_testcase_list:
- - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_provider_network_type[admin]
- - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_provider_segmentation_id[admin]
+ - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_provider_network_type[id-3c42f7b8-b80c-44ef-8fa4-69ec4b1836bc]
+ - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_provider_segmentation_id[id-b9decb7b-68ef-4504-b99b-41edbf7d2af5]
diff --git a/etc/testcase/functest.snaps.smoke.yml b/etc/testcase/functest.snaps.smoke.yml
deleted file mode 100644
index fc0cd4e0..00000000
--- a/etc/testcase/functest.snaps.smoke.yml
+++ /dev/null
@@ -1,14 +0,0 @@
----
-functest.snaps.smoke:
- name: functest.snaps.smoke
- objective: snaps smoke test cases
- validate:
- type: functest
- testcase: snaps_smoke
- report:
- source_archive_files:
- - functest.log
- dest_archive_files:
- - snaps_logs/functest.snaps.smoke.log
- check_results_file: 'functest_results.txt'
- sub_testcase_list:
diff --git a/etc/testcase/functest.tempest.bgpvpn.yml b/etc/testcase/functest.tempest.bgpvpn.yml
index bcac7541..a1b6ae2c 100644
--- a/etc/testcase/functest.tempest.bgpvpn.yml
+++ b/etc/testcase/functest.tempest.bgpvpn.yml
@@ -1,40 +1,52 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.tempest.bgpvpn:
name: functest.tempest.bgpvpn
objective: validate BGPVPN API CRUD operations by means of Tempest tests from networking-bgpvpn repository
validate:
type: functest
- testcase: tempest_custom
- image_name: opnfv/functest-features
+ testcase: networking-bgpvpn
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
- - 'cp /home/opnfv/userconfig/pre_config/testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- pre_copy:
- src_file: tempest_custom.txt
- dest_path: /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
report:
source_archive_files:
- functest.log
- - tempest_custom/tempest.log
- - tempest_custom/tempest-report.html
+ - networking-bgpvpn/rally.log
+ - networking-bgpvpn/tempest-report.html
dest_archive_files:
- tempest_logs/functest.tempest.bgpvpn.functest.log
- tempest_logs/functest.tempest.bgpvpn.log
- tempest_logs/functest.tempest.bgpvpn.html
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.bgpvpn.html
sub_testcase_list:
- networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_associate_disassociate_network
- networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_associate_disassociate_router
- - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_associate_invalid_network[negative]
- - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_attach_associated_subnet_to_associated_router[negative]
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_associate_invalid_network
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_attach_associated_subnet_to_associated_router
- networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_create_bgpvpn
- - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_create_bgpvpn_as_non_admin_fail[negative]
- - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_create_bgpvpn_with_invalid_routetargets[negative]
- - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_delete_bgpvpn_as_non_admin_fail[negative]
- - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_disassociate_invalid_network[negative]
- - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_list_bgpvpn_as_non_owner_fail[negative]
- - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_list_netassoc_as_non_owner_fail[negative]
- - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_show_bgpvpn_as_non_owner_fail[negative]
- - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_show_netassoc_as_non_owner_fail[negative]
- - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_update_bgpvpn_invalid_routetargets[negative]
- - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_update_route_target_non_admin_fail[negative]
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_create_bgpvpn_as_non_admin_fail
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_create_bgpvpn_with_invalid_routetargets
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_delete_bgpvpn
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_delete_bgpvpn_as_non_admin_fail
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_disassociate_invalid_network
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_list_bgpvpn
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_list_bgpvpn_as_non_owner_fail
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_list_netassoc_as_non_owner_fail
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_list_show_network_association
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_list_show_router_association
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_show_bgpvpn
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_show_bgpvpn_as_non_owner_fail
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_show_netassoc_as_non_owner_fail
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_update_bgpvpn_invalid_routetargets
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_update_route_target
+ - networking_bgpvpn_tempest.tests.api.test_bgpvpn.BgpvpnTest.test_update_route_target_non_admin_fail
diff --git a/etc/testcase/functest.tempest.compute.yml b/etc/testcase/functest.tempest.compute.yml
index 36b4d569..66b20a31 100644
--- a/etc/testcase/functest.tempest.compute.yml
+++ b/etc/testcase/functest.tempest.compute.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.tempest.compute:
name: functest.tempest.compute
@@ -6,28 +15,28 @@ functest.tempest.compute:
type: functest
testcase: tempest_custom
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
- - 'cp /home/opnfv/userconfig/pre_config/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- pre_copy:
- src_file: tempest_custom.txt
- dest_path: /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ - 'cp /home/opnfv/userconfig/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ - 'cp /home/opnfv/functest/results/tempest_custom.txt /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt'
report:
source_archive_files:
- functest.log
- - tempest_custom/tempest.log
+ - tempest_custom/rally.log
- tempest_custom/tempest-report.html
dest_archive_files:
- tempest_logs/functest.tempest.compute.functest.log
- tempest_logs/functest.tempest.compute.log
- tempest_logs/functest.tempest.compute.html
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.compute.html
sub_testcase_list:
- tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_get_flavor[id-1f12046b-753d-40d2-abb6-d8eb8b30cb2f,smoke]
- tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_list_flavors[id-e36c0eaa-dff5-4082-ad1f-3f9a80aa3f59,smoke]
- tempest.api.compute.security_groups.test_security_group_rules.SecurityGroupRulesTestJSON.test_security_group_rules_create[id-850795d7-d4d3-4e55-b527-a774c0123d3a,smoke]
- tempest.api.compute.security_groups.test_security_group_rules.SecurityGroupRulesTestJSON.test_security_group_rules_list[id-a6154130-5a55-4850-8be4-5e9e796dbf17,smoke]
- tempest.api.compute.security_groups.test_security_groups.SecurityGroupsTestJSON.test_security_groups_create_list_delete[id-eb2b087d-633d-4d0d-a7bd-9e6ba35b32de,smoke]
- - tempest.api.compute.servers.test_attach_interfaces.AttachInterfacesTestJSON.test_add_remove_fixed_ip[id-c7e0e60b-ee45-43d0-abeb-8596fd42a2f9,network,smoke]
+ - tempest.api.compute.servers.test_attach_interfaces.AttachInterfacesUnderV243Test.test_add_remove_fixed_ip[id-c7e0e60b-ee45-43d0-abeb-8596fd42a2f9,network,smoke]
- tempest.api.compute.servers.test_server_addresses.ServerAddressesTestJSON.test_list_server_addresses[id-6eb718c0-02d9-4d5e-acd1-4e0c269cef39,network,smoke]
- tempest.api.compute.servers.test_server_addresses.ServerAddressesTestJSON.test_list_server_addresses_by_network[id-87bbc374-5538-4f64-b673-2b0e4443cc30,network,smoke]
- tempest.api.compute.test_versions.TestVersions.test_get_version_details[id-b953a29e-929c-4a8e-81be-ec3a7e03cb76,smoke]
diff --git a/etc/testcase/functest.tempest.identity_v3.yml b/etc/testcase/functest.tempest.identity_v3.yml
index 7e64ce19..d37f649a 100644
--- a/etc/testcase/functest.tempest.identity_v3.yml
+++ b/etc/testcase/functest.tempest.identity_v3.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.tempest.identity_v3:
name: functest.tempest.identity_v3
@@ -6,24 +15,23 @@ functest.tempest.identity_v3:
type: functest
testcase: tempest_custom
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
- - 'cp /home/opnfv/userconfig/pre_config/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- pre_copy:
- src_file: tempest_custom.txt
- dest_path: /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ - 'cp /home/opnfv/userconfig/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ - 'cp /home/opnfv/functest/results/tempest_custom.txt /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt'
report:
source_archive_files:
- functest.log
- - tempest_custom/tempest.log
+ - tempest_custom/rally.log
- tempest_custom/tempest-report.html
dest_archive_files:
- tempest_logs/functest.tempest.identity_v3.functest.log
- tempest_logs/functest.tempest.identity_v3.log
- tempest_logs/functest.tempest.identity_v3.html
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.identity_v3.html
sub_testcase_list:
- tempest.api.identity.admin.v3.test_credentials.CredentialsTestJSON.test_credentials_create_get_update_delete[id-7cd59bf9-bda4-4c72-9467-d21cab278355,smoke]
- - tempest.api.identity.admin.v3.test_domains.DefaultDomainTestJSON.test_default_domain_exists[id-17a5de24-e6a0-4e4a-a9ee-d85b6e5612b5,smoke]
- tempest.api.identity.admin.v3.test_domains.DomainsTestJSON.test_create_update_delete_domain[id-f2f5b44a-82e8-4dad-8084-0661ea3b18cf,smoke]
- tempest.api.identity.admin.v3.test_endpoints.EndPointsTestJSON.test_update_endpoint[id-37e8f15e-ee7c-4657-a1e7-f6b61e375eff,smoke]
- tempest.api.identity.admin.v3.test_groups.GroupsV3TestJSON.test_group_users_add_list_delete[id-1598521a-2f36-4606-8df9-30772bd51339,smoke]
@@ -33,3 +41,4 @@ functest.tempest.identity_v3:
- tempest.api.identity.admin.v3.test_services.ServicesTestJSON.test_create_update_get_service[id-5193aad5-bcb7-411d-85b0-b3b61b96ef06,smoke]
- tempest.api.identity.admin.v3.test_trusts.TrustsV3TestJSON.test_get_trusts_all[id-4773ebd5-ecbf-4255-b8d8-b63e6f72b65d,smoke]
- tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_list_api_versions[id-721f480f-35b6-46c7-846e-047e6acea0dc,smoke]
+ - tempest.api.identity.v3.test_domains.DefaultDomainTestJSON.test_default_domain_exists[id-17a5de24-e6a0-4e4a-a9ee-d85b6e5612b5,smoke]
diff --git a/etc/testcase/functest.tempest.image.yml b/etc/testcase/functest.tempest.image.yml
index d81aa58d..36c484d5 100644
--- a/etc/testcase/functest.tempest.image.yml
+++ b/etc/testcase/functest.tempest.image.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.tempest.image:
name: functest.tempest.image
@@ -6,21 +15,21 @@ functest.tempest.image:
type: functest
testcase: tempest_custom
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
- - 'cp /home/opnfv/userconfig/pre_config/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- pre_copy:
- src_file: tempest_custom.txt
- dest_path: /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ - 'cp /home/opnfv/userconfig/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ - 'cp /home/opnfv/functest/results/tempest_custom.txt /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt'
report:
source_archive_files:
- functest.log
- - tempest_custom/tempest.log
+ - tempest_custom/rally.log
- tempest_custom/tempest-report.html
dest_archive_files:
- tempest_logs/functest.tempest.image.functest.log
- tempest_logs/functest.tempest.image.log
- tempest_logs/functest.tempest.image.html
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.image.html
sub_testcase_list:
- tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_register_upload_get_image_file[id-139b765e-7f3d-4b3d-8b37-3ca3876ee318,smoke]
- tempest.api.image.v2.test_versions.VersionsTest.test_list_versions[id-659ea30a-a17c-4317-832c-0f68ed23c31d,smoke]
diff --git a/etc/testcase/functest.tempest.ipv6_api.yml b/etc/testcase/functest.tempest.ipv6_api.yml
index 38a5c8d7..d1fd55f5 100644
--- a/etc/testcase/functest.tempest.ipv6_api.yml
+++ b/etc/testcase/functest.tempest.ipv6_api.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.tempest.ipv6_api:
name: functest.tempest.ipv6_api
@@ -6,21 +15,21 @@ functest.tempest.ipv6_api:
type: functest
testcase: tempest_custom
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
- - 'cp /home/opnfv/userconfig/pre_config/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- pre_copy:
- src_file: tempest_custom.txt
- dest_path: /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ - 'cp /home/opnfv/userconfig/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ - 'cp /home/opnfv/functest/results/tempest_custom.txt /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt'
report:
source_archive_files:
- functest.log
- - tempest_custom/tempest.log
+ - tempest_custom/rally.log
- tempest_custom/tempest-report.html
dest_archive_files:
- tempest_logs/functest.tempest.ipv6_api.functest.log
- tempest_logs/functest.tempest.ipv6_api.log
- tempest_logs/functest.tempest.ipv6_api.html
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.ipv6_api.html
sub_testcase_list:
- tempest.api.network.test_networks.BulkNetworkOpsIpV6Test.test_bulk_create_delete_network[id-d4f9024d-1e28-4fc1-a6b1-25dbc6fa11e2,smoke]
- tempest.api.network.test_networks.BulkNetworkOpsIpV6Test.test_bulk_create_delete_port[id-48037ff2-e889-4c3b-b86a-8e3f34d2d060,smoke]
diff --git a/etc/testcase/functest.tempest.ipv6_scenario.yml b/etc/testcase/functest.tempest.ipv6_scenario.yml
index 2378f5e7..6ddae4a4 100644
--- a/etc/testcase/functest.tempest.ipv6_scenario.yml
+++ b/etc/testcase/functest.tempest.ipv6_scenario.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.tempest.ipv6_scenario:
name: functest.tempest.ipv6_scenario
@@ -6,21 +15,21 @@ functest.tempest.ipv6_scenario:
type: functest
testcase: tempest_custom
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
- - 'cp /home/opnfv/userconfig/pre_config/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- pre_copy:
- src_file: tempest_custom.txt
- dest_path: /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ - 'cp /home/opnfv/userconfig/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ - 'cp /home/opnfv/functest/results/tempest_custom.txt /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt'
report:
source_archive_files:
- functest.log
- - tempest_custom/tempest.log
+ - tempest_custom/rally.log
- tempest_custom/tempest-report.html
dest_archive_files:
- tempest_logs/functest.tempest.ipv6_scenario.functest.log
- tempest_logs/functest.tempest.ipv6_scenario.log
- tempest_logs/functest.tempest.ipv6_scenario.html
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.ipv6_scenario.html
sub_testcase_list:
- tempest.scenario.test_network_v6.TestGettingAddress.test_dhcp6_stateless_from_os[compute,id-d7e1f858-187c-45a6-89c9-bdafde619a9f,network,slow]
- tempest.scenario.test_network_v6.TestGettingAddress.test_dualnet_dhcp6_stateless_from_os[compute,id-76f26acd-9688-42b4-bc3e-cd134c4cb09e,network,slow]
@@ -29,4 +38,4 @@ functest.tempest.ipv6_scenario:
- tempest.scenario.test_network_v6.TestGettingAddress.test_slaac_from_os[compute,id-2c92df61-29f0-4eaa-bee3-7c65bef62a43,network,slow]
- tempest.scenario.test_network_v6.TestGettingAddress.test_dualnet_slaac_from_os[compute,id-b6399d76-4438-4658-bcf5-0d6c8584fde2,network,slow]
- tempest.scenario.test_network_v6.TestGettingAddress.test_multi_prefix_slaac[compute,id-dec222b1-180c-4098-b8c5-cc1b8342d611,network,slow]
- - tempest.scenario.test_network_v6.TestGettingAddress.test_dualnet_multi_prefix_slaac[compute,id-9178ad42-10e4-47e9-8987-e02b170cc5cd,network]
+ - tempest.scenario.test_network_v6.TestGettingAddress.test_dualnet_multi_prefix_slaac[compute,id-9178ad42-10e4-47e9-8987-e02b170cc5cd,network,slow]
diff --git a/etc/testcase/functest.tempest.multi_node_scheduling.yml b/etc/testcase/functest.tempest.multi_node_scheduling.yml
index 3f14294a..133f87d5 100644
--- a/etc/testcase/functest.tempest.multi_node_scheduling.yml
+++ b/etc/testcase/functest.tempest.multi_node_scheduling.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.tempest.multi_node_scheduling:
name: functest.tempest.multi_node_scheduling
@@ -6,21 +15,21 @@ functest.tempest.multi_node_scheduling:
type: functest
testcase: tempest_custom
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
- - 'cp /home/opnfv/userconfig/pre_config/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- pre_copy:
- src_file: tempest_custom.txt
- dest_path: /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ - 'cp /home/opnfv/userconfig/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ - 'cp /home/opnfv/functest/results/tempest_custom.txt /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt'
report:
source_archive_files:
- functest.log
- - tempest_custom/tempest.log
+ - tempest_custom/rally.log
- tempest_custom/tempest-report.html
dest_archive_files:
- tempest_logs/functest.tempest.multi_node_scheduling.functest.log
- tempest_logs/functest.tempest.multi_node_scheduling.log
- tempest_logs/functest.tempest.multi_node_scheduling.html
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.multi_node_scheduling.html
sub_testcase_list:
- tempest.scenario.test_server_multinode.TestServerMultinode.test_schedule_to_all_nodes[compute,id-9cecbe35-b9d4-48da-a37e-7ce70aa43d30,network,smoke]
- tempest.api.compute.servers.test_server_group.ServerGroupTestJSON.test_create_delete_multiple_server_groups_with_same_name_policy[id-154dc5a4-a2fe-44b5-b99e-f15806a4a113]
diff --git a/etc/testcase/functest.tempest.network_api.yml b/etc/testcase/functest.tempest.network_api.yml
index 8d2f53ba..8d1298a8 100644
--- a/etc/testcase/functest.tempest.network_api.yml
+++ b/etc/testcase/functest.tempest.network_api.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.tempest.network_api:
name: functest.tempest.network_api
@@ -6,21 +15,21 @@ functest.tempest.network_api:
type: functest
testcase: tempest_custom
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
- - 'cp /home/opnfv/userconfig/pre_config/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- pre_copy:
- src_file: tempest_custom.txt
- dest_path: /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ - 'cp /home/opnfv/userconfig/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ - 'cp /home/opnfv/functest/results/tempest_custom.txt /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt'
report:
source_archive_files:
- functest.log
- - tempest_custom/tempest.log
+ - tempest_custom/rally.log
- tempest_custom/tempest-report.html
dest_archive_files:
- tempest_logs/functest.tempest.network_api.functest.log
- tempest_logs/functest.tempest.network_api.log
- tempest_logs/functest.tempest.network_api.html
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.network_api.html
sub_testcase_list:
- tempest.api.network.test_extensions.ExtensionsTestJSON.test_list_show_extensions[id-ef28c7e6-e646-4979-9d67-deb207bc5564,smoke]
- tempest.api.network.test_floating_ips.FloatingIPTestJSON.test_create_floating_ip_specifying_a_fixed_ip_address[id-36de4bd0-f09c-43e3-a8e1-1decc1ffd3a5,smoke]
diff --git a/etc/testcase/functest.tempest.network_scenario.yml b/etc/testcase/functest.tempest.network_scenario.yml
index 24511b1d..00847e8f 100644
--- a/etc/testcase/functest.tempest.network_scenario.yml
+++ b/etc/testcase/functest.tempest.network_scenario.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.tempest.network_scenario:
name: functest.tempest.network_scenario
@@ -6,21 +15,21 @@ functest.tempest.network_scenario:
type: functest
testcase: tempest_custom
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
- - 'cp /home/opnfv/userconfig/pre_config/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- pre_copy:
- src_file: tempest_custom.txt
- dest_path: /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ - 'cp /home/opnfv/userconfig/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ - 'cp /home/opnfv/functest/results/tempest_custom.txt /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt'
report:
source_archive_files:
- functest.log
- - tempest_custom/tempest.log
+ - tempest_custom/rally.log
- tempest_custom/tempest-report.html
dest_archive_files:
- tempest_logs/functest.tempest.network_scenario.functest.log
- tempest_logs/functest.tempest.network_scenario.log
- tempest_logs/functest.tempest.network_scenario.html
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.network_scenario.html
sub_testcase_list:
- tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_hotplug_nic[compute,id-c5adff73-e961-41f1-b4a9-343614f18cfa,network]
- tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_network_basic_ops[compute,id-f323b3ba-82f8-4db7-8ea6-6a895869ec49,network,smoke]
diff --git a/etc/testcase/functest.tempest.network_security.yml b/etc/testcase/functest.tempest.network_security.yml
index f2a7f75f..452adaad 100644
--- a/etc/testcase/functest.tempest.network_security.yml
+++ b/etc/testcase/functest.tempest.network_security.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.tempest.network_security:
name: functest.tempest.network_security
@@ -6,21 +15,21 @@ functest.tempest.network_security:
type: functest
testcase: tempest_custom
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
- - 'cp /home/opnfv/userconfig/pre_config/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- pre_copy:
- src_file: tempest_custom.txt
- dest_path: /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ - 'cp /home/opnfv/userconfig/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ - 'cp /home/opnfv/functest/results/tempest_custom.txt /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt'
report:
source_archive_files:
- functest.log
- - tempest_custom/tempest.log
+ - tempest_custom/rally.log
- tempest_custom/tempest-report.html
dest_archive_files:
- tempest_logs/functest.tempest.network_security.functest.log
- tempest_logs/functest.tempest.network_security.log
- tempest_logs/functest.tempest.network_security.html
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.network_security.html
sub_testcase_list:
- tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_port_security_macspoofing_port[compute,id-7c0bb1a2-d053-49a4-98f9-ca1a1d849f63,network,slow]
- tempest.scenario.test_security_groups_basic_ops.TestSecurityGroupsBasicOps.test_cross_tenant_traffic[compute,id-e79f879e-debb-440c-a7e4-efeda05b6848,network]
diff --git a/etc/testcase/functest.tempest.networking_sfc.yml b/etc/testcase/functest.tempest.networking_sfc.yml
new file mode 100644
index 00000000..f6a2dd82
--- /dev/null
+++ b/etc/testcase/functest.tempest.networking_sfc.yml
@@ -0,0 +1,31 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+functest.tempest.networking_sfc:
+ name: functest.tempest.networking_sfc
+ objective: validate SFC API CRUD operations by means of Tempest tests from networking-sfc repository
+ validate:
+ type: functest
+ testcase: networking-sfc
+ pre_condition:
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ report:
+ source_archive_files:
+ - functest.log
+ - networking-sfc/rally.log
+ - networking-sfc/tempest-report.html
+ dest_archive_files:
+ - tempest_logs/functest.tempest.networking_sfc.functest.log
+ - tempest_logs/functest.tempest.networking_sfc.log
+ - tempest_logs/functest.tempest.networking_sfc.html
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.networking_sfc.html
+ sub_testcase_list:
diff --git a/etc/testcase/functest.tempest.neutron_tempest_plugin_api.yml b/etc/testcase/functest.tempest.neutron_tempest_plugin_api.yml
new file mode 100644
index 00000000..245dd6cc
--- /dev/null
+++ b/etc/testcase/functest.tempest.neutron_tempest_plugin_api.yml
@@ -0,0 +1,600 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+functest.tempest.neutron_tempest_plugin_api:
+ name: functest.tempest.neutron_tempest_plugin_api
+ objective: Run Neutron Tempest plugin API tests
+ validate:
+ type: functest
+ testcase: neutron-tempest-plugin-api
+ report:
+ source_archive_files:
+ - functest.log
+ - neutron-tempest-plugin-api/rally.log
+ - neutron-tempest-plugin-api/tempest-report.html
+ dest_archive_files:
+ - neutron_tempest_plugin_api_logs/functest.tempest.neutron_tempest_plugin_api.functest.log
+ - neutron_tempest_plugin_api_logs/functest.tempest.neutron_tempest_plugin_api.log
+ - neutron_tempest_plugin_api_logs/functest.tempest.neutron_tempest_plugin_api.html
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: neutron_tempest_plugin_api_logs/functest.tempest.neutron_tempest_plugin_api.html
+ sub_testcase_list:
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleTestJSON.test_rule_create
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleTestJSON.test_get_rules_by_policy
+ - neutron_tempest_plugin.api.test_routers.DvrRoutersTestToCentralized.test_convert_centralized_router
+ - neutron_tempest_plugin.api.test_metering_extensions.MeteringIpV6TestJSON.test_create_delete_metering_label_with_name_max_length
+ - neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupProtocolTest.test_create_security_group_rule_with_ipv6_protocol_integers
+ - neutron_tempest_plugin.api.test_address_scopes.AddressScopeTest.test_show_address_scope
+ - neutron_tempest_plugin.api.test_trunk_details.TestTrunkDetailsJSON.test_port_resource_trunk_details_with_subport
+ - neutron_tempest_plugin.api.admin.test_logging_negative.LoggingNegativeTestJSON.test_create_log_with_nonexistent_sg
+ - neutron_tempest_plugin.api.test_qos.QosMinimumBandwidthRuleTestJSON.test_rule_create_forbidden_for_regular_tenants
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_list_regular_rule_types
+ - neutron_tempest_plugin.api.admin.test_logging.LoggingTestJSON.test_log_lifecycle
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_create_forbidden_for_regular_tenants
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_network_bumps_revision
+ - neutron_tempest_plugin.api.test_qos.QosSearchCriteriaTest.test_list_no_pagination_limit_0
+ - neutron_tempest_plugin.api.test_metering_extensions.MeteringTestJSON.test_create_delete_metering_label_with_name_max_length
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_subnet_bumps_network_revision
+ - neutron_tempest_plugin.api.admin.test_tag.TagFloatingIpTestJSON.test_floatingip_tags
+ - neutron_tempest_plugin.api.admin.test_networks.NetworksTestAdmin.test_create_network_with_project_and_tenant
+ - neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_with_boolean_type_name
+ - neutron_tempest_plugin.api.admin.test_ports.PortTestCasesAdmin.test_update_mac_address
+ - neutron_tempest_plugin.api.test_floating_ips.FloatingIPTestJSON.test_blank_update_clears_association
+ - neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_wildcard_policy_delete_blocked_on_default_ext
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_remove_subport_passing_dict
+ - neutron_tempest_plugin.api.test_ports.PortsTestJSON.test_change_dhcp_flag_then_create_port
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_create
+ - neutron_tempest_plugin.api.admin.test_tag.TagFilterSubnetpoolTestJSON.test_filter_subnetpool_tags
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestMtusJSON.test_add_subport_with_mtu_greater_than_subport
+ - neutron_tempest_plugin.api.admin.test_routers_flavors.RoutersFlavorTestCase.test_delete_router_flavor_in_use
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_qos_network_policy_binding_bumps_revision
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_get_rules_by_policy
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_association_with_tenant_network
+ - neutron_tempest_plugin.api.test_networks.NetworksSearchCriteriaTest.test_list_pagination
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStampWithL3.test_update_router_with_timestamp
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_update_network_with_timestamp
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestMtusJSON.test_create_trunk_with_mtu_equal_to_subport
+ - neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsJson.test_show_flavor
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_create_subnet_different_pools_same_network
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_update_self_share_rule
+ - neutron_tempest_plugin.api.admin.test_dhcp_agent_scheduler.DHCPAgentSchedulersTestJSON.test_list_dhcp_agent_hosting_network
+ - neutron_tempest_plugin.api.test_routers.RoutersTest.test_create_update_router_description
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStampWithSecurityGroup.test_show_sg_attribute_with_timestamp
+ - neutron_tempest_plugin.api.test_service_type_management.ServiceTypeManagementTest.test_service_provider_list
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_network_constrained_by_revision
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_tenant_create_non_default_subnetpool
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_create_update_subnetpool_description
+ - neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_regular_client_shares_with_another
+ - neutron_tempest_plugin.api.test_routers.RoutersTest.test_network_attached_with_two_routers
+ - neutron_tempest_plugin.api.admin.test_extension_driver_port_security_admin.PortSecurityAdminTests.test_create_port_security_false_on_shared_network
+ - neutron_tempest_plugin.api.test_networks.NetworksTestJSON.test_list_networks_fields_keystone_v3
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStampWithSecurityGroup.test_create_sgrule_with_timestamp
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_create_port_with_timestamp
+ - neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_page_reverse_desc
+ - neutron_tempest_plugin.api.test_qos.QosDscpMarkingRuleTestJSON.test_rule_create
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_tenant_cant_delete_other_tenants_ports
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_with_subport_missing_port_id
+ - neutron_tempest_plugin.api.test_subnets.SubnetsSearchCriteriaTest.test_list_sorts_desc
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_filter_fields
+ - neutron_tempest_plugin.api.test_qos.QosSearchCriteriaTest.test_list_pagination_with_marker
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_regular_client_shares_to_another_regular_client
+ - neutron_tempest_plugin.api.test_routers.RoutersTest.test_update_router_set_gateway_without_snat
+ - neutron_tempest_plugin.api.admin.test_networks.NetworksTestAdmin.test_create_tenant_network_vxlan
+ - neutron_tempest_plugin.api.test_qos.QosDscpMarkingRuleTestJSON.test_rule_create_forbidden_for_regular_tenants
+ - neutron_tempest_plugin.api.test_security_groups.RbacSharedSecurityGroupTest.test_filter_fields
+ - neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_filter_fields
+ - neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsIpV6TestJSON.test_create_update_delete_service_profile
+ - neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsJson.test_create_update_delete_service_profile
+ - neutron_tempest_plugin.api.admin.test_networks.NetworksTestAdmin.test_create_network_with_project
+ - neutron_tempest_plugin.api.admin.test_tag.TagFilterFloatingIpTestJSON.test_filter_floatingip_tags
+ - neutron_tempest_plugin.api.admin.test_quotas_negative.QuotasAdminNegativeTestJSON.test_create_network_when_quotas_is_full
+ - neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_wildcard_policy_created_from_external_network_api
+ - neutron_tempest_plugin.api.test_extra_dhcp_options.ExtraDHCPOptionsIpV6TestJSON.test_update_show_port_with_extra_dhcp_options
+ - neutron_tempest_plugin.api.test_routers.RoutersTest.test_update_router_set_gateway_with_snat_explicit
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_update_non_existent_subnetpool
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsSearchCriteriaTest.test_list_pagination_page_reverse_with_href_links
+ - neutron_tempest_plugin.api.test_extension_driver_port_security.PortSecTest.test_port_sec_update_port_failed
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_remove_subport_port_id_disabled_trunk
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_portbinding_bumps_revision
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_policy_target_update
+ - neutron_tempest_plugin.api.test_ports.PortsSearchCriteriaTest.test_list_pagination_page_reverse_with_href_links
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_external_network_bumps_revision
+ - neutron_tempest_plugin.api.admin.test_network_segment_range.NetworkSegmentRangeTestJson.test_update_network_segment_range_failed_with_existing_range_impacted
+ - neutron_tempest_plugin.api.test_networks.NetworksSearchCriteriaTest.test_list_pagination_with_marker
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_create_fail_for_the_same_type
+ - neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_filter_rbac_policies
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_remove_subport
+ - neutron_tempest_plugin.api.admin.test_tag.TagFilterQosPolicyTestJSON.test_filter_qos_policy_tags
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_create_subnetpool_with_timestamp
+ - neutron_tempest_plugin.api.test_routers_negative.DvrRoutersNegativeTest.test_router_create_tenant_distributed_returns_forbidden
+ - neutron_tempest_plugin.api.test_routers.RoutersSearchCriteriaTest.test_list_sorts_desc
+ - neutron_tempest_plugin.api.test_allowed_address_pair.AllowedAddressPairIpV6TestJSON.test_update_port_with_address_pair
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_tenant_update_subnetpool
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_get_subnetpool
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_admin_create_shared_subnetpool
+ - neutron_tempest_plugin.api.admin.test_dhcp_agent_scheduler.DHCPAgentSchedulersTestJSON.test_add_remove_network_from_dhcp_agent
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleTestJSON.test_rule_update_forbidden_for_regular_tenants_own_policy
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStampWithL3.test_show_router_attribute_with_timestamp
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_association_with_network_nonexistent_policy
+ - neutron_tempest_plugin.api.test_metering_extensions.MeteringTestJSON.test_list_metering_label_rules
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_port_sg_binding_bumps_revision
+ - neutron_tempest_plugin.api.test_qos.QosDscpMarkingRuleTestJSON.test_rule_create_rule_nonexistent_policy
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_delete_not_allowed_if_policy_in_use_by_port
+ - neutron_tempest_plugin.api.test_floating_ips.FloatingIPTestJSON.test_floatingip_update_extra_attributes_port_id_not_changed
+ - neutron_tempest_plugin.api.test_ports.PortsSearchCriteriaTest.test_list_pagination_page_reverse_desc
+ - neutron_tempest_plugin.api.test_qos.QosSearchCriteriaTest.test_list_pagination
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_update_forbidden_for_regular_tenants_own_policy
+ - neutron_tempest_plugin.api.admin.test_quotas_negative.QuotasAdminNegativeTestJSON.test_create_floatingip_when_quotas_is_full
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsSearchCriteriaTest.test_list_sorts_desc
+ - neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupProtocolTest.test_create_security_group_rule_with_ipv6_protocol_names
+ - neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupTest.test_update_security_group_with_too_long_name
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_create_forbidden_for_regular_tenants
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.SharedNetworksTest.test_list_shared_networks
+ - neutron_tempest_plugin.api.test_address_scopes.AddressScopeTest.test_delete_address_scope
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_update_trunk
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv4Test.test_show_ip_availability_after_port_delete
+ - neutron_tempest_plugin.api.test_allowed_address_pair.AllowedAddressPairTestJSON.test_update_port_with_multiple_ip_mac_address_pair
+ - neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_network_presence_prevents_policy_rbac_policy_deletion
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsSearchCriteriaTest.test_list_sorts_asc
+ - neutron_tempest_plugin.api.test_networks.NetworksTestJSON.test_create_update_network_dns_domain
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_update_subnetpool_prefix_intersect
+ - neutron_tempest_plugin.api.admin.test_dhcp_agent_scheduler.DHCPAgentSchedulersTestJSON.test_dhcp_port_status_active
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv6Test.test_list_ip_availability_before_subnet
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_extra_dhcp_opt_bumps_revision
+ - neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_sorts_desc
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_create_policy
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_tenant_get_not_shared_admin_subnetpool
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_tenant_create_shared_subnetpool
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_tenant_create_default_subnetpool
+ - neutron_tempest_plugin.api.test_routers.RoutersIpV6Test.test_create_update_router_description
+ - neutron_tempest_plugin.api.test_routers.DvrRoutersTest.test_create_distributed_router
+ - neutron_tempest_plugin.api.admin.test_logging_negative.LoggingNegativeTestJSON.test_create_log_with_nonexistent_port
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_list_admin_rule_types
+ - neutron_tempest_plugin.api.test_allowed_address_pair.AllowedAddressPairIpV6TestJSON.test_update_port_with_multiple_ip_mac_address_pair
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv4Test.test_show_ip_availability_after_subnet_and_ports_create
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_create_fail_for_the_same_type
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_create_sp_associate_address_scope_multiple_prefix_intersect
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_update
+ - neutron_tempest_plugin.api.test_ports.PortsSearchCriteriaTest.test_list_pagination
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_port_presence_prevents_network_rbac_policy_deletion
+ - neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupIPv6Test.test_update_security_group_with_too_long_name
+ - neutron_tempest_plugin.api.admin.test_routers_dvr.RouterTestCentralizedToDVR.test_centralized_router_update_to_dvr
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_duplicate_segmentation_details
+ - neutron_tempest_plugin.api.test_metering_extensions.MeteringTestJSON.test_create_delete_metering_label_with_filters
+ - neutron_tempest_plugin.api.test_extension_driver_port_security.PortSecTest.test_port_sec_update_pass
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_update_subnetpool_prefixes_append
+ - neutron_tempest_plugin.api.test_subnets.SubnetsSearchCriteriaTest.test_list_pagination
+ - neutron_tempest_plugin.api.test_routers.RoutersIpV6Test.test_create_router_with_snat_explicit
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_show_subnetpool_has_project_id
+ - neutron_tempest_plugin.api.test_floating_ips.FloatingIPPoolTestJSON.test_create_floatingip_from_specific_pool
+ - neutron_tempest_plugin.api.admin.test_tag.TagFilterRouterTestJSON.test_filter_router_tags
+ - neutron_tempest_plugin.api.test_address_scopes_negative.AddressScopeTestNegative.test_tenant_get_not_shared_admin_address_scope
+ - neutron_tempest_plugin.api.test_ports.PortsSearchCriteriaTest.test_list_sorts_desc
+ - neutron_tempest_plugin.api.test_subnets.SubnetsSearchCriteriaTest.test_list_pagination_page_reverse_with_href_links
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv4Test.test_list_ip_availability_after_port_delete
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_port_id_disabled_trunk
+ - neutron_tempest_plugin.api.admin.test_tag.TagFilterTrunkTestJSON.test_filter_trunk_tags
+ - neutron_tempest_plugin.api.admin.test_floating_ips_admin_actions.FloatingIPAdminTestJSON.test_create_floatingip_with_specified_ip_address
+ - neutron_tempest_plugin.api.test_trunk_details.TestTrunkDetailsJSON.test_port_resource_trunk_details_no_subports
+ - neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_no_pagination_limit_0
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_passing_dict
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_get_subports
+ - neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_page_reverse_with_href_links
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_qos_policy_delete_with_rules
+ - neutron_tempest_plugin.api.test_address_scopes.AddressScopeTest.test_tenant_create_list_address_scope
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_add_subports
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_duplicate_policy_error
+ - neutron_tempest_plugin.api.test_routers.RoutersSearchCriteriaTest.test_list_pagination_page_reverse_with_href_links
+ - neutron_tempest_plugin.api.test_security_groups.RbacSharedSecurityGroupTest.test_port_presence_prevents_policy_rbac_policy_deletion
+ - neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_external_conversion_on_policy_create
+ - neutron_tempest_plugin.api.test_networks.NetworksSearchCriteriaTest.test_list_pagination_page_reverse_asc
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleTestJSON.test_rule_delete
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_association_with_port_non_shared_policy
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_filter_policies
+ - neutron_tempest_plugin.api.admin.test_tag.TagFilterSubnetTestJSON.test_filter_subnet_tags
+ - neutron_tempest_plugin.api.admin.test_routers_dvr.RoutersTestDVR.test_centralized_router_creation
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_update_subnetpool_multiple_prefix_intersect
+ - neutron_tempest_plugin.api.admin.test_network_segment_range.NetworkSegmentRangeTestJson.test_list_network_segment_ranges
+ - neutron_tempest_plugin.api.test_qos.QosSearchCriteriaTest.test_list_pagination_page_reverse_desc
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_subport_invalid_inherit_network_segmentation_type
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_delete_trunk_disabled_trunk
+ - neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_external_network_on_shared_policy_delete
+ - neutron_tempest_plugin.api.admin.test_tag.TagSubnetPoolTestJSON.test_subnetpool_tags
+ - neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsIpV6TestJSON.test_list_flavors
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_delete_self_share_rule
+ - neutron_tempest_plugin.api.admin.test_tag.TagSecGroupTestJSON.test_security_group_tags
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_user_create_port_with_admin_qos_policy
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_update_subnetpool_associate_address_scope
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_update_forbidden_for_regular_tenants_foreign_policy
+ - neutron_tempest_plugin.api.test_qos.QosDscpMarkingRuleTestJSON.test_get_rules_by_policy
+ - neutron_tempest_plugin.api.admin.test_tag.TagSubnetTestJSON.test_subnet_tags
+ - neutron_tempest_plugin.api.test_qos.QosSearchCriteriaTest.test_list_pagination_page_reverse_asc
+ - neutron_tempest_plugin.api.test_networks.NetworksSearchCriteriaTest.test_list_sorts_asc
+ - neutron_tempest_plugin.api.test_extension_driver_port_security.PortSecTest.test_allowed_address_pairs
+ - neutron_tempest_plugin.api.test_routers_negative.HaRoutersNegativeTest.test_router_create_tenant_ha_returns_forbidden
+ - neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_external_conversion_on_one_policy_delete
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.SharedNetworksTest.test_create_port_shared_network_as_non_admin_tenant
+ - neutron_tempest_plugin.api.test_security_groups.SecGroupProtocolIPv6Test.test_security_group_rule_protocol_ints
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsSearchCriteriaTest.test_list_pagination_page_reverse_desc
+ - neutron_tempest_plugin.api.test_networks.NetworksTestJSON.test_show_network
+ - neutron_tempest_plugin.api.test_auto_allocated_topology.TestAutoAllocatedTopology.test_get_allocated_net_topology_as_tenant
+ - neutron_tempest_plugin.api.test_ports.PortsSearchCriteriaTest.test_list_pagination_page_reverse_asc
+ - neutron_tempest_plugin.api.test_address_scopes_negative.AddressScopeTestNegative.test_tenant_create_shared_address_scope
+ - neutron_tempest_plugin.api.test_allowed_address_pair.AllowedAddressPairIpV6TestJSON.test_create_list_port_with_address_pair
+ - neutron_tempest_plugin.api.test_metering_extensions.MeteringTestJSON.test_create_delete_metering_label_rule_with_filters
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_create_subnetpool_associate_address_scope
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_sg_group_bumps_revision
+ - neutron_tempest_plugin.api.test_qos.QosDscpMarkingRuleTestJSON.test_invalid_rule_create
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv6Test.test_show_ip_availability_after_port_delete
+ - neutron_tempest_plugin.api.admin.test_l3_agent_scheduler.L3AgentSchedulerTestJSON.test_add_list_remove_router_on_l3_agent
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_get_policy_that_is_shared
+ - neutron_tempest_plugin.api.test_qos_negative.QosNegativeTestJSON.test_add_policy_with_too_long_name
+ - neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_net_bound_shared_policy_wildcard_and_tenant_id_wild_remains
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_tenant_create_non_default_subnetpool
+ - neutron_tempest_plugin.api.test_qos.QosSearchCriteriaTest.test_list_pagination_page_reverse_with_href_links
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_update_subnetpool_associate_address_scope_of_other_owner
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_allowed_address_pairs_bumps_revision
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_update_forbidden_for_regular_tenants_own_policy
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_update_subnetpool_disassociate_address_scope
+ - neutron_tempest_plugin.api.test_ports.PortsTestJSON.test_create_port_with_propagate_uplink_status
+ - neutron_tempest_plugin.api.test_qos.QosSearchCriteriaTest.test_list_sorts_asc
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_update_subnetpool_disassociate_address_scope
+ - neutron_tempest_plugin.api.test_routers_negative.RoutersNegativeTest.test_delete_router_in_use
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsSearchCriteriaTest.test_list_no_pagination_limit_0
+ - neutron_tempest_plugin.api.test_address_scopes.AddressScopeTest.test_admin_update_shared_address_scope
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_get_non_existent_subnetpool
+ - neutron_tempest_plugin.api.admin.test_ports.PortTestCasesResourceRequest.test_port_resource_request_empty
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_show_rule_type_details_as_admin
+ - neutron_tempest_plugin.api.test_address_scopes.AddressScopeTest.test_admin_create_shared_address_scope
+ - neutron_tempest_plugin.api.admin.test_agent_management.AgentManagementTestJSON.test_list_agent
+ - neutron_tempest_plugin.api.test_routers.RoutersIpV6Test.test_network_attached_with_two_routers
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStampWithL3.test_create_floatingip_with_timestamp
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_update_subnetpool_associate_address_scope
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_port_id_uses_parent_port_id
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_show_networks_attribute_with_timestamp
+ - neutron_tempest_plugin.api.test_floating_ips.FloatingIPTestJSON.test_create_update_floatingip_port_details
+ - neutron_tempest_plugin.api.test_networks.NetworksSearchCriteriaTest.test_list_sorts_desc
+ - neutron_tempest_plugin.api.test_routers.RoutersTest.test_update_extra_route
+ - neutron_tempest_plugin.api.test_qos_negative.QosNegativeTestJSON.test_add_policy_with_too_long_description
+ - neutron_tempest_plugin.api.admin.test_network_segment_range.NetworkSegmentRangeTestJson.test_show_network_segment_range
+ - neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_policy_sharing_with_wildcard_and_tenant_id
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_create_rbac_policy_with_target_tenant_none
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestMtusJSON.test_create_trunk_with_mtu_smaller_than_subport
+ - neutron_tempest_plugin.api.test_auto_allocated_topology.TestAutoAllocatedTopology.test_delete_allocated_net_topology_as_tenant
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleTestJSON.test_rule_create_forbidden_for_regular_tenants
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_create_subnetpool_associate_invalid_address_scope
+ - neutron_tempest_plugin.api.test_qos.QosDscpMarkingRuleTestJSON.test_rule_delete
+ - neutron_tempest_plugin.api.test_address_scopes_negative.AddressScopeTestNegative.test_tenant_update_address_scope_shared_false
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStampWithL3.test_create_router_with_timestamp
+ - neutron_tempest_plugin.api.test_qos.QosMinimumBandwidthRuleTestJSON.test_get_rules_by_policy
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_port_security_bumps_revisions
+ - neutron_tempest_plugin.api.test_subnets.SubnetsSearchCriteriaTest.test_list_pagination_page_reverse_desc
+ - neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupTest.test_update_security_group_with_boolean_type_name
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_nonexistent_port_id
+ - neutron_tempest_plugin.api.test_security_groups.RbacSharedSecurityGroupTest.test_policy_target_update
+ - neutron_tempest_plugin.api.admin.test_ports.PortTestCasesAdmin.test_regenerate_mac_address
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestMtusJSON.test_add_subport_with_mtu_equal_to_trunk
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_port_bumps_revision
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_show_port_attribute_with_timestamp
+ - neutron_tempest_plugin.api.test_subnets.SubnetsSearchCriteriaTest.test_list_pagination_with_href_links
+ - neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupIPv6Test.test_create_security_group_with_boolean_type_name
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_show_policy_has_project_id
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.SharedNetworksTest.test_show_shared_networks_attribute
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_tenant_create_subnetpool_associate_shared_address_scope
+ - neutron_tempest_plugin.api.test_security_groups.RbacSharedSecurityGroupTest.test_regular_client_shares_to_another_regular_client
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_update_forbidden_for_regular_tenants_foreign_policy
+ - neutron_tempest_plugin.api.test_address_scopes_negative.AddressScopeTestNegative.test_get_non_existent_address_scope
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_create_subnet_from_pool_with_subnet_cidr
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestMtusJSON.test_add_subport_with_mtu_greater_than_trunk
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_trunk_empty_subports_list
+ - neutron_tempest_plugin.api.test_address_scopes_negative.AddressScopeTestNegative.test_delete_address_scope_associated_with_subnetpool
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_update_trunk_with_description
+ - neutron_tempest_plugin.api.test_extension_driver_port_security.PortSecTest.test_port_sec_specific_value_2
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv6Test.test_list_ip_availability_after_subnet_and_ports
+ - neutron_tempest_plugin.api.test_address_scopes_negative.AddressScopeTestNegative.test_update_shared_address_scope_to_unshare
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_sg_rule_bumps_sg_revision
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_update_association_with_port_shared_policy
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_delete_port_in_use_by_subport
+ - neutron_tempest_plugin.api.test_qos.QosMinimumBandwidthRuleTestJSON.test_rule_create_pass_for_direction_ingress
+ - neutron_tempest_plugin.api.test_routers_negative.RoutersNegativePolicyTest.test_add_interface_wrong_tenant
+ - neutron_tempest_plugin.api.admin.test_agent_management.AgentManagementTestJSON.test_list_agents_non_admin
+ - neutron_tempest_plugin.api.admin.test_tag.UpdateTagsTest.test_update_tags_affects_only_updated_resource
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_update_subnet_with_timestamp
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_get_subnetpool
+ - neutron_tempest_plugin.api.test_networks.NetworksSearchCriteriaTest.test_list_pagination_with_href_links
+ - neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupIPv6Test.test_update_default_security_group_name
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_get_rules_by_policy
+ - neutron_tempest_plugin.api.test_ports.PortsTestJSON.test_create_update_port_description
+ - neutron_tempest_plugin.api.admin.test_agent_management.AgentManagementTestJSON.test_update_agent_description
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_regular_client_blocked_from_sharing_with_wildcard
+ - neutron_tempest_plugin.api.test_subnets.SubnetsSearchCriteriaTest.test_list_pagination_page_reverse_asc
+ - neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_regular_client_blocked_from_creating_external_wild_policies
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.AllowedAddressPairSharedNetworkTest.test_update_with_address_pair_blocked_on_other_network
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv6Test.test_list_ip_availability_after_port_delete
+ - neutron_tempest_plugin.api.test_address_scopes_negative.AddressScopeTestNegative.test_update_non_existent_address_scope
+ - neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_policy_allows_tenant_to_attach_ext_gw
+ - neutron_tempest_plugin.api.test_networks_negative.NetworksNegativeTest.test_delete_network_in_use
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_create_subnet_from_pool_with_default_prefixlen
+ - neutron_tempest_plugin.api.test_security_groups.SecGroupProtocolIPv6Test.test_security_group_rule_protocol_legacy_names
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_duplicate_subport_segmentation_ids
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_create_subnetpool_associate_non_exist_address_scope
+ - neutron_tempest_plugin.api.test_metering_extensions.MeteringIpV6TestJSON.test_create_delete_metering_label_with_filters
+ - neutron_tempest_plugin.api.test_allowed_address_pair.AllowedAddressPairTestJSON.test_update_port_with_address_pair
+ - neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination
+ - neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupIPv6Test.test_delete_in_use_sec_group
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_update_subnetpool_with_timestamp
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_segment_with_timestamp
+ - neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsJson.test_list_service_profiles
+ - neutron_tempest_plugin.api.admin.test_tag.TagRouterTestJSON.test_router_tags
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_update
+ - neutron_tempest_plugin.api.admin.test_network_segment_range.NetworkSegmentRangeTestJson.test_create_network_with_default_network_segment_range
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_update_subnetpool_prefixes_shrink
+ - neutron_tempest_plugin.api.test_metering_extensions.MeteringIpV6TestJSON.test_show_metering_label_rule
+ - neutron_tempest_plugin.api.test_routers.RoutersIpV6Test.test_update_router_reset_gateway_without_snat
+ - neutron_tempest_plugin.api.admin.test_logging_negative.LoggingNegativeTestJSON.test_create_log_with_invalid_resource_type
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_update_subnetpool_prefixes_append
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_association_with_network_non_shared_policy
+ - neutron_tempest_plugin.api.test_qos.QosSearchCriteriaTest.test_list_pagination_with_href_links
+ - neutron_tempest_plugin.api.admin.test_networks.NetworksTestAdmin.test_create_network_with_project_and_other_tenant
+ - neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsJson.test_create_update_delete_flavor
+ - neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupTest.test_delete_in_use_sec_group
+ - neutron_tempest_plugin.api.test_routers.RoutersTest.test_create_router_with_snat_explicit
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_create_forbidden_for_regular_tenants
+ - neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupIPv6Test.test_create_security_group_with_too_long_name
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_association_with_admin_network
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.SharedNetworksTest.test_create_update_shared_network
+ - neutron_tempest_plugin.api.test_routers.RoutersSearchCriteriaTest.test_list_pagination_page_reverse_desc
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_subport_nonexistent_trunk
+ - neutron_tempest_plugin.api.test_routers.RoutersIpV6Test.test_update_router_set_gateway_without_snat
+ - neutron_tempest_plugin.api.admin.test_ports.PortTestCasesResourceRequest.test_port_resource_request
+ - neutron_tempest_plugin.api.admin.test_routers_dvr.RoutersTestDVR.test_distributed_router_creation
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_update_forbidden_for_regular_tenants_own_policy
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_subnet_service_types_bumps_revisions
+ - neutron_tempest_plugin.api.admin.test_routers_ha.RoutersTestHA.test_legacy_router_update_to_ha
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_remove_subport_not_found
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_update_subnetpool_associate_another_address_scope
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_create_rule_nonexistent_policy
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStampWithSecurityGroup.test_show_sgrule_attribute_with_timestamp
+ - neutron_tempest_plugin.api.test_ports.PortsTestJSON.test_create_update_port_with_dns_domain
+ - neutron_tempest_plugin.api.test_security_groups.SecGroupTest.test_create_list_update_show_delete_security_group
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_update_association_with_admin_network
+ - neutron_tempest_plugin.api.admin.test_tag.TagPortTestJSON.test_port_tags
+ - neutron_tempest_plugin.api.admin.test_tag.TagFilterPortTestJSON.test_filter_port_tags
+ - neutron_tempest_plugin.api.admin.test_quotas_negative.QuotasAdminNegativeTestJSON.test_create_security_group_when_quotas_is_full
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.SharedNetworksTest.test_filtering_shared_subnets
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_show_subnetpool_has_project_id
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_show_subnet_attribute_with_timestamp
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv6Test.test_show_ip_availability_after_subnet_and_ports_create
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_association_with_port_shared_policy
+ - neutron_tempest_plugin.api.test_networks.NetworksSearchCriteriaTest.test_list_validation_filters
+ - neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_rbac_policy_show
+ - neutron_tempest_plugin.api.admin.test_agent_availability_zone.AgentAvailabilityZoneTestCase.test_agents_availability_zone
+ - neutron_tempest_plugin.api.admin.test_tag.TagTrunkTestJSON.test_trunk_tags
+ - neutron_tempest_plugin.api.admin.test_quotas_negative.QuotasAdminNegativeTestJSON.test_create_subnet_when_quotas_is_full
+ - neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_regular_client_blocked_from_sharing_anothers_policy
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_policy_show
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_qos_port_policy_binding_bumps_revision
+ - neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_with_too_long_name
+ - neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsJson.test_show_service_profile
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStampWithSecurityGroup.test_update_sg_with_timestamp
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_update_subnetpool_associate_another_address_scope
+ - neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_net_bound_shared_policy_wildcard_and_tenant_id_wild_remove
+ - neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_delete_policies_while_tenant_attached_to_net
+ - neutron_tempest_plugin.api.test_extension_driver_port_security.PortSecTest.test_delete_with_port_sec
+ - neutron_tempest_plugin.api.test_availability_zones.ListAvailableZonesTest.test_list_available_zones
+ - neutron_tempest_plugin.api.test_routers.RoutersTest.test_create_router_with_default_snat_value
+ - neutron_tempest_plugin.api.test_security_groups.SecGroupProtocolIPv6Test.test_security_group_rule_protocol_names
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_delete_port_in_use_by_trunk
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_create_subnet_from_pool_with_prefixlen
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.SharedNetworksTest.test_filtering_shared_networks
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_create_list_subnetpool
+ - neutron_tempest_plugin.api.admin.test_tag.TagQosPolicyTestJSON.test_qos_policy_tags
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_create_subnet_from_pool_with_subnet_cidr
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_router_bumps_revision
+ - neutron_tempest_plugin.api.admin.test_tag.TagFilterNetworkTestJSON.test_filter_network_tags
+ - neutron_tempest_plugin.api.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcp_stateful_fixedips_outrange
+ - neutron_tempest_plugin.api.test_networks.NetworksMtuTestJSON.test_update_network_custom_mtu
+ - neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_regular_client_shares_to_another_regular_client
+ - neutron_tempest_plugin.api.test_ports.PortsSearchCriteriaTest.test_list_no_pagination_limit_0
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_list_policy_filter_by_name
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv4Test.test_list_ip_availability_before_subnet
+ - neutron_tempest_plugin.api.admin.test_tag.TagFilterSecGroupTestJSON.test_filter_security_group_tags
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_create_subnet_from_pool_with_quota
+ - neutron_tempest_plugin.api.test_networks.NetworksTestJSON.test_show_network_fields_keystone_v3
+ - neutron_tempest_plugin.api.test_metering_extensions.MeteringIpV6TestJSON.test_show_metering_label
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsSearchCriteriaTest.test_list_pagination_with_href_links
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_default_policy_creating_network_without_policy
+ - neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsIpV6TestJSON.test_list_service_profiles
+ - neutron_tempest_plugin.api.test_networks.NetworksSearchCriteriaTest.test_list_no_pagination_limit_0
+ - neutron_tempest_plugin.api.test_metering_extensions.MeteringTestJSON.test_show_metering_label_rule
+ - neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsIpV6TestJSON.test_show_flavor
+ - neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_with_href_links
+ - neutron_tempest_plugin.api.test_network_ip_availability_negative.NetworksIpAvailabilityNegativeTest.test_network_availability_nonexistent_network_id
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_create_network_with_timestamp
+ - neutron_tempest_plugin.api.admin.test_logging.LoggingTestJSON.test_list_supported_logging_types
+ - neutron_tempest_plugin.api.test_address_scopes_negative.AddressScopeTestNegative.test_delete_non_existent_address_scope
+ - neutron_tempest_plugin.api.test_security_groups.RbacSharedSecurityGroupTest.test_regular_client_blocked_from_sharing_anothers_policy
+ - neutron_tempest_plugin.api.test_qos.QosMinimumBandwidthRuleTestJSON.test_rule_create_fail_for_the_same_type
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_delete_policy
+ - neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_policy_allows_tenant_to_allocate_floatingip
+ - neutron_tempest_plugin.api.test_routers.RoutersIpV6Test.test_update_router_set_gateway_with_snat_explicit
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_subport_missing_segmentation_id
+ - neutron_tempest_plugin.api.admin.test_routers_ha.RoutersTestHA.test_legacy_router_creation
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_admin_create_shared_subnetpool
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_with_subport_missing_segmentation_type
+ - neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_sorts_asc
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestInheritJSONBase.test_add_subport
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_create_update_subnetpool_description
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_update_subnetpool_prefixes_extend
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_update
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_network_only_visible_to_policy_target
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_create_subnetpool_associate_address_scope
+ - neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_policy_target_update
+ - neutron_tempest_plugin.api.test_metering_extensions.MeteringTestJSON.test_list_metering_labels
+ - neutron_tempest_plugin.api.admin.test_dhcp_agent_scheduler.DHCPAgentSchedulersTestJSON.test_list_networks_hosted_by_one_dhcp
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleTestJSON.test_rule_update_forbidden_for_regular_tenants_foreign_policy
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_show_rule_type_details_as_user
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_update_port_with_timestamp
+ - neutron_tempest_plugin.api.test_qos.QosMinimumBandwidthRuleTestJSON.test_rule_delete
+ - neutron_tempest_plugin.api.test_security_groups.SecGroupProtocolTest.test_security_group_rule_protocol_names
+ - neutron_tempest_plugin.api.test_ports.PortsTestJSON.test_create_update_port_with_dns_name
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_update_subnetpool_associate_address_scope_wrong_ip_version
+ - neutron_tempest_plugin.api.test_networks.NetworksTestJSON.test_create_network_with_project
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_rbac_bumps_network_revision
+ - neutron_tempest_plugin.api.test_security_groups.SecGroupProtocolTest.test_security_group_rule_protocol_ints
+ - neutron_tempest_plugin.api.test_qos.RbacSharedQosPoliciesTest.test_policy_sharing_with_wildcard
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_floatingip_bumps_revision
+ - neutron_tempest_plugin.api.test_floating_ips_negative.FloatingIPNegativeTestJSON.test_associate_floatingip_with_port_with_floatingip
+ - neutron_tempest_plugin.api.test_routers.RoutersTest.test_router_interface_status
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_delete
+ - neutron_tempest_plugin.api.admin.test_quotas.QuotasTest.test_quotas
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_update_subnetpool_prefixes_extend
+ - neutron_tempest_plugin.api.test_address_scopes.AddressScopeTest.test_tenant_update_address_scope
+ - neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_page_reverse_asc
+ - neutron_tempest_plugin.api.test_extension_driver_port_security.PortSecTest.test_port_sec_default_value
+ - neutron_tempest_plugin.api.test_allowed_address_pair.AllowedAddressPairIpV6TestJSON.test_update_port_with_cidr_address_pair
+ - neutron_tempest_plugin.api.admin.test_tag.TagNetworkTestJSON.test_network_tags
+ - neutron_tempest_plugin.api.admin.test_routers_ha.RoutersTestHA.test_delete_ha_router_keeps_ha_network_segment_data
+ - neutron_tempest_plugin.api.test_trunk_details.TestTrunkDetailsJSON.test_port_resource_empty_trunk_details
+ - neutron_tempest_plugin.api.test_qos.QosMinimumBandwidthRuleTestJSON.test_rule_create
+ - neutron_tempest_plugin.api.test_subnets.SubnetsSearchCriteriaTest.test_list_pagination_with_marker
+ - neutron_tempest_plugin.api.test_metering_negative.MeteringNegativeTestJSON.test_create_metering_label_with_too_long_name
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_tenant_update_sp_prefix_associated_with_shared_addr_scope
+ - neutron_tempest_plugin.api.test_qos.QosSearchCriteriaTest.test_list_sorts_desc
+ - neutron_tempest_plugin.api.admin.test_network_segment_range.NetworkSegmentRangeTestJson.test_create_update_delete_network_segment_range
+ - neutron_tempest_plugin.api.admin.test_security_groups.SecGroupAdminTest.test_security_group_recreated_on_port_update
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_create_rbac_policy_with_target_tenant_too_long_id
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_create
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_subnet_bumps_revision
+ - neutron_tempest_plugin.api.admin.test_quotas_negative.QuotasAdminNegativeTestJSON.test_create_router_when_quotas_is_full
+ - neutron_tempest_plugin.api.test_qos.QosMinimumBandwidthRuleTestJSON.test_rule_create_rule_nonexistent_policy
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_show_delete_trunk
+ - neutron_tempest_plugin.api.admin.test_quotas_negative.QuotasAdminNegativeTestJSON.test_create_port_when_quotas_is_full
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_regular_client_blocked_from_sharing_anothers_network
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_rbac_policy_quota
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleTestJSON.test_rule_create_rule_nonexistent_policy
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_router_extra_attributes_bumps_revision
+ - neutron_tempest_plugin.api.test_ports.PortsTestJSON.test_add_ips_to_port
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_tenant_can_delete_port_on_own_network
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_create_dual_stack_subnets_from_subnetpools
+ - neutron_tempest_plugin.api.admin.test_agent_management.AgentManagementTestJSON.test_show_agent
+ - neutron_tempest_plugin.api.test_extensions.ExtensionsTest.test_list_extensions_sorting
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_create_rule_nonexistent_policy
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsSearchCriteriaTest.test_list_validation_filters
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_subnet_on_network_only_visible_to_policy_target
+ - neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsJson.test_list_flavors
+ - neutron_tempest_plugin.api.test_address_scopes.AddressScopeTest.test_show_address_scope_project_id
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_delete_trunk_with_subport_is_allowed
+ - neutron_tempest_plugin.api.test_network_ip_availability.NetworksIpAvailabilityIPv4Test.test_list_ip_availability_after_subnet_and_ports
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestMtusJSON.test_create_trunk_with_mtu_greater_than_subport
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsSearchCriteriaTest.test_list_pagination_with_marker
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_shared_policy_update
+ - neutron_tempest_plugin.api.admin.test_quotas.QuotasTest.test_detail_quotas
+ - neutron_tempest_plugin.api.test_metering_extensions.MeteringTestJSON.test_show_metering_label
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_subnetpool_bumps_revision
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_create_subnet_with_timestamp
+ - neutron_tempest_plugin.api.admin.test_agent_management.AgentManagementTestJSON.test_update_agent_status
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_router_extra_routes_bumps_revision
+ - neutron_tempest_plugin.api.test_extension_driver_port_security.PortSecTest.test_port_sec_specific_value_1
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleTestJSON.test_rule_update
+ - neutron_tempest_plugin.api.admin.test_network_segment_range.NetworkSegmentRangeTestJson.test_delete_network_segment_range_failed_with_segment_referenced
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_nonexistent_subport_port_id
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_show_trunk_has_project_id
+ - neutron_tempest_plugin.api.admin.test_ports.PortTestCasesResourceRequest.test_port_resource_request_inherited_policy
+ - neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupIPv6Test.test_update_security_group_with_boolean_type_name
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStampWithL3.test_update_floatingip_with_timestamp
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_update_forbidden_for_regular_tenants_foreign_policy
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_trunk_subports_not_specified
+ - neutron_tempest_plugin.api.admin.test_routers_flavors.RoutersFlavorTestCase.test_badrequest_on_requesting_flags_and_flavor
+ - neutron_tempest_plugin.api.admin.test_routers_flavors.RoutersFlavorTestCase.test_create_router_with_flavor
+ - neutron_tempest_plugin.api.test_routers.RoutersIpV6Test.test_router_interface_status
+ - neutron_tempest_plugin.api.test_security_groups.RbacSharedSecurityGroupTest.test_rbac_policy_show
+ - neutron_tempest_plugin.api.test_ports.PortsSearchCriteriaTest.test_list_sorts_asc
+ - neutron_tempest_plugin.api.test_security_groups_negative.NegativeSecGroupTest.test_update_default_security_group_name
+ - neutron_tempest_plugin.api.test_ports.PortsSearchCriteriaTest.test_list_pagination_with_href_links
+ - neutron_tempest_plugin.api.test_ports.PortsSearchCriteriaTest.test_list_pagination_with_marker
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_create_subnetpool_associate_address_scope_prefix_intersect
+ - neutron_tempest_plugin.api.test_ports.PortsTestJSON.test_create_update_port_with_no_dns_name
+ - neutron_tempest_plugin.api.test_revisions.TestRevisions.test_update_dns_domain_bumps_revision
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_default_policy_creating_network_with_policy
+ - neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_external_update_policy_from_wildcard_to_specific_tenant
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_update_subnetpool_tenant_id
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStampWithL3.test_show_floatingip_attribute_with_timestamp
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_create_subnet_from_pool_with_prefixlen
+ - neutron_tempest_plugin.api.admin.test_quotas_negative.QuotasAdminNegativeTestJSON.test_create_security_group_rule_when_quotas_is_full
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_create_subnet_from_pool_with_quota
+ - neutron_tempest_plugin.api.test_routers.RoutersSearchCriteriaTest.test_list_sorts_asc
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_policy_association_with_port_nonexistent_policy
+ - neutron_tempest_plugin.api.test_security_groups.RbacSharedSecurityGroupTest.test_filter_rbac_policies
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_trunk_with_subport_missing_segmentation_id
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.SharedNetworksTest.test_create_bulk_shared_network
+ - neutron_tempest_plugin.api.test_routers.RoutersSearchCriteriaTest.test_list_no_pagination_limit_0
+ - neutron_tempest_plugin.api.test_allowed_address_pair.AllowedAddressPairTestJSON.test_update_port_with_cidr_address_pair
+ - neutron_tempest_plugin.api.test_routers.RoutersIpV6Test.test_update_extra_route
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsSearchCriteriaTest.test_list_pagination_page_reverse_asc
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_list_trunks
+ - neutron_tempest_plugin.api.test_routers.RoutersTest.test_update_router_reset_gateway_without_snat
+ - neutron_tempest_plugin.api.test_qos.QosTestJSON.test_delete_not_allowed_if_policy_in_use_by_network
+ - neutron_tempest_plugin.api.test_metering_extensions.MeteringIpV6TestJSON.test_create_delete_metering_label_rule_with_filters
+ - neutron_tempest_plugin.api.test_extensions.ExtensionsTest.test_list_extensions_pagination
+ - neutron_tempest_plugin.api.test_metering_extensions.MeteringIpV6TestJSON.test_list_metering_labels
+ - neutron_tempest_plugin.api.test_ports.PortsTestJSON.test_create_port_without_propagate_uplink_status
+ - neutron_tempest_plugin.api.test_extra_dhcp_options.ExtraDHCPOptionsTestJSON.test_update_show_port_with_extra_dhcp_options
+ - neutron_tempest_plugin.api.test_extensions.ExtensionsTest.test_list_extensions_includes_all
+ - neutron_tempest_plugin.api.test_qos.QosMinimumBandwidthRuleTestJSON.test_rule_update
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTest.test_create_subnet_from_pool_with_default_prefixlen
+ - neutron_tempest_plugin.api.test_routers.HaRoutersTest.test_convert_legacy_router
+ - neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsIpV6TestJSON.test_show_service_profile
+ - neutron_tempest_plugin.api.test_subnets.SubnetsSearchCriteriaTest.test_list_sorts_asc
+ - neutron_tempest_plugin.api.test_routers.RoutersSearchCriteriaTest.test_list_pagination_with_href_links
+ - neutron_tempest_plugin.api.admin.test_l3_agent_scheduler.L3AgentSchedulerTestJSON.test_list_routers_on_l3_agent
+ - neutron_tempest_plugin.api.test_routers.RoutersSearchCriteriaTest.test_list_pagination_page_reverse_asc
+ - neutron_tempest_plugin.api.test_extra_dhcp_options.ExtraDHCPOptionsIpV6TestJSON.test_create_list_port_with_extra_dhcp_options
+ - neutron_tempest_plugin.api.test_networks.NetworksSearchCriteriaTest.test_list_pagination_page_reverse_desc
+ - neutron_tempest_plugin.api.test_address_scopes_negative.AddressScopeTestNegative.test_tenant_update_address_scope_shared_true
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStampWithSecurityGroup.test_create_sg_with_timestamp
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleWithDirectionTestJSON.test_rule_delete
+ - neutron_tempest_plugin.api.test_routers.RoutersSearchCriteriaTest.test_list_pagination_with_marker
+ - neutron_tempest_plugin.api.test_qos_negative.QosNegativeTestJSON.test_add_policy_with_too_long_tenant_id
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_delete_non_existent_subnetpool
+ - neutron_tempest_plugin.api.test_subnets.SubnetsSearchCriteriaTest.test_list_validation_filters
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_create_list_subnetpool
+ - neutron_tempest_plugin.api.test_networks.NetworksSearchCriteriaTest.test_list_pagination_page_reverse_with_href_links
+ - neutron_tempest_plugin.api.test_qos.QosMinimumBandwidthRuleTestJSON.test_rule_create_fail_for_missing_min_kbps
+ - neutron_tempest_plugin.api.admin.test_floating_ips_admin_actions.FloatingIPAdminTestJSON.test_associate_floating_ip_with_port_from_another_project
+ - neutron_tempest_plugin.api.admin.test_external_network_extension.ExternalNetworksRBACTestJSON.test_external_conversion_on_policy_delete
+ - neutron_tempest_plugin.api.test_allowed_address_pair.AllowedAddressPairTestJSON.test_create_list_port_with_address_pair
+ - neutron_tempest_plugin.api.test_extensions.ExtensionsTest.test_list_extensions_project_id
+ - neutron_tempest_plugin.api.admin.test_network_segment_range.NetworkSegmentRangeTestJson.test_create_network_with_tenant_specific_network_segment_range
+ - neutron_tempest_plugin.api.test_subnets.SubnetsSearchCriteriaTest.test_list_no_pagination_limit_0
+ - neutron_tempest_plugin.api.test_qos.QosBandwidthLimitRuleTestJSON.test_rule_create_fail_for_the_same_type
+ - neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_with_marker
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsSearchCriteriaTest.test_list_pagination
+ - neutron_tempest_plugin.api.test_timestamp.TestTimeStamp.test_show_subnetpool_attribute_with_timestamp
+ - neutron_tempest_plugin.api.test_extra_dhcp_options.ExtraDHCPOptionsTestJSON.test_create_list_port_with_extra_dhcp_options
+ - neutron_tempest_plugin.api.test_routers.RoutersIpV6Test.test_create_router_with_default_snat_value
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_subport_nonexistent_port_id
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_update_subnetpool_not_modifiable_shared
+ - neutron_tempest_plugin.api.test_qos.QosDscpMarkingRuleTestJSON.test_rule_update
+ - neutron_tempest_plugin.api.test_security_groups.SecGroupTest.test_create_bulk_sec_groups
+ - neutron_tempest_plugin.api.test_networks.NetworksMtuTestJSON.test_create_network_custom_mtu
+ - neutron_tempest_plugin.api.admin.test_routers_ha.RoutersTestHA.test_ha_router_creation
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.AllowedAddressPairSharedNetworkTest.test_create_with_address_pair_blocked_on_other_network
+ - neutron_tempest_plugin.api.test_metering_extensions.MeteringIpV6TestJSON.test_list_metering_label_rules
+ - neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_filtering_works_with_rbac_records_present
+ - neutron_tempest_plugin.api.test_subnetpools_negative.SubnetPoolsNegativeTestJSON.test_create_subnetpool_associate_address_scope_of_other_owner
+ - neutron_tempest_plugin.api.test_flavors_extensions.TestFlavorsIpV6TestJSON.test_create_update_delete_flavor
+ - neutron_tempest_plugin.api.test_extension_driver_port_security.PortSecTest.test_create_port_sec_with_security_group
+ - neutron_tempest_plugin.api.test_routers.RoutersSearchCriteriaTest.test_list_pagination
+ - neutron_tempest_plugin.api.test_router_interface_fip.RouterInterfaceFip.test_router_interface_fip
+ - neutron_tempest_plugin.api.test_subnetpools.SubnetPoolsTestV6.test_tenant_update_subnetpool
+ - neutron_tempest_plugin.api.test_qos.QosDscpMarkingRuleTestJSON.test_rule_create_fail_for_the_same_type
+ - neutron_tempest_plugin.api.test_floating_ips.FloatingIPTestJSON.test_create_update_floatingip_description
diff --git a/etc/testcase/functest.tempest.osinterop.yml b/etc/testcase/functest.tempest.osinterop.yml
index 124ad419..0c83ef15 100644
--- a/etc/testcase/functest.tempest.osinterop.yml
+++ b/etc/testcase/functest.tempest.osinterop.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.tempest.osinterop:
name: functest.tempest.osinterop
@@ -9,215 +18,236 @@ functest.tempest.osinterop:
type: functest
testcase: refstack_defcore
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
report:
source_archive_files:
- functest.log
- - refstack_defcore/tempest.log
+ - refstack_defcore/rally.log
- refstack_defcore/tempest-report.html
dest_archive_files:
- tempest_logs/functest.tempest.osinterop.functest.log
- tempest_logs/functest.tempest.osinterop.log
- tempest_logs/functest.tempest.osinterop.html
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.osinterop.html
sub_testcase_list:
- - tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_icmp_type_code[id-c9463db8-b44d-4f52-b6c0-8dbda99f26ce]
- - tempest.api.network.test_networks.NetworksTest.test_update_subnet_gw_dns_host_routes_dhcp[id-3d3852eb-3009-49ec-97ac-5ce83b73010a]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_volume_delete_nonexistent_volume_id[id-555efa6e-efcd-44ef-8a3b-4a7ca4837a29,negative]
- - tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_delete_deleted_image[id-e57fc127-7ba0-4693-92d7-1d8a05ebcba9,negative]
- - tempest.api.volume.test_volumes_get.VolumesGetTest.test_volume_create_get_update_delete_from_image[id-54a01030-c7fc-447c-86ee-c1182beae638,image,smoke]
- - tempest.api.volume.test_volumes_get.VolumesGetTest.test_volume_create_get_update_delete[id-27fb0e9f-fb64-41dd-8bdb-1ffa762f0d51,smoke]
- - tempest.api.image.v2.test_images.ListUserImagesTest.test_list_no_params[id-1e341d7a-90a9-494c-b143-2cdf2aeb6aee]
- - tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_remote_ip_prefix[id-16459776-5da2-4634-bce4-4b55ee3ec188]
- - tempest.api.identity.v3.test_tokens.TokensV3Test.test_create_token[id-6f8e4436-fc96-4282-8122-e41df57197a9]
- - tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_delete_server_metadata_item[id-127642d6-4c7b-4486-b7cd-07265a378658]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_get_non_existent_server[id-3436b02f-1b1e-4f03-881e-c6a602327439,negative]
- - tempest.api.compute.servers.test_create_server.ServersTestJSON.test_host_name_is_same_as_server_name[id-ac1ad47f-984b-4441-9274-c9079b7a0666]
- - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_active_status[id-ca78e20e-fddb-4ce6-b7f7-bcbf8605e66e]
- - tempest.api.compute.servers.test_servers.ServersTestJSON.test_create_with_existing_server_name[id-8fea6be7-065e-47cf-89b8-496e6f96c699]
- - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volumes_list_details_by_status[id-2943f712-71ec-482a-bf49-d5ca06216b9f]
- - tempest.api.network.test_networks.NetworksTest.test_list_subnets[id-db68ba48-f4ea-49e9-81d1-e367f6d0b20a,smoke]
- - tempest.api.compute.servers.test_servers.ServersTestJSON.test_update_server_name[id-5e6ccff8-349d-4852-a8b3-055df7988dd2]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_invalid_ip_v6_address[id-5226dd80-1e9c-4d8a-b5f9-b26ca4763fd0,negative]
- - tempest.api.compute.volumes.test_attach_volume.AttachVolumeTestJSON.test_list_get_volume_attachments[id-7fa563fe-f0f7-43eb-9e22-a1ece036b513]
- - tempest.api.network.test_networks.NetworksTest.test_show_network[id-2bf13842-c93f-4a69-83ed-717d2ec3b44e,smoke]
- - tempest.api.network.test_security_groups.SecGroupTest.test_create_show_delete_security_group_rule[id-cfb99e0e-7410-4a3d-8a0c-959a63ee77e9,smoke]
- - tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_gw_and_allocation_pools[id-8217a149-0c6c-4cfb-93db-0486f707d13f]
- - tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_image_null_id[id-ef45000d-0a72-4781-866d-4cb7bf2562ad,negative]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_update_server_set_empty_name[id-38204696-17c6-44da-9590-40f87fb5a899,negative]
- - tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_host_name_is_same_as_server_name[id-ac1ad47f-984b-4441-9274-c9079b7a0666]
- - tempest.api.network.test_ports.PortsTestJSON.test_show_port[id-c9a685bd-e83f-499c-939f-9f7863ca259f,smoke]
- - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_image[id-05e8a8e7-9659-459a-989d-92c2f501f4ba]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_delete_server_pass_id_exceeding_length_limit[id-f4d7279b-5fd2-4bf2-9ba4-ae35df0d18c5,negative]
- - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_limit[id-614cdfc1-d557-4bac-915b-3e67b48eee76]
- - tempest.api.compute.servers.test_servers.ServersTestJSON.test_update_access_server_address[id-89b90870-bc13-4b73-96af-f9d4f2b70077]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_stop_non_existent_server[id-a31460a9-49e1-42aa-82ee-06e0bb7c2d03,negative]
- - tempest.api.image.v2.test_images.ListUserImagesTest.test_get_images_schema[id-25c8d7b2-df21-460f-87ac-93130bcdc684]
- - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_with_details[id-adcbb5a7-5ad8-4b61-bd10-5380e111a877]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_reboot_non_existent_server[id-d4c023a0-9c55-4747-9dd5-413b820143c7,negative]
- - tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_delete_image_null_id[id-32248db1-ab88-4821-9604-c7c369f1f88c,negative]
- - tempest.api.volume.test_volumes_actions.VolumesActionsTest.test_volume_upload[id-d8f1ca95-3d5b-44a3-b8ca-909691c9532d,image]
- - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_non_existent_security_group[id-be308db6-a7cf-4d5c-9baf-71bafd73f35e,negative]
- - tempest.api.network.test_ports.PortsTestJSON.test_show_port_fields[id-45fcdaf2-dab0-4c13-ac6c-fcddfb579dbd]
- - tempest.api.compute.servers.test_servers.ServersTestJSON.test_create_specify_keypair[id-f9e15296-d7f9-4e62-b53f-a04e89160833]
- - tempest.api.compute.test_versions.TestVersions.test_list_api_versions[id-6c0a0990-43b6-4529-9b61-5fd8daf7c55c,smoke]
- - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_with_param_metadata[id-b5ebea1b-0603-40a0-bb41-15fcd0a53214]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_list_volumes_with_invalid_status[id-143b279b-7522-466b-81be-34a87d564a7c,negative]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_update_volume_with_empty_volume_id[id-72aeca85-57a5-4c1f-9057-f320f9ea575b,negative]
- - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_by_name[id-a28e8da4-0b56-472f-87a8-0f4d3f819c02]
- - tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_disk_format[id-4a4735a7-f22f-49b6-b0d9-66e1ef7453eb]
- - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filtered_by_name_wildcard[id-e9f624ee-92af-4562-8bec-437945a18dcb]
- - tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_all_attributes[id-a4d9ec4c-0306-4111-a75c-db01a709030b]
- - tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_status[id-7fc9e369-0f58-4d05-9aa5-0969e2d59d15]
- - tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_get_server_metadata_item[id-3043c57d-7e0e-49a6-9a96-ad569c265e6a]
- - tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshots_list_with_params[id-59f41f43-aebf-48a9-ab5d-d76340fab32b]
- - tempest.api.network.test_security_groups.SecGroupTest.test_create_list_update_show_delete_security_group[id-bfd128e5-3c92-44b6-9d66-7fe29d22c802,smoke]
- - tempest.api.compute.test_quotas.QuotasTestJSON.test_get_quotas[id-f1ef0a97-dbbb-4cca-adc5-c9fbc4f76107]
- - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_filter_by_image[id-b3304c3b-97df-46d2-8cd3-e2b6659724e7]
- - tempest.api.compute.volumes.test_attach_volume.AttachVolumeTestJSON.test_attach_detach_volume[id-52e9045a-e90d-4c0d-9087-79d657faffff]
- - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_limits_pass_string[id-679bc053-5e70-4514-9800-3dfab1a380a6,negative]
- - tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_verify_server_details[id-5de47127-9977-400a-936f-abcfbec1218f,smoke]
- - tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_delete_non_existing_image[id-6fe40f1c-57bd-4918-89cc-8500f850f3de,negative]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_delete_invalid_volume_id[id-1f035827-7c32-4019-9240-b4ec2dbd9dfd,negative]
- - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_duplicate_security_group_rule_fails[id-8fde898f-ce88-493b-adc9-4e4692879fc5,negative]
- - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volumes_list_by_status[id-39654e13-734c-4dab-95ce-7613bf8407ce]
- - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_details_with_multiple_params[id-2a7064eb-b9c3-429b-b888-33928fc5edd3]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_rebuild_non_existent_server[id-d86141a7-906e-4731-b187-d64a2ea61422,negative]
- - tempest.api.compute.servers.test_instance_actions.InstanceActionsTestJSON.test_list_instance_actions[id-77ca5cc5-9990-45e0-ab98-1de8fead201a]
- - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_server_name[id-9b067a7b-7fee-4f6a-b29c-be43fe18fc5a]
- - tempest.api.compute.servers.test_create_server.ServersTestJSON.test_list_servers_with_detail[id-585e934c-448e-43c4-acbf-d06a9b899997]
- - tempest.api.network.test_networks.NetworksTest.test_create_update_delete_network_subnet[id-0e269138-0da6-4efc-a46d-578161e7b221,smoke]
- - tempest.api.volume.test_volume_metadata.VolumesMetadataTest.test_update_show_volume_metadata_item[id-862261c5-8df4-475a-8c21-946e50e36a20]
- - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_flavor[id-573637f5-7325-47bb-9144-3476d0416908]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_create_volume_with_nonexistent_source_volid[id-47c73e08-4be8-45bb-bfdf-0c4e79b88344,negative]
- - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_non_existing_flavor[id-5913660b-223b-44d4-a651-a0fbfd44ca75,negative]
- - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_with_a_deleted_server[id-24a26f1a-1ddc-4eea-b0d7-a90cc874ad8f,negative]
- - tempest.api.network.test_networks.NetworksTest.test_delete_network_with_subnet[id-f04f61a9-b7f3-4194-90b2-9bcf660d1bfe]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_list_volumes_detail_with_nonexistent_name[id-9ca17820-a0e7-4cbd-a7fa-f4468735e359,negative]
- - tempest.api.image.v2.test_images.ListUserImagesTest.test_get_image_schema[id-622b925c-479f-4736-860d-adeaf13bc371]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_update_volume_with_nonexistent_volume_id[id-0186422c-999a-480e-a026-6a665744c30c,negative]
- - tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_version_resources[id-b9232f5e-d9e5-4d97-b96c-28d3db4de1bd,smoke]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_list_volumes_with_nonexistent_name[id-0f4aa809-8c7b-418f-8fb3-84c7a5dfc52f,negative]
- - tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_limit[id-e914a891-3cc8-4b40-ad32-e0a39ffbddbb]
- - tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_gw[id-9393b468-186d-496d-aa36-732348cd76e7]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_create_volume_with_invalid_size[id-1ed83a8a-682d-4dfb-a30e-ee63ffd6c049,negative]
- - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list[id-0b6ddd39-b948-471f-8038-4787978747c4,smoke]
- - tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_version_statuses[id-8879a470-abfb-47bb-bb8d-5a7fd279ad1e,smoke]
- - tempest.api.compute.servers.test_availability_zone.AZV2TestJSON.test_get_availability_zone_list_with_non_admin_user[id-a8333aa2-205c-449f-a828-d38c2489bf25]
- - tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_lock_unlock_server[id-80a8094c-211e-440a-ab88-9e59d556c7ee]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_delete_volume_without_passing_volume_id[id-441a1550-5d44-4b30-af0f-a6d402f52026,negative]
- - tempest.api.network.test_security_groups.SecGroupTest.test_list_security_groups[id-e30abd17-fef9-4739-8617-dc26da88e686,smoke]
- - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_show_non_existent_security_group[id-424fd5c3-9ddc-486a-b45f-39bf0c820fc6,negative]
- - tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_additional_args[id-87dfbcf9-1849-43ea-b1e4-efa3eeae9f71]
- - tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_dhcp_enabled[id-94ce038d-ff0a-4a4c-a56b-09da3ca0b55d]
- - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_non_existent_remote_groupid[id-4bf786fd-2f02-443c-9716-5b98e159a49a,negative]
- - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_bad_remote_ip_prefix[id-5f8daf69-3c5f-4aaa-88c9-db1d66f68679,negative]
- - tempest.api.network.test_ports.PortsTestJSON.test_list_ports_fields[id-ff7f117f-f034-4e0e-abff-ccef05c454b4]
- - tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_stop_start_server[id-af8eafd4-38a7-4a4b-bdbc-75145a580560]
- - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_details_pagination[id-e9138a2c-f67b-4796-8efa-635c196d01de]
- - tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_server_details[id-5de47127-9977-400a-936f-abcfbec1218f,smoke]
- - tempest.api.compute.servers.test_delete_server.DeleteServersTestJSON.test_delete_active_server[id-925fdfb4-5b13-47ea-ac8a-c36ae6fddb05]
- - tempest.api.network.test_networks.NetworksTest.test_list_networks_fields[id-6ae6d24f-9194-4869-9c85-c313cb20e080]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_with_invalid_network_uuid[id-4e72dc2d-44c5-4336-9667-f7972e95c402,negative]
- - tempest.api.image.v2.test_images_tags_negative.ImagesTagsNegativeTest.test_update_tags_for_non_existing_image[id-8cd30f82-6f9a-4c6e-8034-c1b51fba43d9,negative]
- - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_with_detail_param_metadata[id-1ca92d3c-4a8e-4b43-93f5-e4c7fb3b291d]
- - tempest.api.network.test_networks.NetworksTest.test_show_subnet_fields[id-270fff0b-8bfc-411f-a184-1e8fd35286f0]
- - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_filter_by_server_status[id-de2612ab-b7dd-4044-b0b1-d2539601911f]
- - tempest.api.compute.servers.test_instance_actions.InstanceActionsTestJSON.test_get_instance_action[id-aacc71ca-1d70-4aa5-bbf6-0ff71470e43c]
- - tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_remote_group_id[id-c2ed2deb-7a0c-44d8-8b4c-a5825b5c310b]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_get_volume_without_passing_volume_id[id-c6c3db06-29ad-4e91-beb0-2ab195fe49e3,negative]
- - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_changes_since_invalid_date[id-87d12517-e20a-4c9c-97b6-dd1628d6d6c9,negative]
- - tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_list_server_metadata[id-479da087-92b3-4dcf-aeb3-fd293b2d14ce]
- - tempest.api.volume.test_volumes_snapshots_negative.VolumesSnapshotNegativeTestJSON.test_create_snapshot_with_nonexistent_volume_id[id-e3e466af-70ab-4f4b-a967-ab04e3532ea7,negative]
- - tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_container_format[id-9959ca1d-1aa7-4b7a-a1ea-0fff0499b37e]
- - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_param_display_name_and_status[id-777c87c1-2fc4-4883-8b8e-5c0b951d1ec8]
- - tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_min_max_size[id-4ad8c157-971a-4ba8-aa84-ed61154b1e7f]
- - tempest.api.compute.test_quotas.QuotasTestJSON.test_get_default_quotas[id-9bfecac7-b966-4f47-913f-1a9e2c12134a]
- - tempest.api.image.v2.test_images_tags_negative.ImagesTagsNegativeTest.test_delete_non_existing_tag[id-39c023a2-325a-433a-9eea-649bf1414b19,negative]
- - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volumes_list_details_by_availability_zone[id-e1b80d13-94f0-4ba2-a40e-386af29f8db1]
- - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_detail_server_is_deleted[id-93055106-2d34-46fe-af68-d9ddbf7ee570,negative]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_create_volume_with_nonexistent_snapshot_id[id-0c36f6ae-4604-4017-b0a9-34fdc63096f9,negative]
- - tempest.api.volume.test_volumes_actions.VolumesActionsTest.test_volume_bootable[id-63e21b4c-0a0c-41f6-bfc3-7c2816815599]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_update_volume_with_invalid_volume_id[id-e66e40d6-65e6-4e75-bdc7-636792fa152d,negative]
- - tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_size[id-cf1b9a48-8340-480e-af7b-fe7e17690876]
- - tempest.api.network.test_networks.NetworksTest.test_show_network_fields[id-867819bb-c4b6-45f7-acf9-90edcf70aa5e]
- - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_non_existing_server_name[id-e2c77c4a-000a-4af3-a0bd-629a328bde7c,negative]
- - tempest.api.volume.test_availability_zone.AvailabilityZoneTestJSON.test_get_availability_zone_list[id-01f1ae88-eba9-4c6b-a011-6f7ace06b725]
- - tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshots_list_details_with_params[id-220a1022-1fcd-4a74-a7bd-6b859156cda2]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_create_volume_without_passing_size[id-9387686f-334f-4d31-a439-33494b9e2683,negative]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_list_volumes_detail_with_invalid_status[id-ba94b27b-be3f-496c-a00e-0283b373fa75,negative]
- - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_limits_pass_negative_value[id-62610dd9-4713-4ee0-8beb-fd2c1aa7f950,negative]
- - tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_non_existent_image[id-668743d5-08ad-4480-b2b8-15da34f81d9f,negative]
- - tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_image_specify_multibyte_character_image_name[id-3b7c6fe4-dfe7-477c-9243-b06359db51e6]
- - tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_set_server_metadata[id-211021f6-21de-4657-a68f-908878cfe251]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_create_volume_with_nonexistent_volume_type[id-10254ed8-3849-454e-862e-3ab8e6aa01d2,negative]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_rebuild_deleted_server[id-98fa0458-1485-440f-873b-fe7f0d714930,negative]
- - tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_visibility[id-7a95bb92-d99e-4b12-9718-7bc6ab73e6d2]
- - tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_list_flavors_with_detail[id-6e85fde4-b3cd-4137-ab72-ed5f418e8c24]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_update_server_name_length_exceeds_256[id-5c8e244c-dada-4590-9944-749c455b431f,negative]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_create_volume_with_size_zero[id-41331caa-eaf4-4001-869d-bc18c1869360,negative]
- - tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_list_servers[id-9a438d88-10c6-4bcd-8b5b-5b6e25e1346f,smoke]
- - tempest.api.network.test_ports.PortsTestJSON.test_list_ports[id-cf95b358-3e92-4a29-a148-52445e1ac50e,smoke]
- - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_pagination[id-af55e775-8e4b-4feb-8719-215c43b0238c]
- - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_show_non_existent_security_group_rule[id-4c094c09-000b-4e41-8100-9617600c02a6,negative]
- - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_non_existing_image[id-ff01387d-c7ad-47b4-ae9e-64fa214638fe,negative]
- - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_additional_default_security_group_fails[id-2323061e-9fbf-4eb0-b547-7e8fafc90849,negative]
- - tempest.api.network.test_networks.NetworksTest.test_list_networks[id-f7ffdeda-e200-4a7a-bcbe-05716e86bf43,smoke]
- - tempest.api.volume.test_volume_metadata.VolumesMetadataTest.test_crud_volume_metadata[id-6f5b125b-f664-44bf-910f-751591fe5769]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_delete_server_pass_negative_id[id-75f79124-277c-45e6-a373-a1d6803f4cc4,negative]
- - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_limits_greater_than_actual_count[id-d47c17fb-eebd-4287-8e95-f20a7e627b18,negative]
- - tempest.api.volume.test_snapshot_metadata.SnapshotMetadataTestJSON.test_update_show_snapshot_metadata_item[id-e8ff85c5-8f97-477f-806a-3ac364a949ed]
- - tempest.api.volume.test_snapshot_metadata.SnapshotMetadataTestJSON.test_crud_snapshot_metadata[id-a2f20f99-e363-4584-be97-bc33afb1a56c]
- - tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_created_server_vcpus[id-cbc0f52f-05aa-492b-bdc1-84b575ca294b]
- - tempest.api.network.test_subnetpools_extensions.SubnetPoolsTestJSON.test_create_list_show_update_delete_subnetpools[id-62595970-ab1c-4b7f-8fcc-fddfe55e9811,smoke]
- - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volumes_list_by_availability_zone[id-c0cfa863-3020-40d7-b587-e35f597d5d87]
- - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_changes_since_future_date[id-74745ad8-b346-45b5-b9b8-509d7447fc1f,negative]
- - tempest.api.compute.servers.test_create_server.ServersTestJSON.test_list_servers[id-9a438d88-10c6-4bcd-8b5b-5b6e25e1346f,smoke]
- - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_bad_protocol[id-981bdc22-ce48-41ed-900a-73148b583958,negative]
- - tempest.api.network.test_networks.NetworksTest.test_list_subnets_fields[id-842589e3-9663-46b0-85e4-7f01273b0412]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_server_name_blank[id-dbbfd247-c40c-449e-8f6c-d2aa7c7da7cf,negative]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_with_invalid_flavor[id-18f5227f-d155-4429-807c-ccb103887537,negative]
- - tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_host_routes_and_dns_nameservers[id-d830de0a-be47-468f-8f02-1fd996118289]
- - tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_verify_created_server_vcpus[id-cbc0f52f-05aa-492b-bdc1-84b575ca294b]
- - tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_update_server_metadata[id-344d981e-0c33-4997-8a5d-6c1d803e4134]
- - tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_reboot_server_hard[id-2cb1baf6-ac8d-4429-bf0d-ba8a0ba53e32,smoke]
- - tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_without_gateway[id-d2d596e2-8e76-47a9-ac51-d4648009f4d3]
- - tempest.api.volume.test_volumes_snapshots.VolumesSnapshotTestJSON.test_volume_from_snapshot[id-677863d1-3142-456d-b6ac-9924f667a7f4]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_volume_get_nonexistent_volume_id[id-f131c586-9448-44a4-a8b0-54ca838aa43e,negative]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_update_name_of_non_existent_server[id-aa8eed43-e2cb-4ebf-930b-da14f6a21d81,negative]
- - tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_media_types[id-657c1970-4722-4189-8831-7325f3bc4265,smoke]
- - tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_delete_image[id-f848bb94-1c6e-45a4-8726-39e3a5b23535,smoke]
- - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_bad_ethertype[id-5666968c-fff3-40d6-9efc-df1c8bd01abb,negative]
- - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_details_by_name[id-2de3a6d4-12aa-403b-a8f2-fdeb42a89623]
- - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_delete_non_existent_security_group[id-1f1bb89d-5664-4956-9fcd-83ee0fa603df,negative]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_server_metadata_exceeds_length_limit[id-7fc74810-0bd2-4cd7-8244-4f33a9db865a,negative]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_server_name_length_exceeds_256[id-c3e0fb12-07fc-4d76-a22e-37409887afe8,negative]
- - tempest.api.network.test_ports.PortsTestJSON.test_create_port_in_allowed_allocation_pools[id-0435f278-40ae-48cb-a404-b8a087bc09b1,smoke]
- - tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_protocol_integer_value[id-0a307599-6655-4220-bebc-fd70c64f2290]
- - tempest.api.compute.servers.test_servers.ServersTestJSON.test_create_server_with_admin_password[id-b92d5ec7-b1dd-44a2-87e4-45e888c46ef0]
- - tempest.api.volume.test_volumes_actions.VolumesActionsTest.test_volume_readonly_update[id-fff74e1e-5bd3-4b33-9ea9-24c103bc3f59]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_with_invalid_image[id-fcba1052-0a50-4cf3-b1ac-fae241edf02f,negative]
- - tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_list_flavors[id-e36c0eaa-dff5-4082-ad1f-3f9a80aa3f59,smoke]
- - tempest.api.volume.test_volumes_snapshots.VolumesSnapshotTestJSON.test_snapshot_create_get_list_update_delete[id-2a8abbe4-d871-46db-b049-c41f5af8216e]
- - tempest.api.volume.test_volumes_snapshots_negative.VolumesSnapshotNegativeTestJSON.test_create_snapshot_without_passing_volume_id[id-bb9da53e-d335-4309-9c15-7e76fd5e4d6d,negative]
- - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_status_non_existing[id-fcdf192d-0f74-4d89-911f-1ec002b822c4,negative]
- - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_invalid_ports[id-0d9c7791-f2ad-4e2f-ac73-abf2373b0d2d,negative]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_get_invalid_volume_id[id-30799cfd-7ee4-446c-b66c-45b383ed211b,negative]
- - tempest.api.network.test_ports.PortsTestJSON.test_create_update_delete_port[id-c72c1c0c-2193-4aca-aaa4-b1442640f51c,smoke]
- - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_create_volume_with_size_negative[id-8b472729-9eba-446e-a83b-916bdb34bef7,negative]
- - tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_list_servers_with_detail[id-585e934c-448e-43c4-acbf-d06a9b899997]
- - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_limit_results[id-67aec2d0-35fe-4503-9f92-f13272b867ed]
- - tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_allocation_pools[id-bec949c4-3147-4ba6-af5f-cd2306118404]
- - tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_set_server_metadata_item[id-58c02d4f-5c67-40be-8744-d3fa5982eb1c]
- - tempest.api.network.test_networks.NetworksTest.test_show_subnet[id-bd635d81-6030-4dd1-b3b9-31ba0cfdf6cc,smoke]
- - tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_update_image[id-f66891a7-a35c-41a8-b590-a065c2a1caa6,smoke]
- - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_filter_by_server_name[id-f9eb2b70-735f-416c-b260-9914ac6181e4]
- - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_filter_by_flavor[id-80c574cc-0925-44ba-8602-299028357dd9]
- - tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_rebuild_server[id-aaa6cdf3-55a7-461a-add9-1c8596b9a07c]
- - tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_delete_image[id-3731d080-d4c5-4872-b41a-64d0d0021314]
- - tempest.api.image.v2.test_images_tags.ImagesTagsTest.test_update_delete_tags_for_image[id-10407036-6059-4f95-a2cd-cbbbee7ed329]
- - tempest.api.volume.test_volumes_get.VolumesGetTest.test_volume_create_get_update_delete_as_clone[id-3f591b4a-7dc6-444c-bd51-77469506b3a1]
- - tempest.api.volume.test_extensions.ExtensionsTestJSON.test_list_extensions[id-94607eb0-43a5-47ca-82aa-736b41bd2e2c]
- - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_numeric_server_name[id-fd57f159-68d6-4c2a-902b-03070828a87e,negative]
- - tempest.api.network.test_ports.PortsTestJSON.test_create_bulk_port[id-67f1b811-f8db-43e2-86bd-72c074d4a42c]
- - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_with_detail_param_display_name_and_status[id-856ab8ca-6009-4c37-b691-be1065528ad4]
+ - tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_icmp_type_code
+ - tempest.api.network.test_networks.NetworksTest.test_update_subnet_gw_dns_host_routes_dhcp
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_volume_delete_nonexistent_volume_id
+ - tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_delete_deleted_image
+ - tempest.api.volume.test_volumes_get.VolumesGetTest.test_volume_create_get_update_delete_from_image
+ - tempest.api.volume.test_volumes_get.VolumesGetTest.test_volume_create_get_update_delete
+ - tempest.api.image.v2.test_images.ListUserImagesTest.test_list_no_params
+ - tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_remote_ip_prefix
+ - tempest.api.identity.v3.test_tokens.TokensV3Test.test_create_token
+ - tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_delete_server_metadata_item
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_get_non_existent_server
+ - tempest.api.compute.servers.test_create_server.ServersTestJSON.test_host_name_is_same_as_server_name
+ - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_active_status
+ - tempest.api.compute.servers.test_servers.ServersTestJSON.test_create_with_existing_server_name
+ - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volumes_list_details_by_status
+ - tempest.api.network.test_networks.NetworksTest.test_list_subnets
+ - tempest.api.compute.servers.test_servers.ServersTestJSON.test_update_server_name
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_invalid_ip_v6_address
+ - tempest.api.compute.volumes.test_attach_volume.AttachVolumeTestJSON.test_list_get_volume_attachments
+ - tempest.api.network.test_networks.NetworksTest.test_show_network
+ - tempest.api.network.test_security_groups.SecGroupTest.test_create_show_delete_security_group_rule
+ - tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_gw_and_allocation_pools
+ - tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_image_null_id
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_update_server_set_empty_name
+ - tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_host_name_is_same_as_server_name
+ - tempest.api.network.test_ports.PortsTestJSON.test_show_port
+ - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_image
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_delete_server_pass_id_exceeding_length_limit
+ - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_limit
+ - tempest.api.compute.servers.test_servers.ServersTestJSON.test_update_access_server_address
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_stop_non_existent_server
+ - tempest.api.image.v2.test_images.ListUserImagesTest.test_get_images_schema
+ - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_with_details
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_reboot_non_existent_server
+ - tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_delete_image_null_id
+ - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_non_existent_security_group
+ - tempest.api.network.test_ports.PortsTestJSON.test_show_port_fields
+ - tempest.api.compute.servers.test_servers.ServersTestJSON.test_create_specify_keypair
+ - tempest.api.compute.test_versions.TestVersions.test_list_api_versions
+ - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_with_param_metadata
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_list_volumes_with_invalid_status
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_update_volume_with_empty_volume_id
+ - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_by_name
+ - tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_disk_format
+ - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filtered_by_name_wildcard
+ - tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_all_attributes
+ - tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_status
+ - tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_get_server_metadata_item
+ - tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshots_list_with_params
+ - tempest.api.network.test_security_groups.SecGroupTest.test_create_list_update_show_delete_security_group
+ - tempest.api.compute.test_quotas.QuotasTestJSON.test_get_quotas
+ - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_filter_by_image
+ - tempest.api.compute.volumes.test_attach_volume.AttachVolumeTestJSON.test_attach_detach_volume
+ - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_limits_pass_string
+ - tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_verify_server_details
+ - tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_delete_non_existing_image
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_delete_invalid_volume_id
+ - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_duplicate_security_group_rule_fails
+ - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volumes_list_by_status
+ - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_details_with_multiple_params
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_rebuild_non_existent_server
+ - tempest.api.compute.servers.test_instance_actions.InstanceActionsTestJSON.test_list_instance_actions
+ - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_server_name
+ - tempest.api.compute.servers.test_create_server.ServersTestJSON.test_list_servers_with_detail
+ - tempest.api.network.test_networks.NetworksTest.test_create_update_delete_network_subnet
+ - tempest.api.volume.test_volume_metadata.VolumesMetadataTest.test_update_show_volume_metadata_item
+ - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_flavor
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_create_volume_with_nonexistent_source_volid
+ - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_non_existing_flavor
+ - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_with_a_deleted_server
+ - tempest.api.network.test_networks.NetworksTest.test_delete_network_with_subnet
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_list_volumes_detail_with_nonexistent_name
+ - tempest.api.image.v2.test_images.ListUserImagesTest.test_get_image_schema
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_update_volume_with_nonexistent_volume_id
+ - tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_version_resources
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_list_volumes_with_nonexistent_name
+ - tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_limit
+ - tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_gw
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_create_volume_with_invalid_size
+ - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list
+ - tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_version_statuses
+ - tempest.api.compute.servers.test_availability_zone.AZV2TestJSON.test_get_availability_zone_list_with_non_admin_user
+ - tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_lock_unlock_server
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_delete_volume_without_passing_volume_id
+ - tempest.api.network.test_security_groups.SecGroupTest.test_list_security_groups
+ - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_show_non_existent_security_group
+ - tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_additional_args
+ - tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_dhcp_enabled
+ - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_non_existent_remote_groupid
+ - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_bad_remote_ip_prefix
+ - tempest.api.network.test_ports.PortsTestJSON.test_list_ports_fields
+ - tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_stop_start_server
+ - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_details_pagination
+ - tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_server_details
+ - tempest.api.compute.servers.test_delete_server.DeleteServersTestJSON.test_delete_active_server
+ - tempest.api.network.test_networks.NetworksTest.test_list_networks_fields
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_with_invalid_network_uuid
+ - tempest.api.image.v2.test_images_tags_negative.ImagesTagsNegativeTest.test_update_tags_for_non_existing_image
+ - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_with_detail_param_metadata
+ - tempest.api.network.test_networks.NetworksTest.test_show_subnet_fields
+ - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_filter_by_server_status
+ - tempest.api.compute.servers.test_instance_actions.InstanceActionsTestJSON.test_get_instance_action
+ - tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_remote_group_id
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_get_volume_without_passing_volume_id
+ - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_changes_since_invalid_date
+ - tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_list_server_metadata
+ - tempest.api.volume.test_volumes_snapshots_negative.VolumesSnapshotNegativeTestJSON.test_create_snapshot_with_nonexistent_volume_id
+ - tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_container_format
+ - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_param_display_name_and_status
+ - tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_min_max_size
+ - tempest.api.compute.test_quotas.QuotasTestJSON.test_get_default_quotas
+ - tempest.api.image.v2.test_images_tags_negative.ImagesTagsNegativeTest.test_delete_non_existing_tag
+ - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volumes_list_details_by_availability_zone
+ - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_detail_server_is_deleted
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_create_volume_with_nonexistent_snapshot_id
+ - tempest.api.volume.test_volumes_actions.VolumesActionsTest.test_volume_bootable
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_update_volume_with_invalid_volume_id
+ - tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_size
+ - tempest.api.network.test_networks.NetworksTest.test_show_network_fields
+ - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_non_existing_server_name
+ - tempest.api.volume.test_availability_zone.AvailabilityZoneTestJSON.test_get_availability_zone_list
+ - tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshots_list_details_with_params
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_create_volume_without_passing_size
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_list_volumes_detail_with_invalid_status
+ - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_limits_pass_negative_value
+ - tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_non_existent_image
+ - tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_image_specify_multibyte_character_image_name
+ - tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_set_server_metadata
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_create_volume_with_nonexistent_volume_type
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_rebuild_deleted_server
+ - tempest.api.image.v2.test_images.ListUserImagesTest.test_list_images_param_visibility
+ - tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_list_flavors_with_detail
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_update_server_name_length_exceeds_256
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_create_volume_with_size_zero
+ - tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_list_servers
+ - tempest.api.network.test_ports.PortsTestJSON.test_list_ports
+ - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_pagination
+ - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_show_non_existent_security_group_rule
+ - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_non_existing_image
+ - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_additional_default_security_group_fails
+ - tempest.api.network.test_networks.NetworksTest.test_list_networks
+ - tempest.api.volume.test_volume_metadata.VolumesMetadataTest.test_crud_volume_metadata
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_delete_server_pass_negative_id
+ - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_limits_greater_than_actual_count
+ - tempest.api.volume.test_snapshot_metadata.SnapshotMetadataTestJSON.test_update_show_snapshot_metadata_item
+ - tempest.api.volume.test_snapshot_metadata.SnapshotMetadataTestJSON.test_crud_snapshot_metadata
+ - tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_created_server_vcpus
+ - tempest.api.network.test_subnetpools_extensions.SubnetPoolsTestJSON.test_create_list_show_update_delete_subnetpools
+ - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volumes_list_by_availability_zone
+ - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_changes_since_future_date
+ - tempest.api.compute.servers.test_create_server.ServersTestJSON.test_list_servers
+ - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_bad_protocol
+ - tempest.api.network.test_networks.NetworksTest.test_list_subnets_fields
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_server_name_blank
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_with_invalid_flavor
+ - tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_host_routes_and_dns_nameservers
+ - tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_verify_created_server_vcpus
+ - tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_update_server_metadata
+ - tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_reboot_server_hard
+ - tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_without_gateway
+ - tempest.api.volume.test_volumes_snapshots.VolumesSnapshotTestJSON.test_volume_from_snapshot
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_volume_get_nonexistent_volume_id
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_update_name_of_non_existent_server
+ - tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_media_types
+ - tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_delete_image
+ - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_bad_ethertype
+ - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_details_by_name
+ - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_delete_non_existent_security_group
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_server_metadata_exceeds_length_limit
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_server_name_length_exceeds_256
+ - tempest.api.network.test_ports.PortsTestJSON.test_create_port_in_allowed_allocation_pools
+ - tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_protocol_integer_value
+ - tempest.api.compute.servers.test_servers.ServersTestJSON.test_create_server_with_admin_password
+ - tempest.api.volume.test_volumes_actions.VolumesActionsTest.test_volume_readonly_update
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_with_invalid_image
+ - tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_list_flavors
+ - tempest.api.volume.test_volumes_snapshots.VolumesSnapshotTestJSON.test_snapshot_create_get_list_update_delete
+ - tempest.api.volume.test_volumes_snapshots_negative.VolumesSnapshotNegativeTestJSON.test_create_snapshot_without_passing_volume_id
+ - tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_status_non_existing
+ - tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_create_security_group_rule_with_invalid_ports
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_get_invalid_volume_id
+ - tempest.api.network.test_ports.PortsTestJSON.test_create_update_delete_port
+ - tempest.api.volume.test_volumes_negative.VolumesNegativeTest.test_create_volume_with_size_negative
+ - tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_list_servers_with_detail
+ - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_limit_results
+ - tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_allocation_pools
+ - tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_set_server_metadata_item
+ - tempest.api.network.test_networks.NetworksTest.test_show_subnet
+ - tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_update_image
+ - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_filter_by_server_name
+ - tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_detailed_filter_by_flavor
+ - tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_rebuild_server
+ - tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_delete_image
+ - tempest.api.image.v2.test_images_tags.ImagesTagsTest.test_update_delete_tags_for_image
+ - tempest.api.volume.test_volumes_get.VolumesGetTest.test_volume_create_get_update_delete_as_clone
+ - tempest.api.volume.test_extensions.ExtensionsTestJSON.test_list_extensions
+ - tempest.api.compute.servers.test_servers_negative.ServersNegativeTestJSON.test_create_numeric_server_name
+ - tempest.api.network.test_ports.PortsTestJSON.test_create_bulk_port
+ - tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list_with_detail_param_display_name_and_status
+ - tempest.api.network.test_networks.NetworksTest.test_external_network_visibility
+ - tempest.api.network.test_ports.PortsTestJSON.test_port_list_filter_by_router_id
+ - tempest.api.network.test_routers_negative.RoutersNegativeTest.test_add_router_interfaces_on_overlapping_subnets_returns_400
+ - tempest.api.network.test_routers_negative.RoutersNegativeTest.test_delete_non_existent_router_returns_404
+ - tempest.api.network.test_routers_negative.RoutersNegativeTest.test_router_add_gateway_invalid_network_returns_404
+ - tempest.api.network.test_routers_negative.RoutersNegativeTest.test_router_add_gateway_net_not_external_returns_400
+ - tempest.api.network.test_routers_negative.RoutersNegativeTest.test_router_remove_interface_in_use_returns_409
+ - tempest.api.network.test_routers_negative.RoutersNegativeTest.test_show_non_existent_router_returns_404
+ - tempest.api.network.test_routers_negative.RoutersNegativeTest.test_update_non_existent_router_returns_404
+ - tempest.api.network.test_routers.RoutersTest.test_add_multiple_router_interfaces
+ - tempest.api.network.test_routers.RoutersTest.test_add_remove_router_interface_with_port_id
+ - tempest.api.network.test_routers.RoutersTest.test_add_remove_router_interface_with_subnet_id
+ - tempest.api.network.test_routers.RoutersTest.test_create_show_list_update_delete_router
+ - tempest.api.network.test_routers.RoutersTest.test_update_delete_extra_route
+ - tempest.api.network.test_routers.RoutersTest.test_update_router_admin_state
+ - tempest.api.network.test_versions.NetworksApiDiscovery.test_api_version_resources
+ - tempest.api.volume.test_versions.VersionsTest.test_list_versions
+ - tempest.api.compute.keypairs.test_keypairs_v22.KeyPairsV22TestJSON.test_keypairsv22_create_list_show_with_type
+ - tempest.api.identity.v3.test_catalog.IdentityCatalogTest.test_catalog_standardization
+ - tempest.api.identity.v3.test_tokens.TokensV3Test.test_validate_token
diff --git a/etc/testcase/functest.tempest.trunk-ports.yml b/etc/testcase/functest.tempest.trunk-ports.yml
index 1b064b2b..0434b912 100644
--- a/etc/testcase/functest.tempest.trunk-ports.yml
+++ b/etc/testcase/functest.tempest.trunk-ports.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.tempest.neutron_trunk_ports:
name: functest.tempest.neutron_trunk_ports
@@ -6,19 +15,22 @@ functest.tempest.neutron_trunk_ports:
type: functest
testcase: neutron_trunk
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ - 'cp /home/opnfv/userconfig/trunk_port_blacklist.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/blacklist.yaml'
report:
source_archive_files:
- functest.log
- - neutron_trunk/tempest.log
+ - neutron_trunk/rally.log
- neutron_trunk/tempest-report.html
dest_archive_files:
- tempest_logs/functest.tempest.neutron_trunk_ports.functest.log
- tempest_logs/functest.tempest.neutron_trunk_ports.log
- tempest_logs/functest.tempest.neutron_trunk_ports.html
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.neutron_trunk_ports.html
sub_testcase_list:
- - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_add_subport
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_add_subports
- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_show_delete_trunk
- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_trunk_empty_subports_list
- neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_trunk_subports_not_specified
@@ -39,7 +51,7 @@ functest.tempest.neutron_trunk_ports:
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_duplicate_segmentation_details
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_passing_dict
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_port_id_disabled_trunk
- - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_port_id_uses_trunk_port_id
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_add_subport_port_id_uses_parent_port_id
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_subport_missing_segmentation_id
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_subport_nonexistent_port_id
- neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_subport_nonexistent_trunk
diff --git a/etc/testcase/functest.tempest.vm_lifecycle.yml b/etc/testcase/functest.tempest.vm_lifecycle.yml
index 4bf7b326..8939ad64 100644
--- a/etc/testcase/functest.tempest.vm_lifecycle.yml
+++ b/etc/testcase/functest.tempest.vm_lifecycle.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.tempest.vm_lifecycle:
name: functest.tempest.vm_lifecycle
@@ -6,31 +15,31 @@ functest.tempest.vm_lifecycle:
type: functest
testcase: tempest_custom
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
- - 'cp /home/opnfv/userconfig/pre_config/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- pre_copy:
- src_file: tempest_custom.txt
- dest_path: /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ - 'cp /home/opnfv/userconfig/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ - 'cp /home/opnfv/functest/results/tempest_custom.txt /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt'
report:
source_archive_files:
- functest.log
- - tempest_custom/tempest.log
+ - tempest_custom/rally.log
- tempest_custom/tempest-report.html
dest_archive_files:
- tempest_logs/functest.tempest.vm_lifecycle.functest.log
- tempest_logs/functest.tempest.vm_lifecycle.log
- tempest_logs/functest.tempest.vm_lifecycle.html
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.vm_lifecycle.html
sub_testcase_list:
- tempest.scenario.test_minimum_basic.TestMinimumBasicScenario.test_minimum_basic_scenario[compute,id-bdbb5441-9204-419d-a225-b4fdbfb1a1a8,image,network,volume]
- tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_cold_migration[compute,id-a4858f6c-401e-4155-9a49-d5cd053d1a2f,network,slow]
- tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_pause_unpause[compute,id-2b2642db-6568-4b35-b812-eceed3fa20ce,network,slow]
- - tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_reboot[compute,id-7b6860c2-afa3-4846-9522-adeb38dfbe08,network]
+ - tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_reboot[compute,id-7b6860c2-afa3-4846-9522-adeb38dfbe08,network,slow]
- tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_rebuild[compute,id-88a529c2-1daa-4c85-9aec-d541ba3eb699,network,slow]
- tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_resize[compute,id-719eb59d-2f42-4b66-b8b1-bb1254473967,network,slow]
- tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_stop_start[compute,id-61f1aa9a-1573-410e-9054-afa557cab021,network,slow]
- tempest.scenario.test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_suspend_resume[compute,id-5cdf9499-541d-4923-804e-b9a60620a7f0,network,slow]
- - tempest.scenario.test_server_advanced_ops.TestServerAdvancedOps.test_resize_volume_backed_server_confirm[compute,id-e6c28180-7454-4b59-b188-0257af08a63b,slow,volume]
+ - tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_resize_volume_backed_server_confirm[id-e6c28180-7454-4b59-b188-0257af08a63b,volume]
- tempest.scenario.test_server_advanced_ops.TestServerAdvancedOps.test_server_sequence_suspend_resume[compute,id-949da7d5-72c8-4808-8802-e3d70df98e2c,slow]
- tempest.scenario.test_shelve_instance.TestShelveInstance.test_shelve_instance[compute,id-1164e700-0af0-4a4c-8792-35909a88743c,image,network,slow]
- tempest.scenario.test_shelve_instance.TestShelveInstance.test_shelve_volume_backed_instance[compute,id-c1b6318c-b9da-490b-9c67-9339b627271f,image,network,slow,volume]
diff --git a/etc/testcase/functest.tempest.volume.yml b/etc/testcase/functest.tempest.volume.yml
index 62e33956..405716da 100644
--- a/etc/testcase/functest.tempest.volume.yml
+++ b/etc/testcase/functest.tempest.volume.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.tempest.volume:
name: functest.tempest.volume
@@ -6,21 +15,21 @@ functest.tempest.volume:
type: functest
testcase: tempest_custom
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
- - 'cp /home/opnfv/userconfig/pre_config/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
- pre_copy:
- src_file: tempest_custom.txt
- dest_path: /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt
+ - 'cp /home/opnfv/pre_config/tempest_conf.yaml /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml'
+ - 'cp /home/opnfv/userconfig/tempest_custom_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ - 'cp /home/opnfv/functest/results/tempest_custom.txt /usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt'
report:
source_archive_files:
- functest.log
- - tempest_custom/tempest.log
+ - tempest_custom/rally.log
- tempest_custom/tempest-report.html
dest_archive_files:
- tempest_logs/functest.tempest.volume.functest.log
- tempest_logs/functest.tempest.volume.log
- tempest_logs/functest.tempest.volume.html
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: tempest_logs/functest.tempest.volume.html
sub_testcase_list:
- tempest.api.volume.test_volumes_actions.VolumesActionsTest.test_attach_detach_volume_to_instance[compute,id-fff42874-7db5-4487-a8e1-ddda5fb5288d,smoke]
- - tempest.scenario.test_volume_boot_pattern.TestVolumeBootPattern.test_volume_boot_pattern[compute,id-557cd2c2-4eb8-4dce-98be-f86765ff311b,image,volume]
+ - tempest.scenario.test_volume_boot_pattern.TestVolumeBootPattern.test_volume_boot_pattern[compute,id-557cd2c2-4eb8-4dce-98be-f86765ff311b,image,slow,volume]
diff --git a/etc/testcase/functest.vnf.vepc.yml b/etc/testcase/functest.vnf.vepc.yml
index 63a48358..7825c42f 100644
--- a/etc/testcase/functest.vnf.vepc.yml
+++ b/etc/testcase/functest.vnf.vepc.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.vnf.vepc:
name: functest.vnf.vepc
@@ -7,11 +16,13 @@ functest.vnf.vepc:
testcase: juju_epc
image_name: opnfv/functest-vnf
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/vnf_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ - 'cp /home/opnfv/userconfig/vnf_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
report:
source_archive_files:
- functest.log
dest_archive_files:
- vnf_logs/functest.vnf.vepc.log
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: vnf_logs/functest.vnf.vepc.log
sub_testcase_list:
diff --git a/etc/testcase/functest.vnf.vims.yml b/etc/testcase/functest.vnf.vims.yml
index 8217972c..17769ca3 100644
--- a/etc/testcase/functest.vnf.vims.yml
+++ b/etc/testcase/functest.vnf.vims.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.vnf.vims:
name: functest.vnf.vims
@@ -7,11 +16,13 @@ functest.vnf.vims:
testcase: cloudify_ims
image_name: opnfv/functest-vnf
pre_condition:
- - 'cp /home/opnfv/userconfig/pre_config/vnf_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
+ - 'cp /home/opnfv/userconfig/vnf_testcases.yaml /usr/lib/python2.7/site-packages/xtesting/ci/testcases.yaml'
report:
source_archive_files:
- functest.log
dest_archive_files:
- vnf_logs/functest.vnf.vims.log
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: vnf_logs/functest.vnf.vims.log
sub_testcase_list:
diff --git a/etc/testcase/functest.vping.ssh.yml b/etc/testcase/functest.vping.ssh.yml
index 43a6e2b4..d0efb7e9 100644
--- a/etc/testcase/functest.vping.ssh.yml
+++ b/etc/testcase/functest.vping.ssh.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.vping.ssh:
name: functest.vping.ssh
@@ -11,5 +20,7 @@ functest.vping.ssh:
- functest.log
dest_archive_files:
- vping_logs/functest.vping.ssh.log
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: vping_logs/functest.vping.ssh.log
sub_testcase_list:
diff --git a/etc/testcase/functest.vping.userdata.yml b/etc/testcase/functest.vping.userdata.yml
index 1943e92e..d98c19ec 100644
--- a/etc/testcase/functest.vping.userdata.yml
+++ b/etc/testcase/functest.vping.userdata.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
functest.vping.userdata:
name: functest.vping.userdata
@@ -11,5 +20,7 @@ functest.vping.userdata:
- functest.log
dest_archive_files:
- vping_logs/functest.vping.userdata.log
- check_results_file: 'functest_results.txt'
+ check_results_files:
+ - 'functest_results.txt'
+ portal_key_file: vping_logs/functest.vping.userdata.log
sub_testcase_list:
diff --git a/etc/testcase/onap-vtp.validate.csar.yml b/etc/testcase/onap-vtp.validate.csar.yml
new file mode 100644
index 00000000..ed8f32ed
--- /dev/null
+++ b/etc/testcase/onap-vtp.validate.csar.yml
@@ -0,0 +1,26 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+onap-vtp.validate.csar:
+ name: onap-vtp.validate.csar
+ objective: onap csar validation
+ vnf_type: tosca
+ validate:
+ type: onap-vtp
+ testcase: csar-validate
+ report:
+ source_archive_files:
+ - onap-vtp.validate.csar.out
+ dest_archive_files:
+ - onap-vtp_logs/onap-vtp.validate.csar.out
+ check_results_files:
+ - onap-vtp_logs/onap-vtp.validate.csar.out
+ portal_key_file: onap-vtp_logs/onap-vtp.validate.csar.out
+ sub_testcase_list:
diff --git a/etc/testcase/onap-vvp.validate.heat.yml b/etc/testcase/onap-vvp.validate.heat.yml
new file mode 100644
index 00000000..a166bdc6
--- /dev/null
+++ b/etc/testcase/onap-vvp.validate.heat.yml
@@ -0,0 +1,28 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+onap-vvp.validate.heat:
+ name: onap-vvp.validate.heat
+ objective: onap heat template validation
+ vnf_type: heat
+ validate:
+ type: onap-vvp
+ testcase: ice_validator
+ report:
+ source_archive_files:
+ - failures
+ - report.json
+ dest_archive_files:
+ - onap-vvp_logs/failures
+ - onap-vvp_logs/report.json
+ check_results_files:
+ - onap-vvp_logs/report.json
+ portal_key_file: onap-vvp_logs/report.json
+ sub_testcase_list:
diff --git a/etc/testcase/onap.onap.vnf_lifecycle.yml b/etc/testcase/onap.onap.vnf_lifecycle.yml
deleted file mode 100644
index 9bb928ce..00000000
--- a/etc/testcase/onap.onap.vnf_lifecycle.yml
+++ /dev/null
@@ -1,16 +0,0 @@
----
-onap.onap.vnf_lifecycle:
- name: onap.onap.vnf_lifecycle
- objective: vnf lifecycle tests
- validate:
- type: vnftest
- testcase: onap_vnftest_tc001
- report:
- source_archive_files:
- - vnftest.log
- - onap.onap.vnf_lifecycle.out
- dest_archive_files:
- - onap_logs/onap.onap.vnf_lifecycle.log
- - onap_logs/onap.onap.vnf_lifecycle.out
- check_results_file: onap_logs/onap.onap.vnf_lifecycle.out
- sub_testcase_list:
diff --git a/etc/testcase/yardstick.ha.cinder_api.yml b/etc/testcase/yardstick.ha.cinder_api.yml
index ca99e10d..e6162739 100644
--- a/etc/testcase/yardstick.ha.cinder_api.yml
+++ b/etc/testcase/yardstick.ha.cinder_api.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
yardstick.ha.cinder_api:
name: yardstick.ha.cinder_api
@@ -14,5 +23,7 @@ yardstick.ha.cinder_api:
dest_archive_files:
- ha_logs/yardstick.ha.cinder_api.log
- ha_logs/yardstick.ha.cinder_api.out
- check_results_file: ha_logs/yardstick.ha.cinder_api.out
+ check_results_files:
+ - ha_logs/yardstick.ha.cinder_api.out
+ portal_key_file: ha_logs/yardstick.ha.cinder_api.log
sub_testcase_list:
diff --git a/etc/testcase/yardstick.ha.controller_restart.yml b/etc/testcase/yardstick.ha.controller_restart.yml
index fba59f8d..dddf89b7 100644
--- a/etc/testcase/yardstick.ha.controller_restart.yml
+++ b/etc/testcase/yardstick.ha.controller_restart.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
yardstick.ha.controller_restart:
name: yardstick.ha.controller_restart
@@ -12,5 +21,7 @@ yardstick.ha.controller_restart:
dest_archive_files:
- ha_logs/yardstick.ha.controller_restart.log
- ha_logs/yardstick.ha.controller_restart.out
- check_results_file: ha_logs/yardstick.ha.controller_restart.out
+ check_results_files:
+ - ha_logs/yardstick.ha.controller_restart.out
+ portal_key_file: ha_logs/yardstick.ha.controller_restart.log
sub_testcase_list:
diff --git a/etc/testcase/yardstick.ha.cpu_load.yml b/etc/testcase/yardstick.ha.cpu_load.yml
index f991df89..86c25091 100644
--- a/etc/testcase/yardstick.ha.cpu_load.yml
+++ b/etc/testcase/yardstick.ha.cpu_load.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
yardstick.ha.cpu_load:
name: yardstick.ha.cpu_load
@@ -17,5 +26,7 @@ yardstick.ha.cpu_load:
dest_archive_files:
- ha_logs/yardstick.ha.cpu_load.log
- ha_logs/yardstick.ha.cpu_load.out
- check_results_file: ha_logs/yardstick.ha.cpu_load.out
+ check_results_files:
+ - ha_logs/yardstick.ha.cpu_load.out
+ portal_key_file: ha_logs/yardstick.ha.cpu_load.log
sub_testcase_list:
diff --git a/etc/testcase/yardstick.ha.database.yml b/etc/testcase/yardstick.ha.database.yml
index a653f397..ea0a6d49 100644
--- a/etc/testcase/yardstick.ha.database.yml
+++ b/etc/testcase/yardstick.ha.database.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
yardstick.ha.database:
name: yardstick.ha.database
@@ -14,5 +23,7 @@ yardstick.ha.database:
dest_archive_files:
- ha_logs/yardstick.ha.database.log
- ha_logs/yardstick.ha.database.out
- check_results_file: ha_logs/yardstick.ha.database.out
+ check_results_files:
+ - ha_logs/yardstick.ha.database.out
+ portal_key_file: ha_logs/yardstick.ha.database.log
sub_testcase_list:
diff --git a/etc/testcase/yardstick.ha.disk_load.yml b/etc/testcase/yardstick.ha.disk_load.yml
index 6311a7bd..6969984c 100644
--- a/etc/testcase/yardstick.ha.disk_load.yml
+++ b/etc/testcase/yardstick.ha.disk_load.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
yardstick.ha.disk_load:
name: yardstick.ha.disk_load
@@ -16,5 +25,7 @@ yardstick.ha.disk_load:
dest_archive_files:
- ha_logs/yardstick.ha.disk_load.log
- ha_logs/yardstick.ha.disk_load.out
- check_results_file: ha_logs/yardstick.ha.disk_load.out
+ check_results_files:
+ - ha_logs/yardstick.ha.disk_load.out
+ portal_key_file: ha_logs/yardstick.ha.disk_load.log
sub_testcase_list:
diff --git a/etc/testcase/yardstick.ha.glance_api.yml b/etc/testcase/yardstick.ha.glance_api.yml
index ee900cf1..bbce399e 100644
--- a/etc/testcase/yardstick.ha.glance_api.yml
+++ b/etc/testcase/yardstick.ha.glance_api.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
yardstick.ha.glance_api:
name: yardstick.ha.glance_api
@@ -14,5 +23,7 @@ yardstick.ha.glance_api:
dest_archive_files:
- ha_logs/yardstick.ha.glance_api.log
- ha_logs/yardstick.ha.glance_api.out
- check_results_file: ha_logs/yardstick.ha.glance_api.out
+ check_results_files:
+ - ha_logs/yardstick.ha.glance_api.out
+ portal_key_file: ha_logs/yardstick.ha.glance_api.log
sub_testcase_list:
diff --git a/etc/testcase/yardstick.ha.haproxy.yml b/etc/testcase/yardstick.ha.haproxy.yml
index 6fc1df9d..16748dbe 100644
--- a/etc/testcase/yardstick.ha.haproxy.yml
+++ b/etc/testcase/yardstick.ha.haproxy.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
yardstick.ha.haproxy:
name: yardstick.ha.haproxy
@@ -17,5 +26,7 @@ yardstick.ha.haproxy:
dest_archive_files:
- ha_logs/yardstick.ha.haproxy.log
- ha_logs/yardstick.ha.haproxy.out
- check_results_file: ha_logs/yardstick.ha.haproxy.out
+ check_results_files:
+ - ha_logs/yardstick.ha.haproxy.out
+ portal_key_file: ha_logs/yardstick.ha.haproxy.log
sub_testcase_list:
diff --git a/etc/testcase/yardstick.ha.keystone.yml b/etc/testcase/yardstick.ha.keystone.yml
index 101b255b..31d84aab 100644
--- a/etc/testcase/yardstick.ha.keystone.yml
+++ b/etc/testcase/yardstick.ha.keystone.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
yardstick.ha.keystone:
name: yardstick.ha.keystone
@@ -14,5 +23,7 @@ yardstick.ha.keystone:
dest_archive_files:
- ha_logs/yardstick.ha.keystone.log
- ha_logs/yardstick.ha.keystone.out
- check_results_file: ha_logs/yardstick.ha.keystone.out
+ check_results_files:
+ - ha_logs/yardstick.ha.keystone.out
+ portal_key_file: ha_logs/yardstick.ha.keystone.log
sub_testcase_list:
diff --git a/etc/testcase/yardstick.ha.neutron_l3_agent.yml b/etc/testcase/yardstick.ha.neutron_l3_agent.yml
index 1fb2326d..0af89646 100644
--- a/etc/testcase/yardstick.ha.neutron_l3_agent.yml
+++ b/etc/testcase/yardstick.ha.neutron_l3_agent.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
yardstick.ha.neutron_l3_agent:
name: yardstick.ha.neutron_l3_agent
@@ -9,13 +18,13 @@ yardstick.ha.neutron_l3_agent:
- 'source /etc/yardstick/openstack.creds &&
openstack --insecure image create neutron-l3-agent_ha_image
--disk-format qcow2 --container-format bare --public
- --file /home/opnfv/userconfig/images/cirros-0.4.0-x86_64-disk.img &&
+ --file /home/opnfv/images/cirros-0.4.0-x86_64-disk.img &&
openstack --insecure flavor create --ram 512 --vcpu 1 --disk 1 neutron-l3-agent_ha_flavor'
cmds:
- "cd /home/opnfv/repos/yardstick && source /etc/yardstick/openstack.creds &&
yardstick task start tests/opnfv/test_cases/{{validate_testcase}}.yaml
--output-file /tmp/yardstick/{{testcase}}.out
- --task-args '{'file': '/home/opnfv/userconfig/pre_config/pod.yaml',
+ --task-args '{'file': '/etc/yardstick/pod.yaml',
'image': 'neutron-l3-agent_ha_image', 'flavor': 'neutron-l3-agent_ha_flavor'}'"
post_condition:
- 'source /etc/yardstick/openstack.creds &&
@@ -28,5 +37,7 @@ yardstick.ha.neutron_l3_agent:
dest_archive_files:
- ha_logs/yardstick.ha.neutron_l3_agent.log
- ha_logs/yardstick.ha.neutron_l3_agent.out
- check_results_file: ha_logs/yardstick.ha.neutron_l3_agent.out
+ check_results_files:
+ - ha_logs/yardstick.ha.neutron_l3_agent.out
+ portal_key_file: ha_logs/yardstick.ha.neutron_l3_agent.log
sub_testcase_list:
diff --git a/etc/testcase/yardstick.ha.neutron_server.yml b/etc/testcase/yardstick.ha.neutron_server.yml
index 9d741c2b..d700294c 100644
--- a/etc/testcase/yardstick.ha.neutron_server.yml
+++ b/etc/testcase/yardstick.ha.neutron_server.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
yardstick.ha.neutron_server:
name: yardstick.ha.neutron_server
@@ -14,5 +23,7 @@ yardstick.ha.neutron_server:
dest_archive_files:
- ha_logs/yardstick.ha.neutron_server.log
- ha_logs/yardstick.ha.neutron_server.out
- check_results_file: ha_logs/yardstick.ha.neutron_server.out
+ check_results_files:
+ - ha_logs/yardstick.ha.neutron_server.out
+ portal_key_file: ha_logs/yardstick.ha.neutron_server.log
sub_testcase_list:
diff --git a/etc/testcase/yardstick.ha.nova_api.yml b/etc/testcase/yardstick.ha.nova_api.yml
index b15f2db6..91f1a609 100644
--- a/etc/testcase/yardstick.ha.nova_api.yml
+++ b/etc/testcase/yardstick.ha.nova_api.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
yardstick.ha.nova_api:
name: yardstick.ha.nova_api
@@ -14,5 +23,7 @@ yardstick.ha.nova_api:
dest_archive_files:
- ha_logs/yardstick.ha.nova_api.log
- ha_logs/yardstick.ha.nova_api.out
- check_results_file: ha_logs/yardstick.ha.nova_api.out
+ check_results_files:
+ - ha_logs/yardstick.ha.nova_api.out
+ portal_key_file: ha_logs/yardstick.ha.nova_api.log
sub_testcase_list:
diff --git a/etc/testcase/yardstick.ha.rabbitmq.yml b/etc/testcase/yardstick.ha.rabbitmq.yml
index 027cb358..447f0270 100644
--- a/etc/testcase/yardstick.ha.rabbitmq.yml
+++ b/etc/testcase/yardstick.ha.rabbitmq.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
yardstick.ha.rabbitmq:
name: yardstick.ha.rabbitmq
@@ -12,5 +21,7 @@ yardstick.ha.rabbitmq:
dest_archive_files:
- ha_logs/yardstick.ha.rabbitmq.log
- ha_logs/yardstick.ha.rabbitmq.out
- check_results_file: ha_logs/yardstick.ha.rabbitmq.out
+ check_results_files:
+ - ha_logs/yardstick.ha.rabbitmq.out
+ portal_key_file: ha_logs/yardstick.ha.rabbitmq.log
sub_testcase_list:
diff --git a/etc/userconfig/ansible.cfg b/etc/userconfig/ansible.cfg
index 14c80651..15e9c250 100644
--- a/etc/userconfig/ansible.cfg
+++ b/etc/userconfig/ansible.cfg
@@ -1,2 +1,11 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
[defaults]
host_key_checking = False
diff --git a/etc/userconfig/bgpvpn_testcases.yaml b/etc/userconfig/bgpvpn_testcases.yaml
deleted file mode 100644
index cadfe025..00000000
--- a/etc/userconfig/bgpvpn_testcases.yaml
+++ /dev/null
@@ -1,18 +0,0 @@
----
-tiers:
- -
- name: features
- order: 2
- description: >-
- Test suites from feature projects
- integrated in functest
- testcases:
- -
- case_name: bgpvpn
- project_name: sdnvpn
- criteria: 100
- blocking: false
- description: >-
- Test suite from SDNVPN project.
- run:
- name: bgpvpn
diff --git a/etc/userconfig/env_config.sh.onap.sample b/etc/userconfig/env_config.sh.onap.sample
new file mode 100644
index 00000000..81363e48
--- /dev/null
+++ b/etc/userconfig/env_config.sh.onap.sample
@@ -0,0 +1,22 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+## Special environment parameters for TOSCA validation tests.
+
+# The url under which the VNF SDK container is accessible through the host.
+export HOST_URL="http://<docker host ip>:8702"
+
+# Absolute path of the CSAR file in container 'refrepo'.
+export CSAR_FILE="/opt/test.csar"
+
+## Special environment parameters for Heat validation tests.
+
+# The VNF archive should be zip file and put at $DOVETAIL_HOME/pre_config.
+# Here should give the name without postfix .zip.
+export VNF_ARCHIVE_NAME="vnf_archive_name"
diff --git a/etc/userconfig/env_config.sh.sample b/etc/userconfig/env_config.sh.sample
index f39d8d05..3909427e 100644
--- a/etc/userconfig/env_config.sh.sample
+++ b/etc/userconfig/env_config.sh.sample
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
# Project-level authentication scope (name or ID), recommend admin project.
export OS_PROJECT_NAME=admin
diff --git a/etc/userconfig/hosts.yaml.sample b/etc/userconfig/hosts.yaml.sample
index 45f8db3e..445711db 100644
--- a/etc/userconfig/hosts.yaml.sample
+++ b/etc/userconfig/hosts.yaml.sample
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
hosts_info:
192.168.141.101:
diff --git a/etc/userconfig/patrole_blacklist.yaml b/etc/userconfig/patrole_blacklist.yaml
new file mode 100644
index 00000000..8fb99fee
--- /dev/null
+++ b/etc/userconfig/patrole_blacklist.yaml
@@ -0,0 +1,37 @@
+---
+-
+ scenarios:
+ - (.*)
+ tests:
+ # need metering extension, otherwise the following 6 sub test cases will skip
+ - patrole_tempest_plugin.tests.api.network.test_metering_label_rules_rbac.MeteringLabelRulesRbacTest.test_create_metering_label_rule
+ - patrole_tempest_plugin.tests.api.network.test_metering_label_rules_rbac.MeteringLabelRulesRbacTest.test_delete_metering_label_rule
+ - patrole_tempest_plugin.tests.api.network.test_metering_label_rules_rbac.MeteringLabelRulesRbacTest.test_show_metering_label_rule
+ - patrole_tempest_plugin.tests.api.network.test_metering_labels_rbac.MeteringLabelsRbacTest.test_create_metering_label
+ - patrole_tempest_plugin.tests.api.network.test_metering_labels_rbac.MeteringLabelsRbacTest.test_delete_metering_label
+ - patrole_tempest_plugin.tests.api.network.test_metering_labels_rbac.MeteringLabelsRbacTest.test_show_metering_label
+ # need image type 'shared' and 'community' which were added in the Image API v2.5
+ - patrole_tempest_plugin.tests.api.image.test_images_member_rbac.ImagesMemberRbacTest.test_update_image_member
+ - patrole_tempest_plugin.tests.api.image.test_images_rbac.BasicOperationsImagesRbacTest.test_communitize_image
+ # need l3_agent_scheduler extension which may not enabled on some SUTs with odl
+ - patrole_tempest_plugin.tests.api.network.test_agents_rbac.L3AgentSchedulerRbacTest.test_create_router_on_l3_agent
+ - patrole_tempest_plugin.tests.api.network.test_agents_rbac.L3AgentSchedulerRbacTest.test_delete_router_from_l3_agent
+ - patrole_tempest_plugin.tests.api.network.test_agents_rbac.L3AgentSchedulerRbacTest.test_list_routers_on_l3_agent
+ # need network extension: dvr, otherwise the following 3 sub test cases will skip
+ - patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_create_distributed_router
+ - patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_show_distributed_router
+ - patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_update_distributed_router
+ # need network extension: l3-ha which may not enabled on some SUTs with odl
+ - patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_create_high_availability_router
+ - patrole_tempest_plugin.tests.api.network.test_routers_rbac.RouterRbacTest.test_update_high_availability_router
+ # need segment extension, otherwise the following 4 sub test cases will skip
+ - patrole_tempest_plugin.tests.api.network.test_segments_rbac.SegmentsPluginRbacTest.test_create_segment
+ - patrole_tempest_plugin.tests.api.network.test_segments_rbac.SegmentsPluginRbacTest.test_delete_segment
+ - patrole_tempest_plugin.tests.api.network.test_segments_rbac.SegmentsPluginRbacTest.test_show_segment
+ - patrole_tempest_plugin.tests.api.network.test_segments_rbac.SegmentsPluginRbacTest.test_update_segment
+ # need flat provider network type
+ - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_provider_physical_network
+ # need vxlan provider network type
+ - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_update_network_provider_network_type
+ - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_provider_network_type
+ - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_provider_segmentation_id
diff --git a/etc/userconfig/rally_authenticate_testcases.yaml b/etc/userconfig/rally_authenticate_testcases.yaml
new file mode 100644
index 00000000..e2e67446
--- /dev/null
+++ b/etc/userconfig/rally_authenticate_testcases.yaml
@@ -0,0 +1,31 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+tiers:
+ -
+ name: components
+ order: 1
+ description: >-
+ Run several OpenStack performance tools
+ https://docs.openstack.org/performance-docs/latest/methodologies/tools.html
+ testcases:
+ -
+ case_name: rally_full
+ project_name: functest
+ criteria: 100
+ blocking: false
+ description: >-
+ This test case runs the Authenticate scenarios of the
+ OpenStack Rally suite using several threads and iterations.
+ run:
+ name: rally_full
+ args:
+ tests:
+ - 'authenticate'
diff --git a/etc/userconfig/rally_cinder_testcases.yaml b/etc/userconfig/rally_cinder_testcases.yaml
new file mode 100644
index 00000000..245b1285
--- /dev/null
+++ b/etc/userconfig/rally_cinder_testcases.yaml
@@ -0,0 +1,31 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+tiers:
+ -
+ name: components
+ order: 1
+ description: >-
+ Run several OpenStack performance tools
+ https://docs.openstack.org/performance-docs/latest/methodologies/tools.html
+ testcases:
+ -
+ case_name: rally_full
+ project_name: functest
+ criteria: 100
+ blocking: false
+ description: >-
+ This test case runs the Cinder scenarios of the
+ OpenStack Rally suite using several threads and iterations.
+ run:
+ name: rally_full
+ args:
+ tests:
+ - 'cinder'
diff --git a/etc/userconfig/rally_glance_testcases.yaml b/etc/userconfig/rally_glance_testcases.yaml
new file mode 100644
index 00000000..abaead70
--- /dev/null
+++ b/etc/userconfig/rally_glance_testcases.yaml
@@ -0,0 +1,31 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+tiers:
+ -
+ name: components
+ order: 1
+ description: >-
+ Run several OpenStack performance tools
+ https://docs.openstack.org/performance-docs/latest/methodologies/tools.html
+ testcases:
+ -
+ case_name: rally_full
+ project_name: functest
+ criteria: 100
+ blocking: false
+ description: >-
+ This test case runs the Glance scenarios of the
+ OpenStack Rally suite using several threads and iterations.
+ run:
+ name: rally_full
+ args:
+ tests:
+ - 'glance'
diff --git a/etc/userconfig/rally_gnocchi_testcases.yaml b/etc/userconfig/rally_gnocchi_testcases.yaml
new file mode 100644
index 00000000..378b5f59
--- /dev/null
+++ b/etc/userconfig/rally_gnocchi_testcases.yaml
@@ -0,0 +1,31 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+tiers:
+ -
+ name: components
+ order: 1
+ description: >-
+ Run several OpenStack performance tools
+ https://docs.openstack.org/performance-docs/latest/methodologies/tools.html
+ testcases:
+ -
+ case_name: rally_full
+ project_name: functest
+ criteria: 100
+ blocking: false
+ description: >-
+ This test case runs the Gnocchi scenarios of the
+ OpenStack Rally suite using several threads and iterations.
+ run:
+ name: rally_full
+ args:
+ tests:
+ - 'gnocchi'
diff --git a/etc/userconfig/rally_heat_testcases.yaml b/etc/userconfig/rally_heat_testcases.yaml
new file mode 100644
index 00000000..311d4f3f
--- /dev/null
+++ b/etc/userconfig/rally_heat_testcases.yaml
@@ -0,0 +1,31 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+tiers:
+ -
+ name: components
+ order: 1
+ description: >-
+ Run several OpenStack performance tools
+ https://docs.openstack.org/performance-docs/latest/methodologies/tools.html
+ testcases:
+ -
+ case_name: rally_full
+ project_name: functest
+ criteria: 100
+ blocking: false
+ description: >-
+ This test case runs the Heat scenarios of the
+ OpenStack Rally suite using several threads and iterations.
+ run:
+ name: rally_full
+ args:
+ tests:
+ - 'heat'
diff --git a/etc/userconfig/rally_keystone_testcases.yaml b/etc/userconfig/rally_keystone_testcases.yaml
new file mode 100644
index 00000000..6a60ca7c
--- /dev/null
+++ b/etc/userconfig/rally_keystone_testcases.yaml
@@ -0,0 +1,31 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+tiers:
+ -
+ name: components
+ order: 1
+ description: >-
+ Run several OpenStack performance tools
+ https://docs.openstack.org/performance-docs/latest/methodologies/tools.html
+ testcases:
+ -
+ case_name: rally_full
+ project_name: functest
+ criteria: 100
+ blocking: false
+ description: >-
+ This test case runs the Keystone scenarios of the
+ OpenStack Rally suite using several threads and iterations.
+ run:
+ name: rally_full
+ args:
+ tests:
+ - 'keystone'
diff --git a/etc/userconfig/rally_neutron_testcases.yaml b/etc/userconfig/rally_neutron_testcases.yaml
new file mode 100644
index 00000000..66a7b030
--- /dev/null
+++ b/etc/userconfig/rally_neutron_testcases.yaml
@@ -0,0 +1,31 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+tiers:
+ -
+ name: components
+ order: 1
+ description: >-
+ Run several OpenStack performance tools
+ https://docs.openstack.org/performance-docs/latest/methodologies/tools.html
+ testcases:
+ -
+ case_name: rally_full
+ project_name: functest
+ criteria: 100
+ blocking: false
+ description: >-
+ This test case runs the Neutron scenarios of the
+ OpenStack Rally suite using several threads and iterations.
+ run:
+ name: rally_full
+ args:
+ tests:
+ - 'neutron'
diff --git a/etc/userconfig/rally_nova_testcases.yaml b/etc/userconfig/rally_nova_testcases.yaml
new file mode 100644
index 00000000..8c54de21
--- /dev/null
+++ b/etc/userconfig/rally_nova_testcases.yaml
@@ -0,0 +1,31 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+tiers:
+ -
+ name: components
+ order: 1
+ description: >-
+ Run several OpenStack performance tools
+ https://docs.openstack.org/performance-docs/latest/methodologies/tools.html
+ testcases:
+ -
+ case_name: rally_full
+ project_name: functest
+ criteria: 100
+ blocking: false
+ description: >-
+ This test case runs the Nova scenarios of the
+ OpenStack Rally suite using several threads and iterations.
+ run:
+ name: rally_full
+ args:
+ tests:
+ - 'nova'
diff --git a/etc/userconfig/rally_quotas_testcases.yaml b/etc/userconfig/rally_quotas_testcases.yaml
new file mode 100644
index 00000000..add0cca5
--- /dev/null
+++ b/etc/userconfig/rally_quotas_testcases.yaml
@@ -0,0 +1,31 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+---
+tiers:
+ -
+ name: components
+ order: 1
+ description: >-
+ Run several OpenStack performance tools
+ https://docs.openstack.org/performance-docs/latest/methodologies/tools.html
+ testcases:
+ -
+ case_name: rally_full
+ project_name: functest
+ criteria: 100
+ blocking: false
+ description: >-
+ This test case runs the Quotas scenarios of the
+ OpenStack Rally suite using several threads and iterations.
+ run:
+ name: rally_full
+ args:
+ tests:
+ - 'quotas'
diff --git a/etc/userconfig/sdnvpn_config_tc000.yaml b/etc/userconfig/sdnvpn_config_tc000.yaml
deleted file mode 100644
index 80f1b5d9..00000000
--- a/etc/userconfig/sdnvpn_config_tc000.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
-defaults:
- flavor: m1.tiny # adapt to your environment
-
-testcases:
- sdnvpn.test.functest.run_tempest:
- enabled: true
- description: Neutron BGPVPN tests in tempest
diff --git a/etc/userconfig/sdnvpn_config_testcase1.yaml b/etc/userconfig/sdnvpn_config_testcase1.yaml
deleted file mode 100644
index 5736dcff..00000000
--- a/etc/userconfig/sdnvpn_config_testcase1.yaml
+++ /dev/null
@@ -1,27 +0,0 @@
-defaults:
- flavor: m1.tiny # adapt to your environment
-
-testcases:
- sdnvpn.test.functest.testcase_1:
- enabled: true
- description: VPN provides connectivity between subnets
- testname_db: functest_testcase_1
- instance_1_name: sdnvpn-1-1
- instance_2_name: sdnvpn-1-2
- instance_3_name: sdnvpn-1-3
- instance_4_name: sdnvpn-1-4
- instance_5_name: sdnvpn-1-5
- image_name: sdnvpn-image
- net_1_name: sdnvpn-1-1-net
- subnet_1_name: sdnvpn-1-1-subnet
- subnet_1_cidr: 10.10.10.0/24
- router_1_name: sdnvpn-1-1-router
- net_2_name: sdnvpn-1-2-net
- subnet_2_name: sdnvpn-1-2-subnet
- subnet_2_cidr: 10.10.11.0/24
- router_2_name: sdnvpn-1-2-router
- secgroup_name: sdnvpn-sg
- secgroup_descr: Security group for SDNVPN test cases
- targets1: '88:88'
- targets2: '55:55'
- route_distinguishers: '11:11'
diff --git a/etc/userconfig/sdnvpn_config_testcase2.yaml b/etc/userconfig/sdnvpn_config_testcase2.yaml
deleted file mode 100644
index 2be3bdb5..00000000
--- a/etc/userconfig/sdnvpn_config_testcase2.yaml
+++ /dev/null
@@ -1,37 +0,0 @@
-defaults:
- flavor: m1.tiny # adapt to your environment
-
-testcases:
- sdnvpn.test.functest.testcase_2:
- enabled: true
- description: Tenant separation
- testname_db: functest_testcase_2
- instance_1_name: sdnvpn-2-1
- instance_2_name: sdnvpn-2-2
- instance_3_name: sdnvpn-2-3
- instance_4_name: sdnvpn-2-4
- instance_5_name: sdnvpn-2-5
- instance_1_ip: 10.10.10.11
- instance_2_ip: 10.10.10.12
- instance_3_ip: 10.10.11.13
- instance_4_ip: 10.10.10.12
- instance_5_ip: 10.10.11.13
- image_name: sdnvpn-image
- net_1_name: sdnvpn-2-1-net
- subnet_1a_name: sdnvpn-2-1a-subnet
- subnet_1a_cidr: 10.10.10.0/24
- subnet_1b_name: sdnvpn-2-1b-subnet
- subnet_1b_cidr: 10.10.11.0/24
- router_1_name: sdnvpn-2-1-router
- net_2_name: sdnvpn-2-2-net
- subnet_2a_name: sdnvpn-2-2a-subnet
- subnet_2a_cidr: 10.10.11.0/24
- subnet_2b_name: sdnvpn-2-2b-subnet
- subnet_2b_cidr: 10.10.10.0/24
- router_2_name: sdnvpn-2-2-router
- secgroup_name: sdnvpn-sg
- secgroup_descr: Security group for SDNVPN test cases
- targets1: '88:88'
- targets2: '55:55'
- route_distinguishers1: '111:111'
- route_distinguishers2: '222:222'
diff --git a/etc/userconfig/sdnvpn_config_testcase3.yaml b/etc/userconfig/sdnvpn_config_testcase3.yaml
deleted file mode 100644
index c1ffc13b..00000000
--- a/etc/userconfig/sdnvpn_config_testcase3.yaml
+++ /dev/null
@@ -1,26 +0,0 @@
-defaults:
- flavor: m1.tiny # adapt to your environment
-
-testcases:
- sdnvpn.test.functest.testcase_3:
- enabled: true
- description: Data center gateway integration
- testname_db: functest_testcase_3
- secgroup_name: sdnvpn-sg
- secgroup_descr: Security group for SDNVPN test cases
- image_name: sdnvpn-image
- ubuntu_image_name: sdnvpn-ubuntu-image
- net_1_name: sdnvpn-3-1-net
- subnet_1_name: sdnvpn-3-1-subnet
- subnet_1_cidr: 10.10.10.0/24
- router_1_name: sdnvpn-3-1-router
- quagga_net_name: sdnvpn-3-2-quagga-net
- quagga_subnet_name: sdnvpn-3-2-quagga-subnet
- quagga_subnet_cidr: 10.10.11.0/24
- quagga_router_name: sdnvpn-3-2-quagga-router
- quagga_instance_name: sdnvpn-3-2-quagga
- quagga_instance_ip: 10.10.11.5
- instance_1_name: sdnvpn-3-1
- instance_1_ip: 10.10.10.5
- import_targets: '31:31'
- export_targets: '32:32'
diff --git a/etc/userconfig/sdnvpn_config_testcase4.yaml b/etc/userconfig/sdnvpn_config_testcase4.yaml
deleted file mode 100644
index f3f5a56b..00000000
--- a/etc/userconfig/sdnvpn_config_testcase4.yaml
+++ /dev/null
@@ -1,27 +0,0 @@
-defaults:
- flavor: m1.tiny # adapt to your environment
-
-testcases:
- sdnvpn.test.functest.testcase_4:
- enabled: true
- description: VPN provides connectivity between subnets using router association
- testname_db: functest_testcase_4
- instance_1_name: sdnvpn-4-1
- instance_2_name: sdnvpn-4-2
- instance_3_name: sdnvpn-4-3
- instance_4_name: sdnvpn-4-4
- instance_5_name: sdnvpn-4-5
- image_name: sdnvpn-image
- net_1_name: sdnvpn-4-1-net
- subnet_1_name: sdnvpn-4-1-subnet
- subnet_1_cidr: 10.10.10.0/24
- router_1_name: sdnvpn-4-1-router
- net_2_name: sdnvpn-4-2-net
- subnet_2_name: sdnvpn-4-2-subnet
- subnet_2_cidr: 10.10.11.0/24
- router_2_name: sdnvpn-4-2-router
- secgroup_name: sdnvpn-sg
- secgroup_descr: Security group for SDNVPN test cases
- targets1: '88:88'
- targets2: '55:55'
- route_distinguishers: '12:12'
diff --git a/etc/userconfig/sdnvpn_config_testcase8.yaml b/etc/userconfig/sdnvpn_config_testcase8.yaml
deleted file mode 100644
index 8712122b..00000000
--- a/etc/userconfig/sdnvpn_config_testcase8.yaml
+++ /dev/null
@@ -1,23 +0,0 @@
-defaults:
- flavor: m1.tiny # adapt to your environment
-
-testcases:
- sdnvpn.test.functest.testcase_8:
- enabled: true
- description: Test floating IP and router assoc coexistence
- testname_db: functest_testcase_8
- image_name: sdnvpn-image
- instance_1_name: sdnvpn-8-1
- instance_2_name: sdnvpn-8-2
- net_1_name: sdnvpn-8-1
- subnet_1_name: sdnvpn-8-1-subnet
- subnet_1_cidr: 10.10.10.0/24
- router_1_name: sdnvpn-8-1-router
- net_2_name: sdnvpn-8-2
- subnet_2_name: sdnvpn-8-2-subnet
- subnet_2_cidr: 10.10.20.0/24
- router_2_name: sdnvpn-8-2-router
- secgroup_name: sdnvpn-sg
- secgroup_descr: Security group for SDNVPN test cases
- targets: '88:88'
- route_distinguishers: '18:18'
diff --git a/etc/userconfig/tempest_conf.yaml.sample b/etc/userconfig/tempest_conf.yaml.sample
index 944e3a9f..43a2a76b 100644
--- a/etc/userconfig/tempest_conf.yaml.sample
+++ b/etc/userconfig/tempest_conf.yaml.sample
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
# This is an empty configuration file to be filled up with the desired options
# to generate a custom tempest.conf
# Examples:
diff --git a/etc/userconfig/tempest_custom_testcases.yaml b/etc/userconfig/tempest_custom_testcases.yaml
index 5821a6ff..03170b40 100644
--- a/etc/userconfig/tempest_custom_testcases.yaml
+++ b/etc/userconfig/tempest_custom_testcases.yaml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
tiers:
-
diff --git a/etc/userconfig/testcases.yaml b/etc/userconfig/testcases.yaml
index 44b99bb1..440807bb 100644
--- a/etc/userconfig/testcases.yaml
+++ b/etc/userconfig/testcases.yaml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
tiers:
-
diff --git a/etc/userconfig/trunk_port_blacklist.yaml b/etc/userconfig/trunk_port_blacklist.yaml
new file mode 100644
index 00000000..bf4cfe08
--- /dev/null
+++ b/etc/userconfig/trunk_port_blacklist.yaml
@@ -0,0 +1,28 @@
+---
+-
+ scenarios:
+ - (.*)
+ tests:
+ # need VLAN type driver, otherwise the following 1 sub test case will skip
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestInheritJSONBase.test_add_subport
+ # need vxlan type driver, otherwise the following 1 sub test case will skip
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestJSON.test_create_subport_invalid_inherit_network_segmentation_type
+ # need vxlan or gre type driver, otherwise the following 6 sub test case will skip
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestMtusJSON.test_add_subport_with_mtu_equal_to_trunk
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestMtusJSON.test_add_subport_with_mtu_greater_than_subport
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestMtusJSON.test_create_trunk_with_mtu_equal_to_subport
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestMtusJSON.test_create_trunk_with_mtu_greater_than_subport
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestMtusJSON.test_add_subport_with_mtu_greater_than_trunk
+ - neutron_tempest_plugin.api.test_trunk_negative.TrunkTestMtusJSON.test_create_trunk_with_mtu_smaller_than_subport
+ # advanced image is required to run this test, otherwise the following 2 sub test case will skip
+ - neutron_tempest_plugin.scenario.test_trunk.TrunkTest.test_parent_port_connectivity_after_trunk_deleted_lb
+ - neutron_tempest_plugin.scenario.test_trunk.TrunkTest.test_subport_connectivity
+ # key 'revision_number' is a new extension for Newton and not support by Mitaka
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_update_trunk
+ # extension 'project-id' is a new extension for Newton
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_show_trunk_has_project_id
+ # Can not work correctly on Mitaka
+ - neutron_tempest_plugin.api.test_trunk.TrunkTestJSON.test_create_update_trunk_with_description
+ # Remove neutron trunk test cases with href links
+ - neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_page_reverse_with_href_links
+ - neutron_tempest_plugin.api.test_trunk.TrunksSearchCriteriaTest.test_list_pagination_with_href_links
diff --git a/etc/userconfig/vnf_descriptor.yaml.sample b/etc/userconfig/vnf_descriptor.yaml.sample
deleted file mode 100644
index 98741768..00000000
--- a/etc/userconfig/vnf_descriptor.yaml.sample
+++ /dev/null
@@ -1,20 +0,0 @@
-##############################################################################
-# Copyright 2018 EuropeanSoftwareMarketingLtd.
-# ===================================================================
-# Licensed under the ApacheLicense, Version2.0 (the"License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# software distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and limitations under
-# the License
-##############################################################################
-
----
-
-vnf_name: sample_firewall
-type: CSAR
-vnf_id: 123456
-csar_package_location: /home/opnfv/userconfig/pre_config/vFW_sample.csar \ No newline at end of file
diff --git a/etc/userconfig/vnf_testcases.yaml b/etc/userconfig/vnf_testcases.yaml
index fbf91ca6..21586ab6 100644
--- a/etc/userconfig/vnf_testcases.yaml
+++ b/etc/userconfig/vnf_testcases.yaml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
---
tiers:
-
diff --git a/etc/userconfig/vnftest_conf.yaml b/etc/userconfig/vnftest_conf.yaml
deleted file mode 100644
index 781540bd..00000000
--- a/etc/userconfig/vnftest_conf.yaml
+++ /dev/null
@@ -1,36 +0,0 @@
-##############################################################################
-# Copyright 2018 EuropeanSoftwareMarketingLtd.
-# ===================================================================
-# Licensed under the ApacheLicense, Version2.0 (the"License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# software distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and limitations under
-# the License
-##############################################################################
-
-dir:
- conf: /etc/nvftest
- repos: /home/vnftest/repos/vnftest
- log: /tmp/vnftest
-
-file:
- output_file: /tmp/vnftest.out
- html_file: /tmp/vnftest.htm
- reporting_file: /tmp/report.html
-
-component:
- aai_ip: 10.247.43.140
- aai_port: 30202
- aai_ssl_port: 30233
- mso_ip: 10.247.43.140
- sdc_ip: 10.247.43.140
- sdc_port: 30205
- sdc_catalog_port: 30206
- sdc_designer_user: cs0008
- sdc_tester_user: jm0007
- sdc_governance_user: gv0001
- sdc_operations_user: op0001 \ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 4d41a31a..4e6b808d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,21 +1,21 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
ansible==2.2.0
-click==6.6
-Jinja2==2.8
-keystoneauth1==3.4.0
-openstacksdk==0.11.2
-os-client-config==1.28.0
-osc-lib==1.10.0
-paramiko==1.18.0
-pbr==2.0.0
-python-cinderclient==1.9.0
-python-glanceclient==2.5.0
+click==6.7
+docker==3.4.1
+flask==1.0.2
+flask-cors==3.0.8
+gunicorn==19.9.0
+Jinja2==2.10
+os-client-config==1.29.0
+pbr==3.1.1
python-hosts==0.4.1
-python-keystoneclient==3.8.0
-python-novaclient==6.0.2
-python-openstackclient==3.2.1
-pytz==2016.7
PyYAML==3.12
-requests==2.18.0
-six==1.10.0
-stevedore==1.20.0
-shade==1.22.2
+shade==1.27.2
diff --git a/setup.cfg b/setup.cfg
index 1c81594a..1ad83a05 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,15 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
[metadata]
name = dovetail
-version = 2018.09.0
+version = 2019.12
home-page = https://wiki.opnfv.org/display/dovetail
[files]
diff --git a/test-requirements.txt b/test-requirements.txt
index 97ed0965..6daa1003 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,7 +1,15 @@
-coverage>=4.0,!=4.4 # Apache-2.0
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
flake8<3.0 # MIT
pytest # MIT
pytest-cov # MIT
yamllint
mock # BSD
-testtools
+munch # MIT
diff --git a/tox.ini b/tox.ini
index 280e359c..9f39a307 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,16 +1,24 @@
+##############################################################################
+# Copyright (c) 2019 opnfv.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
[tox]
minversion = 2.5
skipsdist = True
envlist =
pep8,
- py27,
- docs,
+ py35,
docs-linkcheck
[testenv]
-basepython=python2
+basepython=python3.5
usedevelop = True
-install_command = pip install -U {opts} {packages}
+install_command = pip3 install -U {opts} {packages}
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
commands =
@@ -19,6 +27,7 @@ commands =
--cov=dovetail \
--cov-report term-missing \
--cov-report xml \
+ --cov-fail-under=99.8 \
--ignore=cvp \
{posargs}
setenv =
@@ -30,14 +39,17 @@ whitelist_externals = wget
echo
[testenv:pep8]
+basepython = python3.5
commands = flake8 {toxinidir}
[flake8]
+basepython = python3.5
show-source = True
ignore = E123,E125,H803,E722,W503
exclude = .tox,dist,docs,*egg,build,.venv,.git
[testenv:docs]
+basepython = python3.6
deps = -rdocs/requirements.txt
commands =
sphinx-build -b html -n -d {envtmpdir}/doctrees ./docs/ {toxinidir}/docs/_build/html
@@ -45,11 +57,12 @@ commands =
whitelist_externals = echo
[testenv:docs-linkcheck]
+basepython = python3.5
deps = -rdocs/requirements.txt
commands = sphinx-build -b linkcheck -d {envtmpdir}/doctrees ./docs/ {toxinidir}/docs/_build/linkcheck
[testenv:yamllint]
-basepython = python2.7
+basepython = python3.5
files =
etc
commands =