-rw-r--r--  .circleci/config.yml | 6
-rw-r--r--  .pre-commit-config.yaml | 16
-rw-r--r--  .travis.yml | 56
-rw-r--r--  INFO | 20
-rw-r--r--  INFO.yaml | 27
-rw-r--r--  PI.md | 32
-rw-r--r--  README.md | 211
-rw-r--r--  ansible/site.cntt.yml | 5
-rw-r--r--  ansible/site.gate.yml | 90
-rw-r--r--  ansible/site.yml | 30
-rw-r--r--  build.sh | 20
-rw-r--r--  commons/traffic-profile-guidelines.rst | 8
-rw-r--r--  docker/benchmarking-cntt/Dockerfile | 2
-rw-r--r--  docker/benchmarking-cntt/testcases.yaml | 4
-rw-r--r--  docker/benchmarking/Dockerfile | 8
-rw-r--r--  docker/benchmarking/hooks/post_checkout | 2
-rw-r--r--  docker/benchmarking/testcases.yaml | 6
-rw-r--r--  docker/core/Create-new-server-in-test_create_backup.patch | 84
-rw-r--r--  docker/core/Dockerfile | 38
-rw-r--r--  docker/healthcheck/Dockerfile | 13
-rw-r--r--  docker/healthcheck/testcases.yaml | 8
-rw-r--r--  docker/smoke-cntt/Dockerfile | 2
-rw-r--r--  docker/smoke-cntt/tempest_conf.yaml | 18
-rw-r--r--  docker/smoke-cntt/testcases.yaml | 46
-rw-r--r--  docker/smoke/Dockerfile | 22
-rw-r--r--  docker/smoke/compute.txt | 62
-rw-r--r--  docker/smoke/hooks/post_checkout | 2
-rw-r--r--  docker/smoke/object.txt | 5
-rw-r--r--  docker/smoke/platform.txt | 67
-rw-r--r--  docker/smoke/testcases.yaml | 82
-rw-r--r--  docker/vnf/Dockerfile | 2
-rw-r--r--  docker/vnf/hooks/post_checkout | 2
-rw-r--r--  docker/vnf/testcases.yaml | 8
-rw-r--r--  docs/com/css/theme/OPNFV-Berlin.css | 6
-rw-r--r--  docs/com/css/theme/OPNFV.css | 6
-rw-r--r--  docs/com/pres/Summit/Berlin-2016/conversation.html | 2
-rw-r--r--  docs/com/pres/Summit/Berlin-2016/summit-Berlin.html | 24
-rw-r--r--  docs/com/pres/Summit/Berlin-2016/testapi.html | 16
-rw-r--r--  docs/com/pres/dockerslicing/dockerslicing.md | 2
-rw-r--r--  docs/com/pres/euphrates_functest_evolution/euphrates.md | 2
-rw-r--r--  docs/com/pres/gambia/gambia.md | 4
-rw-r--r--  docs/com/pres/oran/ftth.png | Bin 0 -> 162001 bytes
-rw-r--r--  docs/com/pres/oran/oran.md | 90
-rw-r--r--  docs/com/pres/oran/rc1.png | Bin 0 -> 360960 bytes
-rw-r--r--  docs/com/pres/vevent202010/index.html | 52
-rw-r--r--  docs/com/pres/vevent202010/vevent202010.md | 62
-rw-r--r--  docs/release/release-notes/functest-release.rst | 7
-rw-r--r--  docs/results/euphrates/5.0/apex.html | 398
-rw-r--r--  docs/results/euphrates/5.0/compass.html | 248
-rw-r--r--  docs/results/euphrates/5.0/daisy.html | 42
-rw-r--r--  docs/results/euphrates/5.0/fuel@aarch64.html | 62
-rw-r--r--  docs/results/euphrates/5.0/fuel@x86.html | 124
-rw-r--r--  docs/results/euphrates/5.0/joid.html | 124
-rw-r--r--  docs/results/js/default.css | 2
-rw-r--r--  docs/results/js/trend.js | 4
-rw-r--r--  docs/testing/developer/devguide/index.rst | 9
-rw-r--r--  docs/testing/user/configguide/configguide.rst | 2
-rw-r--r--  docs/testing/user/userguide/index.rst | 6
-rw-r--r--  docs/testing/user/userguide/test_details.rst | 2
-rw-r--r--  docs/testing/user/userguide/troubleshooting.rst | 40
-rw-r--r--  elements/functest/element-deps | 1
-rwxr-xr-x  elements/functest/install.d/16-functest | 14
-rw-r--r--  functest/ci/add_proxy.sh | 9
-rw-r--r--  functest/ci/convert_images.sh | 8
-rw-r--r--  functest/ci/download_images.sh | 7
-rw-r--r--  functest/ci/testcases.yaml | 150
-rw-r--r--  functest/core/cloudify.py | 32
-rw-r--r--  functest/core/singlevm.py | 99
-rw-r--r--  functest/core/tenantnetwork.py | 53
-rw-r--r--  functest/opnfv_tests/openstack/api/connection_check.py | 7
-rw-r--r--  functest/opnfv_tests/openstack/barbican/barbican.py | 8
-rw-r--r--  functest/opnfv_tests/openstack/cinder/cinder_test.py | 14
-rw-r--r--  functest/opnfv_tests/openstack/cinder/write_data.sh | 2
-rw-r--r--  functest/opnfv_tests/openstack/patrole/patrole.py | 8
-rw-r--r--  functest/opnfv_tests/openstack/rally/blacklist.yaml | 8
-rw-r--r--  functest/opnfv_tests/openstack/rally/rally.py | 137
-rw-r--r--  functest/opnfv_tests/openstack/rally/scenario/full/opnfv-cinder.yaml | 27
-rw-r--r--  functest/opnfv_tests/openstack/rally/scenario/full/opnfv-nova.yaml | 30
-rw-r--r--  functest/opnfv_tests/openstack/rally/scenario/sanity/opnfv-nova.yaml | 15
-rw-r--r--  functest/opnfv_tests/openstack/rally/scenario/templates/server_with_ports.yaml.template | 2
-rw-r--r--  functest/opnfv_tests/openstack/refstack/refstack.py | 18
-rw-r--r--  functest/opnfv_tests/openstack/shaker/shaker.py | 42
-rw-r--r--  functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml | 12
-rw-r--r--  functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf_ovn.yaml | 10
-rw-r--r--  functest/opnfv_tests/openstack/tempest/tempest.py | 314
-rw-r--r--  functest/opnfv_tests/openstack/vmtp/vmtp.py | 44
-rw-r--r--  functest/opnfv_tests/openstack/vping/vping_ssh.py | 15
-rw-r--r--  functest/opnfv_tests/openstack/vping/vping_userdata.py | 18
-rw-r--r--  functest/opnfv_tests/sdn/odl/odl.py | 17
-rw-r--r--  functest/opnfv_tests/vnf/epc/juju_epc.py | 70
-rw-r--r--  functest/opnfv_tests/vnf/ims/clearwater.py | 29
-rw-r--r--  functest/opnfv_tests/vnf/ims/cloudify_ims.py | 12
-rw-r--r--  functest/opnfv_tests/vnf/ims/heat_ims.py | 17
-rw-r--r--  functest/opnfv_tests/vnf/router/cloudify_vrouter.py | 12
-rw-r--r--  functest/opnfv_tests/vnf/router/test_controller/function_test_exec.py | 15
-rw-r--r--  functest/opnfv_tests/vnf/router/utilvnf.py | 39
-rw-r--r--  functest/opnfv_tests/vnf/router/vnf_controller/ssh_client.py | 2
-rw-r--r--  functest/opnfv_tests/vnf/router/vnf_controller/vm_controller.py | 8
-rw-r--r--  functest/opnfv_tests/vnf/router/vnf_controller/vnf_controller.py | 22
-rw-r--r--  functest/tests/unit/odl/test_odl.py | 51
-rw-r--r--  functest/tests/unit/openstack/cinder/test_cinder.py | 11
-rw-r--r--  functest/tests/unit/openstack/rally/test_rally.py | 22
-rw-r--r--  functest/tests/unit/openstack/tempest/test_tempest.py | 41
-rw-r--r--  functest/tests/unit/openstack/vmtp/test_vmtp.py | 11
-rw-r--r--  functest/tests/unit/openstack/vping/test_vping_ssh.py | 8
-rw-r--r--  functest/tests/unit/utils/test_functest_utils.py | 81
-rw-r--r--  functest/utils/config.py | 14
-rw-r--r--  functest/utils/env.py | 3
-rw-r--r--  functest/utils/functest_utils.py | 49
-rw-r--r--  requirements.txt | 5
-rw-r--r--  rtd-requirements.txt | 3
-rw-r--r--  setup.cfg | 1
-rw-r--r--  test-requirements.txt | 8
-rw-r--r--  tox.ini | 67
-rw-r--r--  upper-constraints.txt | 37
115 files changed, 2413 insertions(+), 1686 deletions(-)
diff --git a/.circleci/config.yml b/.circleci/config.yml
index a5c8346ee..f7817da9c 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -3,15 +3,15 @@ version: 2
jobs:
build:
docker:
- - image: circleci/python:3.8
+ - image: circleci/python:3.9
steps:
- checkout
- run:
name: Install dependendencies
- command: sudo apt-get update && sudo apt-get install python2.7-dev enchant
+ command: sudo apt-get update && sudo apt-get install enchant-2
- run:
name: Run tox
- command: sudo pip install tox && tox
+ command: sudo pip install tox tox-pip-version && tox
- store_artifacts:
path: api/build
destination: api
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 000000000..323386c88
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,16 @@
+---
+repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.4.0
+ hooks:
+ - id: trailing-whitespace
+ exclude: '.patch$'
+ # trailing blanks shall not preferably be fixed in patch files
+
+ - repo: https://github.com/Lucas-C/pre-commit-hooks
+ rev: v1.4.2
+ hooks:
+ - id: remove-tabs
+ stages: [commit]
+ exclude: '^(.git/|docs/make.bat|docs/Makefile|)'
+
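The hooks declared above are normally exercised through the pre-commit CLI. A minimal local workflow might look as follows (a sketch; it assumes pre-commit is installed from PyPI and invoked from the repository root, which is not part of this change):

```shell
# Install the pre-commit framework (assumption: pip is available on the host).
pip install pre-commit
# Wire the hooks from .pre-commit-config.yaml into .git/hooks/pre-commit.
pre-commit install
# Run every configured hook once against the whole tree.
pre-commit run --all-files
```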
diff --git a/.travis.yml b/.travis.yml
index 943290ab4..6b6e0a672 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,18 +2,12 @@
sudo: required
services: docker
language: generic
-dist: xenial
-addons:
- apt:
- packages:
- - libssl1.0.0
+dist: focal
before_script:
- sudo docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
- - sudo add-apt-repository -y ppa:deadsnakes/ppa
- - sudo apt-get update
- - sudo apt-get install python3.8 python3.8-dev python3.8-distutils enchant
- - sudo pip install tox
+ - sudo pip install tox tox-pip-version
+ - sudo apt-get update && sudo apt-get install -y enchant
- sudo -E docker login -u="${DOCKER_USERNAME}" -p="${DOCKER_PASSWORD}"
- (cd .. && git clone https://github.com/estesp/manifest-tool)
- (cd ../manifest-tool && git checkout v0.9.0)
@@ -24,21 +18,21 @@ jobs:
- stage: run unit tests
script: >
tox -e \
- docs,pep8,pylint,yamllint,ansiblelint,bashate,bandit,py38,cover
+ docs,pep8,pylint,yamllint,bashate,bandit,py38,cover
- stage: build functest-core images
- script: sudo -E bash build.sh
+ script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs="docker/core"
- arm64_dirs=""
- arm_dirs=""
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs=""
- arm64_dirs="docker/core"
- arm_dirs=""
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs=""
@@ -51,73 +45,73 @@ jobs:
--template ${DOCKER_USERNAME}/functest-core:ARCH-latest \
--target ${DOCKER_USERNAME}/functest-core:latest
- stage: build all functest images
- script: sudo -E bash build.sh
+ script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs="docker/healthcheck"
- arm64_dirs=""
- arm_dirs=""
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs=""
- arm64_dirs="docker/healthcheck"
- arm_dirs=""
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs=""
- arm64_dirs=""
- arm_dirs="docker/healthcheck"
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs="docker/smoke"
- arm64_dirs=""
- arm_dirs=""
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs=""
- arm64_dirs="docker/smoke"
- arm_dirs=""
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs=""
- arm64_dirs=""
- arm_dirs="docker/smoke"
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs="docker/benchmarking"
- arm64_dirs=""
- arm_dirs=""
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs=""
- arm64_dirs="docker/benchmarking"
- arm_dirs=""
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs=""
- arm64_dirs=""
- arm_dirs="docker/benchmarking"
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs="docker/vnf"
- arm64_dirs=""
- arm_dirs=""
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs=""
- arm64_dirs="docker/vnf"
- arm_dirs=""
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs=""
@@ -145,37 +139,37 @@ jobs:
--template ${DOCKER_USERNAME}/functest-vnf:ARCH-latest \
--target ${DOCKER_USERNAME}/functest-vnf:latest
- stage: build all functest cntt images
- script: sudo -E bash build.sh
+ script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs="docker/smoke-cntt"
- arm64_dirs=""
- arm_dirs=""
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs=""
- arm64_dirs="docker/smoke-cntt"
- arm_dirs=""
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs=""
- arm64_dirs=""
- arm_dirs="docker/smoke-cntt"
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs="docker/benchmarking-cntt"
- arm64_dirs=""
- arm_dirs=""
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs=""
- arm64_dirs="docker/benchmarking-cntt"
- arm_dirs=""
- - script: sudo -E bash build.sh
+ - script: sudo -E sh build.sh
env:
- REPO="${DOCKER_USERNAME}"
- amd64_dirs=""
diff --git a/INFO b/INFO
index dcf064071..eca7c5141 100644
--- a/INFO
+++ b/INFO
@@ -1,9 +1,9 @@
Project: Base System Functionality Testing Project (functest)
Project Creation Date: January 20, 2015
Project Category: Integration & Testing
-Lifecycle State: Incubation
-Primary Contact: Cedric Ollivier (cedric.ollivier@orange.com)
-Project Lead: Cedric Ollivier (cedric.ollivier@orange.com)
+Lifecycle State: Mature
+Primary Contact: Cédric Ollivier (cedric.ollivier@orange.com)
+Project Lead: Cédric Ollivier (cedric.ollivier@orange.com)
Jira Project Name: Base System Functionality Testing Project
Jira Project Prefix: FUNCTEST
Mailing list tag: [functest]
@@ -11,18 +11,6 @@ IRC: Server:freenode.net Channel:#opnfv-functest
Repository: functest
Committers:
-Morgan Richomme <morgan.richomme@orange.com>
-Cedric Ollivier <cedric.ollivier@orange.com>
-Juha Kosonen <juha.kosonen@nokia.com>
-Valentin Boucher <valentin.boucher@kontron.com>
-Viktor Tikkanen <viktor.tikkanen@nokia.com>
-Linda Wang <wangwulin@huawei.com>
+Cédric Ollivier <cedric.ollivier@orange.com>
Link to TSC approval of the project: http://meetbot.opnfv.org/meetings/opnfv-meeting/2015/opnfv-meeting.2015-01-20-14.57.html
-
-Link(s) to approval of additional committers:
-
-http://lists.opnfv.org/pipermail/opnfv-tech-discuss/2015-April/001971.html
-http://ircbot.wl.linuxfoundation.org/meetings/opnfv-testperf/2015/opnfv-testperf.2015-09-29-13.00.html
-http://ircbot.wl.linuxfoundation.org/meetings/opnfv-testperf/2016/opnfv-testperf.2016-03-01-08.00.html
-http://ircbot.wl.linuxfoundation.org/meetings/opnfv-functest/2016/opnfv-functest.2016-10-11-08.01.html
diff --git a/INFO.yaml b/INFO.yaml
index ef0954f18..cf3b176c2 100644
--- a/INFO.yaml
+++ b/INFO.yaml
@@ -2,7 +2,7 @@
project: 'Base System Functionality Testing Project (functest)'
project_creation_date: 'January 20, 2015'
project_category: 'Integration & Testing'
-lifecycle_state: 'Incubation'
+lifecycle_state: 'Mature'
project_lead: &opnfv_functest_ptl
name: 'Cédric Ollivier'
email: 'cedric.ollivier@orange.com'
@@ -37,31 +37,6 @@ repositories:
- 'functest-xtesting'
committers:
- <<: *opnfv_functest_ptl
- - name: 'Morgan Richomme'
- email: 'morgan.richomme@orange.com'
- company: 'orange'
- id: 'mrichomme'
- timezone: ''
- - name: 'valentin boucher'
- email: 'valentin.boucher@kontron.com'
- company: 'kontron'
- id: 'boucherv'
- timezone: ''
- - name: 'Viktor Tikkanen'
- email: 'viktor.tikkanen@nokia.com'
- company: 'nokia'
- id: 'vitikkan'
- timezone: ''
- - name: 'Juha Kosonen'
- email: 'juha.kosonen@nokia.com'
- company: 'nokia'
- id: 'jukosone'
- timezone: ''
- - name: 'Linda Wang'
- email: 'wangwulin@huawei.com'
- company: 'huawei'
- id: 'wangwulin'
- timezone: ''
tsc:
# yamllint disable rule:line-length
approval: 'http//meetbot.opnfv.org/meetings/opnfv-meeting/2015/opnfv-meeting.2015-01-20-14.57.html'
diff --git a/PI.md b/PI.md
new file mode 100644
index 000000000..cc7c25bc8
--- /dev/null
+++ b/PI.md
@@ -0,0 +1,32 @@
+# Run Functest containers on Raspberry PI
+
+All Functest containers (Hunter and newer) are cross-compiled for arm and arm64
+via [travis-ci](https://travis-ci.org/collivier/functest/branches).
+They are built on top of Alpine armhf to support most Raspberry PI models.
+
+Docker manifests are published so that these containers can be run with the
+same commands regardless of the architecture.
+
+## Copy the image to the SD card
+
+> https://www.raspberrypi.org/documentation/installation/installing-images/linux.md
+>
+> This is very important, as you will lose all the data on the hard drive if you provide the wrong device name.
+> Make sure the device name is the name of the whole SD card as described above, not just a partition. For example: sdd, not sdds1 or sddp1; mmcblk0, not mmcblk0p1.
+
+
+## Download Raspbian and copy it to the SD card
+
+```shell
+wget https://downloads.raspberrypi.org/raspbian/images/raspbian-2018-11-15/2018-11-13-raspbian-stretch.zip
+unzip 2018-11-13-raspbian-stretch.zip
+sudo dd bs=4M if=2018-11-13-raspbian-stretch.img of=/dev/mmcblk0 conv=fsync
+```
+
+## Install Docker
+
+```shell
+curl -sSL https://get.docker.com | sudo sh
+```
+
+## That's all folks
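Because the published manifests are multi-arch, the suites then run on the Pi with exactly the same commands as on x86 hosts. The sketch below mirrors the healthcheck invocation from the README added later in this change (the env, openstack.creds and images paths are the ones documented there):

```shell
# Same command as on amd64: Docker resolves the armhf image via the manifest.
sudo docker run --env-file env \
    -v $(pwd)/openstack.creds:/home/opnfv/functest/conf/env_file \
    -v $(pwd)/images:/home/opnfv/functest/images \
    opnfv/functest-healthcheck
```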
diff --git a/README.md b/README.md
new file mode 100644
index 000000000..c9f1f03d1
--- /dev/null
+++ b/README.md
@@ -0,0 +1,211 @@
+# Functest
+
+Network virtualization has dramatically changed our architectures, which calls
+for more automation and powerful testing tools such as Functest, a collection of
+state-of-the-art virtual infrastructure test suites, including automatic VNF
+testing (cf.
+[[1]](https://www.linuxfoundation.org/press-release/2019/05/opnfv-hunter-delivers-test-tools-ci-cd-framework-to-enable-common-nfvi-for-verifying-vnfs/)).
+
+In the context of OPNFV, Functest verifies any kind of OpenStack and Kubernetes
+deployment, including production environments. It conforms to upstream rules and
+smoothly integrates many of the test cases available in the open-source
+ecosystem. It includes about 3000 functional tests plus 3 hours of upstream API
+and dataplane benchmarks, complemented by Virtual Network Function deployments
+and testing (vIMS, vRouter and vEPC) to ensure that platforms meet Network
+Functions Virtualization requirements. Raspberry PI is also supported to verify
+datacenters at the lowest cost (50 euros, hardware and software included).
+
+| Functest releases | OpenStack releases |
+|-------------------|--------------------|
+| Jerma | Train |
+| Kali | Ussuri |
+| Leguer | Victoria |
+| Wallaby | Wallaby |
+| **Master** | **next Xena** |
+
+## Prepare your environment
+
+cat env
+```
+DEPLOY_SCENARIO=XXX # if not os-nosdn-nofeature-noha scenario
+NAMESERVER=XXX # if not 8.8.8.8
+EXTERNAL_NETWORK=XXX # if not first network with router:external=True
+DASHBOARD_URL=XXX # else tempest_horizon will be skipped
+NEW_USER_ROLE=XXX # if not member
+SDN_CONTROLLER_IP=XXX # if odl scenario
+VOLUME_DEVICE_NAME=XXX # if not vdb
+FLAVOR_EXTRA_SPECS=hw:mem_page_size:large # if fdio scenarios
+```
+
+cat openstack.creds
+```
+export OS_AUTH_URL=XXX
+export OS_USER_DOMAIN_NAME=XXX
+export OS_PROJECT_DOMAIN_NAME=XXX
+export OS_USERNAME=XXX
+export OS_PROJECT_NAME=XXX
+export OS_PASSWORD=XXX
+export OS_IDENTITY_API_VERSION=3
+export OS_REGION_NAME=XXX
+```
+
+mkdir -p images && wget -q -O- https://git.opnfv.org/functest/plain/functest/ci/download_images.sh | sh -s -- images && ls -1 images/*
+```
+images/cirros-0.6.1-aarch64-disk.img
+images/cirros-0.6.1-x86_64-disk.img
+images/cloudify-docker-manager-community-19.01.24.tar
+images/Fedora-Cloud-Base-30-1.2.x86_64.qcow2
+images/shaker-image-1.3.0+stretch.qcow2
+images/ubuntu-14.04-server-cloudimg-amd64-disk1.img
+images/ubuntu-14.04-server-cloudimg-arm64-uefi1.img
+images/ubuntu-16.04-server-cloudimg-amd64-disk1.img
+images/vyos-1.1.8-amd64.qcow2
+```
+
+## Run healthcheck suite
+
+```shell
+sudo docker run --env-file env \
+ -v $(pwd)/openstack.creds:/home/opnfv/functest/conf/env_file \
+ -v $(pwd)/images:/home/opnfv/functest/images \
+ opnfv/functest-healthcheck
+```
+
+```
++--------------------------+------------------+---------------------+------------------+----------------+
+| TEST CASE | PROJECT | TIER | DURATION | RESULT |
++--------------------------+------------------+---------------------+------------------+----------------+
+| connection_check | functest | healthcheck | 00:03 | PASS |
+| tenantnetwork1 | functest | healthcheck | 00:05 | PASS |
+| tenantnetwork2 | functest | healthcheck | 00:06 | PASS |
+| vmready1 | functest | healthcheck | 00:06 | PASS |
+| vmready2 | functest | healthcheck | 00:08 | PASS |
+| singlevm1 | functest | healthcheck | 00:32 | PASS |
+| singlevm2 | functest | healthcheck | 00:37 | PASS |
+| vping_ssh | functest | healthcheck | 00:46 | PASS |
+| vping_userdata | functest | healthcheck | 00:39 | PASS |
+| cinder_test | functest | healthcheck | 01:05 | PASS |
+| tempest_smoke | functest | healthcheck | 05:39 | PASS |
+| tempest_horizon | functest | healthcheck | 01:05 | PASS |
+| odl | functest | healthcheck | 00:00 | SKIP |
++--------------------------+------------------+---------------------+------------------+----------------+
+```
+
+## Run smoke suite
+
+```shell
+sudo docker run --env-file env \
+ -v $(pwd)/openstack.creds:/home/opnfv/functest/conf/env_file \
+ -v $(pwd)/images:/home/opnfv/functest/images \
+ opnfv/functest-smoke
+```
+
+```
++---------------------------+------------------+---------------+------------------+----------------+
+| TEST CASE | PROJECT | TIER | DURATION | RESULT |
++---------------------------+------------------+---------------+------------------+----------------+
+| tempest_neutron | functest | smoke | 15:30 | PASS |
+| tempest_cinder | functest | smoke | 02:01 | PASS |
+| tempest_keystone | functest | smoke | 01:17 | PASS |
+| tempest_heat | functest | smoke | 22:14 | PASS |
+| tempest_telemetry | functest | smoke | 00:00 | SKIP |
+| rally_sanity | functest | smoke | 17:24 | PASS |
+| refstack_compute | functest | smoke | 07:03 | PASS |
+| refstack_object | functest | smoke | 02:09 | PASS |
+| refstack_platform | functest | smoke | 07:31 | PASS |
+| tempest_full | functest | smoke | 41:52 | PASS |
+| tempest_scenario | functest | smoke | 08:42 | PASS |
+| tempest_slow | functest | smoke | 43:42 | PASS |
+| patrole_admin | functest | smoke | 21:06 | PASS |
+| patrole_member | functest | smoke | 21:23 | PASS |
+| patrole_reader | functest | smoke | 21:56 | PASS |
+| tempest_barbican | functest | smoke | 02:30 | PASS |
+| tempest_octavia | functest | smoke | 00:00 | SKIP |
+| tempest_cyborg | functest | smoke | 00:00 | SKIP |
++---------------------------+------------------+---------------+------------------+----------------+
+```
+
+## Run smoke CNTT suite
+
+```shell
+sudo docker run --env-file env \
+ -v $(pwd)/openstack.creds:/home/opnfv/functest/conf/env_file \
+ -v $(pwd)/images:/home/opnfv/functest/images \
+ opnfv/functest-smoke-cntt
+```
+
+```
++-------------------------------+------------------+---------------+------------------+----------------+
+| TEST CASE | PROJECT | TIER | DURATION | RESULT |
++-------------------------------+------------------+---------------+------------------+----------------+
+| tempest_neutron_cntt | functest | smoke | 11:35 | PASS |
+| tempest_cinder_cntt | functest | smoke | 01:58 | PASS |
+| tempest_keystone_cntt | functest | smoke | 01:13 | PASS |
+| tempest_heat_cntt | functest | smoke | 22:32 | PASS |
+| rally_sanity_cntt | functest | smoke | 17:16 | PASS |
+| tempest_full_cntt | functest | smoke | 41:13 | PASS |
+| tempest_scenario_cntt | functest | smoke | 08:57 | PASS |
+| tempest_slow_cntt | functest | smoke | 35:58 | PASS |
++-------------------------------+------------------+---------------+------------------+----------------+
+```
+
+## Run benchmarking suite
+
+```shell
+sudo docker run --env-file env \
+ -v $(pwd)/openstack.creds:/home/opnfv/functest/conf/env_file \
+ -v $(pwd)/images:/home/opnfv/functest/images \
+ opnfv/functest-benchmarking
+```
+
+```
++--------------------+------------------+----------------------+------------------+----------------+
+| TEST CASE | PROJECT | TIER | DURATION | RESULT |
++--------------------+------------------+----------------------+------------------+----------------+
+| rally_full | functest | benchmarking | 93:03 | PASS |
+| rally_jobs | functest | benchmarking | 27:05 | PASS |
+| vmtp | functest | benchmarking | 17:56 | PASS |
+| shaker | functest | benchmarking | 24:02 | PASS |
++--------------------+------------------+----------------------+------------------+----------------+
+```
+
+## Run benchmarking CNTT suite
+
+```shell
+sudo docker run --env-file env \
+ -v $(pwd)/openstack.creds:/home/opnfv/functest/conf/env_file \
+ -v $(pwd)/images:/home/opnfv/functest/images \
+ opnfv/functest-benchmarking-cntt
+```
+
+```
++-------------------------+------------------+----------------------+------------------+----------------+
+| TEST CASE | PROJECT | TIER | DURATION | RESULT |
++-------------------------+------------------+----------------------+------------------+----------------+
+| rally_full_cntt | functest | benchmarking | 89:52 | PASS |
+| rally_jobs_cntt | functest | benchmarking | 19:39 | PASS |
+| vmtp | functest | benchmarking | 16:59 | PASS |
+| shaker | functest | benchmarking | 23:43 | PASS |
++-------------------------+------------------+----------------------+------------------+----------------+
+```
+
+## Run vnf suite
+
+```shell
+sudo docker run --env-file env \
+ -v $(pwd)/openstack.creds:/home/opnfv/functest/conf/env_file \
+ -v $(pwd)/images:/home/opnfv/functest/images \
+ opnfv/functest-vnf
+```
+
+```
++----------------------+------------------+--------------+------------------+----------------+
+| TEST CASE | PROJECT | TIER | DURATION | RESULT |
++----------------------+------------------+--------------+------------------+----------------+
+| cloudify | functest | vnf | 05:08 | PASS |
+| cloudify_ims | functest | vnf | 24:46 | PASS |
+| heat_ims | functest | vnf | 33:12 | PASS |
+| vyos_vrouter | functest | vnf | 15:53 | PASS |
+| juju_epc | functest | vnf | 27:52 | PASS |
++----------------------+------------------+--------------+------------------+----------------+
+```
diff --git a/ansible/site.cntt.yml b/ansible/site.cntt.yml
index b0b51b1d6..900f619f6 100644
--- a/ansible/site.cntt.yml
+++ b/ansible/site.cntt.yml
@@ -21,6 +21,7 @@
- tempest_smoke
- tempest_horizon
- container: functest-smoke-cntt
+ timeout: 2h
tests:
- tempest_neutron_cntt
- tempest_cinder_cntt
@@ -31,17 +32,17 @@
- tempest_scenario_cntt
- tempest_slow_cntt
- container: functest-benchmarking-cntt
+ timeout: 4h
tests:
- rally_full_cntt
- rally_jobs_cntt
- vmtp
- shaker
- container: functest-vnf
+ timeout: 2h
tests:
- cloudify
- cloudify_ims
- heat_ims
- vyos_vrouter
- juju_epc
- properties:
- execution-type: SEQUENTIALLY
diff --git a/ansible/site.gate.yml b/ansible/site.gate.yml
new file mode 100644
index 000000000..ed9ce7812
--- /dev/null
+++ b/ansible/site.gate.yml
@@ -0,0 +1,90 @@
+---
+- hosts:
+ - 127.0.0.1
+ roles:
+ - role: collivier.xtesting
+ project: functest
+ use_gerrit: true
+ gerrit_project: functest
+ git_url: https://gerrit.opnfv.org/gerrit/functest
+ docker_tags:
+ - latest:
+ branch: master
+ dependency: '3.16'
+ builds:
+ dependency:
+ repo: _
+ dport:
+ container: alpine
+ steps:
+ - name: build opnfv/functest-core
+ containers:
+ - name: functest-core
+ ref_arg: BRANCH
+ path: docker/core
+ - name: build containers
+ containers:
+ - name: functest-healthcheck
+ ref_arg: BRANCH
+ path: docker/healthcheck
+ - name: functest-smoke
+ ref_arg: BRANCH
+ path: docker/smoke
+ - name: functest-benchmarking
+ ref_arg: BRANCH
+ path: docker/benchmarking
+ - name: functest-vnf
+ ref_arg:
+ path: docker/vnf
+ suites:
+ - container: functest-healthcheck
+ tests:
+ - connection_check
+ - tenantnetwork1
+ - tenantnetwork2
+ - vmready1
+ - vmready2
+ - singlevm1
+ - singlevm2
+ - vping_ssh
+ - vping_userdata
+ - cinder_test
+ - odl
+ - tempest_smoke
+ - tempest_horizon
+ - container: functest-smoke
+ timeout: 2h
+ tests:
+ - tempest_neutron
+ - tempest_cinder
+ - tempest_keystone
+ - tempest_heat
+ - tempest_telemetry
+ - rally_sanity
+ - refstack_compute
+ - refstack_object
+ - refstack_platform
+ - tempest_full
+ - tempest_scenario
+ - tempest_slow
+ - patrole_admin
+ - patrole_member
+ - patrole_reader
+ - tempest_barbican
+ - tempest_octavia
+ - tempest_cyborg
+ - container: functest-benchmarking
+ timeout: 4h
+ tests:
+ - rally_full
+ - rally_jobs
+ - vmtp
+ - shaker
+ - container: functest-vnf
+ timeout: 2h
+ tests:
+ - cloudify
+ - cloudify_ims
+ - heat_ims
+ - vyos_vrouter
+ - juju_epc
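This playbook only declares the gate jobs; it is consumed by the collivier.xtesting Ansible role referenced in its roles section. A minimal sketch of how it could be applied (assuming Ansible is installed and the role is fetched from Ansible Galaxy):

```shell
# Fetch the role the playbook depends on (see the roles section above).
ansible-galaxy install collivier.xtesting
# Apply the gate definition on the local host (127.0.0.1 in the hosts list).
ansible-playbook ansible/site.gate.yml
```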
diff --git a/ansible/site.yml b/ansible/site.yml
index 2c8307972..60a2b89e8 100644
--- a/ansible/site.yml
+++ b/ansible/site.yml
@@ -4,33 +4,6 @@
roles:
- role: collivier.xtesting
project: functest
- gerrit_project: functest
- builds:
- dependencies:
- - repo: _
- dport:
- container: alpine
- tag: '3.12'
- steps:
- - name: build opnfv/functest-core
- containers:
- - name: functest-core
- ref_arg: BRANCH
- path: docker/core
- - name: build containers
- containers:
- - name: functest-healthcheck
- ref_arg: BRANCH
- path: docker/healthcheck
- - name: functest-smoke
- ref_arg: BRANCH
- path: docker/smoke
- - name: functest-benchmarking
- ref_arg: BRANCH
- path: docker/benchmarking
- - name: functest-vnf
- ref_arg:
- path: docker/vnf
suites:
- container: functest-healthcheck
tests:
@@ -48,6 +21,7 @@
- tempest_smoke
- tempest_horizon
- container: functest-smoke
+ timeout: 2h
tests:
- tempest_neutron
- tempest_cinder
@@ -68,12 +42,14 @@
- tempest_octavia
- tempest_cyborg
- container: functest-benchmarking
+ timeout: 4h
tests:
- rally_full
- rally_jobs
- vmtp
- shaker
- container: functest-vnf
+ timeout: 2h
tests:
- cloudify
- cloudify_ims
diff --git a/build.sh b/build.sh
index 5331a52a4..033d31310 100644
--- a/build.sh
+++ b/build.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
set -e
@@ -13,7 +13,7 @@ docker/smoke-cntt \
docker/benchmarking-cntt"}
arm_dirs=${arm_dirs-${amd64_dirs}}
arm64_dirs=${arm64_dirs-${amd64_dirs}}
-build_opts=("--pull=true" --no-cache "--force-rm=true")
+build_opts="--pull=true --no-cache --force-rm=true"
find . -name Dockerfile -exec sed -i \
-e "s|opnfv/functest-core|${repo}/functest-core:amd64-latest|g" {} +
@@ -24,18 +24,18 @@ find . -name Dockerfile -exec sed -i \
${repo}/functest-benchmarking:amd64-latest|g" {} +
for dir in ${amd64_dirs}; do
(cd "${dir}" &&
- docker build "${build_opts[@]}" \
+ docker build $build_opts \
-t "${repo}/functest-${dir##**/}:amd64-latest" .)
docker push "${repo}/functest-${dir##**/}:amd64-latest"
[ "${dir}" != "docker/core" ] &&
(docker rmi "${repo}/functest-${dir##**/}:amd64-latest" || true)
done
[ -n "${amd64_dirs}" ] &&
- (docker rmi "${repo}/functest-core:amd64-latest" alpine:3.12 || true)
+ (docker rmi "${repo}/functest-core:amd64-latest" alpine:3.16 || true)
find . -name Dockerfile -exec git checkout {} +
find . -name Dockerfile -exec sed -i \
- -e "s|alpine:3.12|arm64v8/alpine:3.12|g" {} +
+ -e "s|alpine:3.16|arm64v8/alpine:3.16|g" {} +
find . -name Dockerfile -exec sed -i \
-e "s|opnfv/functest-core|${repo}/functest-core:arm64-latest|g" {} +
find . -name Dockerfile -exec sed -i \
@@ -44,7 +44,7 @@ find . -name Dockerfile -exec sed -i \
-e "s|opnfv/functest-benchmarking|\
${repo}/functest-benchmarking:arm64-latest|g" {} +
for dir in ${arm64_dirs}; do
- (cd "${dir}" && docker build "${build_opts[@]}" \
+ (cd "${dir}" && docker build $build_opts \
-t "${repo}/functest-${dir##**/}:arm64-latest" .)
docker push "${repo}/functest-${dir##**/}:arm64-latest"
[ "${dir}" != "docker/core" ] &&
@@ -52,11 +52,11 @@ for dir in ${arm64_dirs}; do
done
[ -n "${arm64_dirs}" ] &&
(docker rmi "${repo}/functest-core:arm64-latest" \
- arm64v8/alpine:3.12 || true)
+ arm64v8/alpine:3.16 || true)
find . -name Dockerfile -exec git checkout {} +
find . -name Dockerfile -exec sed -i \
- -e "s|alpine:3.12|arm32v6/alpine:3.12|g" {} +
+ -e "s|alpine:3.16|arm32v6/alpine:3.16|g" {} +
find . -name Dockerfile -exec sed -i \
-e "s|opnfv/functest-core|${repo}/functest-core:arm-latest|g" {} +
find . -name Dockerfile -exec sed -i \
@@ -65,7 +65,7 @@ find . -name Dockerfile -exec sed -i \
-e "s|opnfv/functest-benchmarking|\
${repo}/functest-benchmarking:arm-latest|g" {} +
for dir in ${arm_dirs}; do
- (cd "${dir}" && docker build "${build_opts[@]}" \
+ (cd "${dir}" && docker build $build_opts \
-t "${repo}/functest-${dir##**/}:arm-latest" .)
docker push "${repo}/functest-${dir##**/}:arm-latest"
[ "${dir}" != "docker/core" ] &&
@@ -73,7 +73,7 @@ for dir in ${arm_dirs}; do
done
[ -n "${arm_dirs}" ] &&
(docker rmi "${repo}/functest-core:arm-latest" \
- arm32v6/alpine:3.12 || true)
+ arm32v6/alpine:3.16 || true)
find . -name Dockerfile -exec git checkout {} +
exit $?
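The conversion from bash to POSIX sh above replaces the build_opts array with a plain whitespace-separated string that is expanded unquoted, so field splitting turns it back into individual docker build arguments. A tiny sketch of the pattern (illustrative only, not part of build.sh):

```shell
#!/bin/sh
# POSIX sh has no arrays: keep the options in one string and expand it unquoted
# so the shell word-splits it into separate arguments (safe here because none of
# the option values contain whitespace).
build_opts="--pull=true --no-cache --force-rm=true"
set -- $build_opts
echo "docker build would receive $# separate options: $*"
```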
diff --git a/commons/traffic-profile-guidelines.rst b/commons/traffic-profile-guidelines.rst
index 0b965b156..9619af6eb 100644
--- a/commons/traffic-profile-guidelines.rst
+++ b/commons/traffic-profile-guidelines.rst
@@ -11,11 +11,11 @@ Introduction
------------
In order to have consistent testing profiles, it has been suggested to define and store traffic profiles.
-These profiles shall be based on operator representative scenario.
+These profiles shall be based on operator representative scenario.
-These reference profiles may be used by any test projects, unitary, functional or performance tests.
-It is possible to adapt them to specific testcases.
-It is recommended to use them in order to avoid getting as many profiles as tests.
+These reference profiles may be used by any test projects, unitary, functional or performance tests.
+It is possible to adapt them to specific testcases.
+It is recommended to use them in order to avoid getting as many profiles as tests.
It should be helpful to compare the results of test scenario.
.. _howto:
diff --git a/docker/benchmarking-cntt/Dockerfile b/docker/benchmarking-cntt/Dockerfile
index 56b69e270..46acaa036 100644
--- a/docker/benchmarking-cntt/Dockerfile
+++ b/docker/benchmarking-cntt/Dockerfile
@@ -1,5 +1,5 @@
FROM opnfv/functest-benchmarking
-COPY testcases.yaml /usr/lib/python3.8/site-packages/xtesting/ci/testcases.yaml
+COPY testcases.yaml /etc/xtesting/testcases.yaml
COPY blacklist.yaml /src/functest/functest/opnfv_tests/openstack/rally/blacklist.yaml
CMD ["run_tests", "-t", "all"]
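Because the suite definition now lives at /etc/xtesting/testcases.yaml rather than under the Python site-packages tree, it can also be overridden at run time without rebuilding the image, e.g. by bind-mounting a custom file (a sketch; my-testcases.yaml is a hypothetical local file):

```shell
# Override the bundled suite definition with a local one (hypothetical file name).
sudo docker run --env-file env \
    -v $(pwd)/my-testcases.yaml:/etc/xtesting/testcases.yaml \
    -v $(pwd)/openstack.creds:/home/opnfv/functest/conf/env_file \
    opnfv/functest-benchmarking-cntt
```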
diff --git a/docker/benchmarking-cntt/testcases.yaml b/docker/benchmarking-cntt/testcases.yaml
index f96fbee07..30eb3e631 100644
--- a/docker/benchmarking-cntt/testcases.yaml
+++ b/docker/benchmarking-cntt/testcases.yaml
@@ -50,8 +50,6 @@ tiers:
VMTP is a small python application that will automatically
perform ping connectivity, round trip time measurement
(latency) and TCP/UDP throughput
- dependencies:
- - POD_ARCH: '^(?!aarch64$)'
run:
name: vmtp
@@ -65,7 +63,5 @@ tiers:
like iperf, iperf3 and netperf (with help of flent). Shaker
is able to deploy OpenStack instances and networks in
different topologies.
- dependencies:
- - POD_ARCH: '^(?!aarch64$)'
run:
name: shaker
diff --git a/docker/benchmarking/Dockerfile b/docker/benchmarking/Dockerfile
index a89dd0c95..d0957f6b8 100644
--- a/docker/benchmarking/Dockerfile
+++ b/docker/benchmarking/Dockerfile
@@ -1,6 +1,6 @@
FROM opnfv/functest-core
-ARG VMTP_TAG=master
+ARG VMTP_TAG=34a82c9f3598ec7f5d8de0a6d5139b92931db4cc
ARG NEUTRON_TAG=master
RUN apk --no-cache add --update libxml2 libxslt && \
@@ -8,14 +8,14 @@ RUN apk --no-cache add --update libxml2 libxslt && \
python3-dev build-base linux-headers libffi-dev \
openssl-dev libjpeg-turbo-dev libxml2-dev libxslt-dev && \
case $(uname -m) in aarch*|arm*) CFLAGS="-O0" \
- pip3 install --no-cache-dir -c/src/requirements/upper-constraints.txt \
+ pip3 install --use-deprecated=legacy-resolver --no-cache-dir -c/src/requirements/upper-constraints.txt \
-c/src/functest/upper-constraints.txt lxml ;; esac && \
git init /src/vmtp && \
(cd /src/vmtp && \
git fetch --tags https://review.opendev.org/x/vmtp.git $VMTP_TAG && \
git checkout FETCH_HEAD) && \
update-requirements -s --source /src/requirements /src/vmtp/ && \
- pip3 install --no-cache-dir --src /src -c/src/requirements/upper-constraints.txt \
+ pip3 install --use-deprecated=legacy-resolver --no-cache-dir --src /src -c/src/requirements/upper-constraints.txt \
-c/src/functest/upper-constraints.txt \
/src/vmtp && \
mkdir -p /home/opnfv/functest/data/rally/neutron/rally-jobs && \
@@ -26,5 +26,5 @@ RUN apk --no-cache add --update libxml2 libxslt && \
cp /src/neutron/rally-jobs/task-neutron.yaml /home/opnfv/functest/data/rally/neutron/rally-jobs/ && \
rm -r /src/vmtp /src/neutron && \
apk del .build-deps
-COPY testcases.yaml /usr/lib/python3.8/site-packages/xtesting/ci/testcases.yaml
+COPY testcases.yaml /etc/xtesting/testcases.yaml
CMD ["run_tests", "-t", "all"]
diff --git a/docker/benchmarking/hooks/post_checkout b/docker/benchmarking/hooks/post_checkout
index 8d0e98124..c347524ea 100644
--- a/docker/benchmarking/hooks/post_checkout
+++ b/docker/benchmarking/hooks/post_checkout
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
from="${DOCKER_REPO%/*}/functest-core:${DOCKER_TAG}"
sed -i "s|^FROM.*$|FROM ${from}|" Dockerfile
diff --git a/docker/benchmarking/testcases.yaml b/docker/benchmarking/testcases.yaml
index ea58caa7e..c84d3a00f 100644
--- a/docker/benchmarking/testcases.yaml
+++ b/docker/benchmarking/testcases.yaml
@@ -29,6 +29,8 @@ tiers:
description: >-
This test case runs a group of Rally jobs used in
OpenStack gating
+ dependencies:
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: rally_jobs
args:
@@ -45,7 +47,7 @@ tiers:
perform ping connectivity, round trip time measurement
(latency) and TCP/UDP throughput
dependencies:
- - POD_ARCH: '^(?!aarch64$)'
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: vmtp
@@ -60,6 +62,6 @@ tiers:
is able to deploy OpenStack instances and networks in
different topologies.
dependencies:
- - POD_ARCH: '^(?!aarch64$)'
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: shaker
diff --git a/docker/core/Create-new-server-in-test_create_backup.patch b/docker/core/Create-new-server-in-test_create_backup.patch
deleted file mode 100644
index 1b86b0fc5..000000000
--- a/docker/core/Create-new-server-in-test_create_backup.patch
+++ /dev/null
@@ -1,84 +0,0 @@
-From 03eb38ce54aeec4bc4c1cb3475c6fb84661f8993 Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?C=C3=A9dric=20Ollivier?= <cedric.ollivier@orange.com>
-Date: Tue, 21 Jul 2020 13:28:50 +0200
-Subject: [PATCH] Create new server in test_create_backup
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
-
-test_reboot_server_hard sometimes fail in all gates [1].
-This hack could highlight if they are side effects between
-test_create_backup and test_reboot_server_hard.
-
-[1] http://artifacts.opnfv.org/functest/E5AZMH89OOK6/functest-opnfv-functest-smoke-cntt-hunter-tempest_full_cntt-run-142/tempest_full_cntt/tempest-report.html
-
-Change-Id: I203562f686b004094e5e18858004b7a2d26567a6
-Signed-off-by: Cédric Ollivier <cedric.ollivier@orange.com>
----
- .../api/compute/servers/test_server_actions.py | 15 ++++++++-------
- 1 file changed, 8 insertions(+), 7 deletions(-)
-
-diff --git a/tempest/api/compute/servers/test_server_actions.py b/tempest/api/compute/servers/test_server_actions.py
-index d477be0eb..c369311d3 100644
---- a/tempest/api/compute/servers/test_server_actions.py
-+++ b/tempest/api/compute/servers/test_server_actions.py
-@@ -443,6 +443,7 @@ class ServerActionsTestJSON(base.BaseV2ComputeTest):
- # Check if glance v1 is available to determine which client to use. We
- # prefer glance v1 for the compute API tests since the compute image
- # API proxy was written for glance v1.
-+ newserver = self.create_test_server(wait_until='ACTIVE')
- if CONF.image_feature_enabled.api_v1:
- glance_client = self.os_primary.image_client
- elif CONF.image_feature_enabled.api_v2:
-@@ -453,7 +454,7 @@ class ServerActionsTestJSON(base.BaseV2ComputeTest):
- '[image-feature-enabled].')
-
- backup1 = data_utils.rand_name('backup-1')
-- resp = self.client.create_backup(self.server_id,
-+ resp = self.client.create_backup(newserver['id'],
- backup_type='daily',
- rotation=2,
- name=backup1)
-@@ -481,8 +482,8 @@ class ServerActionsTestJSON(base.BaseV2ComputeTest):
- image1_id, 'active')
-
- backup2 = data_utils.rand_name('backup-2')
-- waiters.wait_for_server_status(self.client, self.server_id, 'ACTIVE')
-- resp = self.client.create_backup(self.server_id,
-+ waiters.wait_for_server_status(self.client, newserver['id'], 'ACTIVE')
-+ resp = self.client.create_backup(newserver['id'],
- backup_type='daily',
- rotation=2,
- name=backup2)
-@@ -499,7 +500,7 @@ class ServerActionsTestJSON(base.BaseV2ComputeTest):
- properties = {
- 'image_type': 'backup',
- 'backup_type': "daily",
-- 'instance_uuid': self.server_id,
-+ 'instance_uuid': newserver['id'],
- }
- params = {
- 'status': 'active',
-@@ -524,8 +525,8 @@ class ServerActionsTestJSON(base.BaseV2ComputeTest):
- # create the third one, due to the rotation is 2,
- # the first one will be deleted
- backup3 = data_utils.rand_name('backup-3')
-- waiters.wait_for_server_status(self.client, self.server_id, 'ACTIVE')
-- resp = self.client.create_backup(self.server_id,
-+ waiters.wait_for_server_status(self.client, newserver['id'], 'ACTIVE')
-+ resp = self.client.create_backup(newserver['id'],
- backup_type='daily',
- rotation=2,
- name=backup3)
-@@ -536,7 +537,7 @@ class ServerActionsTestJSON(base.BaseV2ComputeTest):
- image3_id = data_utils.parse_image_id(resp.response['location'])
- self.addCleanup(glance_client.delete_image, image3_id)
- # the first back up should be deleted
-- waiters.wait_for_server_status(self.client, self.server_id, 'ACTIVE')
-+ waiters.wait_for_server_status(self.client, newserver['id'], 'ACTIVE')
- glance_client.wait_for_resource_deletion(image1_id)
- oldest_backup_exist = False
- if CONF.image_feature_enabled.api_v1:
---
-2.27.0
-
diff --git a/docker/core/Dockerfile b/docker/core/Dockerfile
index 80be9ee3d..2715cce39 100644
--- a/docker/core/Dockerfile
+++ b/docker/core/Dockerfile
@@ -1,18 +1,18 @@
-FROM alpine:3.12
+FROM alpine:3.16
ARG BRANCH=master
ARG OPENSTACK_TAG=master
COPY Switch-to-threading.Thread-for-Rally-tasks.patch /tmp/Switch-to-threading.Thread-for-Rally-tasks.patch
-COPY Create-new-server-in-test_create_backup.patch /tmp/Create-new-server-in-test_create_backup.patch
-RUN apk --no-cache add --update \
+RUN apk -U upgrade && \
+ apk --no-cache add --update \
python3 py3-wheel libffi openssl libjpeg-turbo py3-pip bash \
- grep sed wget ca-certificates git openssh-client qemu-img iputils coreutils mailcap libstdc++ && \
+ grep sed wget ca-certificates git openssh-client qemu-img iputils coreutils mailcap libstdc++ \
+ libxml2 libxslt && \
apk --no-cache add --virtual .build-deps --update \
python3-dev build-base linux-headers libffi-dev \
- openssl-dev libjpeg-turbo-dev && \
- apk --no-cache add --update py3-distlib\>=0.3.1 \
- --repository=http://dl-cdn.alpinelinux.org/alpine/edge/main && \
+ openssl-dev libjpeg-turbo-dev rust cargo \
+ libxml2-dev libxslt-dev && \
git init /src/requirements && \
(cd /src/requirements && \
git fetch --tags https://review.opendev.org/openstack/requirements $OPENSTACK_TAG && \
@@ -22,30 +22,26 @@ RUN apk --no-cache add --update \
git fetch --tags https://gerrit.opnfv.org/gerrit/functest $BRANCH && \
git checkout FETCH_HEAD) && \
sed -i -E /^tempest==+.*$/d /src/requirements/upper-constraints.txt && \
+ sed -i -E /^packaging==+.*$/d /src/requirements/upper-constraints.txt && \
case $(uname -m) in aarch*|arm*) sed -i -E /^PyNaCl=/d /src/requirements/upper-constraints.txt && apk add --no-cache py3-pynacl ;; esac && \
sed -i -E /#egg=functest/d /src/functest/upper-constraints.txt && \
- pip3 install --no-cache-dir --src /src -c/src/functest/upper-constraints.txt -c/src/requirements/upper-constraints.txt \
+ pip3 install --use-deprecated=legacy-resolver --no-cache-dir --src /src -c/src/functest/upper-constraints.txt -c/src/requirements/upper-constraints.txt \
-e /src/requirements && \
update-requirements -s --source /src/requirements /src/functest && \
- pip3 install --no-cache-dir --src /src -c/src/functest/upper-constraints.txt -c/src/requirements/upper-constraints.txt \
+ pip3 install --use-deprecated=legacy-resolver --no-cache-dir --src /src -c/src/functest/upper-constraints.txt -c/src/requirements/upper-constraints.txt \
-e /src/functest && \
(cd /src/rally && patch -p1 < /tmp/Switch-to-threading.Thread-for-Rally-tasks.patch) && \
sed -i -E /#egg=rally/d /src/functest/upper-constraints.txt && \
sed -i -E /#egg=tempest/d /src/functest/upper-constraints.txt && \
- (cd /src/tempest && \
- git config --global user.email "opnfv-tech-discuss@lists.opnfv.org" && \
- git config --global user.name "Functest" && \
- patch -p1 < /tmp/Create-new-server-in-test_create_backup.patch && \
- git commit -a -m "Backport critical bugfixes" && \
- rm ~/.gitconfig) && \
rm -r /src/requirements/.git /src/functest/.git \
- /tmp/Switch-to-threading.Thread-for-Rally-tasks.patch \
- /tmp/Create-new-server-in-test_create_backup.patch && \
- cp /src/functest/functest/ci/logging.ini /usr/lib/python3.8/site-packages/xtesting/ci/ && \
- cp /src/functest/functest/ci/logging.debug.ini /usr/lib/python3.8/site-packages/xtesting/ci/ && \
- bash -c "mkdir -p /var/lib/xtesting /home/opnfv" && \
+ /tmp/Switch-to-threading.Thread-for-Rally-tasks.patch && \
+ mkdir -p /etc/xtesting && \
+ cp /src/functest/functest/ci/logging.ini /etc/xtesting/ && \
+ cp /src/functest/functest/ci/logging.debug.ini /etc/xtesting/ && \
+ sh -c "mkdir -p /var/lib/xtesting /home/opnfv" && \
ln -s /var/lib/xtesting /home/opnfv/functest && \
- bash -c "mkdir -p /home/opnfv/functest{/conf,/data,/images,/results} /home/opnfv/repos/vnfs" && \
+    sh -c "mkdir -p /home/opnfv/functest/conf /home/opnfv/functest/data /home/opnfv/functest/images /home/opnfv/functest/results && \
+ mkdir -p /home/opnfv/repos/vnfs" && \
mkdir -p /etc/rally && \
printf "[database]\nconnection = 'sqlite:////var/lib/rally/database/rally.sqlite'\n" > /etc/rally/rally.conf && \
printf "\n[openstack]\nneutron_bind_l2_agent_types = Open vSwitch agent,Linux bridge agent,OVN Controller Gateway agent\n" >> /etc/rally/rally.conf && \
diff --git a/docker/healthcheck/Dockerfile b/docker/healthcheck/Dockerfile
index 4d55e93f9..404ff2d58 100644
--- a/docker/healthcheck/Dockerfile
+++ b/docker/healthcheck/Dockerfile
@@ -1,24 +1,15 @@
FROM opnfv/functest-core
ARG ODL_TAG=89b88a0a23561f0bda62338b394ec41655679b2d
-ARG TEMPEST_HORIZON_TAG=master
COPY thirdparty-requirements.txt thirdparty-requirements.txt
RUN apk --no-cache add --virtual .build-deps --update \
python3-dev build-base linux-headers libffi-dev openssl-dev && \
- git init /src/tempest-horizon && \
- (cd /src/tempest-horizon && \
- git fetch --tags https://opendev.org/openstack/tempest-horizon.git $TEMPEST_HORIZON_TAG && \
- git checkout FETCH_HEAD) && \
- update-requirements -s --source /src/requirements /src/tempest-horizon/ && \
- pip3 install --no-cache-dir --src /src -c/src/requirements/upper-constraints.txt \
- -c/src/functest/upper-constraints.txt \
- /src/tempest-horizon -rthirdparty-requirements.txt && \
git init /src/odl_test && \
(cd /src/odl_test && \
git fetch --tags https://git.opendaylight.org/gerrit/integration/test $ODL_TAG && \
git checkout FETCH_HEAD) && \
- rm -r /src/odl_test/.git /src/tempest-horizon/ thirdparty-requirements.txt && \
+ rm -r /src/odl_test/.git thirdparty-requirements.txt && \
apk del .build-deps
-COPY testcases.yaml /usr/lib/python3.8/site-packages/xtesting/ci/testcases.yaml
+COPY testcases.yaml /etc/xtesting/testcases.yaml
CMD ["run_tests", "-t", "all"]
diff --git a/docker/healthcheck/testcases.yaml b/docker/healthcheck/testcases.yaml
index ff743f0d4..7b6b2a108 100644
--- a/docker/healthcheck/testcases.yaml
+++ b/docker/healthcheck/testcases.yaml
@@ -27,6 +27,8 @@ tiers:
It creates and configures all tenant network ressources
required by advanced testcases (subnet, network and
router).
+ dependencies:
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: tenantnetwork1
@@ -39,6 +41,8 @@ tiers:
It creates new user/project before creating and configuring
all tenant network ressources required by a testcase
(subnet, network and router).
+ dependencies:
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: tenantnetwork2
@@ -173,6 +177,6 @@ tiers:
dependencies:
- DASHBOARD_URL: '^(?!\s*$).+'
run:
- name: tempest_horizon
+ name: tempest_common
args:
- mode: '^tempest_horizon.'
+ mode: '^tempest.scenario.test_dashboard_basic_ops.'
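With the dedicated tempest_horizon runner gone, the dashboard check is now the generic tempest_common runner filtered by the regex above. Outside of Functest, roughly the same selection could be reproduced with the Tempest CLI (a sketch; it assumes a configured Tempest workspace with the tempest-horizon plugin installed):

```shell
# Run only the Horizon dashboard scenario, matching the mode regex above.
tempest run --regex '^tempest.scenario.test_dashboard_basic_ops.'
```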
diff --git a/docker/smoke-cntt/Dockerfile b/docker/smoke-cntt/Dockerfile
index 4585edb34..a8e8a6f75 100644
--- a/docker/smoke-cntt/Dockerfile
+++ b/docker/smoke-cntt/Dockerfile
@@ -1,5 +1,5 @@
FROM opnfv/functest-smoke
-COPY testcases.yaml /usr/lib/python3.8/site-packages/xtesting/ci/testcases.yaml
+COPY testcases.yaml /etc/xtesting/testcases.yaml
COPY tempest_conf.yaml /src/functest/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml
CMD ["run_tests", "-t", "all"]
diff --git a/docker/smoke-cntt/tempest_conf.yaml b/docker/smoke-cntt/tempest_conf.yaml
index 74a546a8b..d9f878992 100644
--- a/docker/smoke-cntt/tempest_conf.yaml
+++ b/docker/smoke-cntt/tempest_conf.yaml
@@ -1,6 +1,7 @@
---
compute:
- max_microversion: 2.79
+ min_microversion: 2.44
+ max_microversion: 2.88
compute-feature-enabled:
attach_encrypted_volume: false
block_migration_for_live_migration: false
@@ -11,6 +12,7 @@ compute-feature-enabled:
console_output: true
disk_config: true
enable_instance_password: true
+ hostname_fqdn_sanitization: false
interface_attach: true
live_migration: true
live_migrate_back_and_forth: false
@@ -46,13 +48,14 @@ identity-feature-enabled:
external_idp: false
project_tags: true
application_credentials: true
+ access_rules: true
image-feature-enabled:
api_v2: true
api_v1: false
+ import_image: false
network-feature-enabled:
port_admin_state_change: true
port_security: true
- floating_ips: true
placement:
max_microversion: 1.36
validation:
@@ -60,9 +63,8 @@ validation:
ssh_timeout: 196
ip_version_for_ssh: 4
run_validation: true
- connect_method: floating
volume:
- max_microversion: 3.59
+ max_microversion: 3.64
storage_protocol: ceph
manage_volume_ref: source-name,volume-%s
manage_snapshot_ref: source-name,snapshot-%s
@@ -73,7 +75,8 @@ volume-feature-enabled:
clone: true
manage_snapshot: true
manage_volume: true
- extend_attached_volume: false
+ extend_attached_volume: true
+ extend_attached_encrypted_volume: false
consistency_group: false
volume_revert: true
load_balancer:
@@ -81,13 +84,14 @@ load_balancer:
neutron_plugin_options:
agent_availability_zone: nova
available_type_drivers: flat,geneve,vlan,gre,local,vxlan
- provider_vlans: foo,
+ provider_vlans: public,
create_shared_resources: true
object-storage-feature-enabled:
discoverable_apis: "account_quotas,formpost,bulk_upload,bulk_delete,\
tempurl,crossdomain,container_quotas,staticweb,account_quotas,slo"
object_versioning: true
discoverability: true
+ tempurl_digest_hashlib: sha1
heat_plugin:
skip_functional_test_list: EncryptionVolTypeTest
skip_scenario_test_list: "AodhAlarmTest,SoftwareConfigIntegrationTest,\
@@ -96,3 +100,5 @@ heat_plugin:
auth_version: 3
heat_features_enabled:
multi_cloud: false
+rbac:
+ enable_rbac: true
diff --git a/docker/smoke-cntt/testcases.yaml b/docker/smoke-cntt/testcases.yaml
index 5ea0b7080..1d2aec38b 100644
--- a/docker/smoke-cntt/testcases.yaml
+++ b/docker/smoke-cntt/testcases.yaml
@@ -11,7 +11,7 @@ tiers:
criteria: 100
blocking: false
deny_skipping: true
- tests_count: 523
+ tests_count: 564
description: >-
This test case runs the Tempest suite proposed by the
Neutron project. The list of test cases is generated by
@@ -34,6 +34,7 @@ tiers:
(?!.*test_conntrack_helper)\
(?!.*test_floating_ips.FloatingIPPoolTestJSON)\
(?!.*test_floating_ips.FloatingIPTestJSON.test_create_update_floatingip_port_details)\
+ (?!.*test_local_ip)\
(?!.*test_metering_extensions)\
(?!.*test_metering_negative)\
(?!.*test_networks.NetworksSearchCriteriaTest.test_list_validation_filters)\
@@ -79,7 +80,7 @@ tiers:
criteria: 100
blocking: false
deny_skipping: true
- tests_count: 9
+ tests_count: 10
description: >-
This test case runs the Tempest suite proposed by the
Cinder project.
@@ -90,6 +91,8 @@ tiers:
(?!.*test_consistencygroups)\
(?!.*test_backup_crossproject_admin_negative)\
(?!.*test_backup_crossproject_user_negative)\
+ (?!.*test_volume_encrypted.TestEncryptedCinderVolumes)\
+ (?!.*rbac)\
(^cinder_tempest_plugin.)"
option:
- '--concurrency=4'
@@ -108,6 +111,7 @@ tiers:
name: tempest_common
args:
mode: "(?!.*api.identity.v3.test_oauth1_tokens)\
+ (?!.*rbac)\
(?!.*scenario.test_federated_authentication)\
keystone_tempest_plugin."
option:
@@ -119,7 +123,7 @@ tiers:
criteria: 100
blocking: false
deny_skipping: true
- tests_count: 119
+ tests_count: 124
description: >-
This test case runs the Tempest suite proposed by the
Heat project.
@@ -128,6 +132,10 @@ tiers:
args:
mode: "(?!.*functional.test_lbaasv2)\
(?!.*functional.test_encryption_vol_type)\
+ (?!.*functional.test_event_sinks)\
+ (?!.*functional.test_software_config.ZaqarSignalTransportTest)\
+ (?!.*functional.test_stack_events)\
+ (?!.*functional.test_waitcondition)\
(?!.*RemoteStackTest.test_stack_create_with_cloud_credential)\
(?!.*scenario.test_aodh_alarm)\
(?!.*tests.scenario.test_autoscaling_lb)\
@@ -168,7 +176,7 @@ tiers:
criteria: 100
blocking: false
deny_skipping: true
- tests_count: 1276
+ tests_count: 1271
description: >-
The list of test cases is generated by
Tempest automatically and depends on the parameters of
@@ -177,7 +185,7 @@ tiers:
run:
name: tempest_common
args:
- mode: "(?!.*test_fixed_ips)\
+ mode: "(?!.*admin.test_agents)(?!.*test_fixed_ips)\
(?!.*test_fixed_ips_negative)\
(?!.*test_auto_allocate_network)(?!.*test_floating_ips_bulk)\
(?!.*test_flavors_microversions.FlavorsV255TestJSON)\
@@ -192,6 +200,7 @@ tiers:
(?!.*test_live_migration.LiveMigrationTest.test_volume_backed_live_migration)\
(?!.*test_live_migration.LiveMigrationRemoteConsolesV26Test)\
(?!.*test_quotas.QuotasAdminTestV257)\
+ (?!.*test_servers.ServersAdminTestJSON.test_reset_network_inject_network_info)\
(?!.*certificates.test_certificates)\
(?!.*test_quotas_negative.QuotasSecurityGroupAdminNegativeTest)\
(?!.*test_novnc)(?!.*test_server_personality)\
@@ -212,19 +221,38 @@ tiers:
(?!.*test_security_groups_negative.SecurityGroupsNegativeTestJSON.test_update_security_group_with_invalid_sg_des)\
(?!.*test_security_groups_negative.SecurityGroupsNegativeTestJSON.test_update_security_group_with_invalid_sg_id)\
(?!.*test_security_groups_negative.SecurityGroupsNegativeTestJSON.test_update_security_group_with_invalid_sg_name)\
+ (?!.*test_create_server.ServersTestFqdnHostnames.test_create_server_with_fqdn_name)\
+ (?!.*test_server_metadata.ServerMetadataTestJSON)\
+ (?!.*test_server_metadata_negative.ServerMetadataNegativeTestJSON.test_delete_metadata_non_existent_server)\
+ (?!.*test_server_metadata_negative.ServerMetadataNegativeTestJSON.test_metadata_items_limit)\
+ (?!.*test_server_metadata_negative.ServerMetadataNegativeTestJSON.test_set_metadata_invalid_key)\
+ (?!.*test_server_metadata_negative.ServerMetadataNegativeTestJSON.test_set_metadata_non_existent_server)\
+ (?!.*test_server_metadata_negative.ServerMetadataNegativeTestJSON.test_set_server_metadata_blank_key)\
+ (?!.*test_server_metadata_negative.ServerMetadataNegativeTestJSON.test_set_server_metadata_missing_metadata)\
+ (?!.*test_server_metadata_negative.ServerMetadataNegativeTestJSON.test_update_metadata_non_existent_server)\
+ (?!.*test_server_metadata_negative.ServerMetadataNegativeTestJSON.test_update_metadata_with_blank_key)\
(?!.*test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filtered_by_ip_regex)\
(?!.*compute.test_virtual_interfaces)(?!.*compute.test_virtual_interfaces_negative)\
(?!.*compute.test_networks)\
(?!.*test_attach_volume.AttachVolumeMultiAttach)\
(?!.*identity.admin.v2)(?!.*identity.v2)\
+ (?!.*identity.v3.test_access_rules)\
+ (?!.*identity.v3.test_application_credentials.ApplicationCredentialsV3Test.test_create_application_credential_access_rules)\
(?!.*image.v1)\
+ (?!.*image.v2.admin.test_images.ImportCopyImagesTest)\
+ (?!.*image.v2.test_images_negative.ImagesNegativeTest.test_create_image_reserved_property)\
+ (?!.*image.v2.test_images_negative.ImagesNegativeTest.test_update_image_reserved_property)\
+ (?!.*image.v2.test_images_negative.ImportImagesNegativeTest.test_image_web_download_import_with_bad_url)\
(?!.*image.v2.test_images.ImportImagesTest)\
+ (?!.*image.v2.test_images.MultiStoresImportImages)\
(?!.*admin.test_dhcp_agent_scheduler)\
(?!.*admin.test_routers_dvr)\
(?!.*test_metering_extensions)(?!.*network.test_tags)\
(?!.*test_routers_negative.DvrRoutersNegativeTest)\
(?!.*test_routers.RoutersIpV6Test.test_create_router_set_gateway_with_fixed_ip)\
(?!.*test_routers.RoutersTest.test_create_router_set_gateway_with_fixed_ip)\
+ (?!.*test_object_services.ObjectTest.test_create_object_with_transfer_encoding)\
+ (?!.*test_encrypted_volumes_extend)\
(?!.*test_group_snapshots.GroupSnapshotsV319Test.test_reset_group_snapshot_status)\
(?!.*test_multi_backend)\
(?!.*test_volume_retype.VolumeRetypeWithMigrationTest)\
@@ -242,7 +270,7 @@ tiers:
criteria: 100
blocking: false
deny_skipping: true
- tests_count: 9
+ tests_count: 13
description: >-
The list of test cases is generated by
Tempest automatically and depends on the parameters of
@@ -252,6 +280,11 @@ tiers:
name: tempest_common
args:
mode: "\
+ (?!.*test_compute_unified_limits)\
+ (?!.*test_minbw_allocation_placement)\
+ (?!.*test_network_qos_placement)\
+ (?!.*test_unified_limits.ImageQuotaTest.test_image_count_uploading_quota)\
+ (?!.*test_unified_limits.ImageQuotaTest.test_image_stage_quota)\
(?!.*test_volume_boot_pattern.TestVolumeBootPattern.test_boot_server_from_encrypted_volume_luks)\
(?!.*\\[.*\\bslow\\b.*\\])(^tempest.scenario)"
option:
@@ -280,6 +313,7 @@ tiers:
(?!.*test_encrypted_cinder_volumes)\
(?!.*test_minbw_allocation_placement)\
(?!.*test_network_basic_ops.TestNetworkBasicOps.test_router_rescheduling)\
+ (?!.*test_shelve_instance.TestShelveInstance.test_cold_migrate_unshelved_instance)\
(?!.*test_volume_migrate_attached)\
(?!.*test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_cold_migration_revert)\
(?=.*\\[.*\\bslow\\b.*\\])(^tempest.)"
diff --git a/docker/smoke/Dockerfile b/docker/smoke/Dockerfile
index 1eaaf2b11..da42ef9b4 100644
--- a/docker/smoke/Dockerfile
+++ b/docker/smoke/Dockerfile
@@ -20,7 +20,7 @@ RUN apk --no-cache add --update libxml2 libxslt && \
python3-dev build-base linux-headers libffi-dev \
openssl-dev libjpeg-turbo-dev libxml2-dev libxslt-dev && \
case $(uname -m) in aarch*|arm*) CFLAGS="-O0" \
- pip3 install --no-cache-dir -c/src/requirements/upper-constraints.txt \
+ pip3 install --use-deprecated=legacy-resolver --no-cache-dir -c/src/requirements/upper-constraints.txt \
-c/src/functest/upper-constraints.txt lxml && \
sed -i -E /^numpy=/d /src/requirements/upper-constraints.txt && apk add py3-numpy ;; esac && \
git init /src/patrole && \
@@ -30,17 +30,17 @@ RUN apk --no-cache add --update libxml2 libxslt && \
update-requirements -s --source /src/requirements /src/patrole/ && \
git init /src/neutron-tempest-plugin && \
(cd /src/neutron-tempest-plugin && \
- git fetch --tags https://git.openstack.org/openstack/neutron-tempest-plugin.git $NEUTRON_TEMPEST_TAG && \
+ git fetch --tags https://opendev.org/openstack/neutron-tempest-plugin.git $NEUTRON_TEMPEST_TAG && \
git checkout FETCH_HEAD) && \
update-requirements -s --source /src/requirements /src/neutron-tempest-plugin && \
git init /src/cinder-tempest-plugin && \
(cd /src/cinder-tempest-plugin && \
- git fetch --tags https://git.openstack.org/openstack/cinder-tempest-plugin.git $CINDER_TEMPEST_TAG && \
+ git fetch --tags https://opendev.org/openstack/cinder-tempest-plugin.git $CINDER_TEMPEST_TAG && \
git checkout FETCH_HEAD) && \
update-requirements -s --source /src/requirements /src/cinder-tempest-plugin && \
git init /src/keystone-tempest-plugin && \
(cd /src/keystone-tempest-plugin && \
- git fetch --tags https://git.openstack.org/openstack/keystone-tempest-plugin.git $KEYSTONE_TEMPEST_TAG && \
+ git fetch --tags https://opendev.org/openstack/keystone-tempest-plugin.git $KEYSTONE_TEMPEST_TAG && \
git checkout FETCH_HEAD) && \
update-requirements -s --source /src/requirements /src/keystone-tempest-plugin && \
git init /src/barbican-tempest-plugin && \
@@ -50,32 +50,32 @@ RUN apk --no-cache add --update libxml2 libxslt && \
update-requirements -s --source /src/requirements /src/barbican-tempest-plugin/ && \
git init /src/octavia-tempest-plugin && \
(cd /src/octavia-tempest-plugin && \
- git fetch --tags https://git.openstack.org/openstack/octavia-tempest-plugin.git $OCTAVIA_TAG && \
+ git fetch --tags https://opendev.org/openstack/octavia-tempest-plugin.git $OCTAVIA_TAG && \
git checkout FETCH_HEAD) && \
update-requirements -s --source /src/requirements /src/octavia-tempest-plugin && \
git init /src/heat-tempest-plugin && \
(cd /src/heat-tempest-plugin && \
- git fetch --tags https://git.openstack.org/openstack/heat-tempest-plugin.git $HEAT_TEMPEST_TAG && \
+ git fetch --tags https://opendev.org/openstack/heat-tempest-plugin.git $HEAT_TEMPEST_TAG && \
git checkout FETCH_HEAD) && \
update-requirements -s --source /src/requirements /src/heat-tempest-plugin && \
git init /src/telemetry-tempest-plugin && \
(cd /src/telemetry-tempest-plugin && \
- git fetch --tags https://git.openstack.org/openstack/telemetry-tempest-plugin.git $TELEMETRY_TEMPEST_TAG && \
+ git fetch --tags https://opendev.org/openstack/telemetry-tempest-plugin.git $TELEMETRY_TEMPEST_TAG && \
git checkout FETCH_HEAD) && \
update-requirements -s --source /src/requirements /src/telemetry-tempest-plugin && \
git init /src/cyborg-tempest-plugin && \
(cd /src/cyborg-tempest-plugin && \
- git fetch --tags https://git.openstack.org/openstack/cyborg-tempest-plugin.git $CYBORG_TEMPEST_TAG && \
+ git fetch --tags https://opendev.org/openstack/cyborg-tempest-plugin.git $CYBORG_TEMPEST_TAG && \
git checkout FETCH_HEAD) && \
update-requirements -s --source /src/requirements /src/cyborg-tempest-plugin && \
- pip3 install --no-cache-dir --src /src -c/src/requirements/upper-constraints.txt \
+ pip3 install --use-deprecated=legacy-resolver --no-cache-dir --src /src -c/src/requirements/upper-constraints.txt \
-c/src/functest/upper-constraints.txt \
/src/patrole /src/barbican-tempest-plugin /src/neutron-tempest-plugin \
/src/cinder-tempest-plugin /src/keystone-tempest-plugin \
/src/octavia-tempest-plugin /src/heat-tempest-plugin /src/telemetry-tempest-plugin \
/src/cyborg-tempest-plugin && \
mkdir -p /home/opnfv/functest/data/refstack && \
- pip3 install --no-cache-dir --src /src -c/src/requirements/upper-constraints.txt \
+ pip3 install --use-deprecated=legacy-resolver --no-cache-dir --src /src -c/src/requirements/upper-constraints.txt \
-c/src/functest/upper-constraints.txt \
git+https://opendev.org/openstack/neutron.git@$NEUTRON_TAG#egg=neutron \
git+https://opendev.org/openstack/glance.git@$GLANCE_TAG#egg=glance \
@@ -90,5 +90,5 @@ RUN apk --no-cache add --update libxml2 libxslt && \
COPY compute.txt /home/opnfv/functest/data/refstack/compute.txt
COPY object.txt /home/opnfv/functest/data/refstack/object.txt
COPY platform.txt /home/opnfv/functest/data/refstack/platform.txt
-COPY testcases.yaml /usr/lib/python3.8/site-packages/xtesting/ci/testcases.yaml
+COPY testcases.yaml /etc/xtesting/testcases.yaml
CMD ["run_tests", "-t", "all"]
diff --git a/docker/smoke/compute.txt b/docker/smoke/compute.txt
index b0e4faf17..7a642a703 100644
--- a/docker/smoke/compute.txt
+++ b/docker/smoke/compute.txt
@@ -3,6 +3,16 @@ tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_list_flavors_wit
tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_delete_image[id-3731d080-d4c5-4872-b41a-64d0d0021314]
tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_image_specify_multibyte_character_image_name[id-3b7c6fe4-dfe7-477c-9243-b06359db51e6]
tempest.api.compute.keypairs.test_keypairs_v22.KeyPairsV22TestJSON.test_keypairsv22_create_list_show_with_type[id-89d59d43-f735-441a-abcf-0601727f47b6]
+tempest.api.compute.security_groups.test_security_groups.SecurityGroupsTestJSON.test_list_security_groups_by_server[id-79517d60-535a-438f-af3d-e6feab1cbea7]
+tempest.api.compute.security_groups.test_security_groups.SecurityGroupsTestJSON.test_security_group_create_get_delete[id-ecc0da4a-2117-48af-91af-993cca39a615]
+tempest.api.compute.security_groups.test_security_groups.SecurityGroupsTestJSON.test_security_groups_create_list_delete[id-eb2b087d-633d-4d0d-a7bd-9e6ba35b32de]
+tempest.api.compute.security_groups.test_security_groups.SecurityGroupsTestJSON.test_server_security_groups[id-fe4abc0d-83f5-4c50-ad11-57a1127297a2]
+tempest.api.compute.security_groups.test_security_groups.SecurityGroupsTestJSON.test_update_security_groups[id-7d4e1d3c-3209-4d6d-b020-986304ebad1f]
+tempest.api.compute.security_groups.test_security_groups_negative.SecurityGroupsNegativeTestJSON.test_delete_nonexistent_security_group[id-6727c00b-214c-4f9e-9a52-017ac3e98411]
+tempest.api.compute.security_groups.test_security_groups_negative.SecurityGroupsNegativeTestJSON.test_delete_security_group_without_passing_id[id-1438f330-8fa4-4aeb-8a94-37c250106d7f]
+tempest.api.compute.security_groups.test_security_groups_negative.SecurityGroupsNegativeTestJSON.test_delete_the_default_security_group[id-36a1629f-c6da-4a26-b8b8-55e7e5d5cd58]
+tempest.api.compute.security_groups.test_security_groups_negative.SecurityGroupsNegativeTestJSON.test_security_group_get_nonexistent_group[id-673eaec1-9b3e-48ed-bdf1-2786c1b9661c]
+tempest.api.compute.security_groups.test_security_groups_negative.SecurityGroupsNegativeTestJSON.test_update_non_existent_security_group[id-27edee9c-873d-4da6-a68a-3c256efebe8f]
tempest.api.compute.servers.test_availability_zone.AZV2TestJSON.test_get_availability_zone_list_with_non_admin_user[id-a8333aa2-205c-449f-a828-d38c2489bf25]
tempest.api.compute.servers.test_create_server.ServersTestJSON.test_host_name_is_same_as_server_name[id-ac1ad47f-984b-4441-9274-c9079b7a0666]
tempest.api.compute.servers.test_create_server.ServersTestJSON.test_list_servers[id-9a438d88-10c6-4bcd-8b5b-5b6e25e1346f]
@@ -28,6 +38,7 @@ tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.t
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_limit[id-614cdfc1-d557-4bac-915b-3e67b48eee76]
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_server_name[id-9b067a7b-7fee-4f6a-b29c-be43fe18fc5a]
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_server_status[id-ca78e20e-fddb-4ce6-b7f7-bcbf8605e66e]
+tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filtered_by_ip_regex[id-a905e287-c35e-42f2-b132-d02b09f3654a]
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filtered_by_name_wildcard[id-e9f624ee-92af-4562-8bec-437945a18dcb]
tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_changes_since_future_date[id-74745ad8-b346-45b5-b9b8-509d7447fc1f]
tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_changes_since_invalid_date[id-87d12517-e20a-4c9c-97b6-dd1628d6d6c9]
@@ -45,12 +56,10 @@ tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_lock_
tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_reboot_server_hard[id-2cb1baf6-ac8d-4429-bf0d-ba8a0ba53e32]
tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_rebuild_server[id-aaa6cdf3-55a7-461a-add9-1c8596b9a07c]
tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_stop_start_server[id-af8eafd4-38a7-4a4b-bdbc-75145a580560]
-tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_delete_server_metadata_item[id-127642d6-4c7b-4486-b7cd-07265a378658]
-tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_get_server_metadata_item[id-3043c57d-7e0e-49a6-9a96-ad569c265e6a]
-tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_list_server_metadata[id-479da087-92b3-4dcf-aeb3-fd293b2d14ce]
-tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_set_server_metadata[id-211021f6-21de-4657-a68f-908878cfe251]
-tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_set_server_metadata_item[id-58c02d4f-5c67-40be-8744-d3fa5982eb1c]
-tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_update_server_metadata[id-344d981e-0c33-4997-8a5d-6c1d803e4134]
+tempest.api.compute.servers.test_server_tags.ServerTagsTestJSON.test_check_tag_existence[id-81279a66-61c3-4759-b830-a2dbe64cbe08]
+tempest.api.compute.servers.test_server_tags.ServerTagsTestJSON.test_create_delete_tag[id-8d95abe2-c658-4c42-9a44-c0258500306b]
+tempest.api.compute.servers.test_server_tags.ServerTagsTestJSON.test_delete_all_tags[id-a63b2a74-e918-4b7c-bcab-10c855f3a57e]
+tempest.api.compute.servers.test_server_tags.ServerTagsTestJSON.test_update_all_tags[id-a2c1af8c-127d-417d-974b-8115f7e3d831]
tempest.api.compute.servers.test_servers.ServersTestJSON.test_create_server_with_admin_password[id-b92d5ec7-b1dd-44a2-87e4-45e888c46ef0]
tempest.api.compute.servers.test_servers.ServersTestJSON.test_create_specify_keypair[id-f9e15296-d7f9-4e62-b53f-a04e89160833]
tempest.api.compute.servers.test_servers.ServersTestJSON.test_create_with_existing_server_name[id-8fea6be7-065e-47cf-89b8-496e6f96c699]
@@ -86,11 +95,20 @@ tempest.api.identity.v3.TestApiDiscovery.test_api_version_statuses[id-8879a470-a
tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_media_types[id-657c1970-4722-4189-8831-7325f3bc4265]
tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_version_resources[id-b9232f5e-d9e5-4d97-b96c-28d3db4de1bd]
tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_version_statuses[id-8879a470-abfb-47bb-bb8d-5a7fd279ad1e]
+tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_list_api_versions[id-721f480f-35b6-46c7-846e-047e6acea0dc]
+tempest.api.identity.v3.test_application_credentials.ApplicationCredentialsV3Test.test_create_application_credential[id-8080c75c-eddc-4786-941a-c2da7039ae61]
+tempest.api.identity.v3.test_application_credentials.ApplicationCredentialsV3Test.test_create_application_credential_expires[id-852daf0c-42b5-4239-8466-d193d0543ed3]
+tempest.api.identity.v3.test_application_credentials.ApplicationCredentialsV3Test.test_list_application_credentials[id-ff0cd457-6224-46e7-b79e-0ada4964a8a6]
+tempest.api.identity.v3.test_application_credentials.ApplicationCredentialsV3Test.test_query_application_credentials[id-9bb5e5cc-5250-493a-8869-8b665f6aa5f6]
tempest.api.identity.v3.test_catalog.IdentityCatalogTest.test_catalog_standardization[id-56b57ced-22b8-4127-9b8a-565dfb0207e2]
+tempest.api.identity.v3.test_domains.DefaultDomainTestJSON.test_default_domain_exists[id-17a5de24-e6a0-4e4a-a9ee-d85b6e5612b5]
tempest.api.identity.v3.test_tokens.TokensV3Test.test_create_token[id-6f8e4436-fc96-4282-8122-e41df57197a9]
tempest.api.identity.v3.test_tokens.TokensV3Test.test_token_auth_creation_existence_deletion[id-0f9f5a5f-d5cd-4a86-8a5b-c5ded151f212]
tempest.api.identity.v3.test_tokens.TokensV3Test.test_validate_token[id-a9512ac3-3909-48a4-b395-11f438e16260]
+tempest.api.identity.v3.test_users.IdentityV3UsersTest.test_password_history_check_self_service_api[id-941784ee-5342-4571-959b-b80dd2cea516]
+tempest.api.identity.v3.test_users.IdentityV3UsersTest.test_user_account_lockout[id-a7ad8bbf-2cff-4520-8c1d-96332e151658]
tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_delete_image[id-f848bb94-1c6e-45a4-8726-39e3a5b23535]
+tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_register_upload_get_image_file[id-139b765e-7f3d-4b3d-8b37-3ca3876ee318]
tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_update_image[id-f66891a7-a35c-41a8-b590-a065c2a1caa6]
tempest.api.image.v2.test_images.ListImagesTest.test_get_image_schema[id-622b925c-479f-4736-860d-adeaf13bc371]
tempest.api.image.v2.test_images.ListImagesTest.test_get_images_schema[id-25c8d7b2-df21-460f-87ac-93130bcdc684]
@@ -118,9 +136,29 @@ tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_delete_non_exi
tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_delete_deleted_image[id-e57fc127-7ba0-4693-92d7-1d8a05ebcba9]
tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_image_null_id[id-ef45000d-0a72-4781-866d-4cb7bf2562ad]
tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_non_existent_image[id-668743d5-08ad-4480-b2b8-15da34f81d9f]
+tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_register_with_invalid_container_format[id-292bd310-369b-41c7-a7a3-10276ef76753]
+tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_register_with_invalid_disk_format[id-70c6040c-5a97-4111-9e13-e73665264ce1]
tempest.api.image.v2.test_images_tags.ImagesTagsTest.test_update_delete_tags_for_image[id-10407036-6059-4f95-a2cd-cbbbee7ed329]
tempest.api.image.v2.test_images_tags_negative.ImagesTagsNegativeTest.test_delete_non_existing_tag[id-39c023a2-325a-433a-9eea-649bf1414b19]
tempest.api.image.v2.test_images_tags_negative.ImagesTagsNegativeTest.test_update_tags_for_non_existing_image[id-8cd30f82-6f9a-4c6e-8034-c1b51fba43d9]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcp_stateful[id-4ab211a0-276f-4552-9070-51e27f58fecf]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcp_stateful_fixedips[id-51a5e97f-f02e-4e4e-9a17-a69811d300e3]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcp_stateful_fixedips_duplicate[id-57b8302b-cba9-4fbb-8835-9168df029051]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcp_stateful_fixedips_outrange[id-98244d88-d990-4570-91d4-6b25d70d08af]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcp_stateful_router[id-e98f65db-68f4-4330-9fea-abd8c5192d4d]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcpv6_64_subnets[id-4256c61d-c538-41ea-9147-3c450c36669e]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcpv6_invalid_options[id-81f18ef6-95b5-4584-9966-10d480b7496a]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcpv6_stateless_eui64[id-e5517e62-6f16-430d-a672-f80875493d4c]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcpv6_stateless_no_ra[id-ae2f4a5d-03ff-4c42-a3b0-ce2fcb7ea832]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcpv6_stateless_no_ra_no_dhcp[id-21635b6f-165a-4d42-bf49-7d195e47342f]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcpv6_two_subnets[id-4544adf7-bb5f-4bdc-b769-b3e77026cef2]
+tempest.api.network.test_floating_ips.FloatingIPTestJSON.test_create_floating_ip_specifying_a_fixed_ip_address[id-36de4bd0-f09c-43e3-a8e1-1decc1ffd3a5]
+tempest.api.network.test_floating_ips.FloatingIPTestJSON.test_create_update_floatingip_with_port_multiple_ip_address[id-45c4c683-ea97-41ef-9c51-5e9802f2f3d7]
+tempest.api.network.test_floating_ips.FloatingIPTestJSON.test_floating_ip_delete_port[id-e1f6bffd-442f-4668-b30e-df13f2705e77]
+tempest.api.network.test_floating_ips.FloatingIPTestJSON.test_floating_ip_update_different_router[id-1bb2f731-fe5a-4b8c-8409-799ade1bed4d]
+tempest.api.network.test_floating_ips_negative.FloatingIPNegativeTestJSON.test_associate_floatingip_port_ext_net_unreachable[id-6b3b8797-6d43-4191-985c-c48b773eb429]
+tempest.api.network.test_floating_ips_negative.FloatingIPNegativeTestJSON.test_create_floatingip_in_private_network[id-50b9aeb4-9f0b-48ee-aa31-fa955a48ff54]
+tempest.api.network.test_floating_ips_negative.FloatingIPNegativeTestJSON.test_create_floatingip_with_port_ext_net_unreachable[id-22996ea8-4a81-4b27-b6e1-fa5df92fa5e8]
tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_all_attributes[id-a4d9ec4c-0306-4111-a75c-db01a709030b]
tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_allocation_pools[id-bec949c4-3147-4ba6-af5f-cd2306118404]
tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_dhcp_enabled[id-94ce038d-ff0a-4a4c-a56b-09da3ca0b55d]
@@ -171,6 +209,7 @@ tempest.api.network.test_routers.RoutersTest.test_add_multiple_router_interfaces
tempest.api.network.test_routers.RoutersTest.test_add_remove_router_interface_with_port_id[id-2b7d2f37-6748-4d78-92e5-1d590234f0d5]
tempest.api.network.test_routers.RoutersTest.test_add_remove_router_interface_with_subnet_id[id-b42e6e39-2e37-49cc-a6f4-8467e940900a]
tempest.api.network.test_routers.RoutersTest.test_create_show_list_update_delete_router[id-f64403e2-8483-4b34-8ccd-b09a87bcc68c]
+tempest.api.network.test_routers.RoutersTest.test_router_interface_port_update_with_fixed_ip[id-96522edf-b4b5-45d9-8443-fa11c26e6eff]
tempest.api.network.test_routers.RoutersTest.test_update_delete_extra_route[id-c86ac3a8-50bd-4b00-a6b8-62af84a0765c]
tempest.api.network.test_routers.RoutersTest.test_update_extra_route[id-c86ac3a8-50bd-4b00-a6b8-62af84a0765c]
tempest.api.network.test_routers.RoutersTest.test_update_router_admin_state[id-a8902683-c788-4246-95c7-ad9c6d63a4d9]
@@ -305,6 +344,17 @@ tempest.api.volume.test_volumes_snapshots.VolumesV2SnapshotTestJSON.test_snapsho
tempest.api.volume.test_volumes_snapshots.VolumesV2SnapshotTestJSON.test_snapshots_list_details_with_params[id-220a1022-1fcd-4a74-a7bd-6b859156cda2]
tempest.api.volume.test_volumes_snapshots.VolumesV2SnapshotTestJSON.test_snapshots_list_with_params[id-59f41f43-aebf-48a9-ab5d-d76340fab32b]
tempest.api.volume.test_volumes_snapshots.VolumesV2SnapshotTestJSON.test_volume_from_snapshot[id-677863d1-3142-456d-b6ac-9924f667a7f4]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_limit[id-db4d8e0a-7a2e-41cc-a712-961f6844e896]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_limit_equals_infinite[id-a1427f61-420e-48a5-b6e3-0b394fa95400]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_limit_equals_zero[id-e3b44b7f-ae87-45b5-8a8c-66110eb24d0a]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_marker[id-05489dde-44bc-4961-a1f5-3ce7ee7824f7]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_offset[id-ca96d551-17c6-4e11-b0e8-52d3bb8a63c7]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_sort_created_at_asc[id-4052c3a0-2415-440a-a8cc-305a875331b0]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_sort_created_at_desc[id-dcbbe24a-f3c0-4ec8-9274-55d48db8d1cf]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_sort_id_asc[id-c5513ada-64c1-4d28-83b9-af3307ec1388]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_sort_id_desc[id-8a7fe058-0b41-402a-8afd-2dbc5a4a718b]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_sort_name_asc[id-d58b5fed-0c37-42d3-8c5d-39014ac13c00]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_sort_name_desc[id-96ba6f4d-1f18-47e1-b4bc-76edc6c21250]
tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshots_list_details_with_params[id-220a1022-1fcd-4a74-a7bd-6b859156cda2]
tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshots_list_with_params[id-59f41f43-aebf-48a9-ab5d-d76340fab32b]
tempest.api.volume.test_volumes_snapshots_list.VolumesV2SnapshotListTestJSON.test_snapshots_list_details_with_params[id-220a1022-1fcd-4a74-a7bd-6b859156cda2]
diff --git a/docker/smoke/hooks/post_checkout b/docker/smoke/hooks/post_checkout
index 3e5670b4a..b23f6f449 100644
--- a/docker/smoke/hooks/post_checkout
+++ b/docker/smoke/hooks/post_checkout
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
from="${DOCKER_REPO%/*}/functest-tempest:${DOCKER_TAG}"
sed -i "s|^FROM.*$|FROM ${from}|" Dockerfile
diff --git a/docker/smoke/object.txt b/docker/smoke/object.txt
index 1bc1fa07c..720532e2a 100644
--- a/docker/smoke/object.txt
+++ b/docker/smoke/object.txt
@@ -1,5 +1,7 @@
tempest.api.identity.v3.test_tokens.TokensV3Test.test_create_token[id-6f8e4436-fc96-4282-8122-e41df57197a9]
tempest.api.identity.v3.test_tokens.TokensV3Test.test_token_auth_creation_existence_deletion[id-0f9f5a5f-d5cd-4a86-8a5b-c5ded151f212]
+tempest.api.object_storage.test_account_bulk.BulkTest.test_bulk_delete[id-c075e682-0d2a-43b2-808d-4116200d736d]
+tempest.api.object_storage.test_account_bulk.BulkTest.test_extract_archive[id-a407de51-1983-47cc-9f14-47c2b059413c]
tempest.api.object_storage.test_account_quotas.AccountQuotasTest.test_upload_valid_object[id-a22ef352-a342-4587-8f47-3bbdb5b039c4]
tempest.api.object_storage.test_account_quotas_negative.AccountQuotasNegativeTest.test_user_modify_quota[id-d1dc5076-555e-4e6d-9697-28f1fe976324]
tempest.api.object_storage.test_account_services.AccountTest.test_list_containers[id-3499406a-ae53-4f8c-b43a-133d4dc6fe3f]
@@ -15,6 +17,9 @@ tempest.api.object_storage.test_account_services.AccountTest.test_list_extension
tempest.api.object_storage.test_account_services.AccountTest.test_list_extensions[id-6eb04a6a-4860-4e31-ba91-ea3347d76b58]
tempest.api.object_storage.test_account_services.AccountTest.test_list_no_account_metadata[id-b904c2e3-24c2-4dba-ad7d-04e90a761be5]
tempest.api.object_storage.test_account_services.AccountTest.test_list_no_containers[id-884ec421-fbad-4fcc-916b-0580f2699565]
+tempest.api.object_storage.test_account_services.AccountTest.test_update_account_metadata_with_create_metadata_key[id-64fd53f3-adbd-4639-af54-436e4982dbfb]
+tempest.api.object_storage.test_account_services.AccountTest.test_update_account_metadata_with_delete_metadata[id-9f60348d-c46f-4465-ae06-d51dbd470953]
+tempest.api.object_storage.test_account_services.AccountTest.test_update_account_metadata_with_delete_metadata_key[id-d4d884d3-4696-4b85-bc98-4f57c4dd2bf1]
tempest.api.object_storage.test_container_acl.ObjectTestACLs.test_read_object_with_rights[id-a3270f3f-7640-4944-8448-c7ea783ea5b6]
tempest.api.object_storage.test_container_acl.ObjectTestACLs.test_write_object_with_rights[id-aa58bfa5-40d9-4bc3-82b4-d07f4a9e392a]
tempest.api.object_storage.test_container_quotas.ContainerQuotasTest.test_upload_large_object[id-22eeeb2b-3668-4160-baef-44790f65a5a0]
diff --git a/docker/smoke/platform.txt b/docker/smoke/platform.txt
index 0f59d2dae..4eca58790 100644
--- a/docker/smoke/platform.txt
+++ b/docker/smoke/platform.txt
@@ -3,6 +3,16 @@ tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_list_flavors_wit
tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_delete_image[id-3731d080-d4c5-4872-b41a-64d0d0021314]
tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_image_specify_multibyte_character_image_name[id-3b7c6fe4-dfe7-477c-9243-b06359db51e6]
tempest.api.compute.keypairs.test_keypairs_v22.KeyPairsV22TestJSON.test_keypairsv22_create_list_show_with_type[id-89d59d43-f735-441a-abcf-0601727f47b6]
+tempest.api.compute.security_groups.test_security_groups.SecurityGroupsTestJSON.test_list_security_groups_by_server[id-79517d60-535a-438f-af3d-e6feab1cbea7]
+tempest.api.compute.security_groups.test_security_groups.SecurityGroupsTestJSON.test_security_group_create_get_delete[id-ecc0da4a-2117-48af-91af-993cca39a615]
+tempest.api.compute.security_groups.test_security_groups.SecurityGroupsTestJSON.test_security_groups_create_list_delete[id-eb2b087d-633d-4d0d-a7bd-9e6ba35b32de]
+tempest.api.compute.security_groups.test_security_groups.SecurityGroupsTestJSON.test_server_security_groups[id-fe4abc0d-83f5-4c50-ad11-57a1127297a2]
+tempest.api.compute.security_groups.test_security_groups.SecurityGroupsTestJSON.test_update_security_groups[id-7d4e1d3c-3209-4d6d-b020-986304ebad1f]
+tempest.api.compute.security_groups.test_security_groups_negative.SecurityGroupsNegativeTestJSON.test_delete_nonexistent_security_group[id-6727c00b-214c-4f9e-9a52-017ac3e98411]
+tempest.api.compute.security_groups.test_security_groups_negative.SecurityGroupsNegativeTestJSON.test_delete_security_group_without_passing_id[id-1438f330-8fa4-4aeb-8a94-37c250106d7f]
+tempest.api.compute.security_groups.test_security_groups_negative.SecurityGroupsNegativeTestJSON.test_delete_the_default_security_group[id-36a1629f-c6da-4a26-b8b8-55e7e5d5cd58]
+tempest.api.compute.security_groups.test_security_groups_negative.SecurityGroupsNegativeTestJSON.test_security_group_get_nonexistent_group[id-673eaec1-9b3e-48ed-bdf1-2786c1b9661c]
+tempest.api.compute.security_groups.test_security_groups_negative.SecurityGroupsNegativeTestJSON.test_update_non_existent_security_group[id-27edee9c-873d-4da6-a68a-3c256efebe8f]
tempest.api.compute.servers.test_availability_zone.AZV2TestJSON.test_get_availability_zone_list_with_non_admin_user[id-a8333aa2-205c-449f-a828-d38c2489bf25]
tempest.api.compute.servers.test_create_server.ServersTestJSON.test_host_name_is_same_as_server_name[id-ac1ad47f-984b-4441-9274-c9079b7a0666]
tempest.api.compute.servers.test_create_server.ServersTestJSON.test_list_servers[id-9a438d88-10c6-4bcd-8b5b-5b6e25e1346f]
@@ -28,6 +38,7 @@ tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.t
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_limit[id-614cdfc1-d557-4bac-915b-3e67b48eee76]
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_server_name[id-9b067a7b-7fee-4f6a-b29c-be43fe18fc5a]
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filter_by_server_status[id-ca78e20e-fddb-4ce6-b7f7-bcbf8605e66e]
+tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filtered_by_ip_regex[id-a905e287-c35e-42f2-b132-d02b09f3654a]
tempest.api.compute.servers.test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filtered_by_name_wildcard[id-e9f624ee-92af-4562-8bec-437945a18dcb]
tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_changes_since_future_date[id-74745ad8-b346-45b5-b9b8-509d7447fc1f]
tempest.api.compute.servers.test_list_servers_negative.ListServersNegativeTestJSON.test_list_servers_by_changes_since_invalid_date[id-87d12517-e20a-4c9c-97b6-dd1628d6d6c9]
@@ -45,12 +56,10 @@ tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_lock_
tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_reboot_server_hard[id-2cb1baf6-ac8d-4429-bf0d-ba8a0ba53e32]
tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_rebuild_server[id-aaa6cdf3-55a7-461a-add9-1c8596b9a07c]
tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_stop_start_server[id-af8eafd4-38a7-4a4b-bdbc-75145a580560]
-tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_delete_server_metadata_item[id-127642d6-4c7b-4486-b7cd-07265a378658]
-tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_get_server_metadata_item[id-3043c57d-7e0e-49a6-9a96-ad569c265e6a]
-tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_list_server_metadata[id-479da087-92b3-4dcf-aeb3-fd293b2d14ce]
-tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_set_server_metadata[id-211021f6-21de-4657-a68f-908878cfe251]
-tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_set_server_metadata_item[id-58c02d4f-5c67-40be-8744-d3fa5982eb1c]
-tempest.api.compute.servers.test_server_metadata.ServerMetadataTestJSON.test_update_server_metadata[id-344d981e-0c33-4997-8a5d-6c1d803e4134]
+tempest.api.compute.servers.test_server_tags.ServerTagsTestJSON.test_check_tag_existence[id-81279a66-61c3-4759-b830-a2dbe64cbe08]
+tempest.api.compute.servers.test_server_tags.ServerTagsTestJSON.test_create_delete_tag[id-8d95abe2-c658-4c42-9a44-c0258500306b]
+tempest.api.compute.servers.test_server_tags.ServerTagsTestJSON.test_delete_all_tags[id-a63b2a74-e918-4b7c-bcab-10c855f3a57e]
+tempest.api.compute.servers.test_server_tags.ServerTagsTestJSON.test_update_all_tags[id-a2c1af8c-127d-417d-974b-8115f7e3d831]
tempest.api.compute.servers.test_servers.ServersTestJSON.test_create_server_with_admin_password[id-b92d5ec7-b1dd-44a2-87e4-45e888c46ef0]
tempest.api.compute.servers.test_servers.ServersTestJSON.test_create_specify_keypair[id-f9e15296-d7f9-4e62-b53f-a04e89160833]
tempest.api.compute.servers.test_servers.ServersTestJSON.test_create_with_existing_server_name[id-8fea6be7-065e-47cf-89b8-496e6f96c699]
@@ -86,11 +95,20 @@ tempest.api.identity.v3.TestApiDiscovery.test_api_version_statuses[id-8879a470-a
tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_media_types[id-657c1970-4722-4189-8831-7325f3bc4265]
tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_version_resources[id-b9232f5e-d9e5-4d97-b96c-28d3db4de1bd]
tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_version_statuses[id-8879a470-abfb-47bb-bb8d-5a7fd279ad1e]
+tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_list_api_versions[id-721f480f-35b6-46c7-846e-047e6acea0dc]
+tempest.api.identity.v3.test_application_credentials.ApplicationCredentialsV3Test.test_create_application_credential[id-8080c75c-eddc-4786-941a-c2da7039ae61]
+tempest.api.identity.v3.test_application_credentials.ApplicationCredentialsV3Test.test_create_application_credential_expires[id-852daf0c-42b5-4239-8466-d193d0543ed3]
+tempest.api.identity.v3.test_application_credentials.ApplicationCredentialsV3Test.test_list_application_credentials[id-ff0cd457-6224-46e7-b79e-0ada4964a8a6]
+tempest.api.identity.v3.test_application_credentials.ApplicationCredentialsV3Test.test_query_application_credentials[id-9bb5e5cc-5250-493a-8869-8b665f6aa5f6]
tempest.api.identity.v3.test_catalog.IdentityCatalogTest.test_catalog_standardization[id-56b57ced-22b8-4127-9b8a-565dfb0207e2]
+tempest.api.identity.v3.test_domains.DefaultDomainTestJSON.test_default_domain_exists[id-17a5de24-e6a0-4e4a-a9ee-d85b6e5612b5]
tempest.api.identity.v3.test_tokens.TokensV3Test.test_create_token[id-6f8e4436-fc96-4282-8122-e41df57197a9]
tempest.api.identity.v3.test_tokens.TokensV3Test.test_token_auth_creation_existence_deletion[id-0f9f5a5f-d5cd-4a86-8a5b-c5ded151f212]
tempest.api.identity.v3.test_tokens.TokensV3Test.test_validate_token[id-a9512ac3-3909-48a4-b395-11f438e16260]
+tempest.api.identity.v3.test_users.IdentityV3UsersTest.test_password_history_check_self_service_api[id-941784ee-5342-4571-959b-b80dd2cea516]
+tempest.api.identity.v3.test_users.IdentityV3UsersTest.test_user_account_lockout[id-a7ad8bbf-2cff-4520-8c1d-96332e151658]
tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_delete_image[id-f848bb94-1c6e-45a4-8726-39e3a5b23535]
+tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_register_upload_get_image_file[id-139b765e-7f3d-4b3d-8b37-3ca3876ee318]
tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_update_image[id-f66891a7-a35c-41a8-b590-a065c2a1caa6]
tempest.api.image.v2.test_images.ListImagesTest.test_get_image_schema[id-622b925c-479f-4736-860d-adeaf13bc371]
tempest.api.image.v2.test_images.ListImagesTest.test_get_images_schema[id-25c8d7b2-df21-460f-87ac-93130bcdc684]
@@ -118,9 +136,29 @@ tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_delete_non_exi
tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_delete_deleted_image[id-e57fc127-7ba0-4693-92d7-1d8a05ebcba9]
tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_image_null_id[id-ef45000d-0a72-4781-866d-4cb7bf2562ad]
tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_get_non_existent_image[id-668743d5-08ad-4480-b2b8-15da34f81d9f]
+tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_register_with_invalid_container_format[id-292bd310-369b-41c7-a7a3-10276ef76753]
+tempest.api.image.v2.test_images_negative.ImagesNegativeTest.test_register_with_invalid_disk_format[id-70c6040c-5a97-4111-9e13-e73665264ce1]
tempest.api.image.v2.test_images_tags.ImagesTagsTest.test_update_delete_tags_for_image[id-10407036-6059-4f95-a2cd-cbbbee7ed329]
tempest.api.image.v2.test_images_tags_negative.ImagesTagsNegativeTest.test_delete_non_existing_tag[id-39c023a2-325a-433a-9eea-649bf1414b19]
tempest.api.image.v2.test_images_tags_negative.ImagesTagsNegativeTest.test_update_tags_for_non_existing_image[id-8cd30f82-6f9a-4c6e-8034-c1b51fba43d9]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcp_stateful[id-4ab211a0-276f-4552-9070-51e27f58fecf]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcp_stateful_fixedips[id-51a5e97f-f02e-4e4e-9a17-a69811d300e3]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcp_stateful_fixedips_duplicate[id-57b8302b-cba9-4fbb-8835-9168df029051]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcp_stateful_fixedips_outrange[id-98244d88-d990-4570-91d4-6b25d70d08af]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcp_stateful_router[id-e98f65db-68f4-4330-9fea-abd8c5192d4d]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcpv6_64_subnets[id-4256c61d-c538-41ea-9147-3c450c36669e]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcpv6_invalid_options[id-81f18ef6-95b5-4584-9966-10d480b7496a]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcpv6_stateless_eui64[id-e5517e62-6f16-430d-a672-f80875493d4c]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcpv6_stateless_no_ra[id-ae2f4a5d-03ff-4c42-a3b0-ce2fcb7ea832]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcpv6_stateless_no_ra_no_dhcp[id-21635b6f-165a-4d42-bf49-7d195e47342f]
+tempest.api.network.test_dhcp_ipv6.NetworksTestDHCPv6.test_dhcpv6_two_subnets[id-4544adf7-bb5f-4bdc-b769-b3e77026cef2]
+tempest.api.network.test_floating_ips.FloatingIPTestJSON.test_create_floating_ip_specifying_a_fixed_ip_address[id-36de4bd0-f09c-43e3-a8e1-1decc1ffd3a5]
+tempest.api.network.test_floating_ips.FloatingIPTestJSON.test_create_update_floatingip_with_port_multiple_ip_address[id-45c4c683-ea97-41ef-9c51-5e9802f2f3d7]
+tempest.api.network.test_floating_ips.FloatingIPTestJSON.test_floating_ip_delete_port[id-e1f6bffd-442f-4668-b30e-df13f2705e77]
+tempest.api.network.test_floating_ips.FloatingIPTestJSON.test_floating_ip_update_different_router[id-1bb2f731-fe5a-4b8c-8409-799ade1bed4d]
+tempest.api.network.test_floating_ips_negative.FloatingIPNegativeTestJSON.test_associate_floatingip_port_ext_net_unreachable[id-6b3b8797-6d43-4191-985c-c48b773eb429]
+tempest.api.network.test_floating_ips_negative.FloatingIPNegativeTestJSON.test_create_floatingip_in_private_network[id-50b9aeb4-9f0b-48ee-aa31-fa955a48ff54]
+tempest.api.network.test_floating_ips_negative.FloatingIPNegativeTestJSON.test_create_floatingip_with_port_ext_net_unreachable[id-22996ea8-4a81-4b27-b6e1-fa5df92fa5e8]
tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_all_attributes[id-a4d9ec4c-0306-4111-a75c-db01a709030b]
tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_allocation_pools[id-bec949c4-3147-4ba6-af5f-cd2306118404]
tempest.api.network.test_networks.NetworksTest.test_create_delete_subnet_with_dhcp_enabled[id-94ce038d-ff0a-4a4c-a56b-09da3ca0b55d]
@@ -171,6 +209,7 @@ tempest.api.network.test_routers.RoutersTest.test_add_multiple_router_interfaces
tempest.api.network.test_routers.RoutersTest.test_add_remove_router_interface_with_port_id[id-2b7d2f37-6748-4d78-92e5-1d590234f0d5]
tempest.api.network.test_routers.RoutersTest.test_add_remove_router_interface_with_subnet_id[id-b42e6e39-2e37-49cc-a6f4-8467e940900a]
tempest.api.network.test_routers.RoutersTest.test_create_show_list_update_delete_router[id-f64403e2-8483-4b34-8ccd-b09a87bcc68c]
+tempest.api.network.test_routers.RoutersTest.test_router_interface_port_update_with_fixed_ip[id-96522edf-b4b5-45d9-8443-fa11c26e6eff]
tempest.api.network.test_routers.RoutersTest.test_update_delete_extra_route[id-c86ac3a8-50bd-4b00-a6b8-62af84a0765c]
tempest.api.network.test_routers.RoutersTest.test_update_extra_route[id-c86ac3a8-50bd-4b00-a6b8-62af84a0765c]
tempest.api.network.test_routers.RoutersTest.test_update_router_admin_state[id-a8902683-c788-4246-95c7-ad9c6d63a4d9]
@@ -202,6 +241,8 @@ tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_show
tempest.api.network.test_security_groups_negative.NegativeSecGroupTest.test_show_non_existent_security_group_rule[id-4c094c09-000b-4e41-8100-9617600c02a6]
tempest.api.network.test_subnetpools_extensions.SubnetPoolsTestJSON.test_create_list_show_update_delete_subnetpools[id-62595970-ab1c-4b7f-8fcc-fddfe55e9811]
tempest.api.network.test_versions.NetworksApiDiscovery.test_api_version_resources[id-cac8a836-c2e0-4304-b556-cd299c7281d1]
+tempest.api.object_storage.test_account_bulk.BulkTest.test_bulk_delete[id-c075e682-0d2a-43b2-808d-4116200d736d]
+tempest.api.object_storage.test_account_bulk.BulkTest.test_extract_archive[id-a407de51-1983-47cc-9f14-47c2b059413c]
tempest.api.object_storage.test_account_quotas.AccountQuotasTest.test_upload_valid_object[id-a22ef352-a342-4587-8f47-3bbdb5b039c4]
tempest.api.object_storage.test_account_quotas_negative.AccountQuotasNegativeTest.test_user_modify_quota[id-d1dc5076-555e-4e6d-9697-28f1fe976324]
tempest.api.object_storage.test_account_services.AccountTest.test_list_containers[id-3499406a-ae53-4f8c-b43a-133d4dc6fe3f]
@@ -217,6 +258,9 @@ tempest.api.object_storage.test_account_services.AccountTest.test_list_extension
tempest.api.object_storage.test_account_services.AccountTest.test_list_extensions[id-6eb04a6a-4860-4e31-ba91-ea3347d76b58]
tempest.api.object_storage.test_account_services.AccountTest.test_list_no_account_metadata[id-b904c2e3-24c2-4dba-ad7d-04e90a761be5]
tempest.api.object_storage.test_account_services.AccountTest.test_list_no_containers[id-884ec421-fbad-4fcc-916b-0580f2699565]
+tempest.api.object_storage.test_account_services.AccountTest.test_update_account_metadata_with_create_metadata_key[id-64fd53f3-adbd-4639-af54-436e4982dbfb]
+tempest.api.object_storage.test_account_services.AccountTest.test_update_account_metadata_with_delete_metadata[id-9f60348d-c46f-4465-ae06-d51dbd470953]
+tempest.api.object_storage.test_account_services.AccountTest.test_update_account_metadata_with_delete_metadata_key[id-d4d884d3-4696-4b85-bc98-4f57c4dd2bf1]
tempest.api.object_storage.test_container_acl.ObjectTestACLs.test_read_object_with_rights[id-a3270f3f-7640-4944-8448-c7ea783ea5b6]
tempest.api.object_storage.test_container_acl.ObjectTestACLs.test_write_object_with_rights[id-aa58bfa5-40d9-4bc3-82b4-d07f4a9e392a]
tempest.api.object_storage.test_container_quotas.ContainerQuotasTest.test_upload_large_object[id-22eeeb2b-3668-4160-baef-44790f65a5a0]
@@ -364,6 +408,17 @@ tempest.api.volume.test_volumes_snapshots.VolumesV2SnapshotTestJSON.test_snapsho
tempest.api.volume.test_volumes_snapshots.VolumesV2SnapshotTestJSON.test_snapshots_list_details_with_params[id-220a1022-1fcd-4a74-a7bd-6b859156cda2]
tempest.api.volume.test_volumes_snapshots.VolumesV2SnapshotTestJSON.test_snapshots_list_with_params[id-59f41f43-aebf-48a9-ab5d-d76340fab32b]
tempest.api.volume.test_volumes_snapshots.VolumesV2SnapshotTestJSON.test_volume_from_snapshot[id-677863d1-3142-456d-b6ac-9924f667a7f4]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_limit[id-db4d8e0a-7a2e-41cc-a712-961f6844e896]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_limit_equals_infinite[id-a1427f61-420e-48a5-b6e3-0b394fa95400]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_limit_equals_zero[id-e3b44b7f-ae87-45b5-8a8c-66110eb24d0a]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_marker[id-05489dde-44bc-4961-a1f5-3ce7ee7824f7]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_offset[id-ca96d551-17c6-4e11-b0e8-52d3bb8a63c7]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_sort_created_at_asc[id-4052c3a0-2415-440a-a8cc-305a875331b0]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_sort_created_at_desc[id-dcbbe24a-f3c0-4ec8-9274-55d48db8d1cf]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_sort_id_asc[id-c5513ada-64c1-4d28-83b9-af3307ec1388]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_sort_id_desc[id-8a7fe058-0b41-402a-8afd-2dbc5a4a718b]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_sort_name_asc[id-d58b5fed-0c37-42d3-8c5d-39014ac13c00]
+tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshot_list_param_sort_name_desc[id-96ba6f4d-1f18-47e1-b4bc-76edc6c21250]
tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshots_list_details_with_params[id-220a1022-1fcd-4a74-a7bd-6b859156cda2]
tempest.api.volume.test_volumes_snapshots_list.VolumesSnapshotListTestJSON.test_snapshots_list_with_params[id-59f41f43-aebf-48a9-ab5d-d76340fab32b]
tempest.api.volume.test_volumes_snapshots_list.VolumesV2SnapshotListTestJSON.test_snapshots_list_details_with_params[id-220a1022-1fcd-4a74-a7bd-6b859156cda2]
diff --git a/docker/smoke/testcases.yaml b/docker/smoke/testcases.yaml
index 9b61b003a..77bee5704 100644
--- a/docker/smoke/testcases.yaml
+++ b/docker/smoke/testcases.yaml
@@ -110,11 +110,11 @@ tiers:
project_name: functest
criteria: 100
blocking: false
- deny_skipping: true
- tests_count: 222
description: >-
This test case runs a sub group of tests of the OpenStack
Compute testcases.
+ dependencies:
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: refstack
args:
@@ -127,8 +127,6 @@ tiers:
project_name: functest
criteria: 100
blocking: false
- deny_skipping: true
- tests_count: 58
description: >-
This test case runs a sub group of tests of the OpenStack
Object testcases.
@@ -144,11 +142,11 @@ tiers:
project_name: functest
criteria: 100
blocking: false
- deny_skipping: true
- tests_count: 278
description: >-
This test case runs a sub group of tests of the OpenStack
Platform testcases.
+ dependencies:
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: refstack
args:
@@ -223,7 +221,31 @@ tiers:
(?!.*test_delete_auto_allocated_topology)\
(?!.*test_create_network_provider_segmentation_id)\
(?!.*compute.test_floating_ips_rbac)\
+ (?!.*test_reset_network)\
(?!.*test_create_image_from_volume_backed_server)\
+ (?!.*test_network_ip_availability_rbac.NetworkIpAvailabilityExtRbacTest.test_get_network_ip_availabilities)\
+ (?!.*test_policy_bandwidth_limit_rule_rbac)\
+ (?!.*test_policy_minimum_bandwidth_rule_rbac)\
+ (?!.*test_group_type_specs)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_create_group_type)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_delete_group_type)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_update_group_type)\
+ (?!.*test_group_snapshots_rbac)\
+ (?!.*test_groups_rbac)\
+ (?!.*test_quota_classes_rbac)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_create_interface)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_delete_interface)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_show_interface)\
+ (?!.*test_user_messages_rbac)\
+ (?!.*test_volume_actions_rbac.VolumesActionsV310RbacTest)\
+ (?!.*test_volume_actions_rbac.VolumesActionsV312RbacTest)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_delete_volume_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_list_volumes_details_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_show_volume_details_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_update_volume_image_metadata)\
+ (?!.*test_volumes_backup_rbac.VolumesBackupsV318RbacTest)\
+ (?!.*test_volumes_backup_rbac.VolumesBackupsV39RbacTest)\
+ (?!.*test_volume_types_rbac)\
(?=.*[.*\bslow\b.*])\
(^patrole_tempest_plugin.tests.api.(compute|image|network|volume))"
option:
@@ -245,7 +267,31 @@ tiers:
(?!.*test_delete_auto_allocated_topology)\
(?!.*test_create_network_provider_segmentation_id)\
(?!.*compute.test_floating_ips_rbac)\
+ (?!.*test_reset_network)\
(?!.*test_create_image_from_volume_backed_server)\
+ (?!.*test_network_ip_availability_rbac.NetworkIpAvailabilityExtRbacTest.test_get_network_ip_availabilities)\
+ (?!.*test_policy_bandwidth_limit_rule_rbac)\
+ (?!.*test_policy_minimum_bandwidth_rule_rbac)\
+ (?!.*test_group_type_specs)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_create_group_type)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_delete_group_type)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_update_group_type)\
+ (?!.*test_group_snapshots_rbac)\
+ (?!.*test_groups_rbac)\
+ (?!.*test_quota_classes_rbac)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_create_interface)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_delete_interface)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_show_interface)\
+ (?!.*test_user_messages_rbac)\
+ (?!.*test_volume_actions_rbac.VolumesActionsV310RbacTest)\
+ (?!.*test_volume_actions_rbac.VolumesActionsV312RbacTest)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_delete_volume_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_list_volumes_details_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_show_volume_details_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_update_volume_image_metadata)\
+ (?!.*test_volumes_backup_rbac.VolumesBackupsV318RbacTest)\
+ (?!.*test_volumes_backup_rbac.VolumesBackupsV39RbacTest)\
+ (?!.*test_volume_types_rbac)\
(?=.*[.*\bslow\b.*])\
(^patrole_tempest_plugin.tests.api.(compute|image|network|volume))"
option:
@@ -267,7 +313,31 @@ tiers:
(?!.*test_delete_auto_allocated_topology)\
(?!.*test_create_network_provider_segmentation_id)\
(?!.*compute.test_floating_ips_rbac)\
+ (?!.*test_reset_network)\
(?!.*test_create_image_from_volume_backed_server)\
+ (?!.*test_network_ip_availability_rbac.NetworkIpAvailabilityExtRbacTest.test_get_network_ip_availabilities)\
+ (?!.*test_policy_bandwidth_limit_rule_rbac)\
+ (?!.*test_policy_minimum_bandwidth_rule_rbac)\
+ (?!.*test_group_type_specs)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_create_group_type)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_delete_group_type)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_update_group_type)\
+ (?!.*test_group_snapshots_rbac)\
+ (?!.*test_groups_rbac)\
+ (?!.*test_quota_classes_rbac)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_create_interface)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_delete_interface)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_show_interface)\
+ (?!.*test_user_messages_rbac)\
+ (?!.*test_volume_actions_rbac.VolumesActionsV310RbacTest)\
+ (?!.*test_volume_actions_rbac.VolumesActionsV312RbacTest)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_delete_volume_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_list_volumes_details_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_show_volume_details_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_update_volume_image_metadata)\
+ (?!.*test_volumes_backup_rbac.VolumesBackupsV318RbacTest)\
+ (?!.*test_volumes_backup_rbac.VolumesBackupsV39RbacTest)\
+ (?!.*test_volume_types_rbac)\
(?=.*[.*\bslow\b.*])\
(^patrole_tempest_plugin.tests.api.(compute|image|network|volume))"
option:
diff --git a/docker/vnf/Dockerfile b/docker/vnf/Dockerfile
index aa98b6c25..99f848fcd 100644
--- a/docker/vnf/Dockerfile
+++ b/docker/vnf/Dockerfile
@@ -54,5 +54,5 @@ RUN apk --no-cache add --update \
/src/vims-test/build-infra/.git /src/opnfv-vnf-vyos-blueprint/.git \
/tmp/clearwater-heat-singlenet-deps.patch && \
apk del .build-deps
-COPY testcases.yaml /usr/lib/python3.8/site-packages/xtesting/ci/testcases.yaml
+COPY testcases.yaml /etc/xtesting/testcases.yaml
CMD ["run_tests", "-t", "all"]
diff --git a/docker/vnf/hooks/post_checkout b/docker/vnf/hooks/post_checkout
index 8d0e98124..c347524ea 100644
--- a/docker/vnf/hooks/post_checkout
+++ b/docker/vnf/hooks/post_checkout
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
from="${DOCKER_REPO%/*}/functest-core:${DOCKER_TAG}"
sed -i "s|^FROM.*$|FROM ${from}|" Dockerfile
diff --git a/docker/vnf/testcases.yaml b/docker/vnf/testcases.yaml
index 0d21c63c3..6b483af6a 100644
--- a/docker/vnf/testcases.yaml
+++ b/docker/vnf/testcases.yaml
@@ -23,6 +23,8 @@ tiers:
This test case deploys an OpenSource vIMS solution from
Clearwater using the Cloudify orchestrator. It also runs
some signaling traffic.
+ dependencies:
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: cloudify_ims
@@ -35,6 +37,8 @@ tiers:
This test case deploys an OpenSource vIMS solution from
Clearwater using the OpenStack Heat orchestrator.
It also runs some signaling traffic.
+ dependencies:
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: heat_ims
@@ -45,6 +49,8 @@ tiers:
blocking: false
description: >-
This test case is vRouter testing.
+ dependencies:
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: vyos_vrouter
@@ -56,5 +62,7 @@ tiers:
description: >-
vEPC validation with Juju as VNF manager and ABoT as test
executor.
+ dependencies:
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: juju_epc
diff --git a/docs/com/css/theme/OPNFV-Berlin.css b/docs/com/css/theme/OPNFV-Berlin.css
index 9f957d6f1..34f73b4bd 100644
--- a/docs/com/css/theme/OPNFV-Berlin.css
+++ b/docs/com/css/theme/OPNFV-Berlin.css
@@ -31,13 +31,13 @@ body {
bottom: 1em;
left: 1em;
font-size: 0.5em;
-
+
}
.reveal .slides > section, .reveal .slides > section > section {
line-height: 1.3;
font-weight: inherit; }
-
+
.reveal .opnfv-title {
background-image: url("../../img/title-bg-berlin.png" no-repeat);
}
@@ -87,7 +87,7 @@ body {
.reveal h1 {
text-shadow: none; }
-
+
/*********************************************
* OTHER
diff --git a/docs/com/css/theme/OPNFV.css b/docs/com/css/theme/OPNFV.css
index 745a2a8de..c59c59f8d 100644
--- a/docs/com/css/theme/OPNFV.css
+++ b/docs/com/css/theme/OPNFV.css
@@ -30,13 +30,13 @@ body {
bottom: 1em;
left: 1em;
font-size: 0.5em;
-
+
}
.reveal .slides > section, .reveal .slides > section > section {
line-height: 1.3;
font-weight: inherit; }
-
+
.reveal .opnfv-title {
background-image: url("../../img/title-bg.png" no-repeat);
}
@@ -83,7 +83,7 @@ body {
.reveal h1 {
text-shadow: none; }
-
+
/*********************************************
* OTHER
diff --git a/docs/com/pres/Summit/Berlin-2016/conversation.html b/docs/com/pres/Summit/Berlin-2016/conversation.html
index 356c2ad1e..5c1e18450 100644
--- a/docs/com/pres/Summit/Berlin-2016/conversation.html
+++ b/docs/com/pres/Summit/Berlin-2016/conversation.html
@@ -215,7 +215,7 @@
</div>
<div class='footer'>
- <img src="../../../img/logo-OPNFV-Berlin.png" alt="OPNFV logo">
+ <img src="../../../img/logo-OPNFV-Berlin.png" alt="OPNFV logo">
</div>
</div>
diff --git a/docs/com/pres/Summit/Berlin-2016/summit-Berlin.html b/docs/com/pres/Summit/Berlin-2016/summit-Berlin.html
index 97fa66c18..2b3b3b4ef 100644
--- a/docs/com/pres/Summit/Berlin-2016/summit-Berlin.html
+++ b/docs/com/pres/Summit/Berlin-2016/summit-Berlin.html
@@ -81,7 +81,7 @@
</section>
<section data-markdown>
## so concretely...
- * Validate scenarios for the release
+ * Validate scenarios for the release
* Give confidence on OPNFV fresh releases
* Ensure consistancy towards installers
* Ensure End to End interoperability
@@ -135,7 +135,7 @@
* 2 installers: Foreman/Fuel
* Lots of manual operations (reporting, documentation)
</section>
- </section>
+ </section>
<section>
<section data-markdown>
# Brahmaputra
@@ -146,8 +146,8 @@
* 3 feature projects: doctor, promise, sdnvpn
* 13 Scenarios
* 4 Installers (Apex, Compass, Fuel, Joid)
- * Test result collection
- * Automatic dashboard
+ * Test result collection
+ * Automatic dashboard
</section>
<section>
<h3>Jiras</h3>
@@ -196,7 +196,7 @@
</tr>
</tbody>
</table>
- * difference of duration due to POD and test suites
+ * difference of duration due to POD and test suites
</section>
<section data-markdown>
@@ -204,7 +204,7 @@
* Tempest/Rally
* harmonizing installer related OpenStack configuration differences
* creating customized test lists
- * main challenge was resolving SDN controller interworking problems
+ * main challenge was resolving SDN controller interworking problems
* vIMS
* complete but complex test case
* very interesting to automate (pre MANO, most of Telco needs met there)
@@ -212,7 +212,7 @@
</section>
- <section>
+ <section>
<section data-markdown>
# Colorado
</section>
@@ -221,7 +221,7 @@
## What's new?
* New internal test cases: healthcheck, security
* New controler: OpenContrail?
- * New feature projects (domino, multisites, movie, parser, moon, copper, models, onos-sfc...)
+ * New feature projects (domino, multisites, movie, parser, moon, copper, models, onos-sfc...)
* ARM Support (congratulations to ENEA team!)
* Simplified feature project integration journey
</section>
@@ -255,19 +255,19 @@
* New VNFs (vEPC, vCDN, vWhatever...)
* Multi sites (e.g bgpvpn with different back ends)
* Better test coverage
- </section>
+ </section>
<section data-markdown>
## A Functional testing As a Service framework
* Scenario owner can select their relevant test cases
* Test duration estimation
* Agile dashboarding
* Analytics
- </section>
+ </section>
<section data-markdown>
## Upstream
* Rally: time to give back more
- * Functest description for ETSI?
- </section>
+ * Functest description for ETSI?
+ </section>
</section>
<section>
diff --git a/docs/com/pres/Summit/Berlin-2016/testapi.html b/docs/com/pres/Summit/Berlin-2016/testapi.html
index c40637cbf..5e56d5052 100644
--- a/docs/com/pres/Summit/Berlin-2016/testapi.html
+++ b/docs/com/pres/Summit/Berlin-2016/testapi.html
@@ -69,7 +69,7 @@
* Test cases
* Results
</section>
-
+
<section data-markdown>
## Status Currently
* From many projects
@@ -87,7 +87,7 @@
</aside>
</script>
</section>
-
+
<section data-markdown>
## Help building
* Dashboards
@@ -135,8 +135,8 @@
<li>Scripts to support databse backup/restore/update</li>
<li>Support installation</li>
<aside class='notes'>
- So you can discover and understand the capabilities of the service without
- access to source code, documentation, or through network traffic inspection,
+ So you can discover and understand the capabilities of the service without
+ access to source code, documentation, or through network traffic inspection,
and also you can interact with the TestAPI directly through swagger website.
</aside>
</ul>
@@ -148,12 +148,12 @@
<section data-markdown>
# API Evoluation
</section>
-
+
<section>
<h2> URI changes...</h2>
- <div style="text-align:left"">
+ <div style="text-align:left"">
<p> testresults.opnfv.org/<span style="color:lightblue">TestAPI</span> => <br>testresults.opnfv.org/<span style="color:yellow">test/api/v1</span> </p>
-
+
<p> /test/api/v1/<b>pods</b></p>
<p> /test/api/v1/<b>projects</b></p>
<p> /test/api/v1/projects/qtip/<b>cases</b></p>
@@ -200,7 +200,7 @@
<section>
<h2>unit tests</h2>
<pre><code class="hljs" data-trim contenteditable>
-umry8364@umry8364-Latitude-E6400:~/Dev/OPNFV/releng/utils/test/result_collection_api$ ./run_test.sh
+umry8364@umry8364-Latitude-E6400:~/Dev/OPNFV/releng/utils/test/result_collection_api$ ./run_test.sh
Tests running...
WARNING:tornado.general:404 GET /dashboard/v1/results?case=vPing&pod=zte-pod1&version=C&installer=fuel&period=5 (127.0.0.1): Project name missing
WARNING:tornado.access:404 GET /dashboard/v1/results?case=vPing&pod=zte-pod1&version=C&installer=fuel&period=5 (127.0.0.1) 2.30ms
diff --git a/docs/com/pres/dockerslicing/dockerslicing.md b/docs/com/pres/dockerslicing/dockerslicing.md
index a66453248..0d1ce5e21 100644
--- a/docs/com/pres/dockerslicing/dockerslicing.md
+++ b/docs/com/pres/dockerslicing/dockerslicing.md
@@ -63,7 +63,7 @@
### 8 Functest containers
-```bash
+```shell
$ sudo docker search opnfv |grep functest-
opnfv/functest-core OPNFV Functest core image
opnfv/functest-restapi OPNFV Functest restapi image
diff --git a/docs/com/pres/euphrates_functest_evolution/euphrates.md b/docs/com/pres/euphrates_functest_evolution/euphrates.md
index 49aab16cd..2c3105539 100644
--- a/docs/com/pres/euphrates_functest_evolution/euphrates.md
+++ b/docs/com/pres/euphrates_functest_evolution/euphrates.md
@@ -120,7 +120,7 @@ export OS_IDENTITY_API_VERSION=3
#### Retrieve all the images
```
-mkdir -p images && wget -q -O- https://git.opnfv.org/functest/plain/functest/ci/download_images.sh | bash -s -- images && ls -1 images/*
+mkdir -p images && wget -q -O- https://git.opnfv.org/functest/plain/functest/ci/download_images.sh | sh -s -- images && ls -1 images/*
images/CentOS-7-aarch64-GenericCloud.qcow2
images/CentOS-7-aarch64-GenericCloud.qcow2.xz
images/CentOS-7-x86_64-GenericCloud.qcow2
diff --git a/docs/com/pres/gambia/gambia.md b/docs/com/pres/gambia/gambia.md
index cf5f0f1bf..4a65e9905 100644
--- a/docs/com/pres/gambia/gambia.md
+++ b/docs/com/pres/gambia/gambia.md
@@ -85,7 +85,7 @@ the classical ONAP testing virtual machine (**> 1GB**).
- all ONAP Robot Framework files retrieved from the original repositories
- testcases.yaml describing the testcases
-[Orange-OpenSource/xtesting-onap-robot](https://github.com/Orange-OpenSource/xtesting-onap-robot/)
+[Orange-OpenSource/xtesting-onap-robot](https://github.com/Orange-OpenSource/xtesting-onap-robot/)
### What's new in Xtesting?
@@ -94,7 +94,7 @@ the classical ONAP testing virtual machine (**> 1GB**).
full CI/CD toolchains in few minutes** (Jenkins, Minio, TestAPI, MongoDB and
Docker registry)
-```bash
+```shell
virtualenv xtesting
. xtesting/bin/activate
pip install ansible docker
diff --git a/docs/com/pres/oran/ftth.png b/docs/com/pres/oran/ftth.png
new file mode 100644
index 000000000..5b441d1de
--- /dev/null
+++ b/docs/com/pres/oran/ftth.png
Binary files differ
diff --git a/docs/com/pres/oran/oran.md b/docs/com/pres/oran/oran.md
new file mode 100644
index 000000000..87fba1306
--- /dev/null
+++ b/docs/com/pres/oran/oran.md
@@ -0,0 +1,90 @@
+---
+author: Cédric Ollivier
+title: OPNFV/CNTT CI assets
+date: 2020/12/01
+---
+## Continuous integration
+
+
+### A few CNTT reqs
+
+- OPNFV has built a complete CI/CD toolchain for continuously deploying and
+ testing cloud infrastructure
+- Reference conformance only requires the local deployment of the
+ same components
+- all test cases must be delivered as **Docker containers**, including a
+  common test case execution model and a unified way to manage all the interactions
+
+**Integrate smoothly and deploy fast**
+
+
+### How do OPNFV and CNTT help?
+
+- **Functest** offers a collection of state-of-the-art virtual infrastructure
+ test suites
+- **Xtesting** helps assemble sparse test cases and accelerates the adoption
+ of CI/CD best practices
+- **XtestingCI** eases deploying plug-and-play CI/CD toolchains anywhere in a
+ few commands
+- **CNTT** defines conformance suites and their playbooks leveraging these 3
+ testing frameworks
+
+**Leverage best opensource tools and practices**
+
+
+### Testing [FTTH](https://en.wikipedia.org/wiki/Fiber_to_the_x) equipment
+
+![FTTH](ftth.png)
+
+
+### Deploy your CI toolchain
+
+```
+virtualenv xtesting
+. xtesting/bin/activate
+pip install ansible
+ansible-galaxy install collivier.xtesting
+git clone https://gerrit.opnfv.org/gerrit/functest-xtesting functest-xtesting-src
+ansible-playbook functest-xtesting-src/ansible/site.yml
+deactivate
+rm -rf functest-xtesting-src xtesting
+```
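+
+A minimal check of the resulting toolchain (a sketch only: it assumes the
+deployed Jenkins is published on localhost:8080, which the playbook above
+does not state):
+
+```shell
+# print the HTTP status code returned by the freshly deployed Jenkins
+curl -s -o /dev/null -w "%{http_code}\n" http://127.0.0.1:8080/
+```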
+
+**More scenarios in [[1]](https://wiki.opnfv.org/pages/viewpage.action?pageId=32015004)**
+
+
+
+## Testing
+
+
+### Verif', Compliance and Interop'
+
+- verification allows skipping tests if optional services or capabilities are
+ missing
+- compliance forces the full API descriptions as currently proposed by
+ CNTT and then skips all optional capability testing
+- the opensource certifications are driven by simple interoperability testing
+  (RefStack and OVP cover less than 10% of Functest. **trustability?**)
+
+**Fine tune the same test frameworks**
+
+
+### [CNTT RC1](https://build.opnfv.org/ci/view/functest/job/functest-leguer-daily/12/)
+
+![RC1](rc1.png)
+
+
+
+## Conclusion
+
+
+### Key takeaways
+
+- **test integration requirements** are crucial to smoothly assemble all
+ test cases
+- leverage **existing** OPNFV testing knowledge (projects) and experience
+  (history) by utilising the OPNFV toolchain design already in place
+- **3000+** functional tests, **3 hours** of upstream API and dataplane benchmarks
+  and VNFs automatically onboarded and tested **can be reused as-is**
+
+**Keep it simple and do not reinvent the wheel**
diff --git a/docs/com/pres/oran/rc1.png b/docs/com/pres/oran/rc1.png
new file mode 100644
index 000000000..9179f3bb6
--- /dev/null
+++ b/docs/com/pres/oran/rc1.png
Binary files differ
diff --git a/docs/com/pres/vevent202010/index.html b/docs/com/pres/vevent202010/index.html
new file mode 100644
index 000000000..a24c721d2
--- /dev/null
+++ b/docs/com/pres/vevent202010/index.html
@@ -0,0 +1,52 @@
+<html>
+<head>
+<title>Orange CNTT RC1 Field Trial feedbacks</title>
+<meta name="author" content="Cédric Ollivier">
+<meta name="viewport"
+ content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
+<link rel="stylesheet" href="../reveal.js/css/reveal.css">
+<link rel="stylesheet" href="../reveal.js/css/theme/white.css">
+<link rel="stylesheet" href="../reveal.js/lib/css/zenburn.css">
+<script>
+var link = document.createElement( 'link' );
+link.rel = 'stylesheet';
+link.type = 'text/css';
+link.href = window.location.search.match( /print-pdf/gi ) ? '../reveal.js/css/print/pdf.css' : '../reveal.js/css/print/paper.css';
+document.getElementsByTagName( 'head' )[0].appendChild( link );
+</script>
+</head>
+<body>
+ <div class="reveal">
+ <div class="slides">
+ <section data-markdown="vevent202010.md" data-separator="^\n\n\n"
+ data-separator-vertical="^\n\n" data-separator-notes="^Note:"></section>
+ </div>
+ </div>
+ <script src="../reveal.js/lib/js/head.min.js"></script>
+ <script src="../reveal.js/js/reveal.js"></script>
+ <script>
+ Reveal.initialize({
+ dependencies : [ {
+ src : '../reveal.js/plugin/markdown/marked.js',
+ condition : function() {
+ return !!document.querySelector('[data-markdown]');
+ }
+ }, {
+ src : '../reveal.js/plugin/markdown/markdown.js',
+ condition : function() {
+ return !!document.querySelector('[data-markdown]');
+ }
+ }, {
+ src: '../reveal.js/plugin/highlight/highlight.js',
+ async: true,
+ callback: function() {
+ hljs.initHighlightingOnLoad();
+ }
+ }, {
+ src: '../reveal.js/plugin/notes/notes.js',
+ async: true
+ } ]
+ });
+ </script>
+</body>
+</html>
diff --git a/docs/com/pres/vevent202010/vevent202010.md b/docs/com/pres/vevent202010/vevent202010.md
new file mode 100644
index 000000000..b128210ac
--- /dev/null
+++ b/docs/com/pres/vevent202010/vevent202010.md
@@ -0,0 +1,62 @@
+# Orange CNTT RC1 Field Trial
+
+[Cédric Ollivier](mailto:cedric.ollivier@orange.com)
+
+2020/10/14
+
+
+
+## CNTT RC1 Field Trial
+
+
+### Orange CNTT RC1 Field Trial
+
+- helped detect a couple of issues in CNTT RC1
+- integrated cinder backup and nova instance_password in Orange IaaS
+- to fix 10 remaining single test failures (out of 2000+ functional tests, 3 hours
+  of benchmarking and 3 VNFs automatically onboarded and tested)
+- to enhance Functest juju_epc to pass proxies
+
+**99,999%**
+
+
+### Orange CNTT RC1 feedback
+
+- we easily executed the CNTT RC1 playbook as-is and no clear bug was found in
+ CNTT RC1 Baldy
+- CNTT RC1 is now used in our Orange IaaS verification in addition to the
+ classical Functest containers
+- CNTT RC1 is also executed to verify all deployments before onboarding any VNF
+- CNTT RC1 Baldy and CNTT RC2 Baraque are listed in **Orange RFP requirements**
+ (see last ONES
+ [[1]](http://testresults.opnfv.org/functest/ONES2020NA_OPNFV-CNTT_RFP.1080p.mp4))
+
+
+
+## What else?
+
+
+### Wish list
+
+- to integrate **more benchmarks** in CNTT conformance (e.g. disk benchmarking)
+- to switch from the current Kubernetes interoperability testing to a **true**
+ CNTT conformance suite
+- to build the first **VNF and CNF** conformance suites (**high priority**)
+
+**We need your contributions and help!
+[[2]](https://www.linkedin.com/pulse/call-functest-cntt-rc1-contributions-c%25C3%25A9dric-ollivier/)
+[[3]](https://www.linkedin.com/pulse/call-functest-cntt-rc2-contributions-c%25C3%25A9dric-ollivier/)**
+
+
+### New Functest opportunities
+
+- to implement live monitoring of your OpenStack and Kubernetes deployments via
+ a subset of Functest (healthcheck? new Rally tasks?)
+- to implement new Functest testcases to validate and verify OpenStack upgrades
+  (new Rally tasks?)
+
+**Try Functest Leguer, you will love it [[4]](https://www.linkedin.com/pulse/opnfv-functest-leguer-out-c%25C3%25A9dric-ollivier/)**
+
+
+
+## Thank you!
diff --git a/docs/release/release-notes/functest-release.rst b/docs/release/release-notes/functest-release.rst
index 00951b9a3..b5e228caa 100644
--- a/docs/release/release-notes/functest-release.rst
+++ b/docs/release/release-notes/functest-release.rst
@@ -141,8 +141,8 @@ Key benefits
* all testcases can run in parallel (tested with 4 executors in our gates)
* no remaining resources detected in our gates after multiple runs
-.. _`Functest toolchains`: https://wiki.opnfv.org/pages/viewpage.action?pageId=32015004
-.. _`Raspberry PI`: https://wiki.opnfv.org/display/functest/Run+Functest+containers+on+Raspberry+PI
+.. _`Functest toolchains`: https://github.com/collivier/ansible-role-xtesting
+.. _`Raspberry PI`: https://github.com/opnfv/functest/blob/master/PI.md
Code quality
------------
@@ -153,9 +153,8 @@ Code quality
Useful links
============
- * wiki project page: https://wiki.opnfv.org/display/functest/Opnfv+Functional+Testing
* Functest git repository: https://github.com/opnfv/functest
- * Functest CI dashboard: https://build.opnfv.org/ci/view/functest/
+ * Functest CI dashboard: http://104.154.71.112:8080/view/functest/
* JIRA dashboard: https://jira.opnfv.org/secure/Dashboard.jspa?selectPageId=10611
* Functest IRC channel: #opnfv-functest
* Reporting page: http://testresults.opnfv.org/reporting/master/functest/functest.html
diff --git a/docs/results/euphrates/5.0/apex.html b/docs/results/euphrates/5.0/apex.html
index 2a75054c3..34f0c1503 100644
--- a/docs/results/euphrates/5.0/apex.html
+++ b/docs/results/euphrates/5.0/apex.html
@@ -307,13 +307,13 @@ $(document).ready(function (){
<th width="10%">Iteration</th>
</tr>
<tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=http://testresultS.opnfv.org/reporting>os-odl-fdio-ha</a></td>
<td><div id="gaugeScenario5"></div></td>
@@ -327,7 +327,7 @@ $(document).ready(function (){
<td>4/18</td>
<td>3</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-apex-baremetal-daily-euphrates/108/console>os-nosdn-ovs_dpdk-ha</a></td>
<td><div id="gaugeScenario8"></div></td>
@@ -341,9 +341,9 @@ $(document).ready(function (){
<td>16/18</td>
<td>2</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-apex-baremetal-daily-euphrates/139/console>os-nosdn-fdio-ha</a></td>
<td><div id="gaugeScenario12"></div></td>
@@ -357,11 +357,11 @@ $(document).ready(function (){
<td>18/20</td>
<td>3</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-apex-baremetal-daily-euphrates/121/console>os-nosdn-bar-ha</a></td>
<td><div id="gaugeScenario17"></div></td>
@@ -369,7 +369,7 @@ $(document).ready(function (){
<td>26/27</td>
<td>4</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-apex-baremetal-daily-euphrates/134/console>os-nosdn-nofeature-ha</a></td>
<td><div id="gaugeScenario19"></div></td>
@@ -413,9 +413,9 @@ $(document).ready(function (){
<td>8/30</td>
<td>4</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-apex-baremetal-daily-euphrates/114/console>os-ovn-nofeature-noha</a></td>
<td><div id="gaugeScenario7"></div></td>
@@ -423,9 +423,9 @@ $(document).ready(function (){
<td>6/18</td>
<td>2</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-apex-baremetal-daily-euphrates/136/console>os-odl-nofeature-noha</a></td>
<td><div id="gaugeScenario10"></div></td>
@@ -439,9 +439,9 @@ $(document).ready(function (){
<td>4/18</td>
<td>3</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=http://testresultS.opnfv.org/reporting>os-odl-fdio_dvr-noha</a></td>
<td><div id="gaugeScenario14"></div></td>
@@ -461,7 +461,7 @@ $(document).ready(function (){
<td>18/18</td>
<td>3</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-apex-baremetal-daily-euphrates/128/console>os-nosdn-calipso-noha</a></td>
<td><div id="gaugeScenario18"></div></td>
@@ -469,7 +469,7 @@ $(document).ready(function (){
<td>18/18</td>
<td>3</td>
</tr><tr class="tr-ok">
-
+
</tr>
</table>
@@ -489,31 +489,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th>
</tr>
<tr class="tr-weather-weather">
@@ -534,31 +534,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Domino
*
@@ -582,37 +582,37 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
ODL
-
+
</th><th>
FDS
-
+
</th><th>
SNAPS
-
+
</th><th>
Domino
*
@@ -636,34 +636,34 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
ODL
-
+
</th><th>
SNAPS
-
+
</th><th>
Doctor
*
@@ -693,37 +693,37 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
ODL
-
+
</th><th>
FDS
-
+
</th><th>
SNAPS
-
+
</th><th>
Domino
*
@@ -747,31 +747,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th>
</tr>
<tr class="tr-weather-weather">
@@ -792,31 +792,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Doctor
*
@@ -843,31 +843,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th>
</tr>
<tr class="tr-weather-weather">
@@ -888,31 +888,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
ODL
-
+
</th><th>
SNAPS
-
+
</th><th>
Doctor
*
@@ -942,34 +942,34 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
ODL
-
+
</th><th>
SNAPS
-
+
</th><th>
Doctor
*
@@ -996,31 +996,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th>
</tr>
<tr class="tr-weather-weather">
@@ -1041,31 +1041,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Domino
*
@@ -1089,34 +1089,34 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
ODL
-
+
</th><th>
SNAPS
-
+
</th><th>
Doctor
*
@@ -1143,37 +1143,37 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
ODL
-
+
</th><th>
FDS
-
+
</th><th>
SNAPS
-
+
</th><th>
Domino
*
@@ -1197,31 +1197,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Doctor
*
@@ -1251,31 +1251,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Doctor
*
@@ -1302,31 +1302,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Doctor
*
@@ -1356,31 +1356,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Doctor
*
@@ -1407,31 +1407,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Doctor
*
diff --git a/docs/results/euphrates/5.0/compass.html b/docs/results/euphrates/5.0/compass.html
index d62ba4d2e..b7f7a20ea 100644
--- a/docs/results/euphrates/5.0/compass.html
+++ b/docs/results/euphrates/5.0/compass.html
@@ -223,11 +223,11 @@ $(document).ready(function (){
<th width="10%">Iteration</th>
</tr>
<tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-compass-virtual-daily-euphrates/81/console>os-odl_l3-nofeature-ha</a></td>
<td><div id="gaugeScenario4"></div></td>
@@ -247,7 +247,7 @@ $(document).ready(function (){
<td>26/30</td>
<td>9</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-compass-virtual-daily-euphrates/72/console>os-nosdn-ovs_dpdk-ha</a></td>
<td><div id="gaugeScenario8"></div></td>
@@ -255,9 +255,9 @@ $(document).ready(function (){
<td>2/18</td>
<td>3</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-compass-baremetal-daily-euphrates/24/console>os-odl_l2-moon-ha</a></td>
<td><div id="gaugeScenario11"></div></td>
@@ -301,11 +301,11 @@ $(document).ready(function (){
<td>24/30</td>
<td>4</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-compass-virtual-daily-euphrates/86/console>os-nosdn-nofeature-noha</a></td>
<td><div id="gaugeScenario7"></div></td>
@@ -313,7 +313,7 @@ $(document).ready(function (){
<td>26/27</td>
<td>6</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-compass-virtual-daily-euphrates/82/console>os-nosdn-ovs_dpdk-noha</a></td>
<td><div id="gaugeScenario9"></div></td>
@@ -327,9 +327,9 @@ $(document).ready(function (){
<td>14/18</td>
<td>3</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr>
</table>
@@ -349,31 +349,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Domino
*
@@ -397,34 +397,34 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
ODL
-
+
</th><th>
SNAPS
-
+
</th><th>
SFC
*
@@ -451,34 +451,34 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
ODL
-
+
</th><th>
SNAPS
-
+
</th><th>
Domino
*
@@ -502,31 +502,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
ODL
-
+
</th><th>
SNAPS
-
+
</th><th>
Domino
*
@@ -550,31 +550,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Domino
*
@@ -598,34 +598,34 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
ODL
-
+
</th><th>
SNAPS
-
+
</th><th>
SFC
*
@@ -652,31 +652,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Domino
*
@@ -700,31 +700,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th>
</tr>
<tr class="tr-weather-weather">
@@ -745,31 +745,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th>
</tr>
<tr class="tr-weather-weather">
@@ -790,31 +790,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
ODL
-
+
</th><th>
SNAPS
-
+
</th><th>
Domino
*
@@ -838,34 +838,34 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
ODL
-
+
</th><th>
SNAPS
-
+
</th><th>
Domino
*
@@ -889,31 +889,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Domino
*
diff --git a/docs/results/euphrates/5.0/daisy.html b/docs/results/euphrates/5.0/daisy.html
index 6de8d0895..09580fc19 100644
--- a/docs/results/euphrates/5.0/daisy.html
+++ b/docs/results/euphrates/5.0/daisy.html
@@ -127,9 +127,9 @@ $(document).ready(function (){
<th width="10%">Iteration</th>
</tr>
<tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr>
</table>
@@ -149,31 +149,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Domino
*
@@ -206,34 +206,34 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
ODL
-
+
</th><th>
SNAPS
-
+
</th><th>
Domino
*
diff --git a/docs/results/euphrates/5.0/fuel@aarch64.html b/docs/results/euphrates/5.0/fuel@aarch64.html
index c8a2f2db0..aa9f1eb66 100644
--- a/docs/results/euphrates/5.0/fuel@aarch64.html
+++ b/docs/results/euphrates/5.0/fuel@aarch64.html
@@ -115,7 +115,7 @@ $(document).ready(function (){
<th width="10%">Iteration</th>
</tr>
<tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-fuel-armband-baremetal-arm-daily-euphrates/31/console>os-nosdn-nofeature-ha</a></td>
<td><div id="gaugeScenario2"></div></td>
@@ -147,9 +147,9 @@ $(document).ready(function (){
<td>1/9</td>
<td>1</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr>
</table>
@@ -169,31 +169,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Promise
*
@@ -220,31 +220,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Promise
*
@@ -283,34 +283,34 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
ODL
-
+
</th><th>
SNAPS
-
+
</th><th>
Promise
*
diff --git a/docs/results/euphrates/5.0/fuel@x86.html b/docs/results/euphrates/5.0/fuel@x86.html
index 3b918dd74..52b67b37f 100644
--- a/docs/results/euphrates/5.0/fuel@x86.html
+++ b/docs/results/euphrates/5.0/fuel@x86.html
@@ -151,7 +151,7 @@ $(document).ready(function (){
<th width="10%">Iteration</th>
</tr>
<tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-fuel-baremetal-daily-euphrates/40/console>os-odl-nofeature-ha</a></td>
<td><div id="gaugeScenario2"></div></td>
@@ -159,9 +159,9 @@ $(document).ready(function (){
<td>29/30</td>
<td>6</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-fuel-baremetal-daily-euphrates/38/console>os-nosdn-ovs-ha</a></td>
<td><div id="gaugeScenario5"></div></td>
@@ -193,7 +193,7 @@ $(document).ready(function (){
<td>25/27</td>
<td>10</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-fuel-virtual-daily-euphrates/91/console>os-odl-nofeature-noha</a></td>
<td><div id="gaugeScenario3"></div></td>
@@ -207,9 +207,9 @@ $(document).ready(function (){
<td>27/27</td>
<td>9</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr>
</table>
@@ -229,31 +229,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Promise
*
@@ -280,34 +280,34 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
ODL
-
+
</th><th>
SNAPS
-
+
</th><th>
Promise
*
@@ -334,34 +334,34 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
ODL
-
+
</th><th>
SNAPS
-
+
</th><th>
Promise
*
@@ -388,31 +388,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Promise
*
@@ -439,31 +439,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Promise
*
@@ -490,31 +490,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Promise
*
diff --git a/docs/results/euphrates/5.0/joid.html b/docs/results/euphrates/5.0/joid.html
index d58efcca1..9b6d0f4d0 100644
--- a/docs/results/euphrates/5.0/joid.html
+++ b/docs/results/euphrates/5.0/joid.html
@@ -163,7 +163,7 @@ $(document).ready(function (){
<th width="10%">Iteration</th>
</tr>
<tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-joid-baremetal-daily-euphrates/84/console>os-ocl-nofeature-ha</a></td>
<td><div id="gaugeScenario2"></div></td>
@@ -171,7 +171,7 @@ $(document).ready(function (){
<td>3/27</td>
<td>10</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-joid-baremetal-daily-euphrates/67/console>os-nosdn-openbaton-ha</a></td>
<td><div id="gaugeScenario4"></div></td>
@@ -185,7 +185,7 @@ $(document).ready(function (){
<td>2/15</td>
<td>4</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-joid-baremetal-daily-euphrates/81/console>os-nosdn-nofeature-ha</a></td>
<td><div id="gaugeScenario7"></div></td>
@@ -211,7 +211,7 @@ $(document).ready(function (){
<td>3/27</td>
<td>9</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-joid-baremetal-daily-euphrates/90/console>os-nosdn-nofeature-noha</a></td>
<td><div id="gaugeScenario3"></div></td>
@@ -219,9 +219,9 @@ $(document).ready(function (){
<td>15/27</td>
<td>9</td>
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
-
+
</tr><tr class="tr-ok">
<td><a href=https://build.opnfv.org/ci/view/functest/job/functest-joid-baremetal-daily-euphrates/88/console>os-nosdn-lxd-noha</a></td>
<td><div id="gaugeScenario6"></div></td>
@@ -229,7 +229,7 @@ $(document).ready(function (){
<td>3/15</td>
<td>9</td>
</tr><tr class="tr-ok">
-
+
</tr>
</table>
@@ -249,31 +249,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th>
</tr>
<tr class="tr-weather-weather">
@@ -294,31 +294,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th>
</tr>
<tr class="tr-weather-weather">
@@ -339,31 +339,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Promise
*
@@ -390,31 +390,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Promise
*
@@ -441,19 +441,19 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
Promise
*
@@ -480,19 +480,19 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
Promise
*
@@ -519,31 +519,31 @@ $(document).ready(function (){
<tr>
<th>
Health (connection)
-
+
</th><th>
Health (api)
-
+
</th><th>
Health (dhcp)
-
+
</th><th>
vPing (ssh)
-
+
</th><th>
vPing (userdata)
-
+
</th><th>
Tempest (smoke)
-
+
</th><th>
Rally (smoke)
-
+
</th><th>
Refstack
-
+
</th><th>
SNAPS
-
+
</th><th>
Promise
*
diff --git a/docs/results/js/default.css b/docs/results/js/default.css
index e32fa5fba..cbb564326 100644
--- a/docs/results/js/default.css
+++ b/docs/results/js/default.css
@@ -55,7 +55,7 @@ td{
background-color: #0095a2;
}
-h1 {
+h1 {
display: block;
font-size: 2em;
margin-top: 0.67em;
diff --git a/docs/results/js/trend.js b/docs/results/js/trend.js
index f24213382..47ee4d906 100644
--- a/docs/results/js/trend.js
+++ b/docs/results/js/trend.js
@@ -63,13 +63,13 @@ var trend = function(container, trend_data) {
.attr("d", valueline(trend_data))
.attr("stroke", "steelblue")
.attr("fill", "none");
-
+
trend_svg.selectAll(".dot")
.data(trend_data)
.enter().append("circle")
.attr("r", 2.5)
.attr("cx", function(d) { return trend_x(d.date); })
- .attr("cy", function(d) { return trend_y(d.score); });
+ .attr("cy", function(d) { return trend_y(d.score); });
return trend;
}
diff --git a/docs/testing/developer/devguide/index.rst b/docs/testing/developer/devguide/index.rst
index 3978a444c..da5485ef0 100644
--- a/docs/testing/developer/devguide/index.rst
+++ b/docs/testing/developer/devguide/index.rst
@@ -208,13 +208,6 @@ Additional dashboarding is managed at the testing group level, see `[5]`_ for
details.
-=======
-How TOs
-=======
-
-See How to section on Functest wiki `[6]`_
-
-
==========
References
==========
@@ -229,6 +222,4 @@ _`[4]`: https://wiki.opnfv.org/display/functest/2017+Beijing?preview=%2F11699623
_`[5]`: https://lfanalytics.io/projects/lfn%2Fopnfv/dashboard
-_`[6]`: https://wiki.opnfv.org/pages/viewpage.action?pageId=7768932
-
IRC support chan: #opnfv-functest
diff --git a/docs/testing/user/configguide/configguide.rst b/docs/testing/user/configguide/configguide.rst
index 55585ed64..47c25adb1 100644
--- a/docs/testing/user/configguide/configguide.rst
+++ b/docs/testing/user/configguide/configguide.rst
@@ -52,7 +52,7 @@ See section on OpenStack credentials for details.
Create a directory for the different images (attached as a Docker volume)::
- mkdir -p images && wget -q -O- https://git.opnfv.org/functest/plain/functest/ci/download_images.sh | bash -s -- images && ls -1 images/*
+ mkdir -p images && wget -q -O- https://git.opnfv.org/functest/plain/functest/ci/download_images.sh | sh -s -- images && ls -1 images/*
images/cirros-0.5.1-aarch64-disk.img
images/cirros-0.5.1-x86_64-disk.img
diff --git a/docs/testing/user/userguide/index.rst b/docs/testing/user/userguide/index.rst
index 07ab0e62d..1e73cd622 100644
--- a/docs/testing/user/userguide/index.rst
+++ b/docs/testing/user/userguide/index.rst
@@ -40,7 +40,7 @@ References
`[11]`_: Robot Framework web site
-`[13]`_: SNAPS wiki
+`[13]`_: SNAPS
`[14]`_: vRouter
@@ -64,12 +64,12 @@ IRC support chan: #opnfv-functest
.. _`[9]`: https://github.com/openstack/interop
.. _`[10]`: https://github.com/openstack/interop/blob/master/2016.08/procedure.rst
.. _`[11]`: https://robotframework.org/
-.. _`[13]`: https://wiki.opnfv.org/display/PROJ/SNAPS-OO
+.. _`[13]`: https://git.opnfv.org/snaps/
.. _`[14]`: https://github.com/oolorg/opnfv-functest-vrouter
.. _`[15]`: https://aptira.com/testing-openstack-tempest-part-1/
.. _`[16]`: http://testresults.opnfv.org/test/
.. _`OPNFV main site`: https://www.opnfv.org/
-.. _`Functest page`: https://wiki.opnfv.org/display/functest/Opnfv+Functional+Testing
+.. _`Functest page`: https://github.com/opnfv/functest/
.. _`OpenRC`: http://docs.openstack.org/user-guide/common/cli_set_environment_variables_using_openstack_rc.html
.. _`Rally installation procedure`: https://rally.readthedocs.org/en/latest/tutorial/step_0_installation.html
.. _`config_functest.yaml` : https://github.com/opnfv/functest/blob/master/functest/ci/config_functest.yaml
diff --git a/docs/testing/user/userguide/test_details.rst b/docs/testing/user/userguide/test_details.rst
index 7c4aba0a7..98247d488 100644
--- a/docs/testing/user/userguide/test_details.rst
+++ b/docs/testing/user/userguide/test_details.rst
@@ -405,7 +405,7 @@ The kubernetes testcases are distributed across various Tiers:
.. _`[9]`: https://github.com/openstack/interop
.. _`[10]`: https://github.com/openstack/interop/blob/master/2016.08/procedure.rst
.. _`[11]`: https://robotframework.org/
-.. _`[13]`: https://wiki.opnfv.org/display/PROJ/SNAPS-OO
+.. _`[13]`: https://git.opnfv.org/snaps/
.. _`[14]`: https://github.com/oolorg/opnfv-functest-vrouter
.. _`[15]`: https://github.com/RebacaInc/abot_charm
.. _`[16]`: https://github.com/kubernetes/community/blob/master/contributors/devel/sig-testing/e2e-tests.md
diff --git a/docs/testing/user/userguide/troubleshooting.rst b/docs/testing/user/userguide/troubleshooting.rst
index 663051f97..d857ed4c4 100644
--- a/docs/testing/user/userguide/troubleshooting.rst
+++ b/docs/testing/user/userguide/troubleshooting.rst
@@ -261,40 +261,28 @@ Functest offers a possibility to test a customized list of Tempest test cases.
To enable that, add a new entry in docker/smoke/testcases.yaml on the
"smoke" container with the following content::
- -
- case_name: tempest_custom
- project_name: functest
- criteria: 100
- blocking: false
- description: >-
- The test case allows running a customized list of tempest
- test cases
- dependencies:
- installer: ''
- scenario: ''
- run:
- module: 'functest.opnfv_tests.openstack.tempest.tempest'
- class: 'TempestCustom'
-
-Also, a list of the Tempest test cases must be provided to the container or
-modify the existing one in
-/usr/lib/python3.8/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt
-
-Example of custom list of tests 'my-custom-tempest-tests.txt'::
-
- tempest.scenario.test_server_basic_ops.TestServerBasicOps.test_server_basic_ops[compute,id-7fff3fb3-91d8-4fd0-bd7d-0204f1f180ba,network,smoke]
- tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_network_basic_ops[compute,id-f323b3ba-82f8-4db7-8ea6-6a895869ec49,network,smoke]
+ -
+ case_name: tempest_custom
+ project_name: functest
+ criteria: 100
+ blocking: false
+ description: >-
+ The test case allows running a customized list of tempest
+ test cases
+ run:
+ name: tempest_common
+ args:
+ mode: "tempest.scenario.test_server_basic_ops.TestServerBasicOps.test_server_basic_ops|\
+ tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_network_basic_ops"
This is an example of running a customized list of Tempest tests in Functest::
sudo docker run --env-file env \
-v $(pwd)/openstack.creds:/home/opnfv/functest/conf/env_file \
-v $(pwd)/images:/home/opnfv/functest/images \
- -v $(pwd)/my-custom-testcases.yaml:/usr/lib/python3.8/site-packages/functest/ci/testcases.yaml \
- -v $(pwd)/my-custom-tempest-tests.txt:/usr/lib/python3.8/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/test_list.txt \
+ -v $(pwd)/my-custom-testcases.yaml:/usr/lib/python3.8/site-packages/xtesting/ci/testcases.yaml \
opnfv/functest-smoke run_tests -t tempest_custom
-
Rally
^^^^^
diff --git a/elements/functest/element-deps b/elements/functest/element-deps
new file mode 100644
index 000000000..d06e3d771
--- /dev/null
+++ b/elements/functest/element-deps
@@ -0,0 +1 @@
+xtestingci
diff --git a/elements/functest/install.d/16-functest b/elements/functest/install.d/16-functest
new file mode 100755
index 000000000..97d37e499
--- /dev/null
+++ b/elements/functest/install.d/16-functest
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+download-frozen-image-v2.sh /data/docker/functest \
+ opnfv/functest-healthcheck:latest \
+ opnfv/functest-smoke:latest \
+ opnfv/functest-smoke-cntt:latest \
+ opnfv/functest-benchmarking:latest \
+ opnfv/functest-benchmarking-cntt:latest \
+ opnfv/functest-vnf:latest
+mkdir -p /data/images && wget -q -O- https://git.opnfv.org/functest/plain/functest/ci/download_images.sh | sh -s -- /data/images && ls -1 /data/images/*
+git clone https://git.opnfv.org/functest /home/debian/functest
+chown -R 1000:1000 /home/debian/functest
+
+exit 0
diff --git a/functest/ci/add_proxy.sh b/functest/ci/add_proxy.sh
index 082141073..9d7db22e4 100644
--- a/functest/ci/add_proxy.sh
+++ b/functest/ci/add_proxy.sh
@@ -1,8 +1,9 @@
-#!/bin/bash
+#!/bin/sh
set -e
-pushd "${1:-/home/opnfv/functest/images}" > /dev/null
+initdir=$(pwd)
+cd "${1:-/home/opnfv/functest/images}"
http_proxy_host=${http_proxy_host:-proxy}
http_proxy_port=${http_proxy_port:-8080}
@@ -89,7 +90,7 @@ for image in $images; do
fi
guestmount -a "${image}" -i --rw "${tmpdir}"
add_proxy "${tmpdir}/etc/environment"
- if [[ ${image} == "ubuntu"* ]]; then
+ if expr "$image" : 'ubuntu' ; then
add_proxy_apt "${tmpdir}/etc/apt/apt.conf"
add_proxy_juju_env "${tmpdir}/etc/juju-proxy.conf"
add_proxy_juju_systemd "${tmpdir}/etc/juju-proxy-systemd.conf"
@@ -133,5 +134,5 @@ else
fi
rmdir "${tmpdir}"
-popd > /dev/null
+cd "${initdir}"
diff --git a/functest/ci/convert_images.sh b/functest/ci/convert_images.sh
index d7ed3887b..2159d2a60 100644
--- a/functest/ci/convert_images.sh
+++ b/functest/ci/convert_images.sh
@@ -1,11 +1,13 @@
-#!/bin/bash
+#!/bin/sh
set -ex
-pushd "${1:-/home/opnfv/functest/images}"
+initdir=$(pwd)
+
+cd "${1:-/home/opnfv/functest/images}"
for i in *.img *.qcow2; do
qemu-img convert -f qcow2 -O vmdk "$i" "${i%.*}.vmdk"
done
-popd
+cd "${initdir}"
diff --git a/functest/ci/download_images.sh b/functest/ci/download_images.sh
index 358ccf916..a56c02b60 100644
--- a/functest/ci/download_images.sh
+++ b/functest/ci/download_images.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
set -ex
@@ -6,10 +6,11 @@ wget_opts="-N --tries=1 --connect-timeout=30"
[ -t 1 ] || wget_opts="${wget_opts} --progress=dot:giga"
cat << EOF | wget ${wget_opts} -i - -P ${1:-/home/opnfv/functest/images}
-http://download.cirros-cloud.net/0.5.1/cirros-0.5.1-x86_64-disk.img
+http://download.cirros-cloud.net/0.6.1/cirros-0.6.1-x86_64-disk.img
https://cloud-images.ubuntu.com/releases/14.04/release/ubuntu-14.04-server-cloudimg-amd64-disk1.img
https://cloud-images.ubuntu.com/releases/16.04/release/ubuntu-16.04-server-cloudimg-amd64-disk1.img
-http://download.cirros-cloud.net/0.5.1/cirros-0.5.1-aarch64-disk.img
+https://cloud-images.ubuntu.com/releases/18.04/release/ubuntu-18.04-server-cloudimg-amd64.img
+http://download.cirros-cloud.net/0.6.1/cirros-0.6.1-aarch64-disk.img
http://repository.cloudifysource.org/cloudify/19.01.24/community-release/cloudify-docker-manager-community-19.01.24.tar
http://testresults.opnfv.org/functest/vyos-1.1.8-amd64.qcow2
http://testresults.opnfv.org/functest/shaker-image-1.3.4+stretch.qcow2
diff --git a/functest/ci/testcases.yaml b/functest/ci/testcases.yaml
index 282590a13..acf5a7199 100644
--- a/functest/ci/testcases.yaml
+++ b/functest/ci/testcases.yaml
@@ -27,6 +27,8 @@ tiers:
It creates and configures all tenant network ressources
required by advanced testcases (subnet, network and
router).
+ dependencies:
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: tenantnetwork1
@@ -39,6 +41,8 @@ tiers:
It creates new user/project before creating and configuring
all tenant network ressources required by a testcase
(subnet, network and router).
+ dependencies:
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: tenantnetwork2
@@ -173,9 +177,9 @@ tiers:
dependencies:
- DASHBOARD_URL: '^(?!\s*$).+'
run:
- name: tempest_horizon
+ name: tempest_common
args:
- mode: '^tempest_horizon.'
+ mode: '^tempest.scenario.test_dashboard_basic_ops.'
-
name: smoke
@@ -287,11 +291,11 @@ tiers:
project_name: functest
criteria: 100
blocking: false
- deny_skipping: true
- tests_count: 222
description: >-
This test case runs a sub group of tests of the OpenStack
Compute testcases.
+ dependencies:
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: refstack
args:
@@ -304,8 +308,6 @@ tiers:
project_name: functest
criteria: 100
blocking: false
- deny_skipping: true
- tests_count: 58
description: >-
This test case runs a sub group of tests of the OpenStack
Object testcases.
@@ -321,11 +323,11 @@ tiers:
project_name: functest
criteria: 100
blocking: false
- deny_skipping: true
- tests_count: 278
description: >-
This test case runs a sub group of tests of the OpenStack
Platform testcases.
+ dependencies:
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: refstack
args:
@@ -400,7 +402,31 @@ tiers:
(?!.*test_delete_auto_allocated_topology)\
(?!.*test_create_network_provider_segmentation_id)\
(?!.*compute.test_floating_ips_rbac)\
+ (?!.*test_reset_network)\
(?!.*test_create_image_from_volume_backed_server)\
+ (?!.*test_network_ip_availability_rbac.NetworkIpAvailabilityExtRbacTest.test_get_network_ip_availabilities)\
+ (?!.*test_policy_bandwidth_limit_rule_rbac)\
+ (?!.*test_policy_minimum_bandwidth_rule_rbac)\
+ (?!.*test_group_type_specs)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_create_group_type)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_delete_group_type)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_update_group_type)\
+ (?!.*test_group_snapshots_rbac)\
+ (?!.*test_groups_rbac)\
+ (?!.*test_quota_classes_rbac)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_create_interface)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_delete_interface)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_show_interface)\
+ (?!.*test_user_messages_rbac)\
+ (?!.*test_volume_actions_rbac.VolumesActionsV310RbacTest)\
+ (?!.*test_volume_actions_rbac.VolumesActionsV312RbacTest)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_delete_volume_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_list_volumes_details_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_show_volume_details_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_update_volume_image_metadata)\
+ (?!.*test_volumes_backup_rbac.VolumesBackupsV318RbacTest)\
+ (?!.*test_volumes_backup_rbac.VolumesBackupsV39RbacTest)\
+ (?!.*test_volume_types_rbac)\
(?=.*[.*\bslow\b.*])\
(^patrole_tempest_plugin.tests.api.(compute|image|network|volume))"
option:
@@ -422,7 +448,31 @@ tiers:
(?!.*test_delete_auto_allocated_topology)\
(?!.*test_create_network_provider_segmentation_id)\
(?!.*compute.test_floating_ips_rbac)\
+ (?!.*test_reset_network)\
(?!.*test_create_image_from_volume_backed_server)\
+ (?!.*test_network_ip_availability_rbac.NetworkIpAvailabilityExtRbacTest.test_get_network_ip_availabilities)\
+ (?!.*test_policy_bandwidth_limit_rule_rbac)\
+ (?!.*test_policy_minimum_bandwidth_rule_rbac)\
+ (?!.*test_group_type_specs)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_create_group_type)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_delete_group_type)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_update_group_type)\
+ (?!.*test_group_snapshots_rbac)\
+ (?!.*test_groups_rbac)\
+ (?!.*test_quota_classes_rbac)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_create_interface)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_delete_interface)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_show_interface)\
+ (?!.*test_user_messages_rbac)\
+ (?!.*test_volume_actions_rbac.VolumesActionsV310RbacTest)\
+ (?!.*test_volume_actions_rbac.VolumesActionsV312RbacTest)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_delete_volume_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_list_volumes_details_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_show_volume_details_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_update_volume_image_metadata)\
+ (?!.*test_volumes_backup_rbac.VolumesBackupsV318RbacTest)\
+ (?!.*test_volumes_backup_rbac.VolumesBackupsV39RbacTest)\
+ (?!.*test_volume_types_rbac)\
(?=.*[.*\bslow\b.*])\
(^patrole_tempest_plugin.tests.api.(compute|image|network|volume))"
option:
@@ -444,7 +494,31 @@ tiers:
(?!.*test_delete_auto_allocated_topology)\
(?!.*test_create_network_provider_segmentation_id)\
(?!.*compute.test_floating_ips_rbac)\
+ (?!.*test_reset_network)\
(?!.*test_create_image_from_volume_backed_server)\
+ (?!.*test_network_ip_availability_rbac.NetworkIpAvailabilityExtRbacTest.test_get_network_ip_availabilities)\
+ (?!.*test_policy_bandwidth_limit_rule_rbac)\
+ (?!.*test_policy_minimum_bandwidth_rule_rbac)\
+ (?!.*test_group_type_specs)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_create_group_type)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_delete_group_type)\
+ (?!.*test_groups_rbac.GroupTypesV3RbacTest.test_update_group_type)\
+ (?!.*test_group_snapshots_rbac)\
+ (?!.*test_groups_rbac)\
+ (?!.*test_quota_classes_rbac)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_create_interface)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_delete_interface)\
+ (?!.*test_server_misc_policy_actions_rbac.MiscPolicyActionsNetworkRbacTest.test_show_interface)\
+ (?!.*test_user_messages_rbac)\
+ (?!.*test_volume_actions_rbac.VolumesActionsV310RbacTest)\
+ (?!.*test_volume_actions_rbac.VolumesActionsV312RbacTest)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_delete_volume_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_list_volumes_details_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_show_volume_details_image_metadata)\
+ (?!.*test_volume_metadata_rbac.VolumeMetadataV3RbacTest.test_update_volume_image_metadata)\
+ (?!.*test_volumes_backup_rbac.VolumesBackupsV318RbacTest)\
+ (?!.*test_volumes_backup_rbac.VolumesBackupsV39RbacTest)\
+ (?!.*test_volume_types_rbac)\
(?=.*[.*\bslow\b.*])\
(^patrole_tempest_plugin.tests.api.(compute|image|network|volume))"
option:
@@ -517,7 +591,7 @@ tiers:
criteria: 100
blocking: false
deny_skipping: true
- tests_count: 523
+ tests_count: 564
description: >-
This test case runs the Tempest suite proposed by the
Neutron project. The list of test cases is generated by
@@ -540,6 +614,7 @@ tiers:
(?!.*test_conntrack_helper)\
(?!.*test_floating_ips.FloatingIPPoolTestJSON)\
(?!.*test_floating_ips.FloatingIPTestJSON.test_create_update_floatingip_port_details)\
+ (?!.*test_local_ip)\
(?!.*test_metering_extensions)\
(?!.*test_metering_negative)\
(?!.*test_networks.NetworksSearchCriteriaTest.test_list_validation_filters)\
@@ -584,7 +659,7 @@ tiers:
criteria: 100
blocking: false
deny_skipping: true
- tests_count: 9
+ tests_count: 10
description: >-
This test case runs the Tempest suite proposed by the
Cinder project.
@@ -595,6 +670,8 @@ tiers:
(?!.*test_consistencygroups)\
(?!.*test_backup_crossproject_admin_negative)\
(?!.*test_backup_crossproject_user_negative)\
+ (?!.*test_volume_encrypted.TestEncryptedCinderVolumes)\
+ (?!.*rbac)\
(^cinder_tempest_plugin.)"
option:
- '--concurrency=4'
@@ -613,6 +690,7 @@ tiers:
name: tempest_common
args:
mode: "(?!.*api.identity.v3.test_oauth1_tokens)\
+ (?!.*rbac)\
(?!.*scenario.test_federated_authentication)\
keystone_tempest_plugin."
option:
@@ -624,7 +702,7 @@ tiers:
criteria: 100
blocking: false
deny_skipping: true
- tests_count: 119
+ tests_count: 124
description: >-
This test case runs the Tempest suite proposed by the
Heat project.
@@ -633,6 +711,10 @@ tiers:
args:
mode: "(?!.*functional.test_lbaasv2)\
(?!.*functional.test_encryption_vol_type)\
+ (?!.*functional.test_event_sinks)\
+ (?!.*functional.test_software_config.ZaqarSignalTransportTest)\
+ (?!.*functional.test_stack_events)\
+ (?!.*functional.test_waitcondition)\
(?!.*RemoteStackTest.test_stack_create_with_cloud_credential)\
(?!.*scenario.test_aodh_alarm)\
(?!.*tests.scenario.test_autoscaling_lb)\
@@ -673,7 +755,7 @@ tiers:
criteria: 100
blocking: false
deny_skipping: true
- tests_count: 1280
+ tests_count: 1271
description: >-
The list of test cases is generated by
Tempest automatically and depends on the parameters of
@@ -682,7 +764,7 @@ tiers:
run:
name: tempest_common
args:
- mode: "(?!.*test_fixed_ips)\
+ mode: "(?!.*admin.test_agents)(?!.*test_fixed_ips)\
(?!.*test_fixed_ips_negative)\
(?!.*test_auto_allocate_network)(?!.*test_floating_ips_bulk)\
(?!.*test_flavors_microversions.FlavorsV255TestJSON)\
@@ -697,6 +779,7 @@ tiers:
(?!.*test_live_migration.LiveMigrationTest.test_volume_backed_live_migration)\
(?!.*test_live_migration.LiveMigrationRemoteConsolesV26Test)\
(?!.*test_quotas.QuotasAdminTestV257)\
+ (?!.*test_servers.ServersAdminTestJSON.test_reset_network_inject_network_info)\
(?!.*certificates.test_certificates)\
(?!.*test_quotas_negative.QuotasSecurityGroupAdminNegativeTest)\
(?!.*test_novnc)(?!.*test_server_personality)\
@@ -717,19 +800,38 @@ tiers:
(?!.*test_security_groups_negative.SecurityGroupsNegativeTestJSON.test_update_security_group_with_invalid_sg_des)\
(?!.*test_security_groups_negative.SecurityGroupsNegativeTestJSON.test_update_security_group_with_invalid_sg_id)\
(?!.*test_security_groups_negative.SecurityGroupsNegativeTestJSON.test_update_security_group_with_invalid_sg_name)\
+ (?!.*test_create_server.ServersTestFqdnHostnames.test_create_server_with_fqdn_name)\
+ (?!.*test_server_metadata.ServerMetadataTestJSON)\
+ (?!.*test_server_metadata_negative.ServerMetadataNegativeTestJSON.test_delete_metadata_non_existent_server)\
+ (?!.*test_server_metadata_negative.ServerMetadataNegativeTestJSON.test_metadata_items_limit)\
+ (?!.*test_server_metadata_negative.ServerMetadataNegativeTestJSON.test_set_metadata_invalid_key)\
+ (?!.*test_server_metadata_negative.ServerMetadataNegativeTestJSON.test_set_metadata_non_existent_server)\
+ (?!.*test_server_metadata_negative.ServerMetadataNegativeTestJSON.test_set_server_metadata_blank_key)\
+ (?!.*test_server_metadata_negative.ServerMetadataNegativeTestJSON.test_set_server_metadata_missing_metadata)\
+ (?!.*test_server_metadata_negative.ServerMetadataNegativeTestJSON.test_update_metadata_non_existent_server)\
+ (?!.*test_server_metadata_negative.ServerMetadataNegativeTestJSON.test_update_metadata_with_blank_key)\
(?!.*test_list_server_filters.ListServerFiltersTestJSON.test_list_servers_filtered_by_ip_regex)\
(?!.*compute.test_virtual_interfaces)(?!.*compute.test_virtual_interfaces_negative)\
(?!.*compute.test_networks)\
(?!.*test_attach_volume.AttachVolumeMultiAttach)\
(?!.*identity.admin.v2)(?!.*identity.v2)\
+ (?!.*identity.v3.test_access_rules)\
+ (?!.*identity.v3.test_application_credentials.ApplicationCredentialsV3Test.test_create_application_credential_access_rules)\
(?!.*image.v1)\
+ (?!.*image.v2.admin.test_images.ImportCopyImagesTest)\
+ (?!.*image.v2.test_images_negative.ImagesNegativeTest.test_create_image_reserved_property)\
+ (?!.*image.v2.test_images_negative.ImagesNegativeTest.test_update_image_reserved_property)\
+ (?!.*image.v2.test_images_negative.ImportImagesNegativeTest.test_image_web_download_import_with_bad_url)\
(?!.*image.v2.test_images.ImportImagesTest)\
+ (?!.*image.v2.test_images.MultiStoresImportImages)\
(?!.*admin.test_dhcp_agent_scheduler)\
(?!.*admin.test_routers_dvr)\
(?!.*test_metering_extensions)(?!.*network.test_tags)\
(?!.*test_routers_negative.DvrRoutersNegativeTest)\
(?!.*test_routers.RoutersIpV6Test.test_create_router_set_gateway_with_fixed_ip)\
(?!.*test_routers.RoutersTest.test_create_router_set_gateway_with_fixed_ip)\
+ (?!.*test_object_services.ObjectTest.test_create_object_with_transfer_encoding)\
+ (?!.*test_encrypted_volumes_extend)\
(?!.*test_group_snapshots.GroupSnapshotsV319Test.test_reset_group_snapshot_status)\
(?!.*test_multi_backend)\
(?!.*test_volume_retype.VolumeRetypeWithMigrationTest)\
@@ -747,7 +849,7 @@ tiers:
criteria: 100
blocking: false
deny_skipping: true
- tests_count: 9
+ tests_count: 13
description: >-
The list of test cases is generated by
Tempest automatically and depends on the parameters of
@@ -757,6 +859,11 @@ tiers:
name: tempest_scenario
args:
mode: "\
+ (?!.*test_compute_unified_limits)\
+ (?!.*test_minbw_allocation_placement)\
+ (?!.*test_network_qos_placement)\
+ (?!.*test_unified_limits.ImageQuotaTest.test_image_count_uploading_quota)\
+ (?!.*test_unified_limits.ImageQuotaTest.test_image_stage_quota)\
(?!.*test_volume_boot_pattern.TestVolumeBootPattern.test_boot_server_from_encrypted_volume_luks)\
(?!.*\\[.*\\bslow\\b.*\\])(^tempest.scenario)"
option:
@@ -785,6 +892,7 @@ tiers:
(?!.*test_encrypted_cinder_volumes)\
(?!.*test_minbw_allocation_placement)\
(?!.*test_network_basic_ops.TestNetworkBasicOps.test_router_rescheduling)\
+ (?!.*test_shelve_instance.TestShelveInstance.test_cold_migrate_unshelved_instance)\
(?!.*test_volume_migrate_attached)\
(?!.*test_network_advanced_server_ops.TestNetworkAdvancedServerOps.test_server_connectivity_cold_migration_revert)\
(?=.*\\[.*\\bslow\\b.*\\])(^tempest.)"
@@ -820,6 +928,8 @@ tiers:
description: >-
This test case runs a group of Rally jobs used in
OpenStack gating
+ dependencies:
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: rally_jobs
args:
@@ -836,7 +946,7 @@ tiers:
perform ping connectivity, round trip time measurement
(latency) and TCP/UDP throughput
dependencies:
- - POD_ARCH: '^(?!aarch64$)'
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: vmtp
@@ -851,7 +961,7 @@ tiers:
is able to deploy OpenStack instances and networks in
different topologies.
dependencies:
- - POD_ARCH: '^(?!aarch64$)'
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: shaker
@@ -921,7 +1031,7 @@ tiers:
Clearwater using the Cloudify orchestrator. It also runs
some signaling traffic.
dependencies:
- - DEPLOY_SCENARIO: 'os-.*-nofeature-.*ha'
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: cloudify_ims
@@ -934,6 +1044,8 @@ tiers:
This test case deploys an OpenSource vIMS solution from
Clearwater using the OpenStack Heat orchestrator.
It also runs some signaling traffic.
+ dependencies:
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: heat_ims
@@ -944,6 +1056,8 @@ tiers:
blocking: false
description: >-
This test case is vRouter testing.
+ dependencies:
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: vyos_vrouter
@@ -955,5 +1069,7 @@ tiers:
description: >-
vEPC validation with Juju as VNF manager and ABoT as test
executor.
+ dependencies:
+ - NO_TENANT_NETWORK: '^(?![tT]rue$)'
run:
name: juju_epc
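The NO_TENANT_NETWORK dependency added throughout testcases.yaml relies on a negative lookahead, so a case is kept unless the variable is set to "true"/"True". A minimal sketch of that gating logic, assuming xtesting simply applies each dependency pattern to the environment value (the helper name below is illustrative, not xtesting's API):

import os
import re

def dependency_satisfied(variable, pattern):
    # The case is kept only if the regex matches the variable's value;
    # '^(?![tT]rue$)' matches anything except "true"/"True", including "".
    return re.search(pattern, os.environ.get(variable, '')) is not None

print(dependency_satisfied('NO_TENANT_NETWORK', r'^(?![tT]rue$)'))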
diff --git a/functest/core/cloudify.py b/functest/core/cloudify.py
index 0fb4f6eca..966d33645 100644
--- a/functest/core/cloudify.py
+++ b/functest/core/cloudify.py
@@ -29,7 +29,7 @@ class Cloudify(singlevm.SingleVm2):
__logger = logging.getLogger(__name__)
filename = ('/home/opnfv/functest/images/'
- 'ubuntu-16.04-server-cloudimg-amd64-disk1.img')
+ 'ubuntu-18.04-server-cloudimg-amd64.img')
flavor_ram = 4096
flavor_vcpus = 2
flavor_disk = 40
@@ -46,11 +46,11 @@ class Cloudify(singlevm.SingleVm2):
"""Initialize Cloudify testcase object."""
if "case_name" not in kwargs:
kwargs["case_name"] = "cloudify"
- super(Cloudify, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.cfy_client = None
def prepare(self):
- super(Cloudify, self).prepare()
+ super().prepare()
for port in self.ports:
self.cloud.create_security_group_rule(
self.sec.id, port_range_min=port, port_range_max=port,
@@ -64,20 +64,19 @@ class Cloudify(singlevm.SingleVm2):
scpc.put(self.cloudify_archive,
remote_path=os.path.basename(self.cloudify_archive))
(_, stdout, stderr) = self.ssh.exec_command(
- "sudo wget https://get.docker.com/ -O script.sh && "
- "sudo chmod +x script.sh && "
- "sudo ./script.sh && "
- "sudo docker load -i ~/{} && "
+ "sudo apt-get update && "
+ "sudo apt-get install -y docker.io && "
+ "sudo docker load -i "
+ f"~/{os.path.basename(self.cloudify_archive)} && "
"sudo docker run --name cfy_manager_local -d "
"--restart unless-stopped -v /sys/fs/cgroup:/sys/fs/cgroup:ro "
"--tmpfs /run --tmpfs /run/lock --security-opt seccomp:unconfined "
- "--cap-add SYS_ADMIN --network=host {}".format(
- os.path.basename(self.cloudify_archive),
- self.cloudify_container))
+ f"--cap-add SYS_ADMIN --network=host {self.cloudify_container}")
self.__logger.debug("output:\n%s", stdout.read().decode("utf-8"))
self.__logger.debug("error:\n%s", stderr.read().decode("utf-8"))
self.cfy_client = CloudifyClient(
- host=self.fip.floating_ip_address,
+ host=self.fip.floating_ip_address if self.fip else (
+ self.sshvm.public_v4),
username='admin', password='admin', tenant='default_tenant')
self.__logger.info("Attemps running status of the Manager")
secret_key = "foo"
@@ -131,8 +130,8 @@ class Cloudify(singlevm.SingleVm2):
"""Upload Cloudify plugins"""
(_, stdout, stderr) = self.ssh.exec_command(
"sudo docker exec cfy_manager_local "
- "cfy plugins upload -y {} {} && "
- "sudo docker exec cfy_manager_local cfy status".format(yaml, wgn))
+ f"cfy plugins upload -y {yaml} {wgn} && "
+ "sudo docker exec cfy_manager_local cfy status")
self.__logger.debug("output:\n%s", stdout.read().decode("utf-8"))
self.__logger.debug("error:\n%s", stderr.read().decode("utf-8"))
@@ -188,9 +187,8 @@ def wait_for_execution(client, execution, logger, timeout=3600, ):
if timeout is not None:
if time.time() > deadline:
raise RuntimeError(
- 'execution of operation {0} for deployment {1} '
- 'timed out'.format(execution.workflow_id,
- execution.deployment_id))
+                    f'execution of operation {execution.workflow_id} for '
+                    f'deployment {execution.deployment_id} timed out')
# update the remaining timeout
timeout = deadline - time.time()
@@ -218,4 +216,4 @@ def get_execution_id(client, deployment_id):
return execution
raise RuntimeError('Failed to get create_deployment_environment '
'workflow execution.'
- 'Available executions: {0}'.format(executions))
+ f'Available executions: {executions}')
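The change above lets the Cloudify client reach the manager either through the floating IP or through the server's provider-network address when no tenant network (and therefore no floating IP) exists. A hedged sketch of that selection, with fip and vm standing in for the attributes used in the class:

def manager_host(fip, vm):
    # Prefer the floating IP; fall back to the address assigned directly
    # on the external/provider network when NO_TENANT_NETWORK is in effect.
    return fip.floating_ip_address if fip else vm.public_v4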
diff --git a/functest/core/singlevm.py b/functest/core/singlevm.py
index bfaa53bfc..4bce516d3 100644
--- a/functest/core/singlevm.py
+++ b/functest/core/singlevm.py
@@ -39,7 +39,7 @@ class VmReady1(tenantnetwork.TenantNetwork1):
# pylint: disable=too-many-instance-attributes
__logger = logging.getLogger(__name__)
- filename = '/home/opnfv/functest/images/cirros-0.4.0-x86_64-disk.img'
+ filename = '/home/opnfv/functest/images/cirros-0.6.1-x86_64-disk.img'
image_format = 'qcow2'
extra_properties = {}
filename_alt = filename
@@ -59,7 +59,7 @@ class VmReady1(tenantnetwork.TenantNetwork1):
def __init__(self, **kwargs):
if "case_name" not in kwargs:
kwargs["case_name"] = 'vmready1'
- super(VmReady1, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.image = None
self.flavor = None
@@ -80,19 +80,18 @@ class VmReady1(tenantnetwork.TenantNetwork1):
functest_utils.convert_ini_to_dict(
env.get('IMAGE_PROPERTIES')))
extra_properties.update(
- getattr(config.CONF, '{}_extra_properties'.format(
- self.case_name), {}))
+ getattr(config.CONF, f'{self.case_name}_extra_properties', {}))
image = self.cloud.create_image(
- name if name else '{}-img_{}'.format(self.case_name, self.guid),
+ name if name else f'{self.case_name}-img_{self.guid}',
filename=getattr(
- config.CONF, '{}_image'.format(self.case_name),
+ config.CONF, f'{self.case_name}_image',
self.filename),
meta=extra_properties,
disk_format=getattr(
- config.CONF, '{}_image_format'.format(self.case_name),
+ config.CONF, f'{self.case_name}_image_format',
self.image_format),
visibility=getattr(
- config.CONF, '{}_visibility'.format(self.case_name),
+ config.CONF, f'{self.case_name}_visibility',
self.visibility),
wait=True)
self.__logger.debug("image: %s", image)
@@ -115,20 +114,18 @@ class VmReady1(tenantnetwork.TenantNetwork1):
functest_utils.convert_ini_to_dict(
env.get('IMAGE_PROPERTIES')))
extra_alt_properties.update(
- getattr(config.CONF, '{}_extra_alt_properties'.format(
- self.case_name), {}))
+ getattr(config.CONF, f'{self.case_name}_extra_alt_properties', {}))
image = self.cloud.create_image(
- name if name else '{}-img_alt_{}'.format(
- self.case_name, self.guid),
+ name if name else f'{self.case_name}-img_alt_{self.guid}',
filename=getattr(
- config.CONF, '{}_image_alt'.format(self.case_name),
+ config.CONF, f'{self.case_name}_image_alt',
self.filename_alt),
meta=extra_alt_properties,
disk_format=getattr(
- config.CONF, '{}_image_alt_format'.format(self.case_name),
+ config.CONF, f'{self.case_name}_image_alt_format',
self.image_format),
visibility=getattr(
- config.CONF, '{}_visibility'.format(self.case_name),
+ config.CONF, f'{self.case_name}_visibility',
self.visibility),
wait=True)
self.__logger.debug("image: %s", image)
@@ -146,12 +143,12 @@ class VmReady1(tenantnetwork.TenantNetwork1):
"""
assert self.orig_cloud
flavor = self.orig_cloud.create_flavor(
- name if name else '{}-flavor_{}'.format(self.case_name, self.guid),
- getattr(config.CONF, '{}_flavor_ram'.format(self.case_name),
+ name if name else f'{self.case_name}-flavor_{self.guid}',
+ getattr(config.CONF, f'{self.case_name}_flavor_ram',
self.flavor_ram),
- getattr(config.CONF, '{}_flavor_vcpus'.format(self.case_name),
+ getattr(config.CONF, f'{self.case_name}_flavor_vcpus',
self.flavor_vcpus),
- getattr(config.CONF, '{}_flavor_disk'.format(self.case_name),
+ getattr(config.CONF, f'{self.case_name}_flavor_disk',
self.flavor_disk))
self.__logger.debug("flavor: %s", flavor)
flavor_extra_specs = self.flavor_extra_specs.copy()
@@ -161,7 +158,7 @@ class VmReady1(tenantnetwork.TenantNetwork1):
env.get('FLAVOR_EXTRA_SPECS')))
flavor_extra_specs.update(
getattr(config.CONF,
- '{}_flavor_extra_specs'.format(self.case_name), {}))
+ f'{self.case_name}_flavor_extra_specs', {}))
self.orig_cloud.set_flavor_specs(flavor.id, flavor_extra_specs)
return flavor
@@ -177,13 +174,12 @@ class VmReady1(tenantnetwork.TenantNetwork1):
"""
assert self.orig_cloud
flavor = self.orig_cloud.create_flavor(
- name if name else '{}-flavor_alt_{}'.format(
- self.case_name, self.guid),
- getattr(config.CONF, '{}_flavor_alt_ram'.format(self.case_name),
+ name if name else f'{self.case_name}-flavor_alt_{self.guid}',
+ getattr(config.CONF, f'{self.case_name}_flavor_alt_ram',
self.flavor_alt_ram),
- getattr(config.CONF, '{}_flavor_alt_vcpus'.format(self.case_name),
+ getattr(config.CONF, f'{self.case_name}_flavor_alt_vcpus',
self.flavor_alt_vcpus),
- getattr(config.CONF, '{}_flavor_alt_disk'.format(self.case_name),
+ getattr(config.CONF, f'{self.case_name}_flavor_alt_disk',
self.flavor_alt_disk))
self.__logger.debug("flavor: %s", flavor)
flavor_alt_extra_specs = self.flavor_alt_extra_specs.copy()
@@ -193,7 +189,7 @@ class VmReady1(tenantnetwork.TenantNetwork1):
env.get('FLAVOR_EXTRA_SPECS')))
flavor_alt_extra_specs.update(
getattr(config.CONF,
- '{}_flavor_alt_extra_specs'.format(self.case_name), {}))
+ f'{self.case_name}_flavor_alt_extra_specs', {}))
self.orig_cloud.set_flavor_specs(
flavor.id, flavor_alt_extra_specs)
return flavor
@@ -210,9 +206,11 @@ class VmReady1(tenantnetwork.TenantNetwork1):
"""
assert self.cloud
vm1 = self.cloud.create_server(
- name if name else '{}-vm_{}'.format(self.case_name, self.guid),
+ name if name else f'{self.case_name}-vm_{self.guid}',
image=self.image.id, flavor=self.flavor.id,
- auto_ip=False, network=self.network.id,
+ auto_ip=False,
+ network=self.network.id if self.network else env.get(
+ "EXTERNAL_NETWORK"),
timeout=self.create_server_timeout, wait=True, **kwargs)
self.__logger.debug("vm: %s", vm1)
return vm1
@@ -287,7 +285,7 @@ class VmReady1(tenantnetwork.TenantNetwork1):
status = testcase.TestCase.EX_RUN_ERROR
try:
assert self.cloud
- assert super(VmReady1, self).run(
+ assert super().run(
**kwargs) == testcase.TestCase.EX_OK
self.image = self.publish_image()
self.flavor = self.create_flavor()
@@ -304,7 +302,7 @@ class VmReady1(tenantnetwork.TenantNetwork1):
try:
assert self.orig_cloud
assert self.cloud
- super(VmReady1, self).clean()
+ super().clean()
if self.image:
self.cloud.delete_image(self.image.id)
if self.flavor:
@@ -330,7 +328,7 @@ class VmReady2(VmReady1):
def __init__(self, **kwargs):
if "case_name" not in kwargs:
kwargs["case_name"] = 'vmready2'
- super(VmReady2, self).__init__(**kwargs)
+ super().__init__(**kwargs)
try:
assert self.orig_cloud
self.project = tenantnetwork.NewProject(
@@ -344,7 +342,7 @@ class VmReady2(VmReady1):
def clean(self):
try:
- super(VmReady2, self).clean()
+ super().clean()
assert self.project
self.project.clean()
except Exception: # pylint: disable=broad-except
@@ -373,7 +371,7 @@ class SingleVm1(VmReady1):
def __init__(self, **kwargs):
if "case_name" not in kwargs:
kwargs["case_name"] = 'singlevm1'
- super(SingleVm1, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.sshvm = None
self.sec = None
self.fip = None
@@ -391,14 +389,15 @@ class SingleVm1(VmReady1):
"""
assert self.cloud
self.keypair = self.cloud.create_keypair(
- '{}-kp_{}'.format(self.case_name, self.guid))
+ f'{self.case_name}-kp_{self.guid}')
self.__logger.debug("keypair: %s", self.keypair)
self.__logger.debug("private_key:\n%s", self.keypair.private_key)
- with open(self.key_filename, 'w') as private_key_file:
+ with open(
+ self.key_filename, 'w', encoding='utf-8') as private_key_file:
private_key_file.write(self.keypair.private_key)
self.sec = self.cloud.create_security_group(
- '{}-sg_{}'.format(self.case_name, self.guid),
- 'created by OPNFV Functest ({})'.format(self.case_name))
+ f'{self.case_name}-sg_{self.guid}',
+ f'created by OPNFV Functest ({self.case_name})')
self.cloud.create_security_group_rule(
self.sec.id, port_range_min='22', port_range_max='22',
protocol='tcp', direction='ingress')
@@ -416,10 +415,12 @@ class SingleVm1(VmReady1):
- None on error
"""
assert vm1
- fip = self.cloud.create_floating_ip(
- network=self.ext_net.id, server=vm1, wait=True,
- timeout=self.create_floating_ip_timeout)
- self.__logger.debug("floating_ip: %s", fip)
+ fip = None
+ if env.get('NO_TENANT_NETWORK').lower() != 'true':
+ fip = self.cloud.create_floating_ip(
+ network=self.ext_net.id, server=vm1, wait=True,
+ timeout=self.create_floating_ip_timeout)
+ self.__logger.debug("floating_ip: %s", fip)
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.client.AutoAddPolicy())
for loop in range(self.ssh_connect_loops):
@@ -427,20 +428,20 @@ class SingleVm1(VmReady1):
p_console = self.cloud.get_server_console(vm1)
self.__logger.debug("vm console: \n%s", p_console)
ssh.connect(
- fip.floating_ip_address,
+ fip.floating_ip_address if fip else vm1.public_v4,
username=getattr(
config.CONF,
- '{}_image_user'.format(self.case_name), self.username),
+ f'{self.case_name}_image_user', self.username),
key_filename=self.key_filename,
timeout=getattr(
config.CONF,
- '{}_vm_ssh_connect_timeout'.format(self.case_name),
+ f'{self.case_name}_vm_ssh_connect_timeout',
self.ssh_connect_timeout))
break
except Exception as exc: # pylint: disable=broad-except
self.__logger.debug(
"try %s: cannot connect to %s: %s", loop + 1,
- fip.floating_ip_address, exc)
+ fip.floating_ip_address if fip else vm1.public_v4, exc)
time.sleep(9)
else:
self.__logger.error(
@@ -476,7 +477,7 @@ class SingleVm1(VmReady1):
status = testcase.TestCase.EX_RUN_ERROR
try:
assert self.cloud
- assert super(SingleVm1, self).run(
+ assert super().run(
**kwargs) == testcase.TestCase.EX_OK
self.result = 0
self.prepare()
@@ -507,7 +508,7 @@ class SingleVm1(VmReady1):
self.cloud.delete_security_group(self.sec.id)
if self.keypair:
self.cloud.delete_keypair(self.keypair.name)
- super(SingleVm1, self).clean()
+ super().clean()
except Exception: # pylint: disable=broad-except
self.__logger.exception("Cannot clean all resources")
@@ -527,7 +528,7 @@ class SingleVm2(SingleVm1):
def __init__(self, **kwargs):
if "case_name" not in kwargs:
kwargs["case_name"] = 'singlevm2'
- super(SingleVm2, self).__init__(**kwargs)
+ super().__init__(**kwargs)
try:
assert self.orig_cloud
self.project = tenantnetwork.NewProject(
@@ -541,7 +542,7 @@ class SingleVm2(SingleVm1):
def clean(self):
try:
- super(SingleVm2, self).clean()
+ super().clean()
assert self.project
self.project.clean()
except Exception: # pylint: disable=broad-except
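The f-string rewrites above all follow the same per-testcase override pattern: any attribute named "<case_name>_<setting>" on config.CONF takes precedence over the class default. An illustrative stand-in (the Conf class and its values are made up for this sketch):

class Conf:
    vmready1_flavor_ram = 2048  # hypothetical per-case override

def resolve(conf, case_name, setting, default):
    # Mirrors getattr(config.CONF, f'{self.case_name}_<setting>', default)
    return getattr(conf, f'{case_name}_{setting}', default)

print(resolve(Conf, 'vmready1', 'flavor_ram', 512))   # 2048, override wins
print(resolve(Conf, 'singlevm1', 'flavor_ram', 512))  # 512, class default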
diff --git a/functest/core/tenantnetwork.py b/functest/core/tenantnetwork.py
index b4818d76f..3670dbe8a 100644
--- a/functest/core/tenantnetwork.py
+++ b/functest/core/tenantnetwork.py
@@ -61,13 +61,12 @@ class NewProject():
name_or_id=self.orig_cloud.auth.get(
"project_domain_name", "Default"))
self.project = self.orig_cloud.create_project(
- name='{}-project_{}'.format(self.case_name[:18], self.guid),
- description="Created by OPNFV Functest: {}".format(
- self.case_name),
+ name=f'{self.case_name[:18]}-project_{self.guid}',
+ description=f"Created by OPNFV Functest: {self.case_name}",
domain_id=self.domain.id)
self.__logger.debug("project: %s", self.project)
self.user = self.orig_cloud.create_user(
- name='{}-user_{}'.format(self.case_name, self.guid),
+ name=f'{self.case_name}-user_{self.guid}',
password=self.password,
domain_id=self.domain.id)
self.__logger.debug("user: %s", self.user)
@@ -77,7 +76,7 @@ class NewProject():
elif self.orig_cloud.get_role(self.default_member.lower()):
self.role_name = self.default_member.lower()
else:
- raise Exception("Cannot detect {}".format(self.default_member))
+ raise Exception(f"Cannot detect {self.default_member}")
except Exception: # pylint: disable=broad-except
self.__logger.info("Creating default role %s", self.default_member)
role = self.orig_cloud.create_role(self.default_member)
@@ -146,29 +145,28 @@ class TenantNetwork1(testcase.TestCase):
__logger = logging.getLogger(__name__)
cidr = '192.168.120.0/24'
shared_network = False
- allow_no_fip = False
def __init__(self, **kwargs):
if "case_name" not in kwargs:
kwargs["case_name"] = 'tenantnetwork1'
- super(TenantNetwork1, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.dir_results = os.path.join(getattr(config.CONF, 'dir_results'))
self.res_dir = os.path.join(self.dir_results, self.case_name)
self.output_log_name = 'functest.log'
self.output_debug_log_name = 'functest.debug.log'
+ self.ext_net = None
try:
cloud_config = os_client_config.get_config()
self.cloud = self.orig_cloud = shade.OpenStackCloud(
cloud_config=cloud_config)
except Exception: # pylint: disable=broad-except
self.cloud = self.orig_cloud = None
- self.ext_net = None
self.__logger.exception("Cannot connect to Cloud")
- try:
- self.ext_net = self.get_external_network(self.cloud)
- except Exception: # pylint: disable=broad-except
- self.ext_net = None
- self.__logger.exception("Cannot get the external network")
+ if env.get('NO_TENANT_NETWORK').lower() != 'true':
+ try:
+ self.ext_net = self.get_external_network(self.cloud)
+ except Exception: # pylint: disable=broad-except
+ self.__logger.exception("Cannot get the external network")
self.guid = str(uuid.uuid4())
self.network = None
self.subnet = None
@@ -220,18 +218,18 @@ class TenantNetwork1(testcase.TestCase):
        Raises: exception on error
"""
assert self.cloud
- if not self.allow_no_fip:
+ if env.get('NO_TENANT_NETWORK').lower() != 'true':
assert self.ext_net
provider = {}
- if hasattr(config.CONF, '{}_network_type'.format(self.case_name)):
+ if hasattr(config.CONF, f'{self.case_name}_network_type'):
provider["network_type"] = getattr(
- config.CONF, '{}_network_type'.format(self.case_name))
- if hasattr(config.CONF, '{}_physical_network'.format(self.case_name)):
+ config.CONF, f'{self.case_name}_network_type')
+ if hasattr(config.CONF, f'{self.case_name}_physical_network'):
provider["physical_network"] = getattr(
- config.CONF, '{}_physical_network'.format(self.case_name))
- if hasattr(config.CONF, '{}_segmentation_id'.format(self.case_name)):
+ config.CONF, f'{self.case_name}_physical_network')
+ if hasattr(config.CONF, f'{self.case_name}_segmentation_id'):
provider["segmentation_id"] = getattr(
- config.CONF, '{}_segmentation_id'.format(self.case_name))
+ config.CONF, f'{self.case_name}_segmentation_id')
domain = self.orig_cloud.get_domain(
name_or_id=self.orig_cloud.auth.get(
"project_domain_name", "Default"))
@@ -239,23 +237,23 @@ class TenantNetwork1(testcase.TestCase):
self.cloud.auth['project_name'],
domain_id=domain.id)
self.network = self.orig_cloud.create_network(
- '{}-net_{}'.format(self.case_name, self.guid),
+ f'{self.case_name}-net_{self.guid}',
provider=provider, project_id=project.id,
shared=self.shared_network)
self.__logger.debug("network: %s", self.network)
self.subnet = self.cloud.create_subnet(
self.network.id,
- subnet_name='{}-subnet_{}'.format(self.case_name, self.guid),
+ subnet_name=f'{self.case_name}-subnet_{self.guid}',
cidr=getattr(
- config.CONF, '{}_private_subnet_cidr'.format(self.case_name),
+ config.CONF, f'{self.case_name}_private_subnet_cidr',
self.cidr),
enable_dhcp=True,
dns_nameservers=[env.get('NAMESERVER')])
self.__logger.debug("subnet: %s", self.subnet)
self.router = self.cloud.create_router(
- name='{}-router_{}'.format(self.case_name, self.guid),
+ name=f'{self.case_name}-router_{self.guid}',
ext_gateway_net_id=self.ext_net.id if self.ext_net else None)
self.__logger.debug("router: %s", self.router)
self.cloud.add_router_interface(self.router, subnet_id=self.subnet.id)
@@ -265,7 +263,8 @@ class TenantNetwork1(testcase.TestCase):
try:
assert self.cloud
self.start_time = time.time()
- self.create_network_resources()
+ if env.get('NO_TENANT_NETWORK').lower() != 'true':
+ self.create_network_resources()
self.result = 100
status = testcase.TestCase.EX_OK
except Exception: # pylint: disable=broad-except
@@ -306,7 +305,7 @@ class TenantNetwork2(TenantNetwork1):
def __init__(self, **kwargs):
if "case_name" not in kwargs:
kwargs["case_name"] = 'tenantnetwork2'
- super(TenantNetwork2, self).__init__(**kwargs)
+ super().__init__(**kwargs)
try:
assert self.cloud
self.project = NewProject(
@@ -320,7 +319,7 @@ class TenantNetwork2(TenantNetwork1):
def clean(self):
try:
- super(TenantNetwork2, self).clean()
+ super().clean()
assert self.project
self.project.clean()
except Exception: # pylint: disable=broad-except
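NewProject's role handling shown above probes for the configured member role, then its lowercase variant, and creates it as a last resort. A rough equivalent, assuming a shade-like cloud object exposing get_role/create_role (names and return shape are assumptions):

def ensure_member_role(cloud, default_member='Member'):
    for candidate in (default_member, default_member.lower()):
        if cloud.get_role(candidate):
            return candidate
    # Neither variant exists; create the default one (logged in the real code).
    return cloud.create_role(default_member).name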
diff --git a/functest/opnfv_tests/openstack/api/connection_check.py b/functest/opnfv_tests/openstack/api/connection_check.py
index adca30ee9..eaf9767c0 100644
--- a/functest/opnfv_tests/openstack/api/connection_check.py
+++ b/functest/opnfv_tests/openstack/api/connection_check.py
@@ -16,6 +16,7 @@ import os_client_config
import shade
from xtesting.core import testcase
+from functest.utils import env
from functest.utils import functest_utils
@@ -33,7 +34,7 @@ class ConnectionCheck(testcase.TestCase):
def __init__(self, **kwargs):
if "case_name" not in kwargs:
kwargs["case_name"] = 'connection_check'
- super(ConnectionCheck, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.output_log_name = 'functest.log'
self.output_debug_log_name = 'functest.debug.log'
try:
@@ -51,6 +52,10 @@ class ConnectionCheck(testcase.TestCase):
self.start_time = time.time()
self.__logger.debug(
"list_services: %s", functest_utils.list_services(self.cloud))
+ if env.get('NO_TENANT_NETWORK').lower() == 'true':
+ self.func_list.remove("list_floating_ip_pools")
+ self.func_list.remove("list_floating_ips")
+ self.func_list.remove("list_routers")
for func in self.func_list:
self.__logger.debug(
"%s: %s", func, getattr(self.cloud, func)())
diff --git a/functest/opnfv_tests/openstack/barbican/barbican.py b/functest/opnfv_tests/openstack/barbican/barbican.py
index 7b1bb24f7..706304bbf 100644
--- a/functest/opnfv_tests/openstack/barbican/barbican.py
+++ b/functest/opnfv_tests/openstack/barbican/barbican.py
@@ -9,8 +9,6 @@
# pylint: disable=missing-docstring
-import logging
-
from six.moves import configparser
from functest.opnfv_tests.openstack.tempest import tempest
@@ -18,10 +16,8 @@ from functest.opnfv_tests.openstack.tempest import tempest
class Barbican(tempest.TempestCommon):
- __logger = logging.getLogger(__name__)
-
def configure(self, **kwargs):
- super(Barbican, self).configure(**kwargs)
+ super().configure(**kwargs)
rconfig = configparser.RawConfigParser()
rconfig.read(self.conf_file)
if not rconfig.has_section('auth'):
@@ -36,6 +32,6 @@ class Barbican(tempest.TempestCommon):
if not rconfig.has_section('image-feature-enabled'):
rconfig.add_section('image-feature-enabled')
rconfig.set('image-feature-enabled', 'api_v1', False)
- with open(self.conf_file, 'w') as config_file:
+ with open(self.conf_file, 'w', encoding='utf-8') as config_file:
rconfig.write(config_file)
self.backup_tempest_config(self.conf_file, self.res_dir)
diff --git a/functest/opnfv_tests/openstack/cinder/cinder_test.py b/functest/opnfv_tests/openstack/cinder/cinder_test.py
index d81bb100a..7d8c0a0bd 100644
--- a/functest/opnfv_tests/openstack/cinder/cinder_test.py
+++ b/functest/opnfv_tests/openstack/cinder/cinder_test.py
@@ -35,7 +35,7 @@ class CinderCheck(singlevm.SingleVm2):
"""Initialize testcase."""
if "case_name" not in kwargs:
kwargs["case_name"] = "cinder_test"
- super(CinderCheck, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.logger = logging.getLogger(__name__)
self.vm2 = None
self.fip2 = None
@@ -52,14 +52,14 @@ class CinderCheck(singlevm.SingleVm2):
return self._write_data() or self._read_data()
def prepare(self):
- super(CinderCheck, self).prepare()
+ super().prepare()
self.vm2 = self.boot_vm(
- '{}-vm2_{}'.format(self.case_name, self.guid),
+ f'{self.case_name}-vm2_{self.guid}',
key_name=self.keypair.id,
security_groups=[self.sec.id])
(self.fip2, self.ssh2) = self.connect(self.vm2)
self.volume = self.cloud.create_volume(
- name='{}-volume_{}'.format(self.case_name, self.guid), size='2',
+ name=f'{self.case_name}-volume_{self.guid}', size='2',
timeout=self.volume_timeout, wait=True)
def _write_data(self):
@@ -76,7 +76,7 @@ class CinderCheck(singlevm.SingleVm2):
return testcase.TestCase.EX_RUN_ERROR
self.logger.debug("ssh: %s", self.ssh)
(_, stdout, stderr) = self.ssh.exec_command(
- "sh ~/write_data.sh {}".format(env.get('VOLUME_DEVICE_NAME')))
+ f"sh ~/write_data.sh {env.get('VOLUME_DEVICE_NAME')}")
self.logger.debug(
"volume_write stdout: %s", stdout.read().decode("utf-8"))
self.logger.debug(
@@ -104,7 +104,7 @@ class CinderCheck(singlevm.SingleVm2):
return testcase.TestCase.EX_RUN_ERROR
self.logger.debug("ssh: %s", self.ssh2)
(_, stdout, stderr) = self.ssh2.exec_command(
- "sh ~/read_data.sh {}".format(env.get('VOLUME_DEVICE_NAME')))
+ f"sh ~/read_data.sh {env.get('VOLUME_DEVICE_NAME')}")
self.logger.debug(
"read volume stdout: %s", stdout.read().decode("utf-8"))
self.logger.debug(
@@ -124,4 +124,4 @@ class CinderCheck(singlevm.SingleVm2):
self.cloud.delete_floating_ip(self.fip2.id)
if self.volume:
self.cloud.delete_volume(self.volume.id)
- super(CinderCheck, self).clean()
+ super().clean()
diff --git a/functest/opnfv_tests/openstack/cinder/write_data.sh b/functest/opnfv_tests/openstack/cinder/write_data.sh
index 6689309b9..16845ba31 100644
--- a/functest/opnfv_tests/openstack/cinder/write_data.sh
+++ b/functest/opnfv_tests/openstack/cinder/write_data.sh
@@ -15,7 +15,7 @@ echo "VOL_DEV_NAME: $VOL_DEV_NAME"
echo "$(lsblk -l -o NAME)"
if [ ! -z $(lsblk -l -o NAME | grep $VOL_DEV_NAME) ]; then
- sudo /usr/sbin/mkfs.ext4 -F /dev/$VOL_DEV_NAME
+ sudo mkfs.ext4 -F /dev/$VOL_DEV_NAME
sudo mount /dev/$VOL_DEV_NAME $DEST
sudo touch $DEST/new_data
if [ -f $DEST/new_data ]; then
diff --git a/functest/opnfv_tests/openstack/patrole/patrole.py b/functest/opnfv_tests/openstack/patrole/patrole.py
index 8613d5127..88c42f269 100644
--- a/functest/opnfv_tests/openstack/patrole/patrole.py
+++ b/functest/opnfv_tests/openstack/patrole/patrole.py
@@ -9,8 +9,6 @@
# pylint: disable=missing-docstring
-import logging
-
from six.moves import configparser
from functest.opnfv_tests.openstack.tempest import tempest
@@ -18,15 +16,13 @@ from functest.opnfv_tests.openstack.tempest import tempest
class Patrole(tempest.TempestCommon):
- __logger = logging.getLogger(__name__)
-
def configure(self, **kwargs):
- super(Patrole, self).configure(**kwargs)
+ super().configure(**kwargs)
rconfig = configparser.RawConfigParser()
rconfig.read(self.conf_file)
if not rconfig.has_section('rbac'):
rconfig.add_section('rbac')
rconfig.set('rbac', 'rbac_test_roles', kwargs.get('roles', 'admin'))
- with open(self.conf_file, 'w') as config_file:
+ with open(self.conf_file, 'w', encoding='utf-8') as config_file:
rconfig.write(config_file)
self.backup_tempest_config(self.conf_file, self.res_dir)
diff --git a/functest/opnfv_tests/openstack/rally/blacklist.yaml b/functest/opnfv_tests/openstack/rally/blacklist.yaml
index bc41bb695..e16b83ba6 100644
--- a/functest/opnfv_tests/openstack/rally/blacklist.yaml
+++ b/functest/opnfv_tests/openstack/rally/blacklist.yaml
@@ -26,7 +26,15 @@ functionality:
tests:
- HeatStacks.create_and_delete_stack
- NovaServers.boot_and_associate_floating_ip
+ - NovaServers.boot_server_and_list_interfaces
- NovaServers.boot_server_associate_and_dissociate_floating_ip
- NeutronNetworks.create_and_delete_floating_ips
- NeutronNetworks.create_and_list_floating_ips
- NeutronNetworks.associate_and_dissociate_floating_ips
+ - VMTasks.dd_load_test
+ - NeutronNetworks.create_and_delete_routers
+ - NeutronNetworks.create_and_list_routers
+ - NeutronNetworks.create_and_show_routers
+ - NeutronNetworks.create_and_update_routers
+ - NeutronNetworks.set_and_clear_router_gateway
+ - Quotas.neutron_update
diff --git a/functest/opnfv_tests/openstack/rally/rally.py b/functest/opnfv_tests/openstack/rally/rally.py
index 63f281b67..3d897e25d 100644
--- a/functest/opnfv_tests/openstack/rally/rally.py
+++ b/functest/opnfv_tests/openstack/rally/rally.py
@@ -69,13 +69,12 @@ class RallyBase(singlevm.VmReady2):
visibility = 'public'
shared_network = True
- allow_no_fip = True
task_timeout = 3600
username = 'cirros'
def __init__(self, **kwargs):
"""Initialize RallyBase object."""
- super(RallyBase, self).__init__(**kwargs)
+ super().__init__(**kwargs)
assert self.orig_cloud
assert self.project
if self.orig_cloud.get_role("admin"):
@@ -133,13 +132,20 @@ class RallyBase(singlevm.VmReady2):
if self.network:
task_args['netid'] = str(self.network.id)
else:
- task_args['netid'] = ''
+ LOGGER.warning(
+ 'No tenant network created. '
+ 'Trying EXTERNAL_NETWORK as a fallback')
+ if env.get("EXTERNAL_NETWORK"):
+ network = self.cloud.get_network(env.get("EXTERNAL_NETWORK"))
+ task_args['netid'] = str(network.id) if network else ''
+ else:
+ task_args['netid'] = ''
return task_args
def _prepare_test_list(self, test_name):
"""Build the list of test cases to be executed."""
- test_yaml_file_name = 'opnfv-{}.yaml'.format(test_name)
+ test_yaml_file_name = f'opnfv-{test_name}.yaml'
scenario_file_name = os.path.join(self.rally_scenario_dir,
test_yaml_file_name)
@@ -148,8 +154,8 @@ class RallyBase(singlevm.VmReady2):
test_yaml_file_name)
if not os.path.exists(scenario_file_name):
- raise Exception("The scenario '%s' does not exist."
- % scenario_file_name)
+ raise Exception(
+ f"The scenario '{scenario_file_name}' does not exist.")
LOGGER.debug('Scenario fetched from : %s', scenario_file_name)
test_file_name = os.path.join(self.temp_dir, test_yaml_file_name)
@@ -168,10 +174,10 @@ class RallyBase(singlevm.VmReady2):
cmd = ("rally deployment list | awk '/" +
getattr(config.CONF, 'rally_deployment_name') +
"/ {print $2}'")
- proc = subprocess.Popen(cmd, shell=True,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- deployment_uuid = proc.stdout.readline().rstrip()
+ with subprocess.Popen(
+ cmd, shell=True, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT) as proc:
+ deployment_uuid = proc.stdout.readline().rstrip()
return deployment_uuid.decode("utf-8")
@staticmethod
@@ -184,7 +190,9 @@ class RallyBase(singlevm.VmReady2):
if pod_arch and pod_arch in arch_filter:
LOGGER.info("Apply aarch64 specific to rally config...")
- with open(RallyBase.rally_aar4_patch_path, "r") as pfile:
+ with open(
+ RallyBase.rally_aar4_patch_path, "r",
+ encoding='utf-8') as pfile:
rally_patch_conf = pfile.read()
for line in fileinput.input(RallyBase.rally_conf_path):
@@ -222,7 +230,7 @@ class RallyBase(singlevm.VmReady2):
rconfig.add_section('openstack')
rconfig.set(
'openstack', 'keystone_default_role', env.get("NEW_USER_ROLE"))
- with open(rally_conf, 'w') as config_file:
+ with open(rally_conf, 'w', encoding='utf-8') as config_file:
rconfig.write(config_file)
@staticmethod
@@ -233,7 +241,7 @@ class RallyBase(singlevm.VmReady2):
rconfig.read(rally_conf)
if rconfig.has_option('openstack', 'keystone_default_role'):
rconfig.remove_option('openstack', 'keystone_default_role')
- with open(rally_conf, 'w') as config_file:
+ with open(rally_conf, 'w', encoding='utf-8') as config_file:
rconfig.write(config_file)
@staticmethod
@@ -285,7 +293,9 @@ class RallyBase(singlevm.VmReady2):
"""Exclude scenario."""
black_tests = []
try:
- with open(RallyBase.blacklist_file, 'r') as black_list_file:
+ with open(
+ RallyBase.blacklist_file, 'r',
+ encoding='utf-8') as black_list_file:
black_list_yaml = yaml.safe_load(black_list_file)
deploy_scenario = env.get('DEPLOY_SCENARIO')
@@ -329,7 +339,9 @@ class RallyBase(singlevm.VmReady2):
func_list = []
try:
- with open(RallyBase.blacklist_file, 'r') as black_list_file:
+ with open(
+ RallyBase.blacklist_file, 'r',
+ encoding='utf-8') as black_list_file:
black_list_yaml = yaml.safe_load(black_list_file)
if env.get('BLOCK_MIGRATION').lower() == 'true':
@@ -356,31 +368,25 @@ class RallyBase(singlevm.VmReady2):
def apply_blacklist(self, case_file_name, result_file_name):
"""Apply blacklist."""
LOGGER.debug("Applying blacklist...")
- cases_file = open(case_file_name, 'r')
- result_file = open(result_file_name, 'w')
-
- black_tests = list(set(self.excl_func() +
- self.excl_scenario()))
-
- if black_tests:
- LOGGER.debug("Blacklisted tests: %s", str(black_tests))
-
- include = True
- for cases_line in cases_file:
- if include:
- for black_tests_line in black_tests:
- if re.search(black_tests_line,
- cases_line.strip().rstrip(':')):
- include = False
- break
+ with open(case_file_name, 'r', encoding='utf-8') as cases_file, open(
+ result_file_name, 'w', encoding='utf-8') as result_file:
+ black_tests = list(set(self.excl_func() + self.excl_scenario()))
+ if black_tests:
+ LOGGER.debug("Blacklisted tests: %s", str(black_tests))
+
+ include = True
+ for cases_line in cases_file:
+ if include:
+ for black_tests_line in black_tests:
+ if re.search(black_tests_line,
+ cases_line.strip().rstrip(':')):
+ include = False
+ break
+ else:
+ result_file.write(str(cases_line))
else:
- result_file.write(str(cases_line))
- else:
- if cases_line.isspace():
- include = True
-
- cases_file.close()
- result_file.close()
+ if cases_line.isspace():
+ include = True
@staticmethod
def file_is_empty(file_name):
@@ -408,7 +414,7 @@ class RallyBase(singlevm.VmReady2):
LOGGER.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
# save report as JSON
- report_json_name = '{}.json'.format(test_name)
+ report_json_name = f'{test_name}.json'
report_json_dir = os.path.join(self.results_dir, report_json_name)
cmd = (["rally", "task", "report", "--json", "--uuid", task_id,
"--out", report_json_dir])
@@ -416,7 +422,8 @@ class RallyBase(singlevm.VmReady2):
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
LOGGER.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
- json_results = open(report_json_dir).read()
+ with open(report_json_dir, encoding='utf-8') as json_file:
+ json_results = json_file.read()
self._append_summary(json_results, test_name)
# parse JSON operation result
@@ -491,7 +498,7 @@ class RallyBase(singlevm.VmReady2):
if test in self.stests:
self.tests.append(test)
else:
- raise Exception("Test name '%s' is invalid" % test)
+ raise Exception(f"Test name '{test}' is invalid")
if not os.path.exists(self.task_dir):
os.makedirs(self.task_dir)
@@ -499,16 +506,14 @@ class RallyBase(singlevm.VmReady2):
task = os.path.join(self.rally_dir, 'task.yaml')
if not os.path.exists(task):
LOGGER.error("Task file '%s' does not exist.", task)
- raise Exception("Task file '{}' does not exist.".
- format(task))
+ raise Exception(f"Task file '{task}' does not exist.")
self.task_file = os.path.join(self.task_dir, 'task.yaml')
shutil.copyfile(task, self.task_file)
task_macro = os.path.join(self.rally_dir, 'macro')
if not os.path.exists(task_macro):
LOGGER.error("Task macro dir '%s' does not exist.", task_macro)
- raise Exception("Task macro dir '{}' does not exist.".
- format(task_macro))
+ raise Exception(f"Task macro dir '{task_macro}' does not exist.")
macro_dir = os.path.join(self.task_dir, 'macro')
if os.path.exists(macro_dir):
shutil.rmtree(macro_dir)
@@ -569,7 +574,7 @@ class RallyBase(singlevm.VmReady2):
success_avg = 100 * item['nb_success'] / item['nb_tests']
except ZeroDivisionError:
success_avg = 0
- success_str = str("{:0.2f}".format(success_avg)) + '%'
+ success_str = f"{success_avg:0.2f}%"
duration_str = time.strftime("%H:%M:%S",
time.gmtime(item['overall_duration']))
res_table.add_row([item['test_name'], duration_str,
@@ -587,7 +592,7 @@ class RallyBase(singlevm.VmReady2):
self.result = 100 * total_nb_success / total_nb_tests
except ZeroDivisionError:
self.result = 100
- success_rate = "{:0.2f}".format(self.result)
+ success_rate = f"{self.result:0.2f}"
success_rate_str = str(success_rate) + '%'
res_table.add_row(["", "", "", ""])
res_table.add_row(["TOTAL:", total_duration_str, total_nb_tests,
@@ -642,7 +647,7 @@ class RallyBase(singlevm.VmReady2):
self.clean_rally_logs()
if self.flavor_alt:
self.orig_cloud.delete_flavor(self.flavor_alt.id)
- super(RallyBase, self).clean()
+ super().clean()
def is_successful(self):
"""The overall result of the test."""
@@ -650,7 +655,7 @@ class RallyBase(singlevm.VmReady2):
if item['task_status'] is False:
return testcase.TestCase.EX_TESTCASE_FAILED
- return super(RallyBase, self).is_successful()
+ return super().is_successful()
@staticmethod
def update_rally_logs(res_dir, rally_conf='/etc/rally/rally.conf'):
@@ -663,7 +668,7 @@ class RallyBase(singlevm.VmReady2):
rconfig.set('DEFAULT', 'use_stderr', False)
rconfig.set('DEFAULT', 'log-file', 'rally.log')
rconfig.set('DEFAULT', 'log_dir', res_dir)
- with open(rally_conf, 'w') as config_file:
+ with open(rally_conf, 'w', encoding='utf-8') as config_file:
rconfig.write(config_file)
@staticmethod
@@ -679,14 +684,14 @@ class RallyBase(singlevm.VmReady2):
rconfig.remove_option('DEFAULT', 'log-file')
if rconfig.has_option('DEFAULT', 'log_dir'):
rconfig.remove_option('DEFAULT', 'log_dir')
- with open(rally_conf, 'w') as config_file:
+ with open(rally_conf, 'w', encoding='utf-8') as config_file:
rconfig.write(config_file)
def run(self, **kwargs):
"""Run testcase."""
self.start_time = time.time()
try:
- assert super(RallyBase, self).run(
+ assert super().run(
**kwargs) == testcase.TestCase.EX_OK
self.update_rally_logs(self.res_dir)
self.create_rally_deployment(environ=self.project.get_environ())
@@ -694,9 +699,9 @@ class RallyBase(singlevm.VmReady2):
self.run_tests(**kwargs)
self._generate_report()
self.export_task(
- "{}/{}.html".format(self.results_dir, self.case_name))
+ f"{self.results_dir}/{self.case_name}.html")
self.export_task(
- "{}/{}.xml".format(self.results_dir, self.case_name),
+ f"{self.results_dir}/{self.case_name}.xml",
export_type="junit-xml")
res = testcase.TestCase.EX_OK
except Exception: # pylint: disable=broad-except
@@ -714,7 +719,7 @@ class RallySanity(RallyBase):
"""Initialize RallySanity object."""
if "case_name" not in kwargs:
kwargs["case_name"] = "rally_sanity"
- super(RallySanity, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.smoke = True
self.scenario_dir = os.path.join(self.rally_scenario_dir, 'sanity')
@@ -728,7 +733,7 @@ class RallyFull(RallyBase):
"""Initialize RallyFull object."""
if "case_name" not in kwargs:
kwargs["case_name"] = "rally_full"
- super(RallyFull, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.smoke = False
self.scenario_dir = os.path.join(self.rally_scenario_dir, 'full')
@@ -743,21 +748,21 @@ class RallyJobs(RallyBase):
"""Initialize RallyJobs object."""
if "case_name" not in kwargs:
kwargs["case_name"] = "rally_jobs"
- super(RallyJobs, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.task_file = os.path.join(self.rally_dir, 'rally_jobs.yaml')
self.task_yaml = None
def prepare_run(self, **kwargs):
"""Create resources needed by test scenarios."""
- super(RallyJobs, self).prepare_run(**kwargs)
- with open(os.path.join(self.rally_dir,
- 'rally_jobs.yaml'), 'r') as task_file:
+ super().prepare_run(**kwargs)
+ with open(
+ os.path.join(self.rally_dir, 'rally_jobs.yaml'),
+ 'r', encoding='utf-8') as task_file:
self.task_yaml = yaml.safe_load(task_file)
for task in self.task_yaml:
if task not in self.tests:
- raise Exception("Test '%s' not in '%s'" %
- (task, self.tests))
+ raise Exception(f"Test '{task}' not in '{self.tests}'")
def apply_blacklist(self, case_file_name, result_file_name):
# pylint: disable=too-many-branches
@@ -769,7 +774,7 @@ class RallyJobs(RallyBase):
LOGGER.debug("Blacklisted tests: %s", str(black_tests))
template = YAML(typ='jinja2')
- with open(case_file_name, 'r') as fname:
+ with open(case_file_name, 'r', encoding='utf-8') as fname:
cases = template.load(fname)
if cases.get("version", 1) == 1:
# scenarios in dictionary
@@ -799,7 +804,7 @@ class RallyJobs(RallyBase):
cases['subtasks'].pop(sind)
break
- with open(result_file_name, 'w') as fname:
+ with open(result_file_name, 'w', encoding='utf-8') as fname:
template.dump(cases, fname)
def build_task_args(self, test_name):
@@ -820,7 +825,7 @@ class RallyJobs(RallyBase):
task_name = self.task_yaml.get(test_name).get("task")
task = os.path.join(jobs_dir, task_name)
if not os.path.exists(task):
- raise Exception("The scenario '%s' does not exist." % task)
+ raise Exception(f"The scenario '{task}' does not exist.")
LOGGER.debug('Scenario fetched from : %s', task)
if not os.path.exists(self.temp_dir):
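The rewritten apply_blacklist keeps the original filtering rule: once a scenario line matches a blacklisted pattern, subsequent lines are dropped until the next blank line. A condensed, self-contained version of that loop (illustrative only, not the project's API):

import re

def filter_cases(lines, black_tests):
    include, kept = True, []
    for line in lines:
        if include:
            if any(re.search(b, line.strip().rstrip(':')) for b in black_tests):
                include = False  # start skipping this scenario block
            else:
                kept.append(line)
        elif line.isspace():
            include = True  # blank line ends the skipped block
    return kept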
diff --git a/functest/opnfv_tests/openstack/rally/scenario/full/opnfv-cinder.yaml b/functest/opnfv_tests/openstack/rally/scenario/full/opnfv-cinder.yaml
index 4b3c22ebd..7abeeac68 100644
--- a/functest/opnfv_tests/openstack/rally/scenario/full/opnfv-cinder.yaml
+++ b/functest/opnfv_tests/openstack/rally/scenario/full/opnfv-cinder.yaml
@@ -348,20 +348,6 @@
sla:
{{ no_failures_sla() }}
- CinderVolumeTypes.create_and_update_volume_type:
- -
- args:
- description: "test"
- update_description: "test update"
- context:
- {{ user_context(tenants_amount, users_amount, use_existing_users) }}
- api_versions:
- {{ volume_service(version=volume_version, service_type=volume_service_type) }}
- runner:
- {{ constant_runner(concurrency=concurrency, times=iterations, is_smoke=smoke) }}
- sla:
- {{ no_failures_sla() }}
-
CinderVolumeTypes.create_volume_type_and_encryption_type:
-
args:
@@ -378,16 +364,3 @@
{{ constant_runner(concurrency=concurrency, times=iterations, is_smoke=smoke) }}
sla:
{{ no_failures_sla() }}
-
- CinderVolumeTypes.create_volume_type_add_and_list_type_access:
- -
- args:
- description: "rally tests creating types"
- context:
- {{ user_context(tenants_amount, users_amount, use_existing_users) }}
- api_versions:
- {{ volume_service(version=volume_version, service_type=volume_service_type) }}
- runner:
- {{ constant_runner(concurrency=concurrency, times=iterations, is_smoke=smoke) }}
- sla:
- {{ no_failures_sla() }}
diff --git a/functest/opnfv_tests/openstack/rally/scenario/full/opnfv-nova.yaml b/functest/opnfv_tests/openstack/rally/scenario/full/opnfv-nova.yaml
index 71a159963..210591f9b 100644
--- a/functest/opnfv_tests/openstack/rally/scenario/full/opnfv-nova.yaml
+++ b/functest/opnfv_tests/openstack/rally/scenario/full/opnfv-nova.yaml
@@ -39,9 +39,6 @@
- net-id: {{ netid }}
context:
{% call user_context(tenants_amount, users_amount, use_existing_users) %}
- network:
- networks_per_tenant: 1
- start_cidr: "100.1.0.0/25"
quotas:
{{ unlimited_neutron() }}
{{ unlimited_nova() }}
@@ -59,9 +56,6 @@
- net-id: {{ netid }}
context:
{% call user_context(tenants_amount, users_amount, use_existing_users) %}
- network:
- networks_per_tenant: 1
- start_cidr: "100.1.0.0/25"
quotas:
{{ unlimited_neutron() }}
{{ unlimited_nova() }}
@@ -80,9 +74,6 @@
- net-id: {{ netid }}
context:
{% call user_context(tenants_amount, users_amount, use_existing_users) %}
- network:
- networks_per_tenant: 1
- start_cidr: "100.1.0.0/25"
quotas:
{{ unlimited_neutron() }}
{{ unlimited_nova() }}
@@ -104,9 +95,6 @@
- net-id: {{ netid }}
context:
{% call user_context(tenants_amount, users_amount, use_existing_users) %}
- network:
- networks_per_tenant: 1
- start_cidr: "100.1.0.0/25"
quotas:
{{ unlimited_neutron() }}
{{ unlimited_nova() }}
@@ -124,9 +112,6 @@
- net-id: {{ netid }}
context:
{% call user_context(tenants_amount, users_amount, use_existing_users) %}
- network:
- networks_per_tenant: 1
- start_cidr: "100.1.0.0/25"
quotas:
{{ unlimited_neutron() }}
{{ unlimited_nova() }}
@@ -256,9 +241,6 @@
- net-id: {{ netid }}
context:
{% call user_context(tenants_amount, users_amount, use_existing_users) %}
- network:
- networks_per_tenant: 1
- start_cidr: "100.1.0.0/25"
quotas:
{{ unlimited_neutron() }}
{{ unlimited_nova(keypairs=true) }}
@@ -277,9 +259,6 @@
- net-id: {{ netid }}
context:
{% call user_context(tenants_amount, users_amount, use_existing_users) %}
- network:
- networks_per_tenant: 1
- start_cidr: "100.1.0.0/25"
quotas:
{{ unlimited_volumes() }}
{{ unlimited_neutron() }}
@@ -301,9 +280,6 @@
- net-id: {{ netid }}
context:
{% call user_context(tenants_amount, users_amount, use_existing_users) %}
- network:
- networks_per_tenant: 1
- start_cidr: "100.1.0.0/25"
quotas:
{{ unlimited_neutron() }}
{{ unlimited_nova() }}
@@ -395,8 +371,7 @@
-
args:
{{ vm_params(image_name, flavor_name) }}
- create_floating_ip_args:
- floating_network: {{ floating_network }}
+ floating_network: {{ floating_network }}
nics:
- net-id: {{ netid }}
context:
@@ -412,8 +387,7 @@
-
args:
{{ vm_params(image_name, flavor_name) }}
- create_floating_ip_args:
- floating_network: {{ floating_network }}
+ floating_network: {{ floating_network }}
nics:
- net-id: {{ netid }}
context:
diff --git a/functest/opnfv_tests/openstack/rally/scenario/sanity/opnfv-nova.yaml b/functest/opnfv_tests/openstack/rally/scenario/sanity/opnfv-nova.yaml
index 138e53b00..1fbfccb5a 100644
--- a/functest/opnfv_tests/openstack/rally/scenario/sanity/opnfv-nova.yaml
+++ b/functest/opnfv_tests/openstack/rally/scenario/sanity/opnfv-nova.yaml
@@ -55,9 +55,6 @@
- net-id: {{ netid }}
context:
{% call user_context(tenants_amount, users_amount, use_existing_users) %}
- network:
- networks_per_tenant: 1
- start_cidr: "100.1.0.0/25"
quotas:
{{ unlimited_neutron() }}
{{ unlimited_nova(keypairs=true) }}
@@ -76,9 +73,6 @@
- net-id: {{ netid }}
context:
{% call user_context(tenants_amount, users_amount, use_existing_users) %}
- network:
- networks_per_tenant: 1
- start_cidr: "100.1.0.0/25"
quotas:
{{ unlimited_volumes() }}
{{ unlimited_neutron() }}
@@ -100,9 +94,6 @@
- net-id: {{ netid }}
context:
{% call user_context(tenants_amount, users_amount, use_existing_users) %}
- network:
- networks_per_tenant: 1
- start_cidr: "100.1.0.0/25"
quotas:
{{ unlimited_neutron() }}
{{ unlimited_nova() }}
@@ -128,7 +119,8 @@
-
args:
{{ vm_params(image_name, flavor_name) }}
- auto_assign_nic: true
+ nics:
+ - net-id: {{ netid }}
context:
{% call user_context(tenants_amount, users_amount, use_existing_users) %}
network: {}
@@ -142,8 +134,7 @@
-
args:
{{ vm_params(image_name, flavor_name) }}
- create_floating_ip_args:
- floating_network: {{ floating_network }}
+ floating_network: {{ floating_network }}
nics:
- net-id: {{ netid }}
context:
diff --git a/functest/opnfv_tests/openstack/rally/scenario/templates/server_with_ports.yaml.template b/functest/opnfv_tests/openstack/rally/scenario/templates/server_with_ports.yaml.template
index f1a12c178..75afb2dbe 100644
--- a/functest/opnfv_tests/openstack/rally/scenario/templates/server_with_ports.yaml.template
+++ b/functest/opnfv_tests/openstack/rally/scenario/templates/server_with_ports.yaml.template
@@ -7,7 +7,7 @@ parameters:
default: public
image:
type: string
- default: cirros-0.5.1-x86_64-uec
+ default: cirros-0.6.1-x86_64-uec
flavor:
type: string
default: m1.tiny
diff --git a/functest/opnfv_tests/openstack/refstack/refstack.py b/functest/opnfv_tests/openstack/refstack/refstack.py
index faf183f76..87932020b 100644
--- a/functest/opnfv_tests/openstack/refstack/refstack.py
+++ b/functest/opnfv_tests/openstack/refstack/refstack.py
@@ -26,12 +26,11 @@ class Refstack(tempest.TempestCommon):
def _extract_refstack_data(self, refstack_list):
yaml_data = ""
- with open(refstack_list) as def_file:
+ with open(refstack_list, encoding='utf-8') as def_file:
for line in def_file:
try:
grp = re.search(r'^([^\[]*)(\[.*\])\n*$', line)
- yaml_data = "{}\n{}: {}".format(
- yaml_data, grp.group(1), grp.group(2))
+ yaml_data = f"{yaml_data}\n{grp.group(1)}: {grp.group(2)}"
except Exception: # pylint: disable=broad-except
self.__logger.warning("Cannot parse %s", line)
return yaml.full_load(yaml_data)
@@ -53,8 +52,7 @@ class Refstack(tempest.TempestCommon):
for line in output.splitlines():
try:
grp = re.search(r'^([^\[]*)(\[.*\])\n*$', line.decode("utf-8"))
- yaml_data2 = "{}\n{}: {}".format(
- yaml_data2, grp.group(1), grp.group(2))
+ yaml_data2 = f"{yaml_data2}\n{grp.group(1)}: {grp.group(2)}"
except Exception: # pylint: disable=broad-except
self.__logger.warning("Cannot parse %s. skipping it", line)
return yaml.full_load(yaml_data2)
@@ -62,11 +60,11 @@ class Refstack(tempest.TempestCommon):
def generate_test_list(self, **kwargs):
refstack_list = os.path.join(
getattr(config.CONF, 'dir_refstack_data'),
- "{}.txt".format(kwargs.get('target', 'compute')))
+ f"{kwargs.get('target', 'compute')}.txt")
self.backup_tempest_config(self.conf_file, '/etc')
refstack_data = self._extract_refstack_data(refstack_list)
tempest_data = self._extract_tempest_data()
- with open(self.list, 'w') as ref_file:
+ with open(self.list, 'w', encoding='utf-8') as ref_file:
for key in refstack_data.keys():
try:
for data in tempest_data[key]:
@@ -75,9 +73,9 @@ class Refstack(tempest.TempestCommon):
else:
self.__logger.info("%s: ids differ. skipping it", key)
continue
- ref_file.write("{}{}\n".format(
- key, str(tempest_data[key]).replace(
- "'", "").replace(", ", ",")))
+ value = str(tempest_data[key]).replace(
+ "'", "").replace(", ", ",")
+ ref_file.write(f"{key}{value}\n")
except Exception: # pylint: disable=broad-except
self.__logger.info("%s: not found. skipping it", key)
continue
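For reference, the parsing regex kept in _extract_refstack_data splits each interop list entry into the test path and its bracketed id/attribute list; the sample line below is made up for demonstration:

import re

line = "tempest.api.compute.test_servers.ServersTest.test_list[id-1234,smoke]"
grp = re.search(r'^([^\[]*)(\[.*\])\n*$', line)
print(grp.group(1))  # tempest.api.compute.test_servers.ServersTest.test_list
print(grp.group(2))  # [id-1234,smoke]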
diff --git a/functest/opnfv_tests/openstack/shaker/shaker.py b/functest/opnfv_tests/openstack/shaker/shaker.py
index 917c65980..275cc3077 100644
--- a/functest/opnfv_tests/openstack/shaker/shaker.py
+++ b/functest/opnfv_tests/openstack/shaker/shaker.py
@@ -32,7 +32,7 @@ class Shaker(singlevm.SingleVm2):
__logger = logging.getLogger(__name__)
- filename = '/home/opnfv/functest/images/shaker-image-1.3.0+stretch.qcow2'
+ filename = '/home/opnfv/functest/images/shaker-image-1.3.4+stretch.qcow2'
flavor_ram = 512
flavor_vcpus = 1
flavor_disk = 3
@@ -47,7 +47,7 @@ class Shaker(singlevm.SingleVm2):
check_console_loop = 12
def __init__(self, **kwargs):
- super(Shaker, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.role = None
def check_requirements(self):
@@ -57,7 +57,7 @@ class Shaker(singlevm.SingleVm2):
self.project.clean()
def prepare(self):
- super(Shaker, self).prepare()
+ super().prepare()
self.cloud.create_security_group_rule(
self.sec.id, port_range_min=self.port, port_range_max=self.port,
protocol='tcp', direction='ingress')
@@ -95,33 +95,31 @@ class Shaker(singlevm.SingleVm2):
scpc.put('/home/opnfv/functest/conf/env_file', remote_path='~/')
if os.environ.get('OS_CACERT'):
scpc.put(os.environ.get('OS_CACERT'), remote_path='~/os_cacert')
+ opt = 'export OS_CACERT=~/os_cacert && ' if os.environ.get(
+ 'OS_CACERT') else ''
(_, stdout, stderr) = self.ssh.exec_command(
'source ~/env_file && '
'export OS_INTERFACE=public && '
- 'export OS_AUTH_URL={} && '
- 'export OS_USERNAME={} && '
- 'export OS_PROJECT_NAME={} && '
- 'export OS_PROJECT_ID={} && '
+ f'export OS_AUTH_URL={endpoint} && '
+ f'export OS_USERNAME={self.project.user.name} && '
+ f'export OS_PROJECT_NAME={self.project.project.name} && '
+ f'export OS_PROJECT_ID={self.project.project.id} && '
'unset OS_TENANT_NAME && '
'unset OS_TENANT_ID && '
'unset OS_ENDPOINT_TYPE && '
- 'export OS_PASSWORD="{}" && '
- '{}'
+ f'export OS_PASSWORD="{self.project.password}" && '
+ f'{opt}'
'env && '
- 'timeout {} shaker --debug --image-name {} --flavor-name {} '
- '--server-endpoint {}:9000 --external-net {} --dns-nameservers {} '
+ f'timeout {self.shaker_timeout} shaker --debug '
+ f'--image-name {self.image.name} --flavor-name {self.flavor.name} '
+ f'--server-endpoint {self.fip.floating_ip_address}:9000 '
+ f'--external-net {self.ext_net.id} '
+ f"--dns-nameservers {env.get('NAMESERVER')} "
'--scenario openstack/full_l2,'
'openstack/full_l3_east_west,'
'openstack/full_l3_north_south,'
'openstack/perf_l3_north_south '
- '--report report.html --output report.json'.format(
- endpoint, self.project.user.name, self.project.project.name,
- self.project.project.id, self.project.password,
- 'export OS_CACERT=~/os_cacert && ' if os.environ.get(
- 'OS_CACERT') else '',
- self.shaker_timeout, self.image.name, self.flavor.name,
- self.fip.floating_ip_address, self.ext_net.id,
- env.get('NAMESERVER')))
+ '--report report.html --output report.json')
self.__logger.info("output:\n%s", stdout.read().decode("utf-8"))
self.__logger.info("error:\n%s", stderr.read().decode("utf-8"))
if not os.path.exists(self.res_dir):
@@ -132,7 +130,9 @@ class Shaker(singlevm.SingleVm2):
except scp.SCPException:
self.__logger.exception("cannot get report files")
return 1
- with open(os.path.join(self.res_dir, 'report.json')) as json_file:
+ with open(
+ os.path.join(self.res_dir, 'report.json'),
+ encoding='utf-8') as json_file:
data = json.load(json_file)
for value in data["records"].values():
if value["status"] != "ok":
@@ -142,6 +142,6 @@ class Shaker(singlevm.SingleVm2):
return stdout.channel.recv_exit_status()
def clean(self):
- super(Shaker, self).clean()
+ super().clean()
if self.role:
self.orig_cloud.delete_role(self.role.id)
diff --git a/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml b/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml
index a444a54f5..0ee4ab613 100644
--- a/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml
+++ b/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml
@@ -1,5 +1,6 @@
---
compute:
+ min_microversion: '2.44'
max_microversion: latest
compute-feature-enabled:
attach_encrypted_volume: false
@@ -11,6 +12,7 @@ compute-feature-enabled:
console_output: true
disk_config: true
enable_instance_password: true
+ hostname_fqdn_sanitization: true
interface_attach: true
live_migration: true
live_migrate_back_and_forth: false
@@ -46,13 +48,14 @@ identity-feature-enabled:
external_idp: false
project_tags: true
application_credentials: true
+ access_rules: true
image-feature-enabled:
api_v2: true
api_v1: false
+ import_image: false
network-feature-enabled:
port_admin_state_change: true
port_security: true
- floating_ips: true
placement:
max_microversion: latest
validation:
@@ -60,7 +63,6 @@ validation:
ssh_timeout: 196
ip_version_for_ssh: 4
run_validation: true
- connect_method: floating
volume:
max_microversion: latest
storage_protocol: ceph
@@ -73,7 +75,8 @@ volume-feature-enabled:
clone: true
manage_snapshot: true
manage_volume: true
- extend_attached_volume: false
+ extend_attached_volume: true
+ extend_attached_encrypted_volume: false
consistency_group: false
volume_revert: true
load_balancer:
@@ -81,13 +84,14 @@ load_balancer:
neutron_plugin_options:
agent_availability_zone: nova
available_type_drivers: flat,geneve,vlan,gre,local,vxlan
- provider_vlans: foo,
+ provider_vlans: public,
create_shared_resources: true
object-storage-feature-enabled:
discoverable_apis: "account_quotas,formpost,bulk_upload,bulk_delete,\
tempurl,crossdomain,container_quotas,staticweb,account_quotas,slo"
object_versioning: true
discoverability: true
+ tempurl_digest_hashlib: sha1
heat_plugin:
skip_functional_test_list: EncryptionVolTypeTest
skip_scenario_test_list: "AodhAlarmTest,SoftwareConfigIntegrationTest,\
diff --git a/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf_ovn.yaml b/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf_ovn.yaml
index 141f295b4..6b09d8e5a 100644
--- a/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf_ovn.yaml
+++ b/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf_ovn.yaml
@@ -1,5 +1,6 @@
---
compute:
+ min_microversion: '2.44'
max_microversion: latest
compute-feature-enabled:
attach_encrypted_volume: false
@@ -11,6 +12,7 @@ compute-feature-enabled:
console_output: true
disk_config: true
enable_instance_password: true
+ hostname_fqdn_sanitization: true
interface_attach: true
live_migration: true
live_migrate_back_and_forth: false
@@ -46,13 +48,14 @@ identity-feature-enabled:
external_idp: false
project_tags: true
application_credentials: true
+ access_rules: true
image-feature-enabled:
api_v2: true
api_v1: false
+ import_image: false
network-feature-enabled:
port_admin_state_change: true
port_security: true
- floating_ips: true
placement:
max_microversion: latest
validation:
@@ -60,7 +63,6 @@ validation:
ssh_timeout: 196
ip_version_for_ssh: 4
run_validation: true
- connect_method: floating
volume:
max_microversion: latest
storage_protocol: ceph
@@ -73,7 +75,8 @@ volume-feature-enabled:
clone: true
manage_snapshot: true
manage_volume: true
- extend_attached_volume: false
+ extend_attached_volume: true
+ extend_attached_encrypted_volume: false
consistency_group: false
volume_revert: true
load_balancer:
@@ -88,6 +91,7 @@ object-storage-feature-enabled:
tempurl,crossdomain,container_quotas,staticweb,account_quotas,slo"
object_versioning: true
discoverability: true
+ tempurl_digest_hashlib: sha1
heat_plugin:
skip_functional_test_list: EncryptionVolTypeTest
skip_scenario_test_list: "AodhAlarmTest,SoftwareConfigIntegrationTest,\
diff --git a/functest/opnfv_tests/openstack/tempest/tempest.py b/functest/opnfv_tests/openstack/tempest/tempest.py
index d2c54262a..7233ffd60 100644
--- a/functest/opnfv_tests/openstack/tempest/tempest.py
+++ b/functest/opnfv_tests/openstack/tempest/tempest.py
@@ -39,7 +39,7 @@ class TempestCommon(singlevm.VmReady2):
"""TempestCommon testcases implementation class."""
visibility = 'public'
- filename_alt = '/home/opnfv/functest/images/cirros-0.4.0-x86_64-disk.img'
+ filename_alt = '/home/opnfv/functest/images/cirros-0.6.1-x86_64-disk.img'
shared_network = True
tempest_conf_yaml = pkg_resources.resource_filename(
'functest',
@@ -57,7 +57,7 @@ class TempestCommon(singlevm.VmReady2):
def __init__(self, **kwargs):
if "case_name" not in kwargs:
kwargs["case_name"] = 'tempest'
- super(TempestCommon, self).__init__(**kwargs)
+ super().__init__(**kwargs)
assert self.orig_cloud
assert self.cloud
assert self.project
@@ -128,7 +128,7 @@ class TempestCommon(singlevm.VmReady2):
@staticmethod
def read_file(filename):
"""Read file and return content as a stripped list."""
- with open(filename) as src:
+ with open(filename, encoding='utf-8') as src:
return [line.strip() for line in src.readlines()]
@staticmethod
@@ -142,22 +142,22 @@ class TempestCommon(singlevm.VmReady2):
}
cmd = ["rally", "verify", "show", "--uuid", verif_id]
LOGGER.info("Showing result for a verification: '%s'.", cmd)
- proc = subprocess.Popen(cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- for line in proc.stdout:
- LOGGER.info(line.decode("utf-8").rstrip())
- new_line = line.decode("utf-8").replace(' ', '').split('|')
- if 'Tests' in new_line:
- break
- if 'Testscount' in new_line:
- result['num_tests'] = int(new_line[2])
- elif 'Success' in new_line:
- result['num_success'] = int(new_line[2])
- elif 'Skipped' in new_line:
- result['num_skipped'] = int(new_line[2])
- elif 'Failures' in new_line:
- result['num_failures'] = int(new_line[2])
+ with subprocess.Popen(
+ cmd, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT) as proc:
+ for line in proc.stdout:
+ LOGGER.info(line.decode("utf-8").rstrip())
+ new_line = line.decode("utf-8").replace(' ', '').split('|')
+ if 'Tests' in new_line:
+ break
+ if 'Testscount' in new_line:
+ result['num_tests'] = int(new_line[2])
+ elif 'Success' in new_line:
+ result['num_success'] = int(new_line[2])
+ elif 'Skipped' in new_line:
+ result['num_skipped'] = int(new_line[2])
+ elif 'Failures' in new_line:
+ result['num_failures'] = int(new_line[2])
return result
@staticmethod
@@ -199,10 +199,10 @@ class TempestCommon(singlevm.VmReady2):
cmd = ("rally verify list-verifiers | awk '/" +
getattr(config.CONF, 'tempest_verifier_name') +
"/ {print $2}'")
- proc = subprocess.Popen(cmd, shell=True,
- stdout=subprocess.PIPE,
- stderr=subprocess.DEVNULL)
- verifier_uuid = proc.stdout.readline().rstrip()
+ with subprocess.Popen(
+ cmd, shell=True, stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL) as proc:
+ verifier_uuid = proc.stdout.readline().rstrip()
return verifier_uuid.decode("utf-8")
@staticmethod
@@ -212,7 +212,7 @@ class TempestCommon(singlevm.VmReady2):
"""
return os.path.join(getattr(config.CONF, 'dir_rally_inst'),
'verification',
- 'verifier-{}'.format(verifier_id),
+ f'verifier-{verifier_id}',
'repo')
@staticmethod
@@ -222,13 +222,13 @@ class TempestCommon(singlevm.VmReady2):
"""
return os.path.join(getattr(config.CONF, 'dir_rally_inst'),
'verification',
- 'verifier-{}'.format(verifier_id),
- 'for-deployment-{}'.format(deployment_id))
+ f'verifier-{verifier_id}',
+ f'for-deployment-{deployment_id}')
@staticmethod
def update_tempest_conf_file(conf_file, rconfig):
"""Update defined paramters into tempest config file"""
- with open(TempestCommon.tempest_conf_yaml) as yfile:
+ with open(TempestCommon.tempest_conf_yaml, encoding='utf-8') as yfile:
conf_yaml = yaml.safe_load(yfile)
if conf_yaml:
sections = rconfig.sections()
@@ -239,7 +239,7 @@ class TempestCommon(singlevm.VmReady2):
for key, value in sub_conf.items():
rconfig.set(section, key, value)
- with open(conf_file, 'w') as config_file:
+ with open(conf_file, 'w', encoding='utf-8') as config_file:
rconfig.write(config_file)
@staticmethod
@@ -272,17 +272,6 @@ class TempestCommon(singlevm.VmReady2):
rconfig.set('compute-feature-enabled', 'live_migration', True)
if os.environ.get('OS_REGION_NAME'):
rconfig.set('identity', 'region', os.environ.get('OS_REGION_NAME'))
- if env.get("NEW_USER_ROLE").lower() != "member":
- rconfig.set(
- 'auth', 'tempest_roles',
- functest_utils.convert_list_to_ini([env.get("NEW_USER_ROLE")]))
- if not json.loads(env.get("USE_DYNAMIC_CREDENTIALS").lower()):
- rconfig.set('auth', 'use_dynamic_credentials', False)
- account_file = os.path.join(
- getattr(config.CONF, 'dir_functest_data'), 'accounts.yaml')
- assert os.path.exists(
- account_file), "{} doesn't exist".format(account_file)
- rconfig.set('auth', 'test_accounts_file', account_file)
rconfig.set('identity', 'admin_role', admin_role_name)
rconfig.set('identity', 'default_domain_id', domain_id)
if not rconfig.has_section('network'):
@@ -335,13 +324,13 @@ class TempestCommon(singlevm.VmReady2):
shutil.copyfile(
self.tempest_custom, self.list)
else:
- raise Exception("Tempest test list file %s NOT found."
- % self.tempest_custom)
+ raise Exception(
+ f"Tempest test list file {self.tempest_custom} NOT found.")
else:
testr_mode = kwargs.get(
'mode', r'^tempest\.(api|scenario).*\[.*\bsmoke\b.*\]$')
- cmd = "(cd {0}; stestr list '{1}' >{2} 2>/dev/null)".format(
- self.verifier_repo_dir, testr_mode, self.list)
+ cmd = (f"(cd {self.verifier_repo_dir}; "
+ f"stestr list '{testr_mode}' > {self.list} 2>/dev/null)")
output = subprocess.check_output(cmd, shell=True)
LOGGER.info("%s\n%s", cmd, output.decode("utf-8"))
os.remove('/etc/tempest.conf')
@@ -353,32 +342,31 @@ class TempestCommon(singlevm.VmReady2):
os.remove(self.raw_list)
os.rename(self.list, self.raw_list)
cases_file = self.read_file(self.raw_list)
- result_file = open(self.list, 'w')
- black_tests = []
- try:
- deploy_scenario = env.get('DEPLOY_SCENARIO')
- if bool(deploy_scenario):
- # if DEPLOY_SCENARIO is set we read the file
- black_list_file = open(black_list)
- black_list_yaml = yaml.safe_load(black_list_file)
- black_list_file.close()
- for item in black_list_yaml:
- scenarios = item['scenarios']
- in_it = rally.RallyBase.in_iterable_re
- if in_it(deploy_scenario, scenarios):
- tests = item['tests']
- black_tests.extend(tests)
- except Exception: # pylint: disable=broad-except
+ with open(self.list, 'w', encoding='utf-8') as result_file:
black_tests = []
- LOGGER.debug("Tempest blacklist file does not exist.")
+ try:
+ deploy_scenario = env.get('DEPLOY_SCENARIO')
+ if bool(deploy_scenario):
+ # if DEPLOY_SCENARIO is set we read the file
+ with open(black_list, encoding='utf-8') as black_list_file:
+ black_list_yaml = yaml.safe_load(black_list_file)
+ black_list_file.close()
+ for item in black_list_yaml:
+ scenarios = item['scenarios']
+ in_it = rally.RallyBase.in_iterable_re
+ if in_it(deploy_scenario, scenarios):
+ tests = item['tests']
+ black_tests.extend(tests)
+ except Exception: # pylint: disable=broad-except
+ black_tests = []
+ LOGGER.debug("Tempest blacklist file does not exist.")
- for cases_line in cases_file:
- for black_tests_line in black_tests:
- if re.search(black_tests_line, cases_line):
- break
- else:
- result_file.write(str(cases_line) + '\n')
- result_file.close()
+ for cases_line in cases_file:
+ for black_tests_line in black_tests:
+ if re.search(black_tests_line, cases_line):
+ break
+ else:
+ result_file.write(str(cases_line) + '\n')
def run_verifier_tests(self, **kwargs):
"""Execute tempest test cases."""
@@ -387,33 +375,31 @@ class TempestCommon(singlevm.VmReady2):
cmd.extend(kwargs.get('option', []))
LOGGER.info("Starting Tempest test suite: '%s'.", cmd)
- f_stdout = open(
- os.path.join(self.res_dir, "tempest.log"), 'w+')
-
- proc = subprocess.Popen(
- cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- bufsize=1)
-
- with proc.stdout:
- for line in iter(proc.stdout.readline, b''):
- if re.search(r"\} tempest\.", line.decode("utf-8")):
- LOGGER.info(line.rstrip())
- elif re.search(r'(?=\(UUID=(.*)\))', line.decode("utf-8")):
- self.verification_id = re.search(
- r'(?=\(UUID=(.*)\))', line.decode("utf-8")).group(1)
- f_stdout.write(line.decode("utf-8"))
- proc.wait()
- f_stdout.close()
+ with open(
+ os.path.join(self.res_dir, "tempest.log"), 'w+',
+ encoding='utf-8') as f_stdout:
+ with subprocess.Popen(
+ cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+ bufsize=1) as proc:
+ with proc.stdout:
+ for line in iter(proc.stdout.readline, b''):
+ if re.search(r"\} tempest\.", line.decode("utf-8")):
+ LOGGER.info(line.rstrip())
+ elif re.search(r'(?=\(UUID=(.*)\))',
+ line.decode("utf-8")):
+ self.verification_id = re.search(
+ r'(?=\(UUID=(.*)\))',
+ line.decode("utf-8")).group(1)
+ f_stdout.write(line.decode("utf-8"))
+ proc.wait()
if self.verification_id is None:
raise Exception('Verification UUID not found')
LOGGER.info('Verification UUID: %s', self.verification_id)
shutil.copy(
- "{}/tempest.log".format(self.deployment_dir),
- "{}/tempest.debug.log".format(self.res_dir))
+ f"{self.deployment_dir}/tempest.log",
+ f"{self.res_dir}/tempest.debug.log")
def parse_verifier_result(self):
"""Parse and save test results."""
@@ -430,8 +416,8 @@ class TempestCommon(singlevm.VmReady2):
LOGGER.error("No test has been executed")
return
- with open(os.path.join(self.res_dir,
- "rally.log"), 'r') as logfile:
+ with open(os.path.join(self.res_dir, "rally.log"),
+ 'r', encoding='utf-8') as logfile:
output = logfile.read()
success_testcases = []
@@ -466,9 +452,8 @@ class TempestCommon(singlevm.VmReady2):
rconfig.read(rally_conf)
if not rconfig.has_section('openstack'):
rconfig.add_section('openstack')
- rconfig.set('openstack', 'img_name_regex', '^{}$'.format(
- self.image.name))
- with open(rally_conf, 'w') as config_file:
+ rconfig.set('openstack', 'img_name_regex', f'^{self.image.name}$')
+ with open(rally_conf, 'w', encoding='utf-8') as config_file:
rconfig.write(config_file)
def update_default_role(self, rally_conf='/etc/rally/rally.conf'):
@@ -481,7 +466,7 @@ class TempestCommon(singlevm.VmReady2):
if not rconfig.has_section('openstack'):
rconfig.add_section('openstack')
rconfig.set('openstack', 'swift_operator_role', role.name)
- with open(rally_conf, 'w') as config_file:
+ with open(rally_conf, 'w', encoding='utf-8') as config_file:
rconfig.write(config_file)
@staticmethod
@@ -493,18 +478,51 @@ class TempestCommon(singlevm.VmReady2):
rconfig.remove_option('openstack', 'img_name_regex')
if rconfig.has_option('openstack', 'swift_operator_role'):
rconfig.remove_option('openstack', 'swift_operator_role')
- with open(rally_conf, 'w') as config_file:
+ with open(rally_conf, 'w', encoding='utf-8') as config_file:
+ rconfig.write(config_file)
+
+ def update_auth_section(self):
+ """Update auth section in tempest.conf"""
+ rconfig = configparser.RawConfigParser()
+ rconfig.read(self.conf_file)
+ if not rconfig.has_section("auth"):
+ rconfig.add_section("auth")
+ if env.get("NEW_USER_ROLE").lower() != "member":
+ tempest_roles = []
+ if rconfig.has_option("auth", "tempest_roles"):
+ tempest_roles = functest_utils.convert_ini_to_list(
+ rconfig.get("auth", "tempest_roles"))
+ rconfig.set(
+ 'auth', 'tempest_roles',
+ functest_utils.convert_list_to_ini(
+ [env.get("NEW_USER_ROLE")] + tempest_roles))
+ if not json.loads(env.get("USE_DYNAMIC_CREDENTIALS").lower()):
+ rconfig.set('auth', 'use_dynamic_credentials', False)
+ account_file = os.path.join(
+ getattr(config.CONF, 'dir_functest_data'), 'accounts.yaml')
+ assert os.path.exists(
+ account_file), f"{account_file} doesn't exist"
+ rconfig.set('auth', 'test_accounts_file', account_file)
+ if env.get('NO_TENANT_NETWORK').lower() == 'true':
+ rconfig.set('auth', 'create_isolated_networks', False)
+ with open(self.conf_file, 'w', encoding='utf-8') as config_file:
rconfig.write(config_file)
def update_network_section(self):
"""Update network section in tempest.conf"""
rconfig = configparser.RawConfigParser()
rconfig.read(self.conf_file)
- if not rconfig.has_section('network'):
- rconfig.add_section('network')
- rconfig.set('network', 'public_network_id', self.ext_net.id)
- rconfig.set('network', 'floating_network_name', self.ext_net.name)
- with open(self.conf_file, 'w') as config_file:
+ if self.ext_net:
+ if not rconfig.has_section('network'):
+ rconfig.add_section('network')
+ rconfig.set('network', 'public_network_id', self.ext_net.id)
+ rconfig.set('network', 'floating_network_name', self.ext_net.name)
+ rconfig.set('network-feature-enabled', 'floating_ips', True)
+ else:
+ if not rconfig.has_section('network-feature-enabled'):
+ rconfig.add_section('network-feature-enabled')
+ rconfig.set('network-feature-enabled', 'floating_ips', False)
+ with open(self.conf_file, 'w', encoding='utf-8') as config_file:
rconfig.write(config_file)
def update_compute_section(self):
@@ -513,8 +531,10 @@ class TempestCommon(singlevm.VmReady2):
rconfig.read(self.conf_file)
if not rconfig.has_section('compute'):
rconfig.add_section('compute')
- rconfig.set('compute', 'fixed_network_name', self.network.name)
- with open(self.conf_file, 'w') as config_file:
+ rconfig.set(
+ 'compute', 'fixed_network_name',
+ self.network.name if self.network else env.get("EXTERNAL_NETWORK"))
+ with open(self.conf_file, 'w', encoding='utf-8') as config_file:
rconfig.write(config_file)
def update_validation_section(self):
@@ -523,8 +543,13 @@ class TempestCommon(singlevm.VmReady2):
rconfig.read(self.conf_file)
if not rconfig.has_section('validation'):
rconfig.add_section('validation')
- rconfig.set('validation', 'network_for_ssh', self.network.name)
- with open(self.conf_file, 'w') as config_file:
+ rconfig.set(
+ 'validation', 'connect_method',
+ 'floating' if self.ext_net else 'fixed')
+ rconfig.set(
+ 'validation', 'network_for_ssh',
+ self.network.name if self.network else env.get("EXTERNAL_NETWORK"))
+ with open(self.conf_file, 'w', encoding='utf-8') as config_file:
rconfig.write(config_file)
def update_scenario_section(self):
@@ -532,13 +557,12 @@ class TempestCommon(singlevm.VmReady2):
rconfig = configparser.RawConfigParser()
rconfig.read(self.conf_file)
filename = getattr(
- config.CONF, '{}_image'.format(self.case_name), self.filename)
+ config.CONF, f'{self.case_name}_image', self.filename)
if not rconfig.has_section('scenario'):
rconfig.add_section('scenario')
- rconfig.set('scenario', 'img_file', os.path.basename(filename))
- rconfig.set('scenario', 'img_dir', os.path.dirname(filename))
+ rconfig.set('scenario', 'img_file', filename)
rconfig.set('scenario', 'img_disk_format', getattr(
- config.CONF, '{}_image_format'.format(self.case_name),
+ config.CONF, f'{self.case_name}_image_format',
self.image_format))
extra_properties = self.extra_properties.copy()
if env.get('IMAGE_PROPERTIES'):
@@ -546,12 +570,24 @@ class TempestCommon(singlevm.VmReady2):
functest_utils.convert_ini_to_dict(
env.get('IMAGE_PROPERTIES')))
extra_properties.update(
- getattr(config.CONF, '{}_extra_properties'.format(
- self.case_name), {}))
+ getattr(config.CONF, f'{self.case_name}_extra_properties', {}))
rconfig.set(
'scenario', 'img_properties',
functest_utils.convert_dict_to_ini(extra_properties))
- with open(self.conf_file, 'w') as config_file:
+ with open(self.conf_file, 'w', encoding='utf-8') as config_file:
+ rconfig.write(config_file)
+
+ def update_dashboard_section(self):
+ """Update dashboard section in tempest.conf"""
+ rconfig = configparser.RawConfigParser()
+ rconfig.read(self.conf_file)
+ if env.get('DASHBOARD_URL'):
+ if not rconfig.has_section('dashboard'):
+ rconfig.add_section('dashboard')
+ rconfig.set('dashboard', 'dashboard_url', env.get('DASHBOARD_URL'))
+ else:
+ rconfig.set('service_available', 'horizon', False)
+ with open(self.conf_file, 'w', encoding='utf-8') as config_file:
rconfig.write(config_file)
def configure(self, **kwargs): # pylint: disable=unused-argument
@@ -591,16 +627,18 @@ class TempestCommon(singlevm.VmReady2):
flavor_alt_id=self.flavor_alt.id,
admin_role_name=self.role_name, cidr=self.cidr,
domain_id=self.project.domain.id)
+ self.update_auth_section()
self.update_network_section()
self.update_compute_section()
self.update_validation_section()
self.update_scenario_section()
+ self.update_dashboard_section()
self.backup_tempest_config(self.conf_file, self.res_dir)
def run(self, **kwargs):
self.start_time = time.time()
try:
- assert super(TempestCommon, self).run(
+ assert super().run(
**kwargs) == testcase.TestCase.EX_OK
if not os.path.exists(self.res_dir):
os.makedirs(self.res_dir)
@@ -640,7 +678,7 @@ class TempestCommon(singlevm.VmReady2):
self.cloud.delete_image(self.image_alt)
if self.flavor_alt:
self.orig_cloud.delete_flavor(self.flavor_alt.id)
- super(TempestCommon, self).clean()
+ super().clean()
def is_successful(self):
"""The overall result of the test."""
@@ -650,22 +688,7 @@ class TempestCommon(singlevm.VmReady2):
if self.tests_count and (
self.details.get("tests_number", 0) != self.tests_count):
return testcase.TestCase.EX_TESTCASE_FAILED
- return super(TempestCommon, self).is_successful()
-
-
-class TempestHorizon(TempestCommon):
- """Tempest Horizon testcase implementation class."""
-
- def configure(self, **kwargs):
- super(TempestHorizon, self).configure(**kwargs)
- rconfig = configparser.RawConfigParser()
- rconfig.read(self.conf_file)
- if not rconfig.has_section('dashboard'):
- rconfig.add_section('dashboard')
- rconfig.set('dashboard', 'dashboard_url', env.get('DASHBOARD_URL'))
- with open(self.conf_file, 'w') as config_file:
- rconfig.write(config_file)
- self.backup_tempest_config(self.conf_file, self.res_dir)
+ return super().is_successful()
class TempestHeat(TempestCommon):
@@ -678,11 +701,14 @@ class TempestHeat(TempestCommon):
flavor_alt_disk = 4
def __init__(self, **kwargs):
- super(TempestHeat, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.user2 = self.orig_cloud.create_user(
- name='{}-user2_{}'.format(self.case_name, self.project.guid),
+ name=f'{self.case_name}-user2_{self.project.guid}',
password=self.project.password,
domain_id=self.project.domain.id)
+ self.orig_cloud.grant_role(
+ self.role_name, user=self.user2.id,
+ project=self.project.project.id, domain=self.project.domain.id)
if not self.orig_cloud.get_role("heat_stack_owner"):
self.role = self.orig_cloud.create_role("heat_stack_owner")
self.orig_cloud.grant_role(
@@ -692,7 +718,7 @@ class TempestHeat(TempestCommon):
def configure(self, **kwargs):
assert self.user2
- super(TempestHeat, self).configure(**kwargs)
+ super().configure(**kwargs)
rconfig = configparser.RawConfigParser()
rconfig.read(self.conf_file)
if not rconfig.has_section('heat_plugin'):
@@ -719,11 +745,23 @@ class TempestHeat(TempestCommon):
rconfig.set('heat_plugin', 'instance_type', self.flavor_alt.id)
rconfig.set('heat_plugin', 'minimal_image_ref', self.image.id)
rconfig.set('heat_plugin', 'minimal_instance_type', self.flavor.id)
- rconfig.set('heat_plugin', 'floating_network_name', self.ext_net.name)
- rconfig.set('heat_plugin', 'fixed_network_name', self.network.name)
- rconfig.set('heat_plugin', 'fixed_subnet_name', self.subnet.name)
- rconfig.set('heat_plugin', 'network_for_ssh', self.network.name)
- with open(self.conf_file, 'w') as config_file:
+ if self.ext_net:
+ rconfig.set(
+ 'heat_plugin', 'floating_network_name', self.ext_net.name)
+ if self.network:
+ rconfig.set('heat_plugin', 'fixed_network_name', self.network.name)
+ rconfig.set('heat_plugin', 'fixed_subnet_name', self.subnet.name)
+ rconfig.set('heat_plugin', 'network_for_ssh', self.network.name)
+ else:
+ LOGGER.warning(
+ 'No tenant network created. '
+ 'Trying EXTERNAL_NETWORK as a fallback')
+ rconfig.set(
+ 'heat_plugin', 'fixed_network_name',
+ env.get("EXTERNAL_NETWORK"))
+ rconfig.set(
+ 'heat_plugin', 'network_for_ssh', env.get("EXTERNAL_NETWORK"))
+ with open(self.conf_file, 'w', encoding='utf-8') as config_file:
rconfig.write(config_file)
self.backup_tempest_config(self.conf_file, self.res_dir)
@@ -731,6 +769,6 @@ class TempestHeat(TempestCommon):
"""
Cleanup all OpenStack objects. Should be called on completion.
"""
- super(TempestHeat, self).clean()
+ super().clean()
if self.user2:
self.orig_cloud.delete_user(self.user2.id)
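Illustrative only: a condensed, standalone sketch of the connectivity logic introduced above in update_network_section()/update_validation_section() (floating access when an external network exists, fixed access otherwise). The helper name, file path, and arguments are hypothetical; the real code reads self.ext_net/self.network inside TempestCommon.

import configparser


def set_connectivity(conf_file, ext_net_id=None, network_name="public"):
    """Write tempest [network]/[validation] options for floating or fixed access."""
    rconfig = configparser.RawConfigParser()
    rconfig.read(conf_file)
    for section in ('network', 'network-feature-enabled', 'validation'):
        if not rconfig.has_section(section):
            rconfig.add_section(section)
    if ext_net_id:
        # floating IPs are usable: point tempest at the external network
        rconfig.set('network', 'public_network_id', ext_net_id)
        rconfig.set('network-feature-enabled', 'floating_ips', True)
        rconfig.set('validation', 'connect_method', 'floating')
    else:
        # no external network: connect to guests over the fixed network
        rconfig.set('network-feature-enabled', 'floating_ips', False)
        rconfig.set('validation', 'connect_method', 'fixed')
    rconfig.set('validation', 'network_for_ssh', network_name)
    with open(conf_file, 'w', encoding='utf-8') as config_file:
        rconfig.write(config_file)


# hypothetical usage
set_connectivity('/tmp/tempest.conf', ext_net_id=None, network_name='ext-net')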
diff --git a/functest/opnfv_tests/openstack/vmtp/vmtp.py b/functest/opnfv_tests/openstack/vmtp/vmtp.py
index 1686489b8..9833cc72a 100644
--- a/functest/opnfv_tests/openstack/vmtp/vmtp.py
+++ b/functest/opnfv_tests/openstack/vmtp/vmtp.py
@@ -56,8 +56,8 @@ class Vmtp(singlevm.VmReady2):
def __init__(self, **kwargs):
if "case_name" not in kwargs:
kwargs["case_name"] = 'vmtp'
- super(Vmtp, self).__init__(**kwargs)
- self.config = "{}/vmtp.conf".format(self.res_dir)
+ super().__init__(**kwargs)
+ self.config = f"{self.res_dir}/vmtp.conf"
(_, self.privkey_filename) = tempfile.mkstemp()
(_, self.pubkey_filename) = tempfile.mkstemp()
@@ -77,7 +77,7 @@ class Vmtp(singlevm.VmReady2):
assert self.cloud
assert self.ext_net
self.router = self.cloud.create_router(
- name='{}-router_{}'.format(self.case_name, self.guid),
+ name=f'{self.case_name}-router_{self.guid}',
ext_gateway_net_id=self.ext_net.id)
self.__logger.debug("router: %s", self.router)
@@ -87,13 +87,13 @@ class Vmtp(singlevm.VmReady2):
Raises: Exception on error
"""
assert self.cloud
- name = "vmtp_{}".format(self.guid)
+ name = f"vmtp_{self.guid}"
self.__logger.info("Creating keypair with name: '%s'", name)
keypair = self.cloud.create_keypair(name)
self.__logger.debug("keypair: %s", keypair)
- with open(self.privkey_filename, 'w') as key_file:
+ with open(self.privkey_filename, 'w', encoding='utf-8') as key_file:
key_file.write(keypair.private_key)
- with open(self.pubkey_filename, 'w') as key_file:
+ with open(self.pubkey_filename, 'w', encoding='utf-8') as key_file:
key_file.write(keypair.public_key)
self.cloud.delete_keypair(keypair.id)
@@ -108,7 +108,7 @@ class Vmtp(singlevm.VmReady2):
cmd = ['vmtp', '-sc']
output = subprocess.check_output(cmd).decode("utf-8")
self.__logger.info("%s\n%s", " ".join(cmd), output)
- with open(self.config, "w+") as conf:
+ with open(self.config, "w+", encoding='utf-8') as conf:
vmtp_conf = yaml.full_load(output)
vmtp_conf["private_key_file"] = self.privkey_filename
vmtp_conf["public_key_file"] = self.pubkey_filename
@@ -116,12 +116,11 @@ class Vmtp(singlevm.VmReady2):
vmtp_conf["router_name"] = str(self.router.name)
vmtp_conf["flavor_type"] = str(self.flavor.name)
vmtp_conf["internal_network_name"] = [
- "pns-internal-net_{}".format(self.guid),
- "pns-internal-net2_{}".format(self.guid)]
- vmtp_conf["vm_name_client"] = "TestClient_{}".format(self.guid)
- vmtp_conf["vm_name_server"] = "TestServer_{}".format(self.guid)
- vmtp_conf["security_group_name"] = "pns-security{}".format(
- self.guid)
+ f"pns-internal-net_{self.guid}",
+ f"pns-internal-net2_{self.guid}"]
+ vmtp_conf["vm_name_client"] = f"TestClient_{self.guid}"
+ vmtp_conf["vm_name_server"] = f"TestServer_{self.guid}"
+ vmtp_conf["security_group_name"] = f"pns-security{self.guid}"
vmtp_conf["dns_nameservers"] = [env.get('NAMESERVER')]
vmtp_conf["generic_retry_count"] = self.create_server_timeout // 2
vmtp_conf["ssh_retry_count"] = self.ssh_retry_timeout // 2
@@ -143,13 +142,13 @@ class Vmtp(singlevm.VmReady2):
OS_USER_DOMAIN_NAME=self.project.domain.name,
OS_PASSWORD=self.project.password)
if not new_env["OS_AUTH_URL"].endswith(('v3', 'v3/')):
- new_env["OS_AUTH_URL"] = "{}/v3".format(new_env["OS_AUTH_URL"])
+ new_env["OS_AUTH_URL"] = f'{new_env["OS_AUTH_URL"]}/v3'
try:
del new_env['OS_TENANT_NAME']
del new_env['OS_TENANT_ID']
except Exception: # pylint: disable=broad-except
pass
- cmd = ['vmtp', '-d', '--json', '{}/vmtp.json'.format(self.res_dir),
+ cmd = ['vmtp', '-d', '--json', f'{self.res_dir}/vmtp.json',
'-c', self.config]
if env.get("VMTP_HYPERVISORS"):
hypervisors = functest_utils.convert_ini_to_list(
@@ -160,12 +159,13 @@ class Vmtp(singlevm.VmReady2):
output = subprocess.check_output(
cmd, stderr=subprocess.STDOUT, env=new_env).decode("utf-8")
self.__logger.info("%s\n%s", " ".join(cmd), output)
- cmd = ['vmtp_genchart', '-c', '{}/vmtp.html'.format(self.res_dir),
- '{}/vmtp.json'.format(self.res_dir)]
+ cmd = ['vmtp_genchart', '-c', f'{self.res_dir}/vmtp.html',
+ f'{self.res_dir}/vmtp.json']
output = subprocess.check_output(
cmd, stderr=subprocess.STDOUT).decode("utf-8")
self.__logger.info("%s\n%s", " ".join(cmd), output)
- with open('{}/vmtp.json'.format(self.res_dir), 'r') as res_file:
+ with open(f'{self.res_dir}/vmtp.json', 'r',
+ encoding='utf-8') as res_file:
self.details = json.load(res_file)
def run(self, **kwargs):
@@ -173,7 +173,7 @@ class Vmtp(singlevm.VmReady2):
status = testcase.TestCase.EX_RUN_ERROR
try:
assert self.cloud
- assert super(Vmtp, self).run(**kwargs) == self.EX_OK
+ assert super().run(**kwargs) == self.EX_OK
status = testcase.TestCase.EX_RUN_ERROR
if self.orig_cloud.get_role("admin"):
role_name = "admin"
@@ -204,10 +204,10 @@ class Vmtp(singlevm.VmReady2):
def clean(self):
try:
assert self.cloud
- super(Vmtp, self).clean()
+ super().clean()
os.remove(self.privkey_filename)
os.remove(self.pubkey_filename)
- self.cloud.delete_network("pns-internal-net_{}".format(self.guid))
- self.cloud.delete_network("pns-internal-net2_{}".format(self.guid))
+ self.cloud.delete_network(f"pns-internal-net_{self.guid}")
+ self.cloud.delete_network(f"pns-internal-net2_{self.guid}")
except Exception: # pylint: disable=broad-except
pass
diff --git a/functest/opnfv_tests/openstack/vping/vping_ssh.py b/functest/opnfv_tests/openstack/vping/vping_ssh.py
index a7bbfc23c..ad64348c4 100644
--- a/functest/opnfv_tests/openstack/vping/vping_ssh.py
+++ b/functest/opnfv_tests/openstack/vping/vping_ssh.py
@@ -29,13 +29,13 @@ class VPingSSH(singlevm.SingleVm2):
"""Initialize testcase."""
if "case_name" not in kwargs:
kwargs["case_name"] = "vping_ssh"
- super(VPingSSH, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.vm2 = None
def prepare(self):
- super(VPingSSH, self).prepare()
+ super().prepare()
self.vm2 = self.boot_vm(
- '{}-vm2_{}'.format(self.case_name, self.guid),
+ f'{self.case_name}-vm2_{self.guid}',
security_groups=[self.sec.id])
def execute(self):
@@ -46,10 +46,9 @@ class VPingSSH(singlevm.SingleVm2):
assert self.ssh
if not self.check_regex_in_console(self.vm2.name):
return 1
- (_, stdout, stderr) = self.ssh.exec_command(
- 'ping -c 1 {}'.format(
- self.vm2.private_v4 or self.vm2.addresses[
- self.network.name][0].addr))
+ ip4 = self.vm2.private_v4 or self.vm2.addresses[
+ self.network.name][0].addr
+ (_, stdout, stderr) = self.ssh.exec_command(f'ping -c 1 {ip4}')
self.__logger.info("output:\n%s", stdout.read().decode("utf-8"))
self.__logger.info("error:\n%s", stderr.read().decode("utf-8"))
return stdout.channel.recv_exit_status()
@@ -60,4 +59,4 @@ class VPingSSH(singlevm.SingleVm2):
self.cloud.delete_server(
self.vm2, wait=True,
timeout=getattr(config.CONF, 'vping_vm_delete_timeout'))
- super(VPingSSH, self).clean()
+ super().clean()
diff --git a/functest/opnfv_tests/openstack/vping/vping_userdata.py b/functest/opnfv_tests/openstack/vping/vping_userdata.py
index 9010895cb..8a8f26f37 100644
--- a/functest/opnfv_tests/openstack/vping/vping_userdata.py
+++ b/functest/opnfv_tests/openstack/vping/vping_userdata.py
@@ -26,7 +26,7 @@ class VPingUserdata(singlevm.VmReady2):
def __init__(self, **kwargs):
if "case_name" not in kwargs:
kwargs["case_name"] = "vping_userdata"
- super(VPingUserdata, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.logger = logging.getLogger(__name__)
self.vm1 = None
self.vm2 = None
@@ -39,12 +39,12 @@ class VPingUserdata(singlevm.VmReady2):
"""
try:
assert self.cloud
- assert super(VPingUserdata, self).run(
+ assert super().run(
**kwargs) == testcase.TestCase.EX_OK
self.result = 0
self.vm1 = self.boot_vm()
self.vm2 = self.boot_vm(
- '{}-vm2_{}'.format(self.case_name, self.guid),
+ f'{self.case_name}-vm2_{self.guid}',
userdata=self._get_userdata())
result = self._do_vping()
@@ -104,13 +104,15 @@ class VPingUserdata(singlevm.VmReady2):
"""
Returns the post VM creation script to be added into the VM's userdata
:param test_ip: the IP value to substitute into the script
- :return: the bash script contents
+ :return: the shell script contents
"""
+ ip4 = self.vm1.private_v4 or self.vm1.addresses[
+ self.network.name][0].addr
if self.vm1.private_v4 or self.vm1.addresses[
self.network.name][0].addr:
return ("#!/bin/sh\n\n"
"while true; do\n"
- " ping -c 1 %s 2>&1 >/dev/null\n"
+ f" ping -c 1 {ip4} 2>&1 >/dev/null\n"
" RES=$?\n"
" if [ \"Z$RES\" = \"Z0\" ] ; then\n"
" echo 'vPing OK'\n"
@@ -119,9 +121,7 @@ class VPingUserdata(singlevm.VmReady2):
" echo 'vPing KO'\n"
" fi\n"
" sleep 1\n"
- "done\n" % str(
- self.vm1.private_v4 or self.vm1.addresses[
- self.network.name][0].addr))
+ "done\n")
return None
def clean(self):
@@ -134,4 +134,4 @@ class VPingUserdata(singlevm.VmReady2):
self.cloud.delete_server(
self.vm2, wait=True,
timeout=getattr(config.CONF, 'vping_vm_delete_timeout'))
- super(VPingUserdata, self).clean()
+ super().clean()
diff --git a/functest/opnfv_tests/sdn/odl/odl.py b/functest/opnfv_tests/sdn/odl/odl.py
index b54f0f54b..72c38ce2c 100644
--- a/functest/opnfv_tests/sdn/odl/odl.py
+++ b/functest/opnfv_tests/sdn/odl/odl.py
@@ -49,7 +49,7 @@ class ODLTests(robotframework.RobotFramework):
__logger = logging.getLogger(__name__)
def __init__(self, **kwargs):
- super(ODLTests, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.res_dir = os.path.join(
getattr(config.CONF, 'dir_results'), 'odl')
self.xml_file = os.path.join(self.res_dir, 'output.xml')
@@ -66,10 +66,10 @@ class ODLTests(robotframework.RobotFramework):
try:
for line in fileinput.input(cls.odl_variables_file,
inplace=True):
- print(re.sub("@{AUTH}.*",
- "@{{AUTH}} {} {}".format(
- odlusername, odlpassword),
- line.rstrip()))
+ print(re.sub(
+ "@{AUTH}.*",
+ f"@{{AUTH}} {odlusername} {odlpassword}",
+ line.rstrip()))
return True
except Exception: # pylint: disable=broad-except
cls.__logger.exception("Cannot set ODL creds:")
@@ -111,9 +111,8 @@ class ODLTests(robotframework.RobotFramework):
odlusername = kwargs['odlusername']
odlpassword = kwargs['odlpassword']
osauthurl = kwargs['osauthurl']
- keystoneurl = "{}://{}".format(
- urllib.parse.urlparse(osauthurl).scheme,
- urllib.parse.urlparse(osauthurl).netloc)
+ keystoneurl = (f"{urllib.parse.urlparse(osauthurl).scheme}://"
+ f"{urllib.parse.urlparse(osauthurl).netloc}")
variable = ['KEYSTONEURL:' + keystoneurl,
'NEUTRONURL:' + kwargs['neutronurl'],
'OS_AUTH_URL:"' + osauthurl + '"',
@@ -135,7 +134,7 @@ class ODLTests(robotframework.RobotFramework):
else:
if not self.set_robotframework_vars(odlusername, odlpassword):
return self.EX_RUN_ERROR
- return super(ODLTests, self).run(variable=variable, suites=suites)
+ return super().run(variable=variable, suites=suites)
def run(self, **kwargs):
"""Run suites in OPNFV environment
diff --git a/functest/opnfv_tests/vnf/epc/juju_epc.py b/functest/opnfv_tests/vnf/epc/juju_epc.py
index 5049bd0bb..1cf240b80 100644
--- a/functest/opnfv_tests/vnf/epc/juju_epc.py
+++ b/functest/opnfv_tests/vnf/epc/juju_epc.py
@@ -83,16 +83,16 @@ class JujuEpc(singlevm.SingleVm2):
def __init__(self, **kwargs):
if "case_name" not in kwargs:
kwargs["case_name"] = "juju_epc"
- super(JujuEpc, self).__init__(**kwargs)
+ super().__init__(**kwargs)
# Retrieve the configuration
self.case_dir = pkg_resources.resource_filename(
'functest', 'opnfv_tests/vnf/epc')
try:
self.config = getattr(
- config.CONF, 'vnf_{}_config'.format(self.case_name))
- except Exception:
- raise Exception("VNF config file not found")
+ config.CONF, f'vnf_{self.case_name}_config')
+ except Exception as exc:
+ raise Exception("VNF config file not found") from exc
self.config_file = os.path.join(self.case_dir, self.config)
self.orchestrator = dict(
requirements=functest_utils.get_parameter_from_yaml(
@@ -138,7 +138,7 @@ class JujuEpc(singlevm.SingleVm2):
try:
self.public_auth_url = self.get_public_auth_url(self.orig_cloud)
if not self.public_auth_url.endswith(('v3', 'v3/')):
- self.public_auth_url = "{}/v3".format(self.public_auth_url)
+ self.public_auth_url = f"{self.public_auth_url}/v3"
except Exception: # pylint: disable=broad-except
self.public_auth_url = None
self.sec = None
@@ -168,7 +168,7 @@ class JujuEpc(singlevm.SingleVm2):
'url': self.public_auth_url,
'region': self.cloud.region_name if self.cloud.region_name else (
'RegionOne')}
- with open(clouds_yaml, 'w') as yfile:
+ with open(clouds_yaml, 'w', encoding='utf-8') as yfile:
yfile.write(CLOUD_TEMPLATE.format(**cloud_data))
scpc = scp.SCPClient(self.ssh.get_transport())
scpc.put(clouds_yaml, remote_path='~/')
@@ -189,7 +189,7 @@ class JujuEpc(singlevm.SingleVm2):
"project_domain_name", "Default"),
'user_domain_n': self.cloud.auth.get(
"user_domain_name", "Default")}
- with open(credentials_yaml, 'w') as yfile:
+ with open(credentials_yaml, 'w', encoding='utf-8') as yfile:
yfile.write(CREDS_TEMPLATE.format(**creds_data))
scpc = scp.SCPClient(self.ssh.get_transport())
scpc.put(credentials_yaml, remote_path='~/')
@@ -205,20 +205,20 @@ class JujuEpc(singlevm.SingleVm2):
'RegionOne')
(_, stdout, stderr) = self.ssh.exec_command(
'/snap/bin/juju metadata generate-image -d /home/ubuntu '
- '-i {} -s xenial -r {} -u {}'.format(
- self.image.id, region_name, self.public_auth_url))
+ f'-i {self.image.id} -s xenial -r {region_name} '
+ f'-u {self.public_auth_url}')
self.__logger.debug("stdout:\n%s", stdout.read().decode("utf-8"))
self.__logger.debug("stderr:\n%s", stderr.read().decode("utf-8"))
return not stdout.channel.recv_exit_status()
def publish_image_alt(self, name=None):
- image_alt = super(JujuEpc, self).publish_image_alt(name)
+ image_alt = super().publish_image_alt(name)
region_name = self.cloud.region_name if self.cloud.region_name else (
'RegionOne')
(_, stdout, stderr) = self.ssh.exec_command(
'/snap/bin/juju metadata generate-image -d /home/ubuntu '
- '-i {} -s trusty -r {} -u {}'.format(
- image_alt.id, region_name, self.public_auth_url))
+ f'-i {image_alt.id} -s trusty -r {region_name} '
+ f'-u {self.public_auth_url}')
self.__logger.debug("stdout:\n%s", stdout.read().decode("utf-8"))
self.__logger.debug("stderr:\n%s", stderr.read().decode("utf-8"))
return image_alt
@@ -236,18 +236,16 @@ class JujuEpc(singlevm.SingleVm2):
region_name = self.cloud.region_name if self.cloud.region_name else (
'RegionOne')
(_, stdout, stderr) = self.ssh.exec_command(
- 'timeout {} '
- '/snap/bin/juju bootstrap abot-epc/{} abot-controller '
+ f'timeout {JujuEpc.juju_timeout} '
+ f'/snap/bin/juju bootstrap abot-epc/{region_name} abot-controller '
'--agent-version 2.3.9 --metadata-source /home/ubuntu '
'--constraints mem=2G --bootstrap-series xenial '
- '--config network={} '
+ f'--config network={self.network.id} '
'--config ssl-hostname-verification=false '
- '--config external-network={} '
+ f'--config external-network={self.ext_net.id} '
'--config use-floating-ip=true '
'--config use-default-secgroup=true '
- '--debug'.format(
- JujuEpc.juju_timeout, region_name, self.network.id,
- self.ext_net.id))
+ '--debug')
self.__logger.debug("stdout:\n%s", stdout.read().decode("utf-8"))
self.__logger.debug("stderr:\n%s", stderr.read().decode("utf-8"))
return not stdout.channel.recv_exit_status()
@@ -256,14 +254,14 @@ class JujuEpc(singlevm.SingleVm2):
"""Check application status."""
for i in range(10):
(_, stdout, stderr) = self.ssh.exec_command(
- '/snap/bin/juju status --format short {}'.format(name))
+ f'/snap/bin/juju status --format short {name}')
output = stdout.read().decode("utf-8")
self.__logger.debug("stdout:\n%s", output)
self.__logger.debug("stderr:\n%s", stderr.read().decode("utf-8"))
if stdout.channel.recv_exit_status():
continue
ret = re.search(
- r'(?=workload:({})\))'.format(status), output)
+ rf'(?=workload:({status})\))', output)
if ret:
self.__logger.info("%s workload is %s", name, status)
break
@@ -295,7 +293,7 @@ class JujuEpc(singlevm.SingleVm2):
return not stdout.channel.recv_exit_status()
(_, stdout, stderr) = self.ssh.exec_command(
'PATH=/snap/bin/:$PATH '
- 'timeout {} juju-wait'.format(JujuEpc.juju_timeout))
+ f'timeout {JujuEpc.juju_timeout} juju-wait')
self.__logger.debug("stdout:\n%s", stdout.read().decode("utf-8"))
self.__logger.debug("stderr:\n%s", stderr.read().decode("utf-8"))
if stdout.channel.recv_exit_status():
@@ -312,12 +310,11 @@ class JujuEpc(singlevm.SingleVm2):
return False
scpc = scp.SCPClient(self.ssh.get_transport())
scpc.put(
- '{}/featureFiles'.format(self.case_dir), remote_path='~/',
+ f'{self.case_dir}/featureFiles', remote_path='~/',
recursive=True)
(_, stdout, stderr) = self.ssh.exec_command(
- 'timeout {} /snap/bin/juju scp -- -r -v ~/featureFiles '
- 'abot-epc-basic/0:/etc/rebaca-test-suite/'.format(
- JujuEpc.juju_timeout))
+ f'timeout {JujuEpc.juju_timeout} /snap/bin/juju scp -- -r -v '
+ '~/featureFiles abot-epc-basic/0:/etc/rebaca-test-suite/')
output = stdout.read().decode("utf-8")
self.__logger.debug("stdout:\n%s", output)
self.__logger.debug("stderr:\n%s", stderr.read().decode("utf-8"))
@@ -327,15 +324,15 @@ class JujuEpc(singlevm.SingleVm2):
"""Run test on ABoT."""
start_time = time.time()
(_, stdout, stderr) = self.ssh.exec_command(
- '/snap/bin/juju run-action abot-epc-basic/0 '
- 'run tagnames={}'.format(self.details['test_vnf']['tag_name']))
+ "/snap/bin/juju run-action abot-epc-basic/0 "
+ f"run tagnames={self.details['test_vnf']['tag_name']}")
self.__logger.debug("stdout:\n%s", stdout.read().decode("utf-8"))
self.__logger.debug("stderr:\n%s", stderr.read().decode("utf-8"))
if stdout.channel.recv_exit_status():
return not stdout.channel.recv_exit_status()
(_, stdout, stderr) = self.ssh.exec_command(
'PATH=/snap/bin/:$PATH '
- 'timeout {} juju-wait'.format(JujuEpc.juju_timeout))
+ f'timeout {JujuEpc.juju_timeout} juju-wait')
self.__logger.debug("stdout:\n%s", stdout.read().decode("utf-8"))
self.__logger.debug("stderr:\n%s", stderr.read().decode("utf-8"))
if stdout.channel.recv_exit_status():
@@ -343,9 +340,9 @@ class JujuEpc(singlevm.SingleVm2):
duration = time.time() - start_time
self.__logger.info("Getting results from Abot node....")
(_, stdout, stderr) = self.ssh.exec_command(
- 'timeout {} /snap/bin/juju scp -- -v abot-epc-basic/0:'
- '/var/lib/abot-epc-basic/artifacts/TestResults.json .'.format(
- JujuEpc.juju_timeout))
+ f'timeout {JujuEpc.juju_timeout} /snap/bin/juju scp '
+ '-- -v abot-epc-basic/0:'
+ '/var/lib/abot-epc-basic/artifacts/TestResults.json .')
self.__logger.debug("stdout:\n%s", stdout.read().decode("utf-8"))
self.__logger.debug("stderr:\n%s", stderr.read().decode("utf-8"))
if stdout.channel.recv_exit_status():
@@ -353,8 +350,7 @@ class JujuEpc(singlevm.SingleVm2):
scpc = scp.SCPClient(self.ssh.get_transport())
scpc.get('TestResults.json', self.res_dir)
self.__logger.info("Parsing the Test results...")
- res = (process_abot_test_result('{}/TestResults.json'.format(
- self.res_dir)))
+ res = process_abot_test_result(f'{self.res_dir}/TestResults.json')
short_result = sig_test_format(res)
self.__logger.info(short_result)
self.details['test_vnf'].update(
@@ -375,7 +371,7 @@ class JujuEpc(singlevm.SingleVm2):
except OSError as ex:
if ex.errno != errno.EEXIST:
self.__logger.exception("Cannot create %s", self.res_dir)
- raise Exception
+ raise Exception from ex
self.__logger.info("ENV:\n%s", env.string())
try:
assert self._install_juju()
@@ -407,7 +403,7 @@ class JujuEpc(singlevm.SingleVm2):
self.cloud.delete_image(self.image_alt)
if self.flavor_alt:
self.orig_cloud.delete_flavor(self.flavor_alt.id)
- super(JujuEpc, self).clean()
+ super().clean()
def sig_test_format(sig_test):
@@ -433,7 +429,7 @@ def sig_test_format(sig_test):
def process_abot_test_result(file_path):
""" Process ABoT Result """
- with open(file_path) as test_result:
+ with open(file_path, encoding='utf-8') as test_result:
data = json.load(test_result)
res = []
for tests in data:
diff --git a/functest/opnfv_tests/vnf/ims/clearwater.py b/functest/opnfv_tests/vnf/ims/clearwater.py
index 67128b11c..4c143fd70 100644
--- a/functest/opnfv_tests/vnf/ims/clearwater.py
+++ b/functest/opnfv_tests/vnf/ims/clearwater.py
@@ -50,7 +50,7 @@ class ClearwaterTesting():
output_dict = {}
self.logger.debug('Ellis IP: %s', self.ellis_ip)
output_dict['ellis_ip'] = self.ellis_ip
- account_url = 'http://{0}/accounts'.format(self.ellis_ip)
+ account_url = f'http://{self.ellis_ip}/accounts'
params = {"password": "functest",
"full_name": "opnfv functest user",
"email": "functest@opnfv.org",
@@ -60,7 +60,7 @@ class ClearwaterTesting():
number_res = self._create_ellis_account(account_url, params)
output_dict['number'] = number_res
- session_url = 'http://{0}/session'.format(self.ellis_ip)
+ session_url = f'http://{self.ellis_ip}/session'
session_data = {
'username': params['email'],
'password': params['password'],
@@ -68,8 +68,8 @@ class ClearwaterTesting():
}
cookies = self._get_ellis_session_cookies(session_url, session_data)
- number_url = 'http://{0}/accounts/{1}/numbers'.format(
- self.ellis_ip, params['email'])
+ number_url = (
+ f"http://{self.ellis_ip}/accounts/{params['email']}/numbers")
self.logger.debug('Create 1st calling number on Ellis')
number_res = self._create_ellis_number(number_url, cookies)
@@ -97,8 +97,7 @@ class ClearwaterTesting():
"try %s: cannot create ellis account", iloop + 1)
time.sleep(30)
raise Exception(
- "Unable to create an account {}".format(
- params.get('full_name')))
+ f"Unable to create an account {params.get('full_name')}")
def _get_ellis_session_cookies(self, session_url, params):
i = 15
@@ -150,24 +149,20 @@ class ClearwaterTesting():
"""
# pylint: disable=too-many-locals,too-many-arguments
self.logger.info('Run Clearwater live test')
- script = ('cd {0};'
- 'rake test[{1}] SIGNUP_CODE={2}'
- .format(self.test_dir,
- public_domain,
- signup_code))
+ script = (f'cd {self.test_dir};'
+ f'rake test[{public_domain}] SIGNUP_CODE={signup_code}')
if self.bono_ip and self.ellis_ip:
- subscript = ' PROXY={0} ELLIS={1}'.format(
- self.bono_ip, self.ellis_ip)
- script = '{0}{1}'.format(script, subscript)
- script = ('{0}{1}'.format(script, ' --trace'))
- cmd = "/bin/bash -c '{0}'".format(script)
+ subscript = f' PROXY={self.bono_ip} ELLIS={self.ellis_ip}'
+ script = f'{script}{subscript}'
+ script = f'{script} --trace'
+ cmd = f"/bin/sh -c '{script}'"
self.logger.debug('Live test cmd: %s', cmd)
output_file = os.path.join(self.result_dir, "ims_test_output.txt")
ft_utils.execute_command(cmd,
error_msg='Clearwater live test failed',
output_file=output_file)
- with open(output_file, 'r') as ofile:
+ with open(output_file, 'r', encoding='utf-8') as ofile:
result = ofile.read()
if result != "":
diff --git a/functest/opnfv_tests/vnf/ims/cloudify_ims.py b/functest/opnfv_tests/vnf/ims/cloudify_ims.py
index d937cc052..b93af7d6d 100644
--- a/functest/opnfv_tests/vnf/ims/cloudify_ims.py
+++ b/functest/opnfv_tests/vnf/ims/cloudify_ims.py
@@ -53,14 +53,14 @@ class CloudifyIms(cloudify.Cloudify):
"""Initialize CloudifyIms testcase object."""
if "case_name" not in kwargs:
kwargs["case_name"] = "cloudify_ims"
- super(CloudifyIms, self).__init__(**kwargs)
+ super().__init__(**kwargs)
# Retrieve the configuration
try:
self.config = getattr(
- config.CONF, 'vnf_{}_config'.format(self.case_name))
- except Exception:
- raise Exception("VNF config file not found")
+ config.CONF, f'vnf_{self.case_name}_config')
+ except Exception as exc:
+ raise Exception("VNF config file not found") from exc
self.case_dir = pkg_resources.resource_filename(
'functest', 'opnfv_tests/vnf/ims')
@@ -114,7 +114,7 @@ class CloudifyIms(cloudify.Cloudify):
network, security group, fip, VM creation
"""
- assert super(CloudifyIms, self).execute() == 0
+ assert super().execute() == 0
start_time = time.time()
self.orig_cloud.set_network_quotas(
self.project.project.name,
@@ -259,4 +259,4 @@ class CloudifyIms(cloudify.Cloudify):
self.cloud.delete_image(self.image_alt)
if self.flavor_alt:
self.orig_cloud.delete_flavor(self.flavor_alt.id)
- super(CloudifyIms, self).clean()
+ super().clean()
diff --git a/functest/opnfv_tests/vnf/ims/heat_ims.py b/functest/opnfv_tests/vnf/ims/heat_ims.py
index 4edb2ddd9..0d4e345a0 100644
--- a/functest/opnfv_tests/vnf/ims/heat_ims.py
+++ b/functest/opnfv_tests/vnf/ims/heat_ims.py
@@ -57,14 +57,14 @@ class HeatIms(singlevm.VmReady2):
"""Initialize HeatIms testcase object."""
if "case_name" not in kwargs:
kwargs["case_name"] = "heat_ims"
- super(HeatIms, self).__init__(**kwargs)
+ super().__init__(**kwargs)
# Retrieve the configuration
try:
self.config = getattr(
- config.CONF, 'vnf_{}_config'.format(self.case_name))
- except Exception:
- raise Exception("VNF config file not found")
+ config.CONF, f'vnf_{self.case_name}_config')
+ except Exception as exc:
+ raise Exception("VNF config file not found") from exc
self.case_dir = pkg_resources.resource_filename(
'functest', 'opnfv_tests/vnf/ims')
@@ -112,9 +112,10 @@ class HeatIms(singlevm.VmReady2):
project=self.project.project.id,
domain=self.project.domain.id)
self.keypair = self.cloud.create_keypair(
- '{}-kp_{}'.format(self.case_name, self.guid))
+ f'{self.case_name}-kp_{self.guid}')
self.__logger.info("keypair:\n%s", self.keypair.private_key)
- with open(self.key_filename, 'w') as private_key_file:
+ with open(
+ self.key_filename, 'w', encoding='utf-8') as private_key_file:
private_key_file.write(self.keypair.private_key)
if self.deploy_vnf() and self.test_vnf():
@@ -137,7 +138,7 @@ class HeatIms(singlevm.VmReady2):
status = testcase.TestCase.EX_RUN_ERROR
try:
assert self.cloud
- assert super(HeatIms, self).run(
+ assert super().run(
**kwargs) == testcase.TestCase.EX_OK
self.result = 0
if not self.execute():
@@ -247,6 +248,6 @@ class HeatIms(singlevm.VmReady2):
pass
except Exception: # pylint: disable=broad-except
self.__logger.exception("Cannot clean stack ressources")
- super(HeatIms, self).clean()
+ super().clean()
if self.role:
self.orig_cloud.delete_role(self.role.id)
diff --git a/functest/opnfv_tests/vnf/router/cloudify_vrouter.py b/functest/opnfv_tests/vnf/router/cloudify_vrouter.py
index 82f57ca7b..32d675347 100644
--- a/functest/opnfv_tests/vnf/router/cloudify_vrouter.py
+++ b/functest/opnfv_tests/vnf/router/cloudify_vrouter.py
@@ -51,14 +51,14 @@ class CloudifyVrouter(cloudify.Cloudify):
def __init__(self, **kwargs):
if "case_name" not in kwargs:
kwargs["case_name"] = "vyos_vrouter"
- super(CloudifyVrouter, self).__init__(**kwargs)
+ super().__init__(**kwargs)
# Retrieve the configuration
try:
self.config = getattr(
- config.CONF, 'vnf_{}_config'.format(self.case_name))
- except Exception:
- raise Exception("VNF config file not found")
+ config.CONF, f'vnf_{self.case_name}_config')
+ except Exception as exc:
+ raise Exception("VNF config file not found") from exc
self.case_dir = pkg_resources.resource_filename(
'functest', 'opnfv_tests/vnf/router')
@@ -127,7 +127,7 @@ class CloudifyVrouter(cloudify.Cloudify):
network, security group, fip, VM creation
"""
# network creation
- super(CloudifyVrouter, self).execute()
+ super().execute()
start_time = time.time()
self.put_private_key()
self.upload_cfy_plugins(self.cop_yaml, self.cop_wgn)
@@ -231,4 +231,4 @@ class CloudifyVrouter(cloudify.Cloudify):
self.cloud.delete_image(self.image_alt)
if self.flavor_alt:
self.orig_cloud.delete_flavor(self.flavor_alt.id)
- super(CloudifyVrouter, self).clean()
+ super().clean()
diff --git a/functest/opnfv_tests/vnf/router/test_controller/function_test_exec.py b/functest/opnfv_tests/vnf/router/test_controller/function_test_exec.py
index 0a56913b7..9eb3c5d69 100644
--- a/functest/opnfv_tests/vnf/router/test_controller/function_test_exec.py
+++ b/functest/opnfv_tests/vnf/router/test_controller/function_test_exec.py
@@ -32,17 +32,16 @@ class FunctionTestExec():
credentials = util_info["credentials"]
self.vnf_ctrl = VnfController(util_info)
- test_cmd_map_file = open(
- os.path.join(
- self.util.vnf_data_dir, self.util.command_template_dir,
- self.util.test_cmd_map_yaml_file),
- 'r')
- self.test_cmd_map_yaml = yaml.safe_load(test_cmd_map_file)
- test_cmd_map_file.close()
+ with open(
+ os.path.join(
+ self.util.vnf_data_dir, self.util.command_template_dir,
+ self.util.test_cmd_map_yaml_file),
+ 'r', encoding='utf-8') as test_cmd_map_file:
+ self.test_cmd_map_yaml = yaml.safe_load(test_cmd_map_file)
self.util.set_credentials(credentials["cloud"])
- with open(self.util.test_env_config_yaml) as file_fd:
+ with open(self.util.test_env_config_yaml, encoding='utf-8') as file_fd:
test_env_config_yaml = yaml.safe_load(file_fd)
file_fd.close()
diff --git a/functest/opnfv_tests/vnf/router/utilvnf.py b/functest/opnfv_tests/vnf/router/utilvnf.py
index 2db3b38e5..111f20c1a 100644
--- a/functest/opnfv_tests/vnf/router/utilvnf.py
+++ b/functest/opnfv_tests/vnf/router/utilvnf.py
@@ -64,7 +64,7 @@ class Utilvnf(): # pylint: disable=too-many-instance-attributes
if not os.path.exists(self.vnf_data_dir):
os.makedirs(self.vnf_data_dir)
- with open(self.test_env_config_yaml) as file_fd:
+ with open(self.test_env_config_yaml, encoding='utf-8') as file_fd:
test_env_config_yaml = yaml.safe_load(file_fd)
file_fd.close()
@@ -98,9 +98,7 @@ class Utilvnf(): # pylint: disable=too-many-instance-attributes
return mac_address
def get_blueprint_outputs(self, cfy_manager_ip, deployment_name):
- url = "http://%s/deployments/%s/outputs" % (
- cfy_manager_ip, deployment_name)
-
+ url = f"http://{cfy_manager_ip}/deployments/{deployment_name}/outputs"
response = requests.get(
url,
auth=requests.auth.HTTPBasicAuth('admin', 'admin'),
@@ -212,24 +210,29 @@ class Utilvnf(): # pylint: disable=too-many-instance-attributes
def write_result_data(self, result_data):
test_result = []
if not os.path.isfile(self.test_result_json_file):
- file_fd = open(self.test_result_json_file, "w")
- file_fd.close()
+ with open(
+ self.test_result_json_file, "w",
+ encoding="utf-8") as file_fd:
+ pass
else:
- file_fd = open(self.test_result_json_file, "r")
- test_result = json.load(file_fd)
- file_fd.close()
+ with open(
+ self.test_result_json_file, "r",
+ encoding="utf-8") as file_fd:
+ test_result = json.load(file_fd)
test_result.append(result_data)
- file_fd = open(self.test_result_json_file, "w")
- json.dump(test_result, file_fd)
- file_fd.close()
+ with open(
+ self.test_result_json_file, "w",
+ encoding="utf-8") as file_fd:
+ json.dump(test_result, file_fd)
def output_test_result_json(self):
if os.path.isfile(self.test_result_json_file):
- file_fd = open(self.test_result_json_file, "r")
- test_result = json.load(file_fd)
- file_fd.close()
+ with open(
+ self.test_result_json_file, "r",
+ encoding="utf-8") as file_fd:
+ test_result = json.load(file_fd)
output_json_data = json.dumps(test_result,
sort_keys=True,
indent=4)
@@ -239,8 +242,6 @@ class Utilvnf(): # pylint: disable=too-many-instance-attributes
@staticmethod
def get_test_scenario(file_path):
- test_scenario_file = open(file_path,
- 'r')
- test_scenario_yaml = yaml.safe_load(test_scenario_file)
- test_scenario_file.close()
+ with open(file_path, "r", encoding="utf-8") as test_scenario_file:
+ test_scenario_yaml = yaml.safe_load(test_scenario_file)
return test_scenario_yaml["test_scenario_list"]
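Editor's note: the whole router-VNF section above applies one pattern: bare open()/close() pairs around yaml.safe_load()/json.load() become context managers with an explicit encoding. A minimal sketch of that pattern (illustration only; load_yaml is a hypothetical helper, not Functest code):

    import yaml

    def load_yaml(path):
        # the handle is closed even if parsing raises, and the text
        # encoding no longer depends on the locale of the test host
        with open(path, encoding="utf-8") as yaml_fd:
            return yaml.safe_load(yaml_fd)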
diff --git a/functest/opnfv_tests/vnf/router/vnf_controller/ssh_client.py b/functest/opnfv_tests/vnf/router/vnf_controller/ssh_client.py
index 0969eab3b..269f6526b 100644
--- a/functest/opnfv_tests/vnf/router/vnf_controller/ssh_client.py
+++ b/functest/opnfv_tests/vnf/router/vnf_controller/ssh_client.py
@@ -43,7 +43,7 @@ class SshClient(): # pylint: disable=too-many-instance-attributes
self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.util = Utilvnf()
- with open(self.util.test_env_config_yaml) as file_fd:
+ with open(self.util.test_env_config_yaml, encoding='utf-8') as file_fd:
test_env_config_yaml = yaml.safe_load(file_fd)
file_fd.close()
diff --git a/functest/opnfv_tests/vnf/router/vnf_controller/vm_controller.py b/functest/opnfv_tests/vnf/router/vnf_controller/vm_controller.py
index b159ddda4..2210b3909 100644
--- a/functest/opnfv_tests/vnf/router/vnf_controller/vm_controller.py
+++ b/functest/opnfv_tests/vnf/router/vnf_controller/vm_controller.py
@@ -36,7 +36,7 @@ class VmController():
self.util = Utilvnf()
self.util.set_credentials(credentials["cloud"])
- with open(self.util.test_env_config_yaml) as file_fd:
+ with open(self.util.test_env_config_yaml, encoding='utf-8') as file_fd:
test_env_config_yaml = yaml.safe_load(file_fd)
file_fd.close()
@@ -101,10 +101,8 @@ class VmController():
def command_create_and_execute(self, ssh, test_cmd_file_path,
cmd_input_param, prompt_file_path):
- prompt_file = open(prompt_file_path,
- 'r')
- prompt = yaml.safe_load(prompt_file)
- prompt_file.close()
+ with open(prompt_file_path, 'r', encoding='utf-8') as prompt_file:
+ prompt = yaml.safe_load(prompt_file)
config_mode_prompt = prompt["config_mode"]
commands = self.command_gen_from_template(test_cmd_file_path,
diff --git a/functest/opnfv_tests/vnf/router/vnf_controller/vnf_controller.py b/functest/opnfv_tests/vnf/router/vnf_controller/vnf_controller.py
index 7ed287c6e..46584456f 100644
--- a/functest/opnfv_tests/vnf/router/vnf_controller/vnf_controller.py
+++ b/functest/opnfv_tests/vnf/router/vnf_controller/vnf_controller.py
@@ -36,7 +36,7 @@ class VnfController():
self.util = Utilvnf()
self.vm_controller = VmController(util_info)
- with open(self.util.test_env_config_yaml) as file_fd:
+ with open(self.util.test_env_config_yaml, encoding='utf-8') as file_fd:
test_env_config_yaml = yaml.safe_load(file_fd)
file_fd.close()
@@ -49,10 +49,9 @@ class VnfController():
def config_vnf(self, source_vnf, destination_vnf, test_cmd_file_path,
parameter_file_path, prompt_file_path):
# pylint: disable=too-many-arguments
- parameter_file = open(parameter_file_path,
- 'r')
- cmd_input_param = yaml.safe_load(parameter_file)
- parameter_file.close()
+ with open(
+ parameter_file_path, 'r', encoding='utf-8') as parameter_file:
+ cmd_input_param = yaml.safe_load(parameter_file)
cmd_input_param["macaddress"] = source_vnf["data_plane_network_mac"]
cmd_input_param["source_ip"] = source_vnf["data_plane_network_ip"]
@@ -71,19 +70,16 @@ class VnfController():
res_dict_data_list = []
- parameter_file = open(parameter_file_path,
- 'r')
- cmd_input_param = yaml.safe_load(parameter_file)
- parameter_file.close()
+ with open(
+ parameter_file_path, 'r', encoding='utf-8') as parameter_file:
+ cmd_input_param = yaml.safe_load(parameter_file)
cmd_input_param["source_ip"] = target_vnf["data_plane_network_ip"]
cmd_input_param["destination_ip"] = reference_vnf[
"data_plane_network_ip"]
- prompt_file = open(prompt_file_path,
- 'r')
- prompt = yaml.safe_load(prompt_file)
- prompt_file.close()
+ with open(prompt_file_path, 'r', encoding='utf-8') as prompt_file:
+ prompt = yaml.safe_load(prompt_file)
terminal_mode_prompt = prompt["terminal_mode"]
ssh = SshClient(target_vnf["floating_ip"],
diff --git a/functest/tests/unit/odl/test_odl.py b/functest/tests/unit/odl/test_odl.py
index 1b2e10b07..c675c2988 100644
--- a/functest/tests/unit/odl/test_odl.py
+++ b/functest/tests/unit/odl/test_odl.py
@@ -33,10 +33,10 @@ class ODLTesting(unittest.TestCase):
logging.disable(logging.CRITICAL)
_keystone_ip = "127.0.0.1"
- _neutron_url = u"https://127.0.0.1:9696"
- _neutron_id = u"dummy"
+ _neutron_url = "https://127.0.0.1:9696"
+ _neutron_id = "dummy"
_sdn_controller_ip = "127.0.0.3"
- _os_auth_url = "http://{}:5000/v3".format(_keystone_ip)
+ _os_auth_url = f"http://{_keystone_ip}:5000/v3"
_os_projectname = "admin"
_os_username = "admin"
_os_password = "admin"
@@ -63,8 +63,7 @@ class ODLTesting(unittest.TestCase):
self.test = odl.ODLTests(case_name='odl', project_name='functest')
self.defaultargs = {'odlusername': self._odl_username,
'odlpassword': self._odl_password,
- 'neutronurl': "http://{}:9696".format(
- self._keystone_ip),
+ 'neutronurl': f"http://{self._keystone_ip}:9696",
'osauthurl': self._os_auth_url,
'osusername': self._os_username,
'osuserdomainname': self._os_userdomainname,
@@ -105,7 +104,7 @@ class ODLRobotTesting(ODLTesting):
mock_method.assert_called_once_with(
os.path.join(odl.ODLTests.odl_test_repo,
'csit/variables/Variables.robot'), inplace=True)
- self.assertEqual(args[0].getvalue(), "{}\n".format(msg2))
+ self.assertEqual(args[0].getvalue(), f"{msg2}\n")
def test_set_vars_auth_default(self):
self._test_set_vars(
@@ -160,19 +159,19 @@ class ODLMainTesting(ODLTesting):
args[0].assert_called_once_with(self.test.odl_variables_file)
if len(args) > 1:
variable = [
- 'KEYSTONEURL:{}://{}'.format(
- urllib.parse.urlparse(self._os_auth_url).scheme,
- urllib.parse.urlparse(self._os_auth_url).netloc),
- 'NEUTRONURL:{}'.format(self._neutron_url),
- 'OS_AUTH_URL:"{}"'.format(self._os_auth_url),
- 'OSUSERNAME:"{}"'.format(self._os_username),
- 'OSUSERDOMAINNAME:"{}"'.format(self._os_userdomainname),
- 'OSTENANTNAME:"{}"'.format(self._os_projectname),
- 'OSPROJECTDOMAINNAME:"{}"'.format(self._os_projectdomainname),
- 'OSPASSWORD:"{}"'.format(self._os_password),
- 'ODL_SYSTEM_IP:{}'.format(self._sdn_controller_ip),
- 'PORT:{}'.format(self._odl_webport),
- 'RESTCONFPORT:{}'.format(self._odl_restconfport)]
+ ('KEYSTONEURL:'
+ f'{urllib.parse.urlparse(self._os_auth_url).scheme}://'
+ f'{urllib.parse.urlparse(self._os_auth_url).netloc}'),
+ f'NEUTRONURL:{self._neutron_url}',
+ f'OS_AUTH_URL:"{self._os_auth_url}"',
+ f'OSUSERNAME:"{self._os_username}"',
+ f'OSUSERDOMAINNAME:"{self._os_userdomainname}"',
+ f'OSTENANTNAME:"{self._os_projectname}"',
+ f'OSPROJECTDOMAINNAME:"{self._os_projectdomainname}"',
+ f'OSPASSWORD:"{self._os_password}"',
+ f'ODL_SYSTEM_IP:{self._sdn_controller_ip}',
+ f'PORT:{self._odl_webport}',
+ f'RESTCONFPORT:{self._odl_restconfport}']
args[1].assert_called_once_with(
odl.ODLTests.basic_suite_dir, odl.ODLTests.neutron_suite_dir,
include=[],
@@ -541,7 +540,7 @@ class ODLArgParserTesting(ODLTesting):
def setUp(self):
self.parser = odl.ODLParser()
- super(ODLArgParserTesting, self).setUp()
+ super().setUp()
def test_default(self):
self.assertEqual(self.parser.parse_args(), self.defaultargs)
@@ -551,8 +550,8 @@ class ODLArgParserTesting(ODLTesting):
self.defaultargs['odlip'] = self._sdn_controller_ip
self.assertEqual(
self.parser.parse_args(
- ["--neutronurl={}".format(self._neutron_url),
- "--odlip={}".format(self._sdn_controller_ip)]),
+ [f"--neutronurl={self._neutron_url}",
+ f"--odlip={self._sdn_controller_ip}"]),
self.defaultargs)
@mock.patch('sys.stderr', new_callable=six.StringIO)
@@ -565,7 +564,7 @@ class ODLArgParserTesting(ODLTesting):
def _test_arg(self, arg, value):
self.defaultargs[arg] = value
self.assertEqual(
- self.parser.parse_args(["--{}={}".format(arg, value)]),
+ self.parser.parse_args([f"--{arg}={value}"]),
self.defaultargs)
def test_odlusername(self):
@@ -606,7 +605,7 @@ class ODLArgParserTesting(ODLTesting):
def test_pushtodb(self):
self.defaultargs['pushtodb'] = True
- self.assertEqual(self.parser.parse_args(["--{}".format('pushtodb')]),
+ self.assertEqual(self.parser.parse_args(["--pushtodb"]),
self.defaultargs)
def test_multiple_args(self):
@@ -614,8 +613,8 @@ class ODLArgParserTesting(ODLTesting):
self.defaultargs['odlip'] = self._sdn_controller_ip
self.assertEqual(
self.parser.parse_args(
- ["--neutronurl={}".format(self._neutron_url),
- "--odlip={}".format(self._sdn_controller_ip)]),
+ [f"--neutronurl={self._neutron_url}",
+ f"--odlip={self._sdn_controller_ip}"]),
self.defaultargs)
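Editor's note: most of the ODL test churn above is the same mechanical rewrite, str.format() calls turned into f-strings. A small equivalence check (illustration only, values borrowed from the test fixtures):

    neutron_url = "https://127.0.0.1:9696"
    assert f"--neutronurl={neutron_url}" == "--neutronurl={}".format(neutron_url)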
diff --git a/functest/tests/unit/openstack/cinder/test_cinder.py b/functest/tests/unit/openstack/cinder/test_cinder.py
index 4052408d9..d3c9cabb6 100644
--- a/functest/tests/unit/openstack/cinder/test_cinder.py
+++ b/functest/tests/unit/openstack/cinder/test_cinder.py
@@ -59,7 +59,7 @@ class CinderTesting(unittest.TestCase):
self.cinder.prepare()
args[0].assert_called_with()
args[1].assert_called_once_with(
- '{}-vm2_{}'.format(self.cinder.case_name, self.cinder.guid),
+ f'{self.cinder.case_name}-vm2_{self.cinder.guid}',
security_groups=[self.cinder.sec.id],
key_name=self.cinder.keypair.id)
self.cinder.cloud.create_volume.assert_not_called()
@@ -81,13 +81,12 @@ class CinderTesting(unittest.TestCase):
self.cinder.prepare()
args[0].assert_called_once_with()
args[1].assert_called_once_with(
- '{}-vm2_{}'.format(self.cinder.case_name, self.cinder.guid),
+ f'{self.cinder.case_name}-vm2_{self.cinder.guid}',
security_groups=[self.cinder.sec.id],
key_name=self.cinder.keypair.id)
self.cinder.connect.assert_called_once_with(args[1].return_value)
self.cinder.cloud.create_volume.assert_called_once_with(
- name='{}-volume_{}'.format(
- self.cinder.case_name, self.cinder.guid),
+ name=f'{self.cinder.case_name}-volume_{self.cinder.guid}',
size='2', timeout=self.cinder.volume_timeout, wait=True)
@mock.patch('scp.SCPClient.put')
@@ -101,7 +100,7 @@ class CinderTesting(unittest.TestCase):
self.cinder.ssh.exec_command.return_value = (None, stdout, mock.Mock())
self.assertEqual(self.cinder._write_data(), 0)
self.cinder.ssh.exec_command.assert_called_once_with(
- "sh ~/write_data.sh {}".format(env.get('VOLUME_DEVICE_NAME')))
+ f"sh ~/write_data.sh {env.get('VOLUME_DEVICE_NAME')}")
self.cinder.cloud.attach_volume.assert_called_once_with(
self.cinder.sshvm, self.cinder.volume,
timeout=self.cinder.volume_timeout)
@@ -138,7 +137,7 @@ class CinderTesting(unittest.TestCase):
stdout.channel.recv_exit_status.return_value = 0
self.assertEqual(self.cinder._read_data(), 0)
self.cinder.ssh2.exec_command.assert_called_once_with(
- "sh ~/read_data.sh {}".format(env.get('VOLUME_DEVICE_NAME')))
+ f"sh ~/read_data.sh {env.get('VOLUME_DEVICE_NAME')}")
self.cinder.cloud.attach_volume.assert_called_once_with(
self.cinder.vm2, self.cinder.volume,
timeout=self.cinder.volume_timeout)
diff --git a/functest/tests/unit/openstack/rally/test_rally.py b/functest/tests/unit/openstack/rally/test_rally.py
index c281d4f52..f3c2e7cf6 100644
--- a/functest/tests/unit/openstack/rally/test_rally.py
+++ b/functest/tests/unit/openstack/rally/test_rally.py
@@ -50,7 +50,7 @@ class OSRallyTesting(unittest.TestCase):
@staticmethod
def check_scenario_file(value):
- yaml_file = 'opnfv-{}.yaml'.format('test_file_name')
+ yaml_file = 'opnfv-test_file_name.yaml'
if yaml_file in value:
return False
return True
@@ -64,7 +64,7 @@ class OSRallyTesting(unittest.TestCase):
@staticmethod
def check_temp_dir(value):
- yaml_file = 'opnfv-{}.yaml'.format('test_file_name')
+ yaml_file = 'opnfv-test_file_name.yaml'
if yaml_file in value:
return True
return False
@@ -95,7 +95,7 @@ class OSRallyTesting(unittest.TestCase):
self, mock_method, mock_os_makedirs, mock_path_exists):
mock_path_exists.side_effect = self.check_temp_dir
- yaml_file = 'opnfv-{}.yaml'.format('test_file_name')
+ yaml_file = 'opnfv-test_file_name.yaml'
ret_val = os.path.join(self.rally_base.temp_dir, yaml_file)
self.assertEqual(self.rally_base._prepare_test_list('test_file_name'),
ret_val)
@@ -423,8 +423,8 @@ class OSRallyTesting(unittest.TestCase):
@mock.patch('subprocess.check_output',
side_effect=subprocess.CalledProcessError('', ''))
def test_export_task_ko(self, *args):
- file_name = "{}/{}.html".format(
- self.rally_base.results_dir, self.rally_base.case_name)
+ file_name = (f"{self.rally_base.results_dir}/"
+ f"{self.rally_base.case_name}.html")
with self.assertRaises(subprocess.CalledProcessError):
self.rally_base.export_task(file_name)
cmd = ["rally", "task", "export", "--type", "html", "--deployment",
@@ -434,8 +434,8 @@ class OSRallyTesting(unittest.TestCase):
@mock.patch('subprocess.check_output', return_value=b'')
def test_export_task(self, *args):
- file_name = "{}/{}.html".format(
- self.rally_base.results_dir, self.rally_base.case_name)
+ file_name = (f"{self.rally_base.results_dir}/"
+ f"{self.rally_base.case_name}.html")
self.assertEqual(self.rally_base.export_task(file_name), None)
cmd = ["rally", "task", "export", "--type", "html", "--deployment",
str(getattr(config.CONF, 'rally_deployment_name')),
@@ -445,8 +445,8 @@ class OSRallyTesting(unittest.TestCase):
@mock.patch('subprocess.check_output',
side_effect=subprocess.CalledProcessError('', ''))
def test_verify_report_ko(self, *args):
- file_name = "{}/{}.html".format(
- self.rally_base.results_dir, self.rally_base.case_name)
+ file_name = (f"{self.rally_base.results_dir}/"
+ f"{self.rally_base.case_name}.html")
with self.assertRaises(subprocess.CalledProcessError):
self.rally_base.verify_report(file_name, "1")
cmd = ["rally", "verify", "report", "--type", "html", "--uuid", "1",
@@ -455,8 +455,8 @@ class OSRallyTesting(unittest.TestCase):
@mock.patch('subprocess.check_output', return_value=b'')
def test_verify_report(self, *args):
- file_name = "{}/{}.html".format(
- self.rally_base.results_dir, self.rally_base.case_name)
+ file_name = (f"{self.rally_base.results_dir}/"
+ f"{self.rally_base.case_name}.html")
self.assertEqual(self.rally_base.verify_report(file_name, "1"), None)
cmd = ["rally", "verify", "report", "--type", "html", "--uuid", "1",
"--to", file_name]
diff --git a/functest/tests/unit/openstack/tempest/test_tempest.py b/functest/tests/unit/openstack/tempest/test_tempest.py
index 71aa5a257..efc4393c8 100644
--- a/functest/tests/unit/openstack/tempest/test_tempest.py
+++ b/functest/tests/unit/openstack/tempest/test_tempest.py
@@ -83,8 +83,8 @@ class OSTempestTesting(unittest.TestCase):
testr_mode = self.tempestcommon.mode
verifier_repo_dir = 'test_verifier_repo_dir'
self.tempestcommon.verifier_repo_dir = verifier_repo_dir
- cmd = "(cd {0}; stestr list '{1}' >{2} 2>/dev/null)".format(
- verifier_repo_dir, testr_mode, self.tempestcommon.list)
+ cmd = (f"(cd {verifier_repo_dir}; stestr list '{testr_mode}' > "
+ f"{self.tempestcommon.list} 2>/dev/null)")
self.tempestcommon.generate_test_list(mode=testr_mode)
args[0].assert_called_once_with(cmd, shell=True)
args[2].assert_called_once_with('/etc/tempest.conf')
@@ -158,20 +158,17 @@ class OSTempestTesting(unittest.TestCase):
args[2].assert_called_once_with(
self.tempestcommon.list, self.tempestcommon.raw_list)
+ @mock.patch('functest.opnfv_tests.openstack.tempest.tempest.'
+ 'subprocess.Popen')
+ @mock.patch('six.moves.builtins.open', mock.mock_open())
@mock.patch('functest.opnfv_tests.openstack.tempest.tempest.LOGGER.info')
- def test_run_verifier_tests_default(self, mock_logger_info):
- with mock.patch('six.moves.builtins.open', mock.mock_open()), \
- mock.patch('six.moves.builtins.iter',
- return_value=[r'\} tempest\.']), \
- mock.patch('functest.opnfv_tests.openstack.tempest.tempest.'
- 'subprocess.Popen'):
- self.tempestcommon.tempest_list = 'test_tempest_list'
- cmd = ["rally", "verify", "start", "--load-list",
- self.tempestcommon.tempest_list]
- with self.assertRaises(Exception):
- self.tempestcommon.run_verifier_tests()
- mock_logger_info. \
- assert_any_call("Starting Tempest test suite: '%s'.", cmd)
+ def test_run_verifier_tests_default(self, *args):
+ self.tempestcommon.tempest_list = 'test_tempest_list'
+ cmd = ["rally", "verify", "start", "--load-list",
+ self.tempestcommon.tempest_list]
+ with self.assertRaises(Exception):
+ self.tempestcommon.run_verifier_tests()
+ args[0].assert_any_call("Starting Tempest test suite: '%s'.", cmd)
@mock.patch('functest.opnfv_tests.openstack.tempest.tempest.'
'os.path.exists', return_value=False)
@@ -272,10 +269,9 @@ class OSTempestTesting(unittest.TestCase):
with mock.patch('functest.opnfv_tests.openstack.tempest.'
'tempest.subprocess.Popen') as mock_popen:
- mock_stdout = mock.Mock()
- attrs = {'stdout.readline.return_value': b'test_deploy_id'}
- mock_stdout.configure_mock(**attrs)
- mock_popen.return_value = mock_stdout
+ attrs = {'return_value.__enter__.return_value.'
+ 'stdout.readline.return_value': b'test_deploy_id'}
+ mock_popen.configure_mock(**attrs)
self.assertEqual(self.tempestcommon.get_verifier_id(),
'test_deploy_id')
@@ -284,10 +280,9 @@ class OSTempestTesting(unittest.TestCase):
setattr(config.CONF, 'tempest_verifier_name', 'test_deploy_name')
with mock.patch('functest.opnfv_tests.openstack.tempest.'
'tempest.subprocess.Popen') as mock_popen:
- mock_stdout = mock.Mock()
- attrs = {'stdout.readline.return_value': b'test_deploy_id'}
- mock_stdout.configure_mock(**attrs)
- mock_popen.return_value = mock_stdout
+ attrs = {'return_value.__enter__.return_value.'
+ 'stdout.readline.return_value': b'test_deploy_id'}
+ mock_popen.configure_mock(**attrs)
self.assertEqual(rally.RallyBase.get_verifier_deployment_id(),
'test_deploy_id')
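Editor's note: the nested 'return_value.__enter__.return_value...' keys above are needed because the code under test now runs subprocess.Popen inside a with-statement, so the object used in the block is whatever __enter__() returns, not the Popen return value itself. A self-contained sketch of the same trick, assuming only the standard library (unittest.mock instead of the six/mock imports used in the real tests):

    import subprocess
    from unittest import mock

    with mock.patch("subprocess.Popen") as mock_popen:
        attrs = {"return_value.__enter__.return_value."
                 "stdout.readline.return_value": b"test_deploy_id"}
        mock_popen.configure_mock(**attrs)
        with subprocess.Popen("true", shell=True) as proc:  # mocked, runs nothing
            assert proc.stdout.readline() == b"test_deploy_id"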
diff --git a/functest/tests/unit/openstack/vmtp/test_vmtp.py b/functest/tests/unit/openstack/vmtp/test_vmtp.py
index fcbb0c46b..850273476 100644
--- a/functest/tests/unit/openstack/vmtp/test_vmtp.py
+++ b/functest/tests/unit/openstack/vmtp/test_vmtp.py
@@ -66,21 +66,22 @@ class VmtpTesting(unittest.TestCase):
def test_generate_keys1(self, *args):
self.testcase.generate_keys()
self.testcase.cloud.create_keypair.assert_called_once_with(
- 'vmtp_{}'.format(self.testcase.guid))
+ f'vmtp_{self.testcase.guid}')
self.testcase.cloud.delete_keypair.assert_called_once_with('id')
- calls = [mock.call(self.testcase.privkey_filename, 'w'),
- mock.call(self.testcase.pubkey_filename, 'w')]
+ calls = [mock.call(
+ self.testcase.privkey_filename, 'w', encoding='utf-8'),
+ mock.call(
+ self.testcase.pubkey_filename, 'w', encoding='utf-8')]
args[0].assert_has_calls(calls, any_order=True)
@mock.patch('six.moves.builtins.open')
def test_generate_keys2(self, *args):
- # pylint: disable=bad-continuation
with mock.patch.object(
self.testcase.cloud, "create_keypair",
side_effect=shade.OpenStackCloudException(None)) as mock_obj, \
self.assertRaises(shade.OpenStackCloudException):
self.testcase.generate_keys()
- mock_obj.assert_called_once_with('vmtp_{}'.format(self.testcase.guid))
+ mock_obj.assert_called_once_with(f'vmtp_{self.testcase.guid}')
args[0].assert_not_called()
diff --git a/functest/tests/unit/openstack/vping/test_vping_ssh.py b/functest/tests/unit/openstack/vping/test_vping_ssh.py
index bc1148da4..a07148aab 100644
--- a/functest/tests/unit/openstack/vping/test_vping_ssh.py
+++ b/functest/tests/unit/openstack/vping/test_vping_ssh.py
@@ -47,7 +47,7 @@ class VpingSSHTesting(unittest.TestCase):
self.vping.prepare()
args[0].assert_called_once_with()
args[1].assert_called_once_with(
- '{}-vm2_{}'.format(self.vping.case_name, self.vping.guid),
+ f'{self.vping.case_name}-vm2_{self.vping.guid}',
security_groups=[self.vping.sec.id])
@mock.patch('functest.opnfv_tests.openstack.vping.vping_ssh.VPingSSH.'
@@ -58,7 +58,7 @@ class VpingSSHTesting(unittest.TestCase):
self.vping.prepare()
args[0].assert_called_once_with()
args[1].assert_called_once_with(
- '{}-vm2_{}'.format(self.vping.case_name, self.vping.guid),
+ f'{self.vping.case_name}-vm2_{self.vping.guid}',
security_groups=[self.vping.sec.id])
@mock.patch('functest.opnfv_tests.openstack.vping.vping_ssh.VPingSSH.'
@@ -70,7 +70,7 @@ class VpingSSHTesting(unittest.TestCase):
with self.assertRaises(ssh_exception.SSHException):
self.vping.execute()
self.vping.ssh.exec_command.assert_called_once_with(
- 'ping -c 1 {}'.format(self.vping.vm2.private_v4))
+ f'ping -c 1 {self.vping.vm2.private_v4}')
args[0].assert_called_once_with('foo')
@mock.patch('functest.opnfv_tests.openstack.vping.vping_ssh.VPingSSH.'
@@ -94,7 +94,7 @@ class VpingSSHTesting(unittest.TestCase):
self.assertEqual(self.vping.execute(), ret)
mock_check.assert_called_once_with('foo')
self.vping.ssh.exec_command.assert_called_once_with(
- 'ping -c 1 {}'.format(self.vping.vm2.private_v4))
+ f'ping -c 1 {self.vping.vm2.private_v4}')
def test_execute1(self):
self._test_execute()
diff --git a/functest/tests/unit/utils/test_functest_utils.py b/functest/tests/unit/utils/test_functest_utils.py
index 4f3f16f23..4b642ff9d 100644
--- a/functest/tests/unit/utils/test_functest_utils.py
+++ b/functest/tests/unit/utils/test_functest_utils.py
@@ -100,17 +100,18 @@ class FunctestUtilsTesting(unittest.TestCase):
mock.mock_open()) as mopen:
stream = six.BytesIO()
stream.write(self.cmd_readline().encode("utf-8"))
- mock_obj2 = mock.Mock()
- attrs = {'stdout': stream, 'wait.return_value': 1}
- mock_obj2.configure_mock(**attrs)
- mock_subproc_open.return_value = mock_obj2
+ attrs = {
+ 'return_value.__enter__.return_value.stdout': stream,
+ 'return_value.__enter__.return_value.wait.return_value': 1}
+ mock_subproc_open.configure_mock(**attrs)
resp = functest_utils.execute_command(
self.cmd, info=True, error_msg=self.error_msg, verbose=True,
output_file=self.output_file)
self.assertEqual(resp, 1)
- msg_exec = ("Executing command: '%s'" % self.cmd)
+ msg_exec = f"Executing command: '{self.cmd}'"
mock_logger_info.assert_called_once_with(msg_exec)
- mopen.assert_called_once_with(self.output_file, "w")
+ mopen.assert_called_once_with(
+ self.output_file, "w", encoding='utf-8')
mock_logger_error.assert_called_once_with(self.error_msg)
@mock.patch('functest.utils.functest_utils.LOGGER.info')
@@ -121,17 +122,18 @@ class FunctestUtilsTesting(unittest.TestCase):
mock.mock_open()) as mopen:
stream = six.BytesIO()
stream.write(self.cmd_readline().encode("utf-8"))
- mock_obj2 = mock.Mock()
- attrs = {'stdout': stream, 'wait.return_value': 0}
- mock_obj2.configure_mock(**attrs)
- mock_subproc_open.return_value = mock_obj2
+ attrs = {
+ 'return_value.__enter__.return_value.stdout': stream,
+ 'return_value.__enter__.return_value.wait.return_value': 0}
+ mock_subproc_open.configure_mock(**attrs)
resp = functest_utils.execute_command(
self.cmd, info=True, error_msg=self.error_msg, verbose=True,
output_file=self.output_file)
self.assertEqual(resp, 0)
- msg_exec = ("Executing command: '%s'" % self.cmd)
+            msg_exec = f"Executing command: '{self.cmd}'"
mock_logger_info.assert_called_once_with(msg_exec)
- mopen.assert_called_once_with(self.output_file, "w")
+ mopen.assert_called_once_with(
+ self.output_file, "w", encoding='utf-8')
@mock.patch('sys.stdout')
def test_exec_cmd_args_missing_ok(self, stdout=None):
@@ -140,10 +142,10 @@ class FunctestUtilsTesting(unittest.TestCase):
as mock_subproc_open:
stream = six.BytesIO()
stream.write(self.cmd_readline().encode("utf-8"))
- mock_obj2 = mock.Mock()
- attrs = {'stdout': stream, 'wait.return_value': 0}
- mock_obj2.configure_mock(**attrs)
- mock_subproc_open.return_value = mock_obj2
+ attrs = {
+ 'return_value.__enter__.return_value.stdout': stream,
+ 'return_value.__enter__.return_value.wait.return_value': 0}
+ mock_subproc_open.configure_mock(**attrs)
resp = functest_utils.execute_command(
self.cmd, info=False, error_msg="", verbose=False,
output_file=None)
@@ -154,12 +156,13 @@ class FunctestUtilsTesting(unittest.TestCase):
# pylint: disable=unused-argument
with mock.patch('functest.utils.functest_utils.subprocess.Popen') \
as mock_subproc_open:
+ attrs = {}
stream = six.BytesIO()
stream.write(self.cmd_readline().encode("utf-8"))
- mock_obj2 = mock.Mock()
- attrs = {'stdout': stream, 'wait.return_value': 1}
- mock_obj2.configure_mock(**attrs)
- mock_subproc_open.return_value = mock_obj2
+ attrs = {
+ 'return_value.__enter__.return_value.stdout': stream,
+ 'return_value.__enter__.return_value.wait.return_value': 1}
+ mock_subproc_open.configure_mock(**attrs)
resp = functest_utils.execute_command(
self.cmd, info=False, error_msg="", verbose=False,
output_file=None)
@@ -174,9 +177,9 @@ class FunctestUtilsTesting(unittest.TestCase):
mock_yaml.return_value = self.file_yaml
functest_utils.get_parameter_from_yaml(self.parameter,
self.test_file)
- self.assertTrue(("The parameter %s is not"
- " defined in config_functest.yaml" %
- self.parameter) in excep.exception)
+ self.assertTrue((f"The parameter {self.parameter} is not"
+ " defined in config_functest.yaml"
+ ) in excep.exception)
def test_get_param_from_yaml_def(self):
with mock.patch('six.moves.builtins.open', mock.mock_open()), \
@@ -317,6 +320,38 @@ class FunctestUtilsTesting(unittest.TestCase):
args[0].assert_called_once_with(cloud)
@mock.patch('functest.utils.functest_utils.get_nova_version',
+ return_value=(2, 87))
+ def test_openstack_version13(self, *args):
+ cloud = mock.Mock()
+ self.assertEqual(functest_utils.get_openstack_version(
+ cloud), "Ussuri")
+ args[0].assert_called_once_with(cloud)
+
+ @mock.patch('functest.utils.functest_utils.get_nova_version',
+ return_value=(2, 88))
+ def test_openstack_version14(self, *args):
+ cloud = mock.Mock()
+ self.assertEqual(functest_utils.get_openstack_version(
+ cloud), "Wallaby")
+ args[0].assert_called_once_with(cloud)
+
+ @mock.patch('functest.utils.functest_utils.get_nova_version',
+ return_value=(2, 89))
+ def test_openstack_version15(self, *args):
+ cloud = mock.Mock()
+ self.assertEqual(functest_utils.get_openstack_version(
+ cloud), "Xena")
+ args[0].assert_called_once_with(cloud)
+
+ @mock.patch('functest.utils.functest_utils.get_nova_version',
+ return_value=(2, 92))
+ def test_openstack_version16(self, *args):
+ cloud = mock.Mock()
+ self.assertEqual(functest_utils.get_openstack_version(
+ cloud), "Zed")
+ args[0].assert_called_once_with(cloud)
+
+ @mock.patch('functest.utils.functest_utils.get_nova_version',
return_value=None)
def test_openstack_version_exc(self, *args):
cloud = mock.Mock()
diff --git a/functest/utils/config.py b/functest/utils/config.py
index c2897d361..40414b88b 100644
--- a/functest/utils/config.py
+++ b/functest/utils/config.py
@@ -13,12 +13,13 @@ from functest.utils import env
class Config():
def __init__(self):
try:
- # pylint: disable=bad-continuation
with open(pkg_resources.resource_filename(
- 'functest', 'ci/config_functest.yaml')) as yfile:
+ 'functest', 'ci/config_functest.yaml'),
+ encoding='utf-8') as yfile:
self.functest_yaml = yaml.safe_load(yfile)
except Exception as error:
- raise Exception('Parse config failed: {}'.format(str(error)))
+ raise Exception(
+ f'Parse config failed: {str(error)}') from error
@staticmethod
def _merge_dicts(dict1, dict2):
@@ -34,7 +35,7 @@ class Config():
yield (k, dict2[k])
def patch_file(self, patch_file_path):
- with open(patch_file_path) as yfile:
+ with open(patch_file_path, encoding='utf-8') as yfile:
patch_file = yaml.safe_load(yfile)
for key in patch_file:
@@ -53,13 +54,14 @@ class Config():
@staticmethod
def _get_attr_further(attr_now, next): # pylint: disable=redefined-builtin
return attr_now if next == 'general' else (
- '{}_{}'.format(attr_now, next) if attr_now else next)
+ f'{attr_now}_{next}' if attr_now else next)
def fill(self):
try:
self._parse(None, self.functest_yaml)
except Exception as error:
- raise Exception('Parse config failed: {}'.format(str(error)))
+ raise Exception(
+ f'Parse config failed: {str(error)}') from error
CONF = Config()
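Editor's note: a short sketch of the config.py change above. Re-raising with "from error" keeps the original exception attached as __cause__, so the underlying YAML failure stays visible in the traceback (parse_config below is a hypothetical stand-in, not the Config class itself):

    import yaml

    def parse_config(path):
        try:
            with open(path, encoding="utf-8") as yfile:
                return yaml.safe_load(yfile)
        except Exception as error:
            # chained: the original error becomes __cause__ of the new one
            raise Exception(f"Parse config failed: {error}") from error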
diff --git a/functest/utils/env.py b/functest/utils/env.py
index 672aee1d9..2e312726c 100644
--- a/functest/utils/env.py
+++ b/functest/utils/env.py
@@ -40,7 +40,8 @@ INPUTS = {
'SKIP_DOWN_HYPERVISORS': 'False',
'PUBLIC_ENDPOINT_ONLY': 'False',
'DASHBOARD_URL': '',
- 'VMTP_HYPERVISORS': ''
+ 'VMTP_HYPERVISORS': '',
+ 'NO_TENANT_NETWORK': 'False'
}
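Editor's note: the new NO_TENANT_NETWORK input defaults to 'False' like the other boolean-ish flags in INPUTS. Its consumer is not shown in this hunk; the following is only a hedged sketch of a plausible check, reusing the env.get() helper already seen elsewhere in this diff:

    from functest.utils import env

    def tenant_network_disabled():
        # assumption: the flag is compared case-insensitively, as a string
        return env.get("NO_TENANT_NETWORK").lower() == "true"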
diff --git a/functest/utils/functest_utils.py b/functest/utils/functest_utils.py
index c48acd3b9..eec544489 100644
--- a/functest/utils/functest_utils.py
+++ b/functest/utils/functest_utils.py
@@ -32,28 +32,26 @@ def execute_command_raise(cmd, info=False, error_msg="",
def execute_command(cmd, info=False, error_msg="",
verbose=True, output_file=None):
if not error_msg:
- error_msg = ("The command '%s' failed." % cmd)
- msg_exec = ("Executing command: '%s'" % cmd)
+ error_msg = f"The command '{cmd}' failed."
+ msg_exec = f"Executing command: '{cmd}'"
if verbose:
if info:
LOGGER.info(msg_exec)
else:
LOGGER.debug(msg_exec)
- popen = subprocess.Popen(
- cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- if output_file:
- ofd = open(output_file, "w")
- for line in iter(popen.stdout.readline, b''):
+ with subprocess.Popen(
+ cmd, shell=True, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT) as popen:
if output_file:
- ofd.write(line.decode("utf-8"))
- else:
- line = line.decode("utf-8").replace('\n', '')
- print(line)
- sys.stdout.flush()
- if output_file:
- ofd.close()
- popen.stdout.close()
- returncode = popen.wait()
+ with open(output_file, "w", encoding='utf-8') as ofd:
+ for line in iter(popen.stdout.readline, b''):
+ if output_file:
+ ofd.write(line.decode("utf-8"))
+ else:
+ line = line.decode("utf-8").replace('\n', '')
+ print(line)
+ sys.stdout.flush()
+ returncode = popen.wait()
if returncode != 0:
if verbose:
LOGGER.error(error_msg)
@@ -67,14 +65,14 @@ def get_parameter_from_yaml(parameter, yfile):
parameter must be given in string format with dots
Example: general.openstack.image_name
"""
- with open(yfile) as yfd:
+ with open(yfile, encoding='utf-8') as yfd:
file_yaml = yaml.safe_load(yfd)
value = file_yaml
for element in parameter.split("."):
value = value.get(element)
if value is None:
- raise ValueError("The parameter %s is not defined in"
- " %s" % (parameter, yfile))
+ raise ValueError(f"The parameter {parameter} is not defined in"
+ f" {yfile}")
return value
@@ -114,8 +112,14 @@ def get_openstack_version(cloud):
version = get_nova_version(cloud)
try:
assert version
- if version > (2, 87):
+ if version > (2, 93):
osversion = "Master"
+ elif version > (2, 90):
+ osversion = "Zed"
+ elif version > (2, 88):
+ osversion = "Xena"
+ elif version > (2, 87):
+ osversion = "Wallaby"
elif version > (2, 79):
osversion = "Ussuri"
elif version > (2, 72):
@@ -195,14 +199,13 @@ def search_services(cloud, name_or_id=None, filters=None):
def convert_dict_to_ini(value):
"Convert dict to oslo.conf input"
assert isinstance(value, dict)
- return ",".join("{}:{}".format(
- key, val) for (key, val) in six.iteritems(value))
+ return ",".join(f"{key}:{val}" for (key, val) in six.iteritems(value))
def convert_list_to_ini(value):
"Convert list to oslo.conf input"
assert isinstance(value, list)
- return ",".join("{}".format(val) for val in value)
+ return ",".join(val for val in value)
def convert_ini_to_dict(value):
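Editor's note: the extended mapping above mirrors the new unit tests: get_nova_version() yields a (major, minor) compute-API microversion and get_openstack_version() translates it into a release name. A quick illustration of the boundaries, mocking get_nova_version() the same way the tests do:

    from unittest import mock
    from functest.utils import functest_utils

    cloud = mock.Mock()
    for microversion, release in [((2, 87), "Ussuri"), ((2, 88), "Wallaby"),
                                  ((2, 89), "Xena"), ((2, 92), "Zed")]:
        with mock.patch("functest.utils.functest_utils.get_nova_version",
                        return_value=microversion):
            assert functest_utils.get_openstack_version(cloud) == release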
diff --git a/requirements.txt b/requirements.txt
index be0760858..385f8d8e6 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -8,9 +8,9 @@ robotframework>=3.0
scp
cloudify-rest-client
mock!=4.0.0,!=4.0.1 # BSD
-PrettyTable<0.8 # BSD
+PrettyTable!=3.4.0 # BSD
six # MIT
-paramiko # LGPLv2.1+
+paramiko!=2.9.0,!=2.9.1 # LGPLv2.1+
Jinja2 # BSD License (3 clause)
xtesting
os-client-config # Apache-2.0
@@ -19,3 +19,4 @@ ruamel.yaml.jinja2 # MIT
tempest # Apache-2.0
rally
rally-openstack
+munch # MIT
diff --git a/rtd-requirements.txt b/rtd-requirements.txt
deleted file mode 100644
index feba192d4..000000000
--- a/rtd-requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-git+https://gerrit.opnfv.org/gerrit/snaps#egg=snaps
--r requirements.txt
--r test-requirements.txt
diff --git a/setup.cfg b/setup.cfg
index d6bdfd1c6..3b466985a 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -26,7 +26,6 @@ xtesting.testcase =
cinder_test = functest.opnfv_tests.openstack.cinder.cinder_test:CinderCheck
odl = functest.opnfv_tests.sdn.odl.odl:ODLTests
tempest_common = functest.opnfv_tests.openstack.tempest.tempest:TempestCommon
- tempest_horizon = functest.opnfv_tests.openstack.tempest.tempest:TempestHorizon
tempest_heat = functest.opnfv_tests.openstack.tempest.tempest:TempestHeat
rally_sanity = functest.opnfv_tests.openstack.rally.rally:RallySanity
refstack = functest.opnfv_tests.openstack.refstack.refstack:Refstack
diff --git a/test-requirements.txt b/test-requirements.txt
index 4cc86c858..03f57b9cc 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -3,14 +3,16 @@
# process, which may cause wedges in the gate later.
coverage!=4.4 # Apache-2.0
mock!=4.0.0,!=4.0.1 # BSD
-nose # LGPL
+pytest # MIT
+pytest-html # MPL-2.0
+pytest-cov # MIT
flake8 # MIT
pylint # GPLv2
-sphinx!=1.6.6,!=1.6.7,!=2.1.0,!=3.0.0 # BSD
+sphinx!=1.6.6,!=1.6.7,!=2.1.0,!=3.0.0,!=3.4.2 # BSD
sphinx-rtd-theme
yamllint
-ansible-lint
doc8 # Apache-2.0
bashate # Apache-2.0
bandit
sphinxcontrib-spelling
+pre-commit
diff --git a/tox.ini b/tox.ini
index 466c415a8..1bebeb93a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = docs,pep8,pylint,yamllint,ansiblelint,bashate,bandit,py38,cover,perm
+envlist = docs,pep8,pylint,yamllint,bashate,bandit,py310,cover,perm,pre-commit
[testenv]
usedevelop = True
@@ -8,17 +8,16 @@ deps =
-c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/master/upper-constraints.txt}
-r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
-install_command = pip install {opts} {packages}
-commands = nosetests --with-xunit \
- --with-coverage \
- --cover-tests \
- --cover-package=functest \
- --cover-xml \
- --cover-html \
- functest/tests/unit
+install_command = pip install --use-deprecated=legacy-resolver {opts} {packages}
+commands =
+ pytest \
+ --junit-xml=junit.xml \
+ --html=report.html --self-contained-html \
+ --cov=xtesting --cov-reset --cov-report html \
+ functest/tests/unit
[testenv:docs]
-basepython = python3.8
+basepython = python3.10
commands =
doc8 \
--ignore-path api/build \
@@ -32,18 +31,19 @@ commands =
sphinx-build -W -b spelling -Dextensions=sphinxcontrib.spelling docs docs/build/spellcheck
[testenv:pep8]
-basepython = python3.8
+basepython = python3.10
commands = flake8
[testenv:pylint]
-basepython = python3.8
+basepython = python3.10
commands =
pylint \
- --ignore-imports=y --min-similarity-lines=10 \
+ --ignore-imports=y --min-similarity-lines=15 \
+ --generated-members=os.* \
--disable=locally-disabled functest
[testenv:yamllint]
-basepython = python3.8
+basepython = python3.10
files =
.travis.yml
docker
@@ -56,16 +56,8 @@ files =
commands =
yamllint -s {[testenv:yamllint]files}
-[testenv:ansiblelint]
-basepython = python3.8
-commands =
- ansible-lint -x303 ansible/site.yml
-
-[testenv:py37]
-commands = nosetests functest/tests/unit
-
[testenv:bashate]
-basepython = python3.8
+basepython = python3.10
files =
functest/opnfv_tests/openstack/cinder/write_data.sh
functest/opnfv_tests/openstack/cinder/read_data.sh
@@ -75,31 +67,32 @@ files =
build.sh
commands = bashate -e E005,E006,E042,E043 {[testenv:bashate]files}
-
[testenv:bandit]
-basepython = python3.8
+basepython = python3.10
commands = bandit -r functest -x tests -n 5 -ll -s B601,B602
[testenv:cover]
-basepython = python3.8
+basepython = python3.10
dirs =
functest/tests/unit/odl
functest/tests/unit/openstack/vping
functest/tests/unit/openstack/cinder
-commands = nosetests --with-coverage --cover-tests \
- --cover-package functest.opnfv_tests.sdn.odl \
- --cover-package functest.opnfv_tests.openstack.vping.vping_ssh \
- --cover-package functest.opnfv_tests.openstack.cinder.cinder_test \
- --cover-package functest.tests.unit \
- --cover-min-percentage 100 {[testenv:cover]dirs}
+commands =
+ pytest --cov=xtesting --cov-reset --cov-report html --cov-fail-under=100 \
+ {[testenv:cover]dirs}
[testenv:perm]
-basepython = python3.8
-whitelist_externals = bash
-path=. -not -path './.tox/*' -not -path './.git/*' -not -path './docs/com/pres/reveal.js/*'
+basepython = python3.10
+allowlist_externals = sh
+path=. -not -path './.tox/*' -not -path './.git/*' -not -path './docs/com/pres/reveal.js/*' -not -path './elements/functest/install.d/*'
commands =
- bash -c "\
+ sh -c "\
find {[testenv:perm]path} \( -type f -not -perm 644 -o -type d -not -perm 755 \) \
-exec ls -l \{\} + | grep '.' && exit 1 || exit 0"
- bash -c "\
+ sh -c "\
find {[testenv:perm]path} -exec file \{\} + | grep CRLF && exit 1 || exit 0"
+
+[testenv:pre-commit]
+basepython = python3.10
+commands =
+ pre-commit run --all-files --show-diff-on-failure
diff --git a/upper-constraints.txt b/upper-constraints.txt
index 420d93ba0..5eff3d20f 100644
--- a/upper-constraints.txt
+++ b/upper-constraints.txt
@@ -1,31 +1,24 @@
git+https://gerrit.opnfv.org/gerrit/functest#egg=functest
-git+https://gerrit.opnfv.org/gerrit/releng#egg=opnfv&subdirectory=modules
-git+https://gerrit.opnfv.org/gerrit/snaps@0dacfaa2fbd2dfe7fc9d438b9350a0187506e61c#egg=snaps
-git+https://gerrit.opnfv.org/gerrit/barometer#egg=baro_tests
-git+https://gerrit.opnfv.org/gerrit/sfc#egg=sfc
--e git+https://gerrit.opnfv.org/gerrit/doctor#egg=doctor-tests
-git+https://gerrit.opnfv.org/gerrit/stor4nfv#egg=stor4nfv-tests
-git+https://gerrit.opnfv.org/gerrit/clover#egg=clover
-git+https://gerrit.opnfv.org/gerrit/parser#egg=nfv-heattranslator&subdirectory=tosca2heat/heat-translator
-git+https://gerrit.opnfv.org/gerrit/parser#egg=nfv-toscaparser&subdirectory=tosca2heat/tosca-parser
--e git+https://gerrit.opnfv.org/gerrit/parser#egg=nfv-parser
git+https://github.com/collivier/cloudify-rest-client.git@4.3.3-py3#egg=cloudify-rest-client
-robotframework===3.1.1
+robotframework===4.1.2
robotframework-httplibrary===0.4.2
-robotframework-requests===0.5.0
-robotframework-sshlibrary===3.3.0
-ansible===2.9.2
-xtesting===0.91.0
-git+https://github.com/PyCQA/bandit@3d0824676974e7e2e9635c10bc4f12e261f1dbdf#egg=bandit
-bandit===1.1.0
+robotframework-requests===0.9.2
+robotframework-sshlibrary===3.8.0
+xtesting===0.98.0
+bandit===1.7.5
+bandit===1.7.0
ruamel.yaml.jinja2==0.2.2
-e git+https://opendev.org/openstack/tempest#egg=tempest
-e git+https://opendev.org/openstack/rally.git#egg=rally
git+https://opendev.org/openstack/rally-openstack.git#egg=rally-openstack
git+https://github.com/xrally/xrally-kubernetes.git#egg=xrally-kubernetes
-pylint===1.9.5;python_version=='2.7'
-pylint===2.4.4;python_version=='3.8'
-flake8===3.7.9
-nose===1.3.7
-ruamel.yaml===0.15.100
+pylint===2.11.1
+flake8===4.0.1
+pytest===7.1.2
+pytest-cov===3.0.0
+pytest-html===3.1.1
+ruamel.yaml===0.17.17
sphinxcontrib-spelling===4.3.0
+ansible-lint===5.2.1
+setuptools_scm===6.3.2
+pre-commit===3.1.1