-rwxr-xr-x  docker/run_tests.sh                                                          42
-rw-r--r--  docs/devguide/index.rst                                                       6
-rw-r--r--  docs/results/apex-os-ocl-nofeature-ha.rst                                     10
-rw-r--r--  docs/results/apex-os-odl_l2-nofeature-ha.rst                                  10
-rw-r--r--  docs/results/apex-os-odl_l2-sfc-ha.rst                                        10
-rw-r--r--  docs/results/apex-os-odl_l3-nofeature-ha.rst                                  10
-rw-r--r--  docs/results/apex-os-onos-nofeature-ha.rst                                    10
-rw-r--r--  docs/results/compass-os-nosdn-nofeature-ha.rst                               355
-rw-r--r--  docs/results/compass-os-ocl-nofeature-ha.rst                                 356
-rw-r--r--  docs/results/compass-os-odl_l2-nofeature-ha.rst                              357
-rw-r--r--  docs/results/compass-os-onos-nofeature-ha.rst                                356
-rw-r--r--  docs/results/fuel-os-nosdn-kvm-ha.rst                                         10
-rw-r--r--  docs/results/fuel-os-nosdn-nofeature-ha.rst                                   10
-rw-r--r--  docs/results/fuel-os-nosdn-ovs-ha.rst                                         10
-rw-r--r--  docs/results/fuel-os-odl_l2-bgpvpn-ha.rst                                     10
-rw-r--r--  docs/results/fuel-os-odl_l2-nofeature-ha.rst                                  10
-rw-r--r--  docs/results/fuel-os-odl_l3-nofeature-ha.rst                                  10
-rw-r--r--  docs/results/fuel-os-onos-nofeature-ha.rst                                    10
-rw-r--r--  docs/results/index.rst                                                        12
-rw-r--r--  docs/results/joid-os-nosdn-nofeature-ha.rst                                   10
-rw-r--r--  docs/results/joid-os-ocl-nofeature-ha.rst                                     10
-rw-r--r--  docs/results/joid-os-odl_l2-nofeature-ha.rst                                  10
-rw-r--r--  docs/results/joid-os-onos-nofeature-ha.rst                                    10
-rw-r--r--  docs/results/overview.rst                                                     12
-rw-r--r--  docs/results/results.rst                                                      43
-rw-r--r--  docs/userguide/description.rst                                                51
-rw-r--r--  docs/userguide/index.rst                                                     538
-rw-r--r--  docs/userguide/runfunctest.rst                                               135
-rw-r--r--  testcases/Controllers/ODL/CI/odlreport2db.py                                   1
-rw-r--r--  testcases/Controllers/ONOS/Teston/CI/onosfunctest.py                           1
-rwxr-xr-x  testcases/VIM/OpenStack/CI/libraries/run_rally-cert.py                       189
-rwxr-xr-x  testcases/VIM/OpenStack/CI/libraries/run_rally.py                             29
-rw-r--r--  testcases/VIM/OpenStack/CI/libraries/run_tempest.py                           20
-rw-r--r--  testcases/config_functest.yaml                                                10
-rw-r--r--  testcases/features/doctor.py                                                   2
-rw-r--r--  testcases/functest_utils.py                                                   94
-rw-r--r--  testcases/tests/TestFunctestUtils.py                                           7
-rw-r--r--  testcases/vIMS/CI/vIMS.py                                                      9
-rw-r--r--  testcases/vPing/CI/libraries/vPing_ssh.py (renamed from testcases/vPing/CI/libraries/vPing2.py)            9
-rw-r--r--  testcases/vPing/CI/libraries/vPing_userdata.py (renamed from testcases/vPing/CI/libraries/vPing.py)        9
40 files changed, 2161 insertions, 642 deletions
diff --git a/docker/run_tests.sh b/docker/run_tests.sh
index 9d5f681ca..0f9e8a30c 100755
--- a/docker/run_tests.sh
+++ b/docker/run_tests.sh
@@ -22,13 +22,15 @@ where:
-h|--help show this help text
-r|--report push results to database (false by default)
-n|--no-clean do not clean OpenStack resources after test run
+ -s|--serial run tests in one thread
-t|--test run specific set of tests
- <test_name> one or more of the following: vping,odl,rally,tempest,vims,onos,promise,ovno. Separated by comma.
+ <test_name> one or more of the following separated by comma:
+ vping_ssh,vping_userdata,odl,rally,tempest,vims,onos,promise,ovno
examples:
$(basename "$0")
- $(basename "$0") --test vping,odl
+ $(basename "$0") --test vping_ssh,odl
$(basename "$0") -t tempest,rally"
@@ -37,6 +39,8 @@ examples:
offline=false
report=""
clean=true
+serial=false
+
# Get the list of runnable tests
# Check if we are in CI mode
@@ -81,16 +85,25 @@ function run_test(){
echo " Running test case: $i"
echo "----------------------------------------------"
echo ""
+ clean_flag=""
+ if [ $clean == "false" ]; then
+ clean_flag="-n"
+ fi
+ serial_flag=""
+ if [ $serial == "true" ]; then
+ serial_flag="-s"
+ fi
+
case $test_name in
- "vping")
- info "Running vPing test..."
- python ${FUNCTEST_REPO_DIR}/testcases/vPing/CI/libraries/vPing2.py \
- --debug ${report}
+ "vping_ssh")
+ info "Running vPing-SSH test..."
+ python ${FUNCTEST_REPO_DIR}/testcases/vPing/CI/libraries/vPing_ssh.py \
+ --debug $clean_flag ${report}
;;
"vping_userdata")
- info "Running vPing test using userdata/cloudinit.... "
- python ${FUNCTEST_REPO_DIR}/testcases/vPing/CI/libraries/vPing.py \
- --debug ${report}
+ info "Running vPing-userdata test... "
+ python ${FUNCTEST_REPO_DIR}/testcases/vPing/CI/libraries/vPing_userdata.py \
+ --debug $clean_flag ${report}
;;
"odl")
info "Running ODL test..."
@@ -109,7 +122,7 @@ function run_test(){
"tempest")
info "Running Tempest tests..."
python ${FUNCTEST_REPO_DIR}/testcases/VIM/OpenStack/CI/libraries/run_tempest.py \
- --debug -m custom ${report}
+ --debug $serial_flag $clean_flag -m custom ${report}
# save tempest.conf for further troubleshooting
tempest_conf="${RALLY_VENV_DIR}/tempest/for-deployment-*/tempest.conf"
if [ -f ${tempest_conf} ]; then
@@ -120,13 +133,13 @@ function run_test(){
"vims")
info "Running vIMS test..."
python ${FUNCTEST_REPO_DIR}/testcases/vIMS/CI/vIMS.py \
- --debug ${report}
+ --debug $clean_flag ${report}
clean_openstack
;;
"rally")
info "Running Rally benchmark suite..."
python ${FUNCTEST_REPO_DIR}/testcases/VIM/OpenStack/CI/libraries/run_rally-cert.py \
- --debug all ${report}
+ --debug $clean_flag all ${report}
clean_openstack
;;
@@ -207,6 +220,9 @@ while [[ $# > 0 ]]
-n|--no-clean)
clean=false
;;
+ -s|--serial)
+ serial=true
+ ;;
-t|--test|--tests)
TEST="$2"
shift
@@ -225,7 +241,7 @@ if [[ -n "$DEPLOY_SCENARIO" && "$DEPLOY_SCENARIO" != "none" ]] &&\
[[ -f $tests_file ]]; then
arr_test=($(cat $tests_file))
else
- arr_test=(vping tempest vims rally)
+ arr_test=(vping_ssh tempest vims rally)
fi
BASEDIR=`dirname $0`
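
For reference, a minimal sketch of how the new options combine (derived from the
hunks above; the invocation is illustrative and not part of the patch)::

    # run the vPing-SSH and Tempest cases serially, without cleaning OpenStack resources
    run_tests.sh -t vping_ssh,tempest -s -n

    # with serial=true and clean=false, the Tempest case above then resolves to:
    python ${FUNCTEST_REPO_DIR}/testcases/VIM/OpenStack/CI/libraries/run_tempest.py \
        --debug -s -n -m custom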
diff --git a/docs/devguide/index.rst b/docs/devguide/index.rst
index 7dd5cc790..7f3233c4c 100644
--- a/docs/devguide/index.rst
+++ b/docs/devguide/index.rst
@@ -41,7 +41,8 @@ Functest can be described as follow::
Functest deals with internal and external test cases.
The Internal test cases in Brahmaputra are:
- * vPing
+ * vPing_SSH
+ * vPing_userdata
* ODL
* Tempest
* vIMS
@@ -265,7 +266,8 @@ own project::
And do not forget to update also the help line::
-t|--test run specific set of tests
- <test_name> one or more of the following: vping,odl,rally,tempest,vims,onos, promise. Separated by comma.
+ <test_name> one or more of the following separated by comma:
+ vping_ssh,vping_userdata,odl,rally,tempest,vims,onos,promise,ovno
config_funtest.yaml
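
In addition to the help line above, a new test also needs a matching entry in the
run_test() case statement of run_tests.sh; a hypothetical sketch (test name and
script path are placeholders, not part of this change)::

    "mytest")
        info "Running MyTest..."
        python ${FUNCTEST_REPO_DIR}/testcases/MyProject/CI/mytest.py \
            --debug $clean_flag ${report}
    ;;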
diff --git a/docs/results/apex-os-ocl-nofeature-ha.rst b/docs/results/apex-os-ocl-nofeature-ha.rst
new file mode 100644
index 000000000..021f8b54d
--- /dev/null
+++ b/docs/results/apex-os-ocl-nofeature-ha.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Test results for apex-os-ocl-nofeature-ha
+=========================================
+
+.. Add any text in here that could be useful for a reader.
+
+.. Add the test results in a consistent format.
+
diff --git a/docs/results/apex-os-odl_l2-nofeature-ha.rst b/docs/results/apex-os-odl_l2-nofeature-ha.rst
new file mode 100644
index 000000000..fb3218583
--- /dev/null
+++ b/docs/results/apex-os-odl_l2-nofeature-ha.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Test results for apex-os-odl_l2-nofeature-ha
+============================================
+
+.. Add any text in here that could be useful for a reader.
+
+.. Add the test results in a consistent format.
+
diff --git a/docs/results/apex-os-odl_l2-sfc-ha.rst b/docs/results/apex-os-odl_l2-sfc-ha.rst
new file mode 100644
index 000000000..4b3551fb2
--- /dev/null
+++ b/docs/results/apex-os-odl_l2-sfc-ha.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Test results for apex-os-odl_l2-sfc-ha
+======================================
+
+.. Add any text in here that could be useful for a reader.
+
+.. Add the test results in a consistent format.
+
diff --git a/docs/results/apex-os-odl_l3-nofeature-ha.rst b/docs/results/apex-os-odl_l3-nofeature-ha.rst
new file mode 100644
index 000000000..38cb96b85
--- /dev/null
+++ b/docs/results/apex-os-odl_l3-nofeature-ha.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Test results for apex-os-odl_l3-nofeature-ha
+============================================
+
+.. Add any text in here that could be useful for a reader.
+
+.. Add the test results in a consistent format.
+
diff --git a/docs/results/apex-os-onos-nofeature-ha.rst b/docs/results/apex-os-onos-nofeature-ha.rst
new file mode 100644
index 000000000..f3d73ba87
--- /dev/null
+++ b/docs/results/apex-os-onos-nofeature-ha.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Test results for apex-os-onos-nofeature-ha
+==========================================
+
+.. Add any text in here that could be useful for a reader.
+
+.. Add the test results in a consistent format.
+
diff --git a/docs/results/compass-os-nosdn-nofeature-ha.rst b/docs/results/compass-os-nosdn-nofeature-ha.rst
new file mode 100644
index 000000000..23a0e82d2
--- /dev/null
+++ b/docs/results/compass-os-nosdn-nofeature-ha.rst
@@ -0,0 +1,355 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Detailed test results for compass-os-nosdn-nofeature-ha
+=======================================================
+
+.. Add any text in here that could be useful for a reader.
+
+The following section outlines the detailed Functest results for the Brahmaputra scenario
+deploying OpenStack in a Pharos environment using the Compass installer.
+
+VIM
+---
+
+vPing_SSH
+^^^^^^^^^
+
+vPing test case output is displayed in the console::
+
+ FUNCTEST.info: Running vPing test...
+ 2016-01-23 03:18:20,153 - vPing- INFO - Creating neutron network vping-net...
+ 2016-01-23 03:18:35,476 - vPing- INFO - Flavor found 'm1.small'
+ 2016-01-23 03:18:36,350 - vPing- INFO - vPing Start Time:'2016-01-23 03:18:36'
+ 2016-01-23 03:18:38,571 - vPing- INFO - Creating instance 'opnfv-vping-1' with IP 192.168.130.30...
+ 2016-01-23 03:18:53,716 - vPing- INFO - Instance 'opnfv-vping-1' is ACTIVE.
+ 2016-01-23 03:18:55,239 - vPing- INFO - Creating instance 'opnfv-vping-2' with IP 192.168.130.40...
+ 2016-01-23 03:19:15,593 - vPing- INFO - Instance 'opnfv-vping-2' is ACTIVE.
+ 2016-01-23 03:19:15,593 - vPing- INFO - Creating floating IP for the second VM...
+ 2016-01-23 03:19:18,017 - vPing- INFO - Floating IP created: '10.2.65.6'
+ 2016-01-23 03:19:18,017 - vPing- INFO - Associating floating ip: '10.2.65.6' to VM2
+ 2016-01-23 03:19:37,839 - vPing- INFO - SCP ping script to VM2...
+ 2016-01-23 03:19:37,839 - vPing- INFO - Waiting for ping...
+ 2016-01-23 03:19:40,130 - vPing- INFO - vPing detected!
+ 2016-01-23 03:19:40,130 - vPing- INFO - vPing duration:'63.8'
+ 2016-01-23 03:19:40,130 - vPing- INFO - Cleaning up...
+ 2016-01-23 03:20:06,574 - vPing- INFO - Deleting network 'vping-net'...
+ 2016-01-23 03:20:13,587 - vPing- INFO - vPing OK
+
+
+
+
+vPing_userdata
+^^^^^^^^^^^^^^
+
+vPing_userdata results are displayed in the console::
+
+ 2016-01-06 16:06:20,550 - vPing- INFO - Creating neutron network vping-net...
+ 2016-01-06 16:06:23,867 - vPing- INFO - Flavor found 'm1.small'
+ 2016-01-06 16:06:24,457 - vPing- INFO - vPing Start Time:'2016-01-06 16:06:24'
+ 2016-01-06 16:06:24,626 - vPing- INFO - Creating instance 'opnfv-vping-1' with IP 192.168.130.30...
+ 2016-01-06 16:06:39,351 - vPing- INFO - Instance 'opnfv-vping-1' is ACTIVE.
+ 2016-01-06 16:06:39,650 - vPing- INFO - Creating instance 'opnfv-vping-2' with IP 192.168.130.40...
+ 2016-01-06 16:06:53,330 - vPing- INFO - Instance 'opnfv-vping-2' is ACTIVE.
+ 2016-01-06 16:06:53,330 - vPing- INFO - Waiting for ping...
+ 2016-01-06 16:06:58,669 - vPing- INFO - vPing detected!
+ 2016-01-06 16:06:58,669 - vPing- INFO - vPing duration:'34.2'
+ 2016-01-06 16:06:58,670 - vPing- INFO - Cleaning up...
+ 2016-01-06 16:07:12,661 - vPing- INFO - Deleting network 'vping-net'...
+ 2016-01-06 16:07:14,748 - vPing- INFO - vPing OK
+
+Tempest
+^^^^^^^
+
+The Tempest results are displayed in the console::
+
+ FUNCTEST.info: Running Tempest tests...
+ 2016-01-28 07:56:55,380 - run_tempest - INFO - Creating tenant and user for Tempest suite
+ 2016-01-28 07:56:56.127 23795 INFO rally.verification.tempest.tempest [-] Starting: Creating configuration file for Tempest.
+ 2016-01-28 07:56:59.512 23795 INFO rally.verification.tempest.tempest [-] Completed: Creating configuration file for Tempest.
+ 16-01-28 07:57:00,597 - run_tempest - INFO - Starting Tempest test suite: '--tests-file /home/opnfv/repos/functest/testcases/VIM/OpenStack/CI/custom_tests/test_list.txt'.
+ Total results of verification:
+ .
+ +--------------------------------------+--------------------------------------+----------+-------+----------+----------------------------+----------+
+ | UUID | Deployment UUID | Set name | Tests | Failures | Created at | Status |
+ +--------------------------------------+--------------------------------------+----------+-------+----------+----------------------------+----------+
+ | e0bf7770-2c0f-4c63-913c-cd51a6edd96d | 16582e1e-7b01-4d5d-9c13-a26db8567b7b | | 144 | 30 | 2016-01-28 07:57:01.044856 | finished |
+ +--------------------------------------+--------------------------------------+----------+-------+----------+----------------------------+----------+
+ .
+ Tests:
+ .
+ +------------------------------------------------------------------------------------------------------------------------------------------+-----------+---------+
+ | name | time | status |
+ +------------------------------------------------------------------------------------------------------------------------------------------+-----------+---------+
+ | tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_get_flavor | 0.29804 | success |
+ | tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_list_flavors | 0.06289 | success |
+ | tempest.api.compute.images.test_images.ImagesTestJSON.test_delete_saving_image | 9.21756 | success |
+ | tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_delete_image | 8.65376 | success |
+ | tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_image_specify_multibyte_character_image_name | 9.10993 | success |
+ | tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_filter_by_changes_since | 0.19585 | success |
+ ...........................................
+ 2016-01-28 08:19:32,132 - run_tempest - INFO - Results: {'timestart': '2016-01-2807:57:01.044856', 'duration': 1350, 'tests': 144, 'failures': 30}
+ 2016-01-28 08:19:32,133 - run_tempest - INFO - Pushing results to DB: 'http://testresults.opnfv.org/testapi/results'.
+ 2016-01-28 08:19:32,278 - run_tempest - INFO - Deleting tenant and user for Tempest suite)
+
+To check the debug information related to all available test cases, inspect the
+tempest.log file stored under */home/opnfv/functest/results/tempest/*.
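+
+For example, failures can be located quickly with a simple grep (a sketch; the
+exact log content depends on the run)::
+
+    grep -i -A 10 "traceback" /home/opnfv/functest/results/tempest/tempest.log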
+
+
+Rally
+^^^^^
+
+The Rally results are displayed in the console; each module is run one after the
+other and summary tables are displayed::
+
+ +-------------------------------------------------------------------------------------------+
+ | Response Times (sec) |
+ +----------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ | action | min | median | 90%ile | 95%ile | max | avg | success | count |
+ +----------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ | keystone.create_role | 0.358 | 0.572 | 0.772 | 0.811 | 1.106 | 0.603 | 100.0% | 20 |
+ | keystone.add_role | 0.32 | 0.436 | 0.846 | 0.903 | 1.018 | 0.51 | 100.0% | 20 |
+ | keystone.list_roles | 0.102 | 0.185 | 0.253 | 0.275 | 0.347 | 0.188 | 100.0% | 20 |
+ | total | 0.845 | 1.223 | 1.821 | 1.822 | 1.823 | 1.302 | 100.0% | 20 |
+ +----------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ Load duration: 7.13633608818
+ Full duration: 36.7863121033
+ ..............
+ +------------------------------------------------------------------------------------------+
+ | Response Times (sec) |
+ +---------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ | action | min | median | 90%ile | 95%ile | max | avg | success | count |
+ +---------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ | nova.create_keypair | 1.005 | 1.784 | 3.025 | 3.636 | 4.373 | 2.004 | 100.0% | 20 |
+ | nova.delete_keypair | 0.199 | 0.699 | 1.007 | 1.244 | 3.014 | 0.79 | 100.0% | 20 |
+ | total | 1.249 | 2.625 | 4.259 | 4.845 | 5.131 | 2.794 | 100.0% | 20 |
+ +---------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ Load duration: 14.9231169224
+ Full duration: 71.4614388943
+
+
+At the end of the module test, a message is displayed to provide a global
+summary::
+
+ 2016-02-04 12:50:18,382 - run_rally - INFO - Test scenario: "requests" OK.
+
+
+Controllers
+-----------
+
+OpenDaylight
+^^^^^^^^^^^^
+
+The results of ODL tests can be seen in the console::
+
+ ==============================================================================
+ Basic
+ ==============================================================================
+ Basic.010 Restconf OK :: Test suite to verify Restconf is OK
+ ==============================================================================
+ Get Controller Modules :: Get the controller modules via Restconf | PASS |
+ ------------------------------------------------------------------------------
+ Basic.010 Restconf OK :: Test suite to verify Restconf is OK | PASS |
+ 1 critical test, 1 passed, 0 failed
+ 1 test total, 1 passed, 0 failed
+ ==============================================================================
+ Basic | PASS |
+ 1 critical test, 1 passed, 0 failed
+ 1 test total, 1 passed, 0 failed
+ ==============================================================================
+ Output: /home/opnfv/repos/functest/output.xml
+ Log: /home/opnfv/repos/functest/log.html
+ Report: /home/opnfv/repos/functest/report.html
+
+ ..............................................................................
+
+ Neutron.Delete Networks :: Checking Network deleted in OpenStack a... | PASS |
+ 2 critical tests, 2 passed, 0 failed
+ 2 tests total, 2 passed, 0 failed
+ ==============================================================================
+ Neutron :: Test suite for Neutron Plugin | PASS |
+ 18 critical tests, 18 passed, 0 failed
+ 18 tests total, 18 passed, 0 failed
+ ==============================================================================
+ Output: /home/opnfv/repos/functest/output.xml
+ Log: /home/opnfv/repos/functest/log.html
+ Report: /home/opnfv/repos/functest/report.html
+
+3 result files are generated:
+
+ * output.xml
+ * log.html
+ * report.html
+
+**ODL result page**
+
+.. figure:: ../images/functestODL.png
+ :width: 170mm
+ :align: center
+ :alt: ODL suite result page
+
+
+ONOS
+^^^^
+
+The ONOS test logs can be found in OnosSystemTest/, TestON/ and logs/
+(ONOSCI_PATH to be added), and can also be seen in the console::
+
+ ******************************
+ Result summary for Testcase4
+ ******************************
+
+ 2016-01-14 05:25:40,529 - FUNCvirNetNBL3 - INFO - ONOS Router Delete test Start
+
+ [2016-01-14 05:25:40.529644] [FUNCvirNetNBL3] [CASE] Virtual Network NBI Test - Router
+ 2016-01-14 05:25:40,530 - FUNCvirNetNBL3 - INFO - Generate Post Data
+
+ [2016-01-14 05:25:40.530825] [FUNCvirNetNBL3] [STEP] 4.1: Post Network Data via HTTP(Post Router need post network)
+ 2016-01-14 05:25:40,531 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/networks/ using POST method.
+ 2016-01-14 05:25:40,539 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:25:40,539 - FUNCvirNetNBL3 - INFO - Post Network Success
+ 2016-01-14 05:25:40,539 - FUNCvirNetNBL3 - INFO - Assertion Passed
+
+ [2016-01-14 05:25:40.539687] [FUNCvirNetNBL3] [STEP] 4.2: Post Router Data via HTTP
+ 2016-01-14 05:25:40,540 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/routers/ using POST method.
+ 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Post Router Success
+ 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Assertion Passed
+
+ [2016-01-14 05:25:40.543489] [FUNCvirNetNBL3] [STEP] 4.3: Delete Router Data via HTTP
+ 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/routers/e44bd655-e22c-4aeb-b1e9-ea1606875178 using DELETE method.
+ 2016-01-14 05:25:40,546 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:25:40,546 - FUNCvirNetNBL3 - INFO - Delete Router Success
+ 2016-01-14 05:25:40,546 - FUNCvirNetNBL3 - INFO - Assertion Passed
+
+ [2016-01-14 05:25:40.546774] [FUNCvirNetNBL3] [STEP] 4.4: Get Router Data is NULL
+ 2016-01-14 05:25:40,547 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/routers/e44bd655-e22c-4aeb-b1e9-ea1606875178 using GET method.
+ 2016-01-14 05:25:40,550 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:25:40,550 - FUNCvirNetNBL3 - INFO - Get Router Success
+ 2016-01-14 05:25:40,550 - FUNCvirNetNBL3 - INFO - Assertion Passed
+
+
+ *****************************
+ Result: Pass
+ *****************************
+
+ .......................................................................................
+
+ ******************************
+ Result summary for Testcase9
+ ******************************
+ .......................................................................................
+
+
+ [2016-01-14 05:26:42.543489] [FUNCvirNetNBL3] [STEP] 9.6: FloatingIp Clean Data via HTTP
+ 2016-01-14 05:26:42,543 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/floatingips/e44bd655-e22c-4aeb-b1e9-ea1606875178 using DELETE method.
+ 2016-01-14 05:26:42,546 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:26:42,546 - FUNCvirNetNBL3 - ERROR - Delete Floatingip failed
+
+ .......................................................................................
+
+ *****************************
+ Result: Failed
+ *****************************
+
+There is a result summary for each test case, and a global summary for the whole test.
+If any problem occurs during the test, an ERROR message is provided both in the test output and in the global summary::
+
+ *************************************
+ Test Execution Summary
+ *************************************
+
+ Test Start : 14 Jan 2016 05:25:37
+ Test End : 14 Jan 2016 05:25:41
+ Execution Time : 0:00:03.349087
+ Total tests planned : 11
+ Total tests RUN : 11
+ Total Pass : 8
+ Total Fail : 3
+ Total No Result : 0
+ Success Percentage : 72%
+ Execution Result : 100%
+
+
+OpenContrail
+^^^^^^^^^^^^
+
+TODO OVNO
+
+
+Feature
+-------
+
+vIMS
+^^^^
+
+The vIMS results are displayed in the console::
+
+ FUNCTEST.info: Running vIMS test...
+ 2016-02-04 13:46:25,025 - vIMS - INFO - Prepare OpenStack plateform (create tenant and user)
+ 2016-02-04 13:46:25,312 - vIMS - INFO - Update OpenStack creds informations
+ 2016-02-04 13:46:25,312 - vIMS - INFO - Upload some OS images if it doesn't exist
+ 2016-02-04 13:46:25,566 - vIMS - INFO - centos_7 image doesn't exist on glance repository.
+ Try downloading this image and upload on glance !
+ 2016-02-04 13:47:06,167 - vIMS - INFO - ubuntu_14.04 image doesn't exist on glance repository.
+ Try downloading this image and upload on glance !
+ 2016-02-04 13:47:26,987 - vIMS - INFO - Update security group quota for this tenant
+ 2016-02-04 13:47:27,193 - vIMS - INFO - Update cinder quota for this tenant
+ 2016-02-04 13:47:27,746 - vIMS - INFO - Collect flavor id for cloudify manager server
+ 2016-02-04 13:47:28,326 - vIMS - INFO - Prepare virtualenv for cloudify-cli
+ 2016-02-04 13:48:00,657 - vIMS - INFO - Downloading the cloudify manager server blueprint
+ 2016-02-04 13:48:03,391 - vIMS - INFO - Cloudify deployment Start Time:'2016-02-04 13:48:03'
+ 2016-02-04 13:48:03,391 - vIMS - INFO - Writing the inputs file
+ 2016-02-04 13:48:03,395 - vIMS - INFO - Launching the cloudify-manager deployment
+ 2016-02-04 13:56:03,501 - vIMS - INFO - Cloudify-manager server is UP !
+ 2016-02-04 13:56:03,502 - vIMS - INFO - Cloudify deployment duration:'480.1'
+ 2016-02-04 13:56:03,502 - vIMS - INFO - Collect flavor id for all clearwater vm
+ 2016-02-04 13:56:04,093 - vIMS - INFO - vIMS VNF deployment Start Time:'2016-02-04 13:56:04'
+ 2016-02-04 13:56:04,093 - vIMS - INFO - Downloading the openstack-blueprint.yaml blueprint
+ 2016-02-04 13:56:06,265 - vIMS - INFO - Writing the inputs file
+ 2016-02-04 13:56:06,268 - vIMS - INFO - Launching the clearwater deployment
+ 2016-02-04 14:11:27,101 - vIMS - INFO - The deployment of clearwater-opnfv is ended
+ 2016-02-04 14:11:27,103 - vIMS - INFO - vIMS VNF deployment duration:'923.0'
+ 2016-02-04 14:14:31,976 - vIMS - INFO - vIMS functional test Start Time:'2016-02-04 14:14:31'
+ 2016-02-04 14:15:45,880 - vIMS - INFO - vIMS functional test duration:'73.9'
+ 2016-02-04 14:15:46,113 - vIMS - INFO - Launching the clearwater-opnfv undeployment
+ 2016-02-04 14:18:12,604 - vIMS - INFO - Launching the cloudify-manager undeployment
+ 2016-02-04 14:18:51,808 - vIMS - INFO - Cloudify-manager server has been successfully removed!
+ 2016-02-04 14:18:51,870 - vIMS - INFO - Removing vIMS tenant ..
+ 2016-02-04 14:18:52,131 - vIMS - INFO - Removing vIMS user ..
+
+Please note that the vIMS traces can be summarized in several steps:
+
+ * INFO: environment prepared successfully => environment OK
+ * INFO - Cloudify-manager server is UP ! => orchestrator deployed
+ * INFO - The deployment of clearwater-opnfv is ended => VNF deployed
+ * Multiple Identities (UDP) - (6505550771, 6505550675) Passed => tests run
+
+
+Promise
+^^^^^^^
+
+The results can be observed in the console::
+
+ Running test case: promise
+ ----------------------------------------------
+ FUNCTEST.info: Running PROMISE test case...
+ 2016-02-04 07:10:37,735 - Promise- INFO - Creating tenant 'promise'...
+ 2016-02-04 07:10:37,893 - Promise- INFO - Adding role '59828986a9a94dfaa852548599fde628' to tenant 'promise'...
+ 2016-02-04 07:10:38,005 - Promise- INFO - Creating user 'promiser'...
+ 2016-02-04 07:10:38,128 - Promise- INFO - Updating OpenStack credentials...
+ 2016-02-04 07:10:38,157 - Promise- INFO - Creating image 'promise-img' from '/home/opnfv/functest/data/cirros-0.3.4-x86_64-disk.img'...
+ 2016-02-04 07:10:42,016 - Promise- INFO - Creating flavor 'promise-flavor'...
+ 2016-02-04 07:10:42,836 - Promise- INFO - Exporting environment variables...
+ 2016-02-04 07:10:42,836 - Promise- INFO - Running command: DEBUG=1 npm run -s test -- --reporter json
+ 2016-02-04 07:10:51,505 - Promise- INFO - The test succeeded.
+ ....
+ **********************************
+ Promise test summary
+ **********************************
+ Test start: Thu Feb 04 07:10:42 UTC 2016
+ Test end: Thu Feb 04 07:10:51 UTC 2016
+ Execution time: 8.7
+ Total tests executed: 33
+ Total tests failed: 0
+ **********************************
+
diff --git a/docs/results/compass-os-ocl-nofeature-ha.rst b/docs/results/compass-os-ocl-nofeature-ha.rst
new file mode 100644
index 000000000..62746ad56
--- /dev/null
+++ b/docs/results/compass-os-ocl-nofeature-ha.rst
@@ -0,0 +1,356 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Detailed test results for compass-os-ocl-nofeature-ha
+=====================================================
+
+.. Add any text in here that could be useful for a reader.
+
+The following section outlines the detailed Functest results for the Brahmaputra scenario
+deploying OpenStack with an OpenContrail SDN controller in a Pharos environment
+using the Compass installer.
+
+VIM
+---
+
+vPing_SSH
+^^^^^^^^^
+
+vPing test case output is displayed in the console::
+
+ FUNCTEST.info: Running vPing test...
+ 2016-01-23 03:18:20,153 - vPing- INFO - Creating neutron network vping-net...
+ 2016-01-23 03:18:35,476 - vPing- INFO - Flavor found 'm1.small'
+ 2016-01-23 03:18:36,350 - vPing- INFO - vPing Start Time:'2016-01-23 03:18:36'
+ 2016-01-23 03:18:38,571 - vPing- INFO - Creating instance 'opnfv-vping-1' with IP 192.168.130.30...
+ 2016-01-23 03:18:53,716 - vPing- INFO - Instance 'opnfv-vping-1' is ACTIVE.
+ 2016-01-23 03:18:55,239 - vPing- INFO - Creating instance 'opnfv-vping-2' with IP 192.168.130.40...
+ 2016-01-23 03:19:15,593 - vPing- INFO - Instance 'opnfv-vping-2' is ACTIVE.
+ 2016-01-23 03:19:15,593 - vPing- INFO - Creating floating IP for the second VM...
+ 2016-01-23 03:19:18,017 - vPing- INFO - Floating IP created: '10.2.65.6'
+ 2016-01-23 03:19:18,017 - vPing- INFO - Associating floating ip: '10.2.65.6' to VM2
+ 2016-01-23 03:19:37,839 - vPing- INFO - SCP ping script to VM2...
+ 2016-01-23 03:19:37,839 - vPing- INFO - Waiting for ping...
+ 2016-01-23 03:19:40,130 - vPing- INFO - vPing detected!
+ 2016-01-23 03:19:40,130 - vPing- INFO - vPing duration:'63.8'
+ 2016-01-23 03:19:40,130 - vPing- INFO - Cleaning up...
+ 2016-01-23 03:20:06,574 - vPing- INFO - Deleting network 'vping-net'...
+ 2016-01-23 03:20:13,587 - vPing- INFO - vPing OK
+
+
+
+
+vPing_userdata
+^^^^^^^^^^^^^^
+
+vPing_userdata results are displayed in the console::
+
+ 2016-01-06 16:06:20,550 - vPing- INFO - Creating neutron network vping-net...
+ 2016-01-06 16:06:23,867 - vPing- INFO - Flavor found 'm1.small'
+ 2016-01-06 16:06:24,457 - vPing- INFO - vPing Start Time:'2016-01-06 16:06:24'
+ 2016-01-06 16:06:24,626 - vPing- INFO - Creating instance 'opnfv-vping-1' with IP 192.168.130.30...
+ 2016-01-06 16:06:39,351 - vPing- INFO - Instance 'opnfv-vping-1' is ACTIVE.
+ 2016-01-06 16:06:39,650 - vPing- INFO - Creating instance 'opnfv-vping-2' with IP 192.168.130.40...
+ 2016-01-06 16:06:53,330 - vPing- INFO - Instance 'opnfv-vping-2' is ACTIVE.
+ 2016-01-06 16:06:53,330 - vPing- INFO - Waiting for ping...
+ 2016-01-06 16:06:58,669 - vPing- INFO - vPing detected!
+ 2016-01-06 16:06:58,669 - vPing- INFO - vPing duration:'34.2'
+ 2016-01-06 16:06:58,670 - vPing- INFO - Cleaning up...
+ 2016-01-06 16:07:12,661 - vPing- INFO - Deleting network 'vping-net'...
+ 2016-01-06 16:07:14,748 - vPing- INFO - vPing OK
+
+Tempest
+^^^^^^^
+
+The Tempest results are displayed in the console::
+
+ FUNCTEST.info: Running Tempest tests...
+ 2016-01-28 07:56:55,380 - run_tempest - INFO - Creating tenant and user for Tempest suite
+ 2016-01-28 07:56:56.127 23795 INFO rally.verification.tempest.tempest [-] Starting: Creating configuration file for Tempest.
+ 2016-01-28 07:56:59.512 23795 INFO rally.verification.tempest.tempest [-] Completed: Creating configuration file for Tempest.
+ 16-01-28 07:57:00,597 - run_tempest - INFO - Starting Tempest test suite: '--tests-file /home/opnfv/repos/functest/testcases/VIM/OpenStack/CI/custom_tests/test_list.txt'.
+ Total results of verification:
+ .
+ +--------------------------------------+--------------------------------------+----------+-------+----------+----------------------------+----------+
+ | UUID | Deployment UUID | Set name | Tests | Failures | Created at | Status |
+ +--------------------------------------+--------------------------------------+----------+-------+----------+----------------------------+----------+
+ | e0bf7770-2c0f-4c63-913c-cd51a6edd96d | 16582e1e-7b01-4d5d-9c13-a26db8567b7b | | 144 | 30 | 2016-01-28 07:57:01.044856 | finished |
+ +--------------------------------------+--------------------------------------+----------+-------+----------+----------------------------+----------+
+ .
+ Tests:
+ .
+ +------------------------------------------------------------------------------------------------------------------------------------------+-----------+---------+
+ | name | time | status |
+ +------------------------------------------------------------------------------------------------------------------------------------------+-----------+---------+
+ | tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_get_flavor | 0.29804 | success |
+ | tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_list_flavors | 0.06289 | success |
+ | tempest.api.compute.images.test_images.ImagesTestJSON.test_delete_saving_image | 9.21756 | success |
+ | tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_delete_image | 8.65376 | success |
+ | tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_image_specify_multibyte_character_image_name | 9.10993 | success |
+ | tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_filter_by_changes_since | 0.19585 | success |
+ ...........................................
+ 2016-01-28 08:19:32,132 - run_tempest - INFO - Results: {'timestart': '2016-01-2807:57:01.044856', 'duration': 1350, 'tests': 144, 'failures': 30}
+ 2016-01-28 08:19:32,133 - run_tempest - INFO - Pushing results to DB: 'http://testresults.opnfv.org/testapi/results'.
+ 2016-01-28 08:19:32,278 - run_tempest - INFO - Deleting tenant and user for Tempest suite)
+
+To check the debug information related to all available test cases, inspect the
+tempest.log file stored under */home/opnfv/functest/results/tempest/*.
+
+
+Rally
+^^^^^
+
+The Rally results are displayed in the console; each module is run one after the
+other and summary tables are displayed::
+
+ +-------------------------------------------------------------------------------------------+
+ | Response Times (sec) |
+ +----------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ | action | min | median | 90%ile | 95%ile | max | avg | success | count |
+ +----------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ | keystone.create_role | 0.358 | 0.572 | 0.772 | 0.811 | 1.106 | 0.603 | 100.0% | 20 |
+ | keystone.add_role | 0.32 | 0.436 | 0.846 | 0.903 | 1.018 | 0.51 | 100.0% | 20 |
+ | keystone.list_roles | 0.102 | 0.185 | 0.253 | 0.275 | 0.347 | 0.188 | 100.0% | 20 |
+ | total | 0.845 | 1.223 | 1.821 | 1.822 | 1.823 | 1.302 | 100.0% | 20 |
+ +----------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ Load duration: 7.13633608818
+ Full duration: 36.7863121033
+ ..............
+ +------------------------------------------------------------------------------------------+
+ | Response Times (sec) |
+ +---------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ | action | min | median | 90%ile | 95%ile | max | avg | success | count |
+ +---------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ | nova.create_keypair | 1.005 | 1.784 | 3.025 | 3.636 | 4.373 | 2.004 | 100.0% | 20 |
+ | nova.delete_keypair | 0.199 | 0.699 | 1.007 | 1.244 | 3.014 | 0.79 | 100.0% | 20 |
+ | total | 1.249 | 2.625 | 4.259 | 4.845 | 5.131 | 2.794 | 100.0% | 20 |
+ +---------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ Load duration: 14.9231169224
+ Full duration: 71.4614388943
+
+
+At the end of the module test, a message is displayed to provide a global
+summary::
+
+ 2016-02-04 12:50:18,382 - run_rally - INFO - Test scenario: "requests" OK.
+
+
+Controllers
+-----------
+
+OpenDaylight
+^^^^^^^^^^^^
+
+The results of ODL tests can be seen in the console::
+
+ ==============================================================================
+ Basic
+ ==============================================================================
+ Basic.010 Restconf OK :: Test suite to verify Restconf is OK
+ ==============================================================================
+ Get Controller Modules :: Get the controller modules via Restconf | PASS |
+ ------------------------------------------------------------------------------
+ Basic.010 Restconf OK :: Test suite to verify Restconf is OK | PASS |
+ 1 critical test, 1 passed, 0 failed
+ 1 test total, 1 passed, 0 failed
+ ==============================================================================
+ Basic | PASS |
+ 1 critical test, 1 passed, 0 failed
+ 1 test total, 1 passed, 0 failed
+ ==============================================================================
+ Output: /home/opnfv/repos/functest/output.xml
+ Log: /home/opnfv/repos/functest/log.html
+ Report: /home/opnfv/repos/functest/report.html
+
+ ..............................................................................
+
+ Neutron.Delete Networks :: Checking Network deleted in OpenStack a... | PASS |
+ 2 critical tests, 2 passed, 0 failed
+ 2 tests total, 2 passed, 0 failed
+ ==============================================================================
+ Neutron :: Test suite for Neutron Plugin | PASS |
+ 18 critical tests, 18 passed, 0 failed
+ 18 tests total, 18 passed, 0 failed
+ ==============================================================================
+ Output: /home/opnfv/repos/functest/output.xml
+ Log: /home/opnfv/repos/functest/log.html
+ Report: /home/opnfv/repos/functest/report.html
+
+3 result files are generated:
+
+ * output.xml
+ * log.html
+ * report.html
+
+**ODL result page**
+
+.. figure:: ../images/functestODL.png
+ :width: 170mm
+ :align: center
+ :alt: ODL suite result page
+
+
+ONOS
+^^^^
+
+The ONOS test logs can be found in OnosSystemTest/, TestON/ and logs/
+(ONOSCI_PATH to be added), and can also be seen in the console::
+
+ ******************************
+ Result summary for Testcase4
+ ******************************
+
+ 2016-01-14 05:25:40,529 - FUNCvirNetNBL3 - INFO - ONOS Router Delete test Start
+
+ [2016-01-14 05:25:40.529644] [FUNCvirNetNBL3] [CASE] Virtual Network NBI Test - Router
+ 2016-01-14 05:25:40,530 - FUNCvirNetNBL3 - INFO - Generate Post Data
+
+ [2016-01-14 05:25:40.530825] [FUNCvirNetNBL3] [STEP] 4.1: Post Network Data via HTTP(Post Router need post network)
+ 2016-01-14 05:25:40,531 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/networks/ using POST method.
+ 2016-01-14 05:25:40,539 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:25:40,539 - FUNCvirNetNBL3 - INFO - Post Network Success
+ 2016-01-14 05:25:40,539 - FUNCvirNetNBL3 - INFO - Assertion Passed
+
+ [2016-01-14 05:25:40.539687] [FUNCvirNetNBL3] [STEP] 4.2: Post Router Data via HTTP
+ 2016-01-14 05:25:40,540 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/routers/ using POST method.
+ 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Post Router Success
+ 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Assertion Passed
+
+ [2016-01-14 05:25:40.543489] [FUNCvirNetNBL3] [STEP] 4.3: Delete Router Data via HTTP
+ 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/routers/e44bd655-e22c-4aeb-b1e9-ea1606875178 using DELETE method.
+ 2016-01-14 05:25:40,546 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:25:40,546 - FUNCvirNetNBL3 - INFO - Delete Router Success
+ 2016-01-14 05:25:40,546 - FUNCvirNetNBL3 - INFO - Assertion Passed
+
+ [2016-01-14 05:25:40.546774] [FUNCvirNetNBL3] [STEP] 4.4: Get Router Data is NULL
+ 2016-01-14 05:25:40,547 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/routers/e44bd655-e22c-4aeb-b1e9-ea1606875178 using GET method.
+ 2016-01-14 05:25:40,550 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:25:40,550 - FUNCvirNetNBL3 - INFO - Get Router Success
+ 2016-01-14 05:25:40,550 - FUNCvirNetNBL3 - INFO - Assertion Passed
+
+
+ *****************************
+ Result: Pass
+ *****************************
+
+ .......................................................................................
+
+ ******************************
+ Result summary for Testcase9
+ ******************************
+ .......................................................................................
+
+
+ [2016-01-14 05:26:42.543489] [FUNCvirNetNBL3] [STEP] 9.6: FloatingIp Clean Data via HTTP
+ 2016-01-14 05:26:42,543 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/floatingips/e44bd655-e22c-4aeb-b1e9-ea1606875178 using DELETE method.
+ 2016-01-14 05:26:42,546 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:26:42,546 - FUNCvirNetNBL3 - ERROR - Delete Floatingip failed
+
+ .......................................................................................
+
+ *****************************
+ Result: Failed
+ *****************************
+
+There is a result summary for each test case, and a global summary for the whole test.
+If any problem occurs during the test, an ERROR message is provided both in the test output and in the global summary::
+
+ *************************************
+ Test Execution Summary
+ *************************************
+
+ Test Start : 14 Jan 2016 05:25:37
+ Test End : 14 Jan 2016 05:25:41
+ Execution Time : 0:00:03.349087
+ Total tests planned : 11
+ Total tests RUN : 11
+ Total Pass : 8
+ Total Fail : 3
+ Total No Result : 0
+ Success Percentage : 72%
+ Execution Result : 100%
+
+
+OpenContrail
+^^^^^^^^^^^^
+
+TODO OVNO
+
+
+Feature
+-------
+
+vIMS
+^^^^
+
+The vIMS results are displayed in the console::
+
+ FUNCTEST.info: Running vIMS test...
+ 2016-02-04 13:46:25,025 - vIMS - INFO - Prepare OpenStack plateform (create tenant and user)
+ 2016-02-04 13:46:25,312 - vIMS - INFO - Update OpenStack creds informations
+ 2016-02-04 13:46:25,312 - vIMS - INFO - Upload some OS images if it doesn't exist
+ 2016-02-04 13:46:25,566 - vIMS - INFO - centos_7 image doesn't exist on glance repository.
+ Try downloading this image and upload on glance !
+ 2016-02-04 13:47:06,167 - vIMS - INFO - ubuntu_14.04 image doesn't exist on glance repository.
+ Try downloading this image and upload on glance !
+ 2016-02-04 13:47:26,987 - vIMS - INFO - Update security group quota for this tenant
+ 2016-02-04 13:47:27,193 - vIMS - INFO - Update cinder quota for this tenant
+ 2016-02-04 13:47:27,746 - vIMS - INFO - Collect flavor id for cloudify manager server
+ 2016-02-04 13:47:28,326 - vIMS - INFO - Prepare virtualenv for cloudify-cli
+ 2016-02-04 13:48:00,657 - vIMS - INFO - Downloading the cloudify manager server blueprint
+ 2016-02-04 13:48:03,391 - vIMS - INFO - Cloudify deployment Start Time:'2016-02-04 13:48:03'
+ 2016-02-04 13:48:03,391 - vIMS - INFO - Writing the inputs file
+ 2016-02-04 13:48:03,395 - vIMS - INFO - Launching the cloudify-manager deployment
+ 2016-02-04 13:56:03,501 - vIMS - INFO - Cloudify-manager server is UP !
+ 2016-02-04 13:56:03,502 - vIMS - INFO - Cloudify deployment duration:'480.1'
+ 2016-02-04 13:56:03,502 - vIMS - INFO - Collect flavor id for all clearwater vm
+ 2016-02-04 13:56:04,093 - vIMS - INFO - vIMS VNF deployment Start Time:'2016-02-04 13:56:04'
+ 2016-02-04 13:56:04,093 - vIMS - INFO - Downloading the openstack-blueprint.yaml blueprint
+ 2016-02-04 13:56:06,265 - vIMS - INFO - Writing the inputs file
+ 2016-02-04 13:56:06,268 - vIMS - INFO - Launching the clearwater deployment
+ 2016-02-04 14:11:27,101 - vIMS - INFO - The deployment of clearwater-opnfv is ended
+ 2016-02-04 14:11:27,103 - vIMS - INFO - vIMS VNF deployment duration:'923.0'
+ 2016-02-04 14:14:31,976 - vIMS - INFO - vIMS functional test Start Time:'2016-02-04 14:14:31'
+ 2016-02-04 14:15:45,880 - vIMS - INFO - vIMS functional test duration:'73.9'
+ 2016-02-04 14:15:46,113 - vIMS - INFO - Launching the clearwater-opnfv undeployment
+ 2016-02-04 14:18:12,604 - vIMS - INFO - Launching the cloudify-manager undeployment
+ 2016-02-04 14:18:51,808 - vIMS - INFO - Cloudify-manager server has been successfully removed!
+ 2016-02-04 14:18:51,870 - vIMS - INFO - Removing vIMS tenant ..
+ 2016-02-04 14:18:52,131 - vIMS - INFO - Removing vIMS user ..
+
+Please note that the vIMS traces can be summarized in several steps:
+
+ * INFO: environment prepared successfully => environment OK
+ * INFO - Cloudify-manager server is UP ! => orchestrator deployed
+ * INFO - The deployment of clearwater-opnfv is ended => VNF deployed
+ * Multiple Identities (UDP) - (6505550771, 6505550675) Passed => tests run
+
+
+Promise
+^^^^^^^
+
+The results can be observed in the console::
+
+ Running test case: promise
+ ----------------------------------------------
+ FUNCTEST.info: Running PROMISE test case...
+ 2016-02-04 07:10:37,735 - Promise- INFO - Creating tenant 'promise'...
+ 2016-02-04 07:10:37,893 - Promise- INFO - Adding role '59828986a9a94dfaa852548599fde628' to tenant 'promise'...
+ 2016-02-04 07:10:38,005 - Promise- INFO - Creating user 'promiser'...
+ 2016-02-04 07:10:38,128 - Promise- INFO - Updating OpenStack credentials...
+ 2016-02-04 07:10:38,157 - Promise- INFO - Creating image 'promise-img' from '/home/opnfv/functest/data/cirros-0.3.4-x86_64-disk.img'...
+ 2016-02-04 07:10:42,016 - Promise- INFO - Creating flavor 'promise-flavor'...
+ 2016-02-04 07:10:42,836 - Promise- INFO - Exporting environment variables...
+ 2016-02-04 07:10:42,836 - Promise- INFO - Running command: DEBUG=1 npm run -s test -- --reporter json
+ 2016-02-04 07:10:51,505 - Promise- INFO - The test succeeded.
+ ....
+ **********************************
+ Promise test summary
+ **********************************
+ Test start: Thu Feb 04 07:10:42 UTC 2016
+ Test end: Thu Feb 04 07:10:51 UTC 2016
+ Execution time: 8.7
+ Total tests executed: 33
+ Total tests failed: 0
+ **********************************
+
diff --git a/docs/results/compass-os-odl_l2-nofeature-ha.rst b/docs/results/compass-os-odl_l2-nofeature-ha.rst
new file mode 100644
index 000000000..d81ec77e7
--- /dev/null
+++ b/docs/results/compass-os-odl_l2-nofeature-ha.rst
@@ -0,0 +1,357 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+
+Detailed test results for compass-os-odl_l2-nofeature-ha
+========================================================
+
+.. Add any text in here that could be useful for a reader.
+
+The following section outlines the detailed Functest results for the Brahmaputra scenario
+deploying OpenStack with an OpenDaylight-based ML2 SDN controller in a Pharos environment
+using the Compass installer.
+
+VIM
+---
+
+vPing_SSH
+^^^^^^^^^
+
+vPing test case output is displayed in the console::
+
+ FUNCTEST.info: Running vPing test...
+ 2016-01-23 03:18:20,153 - vPing- INFO - Creating neutron network vping-net...
+ 2016-01-23 03:18:35,476 - vPing- INFO - Flavor found 'm1.small'
+ 2016-01-23 03:18:36,350 - vPing- INFO - vPing Start Time:'2016-01-23 03:18:36'
+ 2016-01-23 03:18:38,571 - vPing- INFO - Creating instance 'opnfv-vping-1' with IP 192.168.130.30...
+ 2016-01-23 03:18:53,716 - vPing- INFO - Instance 'opnfv-vping-1' is ACTIVE.
+ 2016-01-23 03:18:55,239 - vPing- INFO - Creating instance 'opnfv-vping-2' with IP 192.168.130.40...
+ 2016-01-23 03:19:15,593 - vPing- INFO - Instance 'opnfv-vping-2' is ACTIVE.
+ 2016-01-23 03:19:15,593 - vPing- INFO - Creating floating IP for the second VM...
+ 2016-01-23 03:19:18,017 - vPing- INFO - Floating IP created: '10.2.65.6'
+ 2016-01-23 03:19:18,017 - vPing- INFO - Associating floating ip: '10.2.65.6' to VM2
+ 2016-01-23 03:19:37,839 - vPing- INFO - SCP ping script to VM2...
+ 2016-01-23 03:19:37,839 - vPing- INFO - Waiting for ping...
+ 2016-01-23 03:19:40,130 - vPing- INFO - vPing detected!
+ 2016-01-23 03:19:40,130 - vPing- INFO - vPing duration:'63.8'
+ 2016-01-23 03:19:40,130 - vPing- INFO - Cleaning up...
+ 2016-01-23 03:20:06,574 - vPing- INFO - Deleting network 'vping-net'...
+ 2016-01-23 03:20:13,587 - vPing- INFO - vPing OK
+
+
+
+
+vPing_userdata
+^^^^^^^^^^^^^^
+
+vPing_userdata results are displayed in the console::
+
+ 2016-01-06 16:06:20,550 - vPing- INFO - Creating neutron network vping-net...
+ 2016-01-06 16:06:23,867 - vPing- INFO - Flavor found 'm1.small'
+ 2016-01-06 16:06:24,457 - vPing- INFO - vPing Start Time:'2016-01-06 16:06:24'
+ 2016-01-06 16:06:24,626 - vPing- INFO - Creating instance 'opnfv-vping-1' with IP 192.168.130.30...
+ 2016-01-06 16:06:39,351 - vPing- INFO - Instance 'opnfv-vping-1' is ACTIVE.
+ 2016-01-06 16:06:39,650 - vPing- INFO - Creating instance 'opnfv-vping-2' with IP 192.168.130.40...
+ 2016-01-06 16:06:53,330 - vPing- INFO - Instance 'opnfv-vping-2' is ACTIVE.
+ 2016-01-06 16:06:53,330 - vPing- INFO - Waiting for ping...
+ 2016-01-06 16:06:58,669 - vPing- INFO - vPing detected!
+ 2016-01-06 16:06:58,669 - vPing- INFO - vPing duration:'34.2'
+ 2016-01-06 16:06:58,670 - vPing- INFO - Cleaning up...
+ 2016-01-06 16:07:12,661 - vPing- INFO - Deleting network 'vping-net'...
+ 2016-01-06 16:07:14,748 - vPing- INFO - vPing OK
+
+Tempest
+^^^^^^^
+
+The Tempest results are displayed in the console::
+
+ FUNCTEST.info: Running Tempest tests...
+ 2016-01-28 07:56:55,380 - run_tempest - INFO - Creating tenant and user for Tempest suite
+ 2016-01-28 07:56:56.127 23795 INFO rally.verification.tempest.tempest [-] Starting: Creating configuration file for Tempest.
+ 2016-01-28 07:56:59.512 23795 INFO rally.verification.tempest.tempest [-] Completed: Creating configuration file for Tempest.
+ 16-01-28 07:57:00,597 - run_tempest - INFO - Starting Tempest test suite: '--tests-file /home/opnfv/repos/functest/testcases/VIM/OpenStack/CI/custom_tests/test_list.txt'.
+ Total results of verification:
+ .
+ +--------------------------------------+--------------------------------------+----------+-------+----------+----------------------------+----------+
+ | UUID | Deployment UUID | Set name | Tests | Failures | Created at | Status |
+ +--------------------------------------+--------------------------------------+----------+-------+----------+----------------------------+----------+
+ | e0bf7770-2c0f-4c63-913c-cd51a6edd96d | 16582e1e-7b01-4d5d-9c13-a26db8567b7b | | 144 | 30 | 2016-01-28 07:57:01.044856 | finished |
+ +--------------------------------------+--------------------------------------+----------+-------+----------+----------------------------+----------+
+ .
+ Tests:
+ .
+ +------------------------------------------------------------------------------------------------------------------------------------------+-----------+---------+
+ | name | time | status |
+ +------------------------------------------------------------------------------------------------------------------------------------------+-----------+---------+
+ | tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_get_flavor | 0.29804 | success |
+ | tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_list_flavors | 0.06289 | success |
+ | tempest.api.compute.images.test_images.ImagesTestJSON.test_delete_saving_image | 9.21756 | success |
+ | tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_delete_image | 8.65376 | success |
+ | tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_image_specify_multibyte_character_image_name | 9.10993 | success |
+ | tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_filter_by_changes_since | 0.19585 | success |
+ ...........................................
+ 2016-01-28 08:19:32,132 - run_tempest - INFO - Results: {'timestart': '2016-01-2807:57:01.044856', 'duration': 1350, 'tests': 144, 'failures': 30}
+ 2016-01-28 08:19:32,133 - run_tempest - INFO - Pushing results to DB: 'http://testresults.opnfv.org/testapi/results'.
+ 2016-01-28 08:19:32,278 - run_tempest - INFO - Deleting tenant and user for Tempest suite)
+
+To check the debug information related to all available test cases, inspect the
+tempest.log file stored under */home/opnfv/functest/results/tempest/*.
+
+
+Rally
+^^^^^
+
+The Rally results are displayed in the console; each module is run one after the
+other and summary tables are displayed::
+
+ +-------------------------------------------------------------------------------------------+
+ | Response Times (sec) |
+ +----------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ | action | min | median | 90%ile | 95%ile | max | avg | success | count |
+ +----------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ | keystone.create_role | 0.358 | 0.572 | 0.772 | 0.811 | 1.106 | 0.603 | 100.0% | 20 |
+ | keystone.add_role | 0.32 | 0.436 | 0.846 | 0.903 | 1.018 | 0.51 | 100.0% | 20 |
+ | keystone.list_roles | 0.102 | 0.185 | 0.253 | 0.275 | 0.347 | 0.188 | 100.0% | 20 |
+ | total | 0.845 | 1.223 | 1.821 | 1.822 | 1.823 | 1.302 | 100.0% | 20 |
+ +----------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ Load duration: 7.13633608818
+ Full duration: 36.7863121033
+ ..............
+ +------------------------------------------------------------------------------------------+
+ | Response Times (sec) |
+ +---------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ | action | min | median | 90%ile | 95%ile | max | avg | success | count |
+ +---------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ | nova.create_keypair | 1.005 | 1.784 | 3.025 | 3.636 | 4.373 | 2.004 | 100.0% | 20 |
+ | nova.delete_keypair | 0.199 | 0.699 | 1.007 | 1.244 | 3.014 | 0.79 | 100.0% | 20 |
+ | total | 1.249 | 2.625 | 4.259 | 4.845 | 5.131 | 2.794 | 100.0% | 20 |
+ +---------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ Load duration: 14.9231169224
+ Full duration: 71.4614388943
+
+
+At the end of the module test, a message is displayed to provide a global
+summary::
+
+ 2016-02-04 12:50:18,382 - run_rally - INFO - Test scenario: "requests" OK.
+
+
+Controllers
+-----------
+
+OpenDaylight
+^^^^^^^^^^^^
+
+The results of ODL tests can be seen in the console::
+
+ ==============================================================================
+ Basic
+ ==============================================================================
+ Basic.010 Restconf OK :: Test suite to verify Restconf is OK
+ ==============================================================================
+ Get Controller Modules :: Get the controller modules via Restconf | PASS |
+ ------------------------------------------------------------------------------
+ Basic.010 Restconf OK :: Test suite to verify Restconf is OK | PASS |
+ 1 critical test, 1 passed, 0 failed
+ 1 test total, 1 passed, 0 failed
+ ==============================================================================
+ Basic | PASS |
+ 1 critical test, 1 passed, 0 failed
+ 1 test total, 1 passed, 0 failed
+ ==============================================================================
+ Output: /home/opnfv/repos/functest/output.xml
+ Log: /home/opnfv/repos/functest/log.html
+ Report: /home/opnfv/repos/functest/report.html
+
+ ..............................................................................
+
+ Neutron.Delete Networks :: Checking Network deleted in OpenStack a... | PASS |
+ 2 critical tests, 2 passed, 0 failed
+ 2 tests total, 2 passed, 0 failed
+ ==============================================================================
+ Neutron :: Test suite for Neutron Plugin | PASS |
+ 18 critical tests, 18 passed, 0 failed
+ 18 tests total, 18 passed, 0 failed
+ ==============================================================================
+ Output: /home/opnfv/repos/functest/output.xml
+ Log: /home/opnfv/repos/functest/log.html
+ Report: /home/opnfv/repos/functest/report.html
+
+3 result files are generated:
+
+ * output.xml
+ * log.html
+ * report.html
+
+**ODL result page**
+
+.. figure:: ../images/functestODL.png
+ :width: 170mm
+ :align: center
+ :alt: ODL suite result page
+
+
+ONOS
+^^^^
+
+The ONOS test logs can be found in OnosSystemTest/, TestON/ and logs/
+(ONOSCI_PATH to be added), and can also be seen in the console::
+
+ ******************************
+ Result summary for Testcase4
+ ******************************
+
+ 2016-01-14 05:25:40,529 - FUNCvirNetNBL3 - INFO - ONOS Router Delete test Start
+
+ [2016-01-14 05:25:40.529644] [FUNCvirNetNBL3] [CASE] Virtual Network NBI Test - Router
+ 2016-01-14 05:25:40,530 - FUNCvirNetNBL3 - INFO - Generate Post Data
+
+ [2016-01-14 05:25:40.530825] [FUNCvirNetNBL3] [STEP] 4.1: Post Network Data via HTTP(Post Router need post network)
+ 2016-01-14 05:25:40,531 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/networks/ using POST method.
+ 2016-01-14 05:25:40,539 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:25:40,539 - FUNCvirNetNBL3 - INFO - Post Network Success
+ 2016-01-14 05:25:40,539 - FUNCvirNetNBL3 - INFO - Assertion Passed
+
+ [2016-01-14 05:25:40.539687] [FUNCvirNetNBL3] [STEP] 4.2: Post Router Data via HTTP
+ 2016-01-14 05:25:40,540 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/routers/ using POST method.
+ 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Post Router Success
+ 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Assertion Passed
+
+ [2016-01-14 05:25:40.543489] [FUNCvirNetNBL3] [STEP] 4.3: Delete Router Data via HTTP
+ 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/routers/e44bd655-e22c-4aeb-b1e9-ea1606875178 using DELETE method.
+ 2016-01-14 05:25:40,546 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:25:40,546 - FUNCvirNetNBL3 - INFO - Delete Router Success
+ 2016-01-14 05:25:40,546 - FUNCvirNetNBL3 - INFO - Assertion Passed
+
+ [2016-01-14 05:25:40.546774] [FUNCvirNetNBL3] [STEP] 4.4: Get Router Data is NULL
+ 2016-01-14 05:25:40,547 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/routers/e44bd655-e22c-4aeb-b1e9-ea1606875178 using GET method.
+ 2016-01-14 05:25:40,550 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:25:40,550 - FUNCvirNetNBL3 - INFO - Get Router Success
+ 2016-01-14 05:25:40,550 - FUNCvirNetNBL3 - INFO - Assertion Passed
+
+
+ *****************************
+ Result: Pass
+ *****************************
+
+ .......................................................................................
+
+ ******************************
+ Result summary for Testcase9
+ ******************************
+ .......................................................................................
+
+
+ [2016-01-14 05:26:42.543489] [FUNCvirNetNBL3] [STEP] 9.6: FloatingIp Clean Data via HTTP
+ 2016-01-14 05:26:42,543 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/floatingips/e44bd655-e22c-4aeb-b1e9-ea1606875178 using DELETE method.
+ 2016-01-14 05:26:42,546 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:26:42,546 - FUNCvirNetNBL3 - ERROR - Delete Floatingip failed
+
+ .......................................................................................
+
+ *****************************
+ Result: Failed
+ *****************************
+
+There is a result summary for each test case, and a global summary for the whole test run.
+If any problem occurs during the test, an ERROR message is displayed in the test output and in the global summary::
+
+ *************************************
+ Test Execution Summary
+ *************************************
+
+ Test Start : 14 Jan 2016 05:25:37
+ Test End : 14 Jan 2016 05:25:41
+ Execution Time : 0:00:03.349087
+ Total tests planned : 11
+ Total tests RUN : 11
+ Total Pass : 8
+ Total Fail : 3
+ Total No Result : 0
+ Success Percentage : 72%
+ Execution Result : 100%
+
+
+OpenContrail
+^^^^^^^^^^^^
+
+TODO OVNO
+
+
+Feature
+-------
+
+vIMS
+^^^^
+
+The results in the console can be described as follows::
+
+ FUNCTEST.info: Running vIMS test...
+ 2016-02-04 13:46:25,025 - vIMS - INFO - Prepare OpenStack plateform (create tenant and user)
+ 2016-02-04 13:46:25,312 - vIMS - INFO - Update OpenStack creds informations
+ 2016-02-04 13:46:25,312 - vIMS - INFO - Upload some OS images if it doesn't exist
+ 2016-02-04 13:46:25,566 - vIMS - INFO - centos_7 image doesn't exist on glance repository.
+ Try downloading this image and upload on glance !
+ 2016-02-04 13:47:06,167 - vIMS - INFO - ubuntu_14.04 image doesn't exist on glance repository.
+ Try downloading this image and upload on glance !
+ 2016-02-04 13:47:26,987 - vIMS - INFO - Update security group quota for this tenant
+ 2016-02-04 13:47:27,193 - vIMS - INFO - Update cinder quota for this tenant
+ 2016-02-04 13:47:27,746 - vIMS - INFO - Collect flavor id for cloudify manager server
+ 2016-02-04 13:47:28,326 - vIMS - INFO - Prepare virtualenv for cloudify-cli
+ 2016-02-04 13:48:00,657 - vIMS - INFO - Downloading the cloudify manager server blueprint
+ 2016-02-04 13:48:03,391 - vIMS - INFO - Cloudify deployment Start Time:'2016-02-04 13:48:03'
+ 2016-02-04 13:48:03,391 - vIMS - INFO - Writing the inputs file
+ 2016-02-04 13:48:03,395 - vIMS - INFO - Launching the cloudify-manager deployment
+ 2016-02-04 13:56:03,501 - vIMS - INFO - Cloudify-manager server is UP !
+ 2016-02-04 13:56:03,502 - vIMS - INFO - Cloudify deployment duration:'480.1'
+ 2016-02-04 13:56:03,502 - vIMS - INFO - Collect flavor id for all clearwater vm
+ 2016-02-04 13:56:04,093 - vIMS - INFO - vIMS VNF deployment Start Time:'2016-02-04 13:56:04'
+ 2016-02-04 13:56:04,093 - vIMS - INFO - Downloading the openstack-blueprint.yaml blueprint
+ 2016-02-04 13:56:06,265 - vIMS - INFO - Writing the inputs file
+ 2016-02-04 13:56:06,268 - vIMS - INFO - Launching the clearwater deployment
+ 2016-02-04 14:11:27,101 - vIMS - INFO - The deployment of clearwater-opnfv is ended
+ 2016-02-04 14:11:27,103 - vIMS - INFO - vIMS VNF deployment duration:'923.0'
+ 2016-02-04 14:14:31,976 - vIMS - INFO - vIMS functional test Start Time:'2016-02-04 14:14:31'
+ 2016-02-04 14:15:45,880 - vIMS - INFO - vIMS functional test duration:'73.9'
+ 2016-02-04 14:15:46,113 - vIMS - INFO - Launching the clearwater-opnfv undeployment
+ 2016-02-04 14:18:12,604 - vIMS - INFO - Launching the cloudify-manager undeployment
+ 2016-02-04 14:18:51,808 - vIMS - INFO - Cloudify-manager server has been successfully removed!
+ 2016-02-04 14:18:51,870 - vIMS - INFO - Removing vIMS tenant ..
+ 2016-02-04 14:18:52,131 - vIMS - INFO - Removing vIMS user ..
+
+Please note that the vIMS traces can be summarized in several steps:
+
+ * INFO: environment prepared successfully => environment OK
+ * INFO - Cloudify-manager server is UP ! => orchestrator deployed
+ * INFO - The deployment of clearwater-opnfv is ended => VNF deployed
+ * Multiple Identities (UDP) - (6505550771, 6505550675) Passed => tests run
+
+
+Promise
+^^^^^^^
+
+The results can be observed in the console::
+
+ Running test case: promise
+ ----------------------------------------------
+ FUNCTEST.info: Running PROMISE test case...
+ 2016-02-04 07:10:37,735 - Promise- INFO - Creating tenant 'promise'...
+ 2016-02-04 07:10:37,893 - Promise- INFO - Adding role '59828986a9a94dfaa852548599fde628' to tenant 'promise'...
+ 2016-02-04 07:10:38,005 - Promise- INFO - Creating user 'promiser'...
+ 2016-02-04 07:10:38,128 - Promise- INFO - Updating OpenStack credentials...
+ 2016-02-04 07:10:38,157 - Promise- INFO - Creating image 'promise-img' from '/home/opnfv/functest/data/cirros-0.3.4-x86_64-disk.img'...
+ 2016-02-04 07:10:42,016 - Promise- INFO - Creating flavor 'promise-flavor'...
+ 2016-02-04 07:10:42,836 - Promise- INFO - Exporting environment variables...
+ 2016-02-04 07:10:42,836 - Promise- INFO - Running command: DEBUG=1 npm run -s test -- --reporter json
+ 2016-02-04 07:10:51,505 - Promise- INFO - The test succeeded.
+ ....
+ **********************************
+ Promise test summary
+ **********************************
+ Test start: Thu Feb 04 07:10:42 UTC 2016
+ Test end: Thu Feb 04 07:10:51 UTC 2016
+ Execution time: 8.7
+ Total tests executed: 33
+ Total tests failed: 0
+ **********************************
+
diff --git a/docs/results/compass-os-onos-nofeature-ha.rst b/docs/results/compass-os-onos-nofeature-ha.rst
new file mode 100644
index 000000000..720ce32af
--- /dev/null
+++ b/docs/results/compass-os-onos-nofeature-ha.rst
@@ -0,0 +1,356 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Detailed test results for compass-os-onos-nofeature-ha
+======================================================
+
+.. Add any text in here that could be useful for a reader.
+
+The following section outlines the detailed Functest results for the Brahmaputra scenario
+deploying OpenStack with an ONOS-based ML2 SDN controller in a Pharos environment
+using the Compass installer.
+
+VIM
+---
+
+vPing_SSH
+^^^^^^^^^
+
+vPing test case output is displayed in the console::
+
+ FUNCTEST.info: Running vPing test...
+ 2016-01-23 03:18:20,153 - vPing- INFO - Creating neutron network vping-net...
+ 2016-01-23 03:18:35,476 - vPing- INFO - Flavor found 'm1.small'
+ 2016-01-23 03:18:36,350 - vPing- INFO - vPing Start Time:'2016-01-23 03:18:36'
+ 2016-01-23 03:18:38,571 - vPing- INFO - Creating instance 'opnfv-vping-1' with IP 192.168.130.30...
+ 2016-01-23 03:18:53,716 - vPing- INFO - Instance 'opnfv-vping-1' is ACTIVE.
+ 2016-01-23 03:18:55,239 - vPing- INFO - Creating instance 'opnfv-vping-2' with IP 192.168.130.40...
+ 2016-01-23 03:19:15,593 - vPing- INFO - Instance 'opnfv-vping-2' is ACTIVE.
+ 2016-01-23 03:19:15,593 - vPing- INFO - Creating floating IP for the second VM...
+ 2016-01-23 03:19:18,017 - vPing- INFO - Floating IP created: '10.2.65.6'
+ 2016-01-23 03:19:18,017 - vPing- INFO - Associating floating ip: '10.2.65.6' to VM2
+ 2016-01-23 03:19:37,839 - vPing- INFO - SCP ping script to VM2...
+ 2016-01-23 03:19:37,839 - vPing- INFO - Waiting for ping...
+ 2016-01-23 03:19:40,130 - vPing- INFO - vPing detected!
+ 2016-01-23 03:19:40,130 - vPing- INFO - vPing duration:'63.8'
+ 2016-01-23 03:19:40,130 - vPing- INFO - Cleaning up...
+ 2016-01-23 03:20:06,574 - vPing- INFO - Deleting network 'vping-net'...
+ 2016-01-23 03:20:13,587 - vPing- INFO - vPing OK
+
+
+
+
+vPing_userdata
+^^^^^^^^^^^^^^
+
+vPing_userdata results are displayed in the console::
+
+ 2016-01-06 16:06:20,550 - vPing- INFO - Creating neutron network vping-net...
+ 2016-01-06 16:06:23,867 - vPing- INFO - Flavor found 'm1.small'
+ 2016-01-06 16:06:24,457 - vPing- INFO - vPing Start Time:'2016-01-06 16:06:24'
+ 2016-01-06 16:06:24,626 - vPing- INFO - Creating instance 'opnfv-vping-1' with IP 192.168.130.30...
+ 2016-01-06 16:06:39,351 - vPing- INFO - Instance 'opnfv-vping-1' is ACTIVE.
+ 2016-01-06 16:06:39,650 - vPing- INFO - Creating instance 'opnfv-vping-2' with IP 192.168.130.40...
+ 2016-01-06 16:06:53,330 - vPing- INFO - Instance 'opnfv-vping-2' is ACTIVE.
+ 2016-01-06 16:06:53,330 - vPing- INFO - Waiting for ping...
+ 2016-01-06 16:06:58,669 - vPing- INFO - vPing detected!
+ 2016-01-06 16:06:58,669 - vPing- INFO - vPing duration:'34.2'
+ 2016-01-06 16:06:58,670 - vPing- INFO - Cleaning up...
+ 2016-01-06 16:07:12,661 - vPing- INFO - Deleting network 'vping-net'...
+ 2016-01-06 16:07:14,748 - vPing- INFO - vPing OK
+
+Tempest
+^^^^^^^
+
+The Tempest results are displayed in the console::
+
+ FUNCTEST.info: Running Tempest tests...
+ 2016-01-28 07:56:55,380 - run_tempest - INFO - Creating tenant and user for Tempest suite
+ 2016-01-28 07:56:56.127 23795 INFO rally.verification.tempest.tempest [-] Starting: Creating configuration file for Tempest.
+ 2016-01-28 07:56:59.512 23795 INFO rally.verification.tempest.tempest [-] Completed: Creating configuration file for Tempest.
+ 16-01-28 07:57:00,597 - run_tempest - INFO - Starting Tempest test suite: '--tests-file /home/opnfv/repos/functest/testcases/VIM/OpenStack/CI/custom_tests/test_list.txt'.
+ Total results of verification:
+ .
+ +--------------------------------------+--------------------------------------+----------+-------+----------+----------------------------+----------+
+ | UUID | Deployment UUID | Set name | Tests | Failures | Created at | Status |
+ +--------------------------------------+--------------------------------------+----------+-------+----------+----------------------------+----------+
+ | e0bf7770-2c0f-4c63-913c-cd51a6edd96d | 16582e1e-7b01-4d5d-9c13-a26db8567b7b | | 144 | 30 | 2016-01-28 07:57:01.044856 | finished |
+ +--------------------------------------+--------------------------------------+----------+-------+----------+----------------------------+----------+
+ .
+ Tests:
+ .
+ +------------------------------------------------------------------------------------------------------------------------------------------+-----------+---------+
+ | name | time | status |
+ +------------------------------------------------------------------------------------------------------------------------------------------+-----------+---------+
+ | tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_get_flavor | 0.29804 | success |
+ | tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_list_flavors | 0.06289 | success |
+ | tempest.api.compute.images.test_images.ImagesTestJSON.test_delete_saving_image | 9.21756 | success |
+ | tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_delete_image | 8.65376 | success |
+ | tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_image_specify_multibyte_character_image_name | 9.10993 | success |
+ | tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_filter_by_changes_since | 0.19585 | success |
+ ...........................................
+ 2016-01-28 08:19:32,132 - run_tempest - INFO - Results: {'timestart': '2016-01-2807:57:01.044856', 'duration': 1350, 'tests': 144, 'failures': 30}
+ 2016-01-28 08:19:32,133 - run_tempest - INFO - Pushing results to DB: 'http://testresults.opnfv.org/testapi/results'.
+ 2016-01-28 08:19:32,278 - run_tempest - INFO - Deleting tenant and user for Tempest suite)
+
+In order to check the debug information for all the available test cases, inspect
+the tempest.log file stored under */home/opnfv/functest/results/tempest/*.
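+
+For example, a quick way to spot the failed tests in that log could be::
+
+    grep -i "fail" /home/opnfv/functest/results/tempest/tempest.log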
+
+
+Rally
+^^^^^
+
+The Rally results are displayed in the console; each module is run one after the
+other and a summary table is displayed for each of them::
+
+ +-------------------------------------------------------------------------------------------+
+ | Response Times (sec) |
+ +----------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ | action | min | median | 90%ile | 95%ile | max | avg | success | count |
+ +----------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ | keystone.create_role | 0.358 | 0.572 | 0.772 | 0.811 | 1.106 | 0.603 | 100.0% | 20 |
+ | keystone.add_role | 0.32 | 0.436 | 0.846 | 0.903 | 1.018 | 0.51 | 100.0% | 20 |
+ | keystone.list_roles | 0.102 | 0.185 | 0.253 | 0.275 | 0.347 | 0.188 | 100.0% | 20 |
+ | total | 0.845 | 1.223 | 1.821 | 1.822 | 1.823 | 1.302 | 100.0% | 20 |
+ +----------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ Load duration: 7.13633608818
+ Full duration: 36.7863121033
+ ..............
+ +------------------------------------------------------------------------------------------+
+ | Response Times (sec) |
+ +---------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ | action | min | median | 90%ile | 95%ile | max | avg | success | count |
+ +---------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ | nova.create_keypair | 1.005 | 1.784 | 3.025 | 3.636 | 4.373 | 2.004 | 100.0% | 20 |
+ | nova.delete_keypair | 0.199 | 0.699 | 1.007 | 1.244 | 3.014 | 0.79 | 100.0% | 20 |
+ | total | 1.249 | 2.625 | 4.259 | 4.845 | 5.131 | 2.794 | 100.0% | 20 |
+ +---------------------+-------+--------+--------+--------+-------+-------+---------+-------+
+ Load duration: 14.9231169224
+ Full duration: 71.4614388943
+
+
+At the end of the module test, a message is displayed to provide a global
+summary::
+
+ 2016-02-04 12:50:18,382 - run_rally - INFO - Test scenario: "requests" OK.
+
+
+Controllers
+-----------
+
+OpenDaylight
+^^^^^^^^^^^^
+
+The results of ODL tests can be seen in the console::
+
+ ==============================================================================
+ Basic
+ ==============================================================================
+ Basic.010 Restconf OK :: Test suite to verify Restconf is OK
+ ==============================================================================
+ Get Controller Modules :: Get the controller modules via Restconf | PASS |
+ ------------------------------------------------------------------------------
+ Basic.010 Restconf OK :: Test suite to verify Restconf is OK | PASS |
+ 1 critical test, 1 passed, 0 failed
+ 1 test total, 1 passed, 0 failed
+ ==============================================================================
+ Basic | PASS |
+ 1 critical test, 1 passed, 0 failed
+ 1 test total, 1 passed, 0 failed
+ ==============================================================================
+ Output: /home/opnfv/repos/functest/output.xml
+ Log: /home/opnfv/repos/functest/log.html
+ Report: /home/opnfv/repos/functest/report.html
+
+ ..............................................................................
+
+ Neutron.Delete Networks :: Checking Network deleted in OpenStack a... | PASS |
+ 2 critical tests, 2 passed, 0 failed
+ 2 tests total, 2 passed, 0 failed
+ ==============================================================================
+ Neutron :: Test suite for Neutron Plugin | PASS |
+ 18 critical tests, 18 passed, 0 failed
+ 18 tests total, 18 passed, 0 failed
+ ==============================================================================
+ Output: /home/opnfv/repos/functest/output.xml
+ Log: /home/opnfv/repos/functest/log.html
+ Report: /home/opnfv/repos/functest/report.html
+
+3 result files are generated:
+ * output.xml
+ * log.html
+ * report.html
+
+**ODL result page**
+
+.. figure:: ../images/functestODL.png
+ :width: 170mm
+ :align: center
+ :alt: ODL suite result page
+
+
+ONOS
+^^^^
+
+The ONOS test logs can be found in OnosSystemTest/, TestON/ and logs/
+(ONOSCI_PATH to be added), and can also be seen in the console::
+
+ ******************************
+ Result summary for Testcase4
+ ******************************
+
+ 2016-01-14 05:25:40,529 - FUNCvirNetNBL3 - INFO - ONOS Router Delete test Start
+
+ [2016-01-14 05:25:40.529644] [FUNCvirNetNBL3] [CASE] Virtual Network NBI Test - Router
+ 2016-01-14 05:25:40,530 - FUNCvirNetNBL3 - INFO - Generate Post Data
+
+ [2016-01-14 05:25:40.530825] [FUNCvirNetNBL3] [STEP] 4.1: Post Network Data via HTTP(Post Router need post network)
+ 2016-01-14 05:25:40,531 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/networks/ using POST method.
+ 2016-01-14 05:25:40,539 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:25:40,539 - FUNCvirNetNBL3 - INFO - Post Network Success
+ 2016-01-14 05:25:40,539 - FUNCvirNetNBL3 - INFO - Assertion Passed
+
+ [2016-01-14 05:25:40.539687] [FUNCvirNetNBL3] [STEP] 4.2: Post Router Data via HTTP
+ 2016-01-14 05:25:40,540 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/routers/ using POST method.
+ 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Post Router Success
+ 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Assertion Passed
+
+ [2016-01-14 05:25:40.543489] [FUNCvirNetNBL3] [STEP] 4.3: Delete Router Data via HTTP
+ 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/routers/e44bd655-e22c-4aeb-b1e9-ea1606875178 using DELETE method.
+ 2016-01-14 05:25:40,546 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:25:40,546 - FUNCvirNetNBL3 - INFO - Delete Router Success
+ 2016-01-14 05:25:40,546 - FUNCvirNetNBL3 - INFO - Assertion Passed
+
+ [2016-01-14 05:25:40.546774] [FUNCvirNetNBL3] [STEP] 4.4: Get Router Data is NULL
+ 2016-01-14 05:25:40,547 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/routers/e44bd655-e22c-4aeb-b1e9-ea1606875178 using GET method.
+ 2016-01-14 05:25:40,550 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:25:40,550 - FUNCvirNetNBL3 - INFO - Get Router Success
+ 2016-01-14 05:25:40,550 - FUNCvirNetNBL3 - INFO - Assertion Passed
+
+
+ *****************************
+ Result: Pass
+ *****************************
+
+ .......................................................................................
+
+ ******************************
+ Result summary for Testcase9
+ ******************************
+ .......................................................................................
+
+
+ [2016-01-14 05:26:42.543489] [FUNCvirNetNBL3] [STEP] 9.6: FloatingIp Clean Data via HTTP
+ 2016-01-14 05:26:42,543 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/floatingips/e44bd655-e22c-4aeb-b1e9-ea1606875178 using DELETE method.
+ 2016-01-14 05:26:42,546 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
+ 2016-01-14 05:26:42,546 - FUNCvirNetNBL3 - ERROR - Delete Floatingip failed
+
+ .......................................................................................
+
+ *****************************
+ Result: Failed
+ *****************************
+
+There is a result summary for each test case, and a global summary for the whole test run.
+If any problem occurs during the test, an ERROR message is displayed in the test output and in the global summary::
+
+ *************************************
+ Test Execution Summary
+ *************************************
+
+ Test Start : 14 Jan 2016 05:25:37
+ Test End : 14 Jan 2016 05:25:41
+ Execution Time : 0:00:03.349087
+ Total tests planned : 11
+ Total tests RUN : 11
+ Total Pass : 8
+ Total Fail : 3
+ Total No Result : 0
+ Success Percentage : 72%
+ Execution Result : 100%
+
+
+OpenContrail
+^^^^^^^^^^^^
+
+TODO OVNO
+
+
+Feature
+-------
+
+vIMS
+^^^^
+
+The results in the console can be described as follows::
+
+ FUNCTEST.info: Running vIMS test...
+ 2016-02-04 13:46:25,025 - vIMS - INFO - Prepare OpenStack plateform (create tenant and user)
+ 2016-02-04 13:46:25,312 - vIMS - INFO - Update OpenStack creds informations
+ 2016-02-04 13:46:25,312 - vIMS - INFO - Upload some OS images if it doesn't exist
+ 2016-02-04 13:46:25,566 - vIMS - INFO - centos_7 image doesn't exist on glance repository.
+ Try downloading this image and upload on glance !
+ 2016-02-04 13:47:06,167 - vIMS - INFO - ubuntu_14.04 image doesn't exist on glance repository.
+ Try downloading this image and upload on glance !
+ 2016-02-04 13:47:26,987 - vIMS - INFO - Update security group quota for this tenant
+ 2016-02-04 13:47:27,193 - vIMS - INFO - Update cinder quota for this tenant
+ 2016-02-04 13:47:27,746 - vIMS - INFO - Collect flavor id for cloudify manager server
+ 2016-02-04 13:47:28,326 - vIMS - INFO - Prepare virtualenv for cloudify-cli
+ 2016-02-04 13:48:00,657 - vIMS - INFO - Downloading the cloudify manager server blueprint
+ 2016-02-04 13:48:03,391 - vIMS - INFO - Cloudify deployment Start Time:'2016-02-04 13:48:03'
+ 2016-02-04 13:48:03,391 - vIMS - INFO - Writing the inputs file
+ 2016-02-04 13:48:03,395 - vIMS - INFO - Launching the cloudify-manager deployment
+ 2016-02-04 13:56:03,501 - vIMS - INFO - Cloudify-manager server is UP !
+ 2016-02-04 13:56:03,502 - vIMS - INFO - Cloudify deployment duration:'480.1'
+ 2016-02-04 13:56:03,502 - vIMS - INFO - Collect flavor id for all clearwater vm
+ 2016-02-04 13:56:04,093 - vIMS - INFO - vIMS VNF deployment Start Time:'2016-02-04 13:56:04'
+ 2016-02-04 13:56:04,093 - vIMS - INFO - Downloading the openstack-blueprint.yaml blueprint
+ 2016-02-04 13:56:06,265 - vIMS - INFO - Writing the inputs file
+ 2016-02-04 13:56:06,268 - vIMS - INFO - Launching the clearwater deployment
+ 2016-02-04 14:11:27,101 - vIMS - INFO - The deployment of clearwater-opnfv is ended
+ 2016-02-04 14:11:27,103 - vIMS - INFO - vIMS VNF deployment duration:'923.0'
+ 2016-02-04 14:14:31,976 - vIMS - INFO - vIMS functional test Start Time:'2016-02-04 14:14:31'
+ 2016-02-04 14:15:45,880 - vIMS - INFO - vIMS functional test duration:'73.9'
+ 2016-02-04 14:15:46,113 - vIMS - INFO - Launching the clearwater-opnfv undeployment
+ 2016-02-04 14:18:12,604 - vIMS - INFO - Launching the cloudify-manager undeployment
+ 2016-02-04 14:18:51,808 - vIMS - INFO - Cloudify-manager server has been successfully removed!
+ 2016-02-04 14:18:51,870 - vIMS - INFO - Removing vIMS tenant ..
+ 2016-02-04 14:18:52,131 - vIMS - INFO - Removing vIMS user ..
+
+Please note that the vIMS traces can be summarized in several steps:
+
+ * INFO: environment prepared successfully => environment OK
+ * INFO - Cloudify-manager server is UP ! => orchestrator deployed
+ * INFO - The deployment of clearwater-opnfv is ended => VNF deployed
+ * Multiple Identities (UDP) - (6505550771, 6505550675) Passed => tests run
+
+
+Promise
+^^^^^^^
+
+The results can be observed in the console::
+
+ Running test case: promise
+ ----------------------------------------------
+ FUNCTEST.info: Running PROMISE test case...
+ 2016-02-04 07:10:37,735 - Promise- INFO - Creating tenant 'promise'...
+ 2016-02-04 07:10:37,893 - Promise- INFO - Adding role '59828986a9a94dfaa852548599fde628' to tenant 'promise'...
+ 2016-02-04 07:10:38,005 - Promise- INFO - Creating user 'promiser'...
+ 2016-02-04 07:10:38,128 - Promise- INFO - Updating OpenStack credentials...
+ 2016-02-04 07:10:38,157 - Promise- INFO - Creating image 'promise-img' from '/home/opnfv/functest/data/cirros-0.3.4-x86_64-disk.img'...
+ 2016-02-04 07:10:42,016 - Promise- INFO - Creating flavor 'promise-flavor'...
+ 2016-02-04 07:10:42,836 - Promise- INFO - Exporting environment variables...
+ 2016-02-04 07:10:42,836 - Promise- INFO - Running command: DEBUG=1 npm run -s test -- --reporter json
+ 2016-02-04 07:10:51,505 - Promise- INFO - The test succeeded.
+ ....
+ **********************************
+ Promise test summary
+ **********************************
+ Test start: Thu Feb 04 07:10:42 UTC 2016
+ Test end: Thu Feb 04 07:10:51 UTC 2016
+ Execution time: 8.7
+ Total tests executed: 33
+ Total tests failed: 0
+ **********************************
+
diff --git a/docs/results/fuel-os-nosdn-kvm-ha.rst b/docs/results/fuel-os-nosdn-kvm-ha.rst
new file mode 100644
index 000000000..a63aff27b
--- /dev/null
+++ b/docs/results/fuel-os-nosdn-kvm-ha.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Test results for fuel-os-nosdn-kvm-ha
+=====================================
+
+.. Add any text in here that could be useful for a reader.
+
+.. Add the test results in a consistent format.
+
diff --git a/docs/results/fuel-os-nosdn-nofeature-ha.rst b/docs/results/fuel-os-nosdn-nofeature-ha.rst
new file mode 100644
index 000000000..a856212c9
--- /dev/null
+++ b/docs/results/fuel-os-nosdn-nofeature-ha.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Test results for fuel-os-nosdn-nofeature-ha
+===========================================
+
+.. Add any text in here that could be useful for a reader.
+
+.. Add the test results in a consistent format.
+
diff --git a/docs/results/fuel-os-nosdn-ovs-ha.rst b/docs/results/fuel-os-nosdn-ovs-ha.rst
new file mode 100644
index 000000000..c608e4dff
--- /dev/null
+++ b/docs/results/fuel-os-nosdn-ovs-ha.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Test results for fuel-os-nosdn-ovs-ha
+=====================================
+
+.. Add any text in here that could be useful for a reader.
+
+.. Add the test results in a consistent format.
+
diff --git a/docs/results/fuel-os-odl_l2-bgpvpn-ha.rst b/docs/results/fuel-os-odl_l2-bgpvpn-ha.rst
new file mode 100644
index 000000000..d3202fa57
--- /dev/null
+++ b/docs/results/fuel-os-odl_l2-bgpvpn-ha.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Test results for fuel-os-odl_l2-bgpvpn-ha
+=========================================
+
+.. Add any text in here that could be useful for a reader.
+
+.. Add the test results in a consistent format.
+
diff --git a/docs/results/fuel-os-odl_l2-nofeature-ha.rst b/docs/results/fuel-os-odl_l2-nofeature-ha.rst
new file mode 100644
index 000000000..39fb42075
--- /dev/null
+++ b/docs/results/fuel-os-odl_l2-nofeature-ha.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Test results for fuel-os-odl_l2-nofeature-ha
+============================================
+
+.. Add any text in here that could be useful for a reader.
+
+.. Add the test results in a consistent format.
+
diff --git a/docs/results/fuel-os-odl_l3-nofeature-ha.rst b/docs/results/fuel-os-odl_l3-nofeature-ha.rst
new file mode 100644
index 000000000..475b2f652
--- /dev/null
+++ b/docs/results/fuel-os-odl_l3-nofeature-ha.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Test results for fuel-os-odl_l3-nofeature-ha
+============================================
+
+.. Add any text in here that could be useful for a reader.
+
+.. Add the test results in a consistent format.
+
diff --git a/docs/results/fuel-os-onos-nofeature-ha.rst b/docs/results/fuel-os-onos-nofeature-ha.rst
new file mode 100644
index 000000000..fb3218583
--- /dev/null
+++ b/docs/results/fuel-os-onos-nofeature-ha.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Test results for fuel-os-onos-nofeature-ha
+==========================================
+
+.. Add any text in here that could be useful for a reader.
+
+.. Add the test results in a consistent format.
+
diff --git a/docs/results/index.rst b/docs/results/index.rst
new file mode 100644
index 000000000..dd55fded2
--- /dev/null
+++ b/docs/results/index.rst
@@ -0,0 +1,12 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+=====================
+FuncTest test results
+=====================
+
+.. toctree::
+ :maxdepth: 2
+
+ overview.rst
+ results.rst
diff --git a/docs/results/joid-os-nosdn-nofeature-ha.rst b/docs/results/joid-os-nosdn-nofeature-ha.rst
new file mode 100644
index 000000000..6ace0ca6f
--- /dev/null
+++ b/docs/results/joid-os-nosdn-nofeature-ha.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Test results for joid-os-nosdn-nofeature-ha
+===========================================
+
+.. Add any text in here that could be useful for a reader.
+
+.. Add the test results in a consistent format.
+
diff --git a/docs/results/joid-os-ocl-nofeature-ha.rst b/docs/results/joid-os-ocl-nofeature-ha.rst
new file mode 100644
index 000000000..c92363538
--- /dev/null
+++ b/docs/results/joid-os-ocl-nofeature-ha.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Test results for joid-os-ocl-nofeature-ha
+=========================================
+
+.. Add any text in here that could be useful for a reader.
+
+.. Add the test results in a consistent format.
+
diff --git a/docs/results/joid-os-odl_l2-nofeature-ha.rst b/docs/results/joid-os-odl_l2-nofeature-ha.rst
new file mode 100644
index 000000000..cb95e9849
--- /dev/null
+++ b/docs/results/joid-os-odl_l2-nofeature-ha.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Test results for joid-os-odl_l2-nofeature-ha
+============================================
+
+.. Add any text in here that could be useful for a reader.
+
+.. Add the test results in a consistent format.
+
diff --git a/docs/results/joid-os-onos-nofeature-ha.rst b/docs/results/joid-os-onos-nofeature-ha.rst
new file mode 100644
index 000000000..803cc6e46
--- /dev/null
+++ b/docs/results/joid-os-onos-nofeature-ha.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Test results for joid-os-onos-nofeature-ha
+==========================================
+
+.. Add any text in here that could be useful for a reader.
+
+.. Add the test results in a consistent format.
+
diff --git a/docs/results/overview.rst b/docs/results/overview.rst
new file mode 100644
index 000000000..bd8493667
--- /dev/null
+++ b/docs/results/overview.rst
@@ -0,0 +1,12 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+FuncTest test result document overview
+======================================
+
+Provide an overview of the document and Functest here in regard to the results listed.
+
+Additional test result assets and information
+=============================================
+
+Provide links to dashboards and other useful information for readers and those wanting to get involved.
diff --git a/docs/results/results.rst b/docs/results/results.rst
new file mode 100644
index 000000000..54fcecd76
--- /dev/null
+++ b/docs/results/results.rst
@@ -0,0 +1,43 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International Licence.
+.. http://creativecommons.org/licenses/by/4.0
+
+Results listed by scenario
+==========================
+
+The following sections describe the functest results as evaluated for
+the Brahmaputra release scenario validation runs. Each section describes
+the determined state of the specific scenario as deployed in the Brahmaputra
+release process.
+
+To qualify for release, the scenarios must have been deployed and successfully
+tested in four consecutive installations to establish stability of deployment
+and feature capability.
+
+.. Add information related to the dashboards and other useful tools for functest.
+
+.. toctree::
+ :maxdepth: 2
+
+.. Include each of the scenario files as they are set to
+.. release ready and the results are available for publishing.
+
+.. apex-os-ocl-nofeature-ha.rst
+.. apex-os-odl_l2-sfc-ha.rst
+.. apex-os-odl_l3-nofeature-ha.rst
+.. apex-os-onos-nofeature-ha.rst
+.. apex-os-odl_l2-nofeature-ha.rst
+.. compass-os-onos-nofeature-ha.rst
+.. compass-os-nosdn-nofeature-ha.rst
+.. compass-os-ocl-nofeature-ha.rst
+.. compass-os-odl_l2-nofeature-ha.rst
+.. fuel-os-nosdn-kvm-ha.rst
+.. fuel-os-nosdn-nofeature-ha.rst
+.. fuel-os-nosdn-ovs-ha.rst
+.. fuel-os-odl_l2-bgpvpn-ha.rst
+.. fuel-os-odl_l2-nofeature-ha.rst
+.. fuel-os-odl_l3-nofeature-ha.rst
+.. fuel-os-onos-nofeature-ha.rst
+.. joid-os-nosdn-nofeature-ha.rst
+.. joid-os-ocl-nofeature-ha.rst
+.. joid-os-odl_l2-nofeature-ha.rst
+.. joid-os-onos-nofeature-ha.rst
diff --git a/docs/userguide/description.rst b/docs/userguide/description.rst
index 288ad95af..b47d5ef62 100644
--- a/docs/userguide/description.rst
+++ b/docs/userguide/description.rst
@@ -1,12 +1,9 @@
-Description of the test cases
+Overview of the test suites
=============================
-Functest is an OPNFV project dedicated to functional testing.
-In the continuous integration, it is launched after an OPNFV fresh installation.
-The Functest target is to verify the basic functions of the infrastructure.
-
-Functest includes different test suites which several test cases within.
-Test cases are developed in Functest and in feature projects.
+Functest is the OPNFV project primarily targeting function testing.
+In the Continuous Integration pipeline, it is launched after an OPNFV fresh
+installation to validate and verify the basic functions of the infrastructure.
The current list of test suites can be distributed in 3 main domains::
@@ -21,8 +18,8 @@ The current list of test suites can be distributed in 3 main domains::
| +----------------+-------------------------------------------+
|(Virtualised | Tempest | OpenStack reference test suite `[2]`_ |
| Infrastructure +----------------+-------------------------------------------+
- | Manager) | Rally scenario | OpenStack testing tool testing OpenStack |
- | | | modules `[3]`_ |
+ | Manager) | Rally bench | OpenStack testing tool benchmarking |
+ | | | OpenStack modules `[3]`_ |
+----------------+----------------+-------------------------------------------+
| | OpenDaylight | Opendaylight Test suite |
| +----------------+-------------------------------------------+
@@ -30,8 +27,8 @@ The current list of test suites can be distributed in 3 main domains::
| +----------------+-------------------------------------------+
| | OpenContrail | |
+----------------+----------------+-------------------------------------------+
- | Features | vIMS | Show the capability to deploy a real NFV |
- | | | test cases. |
+ | Features | vIMS | Example of a real VNF deployment to show |
+ | | | the NFV capabilities of the platform. |
| | | The IP Multimedia Subsytem is a typical |
| | | Telco test case, referenced by ETSI. |
| | | It provides a fully functional VoIP System|
@@ -46,31 +43,33 @@ The current list of test suites can be distributed in 3 main domains::
| | SDNVPN | |
+----------------+----------------+-------------------------------------------+
-
-Most of the test suites are developed upstream.
-For example, `Tempest <http://docs.openstack.org/developer/tempest/overview.html>`_ is the
-OpenStack integration test suite.
-Functest is in charge of the integration of different functional test suites.
+Functest includes different test suites, each containing several test cases. Some
+of the tests are developed by Functest team members whereas others are integrated
+from upstream communities or other OPNFV projects. For example,
+`Tempest <http://docs.openstack.org/developer/tempest/overview.html>`_ is the
+OpenStack integration test suite and Functest is in charge of the selection,
+integration and automation of the tests that fit in OPNFV.
The Tempest suite has been customized but no new test cases have been created.
-Some OPNFV feature projects (e.g. SDNVPN) have created Tempest tests cases and
-pushed to upstream.
+Some OPNFV feature projects (e.g. SDNVPN) have written Tempest test cases
+and pushed them upstream to be used by Functest.
-The tests run from CI are pushed into a database.
-The goal is to populate the database with results and to show them on a Test
-Dashboard.
+The results produced by the tests run from CI are pushed and collected in a NoSQL
+database. The goal is to populate the database with results from different sources
+and scenarios and to show them on a Dashboard.
-There is no real notion of Test domain or Test coverage yet.
-Basic components (VIM, controllers) are tested through their own suites.
-Feature projects also provide their own test suites.
+There is no real notion of Test domain or Test coverage. Basic components
+(VIM, controllers) are tested through their own suites. Feature projects also
+provide their own test suites with different ways of running their tests.
vIMS test case was integrated to demonstrate the capability to deploy a
relatively complex NFV scenario on top of the OPNFV infrastructure.
Functest considers OPNFV as a black box.
-OPNFV, since Brahmaputra, offers lots of possible combinations:
+OPNFV, since the Brahmaputra release, offers lots of potential combinations:
* 3 controllers (OpenDayligh, ONOS, OpenContrail)
* 4 installers (Apex, Compass, Fuel, Joid)
-However most of the tests shall be runnable on any configuration.
+Most of the tests are runnable on any combination, but some others might have
+restrictions imposed by the installers or the available deployed features.
diff --git a/docs/userguide/index.rst b/docs/userguide/index.rst
index 39d795261..727acfcdf 100644
--- a/docs/userguide/index.rst
+++ b/docs/userguide/index.rst
@@ -11,7 +11,7 @@ Introduction
============
The goal of this documents is to describe the Functest test cases as well as
-provide a procedure about how to execute (or launch) them.
+provide a procedure describing how to execute them.
A presentation has been created for the first OPNFV Summit `[4]`_.
@@ -21,79 +21,100 @@ It is assumed that Functest container has been properly installed `[1]`_.
The different scenarios are described in the section hereafter.
-VIM
+VIM (Virtualized Infrastructure Manager)
----
+----------------------------------------
-vPing
-^^^^^
+vPing_SSH
+^^^^^^^^^
-The goal of this test can be described as follows::
+Given the script 'ping.sh'::
+
+ #!/bin/sh
+ while true; do
+ ping -c 1 $1 2>&1 >/dev/null
+ RES=$?
+ if [ "Z$RES" = "Z0" ] ; then
+ echo 'vPing OK'
+ break
+ else
+ echo 'vPing KO'
+ fi
+ sleep 1
+ done
+
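+The script expects the IP address to ping as its first argument; in this test
+case VM2 runs it against the private IP of VM1, for example::
+
+    sh ping.sh 192.168.130.30
+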
+The goal of this test is described as follows::
vPing test case
- +-------------+ +-------------+
- | | | |
- | | Boot VM1 | |
- | +------------------>| |
- | | | |
- | | Get IP VM1 | |
- | +------------------>| |
- | Tester | | System |
- | | Boot VM2 | Under |
- | +------------------>| Test |
- | | | |
- | | Create (VM2) | |
- | | floating IP | |
- | +------------------>| |
- | | | |
- | | SCP vPing script | |
- | | to VM2 | |
- | +------------------>| |
- | | | |
- | | SSH to VM2 | |
- | +------------------>| |
- | | | |
- | | Ping VM1 | |
- | | private IP | |
- | +------------------>| |
- | | | |
- | | If ping: | |
- | | exit OK | |
- | | else (timeout) | |
- | | exit KO | |
- | | | |
- +-------------+ +-------------+
-
-This example can be considered as an "Hello World" example.
-It is the first basic example, it must work on any configuration.
+ +-------------+ +-------------+
+ | | | |
+ | | Boot VM1 with IP1 | |
+ | +------------------->| |
+ | Tester | | System |
+ | | Boot VM2 | Under |
+ | +------------------->| Test |
+ | | | |
+ | | Create floating IP | |
+ | +------------------->| |
+ | | | |
+ | | Assign floating IP | |
+ | | to VM2 | |
+ | +------------------->| |
+ | | | |
+ | | Stablish SSH | |
+ | | connection to VM2 | |
+ | | through floating IP| |
+ | +------------------->| |
+ | | | |
+ | | SCP ping.sh to VM2 | |
+ | +------------------->| |
+ | | | |
+ | | VM2 executes | |
+ | | ping.sh to VM1 | |
+ | +------------------->| |
+ | | | |
+ | | If ping: | |
+ | | exit OK | |
+ | | else (timeout) | |
+ | | exit Failed | |
+ | | | |
+ +-------------+ +-------------+
+
+This test can be considered as a "Hello World" example.
+It is the first basic use case which shall work on any deployment.
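+
+The SCP/SSH steps performed by the test are roughly equivalent to the following
+manual commands (illustrative only; the key file, user name and IP addresses
+depend on the deployment)::
+
+    # copy the ping script to VM2 through its floating IP, then execute it
+    # against the private IP of VM1
+    scp -i <keyfile> ping.sh <user>@<floating_ip_vm2>:
+    ssh -i <keyfile> <user>@<floating_ip_vm2> "sh ping.sh <private_ip_vm1>"
+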
vPing_userdata
^^^^^^^^^^^^^^
-The goal of this test can be described as follow::
+The goal of this test can be described as follows::
vPing_userdata test case
- +-------------+ +-------------+
- | | | |
- | | Boot VM1 | |
- | +------------------>| |
- | | | |
- | | Get IP VM1 | |
- | +------------------>| |
- | Tester | | System |
- | | Boot VM2 | Under |
- | +------------------>| Test |
- | | VM2 pings VM1 | |
- | | (cloud-init) | |
- | | Check console log | |
- | | If ping: | |
- | | exit OK | |
- | | else (timeout) | |
- | | exit KO | |
- | | | |
- +-------------+ +-------------+
+ +-------------+ +-------------+
+ | | | |
+ | | Boot VM1 with IP1 | |
+ | +------------------->| |
+ | | | |
+ | | Boot VM2 with | |
+ | | ping.sh as userdata| |
+ | | with IP1 as $1. | |
+ | +------------------->| |
+ | Tester | | System |
+ | | VM2 exeutes ping.sh| Under |
+ | | (ping IP1) | Test |
+ | +------------------->| |
+ | | | |
+ | | Monitor nova | |
+ | | console-log VM 2 | |
+ | | If ping: | |
+ | | exit OK | |
+ | | else (timeout) | |
+ | | exit Failed | |
+ | | | |
+ +-------------+ +-------------+
This scenario is similar to the previous one but it uses cloud-init (nova
-metadata service) instead of floating IPs and SSH.
+metadata service) instead of floating IPs and an SSH connection. When the second VM
+boots, it automatically executes the script and the ping is detected by
+periodically capturing the output in the console-log of the second VM.
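+
+A minimal manual check of the same mechanism could be (assuming the instance
+name used by the test, 'opnfv-vping-2')::
+
+    # poll the console log of the second VM and look for the success marker
+    nova console-log opnfv-vping-2 | grep "vPing OK"
+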
Tempest
@@ -107,27 +128,27 @@ Tempest has batteries of tests for:
* Scenarios
* Other specific tests useful in validating an OpenStack deployment
-We use Rally `[3]`_ to run Tempest suite.
-Rally generates automatically tempest.conf configuration file.
-Before running actual test cases Functest creates needed resources.
-Needed parameters are updated in the configuration file.
-When the Tempest suite is run, each test duration is measured.
-The full console output is stored in the tempest.log file.
+Functest uses Rally `[3]`_ to run the Tempest suite.
+Rally automatically generates the Tempest configuration file (tempest.conf).
+Before running the actual test cases, Functest creates the needed resources and
+updates the appropriate parameters to the configuration file.
+When the Tempest suite is executed, each test duration is measured and the full
+console output is stored in the tempest.log file for further analysis.
As an addition of Arno, Brahmaputra runs a customized set of Tempest test cases.
The list is specificed through *--tests-file* when running Rally.
This option has been introduced in Rally in version 0.1.2.
The customized test list is available in the Functest repo `[4]`_.
-This list contains more than 200 Tempest test cases.
-The list can be divied into two main parts:
+This list contains more than 200 Tempest test cases and can be divided
+into two main sections:
1) Set of tempest smoke test cases
2) Set of test cases from DefCore list `[8]`_
-The goal of Tempest test suite is to check the basic functionalities of
+The goal of the Tempest test suite is to check the basic functionalities of
different OpenStack components on an OPNFV fresh installation using
-corresponding REST API interfaces.
+the corresponding REST API interfaces.
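+
+For reference, the customized list is passed to Rally through the *--tests-file*
+option mentioned above; the invocation is similar to the following (using the
+test list file from the Functest repository as deployed in the container)::
+
+    rally verify start --tests-file \
+        /home/opnfv/repos/functest/testcases/VIM/OpenStack/CI/custom_tests/test_list.txt
+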
Rally bench test suites
@@ -137,8 +158,8 @@ Rally `[3]`_ is a benchmarking tool that answers the question::
“How does OpenStack work at scale?”.
-The goal of this test suite is to test the different modules of OpenStack and
-get significant figures that could help us to define telco Cloud KPI.
+The goal of this test suite is to benchmark the different OpenStack modules and
+get significant figures that could help to define Telco Cloud KPIs.
The OPNFV scenarios are based on the collection of the existing Rally scenarios:
@@ -152,7 +173,7 @@ The OPNFV scenarios are based on the collection of the existing Rally scenarios:
* quotas
* requests
-Basic SLA (stop test on errors) have been implemented.
+A basic SLA (stop the test on errors) has been implemented.
SDN Controllers
@@ -169,11 +190,11 @@ OpenDaylight
^^^^^^^^^^^^
The OpenDaylight (ODL) test suite consists of a set of basic tests inherited
-from ODL project.
+from the ODL project using the Robot `[11]`_ framework.
The suite verifies creation and deletion of networks, subnets and ports with
OpenDaylight and Neutron.
-The list of tests can be described as follow:
+The list of tests can be described as follows:
* Restconf.basic: Get the controller modules via Restconf
* Neutron.Networks
@@ -216,14 +237,13 @@ The list of tests can be described as follow:
ONOS
^^^^
-TestON Framework is used to test ONOS function.
+TestON Framework is used to test the ONOS SDN controller functions.
The test cases deal with L2 and L3 functions.
-ONOS is configured through OPNFV scenario.
The ONOS test suite can be run on any ONOS compliant scenario.
-The test cases may be described as follow:
+The test cases may be described as follows:
- * onosfunctest: The mainly executable file contains the initialization of
+ * onosfunctest: The main executable file contains the initialization of
the docker environment and functions called by FUNCvirNetNB and
FUNCvirNetNBL3
@@ -273,7 +293,7 @@ vIMS
^^^^
The goal of this test suite consists of:
- * deploying a VNF orchestrator (cloudify)
+ * deploy a VNF orchestrator (Cloudify)
* deploy a Clearwater vIMS (IP Multimedia Subsystem) VNF from this
orchestrator based on a TOSCA blueprint defined in `[5]`_
* run suite of signaling tests on top of this VNF
@@ -290,7 +310,7 @@ Two types of information are stored in the Test Database:
* the test results
The deployment of a complete functional VNF allows the test of most of the
-essential functions needed for a NFV system.
+essential functions needed for a NFV platform.
Promise
^^^^^^^
@@ -340,355 +360,16 @@ include::
flavor_ram: 512
flavor_disk: 0
+However, these parameters must not be changed, as they are the values expected
+by the Promise test suite.
+
.. include:: ./runfunctest.rst
Test results
============
-VIM
----
-
-vPing
-^^^^^
-
-vPing results are displayed in the console::
-
- FUNCTEST.info: Running vPing test...
- 2016-01-23 03:18:20,153 - vPing- INFO - Creating neutron network vping-net...
- 2016-01-23 03:18:35,476 - vPing- INFO - Flavor found 'm1.small'
- 2016-01-23 03:18:36,350 - vPing- INFO - vPing Start Time:'2016-01-23 03:18:36'
- 2016-01-23 03:18:38,571 - vPing- INFO - Creating instance 'opnfv-vping-1' with IP 192.168.130.30...
- 2016-01-23 03:18:53,716 - vPing- INFO - Instance 'opnfv-vping-1' is ACTIVE.
- 2016-01-23 03:18:55,239 - vPing- INFO - Creating instance 'opnfv-vping-2' with IP 192.168.130.40...
- 2016-01-23 03:19:15,593 - vPing- INFO - Instance 'opnfv-vping-2' is ACTIVE.
- 2016-01-23 03:19:15,593 - vPing- INFO - Creating floating IP for the second VM...
- 2016-01-23 03:19:18,017 - vPing- INFO - Floating IP created: '10.2.65.6'
- 2016-01-23 03:19:18,017 - vPing- INFO - Associating floating ip: '10.2.65.6' to VM2
- 2016-01-23 03:19:37,839 - vPing- INFO - SCP ping script to VM2...
- 2016-01-23 03:19:37,839 - vPing- INFO - Waiting for ping...
- 2016-01-23 03:19:40,130 - vPing- INFO - vPing detected!
- 2016-01-23 03:19:40,130 - vPing- INFO - vPing duration:'63.8'
- 2016-01-23 03:19:40,130 - vPing- INFO - Cleaning up...
- 2016-01-23 03:20:06,574 - vPing- INFO - Deleting network 'vping-net'...
- 2016-01-23 03:20:13,587 - vPing- INFO - vPing OK
-
-
-
-
-vPing_userdata
-^^^^^^^^^^^^^^
-
-vPing_userdata results are displayed in the console::
-
- 2016-01-06 16:06:20,550 - vPing- INFO - Creating neutron network vping-net...
- 2016-01-06 16:06:23,867 - vPing- INFO - Flavor found 'm1.small'
- 2016-01-06 16:06:24,457 - vPing- INFO - vPing Start Time:'2016-01-06 16:06:24'
- 2016-01-06 16:06:24,626 - vPing- INFO - Creating instance 'opnfv-vping-1' with IP 192.168.130.30...
- 2016-01-06 16:06:39,351 - vPing- INFO - Instance 'opnfv-vping-1' is ACTIVE.
- 2016-01-06 16:06:39,650 - vPing- INFO - Creating instance 'opnfv-vping-2' with IP 192.168.130.40...
- 2016-01-06 16:06:53,330 - vPing- INFO - Instance 'opnfv-vping-2' is ACTIVE.
- 2016-01-06 16:06:53,330 - vPing- INFO - Waiting for ping...
- 2016-01-06 16:06:58,669 - vPing- INFO - vPing detected!
- 2016-01-06 16:06:58,669 - vPing- INFO - vPing duration:'34.2'
- 2016-01-06 16:06:58,670 - vPing- INFO - Cleaning up...
- 2016-01-06 16:07:12,661 - vPing- INFO - Deleting network 'vping-net'...
- 2016-01-06 16:07:14,748 - vPing- INFO - vPing OK
-
-Tempest
-^^^^^^^
-
-The Tempest results are displayed in the console::
-
- FUNCTEST.info: Running Tempest tests...
- 2016-01-28 07:56:55,380 - run_tempest - INFO - Creating tenant and user for Tempest suite
- 2016-01-28 07:56:56.127 23795 INFO rally.verification.tempest.tempest [-] Starting: Creating configuration file for Tempest.
- 2016-01-28 07:56:59.512 23795 INFO rally.verification.tempest.tempest [-] Completed: Creating configuration file for Tempest.
- 16-01-28 07:57:00,597 - run_tempest - INFO - Starting Tempest test suite: '--tests-file /home/opnfv/repos/functest/testcases/VIM/OpenStack/CI/custom_tests/test_list.txt'.
- Total results of verification:
- .
- +--------------------------------------+--------------------------------------+----------+-------+----------+----------------------------+----------+
- | UUID | Deployment UUID | Set name | Tests | Failures | Created at | Status |
- +--------------------------------------+--------------------------------------+----------+-------+----------+----------------------------+----------+
- | e0bf7770-2c0f-4c63-913c-cd51a6edd96d | 16582e1e-7b01-4d5d-9c13-a26db8567b7b | | 144 | 30 | 2016-01-28 07:57:01.044856 | finished |
- +--------------------------------------+--------------------------------------+----------+-------+----------+----------------------------+----------+
- .
- Tests:
- .
- +------------------------------------------------------------------------------------------------------------------------------------------+-----------+---------+
- | name | time | status |
- +------------------------------------------------------------------------------------------------------------------------------------------+-----------+---------+
- | tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_get_flavor | 0.29804 | success |
- | tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_list_flavors | 0.06289 | success |
- | tempest.api.compute.images.test_images.ImagesTestJSON.test_delete_saving_image | 9.21756 | success |
- | tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_delete_image | 8.65376 | success |
- | tempest.api.compute.images.test_images_oneserver.ImagesOneServerTestJSON.test_create_image_specify_multibyte_character_image_name | 9.10993 | success |
- | tempest.api.compute.images.test_list_image_filters.ListImageFiltersTestJSON.test_list_images_filter_by_changes_since | 0.19585 | success |
- ...........................................
- 2016-01-28 08:19:32,132 - run_tempest - INFO - Results: {'timestart': '2016-01-2807:57:01.044856', 'duration': 1350, 'tests': 144, 'failures': 30}
- 2016-01-28 08:19:32,133 - run_tempest - INFO - Pushing results to DB: 'http://testresults.opnfv.org/testapi/results'.
- 2016-01-28 08:19:32,278 - run_tempest - INFO - Deleting tenant and user for Tempest suite)
-
-In order to check all the available test cases related debug information, please
-inspect tempest.log file stored into related Rally deployment folder.
-
-
-Rally
-^^^^^
-
-The Rally results are displayed in the console, each module is run one after the
-other. Tables are displayed::
-
- +-------------------------------------------------------------------------------------------+
- | Response Times (sec) |
- +----------------------+-------+--------+--------+--------+-------+-------+---------+-------+
- | action | min | median | 90%ile | 95%ile | max | avg | success | count |
- +----------------------+-------+--------+--------+--------+-------+-------+---------+-------+
- | keystone.create_role | 0.358 | 0.572 | 0.772 | 0.811 | 1.106 | 0.603 | 100.0% | 20 |
- | keystone.add_role | 0.32 | 0.436 | 0.846 | 0.903 | 1.018 | 0.51 | 100.0% | 20 |
- | keystone.list_roles | 0.102 | 0.185 | 0.253 | 0.275 | 0.347 | 0.188 | 100.0% | 20 |
- | total | 0.845 | 1.223 | 1.821 | 1.822 | 1.823 | 1.302 | 100.0% | 20 |
- +----------------------+-------+--------+--------+--------+-------+-------+---------+-------+
- Load duration: 7.13633608818
- Full duration: 36.7863121033
- ..............
- +------------------------------------------------------------------------------------------+
- | Response Times (sec) |
- +---------------------+-------+--------+--------+--------+-------+-------+---------+-------+
- | action | min | median | 90%ile | 95%ile | max | avg | success | count |
- +---------------------+-------+--------+--------+--------+-------+-------+---------+-------+
- | nova.create_keypair | 1.005 | 1.784 | 3.025 | 3.636 | 4.373 | 2.004 | 100.0% | 20 |
- | nova.delete_keypair | 0.199 | 0.699 | 1.007 | 1.244 | 3.014 | 0.79 | 100.0% | 20 |
- | total | 1.249 | 2.625 | 4.259 | 4.845 | 5.131 | 2.794 | 100.0% | 20 |
- +---------------------+-------+--------+--------+--------+-------+-------+---------+-------+
- Load duration: 14.9231169224
- Full duration: 71.4614388943
-
-
-At the end of the module test, a message is displayed to provide a global
-summary::
-
- 2016-02-04 12:50:18,382 - run_rally - INFO - Test scenario: "requests" OK.
-
-
-Controllers
------------
-
-OpenDaylight
-^^^^^^^^^^^^
-
-The results of ODL tests can be seen in the console::
-
- ==============================================================================
- Basic
- ==============================================================================
- Basic.010 Restconf OK :: Test suite to verify Restconf is OK
- ==============================================================================
- Get Controller Modules :: Get the controller modules via Restconf | PASS |
- ------------------------------------------------------------------------------
- Basic.010 Restconf OK :: Test suite to verify Restconf is OK | PASS |
- 1 critical test, 1 passed, 0 failed
- 1 test total, 1 passed, 0 failed
- ==============================================================================
- Basic | PASS |
- 1 critical test, 1 passed, 0 failed
- 1 test total, 1 passed, 0 failed
- ==============================================================================
- Output: /home/jenkins-ci/workspace/functest-opnfv-jump-2/output.xml
- Log: /home/jenkins-ci/workspace/functest-opnfv-jump-2/log.html
- Report: /home/jenkins-ci/workspace/functest-opnfv-jump-2/report.html
-
- ..............................................................................
-
- Neutron.Delete Networks :: Checking Network deleted in OpenStack a... | PASS |
- 2 critical tests, 2 passed, 0 failed
- 2 tests total, 2 passed, 0 failed
- ==============================================================================
- Neutron :: Test suite for Neutron Plugin | PASS |
- 18 critical tests, 18 passed, 0 failed
- 18 tests total, 18 passed, 0 failed
- ==============================================================================
- Output: /home/jenkins-ci/workspace/functest-opnfv-jump-2/output.xml
- Log: /home/jenkins-ci/workspace/functest-opnfv-jump-2/log.html
- Report: /home/jenkins-ci/workspace/functest-opnfv-jump-2/report.html
-
-3 result files are generated:
- * output.xml
- * log.html
- * report.html
-
-**ODL result page**
-
-.. figure:: ../images/functestODL.png
- :width: 170mm
- :align: center
- :alt: ODL suite result page
-
-
-ONOS
-^^^^
-
-The ONOS test logs can be found in OnosSystemTest/, and TestON/, and logs/
-(ONOSCI_PATH to be added), and can also be seen in the console::
-
- ******************************
- Result summary for Testcase4
- ******************************
-
- 2016-01-14 05:25:40,529 - FUNCvirNetNBL3 - INFO - ONOS Router Delete test Start
-
- [2016-01-14 05:25:40.529644] [FUNCvirNetNBL3] [CASE] Virtual Network NBI Test - Router
- 2016-01-14 05:25:40,530 - FUNCvirNetNBL3 - INFO - Generate Post Data
-
- [2016-01-14 05:25:40.530825] [FUNCvirNetNBL3] [STEP] 4.1: Post Network Data via HTTP(Post Router need post network)
- 2016-01-14 05:25:40,531 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/networks/ using POST method.
- 2016-01-14 05:25:40,539 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
- 2016-01-14 05:25:40,539 - FUNCvirNetNBL3 - INFO - Post Network Success
- 2016-01-14 05:25:40,539 - FUNCvirNetNBL3 - INFO - Assertion Passed
-
- [2016-01-14 05:25:40.539687] [FUNCvirNetNBL3] [STEP] 4.2: Post Router Data via HTTP
- 2016-01-14 05:25:40,540 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/routers/ using POST method.
- 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
- 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Post Router Success
- 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Assertion Passed
-
- [2016-01-14 05:25:40.543489] [FUNCvirNetNBL3] [STEP] 4.3: Delete Router Data via HTTP
- 2016-01-14 05:25:40,543 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/routers/e44bd655-e22c-4aeb-b1e9-ea1606875178 using DELETE method.
- 2016-01-14 05:25:40,546 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
- 2016-01-14 05:25:40,546 - FUNCvirNetNBL3 - INFO - Delete Router Success
- 2016-01-14 05:25:40,546 - FUNCvirNetNBL3 - INFO - Assertion Passed
-
- [2016-01-14 05:25:40.546774] [FUNCvirNetNBL3] [STEP] 4.4: Get Router Data is NULL
- 2016-01-14 05:25:40,547 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/routers/e44bd655-e22c-4aeb-b1e9-ea1606875178 using GET method.
- 2016-01-14 05:25:40,550 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
- 2016-01-14 05:25:40,550 - FUNCvirNetNBL3 - INFO - Get Router Success
- 2016-01-14 05:25:40,550 - FUNCvirNetNBL3 - INFO - Assertion Passed
-
-
- *****************************
- Result: Pass
- *****************************
-
- .......................................................................................
-
- ******************************
- Result summary for Testcase9
- ******************************
- .......................................................................................
-
-
- [2016-01-14 05:26:42.543489] [FUNCvirNetNBL3] [STEP] 9.6: FloatingIp Clean Data via HTTP
- 2016-01-14 05:26:42,543 - FUNCvirNetNBL3 - INFO - Sending request http://192.168.122.56:8181/onos/vtn/floatingips/e44bd655-e22c-4aeb-b1e9-ea1606875178 using DELETE method.
- 2016-01-14 05:26:42,546 - FUNCvirNetNBL3 - INFO - Verifying the Expected is equal to the actual or not using assert_equal
- 2016-01-14 05:26:42,546 - FUNCvirNetNBL3 - ERROR - Delete Floatingip failed
-
- .......................................................................................
-
- *****************************
- Result: Failed
- *****************************
-
-There is a result summary for each testcase, and a global summary for the whole test.
-If any problem occurs during the test, a ERROR message will be provided in the test and the the global summary::
-
- *************************************
- Test Execution Summary
- *************************************
-
- Test Start : 14 Jan 2016 05:25:37
- Test End : 14 Jan 2016 05:25:41
- Execution Time : 0:00:03.349087
- Total tests planned : 11
- Total tests RUN : 11
- Total Pass : 8
- Total Fail : 3
- Total No Result : 0
- Success Percentage : 72%
- Execution Result : 100%
-
-
-OpenContrail
-^^^^^^^^^^^^
-
-TODO OVNO
-
-
-Feature
--------
-
-vIMS
-^^^^
-
-The results in the console can be described as follows::
-
- FUNCTEST.info: Running vIMS test...
- 2016-02-04 13:46:25,025 - vIMS - INFO - Prepare OpenStack plateform (create tenant and user)
- 2016-02-04 13:46:25,312 - vIMS - INFO - Update OpenStack creds informations
- 2016-02-04 13:46:25,312 - vIMS - INFO - Upload some OS images if it doesn't exist
- 2016-02-04 13:46:25,566 - vIMS - INFO - centos_7 image doesn't exist on glance repository.
- Try downloading this image and upload on glance !
- 2016-02-04 13:47:06,167 - vIMS - INFO - ubuntu_14.04 image doesn't exist on glance repository.
- Try downloading this image and upload on glance !
- 2016-02-04 13:47:26,987 - vIMS - INFO - Update security group quota for this tenant
- 2016-02-04 13:47:27,193 - vIMS - INFO - Update cinder quota for this tenant
- 2016-02-04 13:47:27,746 - vIMS - INFO - Collect flavor id for cloudify manager server
- 2016-02-04 13:47:28,326 - vIMS - INFO - Prepare virtualenv for cloudify-cli
- 2016-02-04 13:48:00,657 - vIMS - INFO - Downloading the cloudify manager server blueprint
- 2016-02-04 13:48:03,391 - vIMS - INFO - Cloudify deployment Start Time:'2016-02-04 13:48:03'
- 2016-02-04 13:48:03,391 - vIMS - INFO - Writing the inputs file
- 2016-02-04 13:48:03,395 - vIMS - INFO - Launching the cloudify-manager deployment
- 2016-02-04 13:56:03,501 - vIMS - INFO - Cloudify-manager server is UP !
- 2016-02-04 13:56:03,502 - vIMS - INFO - Cloudify deployment duration:'480.1'
- 2016-02-04 13:56:03,502 - vIMS - INFO - Collect flavor id for all clearwater vm
- 2016-02-04 13:56:04,093 - vIMS - INFO - vIMS VNF deployment Start Time:'2016-02-04 13:56:04'
- 2016-02-04 13:56:04,093 - vIMS - INFO - Downloading the openstack-blueprint.yaml blueprint
- 2016-02-04 13:56:06,265 - vIMS - INFO - Writing the inputs file
- 2016-02-04 13:56:06,268 - vIMS - INFO - Launching the clearwater deployment
- 2016-02-04 14:11:27,101 - vIMS - INFO - The deployment of clearwater-opnfv is ended
- 2016-02-04 14:11:27,103 - vIMS - INFO - vIMS VNF deployment duration:'923.0'
- 2016-02-04 14:14:31,976 - vIMS - INFO - vIMS functional test Start Time:'2016-02-04 14:14:31'
- 2016-02-04 14:15:45,880 - vIMS - INFO - vIMS functional test duration:'73.9'
- 2016-02-04 14:15:46,113 - vIMS - INFO - Launching the clearwater-opnfv undeployment
- 2016-02-04 14:18:12,604 - vIMS - INFO - Launching the cloudify-manager undeployment
- 2016-02-04 14:18:51,808 - vIMS - INFO - Cloudify-manager server has been successfully removed!
- 2016-02-04 14:18:51,870 - vIMS - INFO - Removing vIMS tenant ..
- 2016-02-04 14:18:52,131 - vIMS - INFO - Removing vIMS user ..
-
-Please note that vIMS traces can bee summarized in several steps:
-
- * INFO: environment prepared successfully => environment OK
- * INFO - Cloudify-manager server is UP ! => orchestrator deployed
- * INFO - The deployment of clearwater-opnfv is ended => VNF deployed
- * Multiple Identities (UDP) - (6505550771, 6505550675) Passed => tests run
- * DEBUG - Pushing results to DB.... => tests saved
-
-
-Promise
-^^^^^^^
-
-The results can be observed in the console::
-
- Running test case: promise
- ----------------------------------------------
- FUNCTEST.info: Running PROMISE test case...
- 2016-02-04 07:10:37,735 - Promise- INFO - Creating tenant 'promise'...
- 2016-02-04 07:10:37,893 - Promise- INFO - Adding role '59828986a9a94dfaa852548599fde628' to tenant 'promise'...
- 2016-02-04 07:10:38,005 - Promise- INFO - Creating user 'promiser'...
- 2016-02-04 07:10:38,128 - Promise- INFO - Updating OpenStack credentials...
- 2016-02-04 07:10:38,157 - Promise- INFO - Creating image 'promise-img' from '/home/opnfv/functest/data/cirros-0.3.4-x86_64-disk.img'...
- 2016-02-04 07:10:42,016 - Promise- INFO - Creating flavor 'promise-flavor'...
- 2016-02-04 07:10:42,836 - Promise- INFO - Exporting environment variables...
- 2016-02-04 07:10:42,836 - Promise- INFO - Running command: DEBUG=1 npm run -s test -- --reporter json
- 2016-02-04 07:10:51,505 - Promise- INFO - The test succeeded.
- ....
- **********************************
- Promise test summary
- **********************************
- Test start: Thu Feb 04 07:10:42 UTC 2016
- Test end: Thu Feb 04 07:10:51 UTC 2016
- Execution time: 8.7
- Total tests executed: 33
- Total tests failed: 0
- **********************************
+For Brahmaputra test results, see the functest results document at:
+http://artifacts.opnfv.org/functest/brahmaputra/docs/results/index.html
Test Dashboard
@@ -704,8 +385,8 @@ Troubleshooting
VIM
---
-vPing
-^^^^^
+vPing_SSH
+^^^^^^^^^
vPing should work on all the scenarios. In case of timeout, check your network
connectivity. The test case creates its own security group to allow SSH access,
@@ -833,6 +514,7 @@ References
.. _`[8]`: https://wiki.openstack.org/wiki/Governance/DefCoreCommittee
.. _`[9]`: https://git.opnfv.org/cgit/functest/tree/testcases/VIM/OpenStack/CI/libraries/os_defaults.yaml
.. _`[10]`: https://git.opnfv.org/cgit/functest/tree/testcases/VIM/OpenStack/CI/rally_cert/task.yaml
+.. _`[11]`: http://robotframework.org/
OPNFV main site: opnfvmain_.
diff --git a/docs/userguide/runfunctest.rst b/docs/userguide/runfunctest.rst
index b186e539c..364333740 100644
--- a/docs/userguide/runfunctest.rst
+++ b/docs/userguide/runfunctest.rst
@@ -5,89 +5,120 @@ Manual testing
--------------
Once the Functest docker container is running and Functest environment ready
-(through /home/opnfv/repos/functest/docker/prepare_env.sh script), the system is
+(through */home/opnfv/repos/functest/docker/prepare_env.sh* script), the system is
ready to run the tests.
-The script *run_tests.sh* is located in $repos_dir/functest/docker and it has
+The script *run_tests.sh* launches the tests in an automated way.
+Although it is possible to execute the different tests manually, it is
+recommended to use this shell script, which calls the actual test scripts
+with the appropriate parameters.
+
+It is located in *$repos_dir/functest/docker* and it has
several options::
./run_tests.sh -h
Script to trigger the tests automatically.
usage:
- bash run_tests.sh [--offline] [-h|--help] [-t <test_name>]
+ bash run_tests.sh [-h|--help] [-r|--report] [-n|--no-clean] [-t|--test <test_name>]
where:
-h|--help show this help text
-r|--report push results to database (false by default)
-n|--no-clean do not clean up OpenStack resources after test run
+ -s|--serial run tests in one thread
-t|--test run specific set of tests
- <test_name> one or more of the following: vping,vping_userdata,odl,rally,tempest,vims,onos,promise. Separated by comma.
+ <test_name> one or more of the following separated by comma:
+ vping_ssh,vping_userdata,odl,rally,tempest,vims,onos,promise,ovno
examples:
run_tests.sh
run_tests.sh --test vping,odl
run_tests.sh -t tempest,rally --no-clean
-The *-r* option is used by the Continuous Integration in order to push the test
-results into a test collection database, see in next section for details.
-In manual mode, you must not use it, your try will be anyway probably rejected
-as your POD must be declared in the database to collect the data.
+The *-r* option is used by the OPNFV Continuous Integration automation mechanisms
+in order to push the test results into the NoSQL results collection database.
+This database is read-only for regular users, since pushing data requires special
+rights and conditions.
-The *-n* option is used for preserving all the existing OpenStack resources after
-execution test cases.
+The *-t* option can be used to specify the list of desired tests to be launched;
+by default Functest will launch all the test suites in the following order:
+vPing, Tempest, vIMS, Rally.
-The *-t* option can be used to specify the list of test you want to launch, by
-default Functest will try to launch all its test suites in the following order
-vPing, odl, Tempest, vIMS, Rally.
-You may launch only one single test by using *-t <the test you want to launch>*.
+A single test or a set of tests may be launched at once using *-t <test_name>*,
+specifying the test name or names, separated by commas, from the following list:
+*[vping_ssh,vping_userdata,odl,rally,tempest,vims,onos,promise]*.
-Within Tempest test suite you can define which test cases you want to execute in
-your environment by editing test_list.txt file before executing *run_tests.sh*
-script.
+The *-n* option is used for preserving all the OpenStack resources created
+by the tests after their execution.
Please note that Functest includes cleaning mechanism in order to remove
-everything except what was present after a fresh install.
-If you create your own VMs, tenants, networks etc. and then launch Functest,
-they all will be deleted after executing the tests. Use the *--no-clean* option with
-run_test.sh in order to preserve all the existing resources.
-However, be aware that Tempest and Rally create of lot of resources (users,
+all the VIM resources except what was present before running any test. The script
+*$repos_dir/functest/testcases/VIM/OpenStack/CI/libraries/generate_defaults.py*
+is called once by *prepare_env.sh* when setting up the Functest environment
+to snapshot all the OpenStack resources (images, networks, volumes, security groups,
+tenants, users) so that an eventual cleanup does not remove any of these defaults.
+
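The exact snapshot format is defined by *generate_defaults.py* itself; the following is only a rough, hypothetical sketch of the idea (client calls follow the OpenStack python clients already used in this patch, and the field names are illustrative only)::

    import yaml
    import functest_utils
    from novaclient import client as novaclient
    from neutronclient.v2_0 import client as neutronclient

    # Record the resources that exist before any test runs, so that the
    # cleanup step can later leave them untouched.
    nova = novaclient.Client('2', **functest_utils.get_credentials("nova"))
    neutron = neutronclient.Client(**functest_utils.get_credentials("neutron"))

    defaults = {
        'images': [img.id for img in nova.images.list()],
        'networks': [n['id'] for n in neutron.list_networks()['networks']],
        'security_groups': [sg['id'] for sg in
                            neutron.list_security_groups()['security_groups']],
    }

    with open('/home/opnfv/functest/conf/os_defaults.yaml', 'w') as f:
        yaml.dump(defaults, f, default_flow_style=False)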
+The *-s* option forces execution of test cases in a single thread. Currently this
+option affects Tempest test cases only and can be used e.g. for troubleshooting
+concurrency problems.
+
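For example, to re-run only the Tempest suite in a single thread when chasing a
concurrency issue::

    ./run_tests.sh -t tempest -s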
+The script
+*$repos_dir/functest/testcases/VIM/OpenStack/CI/libraries/clean_openstack.py*
+is normally called after a test execution if the *-n* option is not specified. It
+is in charge of cleaning the OpenStack resources that are not listed in the
+defaults file generated previously, which is stored in
+*/home/opnfv/functest/conf/os_defaults.yaml* inside the docker
+container.
+
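Conceptually (this is only a sketch, not the actual *clean_openstack.py* code) the
cleanup boils down to deleting whatever is not listed in that defaults snapshot::

    import yaml
    import functest_utils
    from neutronclient.v2_0 import client as neutronclient

    with open('/home/opnfv/functest/conf/os_defaults.yaml') as f:
        defaults = yaml.safe_load(f)

    neutron = neutronclient.Client(**functest_utils.get_credentials("neutron"))

    # Anything created after the snapshot was taken is assumed to be a leftover
    # of the test run and gets removed (networks shown here; other resource
    # types are handled the same way).
    for net in neutron.list_networks()['networks']:
        if net['id'] not in defaults.get('networks', []):
            neutron.delete_network(net['id'])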
+It is important to mention that if there are new OpenStack resources created
+manually after preparing the Functest environment, they will be removed if this
+flag is not specified in the *run_tests.sh* command.
+The reason to include this cleanup mechanism in Functest is that some
+test suites such as Tempest or Rally create a lot of resources (users,
tenants, networks, volumes etc.) that are not always properly cleaned, so this
-cleaning function has been set to keep the system as clean as possible after a
-full Functest run.
+cleaning function has been set to keep the system as clean as it was before a
+full Functest execution.
-You may also add you own test by adding a section into the function run_test().
+Within the Tempest test suite it is possible to define which test cases to execute
+by editing the *test_list.txt* file before executing the *run_tests.sh* script. This
+file is located in *$repos_dir/functest/testcases/VIM/OpenStack/CI/custom_tests/test_list.txt*.
+
+Although *run_tests.sh* provides an easy way to run any test, it is possible to
+call the desired test script directly. For example::
+
+    python $repos_dir/functest/testcases/vPing/CI/libraries/vPing_ssh.py -d
Automated testing
-----------------
-As mentioned in `[1]`, the *prepare-env.sh* and *run_test.sh* can be executed within
-the container from jenkins.
-2 jobs have been created, one to run all the test and one that allows testing
-test suite by test suite.
-You thus just have to launch the acurate jenkins job on the target lab, all the
-tests shall be automatically run.
+As mentioned in `[1]`_, the *prepare_env.sh* and *run_tests.sh* scripts can be called
+within the container from Jenkins. There are 2 jobs that automate all the manual steps
+explained in the previous section. One job runs all the tests and the other one
+allows testing suite by suite by specifying the test name. The user may use either
+job to execute the desired test suites.
+
+One of the most challenging tasks in the Brahmaputra release is dealing
+with a large number of scenarios and installers. Thus, when the tests are
+automatically started from CI, a basic algorithm has been created in order to
+detect whether a given test is runnable or not on the given scenario.
+Some Functest test suites cannot be systematically run (e.g. the ODL suite cannot
+be run on an ONOS scenario).
+
-When the tests are automatically started from CI, a basic algorithm has been
-created in order to detect whether the test is runnable or not on the given
-scenario.
-In fact, one of the most challenging task in Brahmaputra consists in dealing
-with lots of scenario and installers.
-Functest test suites cannot be systematically run (e.g. run the ODL suite on an
-ONOS scenario).
-CI provides several information:
+CI provides some useful information passed to the container as environment
+variables:
- * The installer (apex|compass|fuel|joid)
- * The scenario [controller]-[feature]-[mode] with
+ * The installer (apex|compass|fuel|joid), stored in INSTALLER_TYPE
+ * The IP of the installer engine or VM running the actual deployment, stored in INSTALLER_IP
+ * The scenario [controller]-[feature]-[mode], stored in DEPLOY_SCENARIO with
* controller = (odl|onos|ocl|nosdn)
* feature = (ovs(dpdk)|kvm)
* mode = (ha|noha)
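Inside the container these variables can be read directly by the test scripts; a
minimal illustration (the values shown in the comments are examples only)::

    import os

    installer = os.environ.get('INSTALLER_TYPE', 'unknown')    # e.g. 'fuel'
    installer_ip = os.environ.get('INSTALLER_IP', '')          # address of the deployment engine/VM
    scenario = os.environ.get('DEPLOY_SCENARIO', 'unknown')    # e.g. 'os-odl_l2-nofeature-ha'

    print("Installer: %s, scenario: %s" % (installer, scenario))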
-Constraints per test case are defined in the Functest configuration file
-/home/opnfv/functest/config/config_functest.yaml::
+The constraints per test case are defined in the Functest configuration file
+*/home/opnfv/functest/conf/config_functest.yaml*::
test-dependencies:
functest:
@@ -105,16 +136,17 @@ Constraints per test case are defined in the Functest configuration file
....
At the end of the Functest environment creation (prepare_env.sh see `[1]`_), a
-file (/home/opnfv/functest/conf/testcase-list.txt) is created with the list of
+file */home/opnfv/functest/conf/testcase-list.txt* is created with the list of
all the runnable tests.
-We consider the static constraints as regex and compare them with the scenario.
-For instance, odl can be run only on scenario including odl in its name.
+Functest considers the static constraints as regular expressions and compares them
+with the given scenario name.
+For instance, the ODL suite can be run only on a scenario including 'odl' in its name.
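A minimal sketch of that check (the real helper is exercised as *isTestRunnable()* in
*TestFunctestUtils.py* later in this patch; the function below is a simplified,
hypothetical stand-in)::

    import re

    def is_runnable(scenario, constraint):
        # 'constraint' is the regex stored in config_functest.yaml,
        # e.g. '(ocl)|(odl)|(nosdn)' for vping_userdata; no constraint
        # means the test runs everywhere.
        return constraint is None or re.search(constraint, scenario) is not None

    is_runnable('os-odl_l2-nofeature-ha', 'odl')   # True  -> ODL suite runs
    is_runnable('os-onos-nofeature-ha', 'odl')     # False -> ODL suite skipped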
The order of execution is also described in the Functest configuration file::
test_exec_priority:
- 1: vping
+ 1: vping_ssh
2: vping_userdata
3: tempest
4: odl
@@ -130,13 +162,10 @@ The order of execution is also described in the Functest configuration file::
The tests are executed in the following order:
- * Basic scenario (vPing, vPing_userdata, Tempest)
+ * Basic scenario (vPing_ssh, vPing_userdata, Tempest)
* Controller suites: ODL or ONOS or OpenContrail
* Feature projects (promise, vIMS)
* Rally (benchmark scenario)
-At the end of an automated execution, everything is cleaned.
-Before running Functest, a snapshot of the OpenStack configuration (users,
-tenants, networks, ....) is performed. After Functest, a clean mechanism is
-launched to delete everything that would not have been properly deleted in order
-to restitute the system as it was prior to the tests.
+As explained before, at the end of an automated execution the cleanup mechanism
+removes any OpenStack resources created during the tests (unless the *-n* option was used).
diff --git a/testcases/Controllers/ODL/CI/odlreport2db.py b/testcases/Controllers/ODL/CI/odlreport2db.py
index 1538f79cf..47067963a 100644
--- a/testcases/Controllers/ODL/CI/odlreport2db.py
+++ b/testcases/Controllers/ODL/CI/odlreport2db.py
@@ -130,6 +130,7 @@ def main(argv):
# -p opnfv-jump-2
# -s os-odl_l2-ha
functest_utils.push_results_to_db(database,
+ "functest",
data['case_name'],
None,
data['pod_name'],
diff --git a/testcases/Controllers/ONOS/Teston/CI/onosfunctest.py b/testcases/Controllers/ONOS/Teston/CI/onosfunctest.py
index bf031cb47..dc45088b3 100644
--- a/testcases/Controllers/ONOS/Teston/CI/onosfunctest.py
+++ b/testcases/Controllers/ONOS/Teston/CI/onosfunctest.py
@@ -182,6 +182,7 @@ def main():
pod_name = functest_utils.get_pod_name(logger)
result = GetResult()
functest_utils.push_results_to_db(TEST_DB,
+ "functest",
"ONOS",
logger, pod_name, scenario,
payload=result)
diff --git a/testcases/VIM/OpenStack/CI/libraries/run_rally-cert.py b/testcases/VIM/OpenStack/CI/libraries/run_rally-cert.py
index 0fb6ce7d4..0d1992604 100755
--- a/testcases/VIM/OpenStack/CI/libraries/run_rally-cert.py
+++ b/testcases/VIM/OpenStack/CI/libraries/run_rally-cert.py
@@ -13,16 +13,17 @@
# 0.3 (19/10/2015) remove Tempest from run_rally
# and push result into test DB
#
-
-import re
-import json
-import os
import argparse
+import json
import logging
-import yaml
+import os
+import re
import requests
import subprocess
import sys
+import time
+import yaml
+
from novaclient import client as novaclient
from glanceclient import client as glanceclient
from keystoneclient.v2_0 import client as keystoneclient
@@ -53,6 +54,9 @@ parser.add_argument("-s", "--smoke",
parser.add_argument("-v", "--verbose",
help="Print verbose info about the progress",
action="store_true")
+parser.add_argument("-n", "--noclean",
+ help="Don't clean the created resources for this test.",
+ action="store_true")
args = parser.parse_args()
@@ -78,7 +82,7 @@ formatter = logging.Formatter("%(asctime)s - %(name)s - "
ch.setFormatter(formatter)
logger.addHandler(ch)
-REPO_PATH=os.environ['repos_dir']+'/functest/'
+REPO_PATH = os.environ['repos_dir']+'/functest/'
if not os.path.exists(REPO_PATH):
logger.error("Functest repository directory not found '%s'" % REPO_PATH)
exit(-1)
@@ -90,8 +94,8 @@ with open("/home/opnfv/functest/conf/config_functest.yaml") as f:
f.close()
HOME = os.environ['HOME']+"/"
-####todo:
-#SCENARIOS_DIR = REPO_PATH + functest_yaml.get("general"). \
+### todo:
+# SCENARIOS_DIR = REPO_PATH + functest_yaml.get("general"). \
# get("directories").get("dir_rally_scn")
SCENARIOS_DIR = REPO_PATH + "testcases/VIM/OpenStack/CI/rally_cert/"
###
@@ -108,8 +112,6 @@ CONCURRENCY = 4
RESULTS_DIR = functest_yaml.get("general").get("directories"). \
get("dir_rally_res")
TEST_DB = functest_yaml.get("results").get("test_db_url")
-FLOATING_NETWORK = functest_yaml.get("general"). \
- get("openstack").get("neutron_public_net_name")
PRIVATE_NETWORK = functest_yaml.get("general"). \
get("openstack").get("neutron_private_net_name")
@@ -125,14 +127,17 @@ GLANCE_IMAGE_PATH = functest_yaml.get("general"). \
CINDER_VOLUME_TYPE_NAME = "volume_test"
-def push_results_to_db(payload):
+SUMMARY = []
+
+
+def push_results_to_db(case, payload):
url = TEST_DB + "/results"
installer = functest_utils.get_installer_type(logger)
scenario = functest_utils.get_scenario(logger)
pod_name = functest_utils.get_pod_name(logger)
# TODO pod_name hardcoded, info shall come from Jenkins
- params = {"project_name": "functest", "case_name": "Rally",
+ params = {"project_name": "functest", "case_name": case,
"pod_name": pod_name, "installer": installer,
"version": scenario, "details": payload}
@@ -180,9 +185,6 @@ def build_task_args(test_file_name):
task_args['image_name'] = GLANCE_IMAGE_NAME
task_args['flavor_name'] = FLAVOR_NAME
task_args['glance_image_location'] = GLANCE_IMAGE_PATH
- task_args['floating_network'] = FLOATING_NETWORK
- task_args['netid'] = functest_utils.get_network_id(client_dict['neutron'],
- PRIVATE_NETWORK).encode('ascii', 'ignore')
task_args['tmpl_dir'] = TEMPLATE_DIR
task_args['sup_dir'] = SUPPORT_DIR
task_args['users_amount'] = USERS_AMOUNT
@@ -190,19 +192,32 @@ def build_task_args(test_file_name):
task_args['iterations'] = ITERATIONS_AMOUNT
task_args['concurrency'] = CONCURRENCY
+ ext_net = functest_utils.get_external_net(client_dict['neutron'])
+ if ext_net:
+ task_args['floating_network'] = str(ext_net)
+ else:
+ task_args['floating_network'] = ''
+
+ net_id = functest_utils.get_network_id(client_dict['neutron'],
+ PRIVATE_NETWORK)
+ task_args['netid'] = str(net_id)
+
return task_args
-def get_output(proc):
+def get_output(proc, test_name):
+ global SUMMARY
result = ""
- if args.verbose:
- while proc.poll() is None:
- line = proc.stdout.readline()
- print line.replace('\n', '')
+ nb_tests = 0
+ overall_duration = 0.0
+ success = 0.0
+ nb_totals = 0
+
+ while proc.poll() is None:
+ line = proc.stdout.readline()
+ if args.verbose:
result += line
- else:
- while proc.poll() is None:
- line = proc.stdout.readline()
+ else:
if "Load duration" in line or \
"started" in line or \
"finished" in line or \
@@ -214,7 +229,36 @@ def get_output(proc):
result += "\n" + line
elif "Full duration" in line:
result += line + "\n\n"
- logger.info("\n" + result)
+
+ # parse output for summary report
+ if "| " in line and \
+ "| action" not in line and \
+ "| Starting" not in line and \
+ "| Completed" not in line and \
+ "| ITER" not in line and \
+ "| " not in line and \
+ "| total" not in line:
+ nb_tests += 1
+ elif "| total" in line:
+ percentage = ((line.split('|')[8]).strip(' ')).strip('%')
+ success += float(percentage)
+ nb_totals += 1
+ elif "Full duration" in line:
+ overall_duration += float(line.split(': ')[1])
+
+ overall_duration="{:10.2f}".format(overall_duration)
+ if nb_totals == 0:
+ success_avg = 0
+ else:
+ success_avg = "{:0.2f}".format(success / nb_totals)
+
+ scenario_summary = {'test_name': test_name,
+ 'overall_duration': overall_duration,
+ 'nb_tests': nb_tests,
+ 'success': success_avg}
+ SUMMARY.append(scenario_summary)
+
+ logger.info("\n" + result)
return result
@@ -225,7 +269,7 @@ def run_task(test_name):
# :param test_name: name for the rally test
# :return: void
#
-
+ global SUMMARY
logger.info('Starting test scenario "{}" ...'.format(test_name))
task_file = '{}task.yaml'.format(SCENARIOS_DIR)
@@ -233,7 +277,8 @@ def run_task(test_name):
logger.error("Task file '%s' does not exist." % task_file)
exit(-1)
- test_file_name = '{}opnfv-{}.yaml'.format(SCENARIOS_DIR + "scenario/", test_name)
+ test_file_name = '{}opnfv-{}.yaml'.format(SCENARIOS_DIR + "scenario/",
+ test_name)
if not os.path.exists(test_file_name):
logger.error("The scenario '%s' does not exist." % test_file_name)
exit(-1)
@@ -245,18 +290,19 @@ def run_task(test_name):
"--task-args \"{}\" ".format(build_task_args(test_name))
logger.debug('running command line : {}'.format(cmd_line))
- p = subprocess.Popen(cmd_line, stdout=subprocess.PIPE, stderr=RALLY_STDERR, shell=True)
- output = get_output(p)
+ p = subprocess.Popen(cmd_line, stdout=subprocess.PIPE,
+ stderr=RALLY_STDERR, shell=True)
+ output = get_output(p, test_name)
task_id = get_task_id(output)
logger.debug('task_id : {}'.format(task_id))
if task_id is None:
- logger.error("failed to retrieve task_id")
+ logger.error("Failed to retrieve task_id.")
exit(-1)
# check for result directory and create it otherwise
if not os.path.exists(RESULTS_DIR):
- logger.debug('does not exists, we create it'.format(RESULTS_DIR))
+ logger.debug('{} does not exist, we create it.'.format(RESULTS_DIR))
os.makedirs(RESULTS_DIR)
# write html report file
@@ -283,7 +329,7 @@ def run_task(test_name):
# Push results in payload of testcase
if args.report:
logger.debug("Push result into DB")
- push_results_to_db(json_data)
+ push_results_to_db("Rally_details", json_data)
""" parse JSON operation result """
if task_succeed(json_results):
@@ -293,23 +339,25 @@ def run_task(test_name):
def main():
+ global SUMMARY
# configure script
if not (args.test_name in tests):
logger.error('argument not valid')
exit(-1)
+ SUMMARY = []
creds_nova = functest_utils.get_credentials("nova")
- nova_client = novaclient.Client('2',**creds_nova)
+ nova_client = novaclient.Client('2', **creds_nova)
creds_neutron = functest_utils.get_credentials("neutron")
neutron_client = neutronclient.Client(**creds_neutron)
creds_keystone = functest_utils.get_credentials("keystone")
keystone_client = keystoneclient.Client(**creds_keystone)
glance_endpoint = keystone_client.service_catalog.url_for(service_type='image',
- endpoint_type='publicURL')
+ endpoint_type='publicURL')
glance_client = glanceclient.Client(1, glance_endpoint,
token=keystone_client.auth_token)
creds_cinder = functest_utils.get_credentials("cinder")
- cinder_client = cinderclient.Client('2',creds_cinder['username'],
+ cinder_client = cinderclient.Client('2', creds_cinder['username'],
creds_cinder['api_key'],
creds_cinder['project_id'],
creds_cinder['auth_url'],
@@ -317,9 +365,10 @@ def main():
client_dict['neutron'] = neutron_client
- volume_types = functest_utils.list_volume_types(cinder_client, private=False)
+ volume_types = functest_utils.list_volume_types(cinder_client,
+ private=False)
if not volume_types:
- volume_type = functest_utils.create_volume_type(cinder_client, \
+ volume_type = functest_utils.create_volume_type(cinder_client,
CINDER_VOLUME_TYPE_NAME)
if not volume_type:
logger.error("Failed to create volume type...")
@@ -333,10 +382,11 @@ def main():
image_id = functest_utils.get_image_id(glance_client, GLANCE_IMAGE_NAME)
if image_id == '':
- logger.debug("Creating image '%s' from '%s'..." % (GLANCE_IMAGE_NAME, \
+ logger.debug("Creating image '%s' from '%s'..." % (GLANCE_IMAGE_NAME,
GLANCE_IMAGE_PATH))
- image_id = functest_utils.create_glance_image(glance_client,\
- GLANCE_IMAGE_NAME,GLANCE_IMAGE_PATH)
+ image_id = functest_utils.create_glance_image(glance_client,
+ GLANCE_IMAGE_NAME,
+ GLANCE_IMAGE_PATH)
if not image_id:
logger.error("Failed to create the Glance image...")
exit(-1)
@@ -345,7 +395,7 @@ def main():
% (GLANCE_IMAGE_NAME, image_id))
else:
logger.debug("Using existing image '%s' with ID '%s'..." \
- % (GLANCE_IMAGE_NAME,image_id))
+ % (GLANCE_IMAGE_NAME, image_id))
if args.test_name == "all":
for test_name in tests:
@@ -353,9 +403,66 @@ def main():
test_name == 'vm'):
run_task(test_name)
else:
- print(args.test_name)
+ logger.debug("Test name: " + args.test_name)
run_task(args.test_name)
+ report = "\n"\
+ " \n"\
+ " Rally Summary Report\n"\
+ "+===================+============+===============+===========+\n"\
+ "| Module | Duration | nb. Test Run | Success |\n"\
+ "+===================+============+===============+===========+\n"
+ payload = []
+
+ #for each scenario we draw a row for the table
+ total_duration = 0.0
+ total_nb_tests = 0
+ total_success = 0.0
+ for s in SUMMARY:
+ name = "{0:<17}".format(s['test_name'])
+ duration = float(s['overall_duration'])
+ total_duration += duration
+ duration = time.strftime("%M:%S", time.gmtime(duration))
+ duration = "{0:<10}".format(duration)
+ nb_tests = "{0:<13}".format(s['nb_tests'])
+ total_nb_tests += int(s['nb_tests'])
+ success = "{0:<10}".format(str(s['success'])+'%')
+ total_success += float(s['success'])
+ report += ""\
+ "| " + name + " | " + duration + " | " + nb_tests + " | " + success + "|\n"\
+ "+-------------------+------------+---------------+-----------+\n"
+ payload.append({'module': name,
+ 'details': {'duration': s['overall_duration'],
+ 'nb tests': s['nb_tests'],
+ 'success': s['success']}})
+
+ total_duration_str = time.strftime("%H:%M:%S", time.gmtime(total_duration))
+ total_duration_str2 = "{0:<10}".format(total_duration_str)
+ total_nb_tests_str = "{0:<13}".format(total_nb_tests)
+ total_success = "{:0.2f}".format(total_success / len(SUMMARY))
+ total_success_str = "{0:<10}".format(str(total_success)+'%')
+ report += "+===================+============+===============+===========+\n"
+ report += "| TOTAL: | " + total_duration_str2 + " | " + \
+ total_nb_tests_str + " | " + total_success_str + "|\n"
+ report += "+===================+============+===============+===========+\n"
+
+ logger.info("\n"+report)
+ payload.append({'summary': {'duration': total_duration,
+ 'nb tests': total_nb_tests,
+ 'nb success': total_success}})
+
+ # Generate json results for DB
+ #json_results = {"timestart": time_start, "duration": total_duration,
+ # "tests": int(total_nb_tests), "success": int(total_success)}
+ #logger.info("Results: "+str(json_results))
+
+ if args.report:
+ logger.debug("Pushing Rally summary into DB...")
+ push_results_to_db("Rally", payload)
+
+ if args.noclean:
+ exit(0)
+
logger.debug("Deleting image '%s' with ID '%s'..." \
% (GLANCE_IMAGE_NAME, image_id))
if not functest_utils.delete_glance_image(nova_client, image_id):
diff --git a/testcases/VIM/OpenStack/CI/libraries/run_rally.py b/testcases/VIM/OpenStack/CI/libraries/run_rally.py
index 3c70e3880..6b1aae2eb 100755
--- a/testcases/VIM/OpenStack/CI/libraries/run_rally.py
+++ b/testcases/VIM/OpenStack/CI/libraries/run_rally.py
@@ -47,6 +47,9 @@ parser.add_argument("-r", "--report",
parser.add_argument("-v", "--verbose",
help="Print verbose info about the progress",
action="store_true")
+parser.add_argument("-n", "--noclean",
+ help="Don't clean the created resources for this test.",
+ action="store_true")
args = parser.parse_args()
@@ -70,7 +73,7 @@ formatter = logging.Formatter("%(asctime)s - %(name)s - "
ch.setFormatter(formatter)
logger.addHandler(ch)
-REPO_PATH=os.environ['repos_dir']+'/functest/'
+REPO_PATH = os.environ['repos_dir']+'/functest/'
if not os.path.exists(REPO_PATH):
logger.error("Functest repository directory not found '%s'" % REPO_PATH)
exit(-1)
@@ -97,14 +100,14 @@ GLANCE_IMAGE_PATH = functest_yaml.get("general"). \
get("directories").get("dir_functest_data") + "/" + GLANCE_IMAGE_FILENAME
-def push_results_to_db(payload):
+def push_results_to_db(case, payload):
url = TEST_DB + "/results"
installer = functest_utils.get_installer_type(logger)
scenario = functest_utils.get_scenario(logger)
pod_name = functest_utils.get_pod_name(logger)
# TODO pod_name hardcoded, info shall come from Jenkins
- params = {"project_name": "functest", "case_name": "Rally",
+ params = {"project_name": "functest", "case_name": case,
"pod_name": pod_name, "installer": installer,
"version": scenario, "details": payload}
@@ -213,7 +216,7 @@ def run_task(test_name):
# Push results in payload of testcase
if args.report:
logger.debug("Push result into DB")
- push_results_to_db(json_data)
+ push_results_to_db("Rally_details", json_data)
""" parse JSON operation result """
if task_succeed(json_results):
@@ -232,22 +235,22 @@ def main():
exit(-1)
creds_nova = functest_utils.get_credentials("nova")
- nova_client = novaclient.Client('2',**creds_nova)
+ nova_client = novaclient.Client('2', **creds_nova)
creds_keystone = functest_utils.get_credentials("keystone")
keystone_client = keystoneclient.Client(**creds_keystone)
glance_endpoint = keystone_client.service_catalog.url_for(service_type='image',
- endpoint_type='publicURL')
+ endpoint_type='publicURL')
glance_client = glanceclient.Client(1, glance_endpoint,
token=keystone_client.auth_token)
-
image_id = functest_utils.get_image_id(glance_client, GLANCE_IMAGE_NAME)
if image_id == '':
- logger.debug("Creating image '%s' from '%s'..." % (GLANCE_IMAGE_NAME, \
+ logger.debug("Creating image '%s' from '%s'..." % (GLANCE_IMAGE_NAME,
GLANCE_IMAGE_PATH))
- image_id = functest_utils.create_glance_image(glance_client,\
- GLANCE_IMAGE_NAME,GLANCE_IMAGE_PATH)
+ image_id = functest_utils.create_glance_image(glance_client,
+ GLANCE_IMAGE_NAME,
+ GLANCE_IMAGE_PATH)
if not image_id:
logger.error("Failed to create the Glance image...")
exit(-1)
@@ -256,8 +259,7 @@ def main():
% (GLANCE_IMAGE_NAME, image_id))
else:
logger.debug("Using existing image '%s' with ID '%s'..." \
- % (GLANCE_IMAGE_NAME,image_id))
-
+ % (GLANCE_IMAGE_NAME, image_id))
if args.test_name == "all":
for test_name in tests:
@@ -272,6 +274,9 @@ def main():
print(args.test_name)
run_task(args.test_name)
+ if args.noclean:
+ exit(0)
+
logger.debug("Deleting image '%s' with ID '%s'..." \
% (GLANCE_IMAGE_NAME, image_id))
if not functest_utils.delete_glance_image(nova_client, image_id):
diff --git a/testcases/VIM/OpenStack/CI/libraries/run_tempest.py b/testcases/VIM/OpenStack/CI/libraries/run_tempest.py
index b8ed2716e..294669182 100644
--- a/testcases/VIM/OpenStack/CI/libraries/run_tempest.py
+++ b/testcases/VIM/OpenStack/CI/libraries/run_tempest.py
@@ -33,12 +33,21 @@ modes = ['full', 'smoke', 'baremetal', 'compute', 'data_processing',
""" tests configuration """
parser = argparse.ArgumentParser()
-parser.add_argument("-d", "--debug", help="Debug mode", action="store_true")
-parser.add_argument("-m", "--mode", help="Tempest test mode [smoke, all]",
+parser.add_argument("-d", "--debug",
+ help="Debug mode",
+ action="store_true")
+parser.add_argument("-s", "--serial",
+ help="Run tests in one thread",
+ action="store_true")
+parser.add_argument("-m", "--mode",
+ help="Tempest test mode [smoke, all]",
default="smoke")
parser.add_argument("-r", "--report",
help="Create json result file",
action="store_true")
+parser.add_argument("-n", "--noclean",
+ help="Don't clean the created resources for this test.",
+ action="store_true")
args = parser.parse_args()
@@ -289,12 +298,19 @@ def main():
else:
MODE = "--set "+args.mode
+ if args.serial:
+ MODE = "--concur 1 "+MODE
+
if not os.path.exists(TEMPEST_RESULTS_DIR):
os.makedirs(TEMPEST_RESULTS_DIR)
create_tempest_resources()
configure_tempest()
run_tempest(MODE)
+
+ if args.noclean:
+ exit(0)
+
free_tempest_resources()
diff --git a/testcases/config_functest.yaml b/testcases/config_functest.yaml
index f32314ac6..7d5f21360 100644
--- a/testcases/config_functest.yaml
+++ b/testcases/config_functest.yaml
@@ -49,12 +49,6 @@ general:
image_file_name: cirros-0.3.4-x86_64-disk.img
image_disk_format: qcow2
- #Public network. Optional
- neutron_public_net_name: net04_ext
- neutron_public_subnet_name: net04_ext__subnet
- neutron_public_subnet_cidr: 172.16.9.0/24
- neutron_public_subnet_start: 172.16.9.130
- neutron_public_subnet_end: 172.16.9.254
#Private network for functest. Will be created by config_functest.py
neutron_private_net_name: functest-net
neutron_private_subnet_name: functest-subnet
@@ -172,7 +166,7 @@ results:
# the execution order is important as some tests may be more destructive than others
# and if vPing is failing is usually not needed to continue...
test_exec_priority:
- 1: vping
+ 1: vping_ssh
2: vping_userdata
3: tempest
4: odl
@@ -237,7 +231,7 @@ test-dependencies:
functest:
vims:
scenario: '(ocl)|(odl)|(nosdn)'
- vping:
+ vping_ssh:
vping_userdata:
scenario: '(ocl)|(odl)|(nosdn)'
tempest:
diff --git a/testcases/features/doctor.py b/testcases/features/doctor.py
index 8eb85a808..5669a9900 100644
--- a/testcases/features/doctor.py
+++ b/testcases/features/doctor.py
@@ -71,7 +71,7 @@ def main():
'd': details,
})
functest_utils.push_results_to_db(TEST_DB_URL,
- 'doctor-notification',
+ 'doctor','doctor-notification',
logger, pod_name, scenario,
details)
diff --git a/testcases/functest_utils.py b/testcases/functest_utils.py
index 57ec1863f..94a4fa8a5 100644
--- a/testcases/functest_utils.py
+++ b/testcases/functest_utils.py
@@ -18,7 +18,6 @@ import socket
import subprocess
import sys
import urllib2
-import yaml
from git import Repo
@@ -39,6 +38,7 @@ def check_credentials():
env_vars = ['OS_AUTH_URL', 'OS_USERNAME', 'OS_PASSWORD', 'OS_TENANT_NAME']
return all(map(lambda v: v in os.environ and os.environ[v], env_vars))
+
def get_credentials(service):
"""Returns a creds dictionary filled with the following keys:
* username
@@ -70,7 +70,6 @@ def get_credentials(service):
return creds
-
#*********************************************
# NOVA
#*********************************************
@@ -134,10 +133,10 @@ def get_floating_ips(nova_client):
def create_flavor(nova_client, flavor_name, ram, disk, vcpus):
try:
- flavor = nova_client.flavors.create(flavor_name,ram,vcpus,disk)
+ flavor = nova_client.flavors.create(flavor_name, ram, vcpus, disk)
except Exception, e:
print "Error [create_flavor(nova_client, '%s', '%s', '%s', "\
- "'%s')]:" %(flavor_name,ram, disk, vcpus), e
+ "'%s')]:" % (flavor_name, ram, disk, vcpus), e
return None
return flavor.id
@@ -156,7 +155,7 @@ def create_floating_ip(neutron_client):
def add_floating_ip(nova_client, server_id, floatingip_id):
try:
- nova_client.servers.add_floating_ip(server_id,floatingip_id)
+ nova_client.servers.add_floating_ip(server_id, floatingip_id)
return True
except Exception, e:
print "Error [add_floating_ip(nova_client, '%s', '%s')]:" % \
@@ -182,8 +181,6 @@ def delete_floating_ip(nova_client, floatingip_id):
return False
-
-
#*********************************************
# NEUTRON
#*********************************************
@@ -274,7 +271,7 @@ def create_neutron_subnet(neutron_client, name, cidr, net_id):
return subnet['subnets'][0]['id']
except Exception, e:
print "Error [create_neutron_subnet(neutron_client, '%s', '%s', "\
- "'%s')]:" %(name,cidr, net_id), e
+ "'%s')]:" % (name, cidr, net_id), e
return False
@@ -300,7 +297,7 @@ def create_neutron_port(neutron_client, name, network_id, ip):
return port['port']['id']
except Exception, e:
print "Error [create_neutron_port(neutron_client, '%s', '%s', "\
- "'%s')]:" %(name,network_id, ip), e
+ "'%s')]:" % (name, network_id, ip), e
return False
@@ -311,7 +308,7 @@ def update_neutron_net(neutron_client, network_id, shared=False):
return True
except Exception, e:
print "Error [update_neutron_net(neutron_client, '%s', '%s')]:" % \
- (network_id,str(shared)), e
+ (network_id, str(shared)), e
return False
@@ -325,7 +322,7 @@ def update_neutron_port(neutron_client, port_id, device_owner):
return port['port']['id']
except Exception, e:
print "Error [update_neutron_port(neutron_client, '%s', '%s')]:" % \
- (port_id,device_owner), e
+ (port_id, device_owner), e
return False
@@ -336,14 +333,15 @@ def add_interface_router(neutron_client, router_id, subnet_id):
return True
except Exception, e:
print "Error [add_interface_router(neutron_client, '%s', '%s')]:" % \
- (router_id,subnet_id), e
+ (router_id, subnet_id), e
return False
+
def add_gateway_router(neutron_client, router_id):
ext_net_id = get_external_net_id(neutron_client)
router_dict = {'network_id': ext_net_id}
try:
- neutron_client.add_gateway_router(router_id,router_dict)
+ neutron_client.add_gateway_router(router_id, router_dict)
return True
except Exception, e:
print "Error [add_gateway_router(neutron_client, '%s')]:" % router_id, e
@@ -396,7 +394,7 @@ def remove_interface_router(neutron_client, router_id, subnet_id):
return True
except Exception, e:
print "Error [remove_interface_router(neutron_client, '%s', '%s')]:" % \
- (router_id,subnet_id), e
+ (router_id, subnet_id), e
return False
@@ -409,7 +407,6 @@ def remove_gateway_router(neutron_client, router_id):
return False
-
#*********************************************
# SEC GROUPS
#*********************************************
@@ -424,44 +421,43 @@ def get_security_groups(neutron_client):
def create_security_group(neutron_client, sg_name, sg_description):
- json_body= {'security_group' : { 'name' : sg_name, \
- 'description' : sg_description }}
+ json_body = {'security_group': {'name': sg_name,
+ 'description': sg_description}}
try:
secgroup = neutron_client.create_security_group(json_body)
return secgroup['security_group']
except Exception, e:
print "Error [create_security_group(neutron_client, '%s', '%s')]:" % \
- (sg_name,sg_description), e
+ (sg_name, sg_description), e
return False
def create_secgroup_rule(neutron_client, sg_id, direction, protocol,
- port_range_min = None, port_range_max = None):
- if port_range_min == None and port_range_max == None:
- json_body = { 'security_group_rule' : \
- { 'direction' : direction, \
- 'security_group_id' : sg_id, \
- 'protocol' : protocol } }
- elif port_range_min != None and port_range_max != None:
- json_body = { 'security_group_rule' : \
- { 'direction' : direction, \
- 'security_group_id' : sg_id, \
- 'port_range_min': port_range_min, \
- 'port_range_max' : port_range_max, \
- 'protocol' : protocol } }
+ port_range_min=None, port_range_max=None):
+ if port_range_min is None and port_range_max is None:
+ json_body = {'security_group_rule': {'direction': direction,
+ 'security_group_id': sg_id,
+ 'protocol': protocol}}
+ elif port_range_min is not None and port_range_max is not None:
+ json_body = {'security_group_rule': {'direction': direction,
+ 'security_group_id': sg_id,
+ 'port_range_min': port_range_min,
+ 'port_range_max': port_range_max,
+ 'protocol': protocol}}
else:
print "Error [create_secgroup_rule(neutron_client, '%s', '%s', "\
- "'%s', '%s', '%s', '%s')]:" %(neutron_client, sg_id, direction, \
- port_range_min, port_range_max, protocol),\
- " Invalid values for port_range_min, port_range_max"
+ "'%s', '%s', '%s', '%s')]:" % (neutron_client, sg_id, direction, \
+ port_range_min, port_range_max, protocol),\
+ " Invalid values for port_range_min, port_range_max"
return False
try:
neutron_client.create_security_group_rule(json_body)
return True
except Exception, e:
print "Error [create_secgroup_rule(neutron_client, '%s', '%s', "\
- "'%s', '%s', '%s', '%s')]:" %(neutron_client, sg_id, direction, \
- port_range_min, port_range_max, protocol), e
+ "'%s', '%s', '%s', '%s')]:" % (neutron_client, sg_id, direction,
+ port_range_min, port_range_max,
+ protocol), e
return False
@@ -487,7 +483,7 @@ def update_sg_quota(neutron_client, tenant_id, sg_quota, sg_rule_quota):
return True
except Exception, e:
print "Error [update_sg_quota(neutron_client, '%s', '%s', "\
- "'%s')]:" %(tenant_id, sg_quota, sg_rule_quota), e
+ "'%s')]:" % (tenant_id, sg_quota, sg_rule_quota), e
return False
@@ -500,8 +496,6 @@ def delete_security_group(neutron_client, secgroup_id):
return False
-
-
#*********************************************
# GLANCE
#*********************************************
@@ -538,7 +532,7 @@ def create_glance_image(glance_client, image_name, file_path, public=True):
return image.id
except Exception, e:
print "Error [create_glance_image(glance_client, '%s', '%s', "\
- "'%s')]:" %(image_name, file_path, str(public)), e
+ "'%s')]:" % (image_name, file_path, str(public)), e
return False
@@ -551,7 +545,6 @@ def delete_glance_image(nova_client, image_id):
return False
-
#*********************************************
# CINDER
#*********************************************
@@ -594,11 +587,11 @@ def update_cinder_quota(cinder_client, tenant_id, vols_quota,
try:
quotas_default = cinder_client.quotas.update(tenant_id,
- **quotas_values)
+ **quotas_values)
return True
except Exception, e:
print "Error [update_cinder_quota(cinder_client, '%s', '%s', '%s'" \
- "'%s')]:" %(tenant_id, vols_quota, snapshots_quota, gigabytes_quota), e
+ "'%s')]:" % (tenant_id, vols_quota, snapshots_quota, gigabytes_quota), e
return False
@@ -628,7 +621,6 @@ def delete_volume_type(cinder_client, volume_type):
return False
-
#*********************************************
# KEYSTONE
#*********************************************
@@ -701,7 +693,7 @@ def create_user(keystone_client, user_name, user_password,
return user.id
except Exception, e:
print "Error [create_user(keystone_client, '%s', '%s', '%s'" \
- "'%s')]:" %(user_name, user_password, user_email, tenant_id), e
+ "'%s')]:" % (user_name, user_password, user_email, tenant_id), e
return False
@@ -711,7 +703,7 @@ def add_role_user(keystone_client, user_id, role_id, tenant_id):
return True
except Exception, e:
print "Error [add_role_user(keystone_client, '%s', '%s'" \
- "'%s')]:" %(user_id, role_id, tenant_id), e
+ "'%s')]:" % (user_id, role_id, tenant_id), e
return False
@@ -827,14 +819,14 @@ def get_pod_name(logger=None):
return "unknown-pod"
-def push_results_to_db(db_url, case_name, logger, pod_name,
+def push_results_to_db(db_url, project, case_name, logger, pod_name,
version, payload):
"""
POST results to the Result target DB
"""
url = db_url + "/results"
installer = get_installer_type(logger)
- params = {"project_name": "functest", "case_name": case_name,
+ params = {"project_name": project, "case_name": case_name,
"pod_name": pod_name, "installer": installer,
"version": version, "details": payload}
@@ -845,8 +837,8 @@ def push_results_to_db(db_url, case_name, logger, pod_name,
logger.debug(r)
return True
except Exception, e:
- print "Error [push_results_to_db('%s', '%s', '%s', '%s', '%s')]:" \
- % (db_url, case_name, pod_name, version, payload), e
+ print "Error [push_results_to_db('%s', '%s', '%s', '%s', '%s', '%s')]:" \
+ % (db_url, project, case_name, pod_name, version, payload), e
return False
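For reference, callers now pass the project name as the second argument, in line with
the updated invocations elsewhere in this patch (the *details* payload below is only a
placeholder):

    functest_utils.push_results_to_db(TEST_DB,        # results API base URL
                                      "functest",     # project name (new argument)
                                      "vPing",        # case name
                                      logger, pod_name, scenario,
                                      payload=details)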
@@ -861,7 +853,7 @@ def get_resolvconf_ns():
ip = re.search(r"\b(?:[0-9]{1,3}\.){3}[0-9]{1,3}\b", line)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if ip:
- result = sock.connect_ex((ip.group(),53))
+ result = sock.connect_ex((ip.group(), 53))
if result == 0:
nameservers.append(ip.group())
line = rconf.readline()
diff --git a/testcases/tests/TestFunctestUtils.py b/testcases/tests/TestFunctestUtils.py
index 17bc958e3..fd83ed6f5 100644
--- a/testcases/tests/TestFunctestUtils.py
+++ b/testcases/tests/TestFunctestUtils.py
@@ -65,7 +65,10 @@ class TestFunctestUtils(unittest.TestCase):
test = isTestRunnable('functest/odl', functest_yaml)
self.assertTrue(test)
- test = isTestRunnable('functest/vping', functest_yaml)
+ test = isTestRunnable('functest/vping_ssh', functest_yaml)
+ self.assertTrue(test)
+
+ test = isTestRunnable('functest/vping_userdata', functest_yaml)
self.assertTrue(test)
test = isTestRunnable('functest/tempest', functest_yaml)
@@ -82,7 +85,7 @@ class TestFunctestUtils(unittest.TestCase):
test = generateTestcaseList(functest_yaml)
- expected_list = "vping tempest odl doctor promise policy-test odl-vpn_service-tests vims rally "
+ expected_list = "vping_ssh vping_userdata tempest odl doctor promise policy-test odl-vpn_service-tests vims rally "
self.assertEqual(test, expected_list)
def tearDown(self):
diff --git a/testcases/vIMS/CI/vIMS.py b/testcases/vIMS/CI/vIMS.py
index a8ac97f5c..1746d38bb 100644
--- a/testcases/vIMS/CI/vIMS.py
+++ b/testcases/vIMS/CI/vIMS.py
@@ -40,6 +40,9 @@ parser.add_argument("-d", "--debug", help="Debug mode", action="store_true")
parser.add_argument("-r", "--report",
help="Create json result file",
action="store_true")
+parser.add_argument("-n", "--noclean",
+ help="Don't clean the created resources for this test.",
+ action="store_true")
args = parser.parse_args()
""" logging configuration """
@@ -134,7 +137,9 @@ def push_results():
scenario = functest_utils.get_scenario(logger)
pod_name = functest_utils.get_pod_name(logger)
- functest_utils.push_results_to_db(db_url=DB_URL, case_name="vIMS",
+ functest_utils.push_results_to_db(db_url=DB_URL,
+ project="functest",
+ case_name="vIMS",
logger=logger, pod_name=pod_name,
version=scenario,
payload=RESULTS)
@@ -461,6 +466,8 @@ def main():
cfy.undeploy_manager()
############### GENERAL CLEANUP ################
+ if args.noclean:
+ exit(0)
ks_creds = functest_utils.get_credentials("keystone")
diff --git a/testcases/vPing/CI/libraries/vPing2.py b/testcases/vPing/CI/libraries/vPing_ssh.py
index 1ce6dc9e5..3050aad57 100644
--- a/testcases/vPing/CI/libraries/vPing2.py
+++ b/testcases/vPing/CI/libraries/vPing_ssh.py
@@ -37,12 +37,15 @@ parser.add_argument("-d", "--debug", help="Debug mode", action="store_true")
parser.add_argument("-r", "--report",
help="Create json result file",
action="store_true")
+parser.add_argument("-n", "--noclean",
+ help="Don't clean the created resources for this test.",
+ action="store_true")
args = parser.parse_args()
""" logging configuration """
-logger = logging.getLogger('vPing')
+logger = logging.getLogger('vPing_ssh')
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
@@ -198,6 +201,9 @@ def create_private_neutron_net(neutron):
def cleanup(nova, neutron, image_id, network_dic, port_id1, port_id2, secgroup_id):
+ if args.noclean:
+ logger.debug("The OpenStack resources are not deleted.")
+ return True
# delete both VMs
logger.info("Cleaning up...")
@@ -288,6 +294,7 @@ def push_results(start_time_ts, duration, test_status):
scenario = functest_utils.get_scenario(logger)
pod_name = functest_utils.get_pod_name(logger)
functest_utils.push_results_to_db(TEST_DB,
+ "functest",
"vPing",
logger, pod_name, scenario,
payload={'timestart': start_time_ts,
diff --git a/testcases/vPing/CI/libraries/vPing.py b/testcases/vPing/CI/libraries/vPing_userdata.py
index 1368bbec1..90562969b 100644
--- a/testcases/vPing/CI/libraries/vPing.py
+++ b/testcases/vPing/CI/libraries/vPing_userdata.py
@@ -35,12 +35,15 @@ parser.add_argument("-d", "--debug", help="Debug mode", action="store_true")
parser.add_argument("-r", "--report",
help="Create json result file",
action="store_true")
+parser.add_argument("-n", "--noclean",
+ help="Don't clean the created resources for this test.",
+ action="store_true")
args = parser.parse_args()
""" logging configuration """
-logger = logging.getLogger('vPing')
+logger = logging.getLogger('vPing_userdata')
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
@@ -192,6 +195,9 @@ def create_private_neutron_net(neutron):
def cleanup(nova, neutron, image_id, network_dic, port_id1, port_id2):
+ if args.noclean:
+ logger.debug("The OpenStack resources are not deleted.")
+ return True
# delete both VMs
logger.info("Cleaning up...")
@@ -276,6 +282,7 @@ def push_results(start_time_ts, duration, test_status):
scenario = functest_utils.get_scenario(logger)
pod_name = functest_utils.get_pod_name(logger)
functest_utils.push_results_to_db(TEST_DB,
+ "functest",
"vPing_userdata",
logger, pod_name, scenario,
payload={'timestart': start_time_ts,