-rwxr-xr-x  jjb/apex/apex-upload-artifact.sh | 41
-rw-r--r--  jjb/apex/apex.yml | 21
-rw-r--r--  jjb/armband/armband-ci-jobs.yml | 9
-rw-r--r--  jjb/compass4nfv/compass-ci-jobs.yml | 6
-rw-r--r--  jjb/compass4nfv/compass-project-jobs.yml | 4
-rw-r--r--  jjb/domino/domino.yml | 55
-rw-r--r--  jjb/fuel/fuel-ci-jobs.yml | 6
-rw-r--r--  jjb/functest/functest-ci-jobs.yml | 47
-rwxr-xr-x  jjb/functest/functest-loop.sh (renamed from jjb/functest/functest-daily.sh) | 0
-rwxr-xr-x  jjb/functest/set-functest-env.sh | 7
-rw-r--r--  jjb/joid/joid-ci-jobs.yml | 4
-rw-r--r--  jjb/opnfv/slave-params.yml | 28
-rw-r--r--  jjb/yardstick/yardstick-ci-jobs.yml | 2
-rw-r--r--  utils/gpg_import_key.sh | 42
-rw-r--r--  utils/push-test-logs.sh | 4
-rw-r--r--  utils/test/reporting/functest/reporting-status.py | 92
-rw-r--r--  utils/test/reporting/functest/reportingConf.py | 10
-rw-r--r--  utils/test/reporting/functest/reportingUtils.py | 11
-rw-r--r--  utils/test/result_collection_api/docker/Dockerfile | 52
-rwxr-xr-x  utils/test/result_collection_api/docker/prepare-env.sh | 16
-rwxr-xr-x  utils/test/result_collection_api/docker/start-server.sh | 4
-rw-r--r--  utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py | 2
-rw-r--r--  utils/test/result_collection_api/opnfv_testapi/tests/unit/test_project.py | 4
23 files changed, 399 insertions, 68 deletions
diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh
index 0598f5615..ba69f3eb6 100755
--- a/jjb/apex/apex-upload-artifact.sh
+++ b/jjb/apex/apex-upload-artifact.sh
@@ -11,6 +11,32 @@ echo
# source the opnfv.properties to get ARTIFACT_VERSION
source $WORKSPACE/opnfv.properties
+# This is where we import the signing key
+source $WORKSPACE/releng/utils/gpg_import_key.sh
+
+signrpm () {
+for artifact in $RPM_LIST $SRPM_LIST; do
+ echo "Signing artifact: ${artifact}"
+ gpg2 -vvv --batch \
+ --default-key opnfv-helpdesk@rt.linuxfoundation.org \
+ --passphrase besteffort \
+ --detach-sig $artifact
+ gsutil cp "$artifact".sig gs://$GS_URL/$(basename "$artifact".sig)
+ echo "Upload complete for ${artifact} signature"
+done
+}
+
+signiso () {
+time gpg2 -vvv --batch \
+ --default-key opnfv-helpdesk@rt.linuxfoundation.org \
+ --passphrase notreallysecure \
+ --detach-sig $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso
+
+gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig
+echo "ISO signature Upload Complete!"
+}
+
+uploadiso () {
# upload artifact and additional files to google storage
gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log
echo "ISO Upload Complete!"
@@ -26,7 +52,10 @@ VERSION_EXTENSION=$(echo $(basename $OPNFV_SRPM_URL) | sed 's/opnfv-apex-//')
for pkg in common undercloud opendaylight-sfc onos; do
SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
done
+}
+uploadrpm () {
+# This is where we upload the RPMs
for artifact in $RPM_LIST $SRPM_LIST; do
echo "Uploading artifact: ${artifact}"
gsutil cp $artifact gs://$GS_URL/$(basename $artifact) > gsutil.iso.log
@@ -34,6 +63,18 @@ for artifact in $RPM_LIST $SRPM_LIST; do
done
gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log
gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log
+}
+
+if gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then
+ echo "Signing Key avaliable"
+ signiso
+ uploadiso
+ signrpm
+ uploadrpm
+else
+ uploadiso
+ uploadrpm
+fi
echo
echo "--------------------------------------------------------"
diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index 578024e07..3ba8842fc 100644
--- a/jjb/apex/apex.yml
+++ b/jjb/apex/apex.yml
@@ -239,13 +239,13 @@
git-revision: false
block: true
same-node: true
-# - trigger-builds:
-# - project: 'functest-apex-{verify-slave}-suite-{stream1}'
-# predefined-parameters: |
-# DEPLOY_SCENARIO=os-nosdn-nofeature-ha
-# FUNCTEST_SUITE_NAME=vping_userdata
-# block: true
-# same-node: true
+ - trigger-builds:
+ - project: 'functest-apex-{verify-slave}-suite-{stream1}'
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-nofeature-ha
+ FUNCTEST_SUITE_NAME=healthcheck
+ block: true
+ same-node: true
- trigger-builds:
- project: 'apex-deploy-virtual-os-odl_l2-nofeature-ha-{stream1}'
predefined-parameters: |
@@ -254,6 +254,13 @@
git-revision: false
block: true
same-node: true
+ - trigger-builds:
+ - project: 'functest-apex-{verify-slave}-suite-{stream1}'
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l2-nofeature-ha
+ FUNCTEST_SUITE_NAME=healthcheck
+ block: true
+ same-node: true
- 'apex-workspace-cleanup'
- job-template:
diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml
index 3d0db6587..53c652eaf 100644
--- a/jjb/armband/armband-ci-jobs.yml
+++ b/jjb/armband/armband-ci-jobs.yml
@@ -23,6 +23,8 @@
pod:
- arm-pod1:
<<: *brahmaputra
+ - arm-pod2:
+ <<: *brahmaputra
#--------------------------------
# master
#--------------------------------
@@ -193,3 +195,10 @@
name: 'armband-os-odl_l2-nofeature-ha-arm-pod1-brahmaputra-trigger'
triggers:
- timed: '0 18 * * *'
+#---------------------------------------------------------------
+# Enea Armband POD 2 Triggers running against brahmaputra branch
+#---------------------------------------------------------------
+- trigger:
+ name: 'armband-os-odl_l2-nofeature-ha-arm-pod2-brahmaputra-trigger'
+ triggers:
+ - timed: ''
diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml
index 4bbea4952..52d678554 100644
--- a/jjb/compass4nfv/compass-ci-jobs.yml
+++ b/jjb/compass4nfv/compass-ci-jobs.yml
@@ -105,6 +105,8 @@
- '{auto-trigger-name}'
builders:
+ - description-setter:
+ description: "POD: $NODE_NAME"
- trigger-builds:
- project: 'compass-deploy-{pod}-daily-{stream}'
current-parameters: true
@@ -183,6 +185,8 @@
builders:
+ - description-setter:
+ description: "POD: $NODE_NAME"
- shell:
!include-raw-escape: ./compass-download-artifact.sh
- shell:
@@ -211,8 +215,8 @@
- choice:
name: COMPASS_OPENSTACK_VERSION
choices:
- - 'liberty'
- 'mitaka'
+ - 'liberty'
########################
# trigger macros
diff --git a/jjb/compass4nfv/compass-project-jobs.yml b/jjb/compass4nfv/compass-project-jobs.yml
index a0438ee1f..da286878e 100644
--- a/jjb/compass4nfv/compass-project-jobs.yml
+++ b/jjb/compass4nfv/compass-project-jobs.yml
@@ -100,7 +100,7 @@
- project: 'functest-{slave-label}-suite-{stream}'
current-parameters: true
predefined-parameters:
- FUNCTEST_SUITE_NAME=vping_userdata
+ FUNCTEST_SUITE_NAME=healthcheck
same-node: true
block: true
block-thresholds:
@@ -243,8 +243,8 @@
- choice:
name: COMPASS_OPENSTACK_VERSION
choices:
- - 'liberty'
- 'mitaka'
+ - 'liberty'
- choice:
name: COMPASS_OS_VERSION
choices:
diff --git a/jjb/domino/domino.yml b/jjb/domino/domino.yml
new file mode 100644
index 000000000..29e171b80
--- /dev/null
+++ b/jjb/domino/domino.yml
@@ -0,0 +1,55 @@
+- project:
+ name: domino
+
+ project: '{name}'
+
+ jobs:
+ - 'domino-verify-{stream}'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+
+- job-template:
+ name: 'domino-verify-{stream}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - gerrit-parameter:
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
+
+ scm:
+ - gerrit-trigger-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: '$GERRIT_REFSPEC'
+ choosing-strategy: 'gerrit'
+
+ triggers:
+ - gerrit:
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**|.gitignore'
+
+ builders:
+ - shell: |
+ #!/bin/bash
+ ./tests/run.sh
diff --git a/jjb/fuel/fuel-ci-jobs.yml b/jjb/fuel/fuel-ci-jobs.yml
index e78be54b7..de7ca6af1 100644
--- a/jjb/fuel/fuel-ci-jobs.yml
+++ b/jjb/fuel/fuel-ci-jobs.yml
@@ -113,6 +113,8 @@
gs-pathname: '{gs-pathname}'
builders:
+ - description-setter:
+ description: "POD: $NODE_NAME"
- trigger-builds:
- project: 'fuel-deploy-{pod}-daily-{stream}'
current-parameters: false
@@ -188,6 +190,8 @@
name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
builders:
+ - description-setter:
+ description: "POD: $NODE_NAME"
- shell:
!include-raw-escape: ./fuel-download-artifact.sh
- shell:
@@ -316,7 +320,7 @@
- trigger:
name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-daily-master-trigger'
triggers:
- - timed: '0 12 * * *'
+ - timed: '15 9 * * *'
- trigger:
name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-daily-master-trigger'
triggers:
diff --git a/jjb/functest/functest-ci-jobs.yml b/jjb/functest/functest-ci-jobs.yml
index 32251b8cb..f9cf01162 100644
--- a/jjb/functest/functest-ci-jobs.yml
+++ b/jjb/functest/functest-ci-jobs.yml
@@ -134,6 +134,7 @@
testsuite:
- 'daily'
+ - 'weekly'
- 'suite'
jobs:
@@ -183,6 +184,8 @@
branch: '{branch}'
builders:
+ - description-setter:
+ description: "POD: $NODE_NAME"
- 'functest-{testsuite}-builder'
########################
@@ -194,7 +197,14 @@
- string:
name: FUNCTEST_SUITE_NAME
default: 'daily'
- description: "Suite name to run"
+ description: "Daily suite name to run"
+- parameter:
+ name: functest-weekly-parameter
+ parameters:
+ - string:
+ name: FUNCTEST_SUITE_NAME
+ default: 'weekly'
+ description: "Weekly suite name to run"
- parameter:
name: functest-suite-parameter
parameters:
@@ -202,16 +212,19 @@
name: FUNCTEST_SUITE_NAME
choices:
- 'healthcheck'
- - 'tempest'
- - 'rally'
+ - 'vping_userdata'
+ - 'vping_ssh'
+ - 'tempest_smoke_serial'
+ - 'rally_sanity'
- 'odl'
- 'onos'
- - 'ovno'
- 'promise'
- 'doctor'
+ - 'bgpvpn'
+ - 'security_scan'
+ - 'tempest_full_parallel'
+ - 'rally_full'
- 'vims'
- - 'vping_userdata'
- - 'vping_ssh'
- parameter:
name: functest-parameter
parameters:
@@ -251,6 +264,14 @@
- 'functest-store-results'
- builder:
+ name: functest-weekly-builder
+ builders:
+ - 'functest-cleanup'
+ - 'set-functest-env'
+ - 'functest-weekly'
+ - 'functest-store-results'
+
+- builder:
name: functest-suite-builder
builders:
- 'functest-cleanup'
@@ -258,16 +279,22 @@
- 'functest-suite'
- builder:
- name: functest-suite
+ name: functest-daily
builders:
- shell:
- !include-raw: ./functest-suite.sh
+ !include-raw: ./functest-loop.sh
- builder:
- name: functest-daily
+ name: functest-weekly
builders:
- shell:
- !include-raw: ./functest-daily.sh
+ !include-raw: ./functest-loop.sh
+
+- builder:
+ name: functest-suite
+ builders:
+ - shell:
+ !include-raw: ./functest-suite.sh
- builder:
name: set-functest-env
diff --git a/jjb/functest/functest-daily.sh b/jjb/functest/functest-loop.sh
index 5abddfd84..5abddfd84 100755
--- a/jjb/functest/functest-daily.sh
+++ b/jjb/functest/functest-loop.sh
diff --git a/jjb/functest/set-functest-env.sh b/jjb/functest/set-functest-env.sh
index 0b8747a6f..d2e232de5 100755
--- a/jjb/functest/set-functest-env.sh
+++ b/jjb/functest/set-functest-env.sh
@@ -27,6 +27,7 @@ if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
if sudo iptables -C FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
fi
+
elif [[ ${INSTALLER_TYPE} == 'joid' ]]; then
# If production lab then creds may be retrieved dynamically
# creds are on the jumphost, always in the same folder
@@ -34,6 +35,12 @@ elif [[ ${INSTALLER_TYPE} == 'joid' ]]; then
# If dev lab, credentials may not be the default ones, just provide a path to put them into docker
# replace the default one by the customized one provided by jenkins config
fi
+
+# Set iptables rule to allow forwarding return traffic for container
+if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FORWARD | awk 'NR==3' | grep RETURN 2> ${redirect}; then
+ sudo iptables -I FORWARD -j RETURN
+fi
+
echo "Functest: Start Docker and prepare environment"
envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
-e NODE_NAME=${NODE_NAME} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
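The new FORWARD rule above is inserted at position 1 of the chain (iptables -I with no explicit rule number). A quick, purely illustrative way to confirm it on a slave, assuming sudo access:

    # List the FORWARD chain with rule numbers; the RETURN rule added above
    # should show up as rule 1.
    sudo iptables -L FORWARD -n --line-numbers | head -5

    # -C exits 0 only when the rule already exists, which is what makes the
    # guard in set-functest-env.sh idempotent.
    sudo iptables -C FORWARD -j RETURN && echo "RETURN rule present"
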
diff --git a/jjb/joid/joid-ci-jobs.yml b/jjb/joid/joid-ci-jobs.yml
index 6927ad99c..a1e56770b 100644
--- a/jjb/joid/joid-ci-jobs.yml
+++ b/jjb/joid/joid-ci-jobs.yml
@@ -116,6 +116,8 @@
default: '{scenario}'
builders:
+ - description-setter:
+ description: "POD: $NODE_NAME"
- trigger-builds:
- project: 'joid-deploy-{pod}-daily-{stream}'
current-parameters: true
@@ -187,6 +189,8 @@
branch: '{branch}'
builders:
+ - description-setter:
+ description: "POD: $NODE_NAME"
- 'builder-macro'
########################
# builder macros
diff --git a/jjb/opnfv/slave-params.yml b/jjb/opnfv/slave-params.yml
index da0808b76..e5313c801 100644
--- a/jjb/opnfv/slave-params.yml
+++ b/jjb/opnfv/slave-params.yml
@@ -621,6 +621,34 @@
name: LAB_CONFIG_URL
default: ssh://git@git.enea.com/pharos/lab-config
description: 'Base URI to the configuration directory'
+
+- parameter:
+ name: 'arm-pod2-defaults'
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - arm-pod2
+ default-slaves:
+ - arm-pod2
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: DEFAULT_BRIDGE
+ default: 'admin_br0,public_br0'
+    description: 'The bridge to use for Fuel PXE booting. It can be a comma separated list of bridges, in which case the first one is the PXE boot bridge and all subsequent bridges are added as interfaces to the VM. If left empty, most deploy scripts will default to pxebr.'
+ - string:
+ name: DEPLOY_TIMEOUT
+ default: '360'
+ description: 'Deployment timeout in minutes'
+ - string:
+ name: LAB_CONFIG_URL
+ default: ssh://git@git.enea.com/pharos/lab-config
+ description: 'Base URI to the configuration directory'
+
- parameter:
name: 'opnfv-build-centos-defaults'
parameters:
diff --git a/jjb/yardstick/yardstick-ci-jobs.yml b/jjb/yardstick/yardstick-ci-jobs.yml
index 4f98e2c22..8b8ced1ba 100644
--- a/jjb/yardstick/yardstick-ci-jobs.yml
+++ b/jjb/yardstick/yardstick-ci-jobs.yml
@@ -211,6 +211,8 @@
branch: '{branch}'
builders:
+ - description-setter:
+ description: "POD: $NODE_NAME"
- 'yardstick-cleanup'
#- 'yardstick-fetch-os-creds'
- 'yardstick-{testsuite}'
diff --git a/utils/gpg_import_key.sh b/utils/gpg_import_key.sh
new file mode 100644
index 000000000..3afeda839
--- /dev/null
+++ b/utils/gpg_import_key.sh
@@ -0,0 +1,42 @@
+#!/bin/bash -e
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 NEC and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+function isinstalled {
+if rpm -q "$@" >/dev/null 2>&1; then
+ true
+ else
+ echo installing "$1"
+ sudo yum install "$1"
+ false
+fi
+}
+
+if ! isinstalled gnupg2; then
+ echo "error with install"
+ exit 1
+fi
+
+if ! which gsutil;
+ then echo "error gsutil not installed";
+ exit 1
+fi
+
+if gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then
+ echo "Key Already available"
+else
+ if [ -z "$NODE_NAME" ];
+ then echo "Cannot find node name"
+ exit 1
+ else echo "Importing key for '$NODE_NAME'";
+ gsutil cp gs://opnfv-signing-keys/"$NODE_NAME"-subkey .
+ gpg2 --import "$NODE_NAME"-subkey
+ rm -f "$NODE_NAME"-subkey
+ fi
+fi
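For context, a job consumes this helper by sourcing it, as apex-upload-artifact.sh now does. The sketch below is illustrative only; the NODE_NAME value is just an example and the usual $WORKSPACE/releng checkout is assumed.

    # NODE_NAME must match a subkey stored under gs://opnfv-signing-keys/
    export NODE_NAME=lf-pod1              # example value only
    source "$WORKSPACE/releng/utils/gpg_import_key.sh"

    # After a successful import the signing key is visible in the keyring
    gpg2 --list-keys opnfv-helpdesk@rt.linuxfoundation.org
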
diff --git a/utils/push-test-logs.sh b/utils/push-test-logs.sh
index 745645041..964b41908 100644
--- a/utils/push-test-logs.sh
+++ b/utils/push-test-logs.sh
@@ -17,13 +17,13 @@ res_build_date=${1:-$(date -u +"%Y-%m-%d_%H-%M-%S")}
project=$PROJECT
branch=${GIT_BRANCH##*/}
testbed=$NODE_NAME
-dir_result="${HOME}/opnfv/$project/results"
+dir_result="${HOME}/opnfv/$project/results/${branch}"
# src: https://wiki.opnfv.org/display/INF/Hardware+Infrastructure
# + intel-pod3 (vsperf)
node_list=(\
'lf-pod1' 'lf-pod2' 'intel-pod2' 'intel-pod3' \
'intel-pod5' 'intel-pod6' 'intel-pod7' 'intel-pod8' \
-'ericsson-pod2' 'huawei-pod1')
+'ericsson-pod2' 'huawei-pod1' 'huawei-pod2' 'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4')
if [[ ! " ${node_list[@]} " =~ " ${testbed} " ]]; then
echo "This is not a CI POD. Aborting pushing the logs to artifacts."
diff --git a/utils/test/reporting/functest/reporting-status.py b/utils/test/reporting/functest/reporting-status.py
index 2ce5efbd0..adbee36aa 100644
--- a/utils/test/reporting/functest/reporting-status.py
+++ b/utils/test/reporting/functest/reporting-status.py
@@ -8,6 +8,7 @@
#
import datetime
import jinja2
+import logging
import os
import requests
import sys
@@ -19,7 +20,21 @@ import reportingConf as conf
import testCase as tc
import scenarioResult as sr
-testCases4Validation = []
+# Logger
+logFormatter = logging.Formatter("%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s] %(message)s")
+logger = logging.getLogger()
+
+fileHandler = logging.FileHandler("{0}/{1}".format('.', conf.LOG_FILE))
+fileHandler.setFormatter(logFormatter)
+logger.addHandler(fileHandler)
+
+consoleHandler = logging.StreamHandler()
+consoleHandler.setFormatter(logFormatter)
+logger.addHandler(consoleHandler)
+logger.setLevel(conf.LOG_LEVEL)
+
+# Initialization
+testValid = []
otherTestCases = []
# init just tempest to get the list of scenarios
@@ -28,16 +43,16 @@ tempest = tc.TestCase("tempest_smoke_serial", "functest", -1)
# Retrieve the Functest configuration to detect which tests are relevant
# according to the installer, scenario
-# cf = "https://git.opnfv.org/cgit/functest/plain/ci/config_functest.yaml"
-cf = "https://git.opnfv.org/cgit/functest/plain/ci/testcases.yaml"
+cf = conf.TEST_CONF
response = requests.get(cf)
+
functest_yaml_config = yaml.load(response.text)
-print "****************************************"
-print "* Generating reporting..... *"
-print ("* Data retention = %s days *" % conf.PERIOD)
-print "* *"
-print "****************************************"
+logger.info("****************************************")
+logger.info("* Generating reporting..... *")
+logger.info("* Data retention = %s days *" % conf.PERIOD)
+logger.info("* *")
+logger.info("****************************************")
# Retrieve test cases of Tier 1 (smoke)
config_tiers = functest_yaml_config.get("tiers")
@@ -50,19 +65,22 @@ config_tiers = functest_yaml_config.get("tiers")
for tier in config_tiers:
if tier['order'] > 0 and tier['order'] < 3:
for case in tier['testcases']:
- testCases4Validation.append(tc.TestCase(case['name'],
- "functest",
- case['dependencies']))
+ if case['name'] not in conf.blacklist:
+ testValid.append(tc.TestCase(case['name'],
+ "functest",
+ case['dependencies']))
elif tier['order'] == 3:
for case in tier['testcases']:
- testCases4Validation.append(tc.TestCase(case['name'],
- case['name'],
- case['dependencies']))
+ if case['name'] not in conf.blacklist:
+ testValid.append(tc.TestCase(case['name'],
+ case['name'],
+ case['dependencies']))
elif tier['order'] > 3:
for case in tier['testcases']:
- otherTestCases.append(tc.TestCase(case['name'],
- "functest",
- case['dependencies']))
+ if case['name'] not in conf.blacklist:
+ otherTestCases.append(tc.TestCase(case['name'],
+ "functest",
+ case['dependencies']))
# For all the versions
for version in conf.versions:
@@ -84,27 +102,27 @@ for version in conf.versions:
# Check if test case is runnable / installer, scenario
# for the test case used for Scenario validation
try:
- print ("---------------------------------")
- print ("installer %s, version %s, scenario %s:" %
- (installer, version, s))
+ logger.info("---------------------------------")
+ logger.info("installer %s, version %s, scenario %s:" %
+ (installer, version, s))
# 1) Manage the test cases for the scenario validation
# concretely Tiers 0-3
- for test_case in testCases4Validation:
+ for test_case in testValid:
test_case.checkRunnable(installer, s,
test_case.getConstraints())
- print ("testcase %s is %s" % (test_case.getName(),
- test_case.isRunnable))
+ logger.debug("testcase %s is %s" % (test_case.getName(),
+ test_case.isRunnable))
time.sleep(1)
if test_case.isRunnable:
dbName = test_case.getDbName()
name = test_case.getName()
project = test_case.getProject()
nb_test_runnable_for_this_scenario += 1
- print (" Searching results for case %s " %
- (dbName))
+ logger.info(" Searching results for case %s " %
+ (dbName))
result = utils.getResult(dbName, installer, s, version)
- print " >>>> Test result=" + str(result)
+ logger.info(" >>>> Test score = " + str(result))
test_case.setCriteria(result)
test_case.setIsRunnable(True)
testCases2BeDisplayed.append(tc.TestCase(name,
@@ -120,15 +138,15 @@ for version in conf.versions:
for test_case in otherTestCases:
test_case.checkRunnable(installer, s,
test_case.getConstraints())
- print ("testcase %s is %s" % (test_case.getName(),
- test_case.isRunnable))
+ logger.info("testcase %s is %s" %
+ (test_case.getName(), test_case.isRunnable))
time.sleep(1)
if test_case.isRunnable:
dbName = test_case.getDbName()
name = test_case.getName()
project = test_case.getProject()
- print (" Searching results for case %s " %
- (dbName))
+ logger.info(" Searching results for case %s " %
+ (dbName))
result = utils.getResult(dbName, installer, s, version)
test_case.setCriteria(result)
test_case.setIsRunnable(True)
@@ -141,9 +159,9 @@ for version in conf.versions:
items[s] = testCases2BeDisplayed
except:
- print ("Error: installer %s, version %s, scenario %s" %
- (installer, version, s))
- print "No data available , error %s " % (sys.exc_info()[0])
+ logger.error("Error: installer %s, version %s, scenario %s" %
+ (installer, version, s))
+ logger.error("No data available: %s " % (sys.exc_info()[0]))
# **********************************************
# Evaluate the results for scenario validation
@@ -158,11 +176,11 @@ for version in conf.versions:
s_score = str(scenario_score) + "/" + str(scenario_criteria)
s_status = "KO"
if scenario_score < scenario_criteria:
- print (">>>> scenario not OK, score = %s/%s" %
- (scenario_score, scenario_criteria))
+ logger.info(">>>> scenario not OK, score = %s/%s" %
+ (scenario_score, scenario_criteria))
s_status = "KO"
else:
- print ">>>>> scenario OK, save the information"
+ logger.info(">>>>> scenario OK, save the information")
s_status = "OK"
path_validation_file = ("./release/" + version +
"/validated_scenario_history.txt")
@@ -173,7 +191,7 @@ for version in conf.versions:
f.write(info)
scenario_result_criteria[s] = sr.ScenarioResult(s_status, s_score)
- print "--------------------------"
+ logger.info("--------------------------")
templateLoader = jinja2.FileSystemLoader(os.path.dirname
(os.path.abspath
diff --git a/utils/test/reporting/functest/reportingConf.py b/utils/test/reporting/functest/reportingConf.py
index 649246d64..61410b414 100644
--- a/utils/test/reporting/functest/reportingConf.py
+++ b/utils/test/reporting/functest/reportingConf.py
@@ -10,9 +10,17 @@
#
# ****************************************************
installers = ["apex", "compass", "fuel", "joid"]
-# installers = ["compass"]
+# installers = ["apex"]
+# list of test cases declared in testcases.yaml that must not be
+# taken into account for the scoring
+blacklist = ["odl", "ovno", "security_scan"]
# versions = ["brahmaputra", "master"]
versions = ["master"]
PERIOD = 10
MAX_SCENARIO_CRITERIA = 18
+# get the last 5 test results to determine the success criteria
+NB_TESTS = 5
URL_BASE = 'http://testresults.opnfv.org/test/api/v1/results'
+TEST_CONF = "https://git.opnfv.org/cgit/functest/plain/ci/testcases.yaml"
+LOG_LEVEL = "INFO"
+LOG_FILE = "reporting.log"
diff --git a/utils/test/reporting/functest/reportingUtils.py b/utils/test/reporting/functest/reportingUtils.py
index 0db570f32..2f06b8449 100644
--- a/utils/test/reporting/functest/reportingUtils.py
+++ b/utils/test/reporting/functest/reportingUtils.py
@@ -21,7 +21,8 @@ def getApiResults(case, installer, scenario, version):
# "&period=30&installer=" + installer
url = (reportingConf.URL_BASE + "?case=" + case +
"&period=" + str(reportingConf.PERIOD) + "&installer=" + installer +
- "&scenario=" + scenario + "&version=" + version)
+ "&scenario=" + scenario + "&version=" + version +
+ "&last=" + str(reportingConf.NB_TESTS))
request = Request(url)
try:
@@ -104,7 +105,7 @@ def getResult(testCase, installer, scenario, version):
# print "nb of results:" + str(len(test_results))
for r in test_results:
- # print r["creation_date"]
+ # print r["start_date"]
# print r["criteria"]
scenario_results.append({r["start_date"]: r["criteria"]})
# sort results
@@ -116,7 +117,7 @@ def getResult(testCase, installer, scenario, version):
# 0: 0% success, not passing
test_result_indicator = 0
nbTestOk = getNbtestOk(scenario_results)
- # print "Nb test OK:"+ str(nbTestOk)
+ # print "Nb test OK (last 10 days):"+ str(nbTestOk)
# check that we have at least 4 runs
if nbTestOk < 1:
test_result_indicator = 0
@@ -126,7 +127,9 @@ def getResult(testCase, installer, scenario, version):
# Test the last 4 run
if (len(scenario_results) > 3):
last4runResults = scenario_results[-4:]
- if getNbtestOk(last4runResults):
+ nbTestOkLast4 = getNbtestOk(last4runResults)
+ # print "Nb test OK (last 4 run):"+ str(nbTestOkLast4)
+ if nbTestOkLast4 > 3:
test_result_indicator = 3
else:
test_result_indicator = 2
diff --git a/utils/test/result_collection_api/docker/Dockerfile b/utils/test/result_collection_api/docker/Dockerfile
new file mode 100644
index 000000000..ffee4c231
--- /dev/null
+++ b/utils/test/result_collection_api/docker/Dockerfile
@@ -0,0 +1,52 @@
+#######################################################
+# Docker container for OPNFV-TESTAPI
+#######################################################
+# Purpose: run opnfv-testapi for gathering test results
+#
+# Maintained by SerenaFeng
+# Build:
+# $ docker build -t opnfv/testapi:tag .
+#
+# Execution:
+# $ docker run -dti -p 8000:8000 \
+# -e "swagger_url=http://10.63.243.17:8000" \
+# -e "mongodb_url=mongodb://10.63.243.17:27017/" \
+# -e "api_port=8000"
+# opnfv/testapi:tag
+#
+# NOTE: providing swagger_url, api_port, mongodb_url is optional.
+#       If not provided, the defaults configured in config.ini
+#       are used.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+FROM ubuntu:14.04
+MAINTAINER SerenaFeng <feng.xiaowei@zte.com.cn>
+LABEL version="v1" description="OPNFV TestAPI Docker container"
+
+ENV HOME /home
+
+# Packaged dependencies
+RUN apt-get update && apt-get install -y \
+curl \
+git \
+gcc \
+wget \
+python-dev \
+python-pip \
+crudini \
+--no-install-recommends
+
+RUN pip install --upgrade pip
+
+RUN git config --global http.sslVerify false
+RUN git clone https://gerrit.opnfv.org/gerrit/releng /home/releng
+
+WORKDIR /home/releng/utils/test/result_collection_api/
+RUN pip install -r requirements.txt
+RUN python setup.py install
+CMD ["bash", "docker/start-server.sh"]
diff --git a/utils/test/result_collection_api/docker/prepare-env.sh b/utils/test/result_collection_api/docker/prepare-env.sh
new file mode 100755
index 000000000..99433cc8c
--- /dev/null
+++ b/utils/test/result_collection_api/docker/prepare-env.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+FILE=/etc/opnfv_testapi/config.ini
+
+
+if [ "$mongodb_url" != "" ]; then
+ sudo crudini --set --existing $FILE mongo url $mongodb_url
+fi
+
+if [ "$swagger_url" != "" ]; then
+ sudo crudini --set --existing $FILE swagger base_url $swagger_url
+fi
+
+if [ "$api_port" != "" ];then
+ sudo crudini --set --existing $FILE api port $api_port
+fi
+
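Putting the two docker scripts together: the container can be started with the overrides described in the Dockerfile header, and the live config can then be read back with crudini. The commands below are only a sketch that reuses the sample values from that header.

    docker run -dti -p 8000:8000 \
        -e "mongodb_url=mongodb://10.63.243.17:27017/" \
        -e "swagger_url=http://10.63.243.17:8000" \
        -e "api_port=8000" \
        --name testapi opnfv/testapi:tag

    # prepare-env.sh runs at container start; inspect the resulting config
    docker exec testapi crudini --get /etc/opnfv_testapi/config.ini mongo url
    docker exec testapi crudini --get /etc/opnfv_testapi/config.ini swagger base_url
    docker exec testapi crudini --get /etc/opnfv_testapi/config.ini api port
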
diff --git a/utils/test/result_collection_api/docker/start-server.sh b/utils/test/result_collection_api/docker/start-server.sh
new file mode 100755
index 000000000..8bf6084ae
--- /dev/null
+++ b/utils/test/result_collection_api/docker/start-server.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+bash docker/prepare-env.sh
+opnfv-testapi
diff --git a/utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py b/utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py
index 148a803da..400b84ac1 100644
--- a/utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py
+++ b/utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py
@@ -112,7 +112,7 @@ class ResultsCLHandler(GenericResultHandler):
@type period: L{string}
@in period: query
@required period: False
- @param last: last days
+    @param last: return only the N most recent records
@type last: L{string}
@in last: query
@required last: False
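The last parameter is what reportingUtils.getApiResults() now appends to its query. An illustrative request against URL_BASE, with placeholder values taken from reportingConf.py and the scenario names used elsewhere in this change:

    # Returns only the 5 most recent records for this case/installer/scenario
    curl -s "http://testresults.opnfv.org/test/api/v1/results?case=tempest_smoke_serial&period=10&installer=fuel&scenario=os-odl_l2-nofeature-ha&version=master&last=5"
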
diff --git a/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_project.py b/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_project.py
index d47306093..327ddf7b2 100644
--- a/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_project.py
+++ b/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_project.py
@@ -10,7 +10,7 @@ import unittest
from test_base import TestBase
from opnfv_testapi.resources.project_models import ProjectCreateRequest, \
- Project, Projects
+ Project, Projects, ProjectUpdateRequest
from opnfv_testapi.common.constants import HTTP_OK, HTTP_BAD_REQUEST, \
HTTP_FORBIDDEN, HTTP_NOT_FOUND
@@ -112,7 +112,7 @@ class TestProjectUpdate(TestProjectBase):
code, body = self.get(self.req_d.name)
_id = body._id
- req = ProjectCreateRequest('newName', 'new description')
+ req = ProjectUpdateRequest('newName', 'new description')
code, body = self.update(req, self.req_d.name)
self.assertEqual(code, HTTP_OK)
self.assertEqual(_id, body._id)