Diffstat:
-rwxr-xr-x  jjb/apex/apex-upload-artifact.sh | 16
-rw-r--r--  jjb/armband/armband-ci-jobs.yml | 25
-rwxr-xr-x  jjb/armband/armband-download-artifact.sh | 5
-rw-r--r--  jjb/armband/armband-project-jobs.yml | 2
-rwxr-xr-x  jjb/armband/upload-artifacts.sh | 9
-rw-r--r--  jjb/bottlenecks/bottlenecks-project-jobs.yml | 2
-rw-r--r--  jjb/compass4nfv/compass-deploy.sh | 2
-rw-r--r--  jjb/compass4nfv/compass-project-jobs.yml | 4
-rw-r--r--  jjb/fuel/fuel-project-jobs.yml | 6
-rw-r--r--  jjb/functest/functest-ci-jobs.yml | 7
-rw-r--r--  jjb/kvmfornfv/kvmfornfv.yml | 4
-rwxr-xr-x  jjb/moon/moon-verify.sh | 3
-rw-r--r--  jjb/moon/moon.yml | 54
-rw-r--r--  jjb/opnfv/artifact-cleanup.yml | 42
-rw-r--r--  jjb/opnfv/installer-params.yml | 16
-rw-r--r--  jjb/opnfv/opnfv-docker.yml | 4
-rw-r--r--  jjb/opnfv/slave-params.yml | 404
-rw-r--r--  jjb/opnfv/test-sign.yml | 42
-rw-r--r--  jjb/releng-macros.yaml | 5
-rwxr-xr-x  jjb/sandbox/basic.sh | 61
-rwxr-xr-x  jjb/sandbox/build.sh | 61
-rwxr-xr-x  jjb/sandbox/deploy.sh | 61
-rwxr-xr-x  jjb/sandbox/functest.sh | 61
-rwxr-xr-x  jjb/sandbox/merge.sh | 61
-rwxr-xr-x  jjb/sandbox/promote.sh | 61
-rw-r--r--  jjb/sandbox/sandbox-daily-jobs.yml | 264
-rw-r--r--  jjb/sandbox/sandbox-merge-jobs.yml | 159
-rw-r--r--  jjb/sandbox/sandbox-verify-jobs.yml | 185
-rw-r--r--  jjb/sandbox/sandbox-weekly-jobs.yml | 264
-rwxr-xr-x  jjb/sandbox/test.sh | 61
-rwxr-xr-x  jjb/sandbox/verify.sh | 29
-rwxr-xr-x  jjb/sandbox/yardstick.sh | 61
-rw-r--r--  jjb/yardstick/yardstick-ci-jobs.yml | 8
-rw-r--r--  jjb/yardstick/yardstick-project-jobs.yml | 4
-rwxr-xr-x  utils/jenkins-jnlp-connect.sh | 12
-rw-r--r--  utils/push-test-logs.sh | 2
-rwxr-xr-x  utils/retention_script.sh | 39
-rwxr-xr-x  utils/test-sign-artifact.sh | 26
-rw-r--r--  utils/test/reporting/functest/reporting-status.py | 68
-rw-r--r--  utils/test/reporting/functest/reporting-tempest.py | 71
-rw-r--r--  utils/test/reporting/functest/reporting-vims.py | 68
-rw-r--r--  utils/test/reporting/functest/reportingConf.py | 8
-rw-r--r--  utils/test/reporting/functest/reportingUtils.py | 38
-rw-r--r--  utils/test/reporting/functest/template/index-status-tmpl.html | 2
-rw-r--r--  utils/test/reporting/functest/testCase.py | 58
-rw-r--r--  utils/test/result_collection_api/etc/config.ini | 2
-rw-r--r--  utils/test/result_collection_api/opnfv_testapi/resources/handlers.py | 5
-rw-r--r--  utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py | 22
-rw-r--r--  utils/test/result_collection_api/opnfv_testapi/resources/result_models.py | 101
-rw-r--r--  utils/test/result_collection_api/opnfv_testapi/tests/unit/fake_pymongo.py | 5
-rw-r--r--  utils/test/result_collection_api/opnfv_testapi/tests/unit/test_fake_pymongo.py | 2
-rw-r--r--  utils/test/result_collection_api/opnfv_testapi/tests/unit/test_result.py | 51
52 files changed, 2156 insertions(+), 477 deletions(-)
diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh
index ba69f3eb6..0dd112bc8 100755
--- a/jjb/apex/apex-upload-artifact.sh
+++ b/jjb/apex/apex-upload-artifact.sh
@@ -11,14 +11,20 @@ echo
# source the opnfv.properties to get ARTIFACT_VERSION
source $WORKSPACE/opnfv.properties
+# clone releng repository
+echo "Cloning releng repository..."
+[ -d releng ] && rm -rf releng
+git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
# this is where we import the signing key
-source $WORKSPACE/releng/utils/gpg_import_key.sh
+if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
+ source $WORKSPACE/releng/utils/gpg_import_key.sh
+fi
signrpm () {
for artifact in $RPM_LIST $SRPM_LIST; do
echo "Signing artifact: ${artifact}"
- gpg2 -vvv --batch \
- --default-key opnfv-helpdesk@rt.linuxfoundation.org \
+ gpg2 -vvv --batch --yes --no-tty \
+ --default-key opnfv-helpdesk@rt.linuxfoundation.org \
--passphrase besteffort \
--detach-sig $artifact
gsutil cp "$artifact".sig gs://$GS_URL/$(basename "$artifact".sig)
@@ -27,9 +33,9 @@ done
}
signiso () {
-time gpg2 -vvv --batch \
+time gpg2 -vvv --batch --yes --no-tty \
--default-key opnfv-helpdesk@rt.linuxfoundation.org \
- --passphrase notreallysecure \
+ --passphrase besteffort \
--detach-sig $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso
gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig
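For reference, a minimal sketch of the detach-sign/verify round trip the script above relies on; the passphrase and artifact name here are illustrative placeholders, not the production values:

    # sign an artifact non-interactively, then verify the detached signature
    KEY_ID="opnfv-helpdesk@rt.linuxfoundation.org"
    PASSPHRASE="example-passphrase"   # placeholder only
    ARTIFACT="opnfv.iso"              # placeholder only
    # --batch/--yes/--no-tty let gpg2 run unattended under Jenkins
    gpg2 --batch --yes --no-tty \
         --default-key "$KEY_ID" \
         --passphrase "$PASSPHRASE" \
         --detach-sig "$ARTIFACT"
    # the signature is written alongside the artifact as <name>.sig
    gpg2 --verify "${ARTIFACT}.sig" "$ARTIFACT"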
diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml
index 53c652eaf..9d7c198d0 100644
--- a/jjb/armband/armband-ci-jobs.yml
+++ b/jjb/armband/armband-ci-jobs.yml
@@ -28,16 +28,24 @@
#--------------------------------
# master
#--------------------------------
-# No master deploys for now
-# - arm-pod1:
-# <<: *master
+ pod:
+ - arm-pod1:
+ <<: *master
+ - arm-pod2:
+ <<: *master
#--------------------------------
# scenarios
#--------------------------------
scenario:
# HA scenarios
+ - 'os-nosdn-nofeature-ha':
+ auto-trigger-name: 'daily-trigger-disabled'
- 'os-odl_l2-nofeature-ha':
auto-trigger-name: 'armband-{scenario}-{pod}-{stream}-trigger'
+ - 'os-odl_l3-nofeature-ha':
+ auto-trigger-name: 'daily-trigger-disabled'
+ - 'os-odl_l2-bgpvpn-ha':
+ auto-trigger-name: 'daily-trigger-disabled'
# NOHA scenarios
- 'os-odl_l2-nofeature-noha':
@@ -187,14 +195,21 @@
name: 'armband-os-odl_l2-nofeature-ha-arm-pod1-master-trigger'
triggers:
- timed: ''
-
#---------------------------------------------------------------
# Enea Armband POD 1 Triggers running against brahmaputra branch
#---------------------------------------------------------------
- trigger:
name: 'armband-os-odl_l2-nofeature-ha-arm-pod1-brahmaputra-trigger'
triggers:
- - timed: '0 18 * * *'
+ - timed: ''
+#----------------------------------------------------------
+# Enea Armband POD 2 Triggers running against master branch
+#----------------------------------------------------------
+# No triggers for master for now
+- trigger:
+ name: 'armband-os-odl_l2-nofeature-ha-arm-pod2-master-trigger'
+ triggers:
+ - timed: ''
#---------------------------------------------------------------
# Enea Armband POD 2 Triggers running against brahmaputra branch
#---------------------------------------------------------------
diff --git a/jjb/armband/armband-download-artifact.sh b/jjb/armband/armband-download-artifact.sh
index 18b55d7a7..7d01c09cf 100755
--- a/jjb/armband/armband-download-artifact.sh
+++ b/jjb/armband/armband-download-artifact.sh
@@ -10,6 +10,9 @@
set -o errexit
set -o pipefail
+# Configurable environment variables:
+# ISOSTORE (/iso_mount/opnfv_ci)
+
if [[ "$JOB_NAME" =~ "merge" ]]; then
echo "Downloading http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties"
# get the properties file for the Armband Fuel ISO built for a merged change
@@ -33,7 +36,7 @@ ISO_FILE=${WORKSPACE}/opnfv.iso
# using ISOs for verify & merge jobs from local storage will be enabled later
if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
# check if we already have the ISO to avoid redownload
- ISOSTORE="/iso_mount/opnfv_ci/${GIT_BRANCH##*/}"
+ ISOSTORE=${ISOSTORE:-/iso_mount/opnfv_ci}/${GIT_BRANCH##*/}
if [[ -f "$ISOSTORE/$OPNFV_ARTIFACT" ]]; then
echo "ISO exists locally. Skipping the download and using the file from ISO store"
ln -s $ISOSTORE/$OPNFV_ARTIFACT ${ISO_FILE}
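The new ISOSTORE line combines two shell parameter expansions: a fallback default and stripping the branch prefix. A quick sketch with made-up values:

    unset ISOSTORE
    GIT_BRANCH="origin/stable/brahmaputra"
    # ${ISOSTORE:-...} falls back when ISOSTORE is unset or empty;
    # ${GIT_BRANCH##*/} drops everything up to the last '/'
    echo "${ISOSTORE:-/iso_mount/opnfv_ci}/${GIT_BRANCH##*/}"   # /iso_mount/opnfv_ci/brahmaputra
    ISOSTORE=/srv/iso
    GIT_BRANCH=origin/master
    echo "${ISOSTORE:-/iso_mount/opnfv_ci}/${GIT_BRANCH##*/}"   # /srv/iso/master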
diff --git a/jjb/armband/armband-project-jobs.yml b/jjb/armband/armband-project-jobs.yml
index 732a9ea4d..764a5d45b 100644
--- a/jjb/armband/armband-project-jobs.yml
+++ b/jjb/armband/armband-project-jobs.yml
@@ -76,7 +76,7 @@
parameters:
- project-parameter:
project: '{project}'
- - 'arm-build1-defaults'
+ - 'opnfv-build-arm-defaults'
- armband-project-parameter:
gs-pathname: '{gs-pathname}'
diff --git a/jjb/armband/upload-artifacts.sh b/jjb/armband/upload-artifacts.sh
index f4e84e9d2..7059ac344 100755
--- a/jjb/armband/upload-artifacts.sh
+++ b/jjb/armband/upload-artifacts.sh
@@ -9,6 +9,9 @@
##############################################################################
set -o pipefail
+# configurable environment variables:
+# ISOSTORE (/iso_mount/opnfv_ci)
+
# check if we built something
if [ -f $WORKSPACE/.noupload ]; then
echo "Nothing new to upload. Exiting."
@@ -19,11 +22,15 @@ fi
# source the opnfv.properties to get ARTIFACT_VERSION
source $WORKSPACE/opnfv.properties
+
# storing ISOs for verify & merge jobs will be done once we get the disk array
if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
# store ISO locally on NFS first
- ISOSTORE="/home/jenkins/opnfv/iso_store"
+ ISOSTORE=${ISOSTORE:-/iso_mount/opnfv_ci}
if [[ -d "$ISOSTORE" ]]; then
+ ISOSTORE=${ISOSTORE}/${GIT_BRANCH##*/}
+ mkdir -p $ISOSTORE
+
# remove all but most recent 3 ISOs first to keep iso_mount clean & tidy
cd $ISOSTORE
ls -tp | grep -v '/' | tail -n +4 | xargs -d '\n' /bin/rm -f --
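The retention one-liner above keeps only the three newest ISOs in the store. Unpacked, and as a dry run that only lists deletion candidates (assumes ISOSTORE is already set as in the script):

    # ls -tp        newest entries first, directories marked with a trailing '/'
    # grep -v '/'   keep plain files only
    # tail -n +4    skip the first three entries, i.e. the three newest ISOs
    ls -tp "$ISOSTORE" | grep -v '/' | tail -n +4
    # piping the same list into "xargs -d '\n' /bin/rm -f --" performs the actual cleanup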
diff --git a/jjb/bottlenecks/bottlenecks-project-jobs.yml b/jjb/bottlenecks/bottlenecks-project-jobs.yml
index ea000d8b9..28b49bc8b 100644
--- a/jjb/bottlenecks/bottlenecks-project-jobs.yml
+++ b/jjb/bottlenecks/bottlenecks-project-jobs.yml
@@ -119,7 +119,7 @@
parameters:
- project-parameter:
project: '{project}'
- - 'ericsson-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
- bottlenecks-parameter:
gs-packagepath: '{gs-packagepath}'
diff --git a/jjb/compass4nfv/compass-deploy.sh b/jjb/compass4nfv/compass-deploy.sh
index 7d09d5311..68a93a15b 100644
--- a/jjb/compass4nfv/compass-deploy.sh
+++ b/jjb/compass4nfv/compass-deploy.sh
@@ -27,6 +27,8 @@ export ISO_URL=file://$BUILD_DIRECTORY/compass.iso
if [[ "${DEPLOY_SCENARIO}" =~ "-ocl" ]]; then
export NETWORK_CONF_FILE=network_ocl.yml
+elif [[ "${DEPLOY_SCENARIO}" =~ "-onos" ]]; then
+ export NETWORK_CONF_FILE=network_onos.yml
else
export NETWORK_CONF_FILE=network.yml
fi
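The same scenario-to-network-config mapping can also be written as a case statement, which stays flat as more scenarios are added; this is only an equivalent sketch, not part of the change:

    case "${DEPLOY_SCENARIO}" in
        *-ocl*)  export NETWORK_CONF_FILE=network_ocl.yml ;;
        *-onos*) export NETWORK_CONF_FILE=network_onos.yml ;;
        *)       export NETWORK_CONF_FILE=network.yml ;;
    esac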
diff --git a/jjb/compass4nfv/compass-project-jobs.yml b/jjb/compass4nfv/compass-project-jobs.yml
index 8cd67ba0b..6e10e2fc2 100644
--- a/jjb/compass4nfv/compass-project-jobs.yml
+++ b/jjb/compass4nfv/compass-project-jobs.yml
@@ -100,7 +100,7 @@
- project: 'functest-{slave-label}-suite-{stream}'
current-parameters: true
predefined-parameters:
- FUNCTEST_SUITE_NAME=vping_userdata
+ FUNCTEST_SUITE_NAME=healthcheck
same-node: true
block: true
block-thresholds:
@@ -160,7 +160,7 @@
- compass-project-parameter:
installer: '{installer}'
gs-pathname: '{gs-pathname}'
- - 'ericsson-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
- '{installer}-defaults'
scm:
diff --git a/jjb/fuel/fuel-project-jobs.yml b/jjb/fuel/fuel-project-jobs.yml
index 67343fb1b..9fc7ab257 100644
--- a/jjb/fuel/fuel-project-jobs.yml
+++ b/jjb/fuel/fuel-project-jobs.yml
@@ -43,7 +43,7 @@
parameters:
- project-parameter:
project: '{project}'
- - 'ericsson-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
- '{installer}-defaults'
- choice:
name: FORCE_BUILD
@@ -92,7 +92,7 @@
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'ericsson-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
- '{installer}-defaults'
- fuel-project-parameter:
gs-pathname: '{gs-pathname}'
@@ -159,7 +159,7 @@
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'ericsson-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
- '{installer}-defaults'
- fuel-project-parameter:
gs-pathname: '{gs-pathname}'
diff --git a/jjb/functest/functest-ci-jobs.yml b/jjb/functest/functest-ci-jobs.yml
index f9cf01162..727419d8e 100644
--- a/jjb/functest/functest-ci-jobs.yml
+++ b/jjb/functest/functest-ci-jobs.yml
@@ -45,13 +45,6 @@
slave-label: fuel-virtual
installer: fuel
<<: *brahmaputra
-
-# just in case if things go wrong
- - lf-pod2:
- slave-label: fuel-baremetal
- installer: fuel
- <<: *master
-
# joid CI PODs
- baremetal:
slave-label: joid-baremetal
diff --git a/jjb/kvmfornfv/kvmfornfv.yml b/jjb/kvmfornfv/kvmfornfv.yml
index b042c56a7..aa8b645f3 100644
--- a/jjb/kvmfornfv/kvmfornfv.yml
+++ b/jjb/kvmfornfv/kvmfornfv.yml
@@ -23,7 +23,7 @@
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'ericsson-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
@@ -62,7 +62,7 @@
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'ericsson-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
diff --git a/jjb/moon/moon-verify.sh b/jjb/moon/moon-verify.sh
new file mode 100755
index 000000000..23bf47c06
--- /dev/null
+++ b/jjb/moon/moon-verify.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+echo "Hello World"
diff --git a/jjb/moon/moon.yml b/jjb/moon/moon.yml
new file mode 100644
index 000000000..0044eb9ec
--- /dev/null
+++ b/jjb/moon/moon.yml
@@ -0,0 +1,54 @@
+- project:
+ name: moon
+
+ project: '{name}'
+
+ jobs:
+ - 'moon-verify-{stream}'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+
+- job-template:
+ name: 'moon-verify-{stream}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - gerrit-parameter:
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
+
+ scm:
+ - gerrit-trigger-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: '$GERRIT_REFSPEC'
+ choosing-strategy: 'gerrit'
+
+ triggers:
+ - gerrit:
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**|.gitignore'
+
+ builders:
+ - shell:
+ !include-raw: ./moon-verify.sh
diff --git a/jjb/opnfv/artifact-cleanup.yml b/jjb/opnfv/artifact-cleanup.yml
new file mode 100644
index 000000000..b0f819145
--- /dev/null
+++ b/jjb/opnfv/artifact-cleanup.yml
@@ -0,0 +1,42 @@
+- project:
+ name: artifact-cleanup
+
+ project: 'releng'
+
+ jobs:
+ - 'artifact-cleanup-daily-{stream}'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+
+
+- job-template:
+ name: 'artifact-cleanup-daily-{stream}'
+
+ # Job template for daily builders
+ #
+ # Required Variables:
+ # stream: branch with - in place of / (eg. stable)
+ # branch: branch (eg. stable)
+ node: master
+
+ disabled: false
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: '{branch}'
+
+ triggers:
+ - timed: 'H H * * *'
+
+ builders:
+ - shell: |
+ $WORKSPACE/utils/retention_script.sh
diff --git a/jjb/opnfv/installer-params.yml b/jjb/opnfv/installer-params.yml
index f95d79f93..60fee9263 100644
--- a/jjb/opnfv/installer-params.yml
+++ b/jjb/opnfv/installer-params.yml
@@ -93,3 +93,19 @@
name: CPU_ARCHITECTURE
default: 'amd64'
description: "CPU Architecture to use for Ubuntu distro "
+
+- parameter:
+ name: 'sandbox-defaults'
+ parameters:
+ - string:
+ name: INSTALLER_IP
+ default: '10.20.0.2'
+ description: 'IP of the installer'
+ - string:
+ name: INSTALLER_TYPE
+ default: sandbox
+ description: 'Installer used for deploying OPNFV on this POD'
+ - string:
+ name: EXTERNAL_NETWORK
+ default: 'admin_floating_net'
+ description: 'external network for test'
diff --git a/jjb/opnfv/opnfv-docker.yml b/jjb/opnfv/opnfv-docker.yml
index 6b4924295..6b4861c01 100644
--- a/jjb/opnfv/opnfv-docker.yml
+++ b/jjb/opnfv/opnfv-docker.yml
@@ -34,7 +34,7 @@
parameters:
- project-parameter:
project: '{project}'
- - 'ericsson-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
- string:
name: PUSH_IMAGE
default: "true"
@@ -77,7 +77,7 @@
parameters:
- project-parameter:
project: 'yardstick'
- - 'ericsson-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
- string:
name: PUSH_IMAGE
default: "true"
diff --git a/jjb/opnfv/slave-params.yml b/jjb/opnfv/slave-params.yml
index e5313c801..b7ecfde29 100644
--- a/jjb/opnfv/slave-params.yml
+++ b/jjb/opnfv/slave-params.yml
@@ -1,3 +1,8 @@
+#####################################################
+# Parameters for slaves using old labels
+# This will be cleaned up once the new job structure and
+# use of the new labels are in place
+#####################################################
- parameter:
name: 'apex-daily-master-defaults'
parameters:
@@ -5,10 +10,6 @@
name: SLAVE_LABEL
default: 'apex-daily-master'
- string:
- name: INSTALLER_VERSION
- default: latest
- description: 'Version of the installer to deploy'
- - string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
@@ -23,9 +24,23 @@
name: SLAVE_LABEL
default: 'apex-verify-master'
- string:
- name: INSTALLER_VERSION
- default: latest
- description: 'Version of the installer to deploy'
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
+- parameter:
+ name: 'lf-pod1-defaults'
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - lf-pod1
+ default-slaves:
+ - lf-pod1
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -34,22 +49,39 @@
name: SSH_KEY
default: /root/.ssh/id_rsa
description: 'SSH key to use for Apex'
+#####################################################
+# Parameters for CI baremetal PODs
+#####################################################
- parameter:
- name: 'fuel-baremetal-defaults'
+ name: 'apex-baremetal-defaults'
parameters:
- label:
name: SLAVE_LABEL
- default: 'fuel-baremetal'
+ default: 'apex-baremetal'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
- parameter:
- name: 'fuel-virtual-defaults'
+ name: 'compass-baremetal-defaults'
parameters:
- label:
name: SLAVE_LABEL
- default: 'fuel-virtual'
+ default: 'compass-baremetal'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+ name: 'fuel-baremetal-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'fuel-baremetal'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -72,26 +104,23 @@
name: EXTERNAL_NETWORK
default: ext-net;flat;10.5.15.5;10.5.15.250;10.5.15.254;10.5.15.0/24
description: "External network to create for pod5 (name;type;first ip;last ip; gateway;network)"
+#####################################################
+# Parameters for CI virtual PODs
+#####################################################
- parameter:
- name: 'joid-virtual-defaults'
+ name: 'apex-virtual-defaults'
parameters:
- label:
name: SLAVE_LABEL
- default: 'joid-virtual'
+ default: 'apex-virtual'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
-- parameter:
- name: 'compass-baremetal-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'compass-baremetal'
- string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
- parameter:
name: 'compass-virtual-defaults'
parameters:
@@ -103,176 +132,112 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- parameter:
- name: 'lf-pod1-defaults'
+ name: 'fuel-virtual-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - lf-pod1
- default-slaves:
- - lf-pod1
- - string:
- name: INSTALLER_VERSION
- default: latest
- description: 'Version of the installer to deploy'
+ - label:
+ name: SLAVE_LABEL
+ default: 'fuel-virtual'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-- parameter:
- name: 'lf-pod2-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - lf-pod2
- default-slaves:
- - lf-pod2
- - string:
- name: GIT_BASE
- default: ssh://gerrit.opnfv.org:29418/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- parameter:
- name: 'ericsson-pod1-defaults'
+ name: 'joid-virtual-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - ericsson-pod1
- default-slaves:
- - ericsson-pod1
+ - label:
+ name: SLAVE_LABEL
+ default: 'joid-virtual'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
+#####################################################
+# Parameters for build slaves
+#####################################################
- parameter:
- name: 'ericsson-pod2-defaults'
+ name: 'opnfv-build-arm-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - ericsson-pod2
- default-slaves:
- - ericsson-pod2
+ - label:
+ name: SLAVE_LABEL
+ default: 'opnfv-build-arm'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
-
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
- parameter:
- name: 'intel-pod2-defaults'
+ name: 'opnfv-build-centos-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod2
- default-slaves:
- - intel-pod2
- - string:
- name: INSTALLER_VERSION
- default: stable
- description: 'Version of the installer to deploy'
+ - label:
+ name: SLAVE_LABEL
+ default: 'opnfv-build-centos'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
- parameter:
- name: 'intel-pod3-defaults'
+ name: 'opnfv-build-ubuntu-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod3
- default-slaves:
- - intel-pod3
+ - label:
+ name: SLAVE_LABEL
+ default: 'opnfv-build-ubuntu'
+ description: 'Slave label on Jenkins'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
-
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
- parameter:
- name: 'intel-pod5-defaults'
+ name: 'huawei-build-defaults'
parameters:
- node:
name: SLAVE_NAME
description: 'Slave name on Jenkins'
allowed-slaves:
- - intel-pod5
+ - huawei-build
default-slaves:
- - intel-pod5
- - string:
- name: INSTALLER_VERSION
- default: stable
- description: 'Version of the installer to deploy'
+ - huawei-build
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: CEPH_DISKS
- default: /srv
- description: "Disks to use by ceph (comma separated list)"
- - string:
- name: EXTERNAL_NETWORK
- default: ext-net;flat;10.5.15.5;10.5.15.250;10.5.15.254;10.5.15.0/24
- description: "External network to create for pod5 (name;type;first ip;last ip; gateway;network)"
-
+#####################################################
+# Parameters for non-CI PODs
+#####################################################
- parameter:
- name: 'intel-pod6-defaults'
+ name: 'ericsson-pod1-defaults'
parameters:
- node:
name: SLAVE_NAME
description: 'Slave name on Jenkins'
allowed-slaves:
- - intel-pod6
+ - ericsson-pod1
default-slaves:
- - intel-pod6
- - string:
- name: INSTALLER_VERSION
- default: latest
- description: 'Version of the installer to deploy'
+ - ericsson-pod1
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: CEPH_DISKS
- default: /srv
- description: "Disks to use by ceph (comma separated list)"
- - string:
- name: EXTERNAL_NETWORK
- default: ext-net;flat;10.6.15.5;10.6.15.250;10.6.15.254;10.6.15.0/24
- description: "External network to create for pod6 (name;type;first ip;last ip; gateway;network)"
-
- parameter:
- name: 'intel-pod7-defaults'
+ name: 'intel-pod2-defaults'
parameters:
- node:
name: SLAVE_NAME
description: 'Slave name on Jenkins'
allowed-slaves:
- - intel-pod7
+ - intel-pod2
default-slaves:
- - intel-pod7
- - string:
- name: INSTALLER_VERSION
- default: latest
- description: 'Version of the installer to deploy'
+ - intel-pod2
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -281,64 +246,20 @@
name: SSH_KEY
default: /root/.ssh/id_rsa
description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'intel-pod8-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod8
- default-slaves:
- - intel-pod8
- - string:
- name: INSTALLER_VERSION
- default: latest
- description: 'Version of the installer to deploy'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- parameter:
- name: 'huawei-build-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-build
- default-slaves:
- - huawei-build
- - string:
- name: INSTALLER_VERSION
- default: stable
- description: 'Version of the installer to deploy'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'huawei-pod1-defaults'
+ name: 'intel-pod3-defaults'
parameters:
- node:
name: SLAVE_NAME
description: 'Slave name on Jenkins'
allowed-slaves:
- - huawei-pod1
+ - intel-pod3
default-slaves:
- - huawei-pod1
- - string:
- name: INSTALLER_VERSION
- default: stable
- description: 'Version of the installer to deploy'
+ - intel-pod3
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
-
- parameter:
name: 'huawei-pod2-defaults'
parameters:
@@ -350,14 +271,9 @@
default-slaves:
- huawei-pod2
- string:
- name: INSTALLER_VERSION
- default: stable
- description: 'Version of the installer to deploy'
- - string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
-
- parameter:
name: 'huawei-pod3-defaults'
parameters:
@@ -372,14 +288,9 @@
name: SLAVE_LABEL
default: 'huawei-test'
- string:
- name: INSTALLER_VERSION
- default: stable
- description: 'Version of the installer to deploy'
- - string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
-
- parameter:
name: 'huawei-pod4-defaults'
parameters:
@@ -394,10 +305,6 @@
name: SLAVE_LABEL
default: 'huawei-test'
- string:
- name: INSTALLER_VERSION
- default: stable
- description: 'Version of the installer to deploy'
- - string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
@@ -415,7 +322,6 @@
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
-
- parameter:
name: 'juniper-pod1-defaults'
parameters:
@@ -427,10 +333,6 @@
default-slaves:
- juniper-pod1
- string:
- name: INSTALLER_VERSION
- default: latest
- description: 'Version of the installer to deploy'
- - string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
@@ -438,7 +340,6 @@
name: CEPH_DISKS
default: /srv
description: "Disks to use by ceph (comma separated list)"
-
- parameter:
name: 'orange-pod2-defaults'
parameters:
@@ -450,10 +351,6 @@
default-slaves:
- orange-pod2
- string:
- name: INSTALLER_VERSION
- default: latest
- description: 'Version of the installer to deploy'
- - string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
@@ -465,7 +362,6 @@
name: EXTERNAL_NETWORK
default: ext-net;flat;161.105.231.2;161.105.231.62;161.105.231.1;161.105.231.0/26
description: "External network to create (name;type;first ip;last ip; gateway;network)"
-
- parameter:
name: 'orange-pod5-defaults'
parameters:
@@ -477,48 +373,9 @@
default-slaves:
- orange-pod5
- string:
- name: INSTALLER_VERSION
- default: latest
- description: 'Version of the installer to deploy'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'ericsson-build-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'ericsson-build'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on these Jenkins Slaves'
-
-- parameter:
- name: 'intel-virtual2-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-virtual2
- default-slaves:
- - intel-virtual2
- - string:
- name: INSTALLER_VERSION
- default: latest
- description: 'Version of the installer to deploy'
- - string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
- parameter:
name: 'dell-pod1-defaults'
parameters:
@@ -530,14 +387,9 @@
default-slaves:
- dell-pod1
- string:
- name: INSTALLER_VERSION
- default: latest
- description: 'Version of the installer to deploy'
- - string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
-
- parameter:
name: 'dell-pod2-defaults'
parameters:
@@ -549,14 +401,9 @@
default-slaves:
- dell-pod2
- string:
- name: INSTALLER_VERSION
- default: latest
- description: 'Version of the installer to deploy'
- - string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
-
- parameter:
name: 'nokia-pod1-defaults'
parameters:
@@ -568,10 +415,6 @@
default-slaves:
- nokia-pod1
- string:
- name: INSTALLER_VERSION
- default: stable
- description: 'Version of the installer to deploy'
- - string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
@@ -579,22 +422,6 @@
name: SSH_KEY
default: /root/.ssh/id_rsa
description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'arm-build1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - arm-build1
- default-slaves:
- - arm-build1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- parameter:
name: 'arm-pod1-defaults'
parameters:
@@ -621,7 +448,6 @@
name: LAB_CONFIG_URL
default: ssh://git@git.enea.com/pharos/lab-config
description: 'Base URI to the configuration directory'
-
- parameter:
name: 'arm-pod2-defaults'
parameters:
@@ -648,13 +474,16 @@
name: LAB_CONFIG_URL
default: ssh://git@git.enea.com/pharos/lab-config
description: 'Base URI to the configuration directory'
-
+#####################################################
+# These slaves are just dummy slaves for sandbox jobs
+#####################################################
- parameter:
- name: 'opnfv-build-centos-defaults'
+ name: 'sandbox-baremetal-defaults'
parameters:
- label:
name: SLAVE_LABEL
- default: 'opnfv-build-centos'
+ default: 'sandbox-baremetal'
+ description: 'Slave label on Jenkins'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -664,11 +493,26 @@
default: $WORKSPACE/build_output
description: "Directory where the build artifact will be located upon the completion of the build."
- parameter:
- name: 'opnfv-build-ubuntu-defaults'
+ name: 'sandbox-virtual-defaults'
parameters:
- label:
name: SLAVE_LABEL
- default: 'opnfv-build-ubuntu'
+ default: 'sandbox-virtual'
+ description: 'Slave label on Jenkins'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
+- parameter:
+ name: 'dummy-pod1-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'dummy-pod1'
description: 'Slave label on Jenkins'
- string:
name: GIT_BASE
diff --git a/jjb/opnfv/test-sign.yml b/jjb/opnfv/test-sign.yml
new file mode 100644
index 000000000..b27d75777
--- /dev/null
+++ b/jjb/opnfv/test-sign.yml
@@ -0,0 +1,42 @@
+- project:
+ name: test-sign
+
+ project: 'releng'
+
+ jobs:
+ - 'test-sign-daily-{stream}'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+
+
+- job-template:
+ name: 'test-sign-daily-{stream}'
+
+ # Job template for daily builders
+ #
+ # Required Variables:
+ # stream: branch with - in place of / (eg. stable)
+ # branch: branch (eg. stable)
+ node: master
+
+ disabled: false
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: '{branch}'
+
+ triggers:
+ - timed: 'H H * * *'
+
+ builders:
+ - shell: |
+ $WORKSPACE/utils/test-sign-artifact.sh
diff --git a/jjb/releng-macros.yaml b/jjb/releng-macros.yaml
index b10054cdf..631a33f41 100644
--- a/jjb/releng-macros.yaml
+++ b/jjb/releng-macros.yaml
@@ -62,6 +62,11 @@
- timed: ''
- trigger:
+ name: 'weekly-trigger-disabled'
+ triggers:
+ - timed: ''
+
+- trigger:
name: 'brahmaputra-trigger-daily-enabled'
triggers:
- timed: '0 2 * * *'
diff --git a/jjb/sandbox/basic.sh b/jjb/sandbox/basic.sh
new file mode 100755
index 000000000..2f9be2729
--- /dev/null
+++ b/jjb/sandbox/basic.sh
@@ -0,0 +1,61 @@
+#!/bin/bash
+#set -o errexit
+#set -o nounset
+#set -o pipefail
+
+# get the job type
+# we only support verify, merge, daily and weekly jobs
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+ JOB_TYPE=${BASH_REMATCH[0]}
+else
+ echo "Unable to determine job type!"
+ exit 1
+fi
+
+# do stuff differently based on the job type
+case "$JOB_TYPE" in
+ verify)
+ echo "Running as part of verify job"
+ ;;
+ merge)
+ echo "Running as part of merge job"
+ ;;
+ daily)
+ echo "Running as part of daily job"
+ ;;
+ weekly)
+ echo "Running as part of weekly job"
+ ;;
+ *)
+ echo "Job type $JOB_TYPE is not supported!"
+ exit 1
+esac
+
+# this just shows we can get the patch/commit information
+# no matter what job we are executed by
+cd $WORKSPACE
+echo
+echo "Commit Message is"
+echo "-------------------------------------"
+git log --format=%B -n 1 $(git rev-parse HEAD)
+echo "-------------------------------------"
+echo
+echo "Repo contents"
+echo "-------------------------------------"
+ls -al
+echo "-------------------------------------"
+echo
+echo "Changed files are"
+echo "-------------------------------------"
+git diff origin/master --name-only
+echo "-------------------------------------"
+echo
+echo "Change introduced"
+echo "-------------------------------------"
+git diff origin/master
+echo "-------------------------------------"
+echo
+echo "git show"
+echo "-------------------------------------"
+git show
+echo "-------------------------------------"
diff --git a/jjb/sandbox/build.sh b/jjb/sandbox/build.sh
new file mode 100755
index 000000000..2f9be2729
--- /dev/null
+++ b/jjb/sandbox/build.sh
@@ -0,0 +1,61 @@
+#!/bin/bash
+#set -o errexit
+#set -o nounset
+#set -o pipefail
+
+# get the job type
+# we only support verify, merge, daily and weekly jobs
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+ JOB_TYPE=${BASH_REMATCH[0]}
+else
+ echo "Unable to determine job type!"
+ exit 1
+fi
+
+# do stuff differently based on the job type
+case "$JOB_TYPE" in
+ verify)
+ echo "Running as part of verify job"
+ ;;
+ merge)
+ echo "Running as part of merge job"
+ ;;
+ daily)
+ echo "Running as part of daily job"
+ ;;
+ weekly)
+ echo "Running as part of weekly job"
+ ;;
+ *)
+ echo "Job type $JOB_TYPE is not supported!"
+ exit 1
+esac
+
+# this just shows we can get the patch/commit information
+# no matter what job we are executed by
+cd $WORKSPACE
+echo
+echo "Commit Message is"
+echo "-------------------------------------"
+git log --format=%B -n 1 $(git rev-parse HEAD)
+echo "-------------------------------------"
+echo
+echo "Repo contents"
+echo "-------------------------------------"
+ls -al
+echo "-------------------------------------"
+echo
+echo "Changed files are"
+echo "-------------------------------------"
+git diff origin/master --name-only
+echo "-------------------------------------"
+echo
+echo "Change introduced"
+echo "-------------------------------------"
+git diff origin/master
+echo "-------------------------------------"
+echo
+echo "git show"
+echo "-------------------------------------"
+git show
+echo "-------------------------------------"
diff --git a/jjb/sandbox/deploy.sh b/jjb/sandbox/deploy.sh
new file mode 100755
index 000000000..2f9be2729
--- /dev/null
+++ b/jjb/sandbox/deploy.sh
@@ -0,0 +1,61 @@
+#!/bin/bash
+#set -o errexit
+#set -o nounset
+#set -o pipefail
+
+# get the job type
+# we only support verify, merge, daily and weekly jobs
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+ JOB_TYPE=${BASH_REMATCH[0]}
+else
+ echo "Unable to determine job type!"
+ exit 1
+fi
+
+# do stuff differently based on the job type
+case "$JOB_TYPE" in
+ verify)
+ echo "Running as part of verify job"
+ ;;
+ merge)
+ echo "Running as part of merge job"
+ ;;
+ daily)
+ echo "Running as part of daily job"
+ ;;
+ weekly)
+ echo "Running as part of weekly job"
+ ;;
+ *)
+ echo "Job type $JOB_TYPE is not supported!"
+ exit 1
+esac
+
+# this just shows we can get the patch/commit information
+# no matter what job we are executed by
+cd $WORKSPACE
+echo
+echo "Commit Message is"
+echo "-------------------------------------"
+git log --format=%B -n 1 $(git rev-parse HEAD)
+echo "-------------------------------------"
+echo
+echo "Repo contents"
+echo "-------------------------------------"
+ls -al
+echo "-------------------------------------"
+echo
+echo "Changed files are"
+echo "-------------------------------------"
+git diff origin/master --name-only
+echo "-------------------------------------"
+echo
+echo "Change introduced"
+echo "-------------------------------------"
+git diff origin/master
+echo "-------------------------------------"
+echo
+echo "git show"
+echo "-------------------------------------"
+git show
+echo "-------------------------------------"
diff --git a/jjb/sandbox/functest.sh b/jjb/sandbox/functest.sh
new file mode 100755
index 000000000..2f9be2729
--- /dev/null
+++ b/jjb/sandbox/functest.sh
@@ -0,0 +1,61 @@
+#!/bin/bash
+#set -o errexit
+#set -o nounset
+#set -o pipefail
+
+# get the job type
+# we only support verify, merge, daily and weekly jobs
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+ JOB_TYPE=${BASH_REMATCH[0]}
+else
+ echo "Unable to determine job type!"
+ exit 1
+fi
+
+# do stuff differently based on the job type
+case "$JOB_TYPE" in
+ verify)
+ echo "Running as part of verify job"
+ ;;
+ merge)
+ echo "Running as part of merge job"
+ ;;
+ daily)
+ echo "Running as part of daily job"
+ ;;
+ weekly)
+ echo "Running as part of weekly job"
+ ;;
+ *)
+ echo "Job type $JOB_TYPE is not supported!"
+ exit 1
+esac
+
+# this just shows we can get the patch/commit information
+# no matter what job we are executed by
+cd $WORKSPACE
+echo
+echo "Commit Message is"
+echo "-------------------------------------"
+git log --format=%B -n 1 $(git rev-parse HEAD)
+echo "-------------------------------------"
+echo
+echo "Repo contents"
+echo "-------------------------------------"
+ls -al
+echo "-------------------------------------"
+echo
+echo "Changed files are"
+echo "-------------------------------------"
+git diff origin/master --name-only
+echo "-------------------------------------"
+echo
+echo "Change introduced"
+echo "-------------------------------------"
+git diff origin/master
+echo "-------------------------------------"
+echo
+echo "git show"
+echo "-------------------------------------"
+git show
+echo "-------------------------------------"
diff --git a/jjb/sandbox/merge.sh b/jjb/sandbox/merge.sh
new file mode 100755
index 000000000..2f9be2729
--- /dev/null
+++ b/jjb/sandbox/merge.sh
@@ -0,0 +1,61 @@
+#!/bin/bash
+#set -o errexit
+#set -o nounset
+#set -o pipefail
+
+# get the job type
+# we only support verify, merge, daily and weekly jobs
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+ JOB_TYPE=${BASH_REMATCH[0]}
+else
+ echo "Unable to determine job type!"
+ exit 1
+fi
+
+# do stuff differently based on the job type
+case "$JOB_TYPE" in
+ verify)
+ echo "Running as part of verify job"
+ ;;
+ merge)
+ echo "Running as part of merge job"
+ ;;
+ daily)
+ echo "Running as part of daily job"
+ ;;
+ weekly)
+ echo "Running as part of weekly job"
+ ;;
+ *)
+ echo "Job type $JOB_TYPE is not supported!"
+ exit 1
+esac
+
+# this just shows we can get the patch/commit information
+# no matter what job we are executed by
+cd $WORKSPACE
+echo
+echo "Commit Message is"
+echo "-------------------------------------"
+git log --format=%B -n 1 $(git rev-parse HEAD)
+echo "-------------------------------------"
+echo
+echo "Repo contents"
+echo "-------------------------------------"
+ls -al
+echo "-------------------------------------"
+echo
+echo "Changed files are"
+echo "-------------------------------------"
+git diff origin/master --name-only
+echo "-------------------------------------"
+echo
+echo "Change introduced"
+echo "-------------------------------------"
+git diff origin/master
+echo "-------------------------------------"
+echo
+echo "git show"
+echo "-------------------------------------"
+git show
+echo "-------------------------------------"
diff --git a/jjb/sandbox/promote.sh b/jjb/sandbox/promote.sh
new file mode 100755
index 000000000..2f9be2729
--- /dev/null
+++ b/jjb/sandbox/promote.sh
@@ -0,0 +1,61 @@
+#!/bin/bash
+#set -o errexit
+#set -o nounset
+#set -o pipefail
+
+# get the job type
+# we only support verify, merge, daily and weekly jobs
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+ JOB_TYPE=${BASH_REMATCH[0]}
+else
+ echo "Unable to determine job type!"
+ exit 1
+fi
+
+# do stuff differently based on the job type
+case "$JOB_TYPE" in
+ verify)
+ echo "Running as part of verify job"
+ ;;
+ merge)
+ echo "Running as part of merge job"
+ ;;
+ daily)
+ echo "Running as part of daily job"
+ ;;
+ weekly)
+ echo "Running as part of weekly job"
+ ;;
+ *)
+ echo "Job type $JOB_TYPE is not supported!"
+ exit 1
+esac
+
+# this just shows we can get the patch/commit information
+# no matter what job we are executed by
+cd $WORKSPACE
+echo
+echo "Commit Message is"
+echo "-------------------------------------"
+git log --format=%B -n 1 $(git rev-parse HEAD)
+echo "-------------------------------------"
+echo
+echo "Repo contents"
+echo "-------------------------------------"
+ls -al
+echo "-------------------------------------"
+echo
+echo "Changed files are"
+echo "-------------------------------------"
+git diff origin/master --name-only
+echo "-------------------------------------"
+echo
+echo "Change introduced"
+echo "-------------------------------------"
+git diff origin/master
+echo "-------------------------------------"
+echo
+echo "git show"
+echo "-------------------------------------"
+git show
+echo "-------------------------------------"
diff --git a/jjb/sandbox/sandbox-daily-jobs.yml b/jjb/sandbox/sandbox-daily-jobs.yml
new file mode 100644
index 000000000..fc7244e41
--- /dev/null
+++ b/jjb/sandbox/sandbox-daily-jobs.yml
@@ -0,0 +1,264 @@
+- project:
+ name: 'sandbox-daily-jobs'
+
+ project: 'sandbox'
+
+ installer: 'sandbox'
+
+#--------------------------------
+# BRANCH ANCHORS
+#--------------------------------
+ master: &master
+ stream: master
+ branch: '{stream}'
+ gs-pathname: ''
+#--------------------------------
+# POD, INSTALLER, AND BRANCH MAPPING
+#--------------------------------
+# CI PODs
+#--------------------------------
+ pod:
+ - baremetal:
+ slave-label: sandbox-baremetal
+ <<: *master
+ - virtual:
+ slave-label: fuel-virtual
+ <<: *master
+#--------------------------------
+# Non-CI PODs
+#--------------------------------
+ - dummy-pod1:
+ slave-label: dummy-pod1
+ <<: *master
+#--------------------------------
+# scenarios
+#--------------------------------
+ scenario:
+ # HA scenarios
+ - 'os-nosdn-nofeature-ha':
+ auto-trigger-name: 'daily-trigger-disabled'
+ - 'os-odl_l2-nofeature-ha':
+ auto-trigger-name: 'daily-trigger-disabled'
+
+ jobs:
+ - 'sandbox-{scenario}-{pod}-daily-{stream}'
+ - 'sandbox-deploy-{pod}-daily-{stream}'
+ - 'yardstick-sandbox-{pod}-daily-{stream}'
+ - 'functest-sandbox-{pod}-daily-{stream}'
+
+########################
+# job templates
+########################
+- job-template:
+ name: 'sandbox-{scenario}-{pod}-daily-{stream}'
+
+ concurrent: false
+
+ properties:
+ - throttle:
+ enabled: true
+ max-total: 4
+ max-per-node: 1
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'sandbox-os-.*?-{pod}-daily-{stream}'
+ block-level: 'NODE'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+ triggers:
+ - '{auto-trigger-name}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - '{installer}-defaults'
+ - '{slave-label}-defaults'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: '{scenario}'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - trigger-builds:
+ - project: 'sandbox-deploy-{pod}-daily-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ same-node: true
+ block: true
+ - trigger-builds:
+ - project: 'yardstick-sandbox-{pod}-daily-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ same-node: true
+ block: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'functest-sandbox-{pod}-daily-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+
+- job-template:
+ name: 'sandbox-deploy-{pod}-daily-{stream}'
+
+ concurrent: true
+
+ properties:
+ - throttle:
+ enabled: true
+ max-total: 4
+ max-per-node: 1
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'fuel-deploy-{pod}-daily-{stream}'
+ - 'fuel-deploy-generic-daily-.*'
+ block-level: 'NODE'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - '{installer}-defaults'
+ - '{slave-label}-defaults'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-odl_l2-nofeature-ha'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: '{branch}'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - 'sandbox-deploy-daily-builder'
+
+- job-template:
+ name: 'yardstick-sandbox-{pod}-daily-{stream}'
+
+ concurrent: true
+
+ properties:
+ - throttle:
+ enabled: true
+ max-per-node: 1
+ option: 'project'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER Suite: $YARDSTICK_SUITE_NAME Scenario: $DEPLOY_SCENARIO'
+ - timeout:
+ timeout: 400
+ abort: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - '{installer}-defaults'
+ - '{slave-label}-defaults':
+ installer: '{installer}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-odl_l2-nofeature-ha'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: '{branch}'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - 'yardstick-sandbox-daily-builder'
+
+- job-template:
+ name: 'functest-sandbox-{pod}-daily-{stream}'
+
+ concurrent: true
+
+ properties:
+ - throttle:
+ enabled: true
+ max-per-node: 1
+ option: 'project'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER Suite: $FUNCTEST_SUITE_NAME Scenario: $DEPLOY_SCENARIO'
+ - timeout:
+ timeout: 400
+ abort: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - '{slave-label}-defaults'
+ - '{installer}-defaults'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-odl_l2-nofeature-ha'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: '{branch}'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - 'functest-sandbox-daily-builder'
+
+#####################################
+# builder macros
+#####################################
+- builder:
+ name: 'sandbox-deploy-daily-builder'
+ builders:
+ - shell:
+ !include-raw: ./deploy.sh
+
+- builder:
+ name: 'functest-sandbox-daily-builder'
+ builders:
+ - shell:
+ !include-raw: ./functest.sh
+
+- builder:
+ name: 'yardstick-sandbox-daily-builder'
+ builders:
+ - shell:
+ !include-raw: ./yardstick.sh
diff --git a/jjb/sandbox/sandbox-merge-jobs.yml b/jjb/sandbox/sandbox-merge-jobs.yml
new file mode 100644
index 000000000..69fcb4330
--- /dev/null
+++ b/jjb/sandbox/sandbox-merge-jobs.yml
@@ -0,0 +1,159 @@
+- project:
+ name: 'sandbox-merge-jobs'
+
+ project: 'sandbox'
+
+ installer: 'sandbox'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+
+# what are the verification activities we do for this installer
+ activity:
+ - 'basic'
+ - 'build'
+ - 'promote'
+
+ jobs:
+ - 'sandbox-merge-{stream}'
+ - 'sandbox-merge-{activity}-{stream}'
+
+- job-template:
+ name: 'sandbox-merge-{stream}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - gerrit-parameter:
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
+
+ wrappers:
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+ - timeout:
+ timeout: 360
+ fail: true
+
+ triggers:
+ - gerrit:
+ trigger-on:
+ - change-merged-event
+ - comment-added-contains-event:
+ comment-contains-value: 'remerge'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**'
+ readable-message: true
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - '{project}-merge-builder'
+ - trigger-builds:
+ - project: 'sandbox-merge-basic-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ block: true
+ - trigger-builds:
+ - project: 'sandbox-merge-build-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ block: true
+ - trigger-builds:
+ - project: 'sandbox-merge-promote-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ block: true
+
+- job-template:
+ name: 'sandbox-merge-{activity}-{stream}'
+
+ scm:
+ - gerrit-trigger-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ choosing-strategy: 'default'
+
+ wrappers:
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+ - timeout:
+ timeout: 360
+ fail: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - gerrit-parameter:
+ branch: '{branch}'
+ - '{installer}-defaults'
+ - '{project}-merge-{activity}-parameter'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - '{project}-merge-{activity}-builder'
+
+#####################################
+# parameter builders
+#####################################
+- parameter:
+ name: 'sandbox-merge-basic-parameter'
+ parameters:
+ - 'opnfv-build-ubuntu-defaults'
+
+- parameter:
+ name: 'sandbox-merge-build-parameter'
+ parameters:
+ - 'opnfv-build-ubuntu-defaults'
+
+- parameter:
+ name: 'sandbox-merge-promote-parameter'
+ parameters:
+ - 'opnfv-build-centos-defaults'
+#####################################
+# builder builders
+#####################################
+- builder:
+ name: 'sandbox-merge-builder'
+ builders:
+ - shell:
+ !include-raw: ./merge.sh
+
+- builder:
+ name: 'sandbox-merge-basic-builder'
+ builders:
+ - shell:
+ !include-raw: ./basic.sh
+
+- builder:
+ name: 'sandbox-merge-build-builder'
+ builders:
+ - shell:
+ !include-raw: ./build.sh
+
+- builder:
+ name: 'sandbox-merge-promote-builder'
+ builders:
+ - shell:
+ !include-raw: ./promote.sh
diff --git a/jjb/sandbox/sandbox-verify-jobs.yml b/jjb/sandbox/sandbox-verify-jobs.yml
new file mode 100644
index 000000000..a899193d7
--- /dev/null
+++ b/jjb/sandbox/sandbox-verify-jobs.yml
@@ -0,0 +1,185 @@
+- project:
+ name: 'sandbox-verify-jobs'
+
+ project: 'sandbox'
+
+ installer: 'sandbox'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+
+# what are the verification activities we do for this installer
+ activity:
+ - 'basic'
+ - 'build'
+ - 'deploy'
+ - 'test'
+
+ jobs:
+ - 'sandbox-verify-{stream}'
+ - 'sandbox-verify-{activity}-{stream}'
+
+- job-template:
+ name: 'sandbox-verify-{stream}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - gerrit-parameter:
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
+
+ wrappers:
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+ - timeout:
+ timeout: 360
+ fail: true
+
+ triggers:
+ - gerrit:
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**|.gitignore'
+ readable-message: true
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - '{project}-verify-builder'
+ - trigger-builds:
+ - project: 'sandbox-verify-basic-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ block: true
+ - trigger-builds:
+ - project: 'sandbox-verify-build-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ block: true
+ - trigger-builds:
+ - project: 'sandbox-verify-deploy-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ block: true
+ - trigger-builds:
+ - project: 'sandbox-verify-test-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ block: true
+
+- job-template:
+ name: 'sandbox-verify-{activity}-{stream}'
+
+ scm:
+ - gerrit-trigger-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: '$GERRIT_REFSPEC'
+ choosing-strategy: 'gerrit'
+
+ wrappers:
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+ - timeout:
+ timeout: 360
+ fail: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - gerrit-parameter:
+ branch: '{branch}'
+ - '{installer}-defaults'
+ - '{project}-verify-{activity}-parameter'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - '{project}-verify-{activity}-builder'
+
+#####################################
+# parameter builders
+#####################################
+- parameter:
+ name: 'sandbox-verify-basic-parameter'
+ parameters:
+ - 'opnfv-build-ubuntu-defaults'
+
+- parameter:
+ name: 'sandbox-verify-build-parameter'
+ parameters:
+ - 'opnfv-build-ubuntu-defaults'
+
+- parameter:
+ name: 'sandbox-verify-deploy-parameter'
+ parameters:
+ - 'opnfv-build-centos-defaults'
+
+- parameter:
+ name: 'sandbox-verify-test-parameter'
+ parameters:
+ - 'opnfv-build-centos-defaults'
+#####################################
+# builder macros
+#####################################
+- builder:
+ name: 'sandbox-verify-builder'
+ builders:
+ - shell:
+ !include-raw: ./verify.sh
+
+- builder:
+ name: 'sandbox-verify-basic-builder'
+ builders:
+ - shell:
+ !include-raw: ./basic.sh
+
+- builder:
+ name: 'sandbox-verify-build-builder'
+ builders:
+ - shell:
+ !include-raw: ./build.sh
+
+- builder:
+ name: 'sandbox-verify-deploy-builder'
+ builders:
+ - shell:
+ !include-raw: ./deploy.sh
+
+- builder:
+ name: 'sandbox-verify-test-builder'
+ builders:
+ - shell:
+ !include-raw: ./test.sh
diff --git a/jjb/sandbox/sandbox-weekly-jobs.yml b/jjb/sandbox/sandbox-weekly-jobs.yml
new file mode 100644
index 000000000..52f852923
--- /dev/null
+++ b/jjb/sandbox/sandbox-weekly-jobs.yml
@@ -0,0 +1,264 @@
+- project:
+ name: 'sandbox-weekly-jobs'
+
+ project: 'sandbox'
+
+ installer: 'sandbox'
+
+#--------------------------------
+# BRANCH ANCHORS
+#--------------------------------
+ master: &master
+ stream: master
+ branch: '{stream}'
+ gs-pathname: ''
+#--------------------------------
+# POD, INSTALLER, AND BRANCH MAPPING
+#--------------------------------
+# CI PODs
+#--------------------------------
+ pod:
+ - baremetal:
+ slave-label: sandbox-baremetal
+ <<: *master
+ - virtual:
+ slave-label: fuel-virtual
+ <<: *master
+#--------------------------------
+# Non-CI PODs
+#--------------------------------
+ - dummy-pod1:
+ slave-label: dummy-pod1
+ <<: *master
+#--------------------------------
+# scenarios
+#--------------------------------
+ scenario:
+ # HA scenarios
+ - 'os-nosdn-nofeature-ha':
+ auto-trigger-name: 'weekly-trigger-disabled'
+ - 'os-odl_l2-nofeature-ha':
+ auto-trigger-name: 'weekly-trigger-disabled'
+
+ jobs:
+ - 'sandbox-{scenario}-{pod}-weekly-{stream}'
+ - 'sandbox-deploy-{pod}-weekly-{stream}'
+ - 'yardstick-sandbox-{pod}-weekly-{stream}'
+ - 'functest-sandbox-{pod}-weekly-{stream}'
+
+########################
+# job templates
+########################
+- job-template:
+ name: 'sandbox-{scenario}-{pod}-weekly-{stream}'
+
+ concurrent: false
+
+ properties:
+ - throttle:
+ enabled: true
+ max-total: 4
+ max-per-node: 1
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'sandbox-os-.*?-{pod}-weekly-{stream}'
+ block-level: 'NODE'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+ triggers:
+ - '{auto-trigger-name}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - '{installer}-defaults'
+ - '{slave-label}-defaults'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: '{scenario}'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - trigger-builds:
+ - project: 'sandbox-deploy-{pod}-weekly-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ same-node: true
+ block: true
+ - trigger-builds:
+ - project: 'yardstick-sandbox-{pod}-weekly-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ same-node: true
+ block: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'functest-sandbox-{pod}-weekly-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+
+- job-template:
+ name: 'sandbox-deploy-{pod}-weekly-{stream}'
+
+ concurrent: true
+
+ properties:
+ - throttle:
+ enabled: true
+ max-total: 4
+ max-per-node: 1
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'fuel-deploy-{pod}-weekly-{stream}'
+ - 'fuel-deploy-generic-weekly-.*'
+ block-level: 'NODE'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - '{installer}-defaults'
+ - '{slave-label}-defaults'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-odl_l2-nofeature-ha'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: '{branch}'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - 'sandbox-deploy-weekly-builder'
+
+- job-template:
+ name: 'yardstick-sandbox-{pod}-weekly-{stream}'
+
+ concurrent: true
+
+ properties:
+ - throttle:
+ enabled: true
+ max-per-node: 1
+ option: 'project'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER Suite: $YARDSTICK_SUITE_NAME Scenario: $DEPLOY_SCENARIO'
+ - timeout:
+ timeout: 400
+ abort: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - '{installer}-defaults'
+ - '{slave-label}-defaults':
+ installer: '{installer}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-odl_l2-nofeature-ha'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: '{branch}'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - 'yardstick-sandbox-weekly-builder'
+
+- job-template:
+ name: 'functest-sandbox-{pod}-weekly-{stream}'
+
+ concurrent: true
+
+ properties:
+ - throttle:
+ enabled: true
+ max-per-node: 1
+ option: 'project'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER Suite: $FUNCTEST_SUITE_NAME Scenario: $DEPLOY_SCENARIO'
+ - timeout:
+ timeout: 400
+ abort: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - '{slave-label}-defaults'
+ - '{installer}-defaults'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-odl_l2-nofeature-ha'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: '{branch}'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - 'functest-sandbox-weekly-builder'
+
+#####################################
+# builder macros
+#####################################
+- builder:
+ name: 'sandbox-deploy-weekly-builder'
+ builders:
+ - shell:
+ !include-raw: ./deploy.sh
+
+- builder:
+ name: 'functest-sandbox-weekly-builder'
+ builders:
+ - shell:
+ !include-raw: ./functest.sh
+
+- builder:
+ name: 'yardstick-sandbox-weekly-builder'
+ builders:
+ - shell:
+ !include-raw: ./yardstick.sh
diff --git a/jjb/sandbox/test.sh b/jjb/sandbox/test.sh
new file mode 100755
index 000000000..2f9be2729
--- /dev/null
+++ b/jjb/sandbox/test.sh
@@ -0,0 +1,61 @@
+#!/bin/bash
+#set -o errexit
+#set -o nounset
+#set -o pipefail
+
+# get the job type
+# we only support verify, merge, daily and weekly jobs
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+ JOB_TYPE=${BASH_REMATCH[0]}
+else
+ echo "Unable to determine job type!"
+ exit 1
+fi
+
+# do stuff differently based on the job type
+case "$JOB_TYPE" in
+ verify)
+ echo "Running as part of verify job"
+ ;;
+ merge)
+ echo "Running as part of merge job"
+ ;;
+ daily)
+ echo "Running as part of daily job"
+ ;;
+ weekly)
+ echo "Running as part of weekly job"
+ ;;
+ *)
+ echo "Job type $JOB_TYPE is not supported!"
+ exit 1
+esac
+
+# this just shows we can get the patch/commit information
+# no matter what job we are executed by
+cd $WORKSPACE
+echo
+echo "Commit Message is"
+echo "-------------------------------------"
+git log --format=%B -n 1 $(git rev-parse HEAD)
+echo "-------------------------------------"
+echo
+echo "Repo contents"
+echo "-------------------------------------"
+ls -al
+echo "-------------------------------------"
+echo
+echo "Changed files are"
+echo "-------------------------------------"
+git diff origin/master --name-only
+echo "-------------------------------------"
+echo
+echo "Change introduced"
+echo "-------------------------------------"
+git diff origin/master
+echo "-------------------------------------"
+echo
+echo "git show"
+echo "-------------------------------------"
+git show
+echo "-------------------------------------"
diff --git a/jjb/sandbox/verify.sh b/jjb/sandbox/verify.sh
new file mode 100755
index 000000000..f62d2b977
--- /dev/null
+++ b/jjb/sandbox/verify.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+# this is where we check the commit message, unit test, etc.
+cd $WORKSPACE
+echo
+echo "Commit Message is"
+echo "-------------------------------------"
+git log --format=%B -n 1 $(git rev-parse HEAD)
+echo "-------------------------------------"
+echo
+echo "Repo contents"
+echo "-------------------------------------"
+ls -al
+echo "-------------------------------------"
+echo
+echo "Changed files are"
+echo "-------------------------------------"
+git diff origin/master --name-only
+echo "-------------------------------------"
+echo
+echo "Change introduced"
+echo "-------------------------------------"
+git diff origin/master
+echo "-------------------------------------"
+echo
+echo "git show"
+echo "-------------------------------------"
+git show
+echo "-------------------------------------"
diff --git a/jjb/sandbox/yardstick.sh b/jjb/sandbox/yardstick.sh
new file mode 100755
index 000000000..2f9be2729
--- /dev/null
+++ b/jjb/sandbox/yardstick.sh
@@ -0,0 +1,61 @@
+#!/bin/bash
+#set -o errexit
+#set -o nounset
+#set -o pipefail
+
+# get the job type
+# we only support verify, merge, daily and weekly jobs
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+ JOB_TYPE=${BASH_REMATCH[0]}
+else
+ echo "Unable to determine job type!"
+ exit 1
+fi
+
+# do stuff differently based on the job type
+case "$JOB_TYPE" in
+ verify)
+ echo "Running as part of verify job"
+ ;;
+ merge)
+ echo "Running as part of merge job"
+ ;;
+ daily)
+ echo "Running as part of daily job"
+ ;;
+ weekly)
+ echo "Running as part of weekly job"
+ ;;
+ *)
+ echo "Job type $JOB_TYPE is not supported!"
+ exit 1
+esac
+
+# this just shows we can get the patch/commit information
+# no matter what job we are executed by
+cd $WORKSPACE
+echo
+echo "Commit Message is"
+echo "-------------------------------------"
+git log --format=%B -n 1 $(git rev-parse HEAD)
+echo "-------------------------------------"
+echo
+echo "Repo contents"
+echo "-------------------------------------"
+ls -al
+echo "-------------------------------------"
+echo
+echo "Changed files are"
+echo "-------------------------------------"
+git diff origin/master --name-only
+echo "-------------------------------------"
+echo
+echo "Change introduced"
+echo "-------------------------------------"
+git diff origin/master
+echo "-------------------------------------"
+echo
+echo "git show"
+echo "-------------------------------------"
+git show
+echo "-------------------------------------"
diff --git a/jjb/yardstick/yardstick-ci-jobs.yml b/jjb/yardstick/yardstick-ci-jobs.yml
index 8b8ced1ba..21a6b80b0 100644
--- a/jjb/yardstick/yardstick-ci-jobs.yml
+++ b/jjb/yardstick/yardstick-ci-jobs.yml
@@ -49,14 +49,6 @@
installer: fuel
auto-trigger-name: 'daily-trigger-disabled'
<<: *brahmaputra
-
-# just in case if things go wrong
- - lf-pod2:
- slave-label: '{pod}'
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
-
# joid CI PODs
- baremetal:
slave-label: joid-baremetal
diff --git a/jjb/yardstick/yardstick-project-jobs.yml b/jjb/yardstick/yardstick-project-jobs.yml
index c6f317386..5973fcec4 100644
--- a/jjb/yardstick/yardstick-project-jobs.yml
+++ b/jjb/yardstick/yardstick-project-jobs.yml
@@ -31,7 +31,7 @@
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'ericsson-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- gerrit-trigger-scm:
@@ -70,7 +70,7 @@
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'ericsson-build-defaults'
+ - 'opnfv-build-ubuntu-defaults'
- string:
name: GS_URL
default: '$GS_BASE{gs-pathname}'
diff --git a/utils/jenkins-jnlp-connect.sh b/utils/jenkins-jnlp-connect.sh
index 8c41620d6..d268a28de 100755
--- a/utils/jenkins-jnlp-connect.sh
+++ b/utils/jenkins-jnlp-connect.sh
@@ -48,6 +48,14 @@ main () {
exit 1
fi
+ if [[ $(whoami) != "root" ]]; then
+ if grep "^Defaults requiretty" /etc/sudoers
+ then echo "please comment out Defaults requiretty from /etc/sudoers"
+ exit 1
+ fi
+ fi
+
+
if [ -d /etc/monit/conf.d ]; then
monitconfdir="/etc/monit/conf.d/"
elif [ -d /etc/monit.d ]; then
@@ -87,7 +95,7 @@ main () {
echo "Writing the following as monit config:"
cat << EOF | tee $monitconfdir/jenkins
check process jenkins with pidfile /var/run/$jenkinsuser/jenkins_jnlp_pid
-start program = "/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'"
+start program = "/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'" with timeout 60 seconds
stop program = "/bin/bash -c '/bin/kill \$(/bin/cat /var/run/$jenkinsuser/jenkins_jnlp_pid)'"
EOF
}
@@ -96,7 +104,7 @@ EOF
#test for diff
if [[ "$(diff $monitconfdir/jenkins <(echo "\
check process jenkins with pidfile /var/run/$jenkinsuser/jenkins_jnlp_pid
-start program = \"/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'\"
+start program = \"/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'\" with timeout 60 seconds
stop program = \"/bin/bash -c '/bin/kill \$(/bin/cat /var/run/$jenkinsuser/jenkins_jnlp_pid)'\"\
") )" ]]; then
echo "Updating monit config..."
diff --git a/utils/push-test-logs.sh b/utils/push-test-logs.sh
index 7486adb40..964b41908 100644
--- a/utils/push-test-logs.sh
+++ b/utils/push-test-logs.sh
@@ -17,7 +17,7 @@ res_build_date=${1:-$(date -u +"%Y-%m-%d_%H-%M-%S")}
project=$PROJECT
branch=${GIT_BRANCH##*/}
testbed=$NODE_NAME
-dir_result="${HOME}/opnfv/$project/results"
+dir_result="${HOME}/opnfv/$project/results/${branch}"
# src: https://wiki.opnfv.org/display/INF/Hardware+Infrastructure
# + intel-pod3 (vsperf)
node_list=(\
diff --git a/utils/retention_script.sh b/utils/retention_script.sh
new file mode 100755
index 000000000..7e50623ca
--- /dev/null
+++ b/utils/retention_script.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 The Linux Foundation and others
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##############################################################################
+
+PATH=$PATH:/usr/local/bin/
+
+#These are the only projects that generate artifacts
+for x in armband ovsnfv fuel apex compass4nfv
+do
+
+ echo "Looking at artifacts for project $x"
+
+ while IFS= read -r artifact; do
+
+ artifact_date="$(gsutil ls -L $artifact | grep "Creation time:" | awk '{print $4,$5,$6}')"
+ age=$(($(date +%s)-$(date -d"$artifact_date" +%s)))
+ daysold=$(($age/86400))
+
+ if [[ "$daysold" -gt "10" ]]; then
+      echo "$daysold days old, deleting: $(basename $artifact)"
+ else
+      echo "$daysold days old, retaining: $(basename $artifact)"
+ fi
+
+ done < <(gsutil ls gs://artifacts.opnfv.org/"$x" |grep -v "/$")
+done
diff --git a/utils/test-sign-artifact.sh b/utils/test-sign-artifact.sh
new file mode 100755
index 000000000..f09b7f4e2
--- /dev/null
+++ b/utils/test-sign-artifact.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+export PATH=$PATH:/usr/local/bin/
+
+# clone releng repository
+echo "Cloning releng repository..."
+[ -d releng ] && rm -rf releng
+git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
+# this is where we import the signing key
+if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
+ source $WORKSPACE/releng/utils/gpg_import_key.sh
+fi
+
+artifact="foo"
+echo foo > foo
+
+testsign () {
+ echo "Signing artifact: ${artifact}"
+ gpg2 -vvv --batch \
+ --default-key opnfv-helpdesk@rt.linuxfoundation.org \
+ --passphrase besteffort \
+ --detach-sig $artifact
+}
+
+testsign
+
diff --git a/utils/test/reporting/functest/reporting-status.py b/utils/test/reporting/functest/reporting-status.py
index adbee36aa..622c375cc 100644
--- a/utils/test/reporting/functest/reporting-status.py
+++ b/utils/test/reporting/functest/reporting-status.py
@@ -8,8 +8,6 @@
#
import datetime
import jinja2
-import logging
-import os
import requests
import sys
import time
@@ -21,17 +19,7 @@ import testCase as tc
import scenarioResult as sr
# Logger
-logFormatter = logging.Formatter("%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s] %(message)s")
-logger = logging.getLogger()
-
-fileHandler = logging.FileHandler("{0}/{1}".format('.', conf.LOG_FILE))
-fileHandler.setFormatter(logFormatter)
-logger.addHandler(fileHandler)
-
-consoleHandler = logging.StreamHandler()
-consoleHandler.setFormatter(logFormatter)
-logger.addHandler(consoleHandler)
-logger.setLevel(conf.LOG_LEVEL)
+logger = utils.getLogger("Status")
# Initialization
testValid = []
@@ -48,11 +36,11 @@ response = requests.get(cf)
functest_yaml_config = yaml.load(response.text)
-logger.info("****************************************")
-logger.info("* Generating reporting..... *")
-logger.info("* Data retention = %s days *" % conf.PERIOD)
-logger.info("* *")
-logger.info("****************************************")
+logger.info("*******************************************")
+logger.info("* Generating reporting scenario status *")
+logger.info("* Data retention = %s days *" % conf.PERIOD)
+logger.info("* *")
+logger.info("*******************************************")
# Retrieve test cases of Tier 1 (smoke)
config_tiers = functest_yaml_config.get("tiers")
@@ -111,17 +99,22 @@ for version in conf.versions:
for test_case in testValid:
test_case.checkRunnable(installer, s,
test_case.getConstraints())
- logger.debug("testcase %s is %s" % (test_case.getName(),
- test_case.isRunnable))
+ logger.debug("testcase %s is %s" %
+ (test_case.getDisplayName(),
+ test_case.isRunnable))
time.sleep(1)
if test_case.isRunnable:
dbName = test_case.getDbName()
name = test_case.getName()
+ displayName = test_case.getDisplayName()
project = test_case.getProject()
nb_test_runnable_for_this_scenario += 1
logger.info(" Searching results for case %s " %
- (dbName))
+ (displayName))
result = utils.getResult(dbName, installer, s, version)
+ # if no result set the value to 0
+ if result < 0:
+ result = 0
logger.info(" >>>> Test score = " + str(result))
test_case.setCriteria(result)
test_case.setIsRunnable(True)
@@ -144,18 +137,23 @@ for version in conf.versions:
if test_case.isRunnable:
dbName = test_case.getDbName()
name = test_case.getName()
+ displayName = test_case.getDisplayName()
project = test_case.getProject()
logger.info(" Searching results for case %s " %
- (dbName))
+ (displayName))
result = utils.getResult(dbName, installer, s, version)
- test_case.setCriteria(result)
- test_case.setIsRunnable(True)
- testCases2BeDisplayed.append(tc.TestCase(name,
- project,
- "",
- result,
- True,
- 4))
+ # at least 1 result for the test
+ if result > -1:
+ test_case.setCriteria(result)
+ test_case.setIsRunnable(True)
+ testCases2BeDisplayed.append(tc.TestCase(name,
+ project,
+ "",
+ result,
+ True,
+ 4))
+ else:
+ logger.debug("No results found")
items[s] = testCases2BeDisplayed
except:
@@ -182,7 +180,7 @@ for version in conf.versions:
else:
logger.info(">>>>> scenario OK, save the information")
s_status = "OK"
- path_validation_file = ("./release/" + version +
+ path_validation_file = (conf.REPORTING_PATH + "/release/" + version +
"/validated_scenario_history.txt")
with open(path_validation_file, "a") as f:
time_format = "%Y-%m-%d %H:%M"
@@ -193,12 +191,10 @@ for version in conf.versions:
scenario_result_criteria[s] = sr.ScenarioResult(s_status, s_score)
logger.info("--------------------------")
- templateLoader = jinja2.FileSystemLoader(os.path.dirname
- (os.path.abspath
- (__file__)))
+ templateLoader = jinja2.FileSystemLoader(conf.REPORTING_PATH)
templateEnv = jinja2.Environment(loader=templateLoader)
- TEMPLATE_FILE = "./template/index-status-tmpl.html"
+ TEMPLATE_FILE = "/template/index-status-tmpl.html"
template = templateEnv.get_template(TEMPLATE_FILE)
outputText = template.render(scenario_stats=scenario_stats,
@@ -208,6 +204,6 @@ for version in conf.versions:
period=conf.PERIOD,
version=version)
- with open("./release/" + version +
+ with open(conf.REPORTING_PATH + "/release/" + version +
"/index-status-" + installer + ".html", "wb") as fh:
fh.write(outputText)
diff --git a/utils/test/reporting/functest/reporting-tempest.py b/utils/test/reporting/functest/reporting-tempest.py
index a065ef442..e3f4e3306 100644
--- a/utils/test/reporting/functest/reporting-tempest.py
+++ b/utils/test/reporting/functest/reporting-tempest.py
@@ -1,28 +1,44 @@
from urllib2 import Request, urlopen, URLError
import json
import jinja2
-import os
+import reportingConf as conf
+import reportingUtils as utils
-installers = ["apex", "compass", "fuel", "joid"]
+installers = conf.installers
items = ["tests", "Success rate", "duration"]
-PERIOD = 7
-print "Generate Tempest automatic reporting"
+PERIOD = conf.PERIOD
+criteria_nb_test = 165
+criteria_duration = 1800
+criteria_success_rate = 90
+
+logger = utils.getLogger("Tempest")
+logger.info("************************************************")
+logger.info("* Generating reporting Tempest_smoke_serial *")
+logger.info("* Data retention = %s days *" % PERIOD)
+logger.info("* *")
+logger.info("************************************************")
+
+logger.info("Success criteria:")
+logger.info("nb tests executed > %s " % criteria_nb_test)
+logger.info("test duration < %s s " % criteria_duration)
+logger.info("success rate > %s " % criteria_success_rate)
+
for installer in installers:
# we consider the Tempest results of the last PERIOD days
- url = "http://testresults.opnfv.org/test/api/v1/results?case=tempest_smoke_serial"
- request = Request(url + '&period=' + str(PERIOD)
- + '&installer=' + installer + '&version=master')
-
+ url = conf.URL_BASE + "?case=tempest_smoke_serial"
+ request = Request(url + '&period=' + str(PERIOD) +
+ '&installer=' + installer + '&version=master')
+ logger.info("Search tempest_smoke_serial results for installer %s"
+ % installer)
try:
response = urlopen(request)
k = response.read()
results = json.loads(k)
except URLError, e:
- print 'No kittez. Got an error code:', e
+ logger.error("Error code: %s" % e)
test_results = results['results']
- test_results.reverse()
scenario_results = {}
criteria = {}
@@ -48,8 +64,8 @@ for installer in installers:
nb_tests_run = result['details']['tests']
nb_tests_failed = result['details']['failures']
if nb_tests_run != 0:
- success_rate = 100*(int(nb_tests_run)
- - int(nb_tests_failed))/int(nb_tests_run)
+ success_rate = 100*(int(nb_tests_run) -
+ int(nb_tests_failed)) / int(nb_tests_run)
else:
success_rate = 0
@@ -63,40 +79,49 @@ for installer in installers:
crit_time = False
# Expect that at least 165 tests are run
- if nb_tests_run >= 165:
+ if nb_tests_run >= criteria_nb_test:
crit_tests = True
# Expect that at least 90% of success
- if success_rate >= 90:
+ if success_rate >= criteria_success_rate:
crit_rate = True
# Expect that the suite duration is inferior to 30m
- if result['details']['duration'] < 1800:
+ if result['details']['duration'] < criteria_duration:
crit_time = True
result['criteria'] = {'tests': crit_tests,
'Success rate': crit_rate,
'duration': crit_time}
- # error management
+ try:
+ logger.debug("Scenario %s, Installer %s"
+ % (s_result[1]['scenario'], installer))
+ logger.debug("Nb Test run: %s" % nb_tests_run)
+ logger.debug("Test duration: %s"
+ % result['details']['duration'])
+ logger.debug("Success rate: %s" % success_rate)
+ except:
+ logger.error("Data format error")
+
+ # Error management
# ****************
try:
errors = result['details']['errors']
result['errors'] = errors.replace('{0}', '')
except:
- print "Error field not present (Brahamputra runs?)"
+                logger.error("Error field not present (Brahmaputra runs?)")
- mypath = os.path.abspath(__file__)
- tplLoader = jinja2.FileSystemLoader(os.path.dirname(mypath))
- templateEnv = jinja2.Environment(loader=tplLoader)
+ templateLoader = jinja2.FileSystemLoader(conf.REPORTING_PATH)
+ templateEnv = jinja2.Environment(loader=templateLoader)
- TEMPLATE_FILE = "./template/index-tempest-tmpl.html"
+ TEMPLATE_FILE = "/template/index-tempest-tmpl.html"
template = templateEnv.get_template(TEMPLATE_FILE)
outputText = template.render(scenario_results=scenario_results,
items=items,
installer=installer)
- with open("./release/master/index-tempest-" +
+ with open(conf.REPORTING_PATH + "/release/master/index-tempest-" +
installer + ".html", "wb") as fh:
fh.write(outputText)
-print "Tempest automatic reporting Done"
+logger.info("Tempest automatic reporting successfully generated.")
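For reference, the per-run pass/fail evaluation added above can be restated in a few lines. This is a sketch that mirrors the criteria_nb_test, criteria_success_rate and criteria_duration thresholds defined at the top of the script; it is not code from the repository:

    # Sketch of the Tempest success criteria applied to each run above.
    def tempest_criteria(nb_run, nb_failed, duration,
                         min_tests=165, min_rate=90, max_duration=1800):
        # success rate in percent, guarded against empty runs
        rate = 100 * (nb_run - nb_failed) / nb_run if nb_run else 0
        return {'tests': nb_run >= min_tests,
                'Success rate': rate >= min_rate,
                'duration': duration < max_duration}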
diff --git a/utils/test/reporting/functest/reporting-vims.py b/utils/test/reporting/functest/reporting-vims.py
index 4033687e8..d0436ed14 100644
--- a/utils/test/reporting/functest/reporting-vims.py
+++ b/utils/test/reporting/functest/reporting-vims.py
@@ -1,7 +1,11 @@
from urllib2 import Request, urlopen, URLError
import json
import jinja2
-import os
+import reportingConf as conf
+import reportingUtils as utils
+
+logger = utils.getLogger("vIMS")
+
def sig_test_format(sig_test):
nbPassed = 0
@@ -9,7 +13,7 @@ def sig_test_format(sig_test):
nbSkipped = 0
for data_test in sig_test:
if data_test['result'] == "Passed":
- nbPassed+= 1
+ nbPassed += 1
elif data_test['result'] == "Failed":
nbFailures += 1
elif data_test['result'] == "Skipped":
@@ -20,21 +24,29 @@ def sig_test_format(sig_test):
total_sig_test_result['skipped'] = nbSkipped
return total_sig_test_result
-installers = ["fuel", "compass", "joid", "apex"]
-step_order = ["initialisation", "orchestrator", "vIMS", "sig_test"]
+logger.info("****************************************")
+logger.info("* Generating reporting vIMS *")
+logger.info("* Data retention = %s days *" % conf.PERIOD)
+logger.info("* *")
+logger.info("****************************************")
+installers = conf.installers
+step_order = ["initialisation", "orchestrator", "vIMS", "sig_test"]
+logger.info("Start processing....")
for installer in installers:
- request = Request('http://testresults.opnfv.org/test/api/v1/results?case=vims&installer=' + installer)
+ logger.info("Search vIMS results for installer %s" % installer)
+ request = Request(conf.URL_BASE + '?case=vims&installer=' + installer)
try:
response = urlopen(request)
k = response.read()
results = json.loads(k)
except URLError, e:
- print 'No kittez. Got an error code:', e
+ logger.error("Error code: %s" % e)
test_results = results['results']
- test_results.reverse()
+
+ logger.debug("Results found: %s" % test_results)
scenario_results = {}
for r in test_results:
@@ -44,6 +56,7 @@ for installer in installers:
for s, s_result in scenario_results.items():
scenario_results[s] = s_result[0:5]
+ logger.debug("Search for success criteria")
for result in scenario_results[s]:
result["start_date"] = result["start_date"].split(".")[0]
sig_test = result['details']['sig_test']['result']
@@ -67,17 +80,34 @@ for installer in installers:
result['pr_step_ok'] = 0
if nb_step != 0:
result['pr_step_ok'] = (float(nb_step_ok)/nb_step)*100
-
-
- templateLoader = jinja2.FileSystemLoader(os.path.dirname(os.path.abspath(__file__)))
- templateEnv = jinja2.Environment( loader=templateLoader )
-
- TEMPLATE_FILE = "./template/index-vims-tmpl.html"
- template = templateEnv.get_template( TEMPLATE_FILE )
-
- outputText = template.render( scenario_results = scenario_results, step_order = step_order, installer = installer)
-
- with open("./release/master/index-vims-" + installer + ".html", "wb") as fh:
+ try:
+ logger.debug("Scenario %s, Installer %s"
+ % (s_result[1]['scenario'], installer))
+ logger.debug("Orchestrator deployment: %s s"
+ % result['details']['orchestrator']['duration'])
+ logger.debug("vIMS deployment: %s s"
+ % result['details']['vIMS']['duration'])
+ logger.debug("Signaling testing: %s s"
+ % result['details']['sig_test']['duration'])
+ logger.debug("Signaling testing results: %s"
+ % format_result)
+ except:
+ logger.error("Data badly formatted")
+ logger.debug("------------------------------------------------")
+
+ templateLoader = jinja2.FileSystemLoader(conf.REPORTING_PATH)
+ templateEnv = jinja2.Environment(loader=templateLoader)
+
+ TEMPLATE_FILE = "/template/index-vims-tmpl.html"
+ template = templateEnv.get_template(TEMPLATE_FILE)
+
+ outputText = template.render(scenario_results=scenario_results,
+ step_order=step_order,
+ installer=installer)
+
+ with open(conf.REPORTING_PATH +
+ "/release/master/index-vims-" +
+ installer + ".html", "wb") as fh:
fh.write(outputText)
-
+logger.info("vIMS report successfully generated")
diff --git a/utils/test/reporting/functest/reportingConf.py b/utils/test/reporting/functest/reportingConf.py
index 61410b414..a58eeecc9 100644
--- a/utils/test/reporting/functest/reportingConf.py
+++ b/utils/test/reporting/functest/reportingConf.py
@@ -13,14 +13,16 @@ installers = ["apex", "compass", "fuel", "joid"]
# installers = ["apex"]
# list of test cases declared in testcases.yaml but that must not be
# taken into account for the scoring
-blacklist = ["odl", "ovno", "security_scan"]
+blacklist = ["odl", "ovno", "security_scan", "copper", "moon"]
# versions = ["brahmaputra", "master"]
versions = ["master"]
PERIOD = 10
MAX_SCENARIO_CRITERIA = 18
# get the last 5 test results to determinate the success criteria
NB_TESTS = 5
+# REPORTING_PATH = "/usr/share/nginx/html/reporting/functest"
+REPORTING_PATH = "."
URL_BASE = 'http://testresults.opnfv.org/test/api/v1/results'
TEST_CONF = "https://git.opnfv.org/cgit/functest/plain/ci/testcases.yaml"
-LOG_LEVEL = "INFO"
-LOG_FILE = "reporting.log"
+LOG_LEVEL = "ERROR"
+LOG_FILE = REPORTING_PATH + "/reporting.log"
diff --git a/utils/test/reporting/functest/reportingUtils.py b/utils/test/reporting/functest/reportingUtils.py
index 2f06b8449..5051ffa95 100644
--- a/utils/test/reporting/functest/reportingUtils.py
+++ b/utils/test/reporting/functest/reportingUtils.py
@@ -7,8 +7,26 @@
# http://www.apache.org/licenses/LICENSE-2.0
#
from urllib2 import Request, urlopen, URLError
+import logging
import json
-import reportingConf
+import reportingConf as conf
+
+
+def getLogger(module):
+ logFormatter = logging.Formatter("%(asctime)s [" +
+ module +
+ "] [%(levelname)-5.5s] %(message)s")
+ logger = logging.getLogger()
+
+ fileHandler = logging.FileHandler("{0}/{1}".format('.', conf.LOG_FILE))
+ fileHandler.setFormatter(logFormatter)
+ logger.addHandler(fileHandler)
+
+ consoleHandler = logging.StreamHandler()
+ consoleHandler.setFormatter(logFormatter)
+ logger.addHandler(consoleHandler)
+ logger.setLevel(conf.LOG_LEVEL)
+ return logger
def getApiResults(case, installer, scenario, version):
@@ -19,10 +37,10 @@ def getApiResults(case, installer, scenario, version):
# urllib2.install_opener(opener)
# url = "http://127.0.0.1:8000/results?case=" + case + \
# "&period=30&installer=" + installer
- url = (reportingConf.URL_BASE + "?case=" + case +
- "&period=" + str(reportingConf.PERIOD) + "&installer=" + installer +
+ url = (conf.URL_BASE + "?case=" + case +
+ "&period=" + str(conf.PERIOD) + "&installer=" + installer +
"&scenario=" + scenario + "&version=" + version +
- "&last=" + str(reportingConf.NB_TESTS))
+ "&last=" + str(conf.NB_TESTS))
request = Request(url)
try:
@@ -38,9 +56,8 @@ def getApiResults(case, installer, scenario, version):
def getScenarios(case, installer, version):
case = case.getName()
- print case
- url = (reportingConf.URL_BASE + "?case=" + case +
- "&period=" + str(reportingConf.PERIOD) + "&installer=" + installer +
+ url = (conf.URL_BASE + "?case=" + case +
+ "&period=" + str(conf.PERIOD) + "&installer=" + installer +
"&version=" + version)
request = Request(url)
@@ -115,11 +132,16 @@ def getResult(testCase, installer, scenario, version):
# 2: <4 successful consecutive runs but passing the criteria
# 1: close to pass the success criteria
# 0: 0% success, not passing
+ # -1: no run available
test_result_indicator = 0
nbTestOk = getNbtestOk(scenario_results)
+
# print "Nb test OK (last 10 days):"+ str(nbTestOk)
# check that we have at least 4 runs
- if nbTestOk < 1:
+ if len(scenario_results) < 1:
+ # No results available
+ test_result_indicator = -1
+ elif nbTestOk < 1:
test_result_indicator = 0
elif nbTestOk < 2:
test_result_indicator = 1
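The indicator scale described in the comments above (3 for four or more successful consecutive runs, down to -1 when no run is available) can be restated as a small standalone function. The thresholds for the 2 and 3 levels are not visible in this hunk, so they are an assumption based on those comments:

    # Sketch of the result indicator mapping; thresholds for 2 and 3 are assumed.
    def result_indicator(nb_results, nb_ok):
        if nb_results < 1:
            return -1  # no run available
        if nb_ok < 1:
            return 0   # 0% success, not passing
        if nb_ok < 2:
            return 1   # close to passing the success criteria
        if nb_ok < 4:
            return 2   # passing, but fewer than 4 consecutive successes
        return 3       # 4 or more successful consecutive runs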
diff --git a/utils/test/reporting/functest/template/index-status-tmpl.html b/utils/test/reporting/functest/template/index-status-tmpl.html
index 89a1d1527..0c3fa9426 100644
--- a/utils/test/reporting/functest/template/index-status-tmpl.html
+++ b/utils/test/reporting/functest/template/index-status-tmpl.html
@@ -76,7 +76,7 @@
{% for test in items[scenario] -%}
<th>
{% if test.getCriteria() > -1 -%}
- {{test.getDbName() }}
+ {{test.getDisplayName() }}
{%- endif %}
{% if test.getTier() > 3 -%}
*
diff --git a/utils/test/reporting/functest/testCase.py b/utils/test/reporting/functest/testCase.py
index f0e8f5995..e19853a09 100644
--- a/utils/test/reporting/functest/testCase.py
+++ b/utils/test/reporting/functest/testCase.py
@@ -19,6 +19,28 @@ class TestCase(object):
self.criteria = criteria
self.isRunnable = isRunnable
self.tier = tier
+ display_name_matrix = {'healthcheck': 'healthcheck',
+ 'vping_ssh': 'vPing (ssh)',
+ 'vping_userdata': 'vPing (userdata)',
+ 'odl': 'ODL',
+ 'onos': 'ONOS',
+ 'ocl': 'OCL',
+ 'tempest_smoke_serial': 'Tempest (smoke)',
+ 'tempest_full_parallel': 'Tempest (full)',
+ 'rally_sanity': 'Rally (smoke)',
+ 'bgpvpn': 'bgpvpn',
+ 'rally_full': 'Rally (full)',
+ 'vims': 'vIMS',
+ 'doctor': 'Doctor',
+ 'promise': 'Promise',
+ 'moon': 'moon',
+ 'copper': 'copper',
+ 'security_scan': 'security'
+ }
+ try:
+ self.displayName = display_name_matrix[self.name]
+ except:
+ self.displayName = "unknown"
def getName(self):
return self.name
@@ -74,10 +96,10 @@ class TestCase(object):
self.isRunnable = is_runnable
def toString(self):
- testcase = ("Name=" + self.name + ";Criteria=" + str(self.criteria)
- + ";Project=" + self.project + ";Constraints="
- + str(self.constraints) + ";IsRunnable"
- + str(self.isRunnable))
+ testcase = ("Name=" + self.name + ";Criteria=" +
+ str(self.criteria) + ";Project=" + self.project +
+ ";Constraints=" + str(self.constraints) +
+ ";IsRunnable" + str(self.isRunnable))
return testcase
def getDbName(self):
@@ -98,31 +120,15 @@ class TestCase(object):
'rally_full': 'rally_full',
'vims': 'vims',
'doctor': 'doctor-notification',
- 'promise': 'promise'
+ 'promise': 'promise',
+ 'moon': 'moon',
+ 'copper': 'copper',
+ 'security_scan': 'security'
}
try:
return test_match_matrix[self.name]
except:
return "unknown"
- def getTestDisplayName(self):
- # Correspondance name of the test case / name in the DB
- test_match_matrix = {'healthcheck': 'healthcheck',
- 'vping_ssh': 'vPing (ssh)',
- 'vping_userdata': 'vPing (userdata)',
- 'odl': 'ODL',
- 'onos': 'ONOS',
- 'ocl': 'OCL',
- 'tempest_smoke_serial': 'Tempest (smoke)',
- 'tempest_full_parallel': 'Tempest (full)',
- 'rally_sanity': 'Rally (smoke)',
- 'bgpvpn': 'bgpvpn',
- 'rally_full': 'Rally (full)',
- 'vims': 'vIMS',
- 'doctor': 'Doctor',
- 'promise': 'Promise'
- }
- try:
- return test_match_matrix[self.name]
- except:
- return "unknown"
+ def getDisplayName(self):
+ return self.displayName
diff --git a/utils/test/result_collection_api/etc/config.ini b/utils/test/result_collection_api/etc/config.ini
index 16346bf36..0edb73a3f 100644
--- a/utils/test/result_collection_api/etc/config.ini
+++ b/utils/test/result_collection_api/etc/config.ini
@@ -13,4 +13,4 @@ port = 8000
debug = True
[swagger]
-base_url = http://testresults.opnfv.org/test
\ No newline at end of file
+base_url = http://localhost:8000
diff --git a/utils/test/result_collection_api/opnfv_testapi/resources/handlers.py b/utils/test/result_collection_api/opnfv_testapi/resources/handlers.py
index 873701103..f98c35e8f 100644
--- a/utils/test/result_collection_api/opnfv_testapi/resources/handlers.py
+++ b/utils/test/result_collection_api/opnfv_testapi/resources/handlers.py
@@ -198,9 +198,8 @@ class GenericApiHandler(RequestHandler):
comparing values
"""
if not (new_value is None):
- if len(new_value) > 0:
- if new_value != old_value:
- edit_request[key] = new_value
+ if new_value != old_value:
+ edit_request[key] = new_value
return edit_request
diff --git a/utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py b/utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py
index 5198ba355..400b84ac1 100644
--- a/utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py
+++ b/utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py
@@ -45,7 +45,7 @@ class GenericResultHandler(GenericApiHandler):
obj = {"$gte": str(period)}
query['start_date'] = obj
elif k == 'trust_indicator':
- query[k] = float(v)
+ query[k + '.current'] = float(v)
elif k != 'last':
query[k] = v
return query
@@ -116,8 +116,8 @@ class ResultsCLHandler(GenericResultHandler):
@type last: L{string}
@in last: query
@required last: False
- @param trust_indicator: must be int/long/float
- @type trust_indicator: L{string}
+ @param trust_indicator: must be float
+ @type trust_indicator: L{float}
@in trust_indicator: query
@required trust_indicator: False
"""
@@ -180,3 +180,19 @@ class ResultsGURHandler(GenericResultHandler):
query = dict()
query["_id"] = ObjectId(result_id)
self._get_one(query)
+
+ @swagger.operation(nickname="update")
+ def put(self, result_id):
+ """
+ @description: update a single result by _id
+ @param body: fields to be updated
+ @type body: L{ResultUpdateRequest}
+ @in body: body
+ @rtype: L{Result}
+ @return 200: update success
+ @raise 404: result not exist
+ @raise 403: nothing to update
+ """
+ query = {'_id': ObjectId(result_id)}
+ db_keys = []
+ self._update(query, db_keys)
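A usage sketch for the new update endpoint: assuming the API is served locally on port 8000 (matching the swagger base_url change below) and that a result with the given _id already exists, a trust-indicator update could look like this. The _id value is a placeholder, and the payload mirrors the ResultUpdateRequest/TI/TIHistory models in result_models.py:

    # Hypothetical client call against PUT /api/v1/results/{result_id}.
    import requests

    result_id = "57456ed6a73c3c2d6e2d4a5b"  # placeholder ObjectId
    payload = {
        "trust_indicator": {
            "current": 0.65,
            "histories": [{"date": "2016-05-24 07:16:19", "step": -0.05}]
        }
    }
    resp = requests.put("http://localhost:8000/api/v1/results/" + result_id,
                        json=payload)
    # 200 on success, 404 if the result does not exist, 403 if nothing changed
    print(resp.status_code)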
diff --git a/utils/test/result_collection_api/opnfv_testapi/resources/result_models.py b/utils/test/result_collection_api/opnfv_testapi/resources/result_models.py
index fdd80593a..dd1e3dc53 100644
--- a/utils/test/result_collection_api/opnfv_testapi/resources/result_models.py
+++ b/utils/test/result_collection_api/opnfv_testapi/resources/result_models.py
@@ -10,7 +10,69 @@ from opnfv_testapi.tornado_swagger import swagger
@swagger.model()
+class TIHistory(object):
+ """
+ @ptype step: L{float}
+ """
+ def __init__(self, date=None, step=0):
+ self.date = date
+ self.step = step
+
+ def format(self):
+ return {
+ "date": self.date,
+ "step": self.step
+ }
+
+ @staticmethod
+ def from_dict(a_dict):
+ if a_dict is None:
+ return None
+
+ return TIHistory(a_dict.get('date'), a_dict.get('step'))
+
+
+@swagger.model()
+class TI(object):
+ """
+ @property histories: trust_indicator update histories
+ @ptype histories: C{list} of L{TIHistory}
+ @ptype current: L{float}
+ """
+ def __init__(self, current=0):
+ self.current = current
+ self.histories = list()
+
+ def format(self):
+ hs = []
+ for h in self.histories:
+ hs.append(h.format())
+
+ return {
+ "current": self.current,
+ "histories": hs
+ }
+
+ @staticmethod
+ def from_dict(a_dict):
+ if a_dict is None:
+ return None
+ t = TI()
+ t.current = a_dict.get('current')
+ if 'histories' in a_dict.keys():
+ for history in a_dict.get('histories', None):
+ t.histories.append(TIHistory.from_dict(history))
+ else:
+ t.histories = []
+ return t
+
+
+@swagger.model()
class ResultCreateRequest(object):
+ """
+ @property trust_indicator:
+ @ptype trust_indicator: L{TI}
+ """
def __init__(self,
pod_name=None,
project_name=None,
@@ -50,15 +112,30 @@ class ResultCreateRequest(object):
"build_tag": self.build_tag,
"scenario": self.scenario,
"criteria": self.criteria,
- "trust_indicator": self.trust_indicator
+ "trust_indicator": self.trust_indicator.format()
+ }
+
+
+@swagger.model()
+class ResultUpdateRequest(object):
+ """
+ @property trust_indicator:
+ @ptype trust_indicator: L{TI}
+ """
+ def __init__(self, trust_indicator=None):
+ self.trust_indicator = trust_indicator
+
+ def format(self):
+ return {
+ "trust_indicator": self.trust_indicator.format(),
}
@swagger.model()
class TestResult(object):
"""
- @property trust_indicator: must be int/long/float
- @ptype trust_indicator: L{float}
+ @property trust_indicator: used for long duration test case
+ @ptype trust_indicator: L{TI}
"""
def __init__(self, _id=None, case_name=None, project_name=None,
pod_name=None, installer=None, version=None,
@@ -98,19 +175,7 @@ class TestResult(object):
t.build_tag = a_dict.get('build_tag')
t.scenario = a_dict.get('scenario')
t.criteria = a_dict.get('criteria')
- # 0 < trust indicator < 1
- # if bad value => set this indicator to 0
- t.trust_indicator = a_dict.get('trust_indicator')
- if t.trust_indicator is not None:
- if isinstance(t.trust_indicator, (int, long, float)):
- if t.trust_indicator < 0:
- t.trust_indicator = 0
- elif t.trust_indicator > 1:
- t.trust_indicator = 1
- else:
- t.trust_indicator = 0
- else:
- t.trust_indicator = 0
+ t.trust_indicator = TI.from_dict(a_dict.get('trust_indicator'))
return t
def format(self):
@@ -126,7 +191,7 @@ class TestResult(object):
"build_tag": self.build_tag,
"scenario": self.scenario,
"criteria": self.criteria,
- "trust_indicator": self.trust_indicator
+ "trust_indicator": self.trust_indicator.format()
}
def format_http(self):
@@ -143,7 +208,7 @@ class TestResult(object):
"build_tag": self.build_tag,
"scenario": self.scenario,
"criteria": self.criteria,
- "trust_indicator": self.trust_indicator
+ "trust_indicator": self.trust_indicator.format()
}
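To illustrate the nested structure introduced above, a short round-trip of the new trust_indicator objects (the import path is the one used by the unit tests further down):

    # Serialize and restore the new trust_indicator structure.
    from opnfv_testapi.resources.result_models import TI, TIHistory

    ti = TI(0.7)
    ti.histories.append(TIHistory("2016-05-24 07:16:19", -0.05))
    doc = ti.format()
    # doc == {"current": 0.7,
    #         "histories": [{"date": "2016-05-24 07:16:19", "step": -0.05}]}
    restored = TI.from_dict(doc)
    assert restored.current == 0.7 and len(restored.histories) == 1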
diff --git a/utils/test/result_collection_api/opnfv_testapi/tests/unit/fake_pymongo.py b/utils/test/result_collection_api/opnfv_testapi/tests/unit/fake_pymongo.py
index 6ab98c720..450969248 100644
--- a/utils/test/result_collection_api/opnfv_testapi/tests/unit/fake_pymongo.py
+++ b/utils/test/result_collection_api/opnfv_testapi/tests/unit/fake_pymongo.py
@@ -116,8 +116,8 @@ class MemDb(object):
if k == 'start_date':
if not MemDb._compare_date(v, content.get(k)):
return False
- elif k == 'trust_indicator':
- if float(content.get(k)) != float(v):
+ elif k == 'trust_indicator.current':
+ if content.get('trust_indicator').get('current') != v:
return False
elif content.get(k, None) != v:
return False
@@ -173,7 +173,6 @@ class MemDb(object):
def _check_keys(self, doc):
for key in doc.keys():
- print('key', key, 'value', doc.get(key))
if '.' in key:
raise NameError('key {} must not contain .'.format(key))
if key.startswith('$'):
diff --git a/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_fake_pymongo.py b/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_fake_pymongo.py
index 27382f089..9a1253e94 100644
--- a/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_fake_pymongo.py
+++ b/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_fake_pymongo.py
@@ -8,9 +8,9 @@
##############################################################################
import unittest
-from tornado.web import Application
from tornado import gen
from tornado.testing import AsyncHTTPTestCase, gen_test
+from tornado.web import Application
import fake_pymongo
diff --git a/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_result.py b/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_result.py
index bba3b228f..98ef7c08c 100644
--- a/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_result.py
+++ b/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_result.py
@@ -6,15 +6,16 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-import unittest
import copy
+import unittest
+from datetime import datetime, timedelta
from opnfv_testapi.common.constants import HTTP_OK, HTTP_BAD_REQUEST, \
HTTP_NOT_FOUND
from opnfv_testapi.resources.pod_models import PodCreateRequest
from opnfv_testapi.resources.project_models import ProjectCreateRequest
from opnfv_testapi.resources.result_models import ResultCreateRequest, \
- TestResult, TestResults
+ TestResult, TestResults, ResultUpdateRequest, TI, TIHistory
from opnfv_testapi.resources.testcase_models import TestcaseCreateRequest
from test_base import TestBase
@@ -55,9 +56,11 @@ class TestResultBase(TestBase):
self.build_tag = 'v3.0'
self.scenario = 'odl-l2'
self.criteria = 'passed'
- self.trust_indicator = 0.7
+ self.trust_indicator = TI(0.7)
self.start_date = "2016-05-23 07:16:09.477097"
self.stop_date = "2016-05-23 07:16:19.477097"
+ self.update_date = "2016-05-24 07:16:19.477097"
+ self.update_step = -0.05
super(TestResultBase, self).setUp()
self.details = Details(timestart='0', duration='9s', status='OK')
self.req_d = ResultCreateRequest(pod_name=self.pod,
@@ -74,6 +77,7 @@ class TestResultBase(TestBase):
trust_indicator=self.trust_indicator)
self.get_res = TestResult
self.list_res = TestResults
+ self.update_res = TestResult
self.basePath = '/api/v1/results'
self.req_pod = PodCreateRequest(self.pod, 'metal', 'zte pod 1')
self.req_project = ProjectCreateRequest(self.project, 'vping test')
@@ -103,10 +107,19 @@ class TestResultBase(TestBase):
self.assertEqual(result.build_tag, req.build_tag)
self.assertEqual(result.scenario, req.scenario)
self.assertEqual(result.criteria, req.criteria)
- self.assertEqual(result.trust_indicator, req.trust_indicator)
self.assertEqual(result.start_date, req.start_date)
self.assertEqual(result.stop_date, req.stop_date)
self.assertIsNotNone(result._id)
+ ti = result.trust_indicator
+ self.assertEqual(ti.current, req.trust_indicator.current)
+ if ti.histories:
+ history = ti.histories[0]
+ self.assertEqual(history.date, self.update_date)
+ self.assertEqual(history.step, self.update_step)
+
+ def _create_d(self):
+ _, res = self.create_d()
+ return res.href.split('/')[-1]
class TestResultCreate(TestResultBase):
@@ -172,8 +185,7 @@ class TestResultCreate(TestResultBase):
class TestResultGet(TestResultBase):
def test_getOne(self):
- _, res = self.create_d()
- _id = res.href.split('/')[-1]
+ _id = self._create_d()
code, body = self.get(_id)
self.assert_res(code, body)
@@ -266,8 +278,6 @@ class TestResultGet(TestResultBase):
self.assert_res(code, result, req)
def _create_changed_date(self, **kwargs):
- import copy
- from datetime import datetime, timedelta
req = copy.deepcopy(self.req_d)
req.start_date = datetime.now() + timedelta(**kwargs)
req.stop_date = str(req.start_date + timedelta(minutes=10))
@@ -276,13 +286,36 @@ class TestResultGet(TestResultBase):
return req
def _set_query(self, *args):
+ def get_value(arg):
+ return eval('self.' + arg) \
+ if arg != 'trust_indicator' else self.trust_indicator.current
uri = ''
for arg in args:
if '=' in arg:
uri += arg + '&'
else:
- uri += '{}={}&'.format(arg, eval('self.' + arg))
+ uri += '{}={}&'.format(arg, get_value(arg))
return uri[0: -1]
+
+class TestResultUpdate(TestResultBase):
+ def test_success(self):
+ _id = self._create_d()
+
+ new_ti = copy.deepcopy(self.trust_indicator)
+ new_ti.current += self.update_step
+ new_ti.histories.append(TIHistory(self.update_date, self.update_step))
+ new_data = copy.deepcopy(self.req_d)
+ new_data.trust_indicator = new_ti
+ update = ResultUpdateRequest(trust_indicator=new_ti)
+ code, body = self.update(update, _id)
+ self.assertEqual(_id, body._id)
+ self.assert_res(code, body, new_data)
+
+ code, new_body = self.get(_id)
+ self.assertEqual(_id, new_body._id)
+ self.assert_res(code, new_body, new_data)
+
+
if __name__ == '__main__':
unittest.main()