Diffstat (limited to 'jjb')
-rwxr-xr-x  jjb/apex/apex-deploy.sh | 41
-rw-r--r--  jjb/apex/apex-snapshot-create.sh | 23
-rwxr-xr-x  jjb/apex/apex-upload-artifact.sh | 13
-rw-r--r--  jjb/apex/apex.yml | 331
-rw-r--r--  jjb/armband/armband-ci-jobs.yml | 96
-rwxr-xr-x  jjb/armband/armband-deploy.sh | 8
-rw-r--r--  jjb/armband/armband-project-jobs.yml | 2
-rw-r--r--  jjb/armband/armband-verify-jobs.yml | 1
-rw-r--r--  jjb/availability/availability.yml | 1
-rw-r--r--  jjb/barometer/barometer.yml | 4
-rw-r--r--  jjb/bottlenecks/bottlenecks-ci-jobs.yml | 62
-rw-r--r--  jjb/bottlenecks/bottlenecks-cleanup.sh | 111
-rw-r--r--  jjb/bottlenecks/bottlenecks-project-jobs.yml | 3
-rw-r--r--  jjb/bottlenecks/bottlenecks-run-suite.sh | 65
-rw-r--r--  jjb/compass4nfv/compass-ci-jobs.yml | 40
-rw-r--r--  jjb/compass4nfv/compass-deploy.sh | 2
-rw-r--r--  jjb/compass4nfv/compass-project-jobs.yml | 2
-rw-r--r--  jjb/compass4nfv/compass-verify-jobs.yml | 107
-rw-r--r--  jjb/conductor/conductor.yml | 1
-rw-r--r--  jjb/copper/copper.yml | 4
-rw-r--r--  jjb/cperf/cperf-ci-jobs.yml | 40
-rw-r--r--  jjb/daisy4nfv/daisy-daily-jobs.yml | 199
-rwxr-xr-x  jjb/daisy4nfv/daisy-deploy.sh | 63
-rw-r--r--  jjb/daisy4nfv/daisy-project-jobs.yml | 4
-rwxr-xr-x  jjb/daisy4nfv/daisy4nfv-build.sh | 1
-rwxr-xr-x  jjb/daisy4nfv/daisy4nfv-deploy.sh | 3
-rwxr-xr-x  jjb/daisy4nfv/daisy4nfv-download-artifact.sh | 23
-rw-r--r--  jjb/daisy4nfv/daisy4nfv-merge-jobs.yml | 5
-rw-r--r--  jjb/daisy4nfv/daisy4nfv-verify-jobs.yml | 3
-rwxr-xr-x  jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh | 29
-rw-r--r--  jjb/doctor/doctor.yml | 17
-rw-r--r--  jjb/domino/domino.yml | 1
-rw-r--r--  jjb/dovetail/dovetail-ci-jobs.yml | 40
-rw-r--r--  jjb/dpacc/dpacc.yml | 1
-rw-r--r--  jjb/escalator/escalator.yml | 2
-rwxr-xr-x  jjb/fuel/fuel-basic-exp.sh | 18
-rwxr-xr-x  jjb/fuel/fuel-build-exp.sh | 10
-rw-r--r--  jjb/fuel/fuel-daily-jobs.yml | 55
-rwxr-xr-x  jjb/fuel/fuel-deploy-exp.sh | 10
-rwxr-xr-x  jjb/fuel/fuel-deploy.sh | 2
-rw-r--r--  jjb/fuel/fuel-project-jobs.yml | 4
-rwxr-xr-x  jjb/fuel/fuel-smoke-test-exp.sh | 10
-rw-r--r--  jjb/fuel/fuel-verify-jobs-experimental.yml | 255
-rw-r--r--  jjb/fuel/fuel-verify-jobs.yml | 3
-rw-r--r--  jjb/fuel/fuel-weekly-jobs.yml | 210
-rw-r--r--  jjb/functest/functest-daily-jobs.yml (renamed from jjb/functest/functest-ci-jobs.yml) | 65
-rw-r--r--  jjb/functest/functest-project-jobs.yml | 1
-rwxr-xr-x  jjb/functest/functest-suite.sh | 27
-rw-r--r--  jjb/functest/functest-weekly-jobs.yml | 124
-rwxr-xr-x  jjb/functest/set-functest-env.sh | 34
-rw-r--r--  jjb/global/releng-macros.yml | 52
-rw-r--r--  jjb/global/slave-params.yml | 40
-rw-r--r--  jjb/infra/bifrost-cleanup-job.yml | 140
-rw-r--r--  jjb/infra/bifrost-verify-jobs.yml | 4
-rw-r--r--  jjb/ipv6/ipv6.yml | 1
-rw-r--r--  jjb/joid/joid-daily-jobs.yml | 94
-rw-r--r--  jjb/joid/joid-verify-jobs.yml | 1
-rw-r--r--  jjb/models/models.yml | 68
-rw-r--r--  jjb/moon/moon.yml | 1
-rwxr-xr-x  jjb/multisite/fuel-deploy-for-multisite.sh | 3
-rw-r--r--  jjb/multisite/multisite-daily-jobs.yml | 4
-rw-r--r--  jjb/multisite/multisite-verify-jobs.yml | 3
-rw-r--r--  jjb/netready/netready.yml | 1
-rw-r--r--  jjb/octopus/octopus.yml | 1
-rw-r--r--  jjb/onosfw/onosfw.yml | 3
-rw-r--r--  jjb/openretriever/openretriever-project.yml | 1
-rw-r--r--  jjb/opera/opera-daily-jobs.yml | 2
-rw-r--r--  jjb/opera/opera-verify-jobs.yml | 1
-rw-r--r--  jjb/opnfvdocs/docs-post-rtd.sh | 7
-rw-r--r--  jjb/opnfvdocs/docs-rtd.yaml | 90
-rw-r--r--  jjb/opnfvdocs/opnfvdocs.yml | 7
-rw-r--r--  jjb/ovsnfv/ovsnfv.yml | 2
-rw-r--r--  jjb/parser/parser.yml | 7
-rw-r--r--  jjb/pharos/pharos.yml | 1
-rw-r--r--  jjb/prediction/prediction.yml | 1
-rw-r--r--  jjb/promise/promise.yml | 1
-rw-r--r--  jjb/qtip/helpers/validate-deploy.sh | 12
-rw-r--r--  jjb/qtip/qtip-validate-jobs.yml | 13
-rw-r--r--  jjb/qtip/qtip-verify-jobs.yml | 7
-rw-r--r--  jjb/releng/opnfv-docker-arm.yml | 2
-rw-r--r--  jjb/releng/opnfv-docker.sh | 36
-rw-r--r--  jjb/releng/opnfv-docker.yml | 2
-rw-r--r--  jjb/releng/opnfv-lint.yml | 2
-rw-r--r--  jjb/releng/testapi-backup-mongodb.sh | 2
-rw-r--r--  jjb/snaps/snaps.yml | 63
-rw-r--r--  jjb/storperf/storperf.yml | 2
-rw-r--r--  jjb/ves/ves.yml | 69
-rw-r--r--  jjb/vswitchperf/vswitchperf.yml | 6
-rw-r--r--  jjb/yardstick/yardstick-ci-jobs.yml | 2
89 files changed, 2171 insertions(+), 759 deletions(-)
diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh
index dc70488e7..c91e3ee82 100755
--- a/jjb/apex/apex-deploy.sh
+++ b/jjb/apex/apex-deploy.sh
@@ -3,7 +3,7 @@ set -o errexit
set -o nounset
set -o pipefail
-APEX_PKGS="common undercloud onos"
+APEX_PKGS="common undercloud" # removed onos for danube
IPV6_FLAG=False
# log info to console
@@ -15,7 +15,7 @@ if ! rpm -q wget > /dev/null; then
sudo yum -y install wget
fi
-if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *csit* ]]; then
+if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *promote* ]]; then
# Build is from a verify, use local build artifacts (not RPMs)
cd $WORKSPACE/../${BUILD_DIRECTORY}
WORKSPACE=$(pwd)
@@ -79,8 +79,8 @@ elif [[ "$DEPLOY_SCENARIO" == *gate* ]]; then
fi
fi
-# use local build for verify and csit promote
-if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *csit* ]]; then
+# use local build for verify and promote
+if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *promote* ]]; then
if [ ! -e "${WORKSPACE}/build/lib" ]; then
ln -s ${WORKSPACE}/lib ${WORKSPACE}/build/lib
fi
@@ -159,7 +159,7 @@ if [ "$OPNFV_CLEAN" == 'yes' ]; then
else
clean_opts=''
fi
- if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *csit* ]]; then
+ if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *promote* ]]; then
sudo CONFIG=${CONFIG} LIB=${LIB} ./clean.sh ${clean_opts}
else
sudo CONFIG=${CONFIG} LIB=${LIB} opnfv-clean ${clean_opts}
@@ -181,26 +181,19 @@ fi
if [[ "$JOB_NAME" == *virtual* ]]; then
# settings for virtual deployment
- if [ "$IPV6_FLAG" == "True" ]; then
- NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_v6.yaml"
- elif echo ${DEPLOY_SCENARIO} | grep fdio; then
- NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_vpp.yaml"
- else
- NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings.yaml"
- fi
DEPLOY_CMD="${DEPLOY_CMD} -v"
+ if [[ "${DEPLOY_SCENARIO}" =~ fdio|ovs ]]; then
+ DEPLOY_CMD="${DEPLOY_CMD} --virtual-default-ram 14 --virtual-compute-ram 8"
+ fi
if [[ "$JOB_NAME" == *csit* ]]; then
- DEPLOY_CMD="${DEPLOY_CMD} -e csit-environment.yaml --virtual-computes 2"
+ DEPLOY_CMD="${DEPLOY_CMD} -e csit-environment.yaml"
+ fi
+ if [[ "$JOB_NAME" == *promote* ]]; then
+ DEPLOY_CMD="${DEPLOY_CMD} --virtual-computes 2"
fi
else
# settings for bare metal deployment
- if [ "$IPV6_FLAG" == "True" ]; then
- NETWORK_FILE="/root/network/network_settings_v6.yaml"
- elif [[ "$JOB_NAME" == *master* ]]; then
- NETWORK_FILE="/root/network/network_settings-master.yaml"
- else
- NETWORK_FILE="/root/network/network_settings.yaml"
- fi
+ NETWORK_SETTINGS_DIR="/root/network"
INVENTORY_FILE="/root/inventory/pod_settings.yaml"
if ! sudo test -e "$INVENTORY_FILE"; then
@@ -211,6 +204,14 @@ else
DEPLOY_CMD="${DEPLOY_CMD} -i ${INVENTORY_FILE}"
fi
+if [ "$IPV6_FLAG" == "True" ]; then
+ NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_v6.yaml"
+elif echo ${DEPLOY_SCENARIO} | grep fdio; then
+ NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_vpp.yaml"
+else
+ NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings.yaml"
+fi
+
# Check that network settings file exists
if ! sudo test -e "$NETWORK_FILE"; then
echo "ERROR: Required settings file missing: Network Settings file ${NETWORK_FILE}"
diff --git a/jjb/apex/apex-snapshot-create.sh b/jjb/apex/apex-snapshot-create.sh
index f146dd810..b2a39449e 100644
--- a/jjb/apex/apex-snapshot-create.sh
+++ b/jjb/apex/apex-snapshot-create.sh
@@ -13,6 +13,7 @@ set -o nounset
set -o pipefail
SSH_OPTIONS=(-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o LogLevel=error)
+SNAP_TYPE=$(echo ${JOB_NAME} | sed -n 's/^apex-\(.\+\)-promote.*$/\1/p')
echo "Creating Apex snapshot..."
echo "-------------------------"
@@ -81,17 +82,19 @@ sudo chown jenkins-ci:jenkins-ci *
# tar up artifacts
DATE=`date +%Y-%m-%d`
-tar czf ../apex-csit-snap-${DATE}.tar.gz .
+tar czf ../apex-${SNAP_TYPE}-snap-${DATE}.tar.gz .
popd > /dev/null
sudo rm -rf ${tmp_dir}
-echo "Snapshot saved as apex-csit-snap-${DATE}.tar.gz"
+echo "Snapshot saved as apex-${SNAP_TYPE}-snap-${DATE}.tar.gz"
# update opnfv properties file
-curl -O -L http://$GS_URL/snapshot.properties
-sed -i '/^OPNFV_SNAP_URL=/{h;s#=.*#='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#};${x;/^$/{s##OPNFV_SNAP_URL='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#;H};x}' snapshot.properties
-snap_sha=$(sha512sum apex-csit-snap-${DATE}.tar.gz | cut -d' ' -f1)
-sed -i '/^OPNFV_SNAP_SHA512SUM=/{h;s/=.*/='${snap_sha}'/};${x;/^$/{s//OPNFV_SNAP_SHA512SUM='${snap_sha}'/;H};x}' snapshot.properties
-echo "OPNFV_SNAP_URL=$GS_URL/apex-csit-snap-${DATE}.tar.gz"
-echo "OPNFV_SNAP_SHA512SUM=$(sha512sum apex-csit-snap-${DATE}.tar.gz | cut -d' ' -f1)"
-echo "Updated properties file: "
-cat snapshot.properties
+if [ "$SNAP_TYPE" == 'csit' ]; then
+ curl -O -L http://$GS_URL/snapshot.properties
+ sed -i '/^OPNFV_SNAP_URL=/{h;s#=.*#='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#};${x;/^$/{s##OPNFV_SNAP_URL='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#;H};x}' snapshot.properties
+ snap_sha=$(sha512sum apex-csit-snap-${DATE}.tar.gz | cut -d' ' -f1)
+ sed -i '/^OPNFV_SNAP_SHA512SUM=/{h;s/=.*/='${snap_sha}'/};${x;/^$/{s//OPNFV_SNAP_SHA512SUM='${snap_sha}'/;H};x}' snapshot.properties
+ echo "OPNFV_SNAP_URL=$GS_URL/apex-csit-snap-${DATE}.tar.gz"
+ echo "OPNFV_SNAP_SHA512SUM=$(sha512sum apex-csit-snap-${DATE}.tar.gz | cut -d' ' -f1)"
+ echo "Updated properties file: "
+ cat snapshot.properties
+fi
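
Note: the sed invocations above are an update-or-append idiom — rewrite the OPNFV_SNAP_URL / OPNFV_SNAP_SHA512SUM line if it already exists in snapshot.properties, otherwise append it after the last line. A minimal standalone sketch of that idiom and of the SNAP_TYPE extraction, using illustrative values that are not part of the patch:

#!/bin/bash
# SNAP_TYPE is the middle token of an apex-<type>-promote job name
JOB_NAME="apex-csit-promote-daily-master"
SNAP_TYPE=$(echo ${JOB_NAME} | sed -n 's/^apex-\(.\+\)-promote.*$/\1/p')
echo "SNAP_TYPE=${SNAP_TYPE}"    # -> csit

# update-or-append: if an OPNFV_SNAP_URL line exists it is rewritten in place,
# otherwise the key is appended after the last line of the file
props=$(mktemp)
echo "OPNFV_SNAP_SHA512SUM=old" > ${props}
url="gs.example.org/apex-csit-snap-2017-01-01.tar.gz"
sed -i '/^OPNFV_SNAP_URL=/{h;s#=.*#='"${url}"'#};${x;/^$/{s##OPNFV_SNAP_URL='"${url}"'#;H};x}' ${props}
cat ${props}    # keeps OPNFV_SNAP_SHA512SUM=old and appends the OPNFV_SNAP_URL line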
diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh
index ef8ad5329..c2de7d70d 100755
--- a/jjb/apex/apex-upload-artifact.sh
+++ b/jjb/apex/apex-upload-artifact.sh
@@ -51,13 +51,13 @@ echo "ISO Upload Complete!"
RPM_INSTALL_PATH=$BUILD_DIRECTORY/noarch
RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//')
-for pkg in common undercloud onos; do
+for pkg in common undercloud; do # removed onos for danube
RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
done
SRPM_INSTALL_PATH=$BUILD_DIRECTORY
SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL)
VERSION_EXTENSION=$(echo $(basename $OPNFV_SRPM_URL) | sed 's/opnfv-apex-//')
-for pkg in common undercloud onos; do
+for pkg in common undercloud; do # removed onos for danube
SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
done
}
@@ -76,12 +76,15 @@ gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.la
uploadsnap () {
# Uploads snapshot artifact and updated properties file
echo "Uploading snapshot artifacts"
- gsutil cp $WORKSPACE/apex-csit-snap-`date +%Y-%m-%d`.tar.gz gs://$GS_URL/ > gsutil.iso.log
- gsutil cp $WORKSPACE/snapshot.properties gs://$GS_URL/snapshot.properties > gsutil.latest.log
+ SNAP_TYPE=$(echo ${JOB_NAME} | sed -n 's/^apex-\(.\+\)-promote.*$/\1/p')
+ gsutil cp $WORKSPACE/apex-${SNAP_TYPE}-snap-`date +%Y-%m-%d`.tar.gz gs://$GS_URL/ > gsutil.iso.log
+ if [ "$SNAP_TYPE" == 'csit' ]; then
+ gsutil cp $WORKSPACE/snapshot.properties gs://$GS_URL/snapshot.properties > gsutil.latest.log
+ fi
echo "Upload complete for Snapshot"
}
-if echo $WORKSPACE | grep csit > /dev/null; then
+if echo $WORKSPACE | grep promote > /dev/null; then
uploadsnap
elif gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then
echo "Signing Key avaliable"
diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index ff9fbec14..60340e9ca 100644
--- a/jjb/apex/apex.yml
+++ b/jjb/apex/apex.yml
@@ -10,10 +10,8 @@
- 'apex-deploy-virtual-{scenario}-{stream}'
- 'apex-deploy-baremetal-{scenario}-{stream}'
- 'apex-daily-{stream}'
- - 'apex-daily-colorado'
- - 'apex-build-colorado'
- - 'apex-deploy-baremetal-os-odl_l2-fdio-ha-colorado'
- 'apex-csit-promote-daily-{stream}'
+ - 'apex-fdio-promote-daily-{stream}'
# stream: branch with - in place of / (eg. stable-arno)
# branch: branch (eg. stable/arno)
@@ -24,6 +22,12 @@
slave: 'lf-pod1'
verify-slave: 'apex-verify-master'
daily-slave: 'apex-daily-master'
+ - danube:
+ branch: 'stable/danube'
+ gs-pathname: '/danube'
+ slave: 'lf-pod1'
+ verify-slave: 'apex-verify-danube'
+ daily-slave: 'apex-daily-danube'
project: 'apex'
@@ -34,6 +38,9 @@
- 'os-nosdn-ovs-noha'
- 'os-nosdn-fdio-noha'
- 'os-nosdn-fdio-ha'
+ - 'os-nosdn-kvm-ha'
+ - 'os-nosdn-kvm-noha'
+ - 'os-odl_l2-fdio-noha'
- 'os-odl_l2-fdio-ha'
- 'os-odl_l2-netvirt_gbp_fdio-noha'
- 'os-odl_l2-sfc-noha'
@@ -45,6 +52,7 @@
- 'os-odl_l3-fdio_dvr-noha'
- 'os-odl_l3-fdio_dvr-ha'
- 'os-odl_l3-csit-noha'
+ - 'os-odl_l3-nofeature-noha'
- 'os-onos-nofeature-ha'
- 'gate'
@@ -177,21 +185,6 @@
- 'apex-unit-test'
- 'apex-build'
- trigger-builds:
- - project: 'apex-deploy-virtual-os-nosdn-nofeature-ha-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-verify-{stream}
- OPNFV_CLEAN=yes
- git-revision: false
- block: true
- same-node: true
- - trigger-builds:
- - project: 'functest-apex-{verify-slave}-suite-{stream}'
- predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-nofeature-ha
- FUNCTEST_SUITE_NAME=healthcheck
- block: true
- same-node: true
- - trigger-builds:
- project: 'apex-deploy-virtual-os-odl_l3-nofeature-ha-{stream}'
predefined-parameters: |
BUILD_DIRECTORY=apex-verify-{stream}
@@ -318,7 +311,7 @@
blocking-jobs:
- 'apex-daily.*'
- 'apex-verify.*'
- - 'apex-csit.*'
+ - 'apex-.*-promote.*'
builders:
- trigger-builds:
@@ -383,7 +376,7 @@
builders:
- trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl_l3-nofeature-ha-{stream}'
+ - project: 'apex-deploy-baremetal-os-odl_l3-nofeature-noha-{stream}'
predefined-parameters:
OPNFV_CLEAN=yes
git-revision: false
@@ -392,7 +385,7 @@
- trigger-builds:
- project: 'cperf-apex-intel-pod2-daily-{stream}'
predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
+ DEPLOY_SCENARIO=os-odl_l3-nofeature-noha
block: true
same-node: true
@@ -570,7 +563,7 @@
- 'apex-deploy.*'
- 'apex-build.*'
- 'apex-runner.*'
- - 'apex-csit.*'
+ - 'apex-.*-promote.*'
triggers:
- 'apex-{stream}'
@@ -612,6 +605,23 @@
build-step-failure-threshold: 'never'
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
+ # 1.dovetail only master by now, not sync with A/B/C branches
+ # 2.here the stream means the SUT stream, dovetail stream is defined in its own job
+ # 3.only debug testsuite here(includes basic testcase,
+ # i.e. one tempest smoke ipv6, two vping from functest)
+ # 4.not used for release criteria or compliance,
+ # only to debug the dovetail tool bugs with apex
+ - trigger-builds:
+ - project: 'dovetail-apex-{slave}-debug-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-nosdn-nofeature-ha
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
- trigger-builds:
- project: 'apex-deploy-baremetal-os-odl_l3-nofeature-ha-{stream}'
predefined-parameters: |
@@ -702,104 +712,137 @@
build-step-failure-threshold: 'never'
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
-# Colorado Build
-- job-template:
- name: 'apex-build-colorado'
-
- # Job template for builds
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: 'apex-daily-colorado'
-
- disabled: false
-
- concurrent: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: 'stable/colorado'
- - apex-parameter:
- gs-pathname: '/colorado'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from parameters macro."
-
- scm:
- - git-scm
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-deploy.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - 'apex-build'
- - 'apex-upload-artifact'
-
-
-# Colorado FDIO Deploy
-- job-template:
- name: 'apex-deploy-baremetal-os-odl_l2-fdio-ha-colorado'
-
- # Job template for baremetal deployment
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: 'lf-pod1'
-
- disabled: false
-
- scm:
- - git-scm
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: 'stable/colorado'
- - apex-parameter:
- gs-pathname: '/colorado'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl_l2-fdio-ha'
- description: "Scenario to deploy with."
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-deploy.*'
- - 'apex-build.*'
-
-
- builders:
- - 'apex-deploy'
- - 'apex-workspace-cleanup'
+ - trigger-builds:
+ - project: 'apex-deploy-baremetal-os-odl_l2-fdio-noha-{stream}'
+ predefined-parameters: |
+ BUILD_DIRECTORY=apex-build-{stream}/.build
+ OPNFV_CLEAN=yes
+ git-revision: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ block: true
+ - trigger-builds:
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-odl_l2-fdio-noha
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-odl_l2-fdio-noha
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'apex-deploy-baremetal-os-odl_l2-fdio-ha-{stream}'
+ predefined-parameters: |
+ BUILD_DIRECTORY=apex-build-{stream}/.build
+ OPNFV_CLEAN=yes
+ git-revision: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ block: true
+ - trigger-builds:
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-odl_l2-fdio-ha
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-odl_l2-fdio-ha
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'apex-deploy-baremetal-os-nosdn-kvm-ha-{stream}'
+ predefined-parameters: |
+ BUILD_DIRECTORY=apex-build-{stream}/.build
+ OPNFV_CLEAN=yes
+ git-revision: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ block: true
+ - trigger-builds:
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-nosdn-kvm-ha
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-nosdn-kvm-ha
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'apex-deploy-baremetal-os-odl_l3-fdio-noha-{stream}'
+ predefined-parameters: |
+ BUILD_DIRECTORY=apex-build-{stream}/.build
+ OPNFV_CLEAN=yes
+ git-revision: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ block: true
+ - trigger-builds:
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-odl_l3-fdio-noha
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-odl_l3-fdio-noha
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
-# Colorado FDIO Daily
+# CSIT promote
- job-template:
- name: 'apex-daily-colorado'
+ name: 'apex-csit-promote-daily-{stream}'
- # Job template for daily build
+ # Job template for promoting CSIT Snapshots
#
# Required Variables:
# stream: branch with - in place of / (eg. stable)
# branch: branch (eg. stable)
- node: 'apex-daily-colorado'
+ node: '{daily-slave}'
disabled: false
@@ -809,12 +852,11 @@
parameters:
- project-parameter:
project: '{project}'
- branch: 'stable/colorado'
+ branch: '{branch}'
- apex-parameter:
- gs-pathname: '/colorado'
+ gs-pathname: '{gs-pathname}'
properties:
- - logrotate-default
- build-blocker:
use-build-blocker: true
block-level: 'NODE'
@@ -823,41 +865,36 @@
- 'apex-deploy.*'
- 'apex-build.*'
- 'apex-runner.*'
+ - 'apex-daily.*'
triggers:
- - 'apex-colorado'
+ - timed: '0 12 * * 0'
builders:
+ - 'apex-build'
- trigger-builds:
- - project: 'apex-build-colorado'
- git-revision: true
- current-parameters: true
- same-node: true
- block: true
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl_l2-fdio-ha-colorado'
+ - project: 'apex-deploy-virtual-os-odl_l3-csit-noha-{stream}'
predefined-parameters: |
- BUILD_DIRECTORY=apex-build-colorado/.build
+ BUILD_DIRECTORY=apex-csit-promote-daily-{stream}
OPNFV_CLEAN=yes
- git-revision: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
+ git-revision: false
block: true
+ same-node: true
- trigger-builds:
- - project: 'functest-apex-apex-daily-colorado-daily-colorado'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l2-fdio-ha
+ - project: 'functest-apex-{daily-slave}-suite-{stream}'
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l3-nofeature-noha
+ FUNCTEST_SUITE_NAME=tempest_smoke_serial
block: true
same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
+ - shell:
+ !include-raw-escape: ./apex-snapshot-create.sh
+ - shell:
+ !include-raw-escape: ./apex-upload-artifact.sh
-# CSIT promote
+# FDIO promote
- job-template:
- name: 'apex-csit-promote-daily-{stream}'
+ name: 'apex-fdio-promote-daily-{stream}'
# Job template for promoting CSIT Snapshots
#
@@ -889,26 +926,16 @@
- 'apex-runner.*'
- 'apex-daily.*'
- triggers:
- - timed: '0 12 * * 0'
-
builders:
- 'apex-build'
- trigger-builds:
- - project: 'apex-deploy-virtual-os-odl_l3-csit-noha-{stream}'
+ - project: 'apex-deploy-virtual-os-odl_l2-fdio-noha-{stream}'
predefined-parameters: |
- BUILD_DIRECTORY=apex-csit-promote-daily-{stream}
+ BUILD_DIRECTORY=apex-fdio-promote-daily-{stream}
OPNFV_CLEAN=yes
git-revision: false
block: true
same-node: true
- - trigger-builds:
- - project: 'functest-apex-{daily-slave}-suite-{stream}'
- predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_l3-nofeature-noha
- FUNCTEST_SUITE_NAME=tempest_smoke_serial
- block: true
- same-node: true
- shell:
!include-raw-escape: ./apex-snapshot-create.sh
- shell:
@@ -1020,9 +1047,9 @@
- trigger:
name: 'apex-master'
triggers:
- - timed: '0 3 * * *'
+ - timed: '0 3 * * 7'
- trigger:
- name: 'apex-colorado'
+ name: 'apex-danube'
triggers:
- timed: '0 12 * * *'
- trigger:
diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml
index 4cb58d916..ddcbbd038 100644
--- a/jjb/armband/armband-ci-jobs.yml
+++ b/jjb/armband/armband-ci-jobs.yml
@@ -15,7 +15,7 @@
stream: danube
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
#--------------------------------
@@ -56,6 +56,10 @@
slave-label: arm-pod3
installer: fuel
<<: *danube
+ - arm-pod3-2:
+ slave-label: arm-pod3-2
+ installer: fuel
+ <<: *danube
#--------------------------------
# master
#--------------------------------
@@ -67,6 +71,10 @@
slave-label: arm-pod3
installer: fuel
<<: *master
+ - arm-pod3-2:
+ slave-label: arm-pod3-2
+ installer: fuel
+ <<: *master
#--------------------------------
# scenarios
#--------------------------------
@@ -262,23 +270,23 @@
- trigger:
name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 8 * * 1,3,5,7'
+ - timed: '0 0 * * 1'
- trigger:
name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 16 * * 2,7'
+ - timed: '0 0 * * 2'
- trigger:
name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 16 * * 1,4,6'
+ - timed: '0 0 * * 3'
- trigger:
name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 8 * * 2,4,6'
+ - timed: '0 0 * * 4'
- trigger:
name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 16 * * 3,5'
+ - timed: '0 0 * * 5'
- trigger:
name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-master-trigger'
triggers:
@@ -294,31 +302,31 @@
- trigger:
name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-danube-trigger'
triggers:
- - timed: '0 0 * * 1'
+ - timed: '0 8 * * 1,4'
- trigger:
name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-danube-trigger'
triggers:
- - timed: '0 0 * * 2'
+ - timed: '0 16 * * 1,4'
- trigger:
name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-danube-trigger'
triggers:
- - timed: '0 0 * * 4'
+ - timed: '0 8 * * 2,5'
- trigger:
name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-danube-trigger'
triggers:
- - timed: '0 0 * * 3'
+ - timed: '0 16 * * 2,5'
- trigger:
name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-danube-trigger'
triggers:
- - timed: '0 0 * * 5'
+ - timed: '0 8 * * 3,6'
- trigger:
name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-danube-trigger'
triggers:
- - timed: ''
+ - timed: '0 16 * * 3,6'
- trigger:
name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-danube-trigger'
triggers:
- - timed: ''
+ - timed: '0 8,16 * * 7'
#---------------------------------------------------------------
# Enea Armband CI Virtual Triggers running against master branch
#---------------------------------------------------------------
@@ -505,3 +513,65 @@
name: 'fuel-os-odl_l2-sfc-noha-arm-pod3-danube-trigger'
triggers:
- timed: ''
+#--------------------------------------------------------------------------
+# Enea Armband POD 3 Triggers running against master branch (aarch64 slave)
+#--------------------------------------------------------------------------
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-ha-arm-pod3-2-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-ha-arm-pod3-2-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-ha-arm-pod3-2-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod3-2-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-noha-arm-pod3-2-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-ha-arm-pod3-2-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-noha-arm-pod3-2-master-trigger'
+ triggers:
+ - timed: ''
+#--------------------------------------------------------------------------
+# Enea Armband POD 3 Triggers running against danube branch (aarch64 slave)
+#--------------------------------------------------------------------------
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-ha-arm-pod3-2-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-ha-arm-pod3-2-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-ha-arm-pod3-2-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod3-2-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-noha-arm-pod3-2-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-ha-arm-pod3-2-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-noha-arm-pod3-2-danube-trigger'
+ triggers:
+ - timed: ''
diff --git a/jjb/armband/armband-deploy.sh b/jjb/armband/armband-deploy.sh
index adabfcaeb..2e5aa3924 100755
--- a/jjb/armband/armband-deploy.sh
+++ b/jjb/armband/armband-deploy.sh
@@ -32,6 +32,14 @@ fi
# set deployment parameters
export TMPDIR=${WORKSPACE}/tmpdir
+
+# arm-pod3-2 is an aarch64 jenkins slave for the same POD as the
+# x86 jenkins slave arm-pod3; therefore we use the same pod name
+# to deploy the pod from both jenkins slaves
+if [[ "${NODE_NAME}" == "arm-pod3-2" ]]; then
+ NODE_NAME="arm-pod3"
+fi
+
LAB_NAME=${NODE_NAME/-*}
POD_NAME=${NODE_NAME/*-}
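
Note: LAB_NAME and POD_NAME above come from bash pattern substitution on the slave name: ${NODE_NAME/-*} deletes everything from the first '-', and ${NODE_NAME/*-} deletes everything up to the last '-'. A quick illustration with the remapped aarch64 slave (values are examples only):

NODE_NAME="arm-pod3-2"
# the aarch64 slave is mapped back to the x86 slave name before splitting
if [[ "${NODE_NAME}" == "arm-pod3-2" ]]; then
    NODE_NAME="arm-pod3"
fi
LAB_NAME=${NODE_NAME/-*}   # "arm"  (everything after the first '-' removed)
POD_NAME=${NODE_NAME/*-}   # "pod3" (everything before the last '-' removed)
echo "lab=${LAB_NAME} pod=${POD_NAME}"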
diff --git a/jjb/armband/armband-project-jobs.yml b/jjb/armband/armband-project-jobs.yml
index fd37c5af6..f6840a008 100644
--- a/jjb/armband/armband-project-jobs.yml
+++ b/jjb/armband/armband-project-jobs.yml
@@ -20,7 +20,7 @@
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
- job-template:
name: 'armband-{installer}-build-daily-{stream}'
diff --git a/jjb/armband/armband-verify-jobs.yml b/jjb/armband/armband-verify-jobs.yml
index 3486718e4..567456d9b 100644
--- a/jjb/armband/armband-verify-jobs.yml
+++ b/jjb/armband/armband-verify-jobs.yml
@@ -86,6 +86,7 @@
pattern: 'ci/**'
- compare-type: ANT
pattern: 'patches/**'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**'
diff --git a/jjb/availability/availability.yml b/jjb/availability/availability.yml
index 9cb7f8899..302bbc996 100644
--- a/jjb/availability/availability.yml
+++ b/jjb/availability/availability.yml
@@ -53,6 +53,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
diff --git a/jjb/barometer/barometer.yml b/jjb/barometer/barometer.yml
index 6a17e1706..9ec30e809 100644
--- a/jjb/barometer/barometer.yml
+++ b/jjb/barometer/barometer.yml
@@ -20,7 +20,7 @@
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
- job-template:
name: 'barometer-verify-{stream}'
@@ -55,6 +55,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
@@ -105,6 +106,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**'
diff --git a/jjb/bottlenecks/bottlenecks-ci-jobs.yml b/jjb/bottlenecks/bottlenecks-ci-jobs.yml
index a9ccd6977..2779e316b 100644
--- a/jjb/bottlenecks/bottlenecks-ci-jobs.yml
+++ b/jjb/bottlenecks/bottlenecks-ci-jobs.yml
@@ -72,7 +72,8 @@
suite:
- 'rubbos'
- 'vstf'
- - 'posca'
+ - 'posca_stress_traffic'
+ - 'posca_stress_ping'
jobs:
- 'bottlenecks-{installer}-{suite}-{pod}-daily-{stream}'
@@ -137,65 +138,14 @@
- builder:
name: bottlenecks-env-cleanup
builders:
- - shell: |
- #!/bin/bash
- set -e
- [[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
- echo "Bottlenecks: docker containers/images cleaning up"
- if [[ ! -z $(docker ps -a | grep opnfv/bottlenecks) ]]; then
- echo "removing existing opnfv/bottlenecks containers"
- docker ps -a | grep opnfv/bottlenecks | awk '{print $1}' | xargs docker rm -f >$redirect
- fi
-
- if [[ ! -z $(docker images | grep opnfv/bottlenecks) ]]; then
- echo "Bottlenecks: docker images to remove:"
- docker images | head -1 && docker images | grep opnfv/bottlenecks
- image_tags=($(docker images | grep opnfv/bottlenecks | awk '{print $2}'))
- for tag in "${image_tags[@]}"; do
- echo "Removing docker image opnfv/bottlenecks:$tag..."
- docker rmi opnfv/bottlenecks:$tag >$redirect
- done
- fi
+ - shell:
+ !include-raw: ./bottlenecks-cleanup.sh
- builder:
name: bottlenecks-run-suite
builders:
- - shell: |
- #!/bin/bash
- set -e
- [[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
- echo "Bottlenecks: to pull image opnfv/bottlenecks:${DOCKER_TAG}"
- docker pull opnfv/bottlenecks:$DOCKER_TAG >${redirect}
-
- echo "Bottlenecks: docker start running"
- opts="--privileged=true -id"
- envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
- -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NET=${EXTERNAL_NETWORK} \
- -e BOTTLENECKS_BRANCH=${BOTTLENECKS_BRANCH} -e GERRIT_REFSPEC_DEBUG=${GERRIT_REFSPEC_DEBUG} \
- -e BOTTLENECKS_DB_TARGET=${BOTTLENECKS_DB_TARGET} -e PACKAGE_URL=${PACKAGE_URL}"
- cmd="sudo docker run ${opts} ${envs} opnfv/bottlenecks:${DOCKER_TAG} /bin/bash"
- echo "Bottlenecks: docker cmd running ${cmd}"
- ${cmd} >${redirect}
-
- echo "Bottlenecks: obtain docker id"
- container_id=$(docker ps | grep "opnfv/bottlenecks:${DOCKER_TAG}" | awk '{print $1}' | head -1)
- if [ -z ${container_id} ]; then
- echo "Cannot find opnfv/bottlenecks container ID ${container_id}. Please check if it exists."
- docker ps -a
- exit 1
- fi
-
- echo "Bottlenecks: to prepare openstack environment"
- prepare_env="${REPO_DIR}/ci/prepare_env.sh"
- echo "Bottlenecks: docker cmd running: ${prepare_env}"
- sudo docker exec ${container_id} ${prepare_env}
-
- echo "Bottlenecks: to run testsuite ${SUITE_NAME}"
- run_testsuite="${REPO_DIR}/run_tests.sh -s ${SUITE_NAME}"
- echo "Bottlenecks: docker cmd running: ${run_testsuite}"
- sudo docker exec ${container_id} ${run_testsuite}
+ - shell:
+ !include-raw: ./bottlenecks-run-suite.sh
####################
# parameter macros
diff --git a/jjb/bottlenecks/bottlenecks-cleanup.sh b/jjb/bottlenecks/bottlenecks-cleanup.sh
new file mode 100644
index 000000000..0ba042318
--- /dev/null
+++ b/jjb/bottlenecks/bottlenecks-cleanup.sh
@@ -0,0 +1,111 @@
+#!/bin/bash
+set -e
+[[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
+
+BOTTLENECKS_IMAGE=opnfv/bottlenecks
+echo "Bottlenecks: docker containers/images cleaning up"
+
+dangling_images=($(docker images -f "dangling=true" | grep $BOTTLENECKS_IMAGE | awk '{print $3}'))
+if [[ -n $dangling_images ]]; then
+ echo "Removing $BOTTLENECKS_IMAGE:<none> dangling images and their containers"
+ docker images | head -1 && docker images | grep $dangling_images
+ for image_id in "${dangling_images[@]}"; do
+ echo "Bottlenecks: Removing dangling image $image_id"
+ docker rmi -f $image_id >${redirect}
+ done
+fi
+
+for image_id in "${dangling_images[@]}"; do
+ if [[ -n $(docker ps -a | grep $image_id) ]]; then
+ echo "Bottlenecks: Removing containers associated with dangling image: $image_id"
+ docker ps -a | head -1 && docker ps -a | grep $image_id
+ docker ps -a | grep $image_id | awk '{print $1}'| xargs docker rm -f >${redirect}
+ fi
+done
+
+if [[ -n $(docker ps -a | grep $BOTTLENECKS_IMAGE) ]]; then
+ echo "Removing existing $BOTTLENECKS_IMAGE containers"
+ docker ps -a | grep $BOTTLENECKS_IMAGE | awk '{print $1}' | xargs docker rm -f >$redirect
+fi
+
+if [[ -n $(docker images | grep $BOTTLENECKS_IMAGE) ]]; then
+ echo "Bottlenecks: docker images to remove:"
+ docker images | head -1 && docker images | grep $BOTTLENECKS_IMAGE
+ image_tags=($(docker images | grep $BOTTLENECKS_IMAGE | awk '{print $2}'))
+ for tag in "${image_tags[@]}"; do
+ echo "Removing docker image $BOTTLENECKS_IMAGE:$tag..."
+ docker rmi $BOTTLENECKS_IMAGE:$tag >$redirect
+ done
+fi
+
+echo "Yardstick: docker containers/images cleaning up"
+YARDSTICK_IMAGE=opnfv/yardstick
+
+dangling_images=($(docker images -f "dangling=true" | grep $YARDSTICK_IMAGE | awk '{print $3}'))
+if [[ -n $dangling_images ]]; then
+ echo "Removing $YARDSTICK_IMAGE:<none> dangling images and their containers"
+ docker images | head -1 && docker images | grep $dangling_images
+ for image_id in "${dangling_images[@]}"; do
+ echo "Yardstick: Removing dangling image $image_id"
+ docker rmi -f $image_id >${redirect}
+ done
+fi
+
+for image_id in "${dangling_images[@]}"; do
+ if [[ -n $(docker ps -a | grep $image_id) ]]; then
+ echo "Yardstick: Removing containers associated with dangling image: $image_id"
+ docker ps -a | head -1 && docker ps -a | grep $image_id
+ docker ps -a | grep $image_id | awk '{print $1}'| xargs docker rm -f >${redirect}
+ fi
+done
+
+if [[ -n $(docker ps -a | grep $YARDSTICK_IMAGE) ]]; then
+ echo "Removing existing $YARDSTICK_IMAGE containers"
+ docker ps -a | grep $YARDSTICK_IMAGE | awk '{print $1}' | xargs docker rm -f >$redirect
+fi
+
+if [[ -n $(docker images | grep $YARDSTICK_IMAGE) ]]; then
+ echo "Yardstick: docker images to remove:"
+ docker images | head -1 && docker images | grep $YARDSTICK_IMAGE
+ image_tags=($(docker images | grep $YARDSTICK_IMAGE | awk '{print $2}'))
+ for tag in "${image_tags[@]}"; do
+ echo "Removing docker image $YARDSTICK_IMAGE:$tag..."
+ docker rmi $YARDSTICK_IMAGE:$tag >$redirect
+ done
+fi
+
+echo "InfluxDB: docker containers/images cleaning up"
+INFLUXDB_IMAGE=tutum/influxdb
+
+dangling_images=($(docker images -f "dangling=true" | grep $INFLUXDB_IMAGE | awk '{print $3}'))
+if [[ -n $dangling_images ]]; then
+ echo "Removing $INFLUXDB_IMAGE:<none> dangling images and their containers"
+ docker images | head -1 && docker images | grep $dangling_images
+ for image_id in "${dangling_images[@]}"; do
+ echo "InfluxDB: Removing dangling image $image_id"
+ docker rmi -f $image_id >${redirect}
+ done
+fi
+
+for image_id in "${dangling_images[@]}"; do
+ if [[ -n $(docker ps -a | grep $image_id) ]]; then
+ echo "InfluxDB: Removing containers associated with dangling image: $image_id"
+ docker ps -a | head -1 && docker ps -a | grep $image_id
+ docker ps -a | grep $image_id | awk '{print $1}'| xargs docker rm -f >${redirect}
+ fi
+done
+
+if [[ -n $(docker ps -a | grep $INFLUXDB_IMAGE) ]]; then
+ echo "Removing existing $INFLUXDB_IMAGE containers"
+ docker ps -a | grep $INFLUXDB_IMAGE | awk '{print $1}' | xargs docker rm -f >$redirect
+fi
+
+if [[ -n $(docker images | grep $INFLUXDB_IMAGE) ]]; then
+ echo "InfluxDB: docker images to remove:"
+ docker images | head -1 && docker images | grep $INFLUXDB_IMAGE
+ image_tags=($(docker images | grep $INFLUXDB_IMAGE | awk '{print $2}'))
+ for tag in "${image_tags[@]}"; do
+ echo "Removing docker image $INFLUXDB_IMAGE:$tag..."
+ docker rmi $INFLUXDB_IMAGE:$tag >$redirect
+ done
+fi
\ No newline at end of file
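
Note: the new cleanup script repeats the same three steps — remove dangling images and their containers, remove leftover containers, remove every remaining tag — for opnfv/bottlenecks, opnfv/yardstick and tutum/influxdb. A condensed, parameterized sketch of that pattern (the function name and the awk-based tag listing are illustrative, not part of the script):

#!/bin/bash
cleanup_docker_image() {
    local image=$1
    # dangling layers that still match the image name, as grepped in the script above
    docker images -f "dangling=true" | grep "${image}" | awk '{print $3}' | xargs -r docker rmi -f
    # containers created from any tag of the image
    docker ps -a | grep "${image}" | awk '{print $1}' | xargs -r docker rm -f
    # every remaining tag of the image
    docker images | awk -v img="${image}" '$1 == img {print img":"$2}' | xargs -r docker rmi
}

for img in opnfv/bottlenecks opnfv/yardstick tutum/influxdb; do
    cleanup_docker_image "${img}"
done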
diff --git a/jjb/bottlenecks/bottlenecks-project-jobs.yml b/jjb/bottlenecks/bottlenecks-project-jobs.yml
index 12ea31b13..a0abb9331 100644
--- a/jjb/bottlenecks/bottlenecks-project-jobs.yml
+++ b/jjb/bottlenecks/bottlenecks-project-jobs.yml
@@ -29,7 +29,8 @@
suite:
- 'rubbos'
- 'vstf'
- - 'posca'
+ - 'posca_stress_traffic'
+ - 'posca_stress_ping'
################################
# job templates
diff --git a/jjb/bottlenecks/bottlenecks-run-suite.sh b/jjb/bottlenecks/bottlenecks-run-suite.sh
new file mode 100644
index 000000000..f69463fc2
--- /dev/null
+++ b/jjb/bottlenecks/bottlenecks-run-suite.sh
@@ -0,0 +1,65 @@
+#!/bin/bash
+#set -e
+[[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
+BOTTLENECKS_IMAGE=opnfv/bottlenecks
+
+if [[ $SUITE_NAME == rubbos || $SUITE_NAME == vstf ]]; then
+ echo "Bottlenecks: to pull image $BOTTLENECKS_IMAGE:${DOCKER_TAG}"
+ docker pull $BOTTLENECKS_IMAGE:$DOCKER_TAG >${redirect}
+
+ echo "Bottlenecks: docker start running"
+ opts="--privileged=true -id"
+ envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
+ -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NET=${EXTERNAL_NETWORK} \
+ -e BOTTLENECKS_BRANCH=${BOTTLENECKS_BRANCH} -e GERRIT_REFSPEC_DEBUG=${GERRIT_REFSPEC_DEBUG} \
+ -e BOTTLENECKS_DB_TARGET=${BOTTLENECKS_DB_TARGET} -e PACKAGE_URL=${PACKAGE_URL}"
+ cmd="sudo docker run ${opts} ${envs} $BOTTLENECKS_IMAGE:${DOCKER_TAG} /bin/bash"
+ echo "Bottlenecks: docker cmd running ${cmd}"
+ ${cmd} >${redirect}
+
+ echo "Bottlenecks: obtain docker id"
+ container_id=$(docker ps | grep "$BOTTLENECKS_IMAGE:${DOCKER_TAG}" | awk '{print $1}' | head -1)
+ if [ -z ${container_id} ]; then
+ echo "Cannot find $BOTTLENECKS_IMAGE container ID ${container_id}. Please check if it exists."
+ docker ps -a
+ exit 1
+ fi
+
+ echo "Bottlenecks: to prepare openstack environment"
+ prepare_env="${REPO_DIR}/ci/prepare_env.sh"
+ echo "Bottlenecks: docker cmd running: ${prepare_env}"
+ sudo docker exec ${container_id} ${prepare_env}
+
+ echo "Bottlenecks: to run testsuite ${SUITE_NAME}"
+ run_testsuite="${REPO_DIR}/run_tests.sh -s ${SUITE_NAME}"
+ echo "Bottlenecks: docker cmd running: ${run_testsuite}"
+ sudo docker exec ${container_id} ${run_testsuite}
+else
+ echo "Bottlenecks: installing POSCA docker-compose"
+ if [ -d usr/local/bin/docker-compose ]; then
+ rm -rf usr/local/bin/docker-compose
+ fi
+ curl -L https://github.com/docker/compose/releases/download/1.11.0/docker-compose-`uname -s`-`uname -m` > /usr/local/bin/docker-compose
+ chmod +x /usr/local/bin/docker-compose
+
+ echo "Bottlenecks: composing up dockers"
+ cd $WORKSPACE
+ docker-compose -f $WORKSPACE/docker/bottleneck-compose/docker-compose.yml up -d
+
+ echo "Bottlenecks: running traffic stress/factor testing in posca testsuite "
+ POSCA_SCRIPT=/home/opnfv/bottlenecks/testsuites/posca
+ if [[ $SUITE_NAME == posca_stress_traffic ]]; then
+ TEST_CASE=posca_factor_system_bandwidth
+ echo "Bottlenecks: pulling tutum/influxdb for yardstick"
+ docker pull tutum/influxdb:0.13
+ sleep 5
+ docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE
+ elif [[ $SUITE_NAME == posca_stress_ping ]]; then
+ TEST_CASE=posca_stress_ping
+ sleep 5
+ docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE
+ fi
+
+ echo "Bottlenecks: cleaning up docker-compose images and dockers"
+ docker-compose -f $WORKSPACE/docker/bottleneck-compose/docker-compose.yml down --rmi all
+fi
\ No newline at end of file
diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml
index 7258e89f4..237f8944d 100644
--- a/jjb/compass4nfv/compass-ci-jobs.yml
+++ b/jjb/compass4nfv/compass-ci-jobs.yml
@@ -15,7 +15,7 @@
stream: danube
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
#--------------------------------
@@ -32,11 +32,11 @@
<<: *master
- baremetal:
slave-label: compass-baremetal
- os-version: 'trusty'
+ os-version: 'xenial'
<<: *danube
- virtual:
slave-label: compass-virtual
- os-version: 'trusty'
+ os-version: 'xenial'
<<: *danube
#--------------------------------
# master
@@ -71,6 +71,10 @@
- 'os-nosdn-kvm-ha':
disabled: false
auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ - 'os-nosdn-openo-ha':
+ disabled: false
+ auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+
jobs:
- 'compass-{scenario}-{pod}-daily-{stream}'
@@ -247,6 +251,10 @@
triggers:
- timed: '0 19 * * *'
- trigger:
+ name: 'compass-os-nosdn-openo-ha-baremetal-centos-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
name: 'compass-os-odl_l2-nofeature-ha-baremetal-centos-master-trigger'
triggers:
- timed: '0 23 * * *'
@@ -280,6 +288,10 @@
triggers:
- timed: '0 2 * * *'
- trigger:
+ name: 'compass-os-nosdn-openo-ha-baremetal-master-trigger'
+ triggers:
+ - timed: '0 3 * * *'
+- trigger:
name: 'compass-os-odl_l2-nofeature-ha-baremetal-master-trigger'
triggers:
- timed: '0 22 * * *'
@@ -311,23 +323,27 @@
- trigger:
name: 'compass-os-nosdn-nofeature-ha-baremetal-danube-trigger'
triggers:
- - timed: ''
+ - timed: '0 9 * * *'
+- trigger:
+ name: 'compass-os-nosdn-openo-ha-baremetal-danube-trigger'
+ triggers:
+ - timed: '0 13 * * *'
- trigger:
name: 'compass-os-odl_l2-nofeature-ha-baremetal-danube-trigger'
triggers:
- - timed: ''
+ - timed: '0 17 * * *'
- trigger:
name: 'compass-os-odl_l3-nofeature-ha-baremetal-danube-trigger'
triggers:
- - timed: ''
+ - timed: '0 21 * * *'
- trigger:
name: 'compass-os-onos-nofeature-ha-baremetal-danube-trigger'
triggers:
- - timed: ''
+ - timed: '0 1 * * *'
- trigger:
name: 'compass-os-ocl-nofeature-ha-baremetal-danube-trigger'
triggers:
- - timed: ''
+ - timed: '0 5 * * *'
- trigger:
name: 'compass-os-onos-sfc-ha-baremetal-danube-trigger'
triggers:
@@ -346,6 +362,10 @@
triggers:
- timed: '0 21 * * *'
- trigger:
+ name: 'compass-os-nosdn-openo-ha-virtual-master-trigger'
+ triggers:
+ - timed: '0 22 * * *'
+- trigger:
name: 'compass-os-odl_l2-nofeature-ha-virtual-master-trigger'
triggers:
- timed: '0 20 * * *'
@@ -379,6 +399,10 @@
triggers:
- timed: '0 21 * * *'
- trigger:
+ name: 'compass-os-nosdn-openo-ha-virtual-danube-trigger'
+ triggers:
+ - timed: '0 22 * * *'
+- trigger:
name: 'compass-os-odl_l2-nofeature-ha-virtual-danube-trigger'
triggers:
- timed: '0 20 * * *'
diff --git a/jjb/compass4nfv/compass-deploy.sh b/jjb/compass4nfv/compass-deploy.sh
index 6696e4b3d..534e17e62 100644
--- a/jjb/compass4nfv/compass-deploy.sh
+++ b/jjb/compass4nfv/compass-deploy.sh
@@ -34,6 +34,8 @@ if [[ "${DEPLOY_SCENARIO}" =~ "-ocl" ]]; then
export NETWORK_CONF_FILE=network_ocl.yml
elif [[ "${DEPLOY_SCENARIO}" =~ "-onos" ]]; then
export NETWORK_CONF_FILE=network_onos.yml
+elif [[ "${DEPLOY_SCENARIO}" =~ "-openo" ]]; then
+ export NETWORK_CONF_FILE=network_openo.yml
else
export NETWORK_CONF_FILE=network.yml
fi
diff --git a/jjb/compass4nfv/compass-project-jobs.yml b/jjb/compass4nfv/compass-project-jobs.yml
index ed0fee6c0..9b13e693a 100644
--- a/jjb/compass4nfv/compass-project-jobs.yml
+++ b/jjb/compass4nfv/compass-project-jobs.yml
@@ -16,7 +16,7 @@
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
jobs:
- 'compass-build-iso-{stream}'
diff --git a/jjb/compass4nfv/compass-verify-jobs.yml b/jjb/compass4nfv/compass-verify-jobs.yml
index d58138088..e625c686a 100644
--- a/jjb/compass4nfv/compass-verify-jobs.yml
+++ b/jjb/compass4nfv/compass-verify-jobs.yml
@@ -37,6 +37,7 @@
#####################################
jobs:
- 'compass-verify-{distro}-{stream}'
+ - 'compass-verify-k8-{distro}-{stream}'
- 'compass-verify-{phase}-{distro}-{stream}'
#####################################
# job templates
@@ -95,6 +96,7 @@
file-paths:
- compare-type: ANT
pattern: '**/*'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**'
@@ -125,6 +127,11 @@
node-parameters: true
kill-phase-on: FAILURE
abort-all-job: true
+ - name: 'opnfv-yamllint-verify-{stream}'
+ current-parameters: true
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
- multijob:
name: deploy-virtual
condition: SUCCESSFUL
@@ -156,6 +163,106 @@
abort-all-job: true
- job-template:
+ name: 'compass-verify-k8-{distro}-{stream}'
+
+ project-type: multijob
+
+ disabled: '{obj:disabled}'
+
+ concurrent: true
+
+ properties:
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 4
+ max-per-node: 1
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'compass-verify-[^-]*-[^-]*'
+ - 'compass-os-.*?-virtual-daily-.*?'
+ block-level: 'NODE'
+
+ scm:
+ - git-scm-gerrit
+
+ wrappers:
+ - ssh-agent-wrapper
+ - timeout:
+ timeout: 120
+ fail: true
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - comment-added-contains-event:
+ comment-contains-value: 'check k8'
+ - comment-added-contains-event:
+ comment-contains-value: 'verify k8'
+ - comment-added-contains-event:
+ comment-contains-value: 'check kubernetes'
+ - comment-added-contains-event:
+ comment-contains-value: 'verify kubernetes'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: ANT
+ pattern: '**/*'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**'
+ readable-message: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'compass-virtual-defaults'
+ - '{installer}-defaults'
+ - 'compass-verify-defaults':
+ installer: '{installer}'
+ gs-pathname: '{gs-pathname}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'k8-nosdn-nofeature-ha'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - multijob:
+ name: basic
+ condition: SUCCESSFUL
+ projects:
+ - name: 'opnfv-lint-verify-{stream}'
+ current-parameters: true
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - name: 'opnfv-yamllint-verify-{stream}'
+ current-parameters: true
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: deploy-virtual
+ condition: SUCCESSFUL
+ projects:
+ - name: 'compass-verify-deploy-virtual-{distro}-{stream}'
+ current-parameters: true
+ predefined-parameters: |
+ COMPASS_OS_VERSION={os-version}
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+
+- job-template:
name: 'compass-verify-{phase}-{distro}-{stream}'
disabled: '{obj:disabled}'
diff --git a/jjb/conductor/conductor.yml b/jjb/conductor/conductor.yml
index 1d47624e1..d2ce649fc 100644
--- a/jjb/conductor/conductor.yml
+++ b/jjb/conductor/conductor.yml
@@ -53,6 +53,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
diff --git a/jjb/copper/copper.yml b/jjb/copper/copper.yml
index ea1af473c..d06afe4c0 100644
--- a/jjb/copper/copper.yml
+++ b/jjb/copper/copper.yml
@@ -53,6 +53,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
@@ -64,5 +65,4 @@
set -o nounset
set -o pipefail
- cd $WORKSPACE/ci
- shellcheck -f tty tests/*.sh
+ # shellcheck -f tty tests/*.sh
diff --git a/jjb/cperf/cperf-ci-jobs.yml b/jjb/cperf/cperf-ci-jobs.yml
index 125937e80..d1914f6f1 100644
--- a/jjb/cperf/cperf-ci-jobs.yml
+++ b/jjb/cperf/cperf-ci-jobs.yml
@@ -113,13 +113,43 @@
builders:
- shell: |
#!/bin/bash
- set +e
- # TODO: need to figure out the logic to get ${CONTROLLER_IP} used below
+ set -o errexit
+ set -o nounset
+ set -o pipefail
+ undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
+ grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
+ INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
+ sudo scp $INSTALLER_IP:/home/stack/stackrc /tmp/stackrc
+ source /tmp/stackrc
+
+ # robot suites need the ssh key to log in to controller nodes, so throwing it
+ # in tmp, and mounting /tmp as $HOME as far as robot is concerned
+ sudo mkdir -p /tmp/.ssh
+ sudo scp $INSTALLER_IP:/home/stack/.ssh/id_rsa /tmp/.ssh/
+ sudo chmod -R 0600 /tmp/.ssh
+
+ # cbench requires the openflow drop test feature to be installed.
+ sshpass -p karaf ssh -o StrictHostKeyChecking=no \
+ -o UserKnownHostsFile=/dev/null \
+ -o LogLevel=error \
+ -p 8101 karaf@$SDN_CONTROLLER_IP \
+ feature:install odl-openflowplugin-flow-services-ui odl-openflowplugin-drop-test
+
docker pull opnfv/cperf:$DOCKER_TAG
- robot_cmd="pybot -e exclude -v ODL_SYSTEM_IP:${CONTROLLER_IP} -v switch_count:100 -v loops:10 \
- -v TOOLS_SYSTEM_IP:localhost -v duration_in_seconds:60"
+
+ robot_cmd="pybot -e exclude -L TRACE \
+ -v ODL_SYSTEM_1_IP:${SDN_CONTROLLER_IP} \
+ -v ODL_SYSTEM_IP:${SDN_CONTROLLER_IP} \
+ -v BUNDLEFOLDER:/opt/opendaylight \
+ -v RESTCONFPORT:8081 \
+ -v USER_HOME:/tmp \
+ -v USER:heat-admin \
+ -v ODL_SYSTEM_USER:heat-admin \
+ -v TOOLS_SYSTEM_IP:localhost \
+ -v of_port:6653"
robot_suite="/home/opnfv/repos/odl_test/csit/suites/openflowplugin/Performance/010_Cbench.robot"
- docker run opnfv/cperf:$DOCKER_TAG ${robot_cmd} ${robot_suite}
+
+ docker run -v /tmp:/tmp opnfv/cperf:$DOCKER_TAG ${robot_cmd} ${robot_suite}
- builder:
name: cperf-cleanup
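
Note: the rewritten cperf builder above discovers the Apex undercloud address by reading the MAC of the 'undercloud' libvirt domain on the default network and resolving it through the host ARP cache, then copies stackrc and the ssh key from it. A standalone recap of that discovery step, annotated and run on the jump host (same commands as in the hunk):

#!/bin/bash
set -o errexit
set -o pipefail
# MAC of the undercloud VM interface on the libvirt 'default' network
undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
    grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
# resolve the MAC to an IP via the host ARP cache
INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
echo "undercloud reachable at ${INSTALLER_IP}"
# credentials used to source the overcloud environment
sudo scp ${INSTALLER_IP}:/home/stack/stackrc /tmp/stackrc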
diff --git a/jjb/daisy4nfv/daisy-daily-jobs.yml b/jjb/daisy4nfv/daisy-daily-jobs.yml
new file mode 100644
index 000000000..ffae70f8f
--- /dev/null
+++ b/jjb/daisy4nfv/daisy-daily-jobs.yml
@@ -0,0 +1,199 @@
+# jenkins job templates for Daisy
+# TODO
+# [ ] enable baremetal jobs after baremetal deployment finish
+# [ ] enable jobs in danuble
+# [ ] add more scenarios
+# [ ] integration with yardstick
+
+- project:
+
+ name: 'daisy'
+ project: '{name}'
+ installer: '{name}'
+
+#--------------------------------
+# BRANCH ANCHORS
+#--------------------------------
+ master: &master
+ stream: master
+ branch: '{stream}'
+ disabled: false
+ gs-pathname: ''
+#--------------------------------
+# POD, INSTALLER, AND BRANCH MAPPING
+#--------------------------------
+# CI PODs
+#--------------------------------
+ pod:
+ - baremetal:
+ slave-label: daisy-baremetal
+ <<: *master
+ - virtual:
+ slave-label: daisy-virtual
+ <<: *master
+#--------------------------------
+# None-CI PODs
+#--------------------------------
+
+#--------------------------------
+# scenarios
+#--------------------------------
+ scenario:
+ # HA scenarios
+ - 'os-nosdn-nofeature-ha':
+ auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
+ # NOHA scenarios
+ - 'os-nosdn-nofeature-noha':
+ auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
+
+ jobs:
+ - '{project}-{scenario}-{pod}-daily-{stream}'
+ - '{project}-deploy-{pod}-daily-{stream}'
+
+########################
+# job templates
+########################
+- job-template:
+ name: '{project}-{scenario}-{pod}-daily-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ concurrent: false
+
+ properties:
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 4
+ max-per-node: 1
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'daisy.*-deploy-({pod})?-daily-.*'
+ block-level: 'NODE'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+ triggers:
+ - '{auto-trigger-name}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - '{installer}-defaults'
+ - '{slave-label}-defaults':
+ installer: '{installer}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: '{scenario}'
+ - 'daisy-project-parameter':
+ gs-pathname: '{gs-pathname}'
+
+ builders:
+ - description-setter:
+ description: "POD: $NODE_NAME"
+ - trigger-builds:
+ - project: 'daisy-deploy-{pod}-daily-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ same-node: true
+ block: true
+ - trigger-builds:
+ - project: 'functest-daisy-{pod}-daily-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ same-node: true
+ block: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+
+- job-template:
+ name: '{project}-deploy-{pod}-daily-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ concurrent: true
+
+ properties:
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 4
+ max-per-node: 1
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'daisy.*-deploy-({pod})?-daily-.*'
+ block-level: 'NODE'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - '{installer}-defaults'
+ - '{slave-label}-defaults':
+ installer: '{installer}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-nosdn-nofeature-ha'
+ - 'daisy-project-parameter':
+ gs-pathname: '{gs-pathname}'
+ - string:
+ name: DEPLOY_TIMEOUT
+ default: '150'
+ description: 'Deployment timeout in minutes'
+
+ scm:
+ - git-scm
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+ builders:
+ - description-setter:
+ description: "POD: $NODE_NAME"
+ - shell:
+ !include-raw-escape: ./daisy4nfv-download-artifact.sh
+ - shell:
+ !include-raw-escape: ./daisy-deploy.sh
+
+
+########################
+# trigger macros
+########################
+#-----------------------------------------------
+# Triggers for job running on daisy-baremetal against master branch
+#-----------------------------------------------
+# HA Scenarios
+- trigger:
+ name: 'daisy-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
+ triggers:
+ - timed: ''
+# NOHA Scenarios
+- trigger:
+ name: 'daisy-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
+ triggers:
+ - timed: ''
+#-----------------------------------------------
+# Triggers for job running on daisy-virtual against master branch
+#-----------------------------------------------
+- trigger:
+ name: 'daisy-os-nosdn-nofeature-ha-virtual-daily-master-trigger'
+ triggers:
+ - timed: ''
+# NOHA Scenarios
+- trigger:
+ name: 'daisy-os-nosdn-nofeature-noha-virtual-daily-master-trigger'
+ triggers:
+ - timed: 'H 8,22 * * *'
+
diff --git a/jjb/daisy4nfv/daisy-deploy.sh b/jjb/daisy4nfv/daisy-deploy.sh
new file mode 100755
index 000000000..b512e3f60
--- /dev/null
+++ b/jjb/daisy4nfv/daisy-deploy.sh
@@ -0,0 +1,63 @@
+#!/bin/bash
+set -o nounset
+set -o pipefail
+
+echo "--------------------------------------------------------"
+echo "This is $INSTALLER_TYPE deploy job!"
+echo "--------------------------------------------------------"
+
+DEPLOY_SCENARIO=${DEPLOY_SCENARIO:-"os-nosdn-nofeature-ha"}
+BRIDGE=${BRIDGE:-pxebr}
+LAB_NAME=${NODE_NAME/-*}
+POD_NAME=${NODE_NAME/*-}
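+# e.g. NODE_NAME="zte-virtual1" gives LAB_NAME="zte" and POD_NAME="virtual1"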
+deploy_ret=0
+
+if [[ ! "$NODE_NAME" =~ "-virtual" ]] && [[ ! "$LAB_NAME" =~ (zte) ]]; then
+ echo "Unsupported lab $LAB_NAME for now, Cannot continue!"
+ exit $deploy_ret
+fi
+
+# clone the securedlab repo
+cd $WORKSPACE
+BASE_DIR=$(cd ./;pwd)
+
+echo "Cloning securedlab repo $BRANCH"
+git clone ssh://jenkins-zte@gerrit.opnfv.org:29418/securedlab --quiet \
+ --branch $BRANCH
+
+# daisy ci/deploy/deploy.sh uses the $BASE_DIR/labs dir
+cp -r securedlab/labs .
+
+DEPLOY_COMMAND="sudo ./ci/deploy/deploy.sh -b $BASE_DIR \
+ -l $LAB_NAME -p $POD_NAME -B $BRIDGE"
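+# e.g. with the values above this ends up as something like (illustrative):
+#   sudo ./ci/deploy/deploy.sh -b $WORKSPACE -l zte -p virtual1 -B pxebr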
+
+# log info to console
+echo """
+Deployment parameters
+--------------------------------------------------------
+Scenario: $DEPLOY_SCENARIO
+LAB: $LAB_NAME
+POD: $POD_NAME
+BRIDGE: $BRIDGE
+BASE_DIR: $BASE_DIR
+
+Starting the deployment using $INSTALLER_TYPE. This could take some time...
+--------------------------------------------------------
+Issuing command
+$DEPLOY_COMMAND
+"""
+
+# start the deployment
+$DEPLOY_COMMAND
+
+if [ $? -ne 0 ]; then
+ echo
+ echo "Depolyment failed!"
+ deploy_ret=1
+else
+ echo
+ echo "--------------------------------------------------------"
+ echo "Deployment done!"
+fi
+
+exit $deploy_ret
diff --git a/jjb/daisy4nfv/daisy-project-jobs.yml b/jjb/daisy4nfv/daisy-project-jobs.yml
index 156740980..9a57e1753 100644
--- a/jjb/daisy4nfv/daisy-project-jobs.yml
+++ b/jjb/daisy4nfv/daisy-project-jobs.yml
@@ -23,7 +23,7 @@
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
phase:
- 'build':
@@ -196,7 +196,7 @@
- shell:
!include-raw: ./daisy4nfv-download-artifact.sh
- shell:
- !include-raw: ./daisy4nfv-deploy.sh
+ !include-raw: ./daisy-deploy.sh
- builder:
name: 'daisy-test-daily-macro'
diff --git a/jjb/daisy4nfv/daisy4nfv-build.sh b/jjb/daisy4nfv/daisy4nfv-build.sh
index eb29fed72..375d80733 100755
--- a/jjb/daisy4nfv/daisy4nfv-build.sh
+++ b/jjb/daisy4nfv/daisy4nfv-build.sh
@@ -26,6 +26,7 @@ cd $WORKSPACE
echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin"
+ echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $OUTPUT_DIR/opnfv-$OPNFV_ARTIFACT_VERSION.bin | cut -d' ' -f1)"
echo "OPNFV_BUILD_URL=$BUILD_URL"
) > $WORKSPACE/opnfv.properties
diff --git a/jjb/daisy4nfv/daisy4nfv-deploy.sh b/jjb/daisy4nfv/daisy4nfv-deploy.sh
deleted file mode 100755
index cc2c10388..000000000
--- a/jjb/daisy4nfv/daisy4nfv-deploy.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-echo "Daisy deployment WIP"
diff --git a/jjb/daisy4nfv/daisy4nfv-download-artifact.sh b/jjb/daisy4nfv/daisy4nfv-download-artifact.sh
index 90b5fa62f..1cc0443ad 100755
--- a/jjb/daisy4nfv/daisy4nfv-download-artifact.sh
+++ b/jjb/daisy4nfv/daisy4nfv-download-artifact.sh
@@ -12,7 +12,7 @@ set -o errexit
set -o pipefail
# use proxy url to replace the nomral URL, for googleusercontent.com will be blocked randomly
-[[ "$NODE_NAME" =~ (zte) ]] && GS_URL=$GS_BASE_PROXY
+[[ "$NODE_NAME" =~ (zte) ]] && GS_URL=${GS_BASE_PROXY%%/*}/$GS_URL
if [[ "$JOB_NAME" =~ "merge" ]]; then
echo "Downloading http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties"
@@ -36,6 +36,25 @@ echo "Using $OPNFV_ARTIFACT for deployment"
[[ "$NODE_NAME" =~ (zte) ]] && OPNFV_ARTIFACT_URL=${GS_BASE_PROXY%%/*}/$OPNFV_ARTIFACT_URL
+if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
+ # check if we already have the image to avoid redownload
+ BINSTORE="/bin_mount/opnfv_ci/${BRANCH##*/}"
+ if [[ -f "$BINSTORE/$OPNFV_ARTIFACT" && ! -z $OPNFV_ARTIFACT_SHA512SUM ]]; then
+ echo "BIN exists locally. Starting to check the sha512sum."
+ if [[ $OPNFV_ARTIFACT_SHA512SUM = $(sha512sum -b $BINSTORE/$OPNFV_ARTIFACT | cut -d' ' -f1) ]]; then
+ echo "Sha512sum is verified. Skipping the download and using the file from BIN store."
+ ln -s $BINSTORE/$OPNFV_ARTIFACT $WORKSPACE/opnfv.bin
+ echo "--------------------------------------------------------"
+ echo
+ ls -al $WORKSPACE/opnfv.bin
+ echo
+ echo "--------------------------------------------------------"
+ echo "Done!"
+ exit 0
+ fi
+ fi
+fi
+
# log info to console
echo "Downloading the $INSTALLER_TYPE artifact using URL http://$OPNFV_ARTIFACT_URL"
echo "This could take some time..."
@@ -43,7 +62,7 @@ echo "--------------------------------------------------------"
echo
# download the file
-curl -s -o $WORKSPACE/opnfv.bin http://$OPNFV_ARTIFACT_URL > gsutil.bin.log 2>&1
+curl -L -s -o $WORKSPACE/opnfv.bin http://$OPNFV_ARTIFACT_URL > gsutil.bin.log 2>&1
# list the file
ls -al $WORKSPACE/opnfv.bin
diff --git a/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml b/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml
index a6659b2bf..11531f4a4 100644
--- a/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml
+++ b/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml
@@ -21,7 +21,7 @@
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
#####################################
# patch merge phases
#####################################
@@ -84,6 +84,7 @@
pattern: 'code/**'
- compare-type: ANT
pattern: 'deploy/**'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**'
@@ -193,7 +194,7 @@
- shell:
!include-raw: ./daisy4nfv-download-artifact.sh
- shell:
- !include-raw: ./daisy4nfv-virtual-deploy.sh
+ !include-raw: ./daisy-deploy.sh
- shell:
!include-raw: ./daisy4nfv-workspace-cleanup.sh
diff --git a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
index ee82c14b2..ee78ab59f 100644
--- a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
+++ b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
@@ -21,7 +21,7 @@
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
#####################################
# patch verification phases
#####################################
@@ -88,6 +88,7 @@
pattern: 'code/**'
- compare-type: ANT
pattern: 'deploy/**'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**'
diff --git a/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh b/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh
deleted file mode 100755
index ef4a07b8d..000000000
--- a/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-
-echo "--------------------------------------------------------"
-echo "This is diasy4nfv virtual deploy job!"
-echo "--------------------------------------------------------"
-
-cd $WORKSPACE
-
-if [[ "$NODE_NAME" =~ "-virtual" ]]; then
- export NETWORK_CONF=./deploy/config/vm_environment/$NODE_NAME/network.yml
- export DHA_CONF=./deploy/config/vm_environment/$NODE_NAME/deploy.yml
-else
- # TODO: For the time being, we need to pass this script to let contributors merge their work.
- echo "No support for non-virtual node"
- exit 0
-fi
-
-sudo ./ci/deploy/deploy.sh -d ${DHA_CONF} -n ${NETWORK_CONF} -p ${NODE_NAME:-"zte-virtual1"}
-
-if [ $? -ne 0 ]; then
- echo "depolyment failed!"
- deploy_ret=1
-fi
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
-
-exit $deploy_ret
diff --git a/jjb/doctor/doctor.yml b/jjb/doctor/doctor.yml
index 2333fca14..c677ef96e 100644
--- a/jjb/doctor/doctor.yml
+++ b/jjb/doctor/doctor.yml
@@ -22,9 +22,9 @@
- fuel:
slave-label: 'ool-virtual2'
pod: 'ool-virtual2'
- - joid:
- slave-label: 'ool-virtual3'
- pod: 'ool-virtual3'
+ #- joid:
+ # slave-label: 'ool-virtual3'
+ # pod: 'ool-virtual3'
inspector:
- 'sample'
@@ -145,11 +145,16 @@
builders:
- 'clean-workspace-log'
+ - shell: |
+ # NOTE: Create a symbolic link so that we can archive files outside
+ # of $WORKSPACE.
+ # NOTE: We are printing all logs under 'tests/' during the test run,
+ # so this symbolic link should not be in 'tests/'. Otherwise,
+ # we'll have the same log twice in the Jenkins console log.
+ ln -sfn $HOME/opnfv/functest/results/{stream} functest_results
- 'functest-suite-builder'
- shell: |
functest_log="$HOME/opnfv/functest/results/{stream}/{project}.log"
- to_be_archived="$WORKSPACE/tests/functest-{project}.log"
- cp $functest_log $to_be_archived
# NOTE: checking the test result, as the previous job could return
# 0 regardless the result of doctor test scenario.
grep -e ' OK$' $functest_log || exit 1
@@ -157,6 +162,8 @@
publishers:
- archive:
artifacts: 'tests/*.log'
+ - archive:
+ artifacts: 'functest_results/{project}.log'
#####################################
diff --git a/jjb/domino/domino.yml b/jjb/domino/domino.yml
index 5fd9db3f1..8c9be120b 100644
--- a/jjb/domino/domino.yml
+++ b/jjb/domino/domino.yml
@@ -49,6 +49,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
diff --git a/jjb/dovetail/dovetail-ci-jobs.yml b/jjb/dovetail/dovetail-ci-jobs.yml
index e2a334d40..0bd32a4ab 100644
--- a/jjb/dovetail/dovetail-ci-jobs.yml
+++ b/jjb/dovetail/dovetail-ci-jobs.yml
@@ -20,8 +20,8 @@
dovetail-branch: '{stream}'
gs-pathname: ''
docker-tag: 'latest'
- colorado: &colorado
- stream: colorado
+ danube: &danube
+ stream: danube
branch: 'stable/{stream}'
dovetail-branch: master
gs-pathname: '/{stream}'
@@ -54,12 +54,12 @@
slave-label: fuel-baremetal
SUT: fuel
auto-trigger-name: 'daily-trigger-disabled'
- <<: *colorado
+ <<: *danube
- virtual:
slave-label: fuel-virtual
SUT: fuel
auto-trigger-name: 'daily-trigger-disabled'
- <<: *colorado
+ <<: *danube
#compass CI PODs
- baremetal:
slave-label: compass-baremetal
@@ -75,33 +75,29 @@
slave-label: compass-baremetal
SUT: compass
auto-trigger-name: 'daily-trigger-disabled'
- <<: *colorado
+ <<: *danube
- virtual:
slave-label: compass-virtual
SUT: compass
auto-trigger-name: 'daily-trigger-disabled'
- <<: *colorado
-#apex CI PODs
- - apex-verify-master:
- slave-label: '{pod}'
- SUT: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - apex-daily-master:
+ <<: *danube
+#--------------------------------
+# Installers not using labels
+# CI PODs
+# This section should only contain the installers
+# that have not been switched using labels for slaves
+#--------------------------------
+#apex PODs
+ - lf-pod1:
slave-label: '{pod}'
SUT: apex
auto-trigger-name: 'daily-trigger-disabled'
<<: *master
- - apex-verify-colorado:
- slave-label: '{pod}'
- SUT: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *colorado
- - apex-daily-colorado:
+ - lf-pod1:
slave-label: '{pod}'
SUT: apex
auto-trigger-name: 'daily-trigger-disabled'
- <<: *colorado
+ <<: *danube
#armband CI PODs
- armband-baremetal:
slave-label: armband-baremetal
@@ -117,12 +113,12 @@
slave-label: armband-baremetal
SUT: fuel
auto-trigger-name: 'daily-trigger-disabled'
- <<: *colorado
+ <<: *danube
- armband-virtual:
slave-label: armband-virtual
SUT: fuel
auto-trigger-name: 'daily-trigger-disabled'
- <<: *colorado
+ <<: *danube
#--------------------------------
# None-CI PODs
#--------------------------------
diff --git a/jjb/dpacc/dpacc.yml b/jjb/dpacc/dpacc.yml
index bc61d7447..63eb044ad 100644
--- a/jjb/dpacc/dpacc.yml
+++ b/jjb/dpacc/dpacc.yml
@@ -53,6 +53,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
diff --git a/jjb/escalator/escalator.yml b/jjb/escalator/escalator.yml
index 2265dafce..041a41f91 100644
--- a/jjb/escalator/escalator.yml
+++ b/jjb/escalator/escalator.yml
@@ -73,6 +73,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
@@ -185,6 +186,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
diff --git a/jjb/fuel/fuel-basic-exp.sh b/jjb/fuel/fuel-basic-exp.sh
deleted file mode 100755
index a70a0c765..000000000
--- a/jjb/fuel/fuel-basic-exp.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/bash
-set -o nounset
-
-echo "-----------------------------------------------------------------------"
-echo $GERRIT_CHANGE_COMMIT_MESSAGE
-echo "-----------------------------------------------------------------------"
-
-# proposal for specifying the scenario name in commit message
-# currently only 1 scenario name is supported but depending on
-# the need, it can be expanded, supporting multiple scenarios
-# using comma separated list or something
-SCENARIO_NAME_PATTERN="(?<=@scenario:).*?(?=@)"
-SCENARIO_NAME=(echo $GERRIT_CHANGE_COMMIT_MESSAGE | grep -oP "$SCENARIO_NAME_PATTERN")
-if [[ $? -ne 0 ]]; then
- echo "The patch verification will be done only with build!"
-else
- echo "Will run full verification; build, deploy, and smoke test using scenario $SCENARIO_NAME"
-fi
diff --git a/jjb/fuel/fuel-build-exp.sh b/jjb/fuel/fuel-build-exp.sh
deleted file mode 100755
index f7f613dc0..000000000
--- a/jjb/fuel/fuel-build-exp.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
- JOB_TYPE=${BASH_REMATCH[0]}
-else
- echo "Unable to determine job type!"
- exit 1
-fi
-
-echo "Not activated!"
diff --git a/jjb/fuel/fuel-daily-jobs.yml b/jjb/fuel/fuel-daily-jobs.yml
index f78c4a317..36f3ce414 100644
--- a/jjb/fuel/fuel-daily-jobs.yml
+++ b/jjb/fuel/fuel-daily-jobs.yml
@@ -18,7 +18,7 @@
danube: &danube
stream: danube
branch: 'stable/{stream}'
- disabled: true
+ disabled: false
gs-pathname: '/{stream}'
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
@@ -106,6 +106,8 @@
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-nosdn-kvm_ovs_dpdk-noha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
+ - 'os-nosdn-kvm_ovs_dpdk_bar-noha':
+ auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
jobs:
- 'fuel-{scenario}-{pod}-daily-{stream}'
@@ -132,6 +134,7 @@
use-build-blocker: true
blocking-jobs:
- 'fuel-os-.*?-{pod}-daily-.*'
+ - 'fuel-os-.*?-{pod}-weekly-.*'
block-level: 'NODE'
wrappers:
@@ -156,7 +159,7 @@
builders:
- description-setter:
- description: "POD: $NODE_NAME"
+ description: "Built on $NODE_NAME"
- trigger-builds:
- project: 'fuel-deploy-{pod}-daily-{stream}'
current-parameters: false
@@ -210,6 +213,8 @@
blocking-jobs:
- 'fuel-deploy-{pod}-daily-.*'
- 'fuel-deploy-generic-daily-.*'
+ - 'fuel-deploy-{pod}-weekly-.*'
+ - 'fuel-deploy-generic-weekly-.*'
block-level: 'NODE'
parameters:
@@ -238,7 +243,7 @@
builders:
- description-setter:
- description: "POD: $NODE_NAME"
+ description: "Built on $NODE_NAME"
- shell:
!include-raw-escape: ./fuel-download-artifact.sh
- shell:
@@ -357,7 +362,11 @@
- trigger:
name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-baremetal-daily-master-trigger'
triggers:
- - timed: '30 16 * * *'
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-baremetal-daily-master-trigger'
+ triggers:
+ - timed: ''
#-----------------------------------------------
# Triggers for job running on fuel-baremetal against danube branch
#-----------------------------------------------
@@ -447,6 +456,10 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-baremetal-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-baremetal-daily-danube-trigger'
+ triggers:
+ - timed: ''
#-----------------------------------------------
# Triggers for job running on fuel-virtual against master branch
#-----------------------------------------------
@@ -534,7 +547,11 @@
- trigger:
name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-master-trigger'
triggers:
- - timed: ''
+ - timed: '30 16 * * *'
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-virtual-daily-master-trigger'
+ triggers:
+ - timed: '30 20 * * *'
#-----------------------------------------------
# Triggers for job running on fuel-virtual against danube branch
#-----------------------------------------------
@@ -623,6 +640,10 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-virtual-daily-danube-trigger'
+ triggers:
+ - timed: ''
#-----------------------------------------------
# ZTE POD1 Triggers running against master branch
#-----------------------------------------------
@@ -711,6 +732,10 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod1-daily-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-master-trigger'
+ triggers:
+ - timed: ''
#-----------------------------------------------
# ZTE POD2 Triggers running against master branch
@@ -800,6 +825,10 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod2-daily-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
#-----------------------------------------------
# ZTE POD3 Triggers running against master branch
#-----------------------------------------------
@@ -888,6 +917,10 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod3-daily-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod3-daily-master-trigger'
+ triggers:
+ - timed: ''
#-----------------------------------------------
# ZTE POD1 Triggers running against danube branch
#-----------------------------------------------
@@ -976,6 +1009,10 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-danube-trigger'
+ triggers:
+ - timed: ''
#-----------------------------------------------
# ZTE POD2 Triggers running against danube branch
@@ -1065,6 +1102,10 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod2-daily-danube-trigger'
+ triggers:
+ - timed: ''
#-----------------------------------------------
# ZTE POD3 Triggers running against danube branch
#-----------------------------------------------
@@ -1153,3 +1194,7 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod3-daily-danube-trigger'
+ triggers:
+ - timed: ''
diff --git a/jjb/fuel/fuel-deploy-exp.sh b/jjb/fuel/fuel-deploy-exp.sh
deleted file mode 100755
index f7f613dc0..000000000
--- a/jjb/fuel/fuel-deploy-exp.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
- JOB_TYPE=${BASH_REMATCH[0]}
-else
- echo "Unable to determine job type!"
- exit 1
-fi
-
-echo "Not activated!"
diff --git a/jjb/fuel/fuel-deploy.sh b/jjb/fuel/fuel-deploy.sh
index 4efccd611..f5bbd1818 100755
--- a/jjb/fuel/fuel-deploy.sh
+++ b/jjb/fuel/fuel-deploy.sh
@@ -95,7 +95,7 @@ echo "Deployment is done!"
# upload logs for baremetal deployments
# work with virtual deployments is still going on so we skip that for the timebeing
-if [[ "$JOB_NAME" =~ "baremetal-daily" ]]; then
+if [[ "$JOB_NAME" =~ (baremetal-daily|baremetal-weekly) ]]; then
echo "Uploading deployment logs"
gsutil cp $WORKSPACE/$FUEL_LOG_FILENAME gs://$GS_URL/logs/$FUEL_LOG_FILENAME > /dev/null 2>&1
echo "Logs are available as http://$GS_URL/logs/$FUEL_LOG_FILENAME"
diff --git a/jjb/fuel/fuel-project-jobs.yml b/jjb/fuel/fuel-project-jobs.yml
index 32ad8907e..1f0ddd363 100644
--- a/jjb/fuel/fuel-project-jobs.yml
+++ b/jjb/fuel/fuel-project-jobs.yml
@@ -16,7 +16,7 @@
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
jobs:
- 'fuel-build-daily-{stream}'
@@ -125,6 +125,7 @@
pattern: 'build/**'
- compare-type: ANT
pattern: 'deploy/**'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**'
@@ -193,6 +194,7 @@
pattern: 'build/**'
- compare-type: ANT
pattern: 'deploy/**'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**'
diff --git a/jjb/fuel/fuel-smoke-test-exp.sh b/jjb/fuel/fuel-smoke-test-exp.sh
deleted file mode 100755
index f7f613dc0..000000000
--- a/jjb/fuel/fuel-smoke-test-exp.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
- JOB_TYPE=${BASH_REMATCH[0]}
-else
- echo "Unable to determine job type!"
- exit 1
-fi
-
-echo "Not activated!"
diff --git a/jjb/fuel/fuel-verify-jobs-experimental.yml b/jjb/fuel/fuel-verify-jobs-experimental.yml
deleted file mode 100644
index ae6458021..000000000
--- a/jjb/fuel/fuel-verify-jobs-experimental.yml
+++ /dev/null
@@ -1,255 +0,0 @@
-- project:
- # TODO: rename the project name
- # TODO: get rid of appended -exp from the remainder of the file
- name: 'fuel-verify-jobs-experimental'
-
- project: 'fuel'
-
- installer: 'fuel'
-#------------------------------------
-# branch definitions
-#------------------------------------
- # TODO: enable master once things settle
- stream-exp:
- - experimental:
- branch: 'stable/{stream-exp}'
- gs-pathname: '/{stream-exp}'
- disabled: false
-#------------------------------------
-# patch verification phases
-#------------------------------------
- phase:
- - 'basic':
- # this phase does basic commit message check, unit test and so on
- slave-label: 'opnfv-build'
- - 'build':
- # this phase builds artifacts if valid for given installer
- slave-label: 'opnfv-build-ubuntu'
- - 'deploy-virtual':
- # this phase does virtual deployment using the artifacts produced in previous phase
- slave-label: 'fuel-virtual'
- - 'smoke-test':
- # this phase runs functest smoke test
- slave-label: 'fuel-virtual'
-#------------------------------------
-# jobs
-#------------------------------------
- jobs:
- - 'fuel-verify-{stream-exp}'
- - 'fuel-verify-{phase}-{stream-exp}'
-#------------------------------------
-# job templates
-#------------------------------------
-- job-template:
- name: 'fuel-verify-{stream-exp}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- # TODO: this is valid for experimental only
- # enable concurrency for master once things settle
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- option: 'project'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'ci/**'
- - compare-type: ANT
- pattern: 'build/**'
- - compare-type: ANT
- pattern: 'deploy/**'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
- readable-message: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-defaults'
- - 'fuel-verify-defaults-exp':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: basic
- condition: SUCCESSFUL
- projects:
- - name: 'fuel-verify-basic-{stream-exp}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'fuel-verify-build-{stream-exp}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'fuel-verify-deploy-virtual-{stream-exp}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: smoke-test
- condition: SUCCESSFUL
- projects:
- - name: 'fuel-verify-smoke-test-{stream-exp}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
-
-- job-template:
- name: 'fuel-verify-{phase}-{stream-exp}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 6
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-verify-deploy-.*'
- - 'fuel-verify-test-.*'
- block-level: 'NODE'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-defaults'
- - '{installer}-defaults'
- - 'fuel-verify-defaults-exp':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-verify-{phase}-macro-exp'
-#------------------------------------
-# builder macros
-#------------------------------------
-- builder:
- name: 'fuel-verify-basic-macro-exp'
- builders:
- - shell:
- !include-raw: ./fuel-basic-exp.sh
-
-- builder:
- name: 'fuel-verify-build-macro-exp'
- builders:
- - shell:
- !include-raw: ./fuel-build-exp.sh
- - shell:
- !include-raw: ./fuel-workspace-cleanup.sh
-
-- builder:
- name: 'fuel-verify-deploy-virtual-macro-exp'
- builders:
- - shell:
- !include-raw: ./fuel-deploy-exp.sh
-
-- builder:
- name: 'fuel-verify-smoke-test-macro-exp'
- builders:
- - shell:
- !include-raw: ./fuel-smoke-test-exp.sh
-#------------------------------------
-# parameter macros
-#------------------------------------
-- parameter:
- name: 'fuel-verify-defaults-exp'
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/cache/$INSTALLER_TYPE
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/fuel/fuel-verify-jobs.yml b/jjb/fuel/fuel-verify-jobs.yml
index 7f9eff04d..549f7dafa 100644
--- a/jjb/fuel/fuel-verify-jobs.yml
+++ b/jjb/fuel/fuel-verify-jobs.yml
@@ -15,7 +15,7 @@
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
#####################################
# patch verification phases
#####################################
@@ -88,6 +88,7 @@
pattern: 'build/**'
- compare-type: ANT
pattern: 'deploy/**'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**'
diff --git a/jjb/fuel/fuel-weekly-jobs.yml b/jjb/fuel/fuel-weekly-jobs.yml
new file mode 100644
index 000000000..bd42ed85c
--- /dev/null
+++ b/jjb/fuel/fuel-weekly-jobs.yml
@@ -0,0 +1,210 @@
+# jenkins job templates for Fuel
+- project:
+
+ name: fuel-weekly
+
+ project: fuel
+
+ installer: fuel
+
+#--------------------------------
+# BRANCH ANCHORS
+#--------------------------------
+ master: &master
+ stream: master
+ branch: '{stream}'
+ disabled: false
+ gs-pathname: ''
+ danube: &danube
+ stream: danube
+ branch: 'stable/{stream}'
+ disabled: false
+ gs-pathname: '/{stream}'
+#--------------------------------
+# POD, INSTALLER, AND BRANCH MAPPING
+#--------------------------------
+# CI PODs
+#--------------------------------
+ pod:
+ - baremetal:
+ slave-label: fuel-baremetal
+ <<: *master
+ - virtual:
+ slave-label: fuel-virtual
+ <<: *master
+ - baremetal:
+ slave-label: fuel-baremetal
+ <<: *danube
+ - virtual:
+ slave-label: fuel-virtual
+ <<: *danube
+#--------------------------------
+# scenarios
+#--------------------------------
+ scenario:
+ # HA scenarios
+ - 'os-nosdn-nofeature-ha':
+ auto-trigger-name: 'weekly-trigger-disabled'
+
+ jobs:
+ - 'fuel-{scenario}-{pod}-weekly-{stream}'
+ - 'fuel-deploy-{pod}-weekly-{stream}'
+
+########################
+# job templates
+########################
+- job-template:
+ name: 'fuel-{scenario}-{pod}-weekly-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ concurrent: false
+
+ properties:
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 4
+ max-per-node: 1
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'fuel-os-.*?-{pod}-daily-.*'
+ - 'fuel-os-.*?-{pod}-weekly-.*'
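+ # block against both the daily and weekly fuel jobs for this pod so runs never overlap on a node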
+ block-level: 'NODE'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+ triggers:
+ - '{auto-trigger-name}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - '{installer}-defaults'
+ - '{slave-label}-defaults':
+ installer: '{installer}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: '{scenario}'
+ - fuel-weekly-parameter:
+ gs-pathname: '{gs-pathname}'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - trigger-builds:
+ - project: 'fuel-deploy-{pod}-weekly-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ same-node: true
+ block: true
+ - trigger-builds:
+ - project: 'functest-fuel-{pod}-weekly-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ same-node: true
+ block: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+
+ publishers:
+ - email:
+ recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
+
+- job-template:
+ name: 'fuel-deploy-{pod}-weekly-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ concurrent: true
+
+ properties:
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 4
+ max-per-node: 1
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'fuel-deploy-{pod}-daily-.*'
+ - 'fuel-deploy-generic-daily-.*'
+ - 'fuel-deploy-{pod}-weekly-.*'
+ - 'fuel-deploy-generic-weekly-.*'
+ block-level: 'NODE'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - '{installer}-defaults'
+ - '{slave-label}-defaults':
+ installer: '{installer}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-odl_l2-nofeature-ha'
+ - fuel-weekly-parameter:
+ gs-pathname: '{gs-pathname}'
+ - string:
+ name: DEPLOY_TIMEOUT
+ default: '150'
+ description: 'Deployment timeout in minutes'
+
+ scm:
+ - git-scm
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - shell:
+ !include-raw-escape: ./fuel-download-artifact.sh
+ - shell:
+ !include-raw-escape: ./fuel-deploy.sh
+
+ publishers:
+ - email:
+ recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
+
+########################
+# parameter macros
+########################
+- parameter:
+ name: fuel-weekly-parameter
+ parameters:
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
+ - string:
+ name: CACHE_DIRECTORY
+ default: $HOME/opnfv/cache/$INSTALLER_TYPE
+ description: "Directory where the cache to be used during the build is located."
+ - string:
+ name: GS_URL
+ default: artifacts.opnfv.org/$PROJECT{gs-pathname}
+ description: "URL to Google Storage."
+########################
+# trigger macros
+########################
+#-----------------------------------------------
+# Triggers for job running on fuel-baremetal against master branch
+#-----------------------------------------------
+# HA Scenarios
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-ha-baremetal-weekly-master-trigger'
+ triggers:
+ - timed: ''
diff --git a/jjb/functest/functest-ci-jobs.yml b/jjb/functest/functest-daily-jobs.yml
index 49901bea2..a3268d3e5 100644
--- a/jjb/functest/functest-ci-jobs.yml
+++ b/jjb/functest/functest-daily-jobs.yml
@@ -2,9 +2,9 @@
# job configuration for functest
###################################
- project:
- name: functest
+ name: functest-daily
- project: '{name}'
+ project: functest
#--------------------------------
# BRANCH ANCHORS
@@ -88,14 +88,14 @@
slave-label: '{pod}'
installer: apex
<<: *master
-# - apex-verify-danube:
-# slave-label: '{pod}'
-# installer: apex
-# <<: *danube
-# - apex-daily-danube:
-# slave-label: '{pod}'
-# installer: apex
-# <<: *danube
+ - apex-verify-danube:
+ slave-label: '{pod}'
+ installer: apex
+ <<: *danube
+ - apex-daily-danube:
+ slave-label: '{pod}'
+ installer: apex
+ <<: *danube
# armband CI PODs
- armband-baremetal:
slave-label: armband-baremetal
@@ -113,6 +113,15 @@
slave-label: armband-virtual
installer: fuel
<<: *danube
+# daisy CI PODs
+ - baremetal:
+ slave-label: daisy-baremetal
+ installer: daisy
+ <<: *master
+ - virtual:
+ slave-label: daisy-virtual
+ installer: daisy
+ <<: *master
# netvirt 3rd party ci
- virtual:
slave-label: odl-netvirt-virtual
@@ -149,6 +158,10 @@
slave-label: '{pod}'
installer: fuel
<<: *master
+ - arm-pod3-2:
+ slave-label: '{pod}'
+ installer: fuel
+ <<: *master
- zte-pod1:
slave-label: '{pod}'
installer: fuel
@@ -177,6 +190,10 @@
slave-label: '{pod}'
installer: fuel
<<: *danube
+ - arm-pod3-2:
+ slave-label: '{pod}'
+ installer: fuel
+ <<: *danube
# PODs for verify jobs triggered by each patch upload
- ool-virtual1:
slave-label: '{pod}'
@@ -189,8 +206,6 @@
job-timeout: 60
- 'daily':
job-timeout: 180
- - 'weekly':
- job-timeout: 400
jobs:
- 'functest-{installer}-{pod}-{testsuite}-{stream}'
@@ -243,7 +258,7 @@
builders:
- description-setter:
- description: "POD: $NODE_NAME"
+ description: "Built on $NODE_NAME"
- 'functest-{testsuite}-builder'
########################
@@ -257,13 +272,6 @@
default: 'daily'
description: "Daily suite name to run"
- parameter:
- name: functest-weekly-parameter
- parameters:
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'weekly'
- description: "Weekly suite name to run"
-- parameter:
name: functest-suite-parameter
parameters:
- choice:
@@ -275,6 +283,7 @@
- 'tempest_smoke_serial'
- 'rally_sanity'
- 'odl'
+ - 'odl_netvirt'
- 'onos'
- 'promise'
- 'doctor'
@@ -333,22 +342,11 @@
- 'functest-exit'
- builder:
- name: functest-weekly-builder
- builders:
- - 'functest-cleanup'
- - 'set-functest-env'
- - 'functest-weekly'
- - 'functest-store-results'
- - 'functest-exit'
-
-- builder:
name: functest-suite-builder
builders:
- 'functest-cleanup'
- 'set-functest-env'
- 'functest-suite'
- - 'functest-store-results'
- - 'functest-exit'
- builder:
name: functest-daily
@@ -356,11 +354,6 @@
- shell:
!include-raw: ./functest-loop.sh
-- builder:
- name: functest-weekly
- builders:
- - shell:
- !include-raw: ./functest-loop.sh
- builder:
name: functest-suite
diff --git a/jjb/functest/functest-project-jobs.yml b/jjb/functest/functest-project-jobs.yml
index 42c19a777..14ad73a91 100644
--- a/jjb/functest/functest-project-jobs.yml
+++ b/jjb/functest/functest-project-jobs.yml
@@ -53,6 +53,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
diff --git a/jjb/functest/functest-suite.sh b/jjb/functest/functest-suite.sh
index f28d3d037..228cc3da4 100755
--- a/jjb/functest/functest-suite.sh
+++ b/jjb/functest/functest-suite.sh
@@ -1,19 +1,18 @@
#!/bin/bash
-set -e
-echo "Functest: run $FUNCTEST_SUITE_NAME on branch $BRANCH"
-if [[ "$BRANCH" =~ 'brahmaputra' ]]; then
- cmd="${FUNCTEST_REPO_DIR}/docker/run_tests.sh --test $FUNCTEST_SUITE_NAME"
-elif [[ "$BRANCH" =~ 'colorado' ]]; then
- cmd="python ${FUNCTEST_REPO_DIR}/ci/run_tests.py -t $FUNCTEST_SUITE_NAME"
-else
- cmd="functest testcase run $FUNCTEST_SUITE_NAME"
-fi
container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1)
-docker exec $container_id $cmd
+if [ -z "$container_id" ]; then
+ echo "Functest container not found"
+ exit 1
+fi
+
+global_ret_val=0
-ret_value=$?
-ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
-echo ${ret_value}>${ret_val_file}
+tests=($(echo $FUNCTEST_SUITE_NAME | tr "," "\n"))
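+# e.g. FUNCTEST_SUITE_NAME="tempest_smoke_serial,rally_sanity" runs the two suites one after the other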
+for test in ${tests[@]}; do
+ cmd="python /home/opnfv/repos/functest/functest/ci/run_tests.py -t $test"
+ docker exec $container_id $cmd
+ let global_ret_val+=$?
+done
-exit 0
+exit $global_ret_val
diff --git a/jjb/functest/functest-weekly-jobs.yml b/jjb/functest/functest-weekly-jobs.yml
new file mode 100644
index 000000000..f44f7b8aa
--- /dev/null
+++ b/jjb/functest/functest-weekly-jobs.yml
@@ -0,0 +1,124 @@
+###################################
+# job configuration for functest
+###################################
+- project:
+ name: functest-weekly
+
+ project: functest
+
+#--------------------------------
+# BRANCH ANCHORS
+#--------------------------------
+ master: &master
+ stream: master
+ branch: '{stream}'
+ gs-pathname: ''
+ docker-tag: 'latest'
+ disabled: false
+ danube: &danube
+ stream: danube
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ docker-tag: 'stable'
+ disabled: true
+#--------------------------------
+# POD, INSTALLER, AND BRANCH MAPPING
+#--------------------------------
+# Installers using labels
+# CI PODs
+# This section should only contain the installers
+# that have been switched using labels for slaves
+#--------------------------------
+ pod:
+# fuel CI PODs
+ - baremetal:
+ slave-label: fuel-baremetal
+ installer: fuel
+ <<: *master
+ - virtual:
+ slave-label: fuel-virtual
+ installer: fuel
+ <<: *master
+ - baremetal:
+ slave-label: fuel-baremetal
+ installer: fuel
+ <<: *danube
+ - virtual:
+ slave-label: fuel-virtual
+ installer: fuel
+ <<: *danube
+#--------------------------------
+ jobs:
+ - 'functest-{installer}-{pod}-weekly-{stream}'
+
+################################
+# job template
+################################
+- job-template:
+ name: 'functest-{installer}-{pod}-weekly-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ concurrent: true
+
+ properties:
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-per-node: 1
+ option: 'project'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER Suite: $FUNCTEST_SUITE_NAME Scenario: $DEPLOY_SCENARIO'
+ - timeout:
+ timeout: '400'
+ abort: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - '{installer}-defaults'
+ - '{slave-label}-defaults'
+ - string:
+ name: FUNCTEST_SUITE_NAME
+ default: 'weekly'
+ description: "Weekly suite name to run"
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-odl_l2-nofeature-ha'
+ - string:
+ name: DOCKER_TAG
+ default: '{docker-tag}'
+ description: 'Tag to pull docker image'
+ - string:
+ name: CLEAN_DOCKER_IMAGES
+ default: 'false'
+ description: 'Remove downloaded docker images (opnfv/functest*:*)'
+ - functest-parameter:
+ gs-pathname: '{gs-pathname}'
+
+ scm:
+ - git-scm
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - 'functest-weekly-builder'
+########################
+# builder macros
+########################
+- builder:
+ name: functest-weekly-builder
+ builders:
+ - shell:
+ !include-raw: ./functest-cleanup.sh
+ - shell:
+ !include-raw: ./set-functest-env.sh
+ - shell:
+ !include-raw: ./functest-loop.sh
+ - shell:
+ !include-raw: ../../utils/push-test-logs.sh
+ - shell:
+ !include-raw: ./functest-exit.sh
diff --git a/jjb/functest/set-functest-env.sh b/jjb/functest/set-functest-env.sh
index abec480dc..05e3d5792 100755
--- a/jjb/functest/set-functest-env.sh
+++ b/jjb/functest/set-functest-env.sh
@@ -17,32 +17,34 @@ if [[ ${RC_FILE_PATH} != '' ]] && [[ -f ${RC_FILE_PATH} ]] ; then
echo "Credentials file detected: ${RC_FILE_PATH}"
# volume if credentials file path is given to Functest
rc_file_vol="-v ${RC_FILE_PATH}:/home/opnfv/functest/conf/openstack.creds"
+ RC_FLAG=1
fi
if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
- if sudo virsh list | grep instack; then
- instack_mac=$(sudo virsh domiflist instack | grep default | \
- grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
- elif sudo virsh list | grep undercloud; then
- instack_mac=$(sudo virsh domiflist undercloud | grep default | \
+ if sudo virsh list | grep undercloud; then
+ echo "Installer VM detected"
+ undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
+ INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
+ sshkey_vol="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
+ sudo scp $ssh_options root@${INSTALLER_IP}:/home/stack/stackrc ${HOME}/stackrc
+ stackrc_vol="-v ${HOME}/stackrc:/home/opnfv/functest/conf/stackrc"
+
+ if sudo iptables -C FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
+ sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
+ fi
+ if sudo iptables -C FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
+ sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
+ fi
+ elif [[ "$RC_FLAG" == 1 ]]; then
+ echo "No available installer VM, but credentials provided...continuing"
else
- echo "No available installer VM exists...exiting"
+ echo "No available installer VM exists and no credentials provided...exiting"
exit 1
fi
- INSTALLER_IP=$(/usr/sbin/arp -e | grep ${instack_mac} | awk {'print $1'})
- sshkey_vol="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
- sudo scp $ssh_options root@${INSTALLER_IP}:/home/stack/stackrc ${HOME}/stackrc
- stackrc_vol="-v ${HOME}/stackrc:/home/opnfv/functest/conf/stackrc"
- if sudo iptables -C FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
- sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
- fi
- if sudo iptables -C FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
- sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
- fi
fi
diff --git a/jjb/global/releng-macros.yml b/jjb/global/releng-macros.yml
index 9b09e315f..63613f88d 100644
--- a/jjb/global/releng-macros.yml
+++ b/jjb/global/releng-macros.yml
@@ -61,7 +61,21 @@
choosing-strategy: 'gerrit'
refspec: '$GERRIT_REFSPEC'
<<: *git-scm-defaults
-
+- scm:
+ name: git-scm-with-submodules
+ scm:
+ - git:
+ credentials-id: '$SSH_CREDENTIAL_ID'
+ url: '$GIT_BASE'
+ refspec: ''
+ branches:
+ - 'refs/heads/{branch}'
+ skip-tag: true
+ wipe-workspace: true
+ submodule:
+ recursive: true
+ timeout: 20
+ shallow-clone: true
- trigger:
name: 'daily-trigger-disabled'
triggers:
@@ -72,7 +86,6 @@
triggers:
- timed: ''
-# NOTE: unused macro, but we may use this for some jobs.
- trigger:
name: gerrit-trigger-patchset-created
triggers:
@@ -86,12 +99,22 @@
- draft-published-event
- comment-added-contains-event:
comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
projects:
- project-compare-type: 'ANT'
project-pattern: '{project}'
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: 'ANT'
+ pattern: '{files}'
+ skip-vote:
+ successful: false
+ failed: false
+ unstable: false
+ notbuilt: false
- trigger:
name: gerrit-trigger-change-merged
@@ -108,6 +131,9 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: 'ANT'
+ pattern: '{files}'
- trigger:
name: 'experimental'
@@ -426,7 +452,7 @@
name: clean-workspace-log
builders:
- shell: |
- find $WORKSPACE -type f -print -name '*.log' | xargs rm -f
+ find $WORKSPACE -type f -name '*.log' | xargs rm -f
- publisher:
name: archive-artifacts
@@ -436,3 +462,23 @@
allow-empty: true
fingerprint: true
latest-only: true
+
+- publisher:
+ name: publish-coverage
+ publishers:
+ - cobertura:
+ report-file: "coverage.xml"
+ only-stable: "true"
+ health-auto-update: "false"
+ stability-auto-update: "false"
+ zoom-coverage-chart: "true"
+ targets:
+ - files:
+ healthy: 10
+ unhealthy: 20
+ failing: 30
+ - method:
+ healthy: 50
+ unhealthy: 40
+ failing: 30
+
diff --git a/jjb/global/slave-params.yml b/jjb/global/slave-params.yml
index 429828e8e..1905a098a 100644
--- a/jjb/global/slave-params.yml
+++ b/jjb/global/slave-params.yml
@@ -25,11 +25,11 @@
default-slaves:
- lf-pod1
- parameter:
- name: 'apex-daily-colorado-defaults'
+ name: 'apex-daily-danube-defaults'
parameters:
- label:
name: SLAVE_LABEL
- default: 'apex-daily-colorado'
+ default: 'apex-daily-danube'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -71,11 +71,11 @@
- intel-virtual4
- intel-virtual5
- parameter:
- name: 'apex-verify-colorado-defaults'
+ name: 'apex-verify-danube-defaults'
parameters:
- label:
name: SLAVE_LABEL
- default: 'apex-verify-colorado'
+ default: 'apex-verify-danube'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -382,6 +382,20 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- parameter:
+ name: 'cengn-pod1-defaults'
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - cengn-pod1
+ default-slaves:
+ - cengn-pod1
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+- parameter:
name: 'intel-pod1-defaults'
parameters:
- node:
@@ -733,6 +747,24 @@
default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
description: 'Base URI to the configuration directory'
- parameter:
+ name: 'arm-pod3-2-defaults'
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - arm-pod3-2
+ default-slaves:
+ - arm-pod3-2
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: LAB_CONFIG_URL
+ default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
+ description: 'Base URI to the configuration directory'
+- parameter:
name: 'intel-virtual6-defaults'
parameters:
- node:
diff --git a/jjb/infra/bifrost-cleanup-job.yml b/jjb/infra/bifrost-cleanup-job.yml
new file mode 100644
index 000000000..571e275da
--- /dev/null
+++ b/jjb/infra/bifrost-cleanup-job.yml
@@ -0,0 +1,140 @@
+- project:
+ name: 'openstack-bifrost-cleanup'
+#--------------------------------
+# branches
+#--------------------------------
+ stream:
+ - master:
+ branch: '{stream}'
+
+#--------------------------------
+# projects
+#--------------------------------
+ project:
+ - 'openstack':
+ project-repo: 'https://git.openstack.org/openstack/bifrost'
+ clone-location: '/opt/bifrost'
+ - 'opnfv':
+ project-repo: 'https://gerrit.opnfv.org/gerrit/releng'
+ clone-location: '/opt/releng'
+
+#--------------------------------
+# jobs
+#--------------------------------
+ jobs:
+ - '{project}-bifrost-cleanup-{stream}'
+
+- job-template:
+ name: '{project}-bifrost-cleanup-{stream}'
+
+ concurrent: false
+
+ node: bifrost-verify-virtual
+
+ # Make sure no verify job is running on any of the slaves since that would
+ # produce build logs after we wipe the destination directory.
+ properties:
+ - build-blocker:
+ blocking-jobs:
+ - '{project}-bifrost-verify-*'
+
+ parameters:
+ - string:
+ name: PROJECT
+ default: '{project}'
+
+ builders:
+ - shell: |
+ #!/bin/bash
+
+ set -eu
+
+ # DO NOT change this unless you know what you are doing.
+ BIFROST_GS_URL="gs://artifacts.opnfv.org/cross-community-ci/openstack/bifrost/$GERRIT_NAME/$GERRIT_CHANGE_NUMBER/"
+
+ # This should never happen... even 'recheck' uses the last job's
+ # Gerrit information. Better to exit with an error so we can investigate.
+ [[ -z $GERRIT_NAME || -z $GERRIT_CHANGE_NUMBER ]] && exit 1
+
+ echo "Removing build artifacts for $GERRIT_NAME/$GERRIT_CHANGE_NUMBER"
+
+ if ! [[ "$BIFROST_GS_URL" =~ "/cross-community-ci/openstack/bifrost/" ]]; then
+ echo "Oops! BIFROST_GS_URL=$BIFROST_GS_URL does not seem like a valid"
+ echo "bifrost location on the Google storage server. Please double-check"
+ echo "that it's set properly or fix this line if necessary."
+ echo "gsutil will not be executed until this is fixed!"
+ exit 1
+ fi
+ # No force (-f). We always verify upstream jobs, so if there are no logs
+ # something else went wrong and we need to stop immediately and investigate.
+ gsutil -m rm -r $BIFROST_GS_URL
+
+ triggers:
+ - '{project}-gerrit-trigger-cleanup':
+ branch: '{branch}'
+
+ publishers:
+ - email:
+ recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com zhang.jun3g@zte.com.cn
+#--------------------------------
+# trigger macros
+#--------------------------------
+- trigger:
+ name: 'openstack-gerrit-trigger-cleanup'
+ triggers:
+ - gerrit:
+ server-name: 'review.openstack.org'
+ escape-quotes: true
+ trigger-on:
+ # We only run this when the change is merged or
+ # abandoned since we don't need the logs anymore
+ - patchset-uploaded-event: 'false'
+ - change-merged-event: 'true'
+ - change-abandoned-event: 'true'
+ - change-restored-event: 'false'
+ - draft-published-event: 'false'
+ # This is an OPNFV maintenance job. We don't want to provide
+ # feedback on Gerrit
+ silent: true
+ silent-start: true
+ projects:
+ - project-compare-type: 'PLAIN'
+ project-pattern: 'openstack/bifrost'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'doc/**'
+ - compare-type: ANT
+ pattern: 'releasenotes/**'
+ disable-strict-forbidden-file-verification: 'true'
+ readable-message: true
+- trigger:
+ name: 'opnfv-gerrit-trigger-cleanup'
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ # We only run this when the change is merged or
+ # abandoned since we don't need the logs anymore
+ - patchset-uploaded-event: 'false'
+ - change-merged-event: 'true'
+ - change-abandoned-event: 'true'
+ - change-restored-event: 'false'
+ - draft-published-event: 'false'
+ # This is an OPNFV maintenance job. We don't want to provide
+ # feedback on Gerrit
+ silent: true
+ silent-start: true
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'releng'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: ANT
+ pattern: 'prototypes/bifrost/**'
+ readable-message: true
diff --git a/jjb/infra/bifrost-verify-jobs.yml b/jjb/infra/bifrost-verify-jobs.yml
index c99023edf..33032bc7b 100644
--- a/jjb/infra/bifrost-verify-jobs.yml
+++ b/jjb/infra/bifrost-verify-jobs.yml
@@ -147,7 +147,7 @@
publishers:
- email:
- recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com zhang.jun3g@zte.com.cn
+ recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com julienjut@gmail.com
#--------------------------------
# trigger macros
#--------------------------------
@@ -172,11 +172,13 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'doc/**'
- compare-type: ANT
pattern: 'releasenotes/**'
+ disable-strict-forbidden-file-verification: 'true'
readable-message: true
- trigger:
name: 'opnfv-gerrit-trigger'
diff --git a/jjb/ipv6/ipv6.yml b/jjb/ipv6/ipv6.yml
index a6745cd99..b0db7640a 100644
--- a/jjb/ipv6/ipv6.yml
+++ b/jjb/ipv6/ipv6.yml
@@ -53,6 +53,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
diff --git a/jjb/joid/joid-daily-jobs.yml b/jjb/joid/joid-daily-jobs.yml
index b28dd6025..97e290c8a 100644
--- a/jjb/joid/joid-daily-jobs.yml
+++ b/jjb/joid/joid-daily-jobs.yml
@@ -20,7 +20,7 @@
danube: &danube
stream: danube
branch: 'stable/{stream}'
- disabled: true
+ disabled: false
gs-pathname: '/{stream}'
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
@@ -46,6 +46,9 @@
- orange-pod1:
slave-label: orange-pod1
<<: *master
+ - cengn-pod1:
+ slave-label: cengn-pod1
+ <<: *master
#--------------------------------
# scenarios
#--------------------------------
@@ -154,6 +157,23 @@
build-step-failure-threshold: 'never'
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
+ # 1. dovetail only runs against master for now, not synced with the A/B/C branches
+ # 2. 'stream' here refers to the SUT stream; the dovetail stream is defined in its own job
+ # 3. only the debug testsuite is run here (includes basic test cases,
+ #    i.e. one tempest smoke ipv6 and two vping cases from functest)
+ # 4. not used for release criteria or compliance,
+ #    only to debug dovetail tool bugs with joid
+ - trigger-builds:
+ - project: 'dovetail-joid-{pod}-debug-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
- job-template:
name: 'joid-deploy-{pod}-daily-{stream}'
@@ -232,6 +252,10 @@
name: 'joid-os-nosdn-nofeature-ha-orange-pod1-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-nosdn-nofeature-ha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: ''
# os-nosdn-nofeature-ha trigger - branch: danube
- trigger:
name: 'joid-os-nosdn-nofeature-ha-baremetal-danube-trigger'
@@ -245,6 +269,10 @@
name: 'joid-os-nosdn-nofeature-ha-orange-pod1-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-nosdn-nofeature-ha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
# os-odl_l2-nofeature-ha trigger - branch: master
- trigger:
name: 'joid-os-odl_l2-nofeature-ha-baremetal-master-trigger'
@@ -258,6 +286,10 @@
name: 'joid-os-odl_l2-nofeature-ha-orange-pod1-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-odl_l2-nofeature-ha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: ''
# os-odl_l2-nofeature-ha trigger - branch: danube
- trigger:
name: 'joid-os-odl_l2-nofeature-ha-baremetal-danube-trigger'
@@ -271,6 +303,10 @@
name: 'joid-os-odl_l2-nofeature-ha-orange-pod1-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-odl_l2-nofeature-ha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
# os-onos-nofeature-ha trigger - branch: master
- trigger:
name: 'joid-os-onos-nofeature-ha-baremetal-master-trigger'
@@ -284,6 +320,10 @@
name: 'joid-os-onos-nofeature-ha-orange-pod1-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-onos-nofeature-ha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: ''
# os-onos-nofeature-ha trigger - branch: danube
- trigger:
name: 'joid-os-onos-nofeature-ha-baremetal-danube-trigger'
@@ -297,6 +337,10 @@
name: 'joid-os-onos-nofeature-ha-orange-pod1-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-onos-nofeature-ha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
# os-onos-sfc-ha trigger - branch: master
- trigger:
name: 'joid-os-onos-sfc-ha-baremetal-master-trigger'
@@ -310,6 +354,10 @@
name: 'joid-os-onos-sfc-ha-orange-pod1-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-onos-sfc-ha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: ''
# os-onos-sfc-ha trigger - branch: danube
- trigger:
name: 'joid-os-onos-sfc-ha-baremetal-danube-trigger'
@@ -323,6 +371,10 @@
name: 'joid-os-onos-sfc-ha-orange-pod1-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-onos-sfc-ha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
# os-nosdn-lxd-noha trigger - branch: master
- trigger:
name: 'joid-os-nosdn-lxd-noha-baremetal-master-trigger'
@@ -336,6 +388,10 @@
name: 'joid-os-nosdn-lxd-noha-orange-pod1-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-noha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: ''
# os-nosdn-lxd-noha trigger - branch: danube
- trigger:
name: 'joid-os-nosdn-lxd-noha-baremetal-danube-trigger'
@@ -349,6 +405,10 @@
name: 'joid-os-nosdn-lxd-noha-orange-pod1-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-noha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
# os-nosdn-lxd-ha trigger - branch: master
- trigger:
name: 'joid-os-nosdn-lxd-ha-baremetal-master-trigger'
@@ -362,6 +422,10 @@
name: 'joid-os-nosdn-lxd-ha-orange-pod1-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-ha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: ''
# os-nosdn-lxd-ha trigger - branch: danube
- trigger:
name: 'joid-os-nosdn-lxd-ha-baremetal-danube-trigger'
@@ -375,6 +439,10 @@
name: 'joid-os-nosdn-lxd-ha-orange-pod1-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-ha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
# os-nosdn-nofeature-noha trigger - branch: master
- trigger:
name: 'joid-os-nosdn-nofeature-noha-baremetal-master-trigger'
@@ -388,6 +456,10 @@
name: 'joid-os-nosdn-nofeature-noha-orange-pod1-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-nosdn-nofeature-noha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: ''
# os-nosdn-nofeature-noha trigger - branch: danube
- trigger:
name: 'joid-os-nosdn-nofeature-noha-baremetal-danube-trigger'
@@ -401,6 +473,10 @@
name: 'joid-os-nosdn-nofeature-noha-orange-pod1-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-nosdn-nofeature-noha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
# k8-nosdn-nofeature-noha trigger - branch: master
- trigger:
name: 'joid-k8-nosdn-nofeature-noha-baremetal-master-trigger'
@@ -414,6 +490,10 @@
name: 'joid-k8-nosdn-nofeature-noha-orange-pod1-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-k8-nosdn-nofeature-noha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: ''
# k8-nosdn-nofeature-noha trigger - branch: danube
- trigger:
name: 'joid-k8-nosdn-nofeature-noha-baremetal-danube-trigger'
@@ -427,6 +507,10 @@
name: 'joid-k8-nosdn-nofeature-noha-orange-pod1-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-k8-nosdn-nofeature-noha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
# k8-nosdn-lb-noha trigger - branch: master
- trigger:
name: 'joid-k8-nosdn-lb-noha-baremetal-master-trigger'
@@ -440,6 +524,10 @@
name: 'joid-k8-nosdn-lb-noha-orange-pod1-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-k8-nosdn-lb-noha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: ''
# k8-nosdn-lb-noha trigger - branch: danube
- trigger:
name: 'joid-k8-nosdn-lb-noha-baremetal-danube-trigger'
@@ -453,3 +541,7 @@
name: 'joid-k8-nosdn-lb-noha-orange-pod1-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-k8-nosdn-lb-noha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
diff --git a/jjb/joid/joid-verify-jobs.yml b/jjb/joid/joid-verify-jobs.yml
index 7b8ce7701..03fab553e 100644
--- a/jjb/joid/joid-verify-jobs.yml
+++ b/jjb/joid/joid-verify-jobs.yml
@@ -86,6 +86,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
diff --git a/jjb/models/models.yml b/jjb/models/models.yml
new file mode 100644
index 000000000..683103678
--- /dev/null
+++ b/jjb/models/models.yml
@@ -0,0 +1,68 @@
+###################################################
+# All the jobs except verify have been removed!
+# They will only be enabled on request by projects!
+###################################################
+- project:
+ name: models
+
+ project: '{name}'
+
+ jobs:
+ - 'models-verify-{stream}'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+ - danube:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: false
+
+- job-template:
+ name: 'models-verify-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**|.gitignore'
+
+ builders:
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o nounset
+ set -o pipefail
+
+ # shellcheck -f tty tests/*.sh
diff --git a/jjb/moon/moon.yml b/jjb/moon/moon.yml
index a318bc54d..fb28feb53 100644
--- a/jjb/moon/moon.yml
+++ b/jjb/moon/moon.yml
@@ -42,6 +42,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
diff --git a/jjb/multisite/fuel-deploy-for-multisite.sh b/jjb/multisite/fuel-deploy-for-multisite.sh
index d8b40517c..06617610c 100755
--- a/jjb/multisite/fuel-deploy-for-multisite.sh
+++ b/jjb/multisite/fuel-deploy-for-multisite.sh
@@ -27,6 +27,9 @@ if [[ -z "FUEL_PROPERTIES_FILE" ]]; then
echo "Unable to extract the url to Fuel ISO properties from ${FUEL_DEPLOY_URL}"
exit 1
fi
+
+# use known/working version of fuel
+FUEL_PROPERTIES_FILE="opnfv-2017-03-06_16-00-15.properties"
curl -L -s -o $WORKSPACE/latest.properties http://artifacts.opnfv.org/fuel/$FUEL_PROPERTIES_FILE
# source the file so we get OPNFV vars
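For reference, a minimal stand-alone sketch of what this pinned download amounts to when run by hand; the filename is the one hard-coded above, and the variable names it exposes are assumed to follow the OPNFV_* convention the comment refers to:

    # Fetch the pinned Fuel ISO properties file and load its variables (sketch).
    FUEL_PROPERTIES_FILE="opnfv-2017-03-06_16-00-15.properties"
    curl -L -s -o latest.properties "http://artifacts.opnfv.org/fuel/${FUEL_PROPERTIES_FILE}"
    # shellcheck disable=SC1091
    source latest.properties   # exposes the OPNFV_* variables used later in the job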
diff --git a/jjb/multisite/multisite-daily-jobs.yml b/jjb/multisite/multisite-daily-jobs.yml
index 6b022fd75..23c95f627 100644
--- a/jjb/multisite/multisite-daily-jobs.yml
+++ b/jjb/multisite/multisite-daily-jobs.yml
@@ -138,8 +138,8 @@
- name: 'functest-fuel-virtual-suite-{stream}'
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO='os-nosdn-multisite-noha'
- FUNCTEST_SUITE_NAME='multisite'
+ DEPLOY_SCENARIO=os-nosdn-multisite-noha
+ FUNCTEST_SUITE_NAME=multisite
OS_REGION=RegionOne
REGIONONE_IP=100.64.209.10
REGIONTWO_IP=100.64.209.11
diff --git a/jjb/multisite/multisite-verify-jobs.yml b/jjb/multisite/multisite-verify-jobs.yml
index 5ecfafb55..9431e0bac 100644
--- a/jjb/multisite/multisite-verify-jobs.yml
+++ b/jjb/multisite/multisite-verify-jobs.yml
@@ -19,7 +19,7 @@
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
timed: ''
- job-template:
@@ -57,6 +57,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
diff --git a/jjb/netready/netready.yml b/jjb/netready/netready.yml
index 382434ae6..9a4d8858c 100644
--- a/jjb/netready/netready.yml
+++ b/jjb/netready/netready.yml
@@ -44,6 +44,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**'
diff --git a/jjb/octopus/octopus.yml b/jjb/octopus/octopus.yml
index cb66112fe..c06fa89e8 100644
--- a/jjb/octopus/octopus.yml
+++ b/jjb/octopus/octopus.yml
@@ -52,6 +52,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
diff --git a/jjb/onosfw/onosfw.yml b/jjb/onosfw/onosfw.yml
index 13c96718c..9d6b037e1 100644
--- a/jjb/onosfw/onosfw.yml
+++ b/jjb/onosfw/onosfw.yml
@@ -16,7 +16,7 @@
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
project: 'onosfw'
@@ -56,6 +56,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
diff --git a/jjb/openretriever/openretriever-project.yml b/jjb/openretriever/openretriever-project.yml
index 3d53f9b2e..3bcfab6d3 100644
--- a/jjb/openretriever/openretriever-project.yml
+++ b/jjb/openretriever/openretriever-project.yml
@@ -53,6 +53,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
diff --git a/jjb/opera/opera-daily-jobs.yml b/jjb/opera/opera-daily-jobs.yml
index d49caf1a6..5d2cc03f3 100644
--- a/jjb/opera/opera-daily-jobs.yml
+++ b/jjb/opera/opera-daily-jobs.yml
@@ -83,7 +83,7 @@
- name: 'compass-deploy-virtual-daily-{stream}'
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-openo-noha
+ DEPLOY_SCENARIO=os-nosdn-openo-ha
COMPASS_OS_VERSION=xenial
node-parameters: true
kill-phase-on: FAILURE
diff --git a/jjb/opera/opera-verify-jobs.yml b/jjb/opera/opera-verify-jobs.yml
index b7b5cb3c9..4da41d8d9 100644
--- a/jjb/opera/opera-verify-jobs.yml
+++ b/jjb/opera/opera-verify-jobs.yml
@@ -76,6 +76,7 @@
file-paths:
- compare-type: ANT
pattern: '**/*'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**'
diff --git a/jjb/opnfvdocs/docs-post-rtd.sh b/jjb/opnfvdocs/docs-post-rtd.sh
new file mode 100644
index 000000000..e3dc9b5f0
--- /dev/null
+++ b/jjb/opnfvdocs/docs-post-rtd.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+if [ "$GERRIT_BRANCH" == "master" ]; then
+ RTD_BUILD_VERSION=latest
+else
+ RTD_BUILD_VERSION=${{GERRIT_BRANCH/\//-}}
+fi
+curl -X POST --data "version_slug=$RTD_BUILD_VERSION" https://readthedocs.org/build/opnfvdocsdemo
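Outside the JJB template (where the doubled braces are the usual escaping for a literal brace in an included raw script), the trigger reduces to roughly the following for a stable-branch merge; the branch value is illustrative and the project slug is the one used above:

    GERRIT_BRANCH=stable/danube                  # example branch
    if [ "$GERRIT_BRANCH" == "master" ]; then
        RTD_BUILD_VERSION=latest
    else
        RTD_BUILD_VERSION=${GERRIT_BRANCH/\//-}  # -> stable-danube
    fi
    curl -X POST --data "version_slug=$RTD_BUILD_VERSION" https://readthedocs.org/build/opnfvdocsdemo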
diff --git a/jjb/opnfvdocs/docs-rtd.yaml b/jjb/opnfvdocs/docs-rtd.yaml
new file mode 100644
index 000000000..bf6d0012b
--- /dev/null
+++ b/jjb/opnfvdocs/docs-rtd.yaml
@@ -0,0 +1,90 @@
+- project:
+ name: docs-rtd
+ jobs:
+ - 'docs-merge-rtd-{stream}'
+ - 'docs-verify-rtd-{stream}'
+
+ stream:
+ - master:
+ branch: 'master'
+ - danube:
+ branch: 'stable/{stream}'
+
+ project: 'opnfvdocs'
+ rtdproject: 'opnfv'
+ # TODO: Archive Artifacts
+
+- job-template:
+ name: 'docs-merge-rtd-{stream}'
+
+ project-type: freestyle
+
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'lf-build1'
+ description: 'Slave label on Jenkins'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/releng
+ description: 'Git URL to use on this Jenkins Slave'
+ scm:
+ - git-scm
+
+ triggers:
+ - gerrit-trigger-change-merged:
+ project: '**'
+ branch: '{branch}'
+ files: 'docs/**/*.*'
+
+ builders:
+ - shell: !include-raw: docs-post-rtd.sh
+
+- job-template:
+ name: 'docs-verify-rtd-{stream}'
+
+ project-type: freestyle
+
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'lf-build2'
+ description: 'Slave label on Jenkins'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/opnfvdocs
+ description: 'Git URL to use on this Jenkins Slave'
+ scm:
+ - git-scm-with-submodules:
+ branch: '{branch}'
+
+ triggers:
+ - gerrit-trigger-patchset-created:
+ server: 'gerrit.opnfv.org'
+ project: '**'
+ branch: '{branch}'
+ files: 'docs/**/*.*'
+ - timed: 'H H * * *'
+
+ builders:
+ - shell: |
+ if [ "$GERRIT_PROJECT" != "opnfvdocs" ]; then
+ cd docs/submodules/$GERRIT_PROJECT
+ git fetch origin $GERRIT_REFSPEC && git checkout FETCH_HEAD
+ else
+ git fetch origin $GERRIT_REFSPEC && git checkout FETCH_HEAD
+ fi
+ - shell: |
+ sudo pip install virtualenv
+ virtualenv $WORKSPACE/venv
+ . $WORKSPACE/venv/bin/activate
+ pip install --upgrade pip
+ pip freeze
+ pip install tox
+ tox -edocs
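The verify builder above can be reproduced locally in roughly this form, assuming the opnfvdocs checkout (GIT_BASE above) carries a tox.ini with a 'docs' environment, as the 'tox -edocs' call implies:

    git clone https://gerrit.opnfv.org/gerrit/opnfvdocs && cd opnfvdocs
    virtualenv venv
    . venv/bin/activate
    pip install --upgrade pip
    pip install tox
    tox -edocs      # builds the docs the same way the docs-verify-rtd job does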
diff --git a/jjb/opnfvdocs/opnfvdocs.yml b/jjb/opnfvdocs/opnfvdocs.yml
index 12950338d..0553cd436 100644
--- a/jjb/opnfvdocs/opnfvdocs.yml
+++ b/jjb/opnfvdocs/opnfvdocs.yml
@@ -20,7 +20,7 @@
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
########################
# job templates
@@ -62,6 +62,11 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ skip-vote:
+ successful: true
+ failed: true
+ unstable: true
+ notbuilt: true
builders:
- check-bash-syntax
diff --git a/jjb/ovsnfv/ovsnfv.yml b/jjb/ovsnfv/ovsnfv.yml
index 937a367fb..0e8c713fd 100644
--- a/jjb/ovsnfv/ovsnfv.yml
+++ b/jjb/ovsnfv/ovsnfv.yml
@@ -16,7 +16,7 @@
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
- job-template:
name: 'ovsnfv-verify-{stream}'
diff --git a/jjb/parser/parser.yml b/jjb/parser/parser.yml
index 69fcefc20..35e97c3b3 100644
--- a/jjb/parser/parser.yml
+++ b/jjb/parser/parser.yml
@@ -53,9 +53,14 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
- pattern: 'docs/**|.gitignore'
+ pattern: 'docs/**'
+ - compare-type: ANT
+ pattern: 'governance/**'
+ - compare-type: ANT
+ pattern: '*.txt|.gitignore|.gitreview|INFO|LICENSE'
builders:
- shell: |
diff --git a/jjb/pharos/pharos.yml b/jjb/pharos/pharos.yml
index 6dae9f33c..12ae5cabe 100644
--- a/jjb/pharos/pharos.yml
+++ b/jjb/pharos/pharos.yml
@@ -53,6 +53,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
diff --git a/jjb/prediction/prediction.yml b/jjb/prediction/prediction.yml
index b380d8c86..a153a9bb0 100644
--- a/jjb/prediction/prediction.yml
+++ b/jjb/prediction/prediction.yml
@@ -53,6 +53,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
diff --git a/jjb/promise/promise.yml b/jjb/promise/promise.yml
index a5aa302c7..eeace5f78 100644
--- a/jjb/promise/promise.yml
+++ b/jjb/promise/promise.yml
@@ -53,6 +53,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
diff --git a/jjb/qtip/helpers/validate-deploy.sh b/jjb/qtip/helpers/validate-deploy.sh
index 16455371f..90b54a1c9 100644
--- a/jjb/qtip/helpers/validate-deploy.sh
+++ b/jjb/qtip/helpers/validate-deploy.sh
@@ -8,15 +8,14 @@
##############################################################################
set -e
-envs="INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} -e NODE_NAME=${NODE_NAME}"
-suite="TEST_CASE=all"
+envs="INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP}
+-e NODE_NAME=${NODE_NAME} -e CI_DEBUG=${CI_DEBUG}"
dir_imgstore="${HOME}/imgstore"
-img_volume="${dir_imgstore}:/home/opnfv/imgstore"
echo "Qtip: Pulling docker image: opnfv/qtip:${DOCKER_TAG}"
docker pull opnfv/qtip:$DOCKER_TAG
-cmd=" docker run -id -e $envs -e $suite -v ${img_volume} opnfv/qtip:${DOCKER_TAG} /bin/bash"
+cmd=" docker run -id -e $envs opnfv/qtip:${DOCKER_TAG} /bin/bash"
echo "Qtip: Running docker command: ${cmd}"
${cmd}
@@ -27,7 +26,10 @@ if [ $(docker ps | grep 'opnfv/qtip' | wc -l) == 0 ]; then
else
echo "The container ID is: ${container_id}"
QTIP_REPO=/home/opnfv/repos/qtip
-# TODO(yujunz): execute benchmark plan for compute-qpi
+# TODO(zhihui_wu): use qtip cli to execute benchmark test in the future
+ docker exec -t ${container_id} bash -c "cd ${QTIP_REPO}/qtip/runner/ &&
+ python runner.py -d /home/opnfv/qtip/results/ -b all"
+
fi
echo "Qtip done!"
diff --git a/jjb/qtip/qtip-validate-jobs.yml b/jjb/qtip/qtip-validate-jobs.yml
index 98f7ab90a..4cd8490fd 100644
--- a/jjb/qtip/qtip-validate-jobs.yml
+++ b/jjb/qtip/qtip-validate-jobs.yml
@@ -13,6 +13,11 @@
branch: '{stream}'
gs-pathname: ''
docker-tag: latest
+ danube: &danube
+ stream: danube
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ docker-tag: 'stable'
#--------------------------------
# JOB VARIABLES
@@ -24,6 +29,9 @@
- zte-pod3:
installer: fuel
<<: *master
+ - zte-pod3:
+ installer: fuel
+ <<: *danube
task:
- daily:
auto-builder-name: qtip-validate-deploy
@@ -53,6 +61,10 @@
<<: *master
- '{installer}-defaults'
- '{pod}-defaults'
+ - string:
+ name: CI_DEBUG
+ default: 'false'
+ description: "Show debug output information"
scm:
- git-scm
triggers:
@@ -139,3 +151,4 @@
- gerrit-trigger-change-merged:
project: '{project}'
branch: '{branch}'
+ files: '**'
diff --git a/jjb/qtip/qtip-verify-jobs.yml b/jjb/qtip/qtip-verify-jobs.yml
index d1fc34d11..dd444c7a5 100644
--- a/jjb/qtip/qtip-verify-jobs.yml
+++ b/jjb/qtip/qtip-verify-jobs.yml
@@ -12,6 +12,10 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
+ - danube:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: false
################################
## job templates
@@ -49,12 +53,15 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
builders:
- qtip-unit-tests-and-docs-build
+ publishers:
+ - publish-coverage
################################
## job builders
diff --git a/jjb/releng/opnfv-docker-arm.yml b/jjb/releng/opnfv-docker-arm.yml
index 09c9f335e..ba540ed76 100644
--- a/jjb/releng/opnfv-docker-arm.yml
+++ b/jjb/releng/opnfv-docker-arm.yml
@@ -13,7 +13,7 @@
danube: &danube
stream: danube
branch: 'stable/{stream}'
- disabled: true
+ disabled: false
functest-arm-receivers: &functest-arm-receivers
receivers: >
cristina.pauna@enea.com
diff --git a/jjb/releng/opnfv-docker.sh b/jjb/releng/opnfv-docker.sh
index c906e1fcd..9bd711bc6 100644
--- a/jjb/releng/opnfv-docker.sh
+++ b/jjb/releng/opnfv-docker.sh
@@ -43,19 +43,29 @@ fi
if [[ -n "$(docker images | grep $DOCKER_REPO_NAME)" ]]; then
echo "Docker images to remove:"
docker images | head -1 && docker images | grep $DOCKER_REPO_NAME
- image_tags=($(docker images | grep $DOCKER_REPO_NAME | awk '{print $2}'))
- for tag in "${image_tags[@]}"; do
- if [[ -n "$(docker images|grep $DOCKER_REPO_NAME|grep $tag)" ]]; then
- echo "Removing docker image $DOCKER_REPO_NAME:$tag..."
- docker rmi -f $DOCKER_REPO_NAME:$tag
+ image_ids=($(docker images | grep $DOCKER_REPO_NAME | awk '{print $3}'))
+ for id in "${image_ids[@]}"; do
+ if [[ -n "$(docker images|grep $DOCKER_REPO_NAME|grep $id)" ]]; then
+ echo "Removing docker image $DOCKER_REPO_NAME:$id..."
+ docker rmi -f $id
fi
done
fi
cd $WORKSPACE/docker
-if [ ! -f ${DOCKERFILE} ]; then
- echo "ERROR: Dockerfile not found."
- exit 1
+HOST_ARCH=$(uname -m)
+if [ ! -f "${DOCKERFILE}" ]; then
+ # If this is expected to be a Dockerfile for an arch other than x86
+ # and it does not exist, but a patch for that arch is present,
+ # then apply the patch to create the Dockerfile.${HOST_ARCH} file
+ if [[ "${DOCKERFILE}" == *"${HOST_ARCH}" && \
+ -f "Dockerfile.${HOST_ARCH}.patch" ]]; then
+ patch -o Dockerfile."${HOST_ARCH}" Dockerfile \
+ Dockerfile."${HOST_ARCH}".patch
+ else
+ echo "ERROR: No Dockerfile or ${HOST_ARCH} patch found."
+ exit 1
+ fi
fi
# Get tag version
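The per-arch patch consumed by the branch above can be produced with plain diff/patch; a sketch of the assumed maintainer workflow for aarch64:

    # Start from the x86 Dockerfile, adapt it, and record only the delta.
    cp Dockerfile Dockerfile.aarch64        # edit base image, packages, etc.
    diff -u Dockerfile Dockerfile.aarch64 > Dockerfile.aarch64.patch
    rm Dockerfile.aarch64                   # CI recreates it with:
    patch -o Dockerfile.aarch64 Dockerfile Dockerfile.aarch64.patch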
@@ -64,7 +74,7 @@ echo "Current branch: $BRANCH"
if [[ "$BRANCH" == "master" ]]; then
DOCKER_TAG="latest"
else
- if [[ "$RELEASE_VERSION" != "" ]]; then
+ if [[ -n "${RELEASE_VERSION-}" ]]; then
release=${BRANCH##*/}
DOCKER_TAG=${release}.${RELEASE_VERSION}
# e.g. colorado.1.0, colorado.2.0, colorado.3.0
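For a stable branch with a release version set, the tag logic above works out as in this small sketch (values illustrative):

    BRANCH=stable/danube
    RELEASE_VERSION=1.0
    if [[ -n "${RELEASE_VERSION-}" ]]; then   # safe under 'set -u' even when unset
        release=${BRANCH##*/}                 # danube
        DOCKER_TAG=${release}.${RELEASE_VERSION}
    fi
    echo "$DOCKER_TAG"                        # -> danube.1.0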
@@ -77,8 +87,12 @@ fi
echo "Building docker image: $DOCKER_REPO_NAME:$DOCKER_TAG"
echo "--------------------------------------------------------"
echo
-cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG --build-arg BRANCH=$BRANCH
- -f $DOCKERFILE ."
+if [[ $DOCKER_REPO_NAME == *"dovetail"* ]]; then
+ cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG -f $DOCKERFILE ."
+else
+ cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG --build-arg BRANCH=$BRANCH
+ -f $DOCKERFILE ."
+fi
echo ${cmd}
${cmd}
diff --git a/jjb/releng/opnfv-docker.yml b/jjb/releng/opnfv-docker.yml
index 90a91f802..f69992156 100644
--- a/jjb/releng/opnfv-docker.yml
+++ b/jjb/releng/opnfv-docker.yml
@@ -13,7 +13,7 @@
danube: &danube
stream: danube
branch: 'stable/{stream}'
- disabled: true
+ disabled: false
functest-receivers: &functest-receivers
receivers: >
jose.lausuch@ericsson.com morgan.richomme@orange.com
diff --git a/jjb/releng/opnfv-lint.yml b/jjb/releng/opnfv-lint.yml
index 37cdef28f..166aea8f9 100644
--- a/jjb/releng/opnfv-lint.yml
+++ b/jjb/releng/opnfv-lint.yml
@@ -102,7 +102,7 @@
comment-contains-value: 'reverify'
projects:
- project-compare-type: 'REG_EXP'
- project-pattern: 'compass4nfv'
+ project-pattern: ''
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
diff --git a/jjb/releng/testapi-backup-mongodb.sh b/jjb/releng/testapi-backup-mongodb.sh
index 8dba17beb..795e479d9 100644
--- a/jjb/releng/testapi-backup-mongodb.sh
+++ b/jjb/releng/testapi-backup-mongodb.sh
@@ -27,5 +27,5 @@ if [ $? != 0 ]; then
else
echo "Uploading mongodump to artifact $artifact_dir"
/usr/local/bin/gsutil cp -r "$workspace"/"$file_name" gs://artifacts.opnfv.org/"$artifact_dir"/
- echo "MongoDump can be found at http://artifacts.opnfv.org/$artifact_dir"
+ echo "MongoDump can be found at http://artifacts.opnfv.org/$artifact_dir.html"
fi
diff --git a/jjb/snaps/snaps.yml b/jjb/snaps/snaps.yml
new file mode 100644
index 000000000..50b7c3070
--- /dev/null
+++ b/jjb/snaps/snaps.yml
@@ -0,0 +1,63 @@
+###################################################
+# All the jobs except verify have been removed!
+# They will only be enabled on request by projects!
+###################################################
+- project:
+ name: snaps
+
+ project: '{name}'
+
+ jobs:
+ - 'snaps-verify-{stream}'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+ - danube:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: false
+
+- job-template:
+ name: 'snaps-verify-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**|.gitignore'
+
+ builders:
+ - shell: |
+ echo "Nothing to verify!"
diff --git a/jjb/storperf/storperf.yml b/jjb/storperf/storperf.yml
index a04a9f4b4..709a1ebab 100644
--- a/jjb/storperf/storperf.yml
+++ b/jjb/storperf/storperf.yml
@@ -16,7 +16,7 @@
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
- job-template:
name: 'storperf-verify-{stream}'
diff --git a/jjb/ves/ves.yml b/jjb/ves/ves.yml
new file mode 100644
index 000000000..e6243f32c
--- /dev/null
+++ b/jjb/ves/ves.yml
@@ -0,0 +1,69 @@
+###################################################
+# All the jobs except verify have been removed!
+# They will only be enabled on request by projects!
+###################################################
+- project:
+ name: ves
+
+ project: '{name}'
+
+ jobs:
+ - 'ves-verify-{stream}'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+ - danube:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: false
+
+- job-template:
+ name: 'ves-verify-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**|.gitignore'
+
+ builders:
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o nounset
+ set -o pipefail
+
+ # shellcheck -f tty tests/*.sh
+ # shellcheck -f tty utils/*.sh
diff --git a/jjb/vswitchperf/vswitchperf.yml b/jjb/vswitchperf/vswitchperf.yml
index ef0e90a76..c5c81c898 100644
--- a/jjb/vswitchperf/vswitchperf.yml
+++ b/jjb/vswitchperf/vswitchperf.yml
@@ -18,8 +18,8 @@
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
- slave-label: 'intel-pod12'
+ disabled: false
+ slave-label: 'opnfv-build-ubuntu'
- job-template:
@@ -97,6 +97,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**'
@@ -152,6 +153,7 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**'
diff --git a/jjb/yardstick/yardstick-ci-jobs.yml b/jjb/yardstick/yardstick-ci-jobs.yml
index 604eaed25..1f2f3122c 100644
--- a/jjb/yardstick/yardstick-ci-jobs.yml
+++ b/jjb/yardstick/yardstick-ci-jobs.yml
@@ -272,7 +272,7 @@
publishers:
- email:
- recipients: jean.gaoliang@huawei.com matthew.lijun@huawei.com
+ recipients: jean.gaoliang@huawei.com limingjiang@huawei.com
########################
# builder macros