Diffstat (limited to 'jjb')
-rwxr-xr-x  jjb/3rd_party_ci/download-netvirt-artifact.sh | 13
-rw-r--r--  jjb/3rd_party_ci/odl-netvirt.yml | 9
-rwxr-xr-x  jjb/apex/apex-build.sh | 10
-rwxr-xr-x  jjb/apex/apex-deploy.sh | 201
-rwxr-xr-x  jjb/apex/apex-download-artifact.sh | 70
-rwxr-xr-x  jjb/apex/apex-gs-cleanup.sh | 19
-rwxr-xr-x  jjb/apex/apex-iso-verify.sh | 63
-rw-r--r--  jjb/apex/apex-jjb-renderer.py | 41
-rw-r--r--  jjb/apex/apex-snapshot-create.sh | 6
-rw-r--r--  jjb/apex/apex-snapshot-deploy.sh | 1
-rwxr-xr-x  jjb/apex/apex-upload-artifact.sh | 174
-rw-r--r--  jjb/apex/apex.yml | 1562
-rw-r--r--  jjb/apex/apex.yml.j2 | 1032
-rw-r--r--  jjb/apex/scenarios.yaml.hidden | 32
-rw-r--r--  jjb/armband/armband-ci-jobs.yml | 184
-rwxr-xr-x  jjb/armband/armband-deploy.sh | 4
-rw-r--r--  jjb/barometer/barometer.yml | 16
-rw-r--r--  jjb/bottlenecks/bottlenecks-run-suite.sh | 5
-rw-r--r--  jjb/ci_gate_security/anteater-report-to-gerrit.sh | 25
-rw-r--r--  jjb/ci_gate_security/anteater-security-audit.sh | 32
-rw-r--r--  jjb/ci_gate_security/opnfv-ci-gate-security.yml (renamed from jjb/securityaudit/opnfv-security-audit.yml) | 62
-rw-r--r--  jjb/compass4nfv/compass-build.sh | 15
-rw-r--r--  jjb/compass4nfv/compass-ci-jobs.yml | 56
-rw-r--r--  jjb/compass4nfv/compass-deploy.sh | 6
-rw-r--r--  jjb/compass4nfv/compass-dovetail-jobs.yml | 11
-rw-r--r--  jjb/compass4nfv/compass-download-artifact.sh | 18
-rw-r--r--  jjb/compass4nfv/compass-project-jobs.yml | 2
-rw-r--r--  jjb/compass4nfv/compass-upload-artifact.sh | 19
-rw-r--r--  jjb/compass4nfv/compass-verify-jobs.yml | 42
-rw-r--r--  jjb/cperf/cperf-ci-jobs.yml | 2
-rwxr-xr-x  jjb/daisy4nfv/daisy-deploy.sh | 4
-rw-r--r--  jjb/daisy4nfv/daisy-project-jobs.yml | 7
-rwxr-xr-x  jjb/daisy4nfv/daisy4nfv-basic.sh | 1
-rwxr-xr-x  jjb/daisy4nfv/daisy4nfv-build.sh | 4
-rwxr-xr-x  jjb/daisy4nfv/daisy4nfv-download-artifact.sh | 10
-rw-r--r--  jjb/daisy4nfv/daisy4nfv-merge-jobs.yml | 8
-rw-r--r--  jjb/daisy4nfv/daisy4nfv-verify-jobs.yml | 28
-rw-r--r--  jjb/doctor/doctor.yml | 2
-rwxr-xr-x  jjb/dovetail/dovetail-artifacts-upload.sh | 2
-rw-r--r--  jjb/dovetail/dovetail-artifacts-upload.yml | 36
-rw-r--r--  jjb/dovetail/dovetail-ci-jobs.yml | 31
-rwxr-xr-x  jjb/dovetail/dovetail-cleanup.sh | 8
-rwxr-xr-x  jjb/dovetail/dovetail-run.sh | 81
-rw-r--r--  jjb/dovetail/dovetail-weekly-jobs.yml | 1
-rwxr-xr-x  jjb/fuel/fuel-build.sh | 9
-rw-r--r--  jjb/fuel/fuel-daily-jobs.yml | 60
-rwxr-xr-x  jjb/fuel/fuel-deploy.sh | 14
-rwxr-xr-x  jjb/fuel/fuel-download-artifact.sh | 3
-rw-r--r--  jjb/functest/functest-daily-jobs.yml | 36
-rwxr-xr-x  jjb/functest/functest-env-presetup.sh | 31
-rwxr-xr-x  jjb/functest/functest-loop.sh | 10
-rw-r--r--  jjb/functest/functest-project-jobs.yml | 2
-rwxr-xr-x  jjb/functest/functest-suite.sh | 5
-rw-r--r--  jjb/functest/functest-weekly-jobs.yml | 5
-rwxr-xr-x  jjb/functest/set-functest-env.sh | 82
-rw-r--r--  jjb/global/installer-params.yml | 10
-rw-r--r--  jjb/global/releng-macros.yml | 2
-rw-r--r--  jjb/global/slave-params.yml | 131
-rw-r--r--  jjb/joid/joid-daily-jobs.yml | 2
-rwxr-xr-x  jjb/kvmfornfv/kvmfornfv-upload-artifact.sh | 23
-rw-r--r--  jjb/kvmfornfv/kvmfornfv.yml | 68
-rw-r--r--  jjb/opera/opera-daily-jobs.yml | 105
-rw-r--r--  jjb/qtip/helpers/validate-deploy.sh | 48
-rw-r--r--  jjb/releng/opnfv-docker-arm.yml | 8
-rw-r--r--  jjb/releng/opnfv-docker.sh | 14
-rw-r--r--  jjb/releng/opnfv-docker.yml | 3
-rw-r--r--  jjb/releng/opnfv-lint.yml | 2
-rw-r--r--  jjb/releng/testapi-automate.yml | 3
-rwxr-xr-x  jjb/securedlab/check-jinja2.sh | 9
-rw-r--r--  jjb/securedlab/check-jinja2.yml | 80
-rw-r--r--  jjb/storperf/storperf.yml | 49
-rwxr-xr-x  jjb/xci/bifrost-provision.sh | 4
-rw-r--r--  jjb/xci/bifrost-verify-jobs.yml | 8
-rwxr-xr-x  jjb/xci/bifrost-verify.sh | 24
-rwxr-xr-x  jjb/xci/xci-deploy.sh | 2
-rw-r--r--  jjb/yardstick/yardstick-daily-jobs.yml (renamed from jjb/yardstick/yardstick-ci-jobs.yml) | 17
-rw-r--r--  jjb/yardstick/yardstick-project-jobs.yml | 13
77 files changed, 3397 insertions, 1440 deletions
diff --git a/jjb/3rd_party_ci/download-netvirt-artifact.sh b/jjb/3rd_party_ci/download-netvirt-artifact.sh
index 6aea01d2a..7ecf8d78d 100755
--- a/jjb/3rd_party_ci/download-netvirt-artifact.sh
+++ b/jjb/3rd_party_ci/download-netvirt-artifact.sh
@@ -6,11 +6,18 @@ set -o pipefail
ODL_ZIP=distribution-karaf-0.6.0-SNAPSHOT.zip
echo "Attempting to fetch the artifact location from ODL Jenkins"
-CHANGE_DETAILS_URL="https://git.opendaylight.org/gerrit/changes/netvirt~master~$GERRIT_CHANGE_ID/detail"
+if [ "$ODL_BRANCH" != 'master' ]; then
+ DIST=$(echo ${ODL_BRANCH} | sed -rn 's#([a-zA-Z]+)/([a-zA-Z]+)#\2#p')
+ ODL_BRANCH=$(echo ${ODL_BRANCH} | sed -rn 's#([a-zA-Z]+)/([a-zA-Z]+)#\1%2F\2#p')
+else
+ DIST='nitrogen'
+fi
+CHANGE_DETAILS_URL="https://git.opendaylight.org/gerrit/changes/netvirt~${ODL_BRANCH}~${GERRIT_CHANGE_ID}/detail"
# due to limitation with the Jenkins Gerrit Trigger, we need to use Gerrit REST API to get the change details
-ODL_BUILD_JOB_NUM=$(curl -s $CHANGE_DETAILS_URL | grep -Eo 'netvirt-distribution-check-carbon/[0-9]+' | tail -1 | grep -Eo [0-9]+)
+ODL_BUILD_JOB_NUM=$(curl --fail -s ${CHANGE_DETAILS_URL} | grep -Eo "netvirt-distribution-check-${DIST}/[0-9]+" | tail -1 | grep -Eo [0-9]+)
+DISTRO_CHECK_CONSOLE_LOG="https://logs.opendaylight.org/releng/jenkins092/netvirt-distribution-check-${DIST}/${ODL_BUILD_JOB_NUM}/console.log.gz"
+NETVIRT_ARTIFACT_URL=$(curl --fail -s --compressed ${DISTRO_CHECK_CONSOLE_LOG} | grep 'BUNDLE_URL' | cut -d = -f 2)
-NETVIRT_ARTIFACT_URL="https://jenkins.opendaylight.org/releng/job/netvirt-distribution-check-carbon/${ODL_BUILD_JOB_NUM}/artifact/${ODL_ZIP}"
echo -e "URL to artifact is\n\t$NETVIRT_ARTIFACT_URL"
echo "Downloading the artifact. This could take time..."
diff --git a/jjb/3rd_party_ci/odl-netvirt.yml b/jjb/3rd_party_ci/odl-netvirt.yml
index 470e4335e..a937acbed 100644
--- a/jjb/3rd_party_ci/odl-netvirt.yml
+++ b/jjb/3rd_party_ci/odl-netvirt.yml
@@ -12,6 +12,10 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
+ - carbon:
+ branch: 'stable/carbon'
+ gs-pathname: ''
+ disabled: false
#####################################
# patch verification phases
#####################################
@@ -111,6 +115,7 @@
- name: 'odl-netvirt-verify-virtual-install-netvirt-{stream}'
current-parameters: false
predefined-parameters: |
+ ODL_BRANCH={branch}
BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
@@ -125,10 +130,10 @@
name: functest
condition: SUCCESSFUL
projects:
- - name: 'functest-netvirt-virtual-suite-{stream}'
+ - name: 'functest-netvirt-virtual-suite-master'
predefined-parameters: |
DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
- FUNCTEST_SUITE_NAME=tempest_smoke_serial
+ FUNCTEST_SUITE_NAME=odl_netvirt
RC_FILE_PATH=$HOME/cloner-info/overcloudrc
node-parameters: true
kill-phase-on: FAILURE
diff --git a/jjb/apex/apex-build.sh b/jjb/apex/apex-build.sh
index b6b2f212a..58d9f1a40 100755
--- a/jjb/apex/apex-build.sh
+++ b/jjb/apex/apex-build.sh
@@ -9,8 +9,9 @@ echo
# create the cache directory if it doesn't exist
[[ -d $CACHE_DIRECTORY ]] || mkdir -p $CACHE_DIRECTORY
# set OPNFV_ARTIFACT_VERSION
-if echo $BUILD_TAG | grep "apex-verify" 1> /dev/null; then
- export OPNFV_ARTIFACT_VERSION=dev${BUILD_NUMBER}
+if echo $ARTIFACT_VERSION | grep "dev" 1> /dev/null; then
+ GERRIT_PATCHSET_NUMBER=$(echo $GERRIT_REFSPEC | grep -Eo '[0-9]+$')
+ export OPNFV_ARTIFACT_VERSION="dev${GERRIT_CHANGE_NUMBER}_${GERRIT_PATCHSET_NUMBER}"
export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY"
elif echo $BUILD_TAG | grep "csit" 1> /dev/null; then
export OPNFV_ARTIFACT_VERSION=csit${BUILD_NUMBER}
@@ -23,6 +24,9 @@ else
export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --iso"
fi
+# Temporary hack until we fix apex build script
+BUILD_DIRECTORY=${WORKSPACE}/build
+
# start the build
cd $WORKSPACE/ci
./build.sh $BUILD_ARGS
@@ -39,7 +43,7 @@ echo "Cache Directory Contents:"
echo "-------------------------"
ls -al $CACHE_DIRECTORY
-if ! echo $BUILD_TAG | grep "apex-verify" 1> /dev/null; then
+if ! echo $ARTIFACT_VERSION | grep "dev" 1> /dev/null; then
echo "Writing opnfv.properties file"
# save information regarding artifact into file
(
diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh
index 06f7622f5..3a2ca606b 100755
--- a/jjb/apex/apex-deploy.sh
+++ b/jjb/apex/apex-deploy.sh
@@ -3,68 +3,14 @@ set -o errexit
set -o nounset
set -o pipefail
-APEX_PKGS="common undercloud" # removed onos for danube
+APEX_PKGS="common undercloud onos"
IPV6_FLAG=False
# log info to console
-echo "Starting the Apex virtual deployment."
+echo "Starting the Apex deployment."
echo "--------------------------------------------------------"
echo
-if ! rpm -q wget > /dev/null; then
- sudo yum -y install wget
-fi
-
-if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *promote* ]]; then
- # Build is from a verify, use local build artifacts (not RPMs)
- cd $WORKSPACE/../${BUILD_DIRECTORY}
- WORKSPACE=$(pwd)
- echo "WORKSPACE modified to $WORKSPACE"
- cd $WORKSPACE/ci
-elif [[ ! "$ARTIFACT_NAME" == "latest" ]]; then
- # if artifact name is passed the pull a
- # specific artifact from artifacts.opnfv.org
- # artifact specified should be opnfv-apex-<version>.noarch.rpm
- RPM_INSTALL_PATH=$GS_URL
- RPM_LIST=$RPM_INSTALL_PATH/$ARTIFACT_NAME
-else
- # Use latest RPMS
- if [[ $BUILD_DIRECTORY == *apex-build* ]]; then
- # Triggered from a daily so RPMS should be in local directory
- BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY
- echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY"
-
- if [[ -f ${BUILD_DIRECTORY}/../opnfv.properties ]]; then
- # if opnfv.properties exists then use the
- # local build. Source the file so we get local OPNFV vars
- source ${BUILD_DIRECTORY}/../opnfv.properties
- RPM_INSTALL_PATH=${BUILD_DIRECTORY}/noarch
- RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
- else
- echo "BUILD_DIRECTORY is from a daily job, so will not use latest from URL"
- echo "Check that the slave has opnfv.properties in $BUILD_DIRECTORY"
- exit 1
- fi
- else
- # use the latest from artifacts.opnfv.org
- # get the latest.properties to get the link to the latest artifact
- if ! wget -O $WORKSPACE/opnfv.properties http://$GS_URL/latest.properties; then
- echo "ERROR: Unable to find latest.properties at ${GS_URL}...exiting"
- exit 1
- fi
- # source the file so we get OPNFV vars
- source opnfv.properties
- RPM_INSTALL_PATH=$(echo "http://"$OPNFV_RPM_URL | sed 's/\/'"$(basename $OPNFV_RPM_URL)"'//')
- RPM_LIST=${RPM_INSTALL_PATH}/$(basename $OPNFV_RPM_URL)
- fi
-fi
-
-# rename odl_l3 to odl only for master
-# this can be removed once all the odl_l3 references
-# are updated to odl after the danube jobs are removed
-if [[ "$BUILD_DIRECTORY" == *master* ]]; then
- DEPLOY_SCENARIO=${DEPLOY_SCENARIO/odl_l3/odl}
-fi
if [ -z "$DEPLOY_SCENARIO" ]; then
echo "Deploy scenario not set!"
exit 1
@@ -85,75 +31,66 @@ elif [[ "$DEPLOY_SCENARIO" == *gate* ]]; then
fi
fi
-# use local build for verify and promote
-if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *promote* ]]; then
- if [ ! -e "${WORKSPACE}/build/lib" ]; then
- ln -s ${WORKSPACE}/lib ${WORKSPACE}/build/lib
- fi
- DEPLOY_SETTINGS_DIR="${WORKSPACE}/config/deploy"
- NETWORK_SETTINGS_DIR="${WORKSPACE}/config/network"
- DEPLOY_CMD="$(pwd)/deploy.sh"
- IMAGES="${WORKSPACE}/.build/"
- BASE="${WORKSPACE}/build"
- LIB="${WORKSPACE}/lib"
- # Make sure python34 deps are installed
- for dep_pkg in epel-release python34 python34-PyYAML python34-setuptools; do
- if ! rpm -q ${dep_pkg} > /dev/null; then
- if ! sudo yum install -y ${dep_pkg}; then
- echo "Failed to install ${dep_pkg}"
- exit 1
- fi
- fi
- done
-
- # Make sure jinja2 is installed
- for python_pkg in jinja2; do
- if ! python3.4 -c "import $python_pkg"; then
- echo "$python_pkg package not found for python3.4, attempting to install..."
- if ! sudo easy_install-3.4 $python_pkg; then
- echo -e "Failed to install $python_pkg package for python3.4"
- exit 1
- fi
- fi
- done
-
- # Make sure ipxe-roms-qemu package is updated to latest.
- # This package is needed for multi virtio nic PXE boot in virtual environment.
- sudo yum update -y ipxe-roms-qemu
-
- if [ -z ${PYTHONPATH:-} ]; then
- export PYTHONPATH=${WORKSPACE}/lib/python
- else
- export PYTHONPATH=$PYTHONPATH:${WORKSPACE}/lib/python
- fi
-# use RPMs
+# Dev or RPM/ISO build
+if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
+ # Settings for deploying from git workspace
+ DEPLOY_SETTINGS_DIR="${WORKSPACE}/config/deploy"
+ NETWORK_SETTINGS_DIR="${WORKSPACE}/config/network"
+ DEPLOY_CMD="${WORKSPACE}/ci/deploy.sh"
+ CLEAN_CMD="${WORKSPACE}/ci/clean.sh"
+ RESOURCES="${WORKSPACE}/.build/"
+ CONFIG="${WORKSPACE}/build"
+ BASE=$CONFIG
+ IMAGES=$RESOURCES
+ LIB="${WORKSPACE}/lib"
+
+ # Ensure artifacts were downloaded and extracted correctly
+ # TODO(trozet) add verification here
+
else
- # find version of RPM
- VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-([0-9]{8}|[a-z]+-[0-9]\.[0-9]+)')
- # build RPM List which already includes base Apex RPM
- for pkg in ${APEX_PKGS}; do
- RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}.noarch.rpm"
- done
-
- # remove old / install new RPMs
- if rpm -q opnfv-apex > /dev/null; then
- INSTALLED_RPMS=$(rpm -qa | grep apex)
- if [ -n "$INSTALLED_RPMS" ]; then
- sudo yum remove -y ${INSTALLED_RPMS}
- fi
- fi
+ DEPLOY_SETTINGS_DIR="/etc/opnfv-apex/"
+ NETWORK_SETTINGS_DIR="/etc/opnfv-apex/"
+ DEPLOY_CMD="opnfv-deploy"
+ CLEAN_CMD="opnfv-clean"
+ RESOURCES="/var/opt/opnfv/images"
+ CONFIG="/var/opt/opnfv"
+ BASE=$CONFIG
+ IMAGES=$RESOURCES
+ LIB="/var/opt/opnfv/lib"
- if ! sudo yum install -y $RPM_LIST; then
- echo "Unable to install new RPMs: $RPM_LIST"
+fi
+
+# Install Dependencies
+# Make sure python34 dependencies are installed
+for dep_pkg in epel-release python34 python34-PyYAML python34-setuptools; do
+ if ! rpm -q ${dep_pkg} > /dev/null; then
+ if ! sudo yum install -y ${dep_pkg}; then
+ echo "Failed to install ${dep_pkg}"
exit 1
fi
-
- DEPLOY_CMD=opnfv-deploy
- DEPLOY_SETTINGS_DIR="/etc/opnfv-apex/"
- NETWORK_SETTINGS_DIR="/etc/opnfv-apex/"
- IMAGES="/var/opt/opnfv/images"
- BASE="/var/opt/opnfv"
- LIB="/var/opt/opnfv/lib"
+ fi
+done
+
+# Make sure jinja2 is installed
+for python_pkg in jinja2; do
+ if ! python3.4 -c "import $python_pkg"; then
+ echo "$python_pkg package not found for python3.4, attempting to install..."
+ if ! sudo easy_install-3.4 $python_pkg; then
+ echo -e "Failed to install $python_pkg package for python3.4"
+ exit 1
+ fi
+ fi
+done
+
+if [[ "$JOB_NAME" =~ "virtual" ]]; then
+ # Make sure ipxe-roms-qemu package is updated to latest.
+ # This package is needed for multi virtio nic PXE boot in virtual environment.
+ sudo yum update -y ipxe-roms-qemu
+ if [ -z ${PYTHONPATH:-} ]; then
+ export PYTHONPATH=${WORKSPACE}/lib/python
+ else
+ export PYTHONPATH=$PYTHONPATH:${WORKSPACE}/lib/python
+ fi
fi
# set env vars to deploy cmd
@@ -165,11 +102,8 @@ if [ "$OPNFV_CLEAN" == 'yes' ]; then
else
clean_opts=''
fi
- if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *promote* ]]; then
- sudo BASE=${BASE} LIB=${LIB} ./clean.sh ${clean_opts}
- else
- sudo BASE=${BASE} LIB=${LIB} opnfv-clean ${clean_opts}
- fi
+
+ sudo BASE=${BASE} LIB=${LIB} ${CLEAN_CMD} ${clean_opts}
fi
if echo ${DEPLOY_SCENARIO} | grep ipv6; then
@@ -185,7 +119,7 @@ if [ ! -e "$DEPLOY_FILE" ]; then
echo "ERROR: Required settings file missing: Deploy settings file ${DEPLOY_FILE}"
fi
-if [[ "$JOB_NAME" == *virtual* ]]; then
+if [[ "$JOB_NAME" =~ "virtual" ]]; then
# settings for virtual deployment
DEPLOY_CMD="${DEPLOY_CMD} -v"
if [[ "${DEPLOY_SCENARIO}" =~ fdio|ovs ]]; then
@@ -194,7 +128,7 @@ if [[ "$JOB_NAME" == *virtual* ]]; then
if [[ "$JOB_NAME" == *csit* ]]; then
DEPLOY_CMD="${DEPLOY_CMD} -e csit-environment.yaml"
fi
- if [[ "$JOB_NAME" == *promote* ]]; then
+ if [[ "$PROMOTE" == "True" ]]; then
DEPLOY_CMD="${DEPLOY_CMD} --virtual-computes 2"
fi
else
@@ -202,17 +136,6 @@ else
NETWORK_SETTINGS_DIR="/root/network"
INVENTORY_FILE="/root/inventory/pod_settings.yaml"
-# (trozet) According to FDS folks uio_pci_generic works with UCS-B
-# and there appears to be a bug with vfio-pci
- # if fdio on baremetal, then we are using UCS enic and
- # need to use vfio-pci instead of uio generic
-# if [[ "$DEPLOY_SCENARIO" == *fdio* ]]; then
-# TMP_DEPLOY_FILE="${WORKSPACE}/${DEPLOY_SCENARIO}.yaml"
-# cp -f ${DEPLOY_FILE} ${TMP_DEPLOY_FILE}
-# sed -i 's/^\(\s*uio-driver:\).*$/\1 vfio-pci/g' ${TMP_DEPLOY_FILE}
-# DEPLOY_FILE=${TMP_DEPLOY_FILE}
-# fi
-
if ! sudo test -e "$INVENTORY_FILE"; then
echo "ERROR: Required settings file missing: Inventory settings file ${INVENTORY_FILE}"
exit 1
diff --git a/jjb/apex/apex-download-artifact.sh b/jjb/apex/apex-download-artifact.sh
new file mode 100755
index 000000000..52c3c67ec
--- /dev/null
+++ b/jjb/apex/apex-download-artifact.sh
@@ -0,0 +1,70 @@
+#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
+
+APEX_PKGS="common undercloud onos"
+
+# log info to console
+echo "Downloading the Apex artifact. This could take some time..."
+echo "--------------------------------------------------------"
+echo
+
+[[ -d $BUILD_DIRECTORY ]] || mkdir -p $BUILD_DIRECTORY
+
+if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
+ # dev build
+ GERRIT_PATCHSET_NUMBER=$(echo $GERRIT_REFSPEC | grep -Eo '[0-9]+$')
+ export OPNFV_ARTIFACT_VERSION="dev${GERRIT_CHANGE_NUMBER}_${GERRIT_PATCHSET_NUMBER}"
+ # get build artifact
+ pushd ${BUILD_DIRECTORY} > /dev/null
+ echo "Downloading packaged dev build: apex-${OPNFV_ARTIFACT_VERSION}.tar.gz"
+ curl --fail -s -o $BUILD_DIRECTORY/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz http://$GS_URL/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz
+ tar -xvf apex-${OPNFV_ARTIFACT_VERSION}.tar.gz
+ popd > /dev/null
+else
+ echo "Will download RPMs..."
+
+ # Must be RPMs/ISO
+ echo "Downloading latest properties file"
+
+ # get the properties file in order to get info regarding artifacts
+ curl --fail -s -o $BUILD_DIRECTORY/opnfv.properties http://$GS_URL/latest.properties
+
+ # source the file so we get OPNFV vars
+ source $BUILD_DIRECTORY/opnfv.properties
+
+ RPM_INSTALL_PATH=$(echo "http://"$OPNFV_RPM_URL | sed 's/\/'"$(basename $OPNFV_RPM_URL)"'//')
+ RPM_LIST=${RPM_INSTALL_PATH}/$(basename $OPNFV_RPM_URL)
+
+ # find version of RPM
+ VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-([0-9]{8}|[a-z]+-[0-9]\.[0-9]+)')
+ # build RPM List which already includes base Apex RPM
+ for pkg in ${APEX_PKGS}; do
+ RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}.noarch.rpm"
+ done
+
+ # remove old / install new RPMs
+ if rpm -q opnfv-apex > /dev/null; then
+ INSTALLED_RPMS=$(rpm -qa | grep apex)
+ if [ -n "$INSTALLED_RPMS" ]; then
+ sudo yum remove -y ${INSTALLED_RPMS}
+ fi
+ fi
+ if ! sudo yum install -y $RPM_LIST; then
+ echo "Unable to install new RPMs: $RPM_LIST"
+ exit 1
+ fi
+fi
+
+# TODO: Uncomment these lines to verify SHA512SUMs once the sums are
+# fixed.
+# echo "$OPNFV_ARTIFACT_SHA512SUM $BUILD_DIRECTORY/apex.iso" | sha512sum -c
+# echo "$OPNFV_RPM_SHA512SUM $BUILD_DIRECTORY/$(basename $OPNFV_RPM_URL)" | sha512sum -c
+
+# list the files
+ls -al $BUILD_DIRECTORY
+
+echo
+echo "--------------------------------------------------------"
+echo "Done!"
diff --git a/jjb/apex/apex-gs-cleanup.sh b/jjb/apex/apex-gs-cleanup.sh
deleted file mode 100755
index 1629aa85e..000000000
--- a/jjb/apex/apex-gs-cleanup.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# log info to console
-echo "Cleaning Google Storage"
-echo "-----------------------"
-echo
-
-thirty_days_ago=$(date -d "30 days ago" +"%Y%m%d")
-
-for i in $(gsutil ls gs://$GS_URL/*201?*); do
- filedate=$(date -d "$(echo $i | grep -Eo 201[0-9]-?[0-9][0-9]-?[0-9][0-9])" +"%Y%m%d")
- if [ $filedate -lt $thirty_days_ago ]; then
- # gsutil indicates what it is removing so no need for output here
- gsutil rm $i
- fi
-done
diff --git a/jjb/apex/apex-iso-verify.sh b/jjb/apex/apex-iso-verify.sh
new file mode 100755
index 000000000..f102421f3
--- /dev/null
+++ b/jjb/apex/apex-iso-verify.sh
@@ -0,0 +1,63 @@
+#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
+
+# log info to console
+echo "Starting the Apex iso verify."
+echo "--------------------------------------------------------"
+echo
+
+source $BUILD_DIRECTORY/../opnfv.properties
+
+if ! rpm -q virt-install > /dev/null; then
+ sudo yum -y install virt-install
+fi
+
+# define a clean function
+rm_apex_iso_verify () {
+if sudo virsh list --all | grep apex-iso-verify | grep running; then
+ sudo virsh destroy apex-iso-verify
+fi
+if sudo virsh list --all | grep apex-iso-verify; then
+ sudo virsh undefine apex-iso-verify
+fi
+}
+
+# Make sure a pre-existing iso-verify isn't there
+rm_apex_iso_verify
+
+#make sure there is not an existing console log file for the VM
+sudo rm -f /var/log/libvirt/qemu/apex-iso-verify-console.log
+
+# run an install from the iso
+# This streams a serial console to tcp port 3737 on localhost
+sudo virt-install -n apex-iso-verify -r 4096 --vcpus 4 --os-variant=rhel7 \
+ --accelerate -v --noautoconsole \
+ --disk path=/var/lib/libvirt/images/apex-iso-verify.qcow2,size=30,format=qcow2 \
+ -l $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso \
+ --extra-args 'console=ttyS0 console=ttyS0,115200n8 serial inst.ks=file:/iso-verify.ks inst.stage2=hd:LABEL=OPNFV\x20CentOS\x207\x20x86_64:/' \
+ --initrd-inject $BUILD_DIRECTORY/../ci/iso-verify.ks \
+ --serial file,path=/var/log/libvirt/qemu/apex-iso-verify-console.log
+
+echo "Waiting for install to finish..."
+sleep 10
+end_time=$(($SECONDS+1500))
+while ! [[ `sudo tail -n1 /var/log/libvirt/qemu/apex-iso-verify-console.log` =~ 'Power down' ]]; do
+ if [ $SECONDS -gt $end_time ] || ! sudo virsh list --all | grep apex-iso-verify | grep running > /dev/null; then
+ sudo cat /var/log/libvirt/qemu/apex-iso-verify-console.log
+ sudo virsh list --all
+ echo "Error: Failed to find power down message after install"
+ exit 1
+ fi
+ sleep 10
+done
+
+sudo cat /var/log/libvirt/qemu/apex-iso-verify-console.log
+
+# clean up
+rm_apex_iso_verify
+
+echo
+echo "--------------------------------------------------------"
+echo "Done!"
diff --git a/jjb/apex/apex-jjb-renderer.py b/jjb/apex/apex-jjb-renderer.py
new file mode 100644
index 000000000..6fd83afe9
--- /dev/null
+++ b/jjb/apex/apex-jjb-renderer.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2016 Tim Rozet (trozet@redhat.com) and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import pprint
+import yaml
+from jinja2 import Environment
+from jinja2 import FileSystemLoader
+
+gspathname = dict()
+branch = dict()
+build_slave = dict()
+env = Environment(loader=FileSystemLoader('./'), autoescape=True)
+
+with open('scenarios.yaml.hidden') as _:
+ scenarios = yaml.safe_load(_)
+
+template = env.get_template('apex.yml.j2')
+
+print("Scenarios are: ")
+pprint.pprint(scenarios)
+
+for stream in scenarios:
+ if stream == 'master':
+ gspathname['master'] = ''
+ branch[stream] = stream
+ else:
+ gspathname[stream] = '/' + stream
+ branch[stream] = 'stable/' + stream
+ build_slave[stream] = 'apex-baremetal-{}'.format(stream)
+
+output = template.render(scenarios=scenarios, gspathname=gspathname,
+ branch=branch, build_slave=build_slave)
+
+with open('./apex.yml', 'w') as fh:
+ fh.write(output)
diff --git a/jjb/apex/apex-snapshot-create.sh b/jjb/apex/apex-snapshot-create.sh
index b2a39449e..342896c7d 100644
--- a/jjb/apex/apex-snapshot-create.sh
+++ b/jjb/apex/apex-snapshot-create.sh
@@ -13,7 +13,11 @@ set -o nounset
set -o pipefail
SSH_OPTIONS=(-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o LogLevel=error)
-SNAP_TYPE=$(echo ${JOB_NAME} | sed -n 's/^apex-\(.\+\)-promote.*$/\1/p')
+
+if [ -z "$SNAP_TYPE" ]; then
+ echo "ERROR: SNAP_TYPE not provided...exiting"
+ exit 1
+fi
echo "Creating Apex snapshot..."
echo "-------------------------"
diff --git a/jjb/apex/apex-snapshot-deploy.sh b/jjb/apex/apex-snapshot-deploy.sh
index 06c002319..3eb3cf23a 100644
--- a/jjb/apex/apex-snapshot-deploy.sh
+++ b/jjb/apex/apex-snapshot-deploy.sh
@@ -129,6 +129,7 @@ if [ -z "$virsh_vm_defs" ]; then
fi
for node_def in ${virsh_vm_defs}; do
+ sed -ri "s/machine='[^\s]+'/machine='pc'/" ${node_def}
sudo virsh define ${node_def}
node=$(echo ${node_def} | awk -F '.' '{print $1}')
sudo cp -f ${node}.qcow2 /var/lib/libvirt/images/
diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh
index c2de7d70d..f53451d41 100755
--- a/jjb/apex/apex-upload-artifact.sh
+++ b/jjb/apex/apex-upload-artifact.sh
@@ -3,80 +3,82 @@ set -o errexit
set -o nounset
set -o pipefail
+if [ -z "$ARTIFACT_TYPE" ]; then
+ echo "ERROR: ARTIFACT_TYPE not provided...exiting"
+ exit 1
+fi
+
# log info to console
-echo "Uploading the Apex artifact. This could take some time..."
+echo "Uploading the Apex ${ARTIFACT_TYPE} artifact. This could take some time..."
echo "--------------------------------------------------------"
echo
-# source the opnfv.properties to get ARTIFACT_VERSION
-source $WORKSPACE/opnfv.properties
-
-BUILD_DIRECTORY=${WORKSPACE}/.build
-
-# clone releng repository
-echo "Cloning releng repository..."
-[ -d releng ] && rm -rf releng
-git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
-#this is where we import the siging key
-if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
- source $WORKSPACE/releng/utils/gpg_import_key.sh
+if [[ ! "$ARTIFACT_VERSION" =~ dev ]]; then
+ source $BUILD_DIRECTORY/../opnfv.properties
fi
+importkey () {
+ # clone releng repository
+ echo "Cloning releng repository..."
+ [ -d releng ] && rm -rf releng
+ git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
+ #this is where we import the siging key
+ if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
+ source $WORKSPACE/releng/utils/gpg_import_key.sh
+ fi
+}
+
signrpm () {
-for artifact in $RPM_LIST $SRPM_LIST; do
- echo "Signing artifact: ${artifact}"
- gpg2 -vvv --batch --yes --no-tty \
- --default-key opnfv-helpdesk@rt.linuxfoundation.org \
- --passphrase besteffort \
- --detach-sig $artifact
- gsutil cp "$artifact".sig gs://$GS_URL/$(basename "$artifact".sig)
- echo "Upload complete for ${artifact} signature"
-done
+ for artifact in $RPM_LIST $SRPM_LIST; do
+ echo "Signing artifact: ${artifact}"
+ gpg2 -vvv --batch --yes --no-tty \
+ --default-key opnfv-helpdesk@rt.linuxfoundation.org \
+ --passphrase besteffort \
+ --detach-sig $artifact
+ gsutil cp "$artifact".sig gs://$GS_URL/$(basename "$artifact".sig)
+ echo "Upload complete for ${artifact} signature"
+ done
}
signiso () {
-time gpg2 -vvv --batch --yes --no-tty \
- --default-key opnfv-helpdesk@rt.linuxfoundation.org \
- --passphrase besteffort \
- --detach-sig $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso
+ gpg2 -vvv --batch --yes --no-tty \
+ --default-key opnfv-helpdesk@rt.linuxfoundation.org \
+ --passphrase besteffort \
+ --detach-sig $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso
-gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig
-echo "ISO signature Upload Complete!"
+ gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig
+ echo "ISO signature Upload Complete!"
}
uploadiso () {
-# upload artifact and additional files to google storage
-gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log
-echo "ISO Upload Complete!"
-RPM_INSTALL_PATH=$BUILD_DIRECTORY/noarch
-RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
-VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//')
-for pkg in common undercloud; do # removed onos for danube
- RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
-done
-SRPM_INSTALL_PATH=$BUILD_DIRECTORY
-SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL)
-VERSION_EXTENSION=$(echo $(basename $OPNFV_SRPM_URL) | sed 's/opnfv-apex-//')
-for pkg in common undercloud; do # removed onos for danube
- SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
-done
+ gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log
+ echo "ISO Upload Complete!"
}
uploadrpm () {
-#This is where we upload the rpms
-for artifact in $RPM_LIST $SRPM_LIST; do
- echo "Uploading artifact: ${artifact}"
- gsutil cp $artifact gs://$GS_URL/$(basename $artifact) > gsutil.iso.log
- echo "Upload complete for ${artifact}"
-done
-gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log
-gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log
+ for artifact in $RPM_LIST $SRPM_LIST; do
+ echo "Uploading artifact: ${artifact}"
+ gsutil cp $artifact gs://$GS_URL/$(basename $artifact) > gsutil.iso.log
+ echo "Upload complete for ${artifact}"
+ done
+ gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log
+ gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log
+
+ # Make the property files viewable on the artifact site
+ gsutil -m setmeta \
+ -h "Content-Type:text/html" \
+ -h "Cache-Control:private, max-age=0, no-transform" \
+ gs://$GS_URL/latest.properties \
+ gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1
}
uploadsnap () {
# Uploads snapshot artifact and updated properties file
echo "Uploading snapshot artifacts"
- SNAP_TYPE=$(echo ${JOB_NAME} | sed -n 's/^apex-\(.\+\)-promote.*$/\1/p')
+ if [ -z "$SNAP_TYPE" ]; then
+ echo "ERROR: SNAP_TYPE not provided...exiting"
+ exit 1
+ fi
gsutil cp $WORKSPACE/apex-${SNAP_TYPE}-snap-`date +%Y-%m-%d`.tar.gz gs://$GS_URL/ > gsutil.iso.log
if [ "$SNAP_TYPE" == 'csit' ]; then
gsutil cp $WORKSPACE/snapshot.properties gs://$GS_URL/snapshot.properties > gsutil.latest.log
@@ -84,21 +86,69 @@ uploadsnap () {
echo "Upload complete for Snapshot"
}
-if echo $WORKSPACE | grep promote > /dev/null; then
- uploadsnap
-elif gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then
+uploadimages () {
+ # Uploads dev tarball
+ GERRIT_PATCHSET_NUMBER=$(echo $GERRIT_REFSPEC | grep -Eo '[0-9]+$')
+ export OPNFV_ARTIFACT_VERSION="dev${GERRIT_CHANGE_NUMBER}_${GERRIT_PATCHSET_NUMBER}"
+ echo "Uploading development build tarball"
+ pushd $BUILD_DIRECTORY > /dev/null
+ tar czf apex-${OPNFV_ARTIFACT_VERSION}.tar.gz *.qcow2 *.vmlinuz *.initrd
+ gsutil cp apex-${OPNFV_ARTIFACT_VERSION}.tar.gz gs://$GS_URL/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz > gsutil.latest.log
+ popd > /dev/null
+}
+
+# Always import the signing key, if it's available the artifacts will be
+# signed before being uploaded
+importkey
+
+if gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then
echo "Signing Key avaliable"
- signiso
+ SIGN_ARTIFACT="true"
+fi
+
+if [ "$ARTIFACT_TYPE" == 'snapshot' ]; then
+ uploadsnap
+elif [ "$ARTIFACT_TYPE" == 'iso' ]; then
+ if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
+ echo "Skipping artifact upload for ${ARTIFACT_TYPE} due to dev build"
+ exit 0
+ fi
+ if [[ -n "$SIGN_ARTIFACT" && "$SIGN_ARTIFACT" == "true" ]]; then
+ signiso
+ fi
uploadiso
- signrpm
- uploadrpm
+elif [ "$ARTIFACT_TYPE" == 'rpm' ]; then
+ if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
+ echo "dev build detected, will upload image tarball"
+ ARTIFACT_TYPE=tarball
+ uploadimages
+ else
+ RPM_INSTALL_PATH=$BUILD_DIRECTORY/noarch
+ RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
+ VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//')
+ for pkg in common undercloud onos; do
+ RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
+ done
+ SRPM_INSTALL_PATH=$BUILD_DIRECTORY
+ SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL)
+ VERSION_EXTENSION=$(echo $(basename $OPNFV_SRPM_URL) | sed 's/opnfv-apex-//')
+ for pkg in common undercloud onos; do
+ SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
+ done
+
+ if [[ -n "$SIGN_ARTIFACT" && "$SIGN_ARTIFACT" == "true" ]]; then
+ signrpm
+ fi
+ uploadrpm
+ fi
else
- uploadiso
- uploadrpm
+ echo "ERROR: Unknown artifact type ${ARTIFACT_TYPE} to upload...exiting"
+ exit 1
fi
echo
echo "--------------------------------------------------------"
echo "Done!"
-echo "ISO Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
-echo "RPM Artifact is available as http://$GS_URL/$(basename $OPNFV_RPM_URL)"
+if [ "$ARTIFACT_TYPE" == 'iso' ]; then echo "ISO Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"; fi
+if [ "$ARTIFACT_TYPE" == 'rpm' ]; then echo "RPM Artifact is available as http://$GS_URL/$(basename $OPNFV_RPM_URL)"; fi
+if [ "$ARTIFACT_TYPE" == 'tarball' ]; then echo "Dev tarball Artifact is available as http://$GS_URL/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz)"; fi
diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index e7982ba55..a395cf2e5 100644
--- a/jjb/apex/apex.yml
+++ b/jjb/apex/apex.yml
@@ -1,64 +1,42 @@
- project:
- name: apex
+ name: 'apex'
+ project: 'apex'
jobs:
- 'apex-verify-{stream}'
- 'apex-verify-gate-{stream}'
- 'apex-verify-unit-tests-{stream}'
- - 'apex-runner-{platform}-{scenario}-{stream}'
- 'apex-runner-cperf-{stream}'
- 'apex-build-{stream}'
- - 'apex-deploy-virtual-{scenario}-{stream}'
- - 'apex-deploy-baremetal-{scenario}-{stream}'
- - 'apex-daily-{stream}'
+ - 'apex-deploy-{platform}-{stream}'
+ - 'apex-daily-master'
+ - 'apex-daily-danube'
- 'apex-csit-promote-daily-{stream}'
- 'apex-fdio-promote-daily-{stream}'
-
+ - 'apex-verify-iso-{stream}'
+ - 'apex-run-deploy-test-baremetal-{stream}'
+ - 'apex-upload-snapshot'
+ - 'apex-create-snapshot'
# stream: branch with - in place of / (eg. stable-arno)
# branch: branch (eg. stable/arno)
stream:
- master:
branch: 'master'
gs-pathname: ''
- slave: 'lf-pod1'
- verify-slave: 'apex-verify-master'
- daily-slave: 'apex-daily-master'
+ build-slave: 'apex-build-master'
+ virtual-slave: 'apex-virtual-master'
+ baremetal-slave: 'apex-baremetal-master'
+ verify-scenario: 'os-odl-nofeature-ha'
+ concurrent-builds: 3
+
- danube:
branch: 'stable/danube'
gs-pathname: '/danube'
- slave: 'lf-pod1'
- verify-slave: 'apex-verify-danube'
- daily-slave: 'apex-daily-danube'
-
- project: 'apex'
-
- scenario:
- - 'os-nosdn-nofeature-noha'
- - 'os-nosdn-nofeature-ha'
- - 'os-nosdn-nofeature-ha-ipv6'
- - 'os-nosdn-ovs-noha'
- - 'os-nosdn-ovs-ha'
- - 'os-nosdn-fdio-noha'
- - 'os-nosdn-fdio-ha'
- - 'os-nosdn-kvm-ha'
- - 'os-nosdn-kvm-noha'
- - 'os-odl_l2-fdio-noha'
- - 'os-odl_l2-fdio-ha'
- - 'os-odl_l2-netvirt_gbp_fdio-noha'
- - 'os-odl_l2-sfc-noha'
- - 'os-odl_l3-nofeature-noha'
- - 'os-odl_l3-nofeature-ha'
- - 'os-odl_l3-ovs-noha'
- - 'os-odl_l3-ovs-ha'
- - 'os-odl-bgpvpn-ha'
- - 'os-odl-gluon-noha'
- - 'os-odl_l3-fdio-noha'
- - 'os-odl_l3-fdio-ha'
- - 'os-odl_l3-fdio_dvr-noha'
- - 'os-odl_l3-fdio_dvr-ha'
- - 'os-odl_l3-csit-noha'
- - 'os-onos-nofeature-ha'
- - 'os-ovn-nofeature-noha'
- - 'gate'
+ build-slave: 'apex-build-danube'
+ virtual-slave: 'apex-virtual-danube'
+ baremetal-slave: 'apex-baremetal-danube'
+ verify-scenario: 'os-odl_l3-nofeature-ha'
+ concurrent-builds: 1
+ disabled: false
platform:
- 'baremetal'
@@ -68,7 +46,7 @@
- job-template:
name: 'apex-verify-unit-tests-{stream}'
- node: '{verify-slave}'
+ node: '{build-slave}'
concurrent: true
@@ -78,10 +56,6 @@
- project-parameter:
project: '{project}'
branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from parameters macro."
scm:
- git-scm-gerrit
@@ -116,27 +90,29 @@
option: 'project'
builders:
- - 'apex-unit-test'
+ - shell:
+ !include-raw-escape: ./apex-unit-test.sh
# Verify
- job-template:
name: 'apex-verify-{stream}'
- node: '{verify-slave}'
+ node: '{virtual-slave}'
concurrent: true
+ project-type: 'multijob'
+
parameters:
- apex-parameter:
- gs-pathname: '{gs-pathname}'
+ gs-pathname: '{gs-pathname}/dev'
- project-parameter:
project: '{project}'
branch: '{branch}'
- string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from parameters macro."
-
+ name: ARTIFACT_VERSION
+ default: dev
+ description: "Used for overriding the ARTIFACT_VERSION"
scm:
- git-scm-gerrit
@@ -171,51 +147,94 @@
properties:
- logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-daily.*'
- - 'apex-deploy.*'
- - 'apex-build.*'
- - 'apex-runner.*'
- - 'apex-verify.*'
- throttle:
- max-per-node: 1
+ max-per-node: 3
max-total: 10
option: 'project'
builders:
- - 'apex-unit-test'
- - 'apex-build'
- - trigger-builds:
- - project: 'apex-deploy-virtual-os-odl_l3-nofeature-ha-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-verify-{stream}
- OPNFV_CLEAN=yes
- git-revision: false
- block: true
- same-node: true
- - trigger-builds:
- - project: 'functest-apex-{verify-slave}-suite-{stream}'
- predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
- FUNCTEST_SUITE_NAME=healthcheck
- block: true
- same-node: true
- - 'apex-workspace-cleanup'
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - multijob:
+ name: basic
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-verify-unit-tests-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: build
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-build-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ ARTIFACT_VERSION=$ARTIFACT_VERSION
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: deploy-virtual
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-deploy-virtual-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ ARTIFACT_VERSION=$ARTIFACT_VERSION
+ DEPLOY_SCENARIO={verify-scenario}
+ OPNFV_CLEAN=yes
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: functest-smoke
+ condition: SUCCESSFUL
+ projects:
+ - name: 'functest-apex-virtual-suite-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO={verify-scenario}
+ FUNCTEST_SUITE_NAME=healthcheck
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: true
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
# Verify Scenario Gate
- job-template:
name: 'apex-verify-gate-{stream}'
- node: '{verify-slave}'
+ node: '{virtual-slave}'
concurrent: true
+ project-type: 'multijob'
+
parameters:
- apex-parameter:
- gs-pathname: '{gs-pathname}'
+ gs-pathname: '{gs-pathname}/dev'
- project-parameter:
project: '{project}'
branch: '{branch}'
@@ -223,6 +242,10 @@
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: "Used for overriding the GIT URL coming from parameters macro."
+ - string:
+ name: ARTIFACT_VERSION
+ default: dev
+ description: "Used for overriding the ARTIFACT_VERSION"
scm:
- git-scm-gerrit
@@ -251,46 +274,55 @@
properties:
- logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-daily.*'
- - 'apex-deploy.*'
- - 'apex-build.*'
- - 'apex-runner.*'
- - 'apex-verify.*'
- throttle:
- max-per-node: 1
+ max-per-node: 3
max-total: 10
option: 'project'
builders:
- - 'apex-build'
- - trigger-builds:
- - project: 'apex-deploy-virtual-gate-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-verify-gate-{stream}
- OPNFV_CLEAN=yes
- current-parameters: true
- git-revision: false
- block: true
- same-node: true
- - trigger-builds:
- - project: 'functest-apex-{verify-slave}-suite-{stream}'
- predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-nofeature-ha
- FUNCTEST_SUITE_NAME=healthcheck
- block: true
- same-node: true
- - 'apex-workspace-cleanup'
+ - multijob:
+ name: deploy-virtual
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-deploy-virtual-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ ARTIFACT_VERSION=$ARTIFACT_VERSION
+ DEPLOY_SCENARIO=gate
+ OPNFV_CLEAN=yes
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ GERRIT_EVENT_COMMENT_TEXT=$GERRIT_EVENT_COMMENT_TEXT
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: functest-smoke
+ condition: SUCCESSFUL
+ projects:
+ - name: 'functest-apex-virtual-suite-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO={verify-scenario}
+ FUNCTEST_SUITE_NAME=healthcheck
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: true
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
- job-template:
- name: 'apex-runner-{platform}-{scenario}-{stream}'
-
- # runner jobs for deploying manually
+ name: 'apex-runner-cperf-{stream}'
- node: '{slave}'
+ # runner cperf job
+ project-type: 'multijob'
+ node: 'intel-pod2'
disabled: false
@@ -311,97 +343,105 @@
properties:
- logrotate-default
- build-blocker:
- use-build-blocker: true
+ use-build-blocker: false
+ block-level: 'NODE'
blocking-jobs:
- - 'apex-daily.*'
- - 'apex-verify.*'
- - 'apex-.*-promote.*'
+ - 'apex-deploy.*'
+ - throttle:
+ max-per-node: 1
+ max-total: 10
+ option: 'project'
builders:
- - trigger-builds:
- - project: 'apex-deploy-{platform}-{scenario}-{stream}'
- predefined-parameters:
- OPNFV_CLEAN=yes
- git-revision: false
- block: true
- - trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'functest-apex-{slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
-
+ - description-setter:
+ description: "Deployed on $NODE_NAME"
+ - multijob:
+ name: 'Baremetal Deploy'
+ condition: ALWAYS
+ projects:
+ - name: 'apex-deploy-baremetal-{stream}'
+ node-parameters: false
+ current-parameters: true
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=
+ OPNFV_CLEAN=yes
+ DEPLOY_SCENARIO={verify-scenario}
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+ - multijob:
+ name: CPERF
+ condition: SUCCESSFUL
+ projects:
+ - name: 'cperf-apex-intel-pod2-daily-master'
+ node-parameters: true
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={verify-scenario}
+ kill-phase-on: NEVER
+ abort-all-job: false
+ git-revision: false
+
+# Build phase
- job-template:
- name: 'apex-runner-cperf-{stream}'
-
- # runner cperf job
+ name: 'apex-build-{stream}'
- node: 'intel-pod2'
+ # Job template for builds
+ #
+ # Required Variables:
+ # stream: branch with - in place of / (eg. stable)
+ # branch: branch (eg. stable)
+ node: '{build-slave}'
disabled: false
+ concurrent: true
+
parameters:
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
+ - '{project}-defaults'
- project-parameter:
project: '{project}'
branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from parameters macro."
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}'
scm:
- - git-scm
+ - git-scm-gerrit
+
+ wrappers:
+ - timeout:
+ timeout: 150
+ fail: true
properties:
- logrotate-default
- - build-blocker:
- use-build-blocker: false
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-deploy.*'
- throttle:
- max-per-node: 1
+ max-per-node: {concurrent-builds}
max-total: 10
option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify-iso-{stream}'
builders:
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl_l3-nofeature-noha-{stream}'
- predefined-parameters:
- OPNFV_CLEAN=yes
- git-revision: false
- block: true
- same-node: true
- - trigger-builds:
- - project: 'cperf-apex-intel-pod2-daily-master'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l3-nofeature-noha
- block: true
- same-node: true
+ - 'apex-build'
+ - inject:
+ properties-content: ARTIFACT_TYPE=rpm
+ - 'apex-upload-artifact'
+# ISO verify job
- job-template:
- name: 'apex-build-{stream}'
+ name: 'apex-verify-iso-{stream}'
# Job template for builds
#
# Required Variables:
# stream: branch with - in place of / (eg. stable)
# branch: branch (eg. stable)
- node: '{daily-slave}'
+ node: '{virtual-slave}'
disabled: false
@@ -423,44 +463,39 @@
properties:
- logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-deploy.*'
- throttle:
max-per-node: 1
max-total: 10
option: 'project'
builders:
- - 'apex-build'
- - trigger-builds:
- - project: 'apex-deploy-virtual-os-nosdn-nofeature-noha-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream}/.build
- OPNFV_CLEAN=yes
- git-revision: false
- same-node: true
- block: true
+ - 'apex-iso-verify'
+ - inject:
+ properties-content: ARTIFACT_TYPE=iso
- 'apex-upload-artifact'
+# Deploy job
- job-template:
- name: 'apex-deploy-virtual-{scenario}-{stream}'
+ name: 'apex-deploy-{platform}-{stream}'
# Job template for virtual deployment
#
# Required Variables:
# stream: branch with - in place of / (eg. stable)
# branch: branch (eg. stable)
- node: '{slave}'
+ node: 'apex-{platform}-{stream}'
concurrent: true
disabled: false
scm:
- - git-scm
+ - git-scm-gerrit
+
+ wrappers:
+ - timeout:
+ timeout: 120
+ fail: true
parameters:
- project-parameter:
@@ -470,7 +505,7 @@
gs-pathname: '{gs-pathname}'
- string:
name: DEPLOY_SCENARIO
- default: '{scenario}'
+ default: '{verify-scenario}'
description: "Scenario to deploy with."
- string:
name: OPNFV_CLEAN
@@ -484,24 +519,31 @@
block-level: 'NODE'
blocking-jobs:
- 'apex-deploy.*'
+ - 'functest.*'
+ - 'yardstick.*'
- throttle:
max-per-node: 1
max-total: 10
option: 'project'
builders:
+ - description-setter:
+ description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
+ - 'apex-download-artifact'
- 'apex-deploy'
- 'apex-workspace-cleanup'
+
+# Baremetal Deploy and Test
- job-template:
- name: 'apex-deploy-baremetal-{scenario}-{stream}'
+ name: 'apex-run-deploy-test-baremetal-{stream}'
- # Job template for baremetal deployment
+ # Job template for daily build
#
# Required Variables:
# stream: branch with - in place of / (eg. stable)
# branch: branch (eg. stable)
- node: '{slave}'
+ project-type: 'multijob'
disabled: false
@@ -509,6 +551,8 @@
- git-scm
parameters:
+ - '{project}-defaults'
+ - '{project}-baremetal-{stream}-defaults'
- project-parameter:
project: '{project}'
branch: '{branch}'
@@ -516,9 +560,8 @@
gs-pathname: '{gs-pathname}'
- string:
name: DEPLOY_SCENARIO
- default: '{scenario}'
+ default: '{verify-scenario}'
description: "Scenario to deploy with."
-
properties:
- logrotate-default
- build-blocker:
@@ -526,24 +569,65 @@
block-level: 'NODE'
blocking-jobs:
- 'apex-verify.*'
- - 'apex-deploy.*'
- - 'apex-build.*'
-
-
+ - 'apex-runner.*'
+ - 'apex-.*-promote.*'
+ - 'apex-run.*'
builders:
- - 'apex-deploy'
- - 'apex-workspace-cleanup'
+ - description-setter:
+ description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
+ - multijob:
+ name: 'Baremetal Deploy'
+ condition: ALWAYS
+ projects:
+ - name: 'apex-deploy-baremetal-{stream}'
+ node-parameters: true
+ current-parameters: true
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+ - multijob:
+ name: Functest
+ condition: ALWAYS
+ projects:
+ - name: 'functest-apex-baremetal-daily-{stream}'
+ node-parameters: true
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ kill-phase-on: NEVER
+ abort-all-job: false
+ git-revision: false
+ - multijob:
+ name: Yardstick
+ condition: ALWAYS
+ projects:
+ - name: 'yardstick-apex-baremetal-daily-{stream}'
+ node-parameters: true
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ kill-phase-on: NEVER
+ abort-all-job: false
+ git-revision: false
+
-# Daily
+# danube Daily
- job-template:
- name: 'apex-daily-{stream}'
+ name: 'apex-daily-danube'
# Job template for daily build
#
# Required Variables:
# stream: branch with - in place of / (eg. stable)
# branch: branch (eg. stable)
- node: '{daily-slave}'
+ project-type: 'multijob'
+
+ node: 'apex-baremetal-danube'
disabled: false
@@ -551,422 +635,330 @@
- git-scm
parameters:
+ - '{project}-defaults'
+ - '{project}-baremetal-danube-defaults'
- project-parameter:
project: '{project}'
- branch: '{branch}'
+ branch: 'stable/danube'
- apex-parameter:
- gs-pathname: '{gs-pathname}'
+ gs-pathname: '/danube'
properties:
- logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-deploy.*'
- - 'apex-build.*'
- - 'apex-runner.*'
- - 'apex-.*-promote.*'
triggers:
- - 'apex-{stream}'
+ - 'apex-danube'
builders:
- - trigger-builds:
- - project: 'apex-build-{stream}'
- git-revision: true
- current-parameters: true
- same-node: true
- block: true
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-nosdn-nofeature-ha-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream}/.build
- OPNFV_CLEAN=yes
- git-revision: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- block: true
- - trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-nosdn-nofeature-ha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-nosdn-nofeature-ha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- # 1.dovetail only master by now, not sync with A/B/C branches
- # 2.here the stream means the SUT stream, dovetail stream is defined in its own job
- # 3.only debug testsuite here(includes basic testcase,
- # i.e. one tempest smoke ipv6, two vping from functest)
- # 4.not used for release criteria or compliance,
- # only to debug the dovetail tool bugs with apex
- #- trigger-builds:
- # - project: 'dovetail-apex-{slave}-debug-{stream}'
- # current-parameters: false
- # predefined-parameters:
- # DEPLOY_SCENARIO=os-nosdn-nofeature-ha
- # block: true
- # same-node: true
- # block-thresholds:
- # build-step-failure-threshold: 'never'
- # failure-threshold: 'never'
- # unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl_l3-nofeature-ha-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream}/.build
- OPNFV_CLEAN=yes
- git-revision: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- block: true
- - trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl-bgpvpn-ha-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream}/.build
- OPNFV_CLEAN=yes
- git-revision: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- block: true
- - trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl-bgpvpn-ha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl-bgpvpn-ha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl-gluon-noha-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream}/.build
- OPNFV_CLEAN=yes
- git-revision: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- block: true
- - trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl-gluon-noha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl-gluon-noha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl_l2-fdio-noha-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream}/.build
- OPNFV_CLEAN=yes
- git-revision: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- block: true
- - trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l2-fdio-noha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l2-fdio-noha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl_l2-fdio-ha-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream}/.build
- OPNFV_CLEAN=yes
- git-revision: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- block: true
- - trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l2-fdio-ha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l2-fdio-ha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-nosdn-kvm-ha-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream}/.build
- OPNFV_CLEAN=yes
- git-revision: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- block: true
- - trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-nosdn-kvm-ha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-nosdn-kvm-ha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl_l3-fdio-noha-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream}/.build
- OPNFV_CLEAN=yes
- git-revision: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- block: true
- - trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l3-fdio-noha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l3-fdio-noha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-nosdn-fdio-ha-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream}/.build
- OPNFV_CLEAN=yes
- git-revision: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- block: true
- - trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-nosdn-fdio-ha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-nosdn-fdio-ha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-nosdn-ovs-ha-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream}/.build
- OPNFV_CLEAN=yes
- git-revision: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- block: true
- - trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-nosdn-ovs-ha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-nosdn-ovs-ha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl_l3-ovs-ha-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream}/.build
- OPNFV_CLEAN=yes
- git-revision: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- block: true
- - trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l3-ovs-ha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l3-ovs-ha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-ovn-nofeature-noha-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream}/.build
- OPNFV_CLEAN=yes
- git-revision: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- block: true
- - trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-ovn-nofeature-noha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-ovn-nofeature-noha
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
+ - multijob:
+ name: build
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-build-danube'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: 'Verify and upload ISO'
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-verify-iso-danube'
+ current-parameters: false
+ predefined-parameters: |
+ BUILD_DIRECTORY=$WORKSPACE/../apex-build-danube/.build
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: Baremetal Deploy and Test Phase
+ condition: SUCCESSFUL
+ projects:
-# CSIT promote
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-nofeature-noha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-nofeature-ha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-nofeature-ha-ipv6
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-ovs-noha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-ovs-ha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-fdio-noha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-fdio-ha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-kvm-ha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-kvm-noha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l2-fdio-noha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l2-fdio-ha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_netvirt-fdio-noha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l2-sfc-noha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l3-nofeature-noha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l3-ovs-noha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l3-ovs-ha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl-bgpvpn-ha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl-gluon-noha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l3-fdio-noha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l3-fdio-ha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l3-fdio_dvr-noha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l3-fdio_dvr-ha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l3-csit-noha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-onos-nofeature-ha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-danube'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-ovn-nofeature-noha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+
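The danube daily job above fans out one 'apex-run-deploy-test-baremetal-danube' stanza per scenario, and the stanzas differ only in DEPLOY_SCENARIO. A rough, purely illustrative spot check of that fan-out against the rendered YAML (file path assumed):

# List every scenario the danube daily job dispatches, with a count per scenario
grep -o "DEPLOY_SCENARIO=os-[A-Za-z0-9_-]*" jjb/apex/apex.yml | sort | uniq -c
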
+# master Daily
- job-template:
- name: 'apex-csit-promote-daily-{stream}'
+ name: 'apex-daily-master'
- # Job template for promoting CSIT Snapshots
+ # Job template for daily build
#
# Required Variables:
# stream: branch with - in place of / (eg. stable)
# branch: branch (eg. stable)
- node: '{daily-slave}'
+ project-type: 'multijob'
+
+ node: 'apex-baremetal-master'
disabled: false
@@ -974,59 +966,141 @@
- git-scm
parameters:
+ - '{project}-defaults'
+ - '{project}-baremetal-master-defaults'
- project-parameter:
project: '{project}'
- branch: '{branch}'
+ branch: 'master'
- apex-parameter:
- gs-pathname: '{gs-pathname}'
+ gs-pathname: ''
properties:
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-deploy.*'
- - 'apex-build.*'
- - 'apex-runner.*'
- - 'apex-daily.*'
+ - logrotate-default
triggers:
- - timed: '0 12 * * 0'
+ - 'apex-master'
+
+ builders:
+ - multijob:
+ name: build
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-build-master'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: 'Verify and upload ISO'
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-verify-iso-master'
+ current-parameters: false
+ predefined-parameters: |
+ BUILD_DIRECTORY=$WORKSPACE/../apex-build-master/.build
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: Baremetal Deploy and Test Phase
+ condition: SUCCESSFUL
+ projects:
+
+ - name: 'apex-run-deploy-test-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-nofeature-noha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-nofeature-ha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl-nofeature-ha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-run-deploy-test-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl-nofeature-noha
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+
+
+# snapshot create
+- job-template:
+ name: 'apex-create-snapshot'
+
+  # Job template for creating a deployment snapshot
+ #
+ # Required Variables:
+ # stream: branch with - in place of / (eg. stable)
+
+ disabled: false
builders:
- - 'apex-build'
- - trigger-builds:
- - project: 'apex-deploy-virtual-os-odl_l3-csit-noha-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-csit-promote-daily-{stream}
- OPNFV_CLEAN=yes
- git-revision: false
- block: true
- same-node: true
- - trigger-builds:
- - project: 'functest-apex-{daily-slave}-suite-{stream}'
- predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_l3-nofeature-noha
- FUNCTEST_SUITE_NAME=tempest_smoke_serial
- block: true
- same-node: true
- shell:
!include-raw-escape: ./apex-snapshot-create.sh
- - shell:
- !include-raw-escape: ./apex-upload-artifact.sh
-# FDIO promote
+# snapshot upload
- job-template:
- name: 'apex-fdio-promote-daily-{stream}'
+ name: 'apex-upload-snapshot'
+
+  # Job template for uploading a deployment snapshot
+ #
+ # Required Variables:
+ # stream: branch with - in place of / (eg. stable)
+
+ disabled: false
+
+ builders:
+ - inject:
+ properties-content: ARTIFACT_TYPE=snapshot
+ - 'apex-upload-artifact'
+
+# CSIT promote
+- job-template:
+ name: 'apex-csit-promote-daily-{stream}'
# Job template for promoting CSIT Snapshots
#
# Required Variables:
# stream: branch with - in place of / (eg. stable)
# branch: branch (eg. stable)
- node: '{daily-slave}'
-
+ node: '{virtual-slave}'
+ project-type: 'multijob'
disabled: false
scm:
@@ -1046,36 +1120,104 @@
blocking-jobs:
- 'apex-verify.*'
- 'apex-deploy.*'
- - 'apex-build.*'
- 'apex-runner.*'
- 'apex-daily.*'
+ triggers:
+ - timed: '0 12 * * 0'
+
builders:
- - 'apex-build'
- - trigger-builds:
- - project: 'apex-deploy-virtual-os-odl_l2-fdio-noha-{stream}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-fdio-promote-daily-{stream}
- OPNFV_CLEAN=yes
- git-revision: false
- block: true
- same-node: true
- - shell:
- !include-raw-escape: ./apex-snapshot-create.sh
- - shell:
- !include-raw-escape: ./apex-upload-artifact.sh
+ - multijob:
+ name: build
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-build-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: deploy-virtual
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-deploy-virtual-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l3-csit-noha
+ OPNFV_CLEAN=yes
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ PROMOTE=True
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+ - multijob:
+ name: functest-smoke
+ condition: SUCCESSFUL
+ projects:
+ - name: 'functest-apex-virtual-suite-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l3-nofeature-noha
+ FUNCTEST_SUITE_NAME=tempest_smoke_serial
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+ - multijob:
+ name: create snapshot
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-create-snapshot'
+ current-parameters: false
+ predefined-parameters: |
+ SNAP_TYPE=csit
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+ - multijob:
+ name: upload snapshot
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-upload-snapshot'
+ current-parameters: false
+ predefined-parameters: |
+ SNAP_TYPE=csit
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
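Both promote jobs hand SNAP_TYPE (csit or fdio) to the shared apex-create-snapshot / apex-upload-snapshot jobs. Purely as a sketch of how a downstream script could branch on that variable — the real handling lives in apex-snapshot-create.sh and apex-upload-artifact.sh, which are not shown here:

# Hypothetical consumer of SNAP_TYPE; not the actual snapshot script
case "${SNAP_TYPE:-csit}" in
  csit) echo "packaging snapshot for OpenDaylight CSIT" ;;
  fdio) echo "packaging snapshot for FD.io/VPP" ;;
  *)    echo "unsupported SNAP_TYPE=${SNAP_TYPE}" >&2; exit 1 ;;
esac
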
+# FDIO promote
- job-template:
- name: 'apex-gs-clean-{stream}'
+ name: 'apex-fdio-promote-daily-{stream}'
- # Job template for clean
+  # Job template for promoting FDIO Snapshots
#
# Required Variables:
# stream: branch with - in place of / (eg. stable)
- node: '{slave}'
-
+ # branch: branch (eg. stable)
+ node: '{virtual-slave}'
+ project-type: 'multijob'
disabled: false
+ scm:
+ - git-scm
+
parameters:
- project-parameter:
project: '{project}'
@@ -1083,12 +1225,74 @@
- apex-parameter:
gs-pathname: '{gs-pathname}'
- builders:
- - 'apex-gs-clean'
-
- triggers:
- - 'apex-gs-clean-{stream}'
+ properties:
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify.*'
+ - 'apex-deploy.*'
+ - 'apex-runner.*'
+ - 'apex-daily.*'
+ builders:
+ - multijob:
+ name: build
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-build-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: deploy-virtual
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-deploy-virtual-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_netvirt-fdio-noha
+ OPNFV_CLEAN=yes
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ PROMOTE=True
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+ - multijob:
+ name: create snapshot
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-create-snapshot'
+ current-parameters: false
+ predefined-parameters: |
+ SNAP_TYPE=fdio
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+ - multijob:
+ name: upload snapshot
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-upload-snapshot'
+ current-parameters: false
+ predefined-parameters: |
+ SNAP_TYPE=fdio
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
########################
# parameter macros
@@ -1106,11 +1310,11 @@
description: "Artifact version type"
- string:
name: BUILD_DIRECTORY
- default: $WORKSPACE/build
+ default: $WORKSPACE/.build
description: "Directory where the build artifact will be located upon the completion of the build."
- string:
name: CACHE_DIRECTORY
- default: $HOME/opnfv/cache{gs-pathname}
+ default: $HOME/opnfv/apex-cache{gs-pathname}
description: "Directory where the cache to be used during the build is located."
- string:
name: GIT_BASE
@@ -1122,8 +1326,12 @@
description: "Version directory where opnfv artifacts are stored in gs repository"
- string:
name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
+ default: $GS_BASE{gs-pathname}
description: "URL to Google Storage."
+ - string:
+ name: PROMOTE
+ default: 'False'
+ description: "Flag to know if we should promote/upload snapshot artifacts."
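The new PROMOTE parameter defaults to 'False' and is set to True by the promote jobs above. A minimal sketch of how an upload script might gate on it — an assumption; the actual logic lives in apex-upload-artifact.sh:

# Hypothetical gate on the PROMOTE parameter
if [ "${PROMOTE}" == "True" ]; then
  echo "promotion requested: snapshot artifacts will be uploaded to ${GS_URL}"
else
  echo "PROMOTE=${PROMOTE}: skipping snapshot promotion"
fi
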
########################
# builder macros
@@ -1147,16 +1355,23 @@
!include-raw: ./apex-workspace-cleanup.sh
- builder:
+ name: 'apex-iso-verify'
+ builders:
+ - shell:
+ !include-raw: ./apex-iso-verify.sh
+
+
+- builder:
name: 'apex-upload-artifact'
builders:
- shell:
!include-raw: ./apex-upload-artifact.sh
- builder:
- name: 'apex-gs-cleanup'
+ name: 'apex-download-artifact'
builders:
- shell:
- !include-raw: ./apex-gs-cleanup.sh
+ !include-raw: ./apex-download-artifact.sh
- builder:
name: 'apex-deploy'
@@ -1164,7 +1379,6 @@
- shell:
!include-raw: ./apex-deploy.sh
-
#######################
# trigger macros
########################
@@ -1176,7 +1390,3 @@
name: 'apex-danube'
triggers:
- timed: '0 12 * * *'
-- trigger:
- name: 'apex-gs-clean-{stream}'
- triggers:
- - timed: '0 2 * * *'
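Before pushing a refactor of this size, the templates can be expanded locally with Jenkins Job Builder's test mode to confirm the multijob jobs still render. A sketch; the include paths and expected job names are assumptions:

pip install --user jenkins-job-builder
jenkins-jobs test -o /tmp/jjb-out jjb/global/ jjb/apex/apex.yml
ls /tmp/jjb-out | grep '^apex-daily-'    # expect apex-daily-master and apex-daily-danube
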
diff --git a/jjb/apex/apex.yml.j2 b/jjb/apex/apex.yml.j2
new file mode 100644
index 000000000..752cf281d
--- /dev/null
+++ b/jjb/apex/apex.yml.j2
@@ -0,0 +1,1032 @@
+- project:
+ name: 'apex'
+ project: 'apex'
+ jobs:
+ - 'apex-verify-{stream}'
+ - 'apex-verify-gate-{stream}'
+ - 'apex-verify-unit-tests-{stream}'
+ - 'apex-runner-cperf-{stream}'
+ - 'apex-build-{stream}'
+ - 'apex-deploy-{platform}-{stream}'
+ - 'apex-daily-master'
+ - 'apex-daily-danube'
+ - 'apex-csit-promote-daily-{stream}'
+ - 'apex-fdio-promote-daily-{stream}'
+ - 'apex-verify-iso-{stream}'
+ - 'apex-run-deploy-test-baremetal-{stream}'
+ - 'apex-upload-snapshot'
+ - 'apex-create-snapshot'
+ # stream: branch with - in place of / (eg. stable-arno)
+ # branch: branch (eg. stable/arno)
+ stream:
+ - master:
+ branch: 'master'
+ gs-pathname: ''
+ build-slave: 'apex-build-master'
+ virtual-slave: 'apex-virtual-master'
+ baremetal-slave: 'apex-baremetal-master'
+ verify-scenario: 'os-odl-nofeature-ha'
+ concurrent-builds: 3
+
+ - danube:
+ branch: 'stable/danube'
+ gs-pathname: '/danube'
+ build-slave: 'apex-build-danube'
+ virtual-slave: 'apex-virtual-danube'
+ baremetal-slave: 'apex-baremetal-danube'
+ verify-scenario: 'os-odl_l3-nofeature-ha'
+ concurrent-builds: 1
+ disabled: false
+
+ platform:
+ - 'baremetal'
+ - 'virtual'
+
+# Unit Test
+- job-template:
+ name: 'apex-verify-unit-tests-{stream}'
+
+ node: '{build-slave}'
+
+ concurrent: true
+
+ parameters:
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'true'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'apex'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: ANT
+ pattern: 'tests/**'
+ properties:
+ - logrotate-default
+ - throttle:
+ max-per-node: 1
+ max-total: 10
+ option: 'project'
+
+ builders:
+ - shell:
+ !include-raw-escape: ./apex-unit-test.sh
+
+# Verify
+- job-template:
+ name: 'apex-verify-{stream}'
+
+ node: '{virtual-slave}'
+
+ concurrent: true
+
+ project-type: 'multijob'
+
+ parameters:
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}/dev'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: ARTIFACT_VERSION
+ default: dev
+ description: "Used for overriding the ARTIFACT_VERSION"
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'true'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'apex'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: ANT
+ pattern: 'ci/**'
+ - compare-type: ANT
+ pattern: 'build/**'
+ - compare-type: ANT
+ pattern: 'lib/**'
+ - compare-type: ANT
+ pattern: 'config/**'
+
+ properties:
+ - logrotate-default
+ - throttle:
+ max-per-node: 3
+ max-total: 10
+ option: 'project'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - multijob:
+ name: basic
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-verify-unit-tests-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: build
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-build-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ ARTIFACT_VERSION=$ARTIFACT_VERSION
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: deploy-virtual
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-deploy-virtual-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ ARTIFACT_VERSION=$ARTIFACT_VERSION
+ DEPLOY_SCENARIO={verify-scenario}
+ OPNFV_CLEAN=yes
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: functest-smoke
+ condition: SUCCESSFUL
+ projects:
+ - name: 'functest-apex-virtual-suite-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO={verify-scenario}
+ FUNCTEST_SUITE_NAME=healthcheck
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: true
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+# Verify Scenario Gate
+- job-template:
+ name: 'apex-verify-gate-{stream}'
+
+ node: '{virtual-slave}'
+
+ concurrent: true
+
+ project-type: 'multijob'
+
+ parameters:
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}/dev'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: "Used for overriding the GIT URL coming from parameters macro."
+ - string:
+ name: ARTIFACT_VERSION
+ default: dev
+ description: "Used for overriding the ARTIFACT_VERSION"
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - comment-added-contains-event:
+ comment-contains-value: '^Patch Set [0-9]+: Code-Review\+2.*start-gate-scenario:.*'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'apex'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: ANT
+ pattern: 'ci/**'
+ - compare-type: ANT
+ pattern: 'build/**'
+ - compare-type: ANT
+ pattern: 'lib/**'
+ - compare-type: ANT
+ pattern: 'config/**'
+
+ properties:
+ - logrotate-default
+ - throttle:
+ max-per-node: 3
+ max-total: 10
+ option: 'project'
+
+ builders:
+ - multijob:
+ name: deploy-virtual
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-deploy-virtual-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ ARTIFACT_VERSION=$ARTIFACT_VERSION
+ DEPLOY_SCENARIO=gate
+ OPNFV_CLEAN=yes
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ GERRIT_EVENT_COMMENT_TEXT=$GERRIT_EVENT_COMMENT_TEXT
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: functest-smoke
+ condition: SUCCESSFUL
+ projects:
+ - name: 'functest-apex-virtual-suite-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO={verify-scenario}
+ FUNCTEST_SUITE_NAME=healthcheck
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: true
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+- job-template:
+ name: 'apex-runner-cperf-{stream}'
+
+ # runner cperf job
+ project-type: 'multijob'
+ node: 'intel-pod2'
+
+ disabled: false
+
+ parameters:
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: "Used for overriding the GIT URL coming from parameters macro."
+
+ scm:
+ - git-scm
+
+ properties:
+ - logrotate-default
+ - build-blocker:
+ use-build-blocker: false
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-deploy.*'
+ - throttle:
+ max-per-node: 1
+ max-total: 10
+ option: 'project'
+
+ builders:
+ - description-setter:
+ description: "Deployed on $NODE_NAME"
+ - multijob:
+ name: 'Baremetal Deploy'
+ condition: ALWAYS
+ projects:
+ - name: 'apex-deploy-baremetal-{stream}'
+ node-parameters: false
+ current-parameters: true
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=
+ OPNFV_CLEAN=yes
+ DEPLOY_SCENARIO={verify-scenario}
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+ - multijob:
+ name: CPERF
+ condition: SUCCESSFUL
+ projects:
+ - name: 'cperf-apex-intel-pod2-daily-master'
+ node-parameters: true
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={verify-scenario}
+ kill-phase-on: NEVER
+ abort-all-job: false
+ git-revision: false
+
+# Build phase
+- job-template:
+ name: 'apex-build-{stream}'
+
+ # Job template for builds
+ #
+ # Required Variables:
+ # stream: branch with - in place of / (eg. stable)
+ # branch: branch (eg. stable)
+ node: '{build-slave}'
+
+ disabled: false
+
+ concurrent: true
+
+ parameters:
+ - '{project}-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}'
+
+ scm:
+ - git-scm-gerrit
+
+ wrappers:
+ - timeout:
+ timeout: 150
+ fail: true
+
+ properties:
+ - logrotate-default
+ - throttle:
+ max-per-node: {concurrent-builds}
+ max-total: 10
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify-iso-{stream}'
+
+ builders:
+ - 'apex-build'
+ - inject:
+ properties-content: ARTIFACT_TYPE=rpm
+ - 'apex-upload-artifact'
+
+# ISO verify job
+- job-template:
+ name: 'apex-verify-iso-{stream}'
+
+ # Job template for builds
+ #
+ # Required Variables:
+ # stream: branch with - in place of / (eg. stable)
+ # branch: branch (eg. stable)
+ node: '{virtual-slave}'
+
+ disabled: false
+
+ concurrent: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: "Used for overriding the GIT URL coming from parameters macro."
+
+ scm:
+ - git-scm
+
+ properties:
+ - logrotate-default
+ - throttle:
+ max-per-node: 1
+ max-total: 10
+ option: 'project'
+
+ builders:
+ - 'apex-iso-verify'
+ - inject:
+ properties-content: ARTIFACT_TYPE=iso
+ - 'apex-upload-artifact'
+
+# Deploy job
+- job-template:
+ name: 'apex-deploy-{platform}-{stream}'
+
+  # Job template for virtual or baremetal deployment
+ #
+ # Required Variables:
+ # stream: branch with - in place of / (eg. stable)
+ # branch: branch (eg. stable)
+ node: 'apex-{platform}-{stream}'
+
+ concurrent: true
+
+ disabled: false
+
+ scm:
+ - git-scm-gerrit
+
+ wrappers:
+ - timeout:
+ timeout: 120
+ fail: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: '{verify-scenario}'
+ description: "Scenario to deploy with."
+ - string:
+ name: OPNFV_CLEAN
+ default: 'no'
+        description: "Use 'yes' (lower case) to invoke clean. Indicates whether the deploy environment should be cleaned before deployment."
+
+ properties:
+ - logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-deploy.*'
+ - 'functest.*'
+ - 'yardstick.*'
+ - throttle:
+ max-per-node: 1
+ max-total: 10
+ option: 'project'
+
+ builders:
+ - description-setter:
+ description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
+ - 'apex-download-artifact'
+ - 'apex-deploy'
+ - 'apex-workspace-cleanup'
+
+
+# Baremetal Deploy and Test
+- job-template:
+ name: 'apex-run-deploy-test-baremetal-{stream}'
+
+  # Job template for baremetal deploy and test
+ #
+ # Required Variables:
+ # stream: branch with - in place of / (eg. stable)
+ # branch: branch (eg. stable)
+ project-type: 'multijob'
+
+ disabled: false
+
+ scm:
+ - git-scm
+
+ parameters:
+ - '{project}-defaults'
+ - '{project}-baremetal-{stream}-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: '{verify-scenario}'
+ description: "Scenario to deploy with."
+ properties:
+ - logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify.*'
+ - 'apex-runner.*'
+ - 'apex-.*-promote.*'
+ - 'apex-run.*'
+ builders:
+ - description-setter:
+ description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
+ - multijob:
+ name: 'Baremetal Deploy'
+ condition: ALWAYS
+ projects:
+ - name: 'apex-deploy-baremetal-{stream}'
+ node-parameters: true
+ current-parameters: true
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+ - multijob:
+ name: Functest
+ condition: ALWAYS
+ projects:
+ - name: 'functest-apex-baremetal-daily-{stream}'
+ node-parameters: true
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ kill-phase-on: NEVER
+ abort-all-job: false
+ git-revision: false
+ - multijob:
+ name: Yardstick
+ condition: ALWAYS
+ projects:
+ - name: 'yardstick-apex-baremetal-daily-{stream}'
+ node-parameters: true
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ kill-phase-on: NEVER
+ abort-all-job: false
+ git-revision: false
+
+{% for stream in scenarios %}
+# {{ stream }} Daily
+- job-template:
+ name: 'apex-daily-{{ stream }}'
+
+ # Job template for daily build
+ #
+ # Required Variables:
+ # stream: branch with - in place of / (eg. stable)
+ # branch: branch (eg. stable)
+ project-type: 'multijob'
+
+ node: '{{ build_slave[stream] }}'
+
+ disabled: false
+
+ scm:
+ - git-scm
+
+ parameters:
+ - '{project}-defaults'
+ - '{project}-baremetal-{{ stream }}-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{{ branch[stream] }}'
+ - apex-parameter:
+ gs-pathname: '{{ gspathname[stream] }}'
+
+ properties:
+ - logrotate-default
+
+ triggers:
+ - 'apex-{{ stream }}'
+
+ builders:
+ - multijob:
+ name: build
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-build-{{ stream }}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: 'Verify and upload ISO'
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-verify-iso-{{ stream }}'
+ current-parameters: false
+ predefined-parameters: |
+ BUILD_DIRECTORY=$WORKSPACE/../apex-build-{{ stream }}/.build
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: Baremetal Deploy and Test Phase
+ condition: SUCCESSFUL
+ projects:
+{% for scenario in scenarios[stream] %}
+ - name: 'apex-run-deploy-test-baremetal-{{ stream }}'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO={{scenario}}
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+{% endfor %}
+{% endfor %}
+
+# snapshot create
+- job-template:
+ name: 'apex-create-snapshot'
+
+  # Job template for creating a deployment snapshot
+ #
+ # Required Variables:
+ # stream: branch with - in place of / (eg. stable)
+
+ disabled: false
+
+ builders:
+ - shell:
+ !include-raw-escape: ./apex-snapshot-create.sh
+
+# snapshot upload
+- job-template:
+ name: 'apex-upload-snapshot'
+
+  # Job template for uploading a deployment snapshot
+ #
+ # Required Variables:
+ # stream: branch with - in place of / (eg. stable)
+
+ disabled: false
+
+ builders:
+ - inject:
+ properties-content: ARTIFACT_TYPE=snapshot
+ - 'apex-upload-artifact'
+
+# CSIT promote
+- job-template:
+ name: 'apex-csit-promote-daily-{stream}'
+
+ # Job template for promoting CSIT Snapshots
+ #
+ # Required Variables:
+ # stream: branch with - in place of / (eg. stable)
+ # branch: branch (eg. stable)
+ node: '{virtual-slave}'
+ project-type: 'multijob'
+ disabled: false
+
+ scm:
+ - git-scm
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}'
+
+ properties:
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify.*'
+ - 'apex-deploy.*'
+ - 'apex-runner.*'
+ - 'apex-daily.*'
+
+ triggers:
+ - timed: '0 12 * * 0'
+
+ builders:
+ - multijob:
+ name: build
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-build-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: deploy-virtual
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-deploy-virtual-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l3-csit-noha
+ OPNFV_CLEAN=yes
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ PROMOTE=True
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+ - multijob:
+ name: functest-smoke
+ condition: SUCCESSFUL
+ projects:
+ - name: 'functest-apex-virtual-suite-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l3-nofeature-noha
+ FUNCTEST_SUITE_NAME=tempest_smoke_serial
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+ - multijob:
+ name: create snapshot
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-create-snapshot'
+ current-parameters: false
+ predefined-parameters: |
+ SNAP_TYPE=csit
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+ - multijob:
+ name: upload snapshot
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-upload-snapshot'
+ current-parameters: false
+ predefined-parameters: |
+ SNAP_TYPE=csit
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+
+# FDIO promote
+- job-template:
+ name: 'apex-fdio-promote-daily-{stream}'
+
+  # Job template for promoting FDIO Snapshots
+ #
+ # Required Variables:
+ # stream: branch with - in place of / (eg. stable)
+ # branch: branch (eg. stable)
+ node: '{virtual-slave}'
+ project-type: 'multijob'
+ disabled: false
+
+ scm:
+ - git-scm
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}'
+
+ properties:
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify.*'
+ - 'apex-deploy.*'
+ - 'apex-runner.*'
+ - 'apex-daily.*'
+
+ builders:
+ - multijob:
+ name: build
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-build-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: true
+ - multijob:
+ name: deploy-virtual
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-deploy-virtual-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_netvirt-fdio-noha
+ OPNFV_CLEAN=yes
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ PROMOTE=True
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+ - multijob:
+ name: create snapshot
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-create-snapshot'
+ current-parameters: false
+ predefined-parameters: |
+ SNAP_TYPE=fdio
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+ - multijob:
+ name: upload snapshot
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-upload-snapshot'
+ current-parameters: false
+ predefined-parameters: |
+ SNAP_TYPE=fdio
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+
+########################
+# parameter macros
+########################
+- parameter:
+ name: apex-parameter
+ parameters:
+ - string:
+ name: ARTIFACT_NAME
+ default: 'latest'
+ description: "RPM Artifact name that will be appended to GS_URL to deploy a specific artifact"
+ - string:
+ name: ARTIFACT_VERSION
+ default: 'daily'
+ description: "Artifact version type"
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/.build
+ description: "Directory where the build artifact will be located upon the completion of the build."
+ - string:
+ name: CACHE_DIRECTORY
+ default: $HOME/opnfv/apex-cache{gs-pathname}
+ description: "Directory where the cache to be used during the build is located."
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+        description: "Used for overriding the GIT URL coming from the Global Jenkins configuration, in case the job runs on non-LF hardware."
+ - string:
+ name: GS_PATHNAME
+ default: '{gs-pathname}'
+ description: "Version directory where opnfv artifacts are stored in gs repository"
+ - string:
+ name: GS_URL
+ default: $GS_BASE{gs-pathname}
+ description: "URL to Google Storage."
+ - string:
+ name: PROMOTE
+ default: 'False'
+ description: "Flag to know if we should promote/upload snapshot artifacts."
+
+########################
+# builder macros
+########################
+- builder:
+ name: 'apex-unit-test'
+ builders:
+ - shell:
+ !include-raw: ./apex-unit-test.sh
+
+- builder:
+ name: 'apex-build'
+ builders:
+ - shell:
+ !include-raw: ./apex-build.sh
+
+- builder:
+ name: 'apex-workspace-cleanup'
+ builders:
+ - shell:
+ !include-raw: ./apex-workspace-cleanup.sh
+
+- builder:
+ name: 'apex-iso-verify'
+ builders:
+ - shell:
+ !include-raw: ./apex-iso-verify.sh
+
+
+- builder:
+ name: 'apex-upload-artifact'
+ builders:
+ - shell:
+ !include-raw: ./apex-upload-artifact.sh
+
+- builder:
+ name: 'apex-download-artifact'
+ builders:
+ - shell:
+ !include-raw: ./apex-download-artifact.sh
+
+- builder:
+ name: 'apex-deploy'
+ builders:
+ - shell:
+ !include-raw: ./apex-deploy.sh
+
+#######################
+# trigger macros
+########################
+- trigger:
+ name: 'apex-master'
+ triggers:
+ - timed: '0 3 1 1 7'
+- trigger:
+ name: 'apex-danube'
+ triggers:
+ - timed: '0 12 * * *'
+
diff --git a/jjb/apex/scenarios.yaml.hidden b/jjb/apex/scenarios.yaml.hidden
new file mode 100644
index 000000000..dc9107a11
--- /dev/null
+++ b/jjb/apex/scenarios.yaml.hidden
@@ -0,0 +1,32 @@
+master:
+ - 'os-nosdn-nofeature-noha'
+ - 'os-nosdn-nofeature-ha'
+ - 'os-odl-nofeature-ha'
+ - 'os-odl-nofeature-noha'
+danube:
+ - 'os-nosdn-nofeature-noha'
+ - 'os-nosdn-nofeature-ha'
+ - 'os-nosdn-nofeature-ha-ipv6'
+ - 'os-nosdn-ovs-noha'
+ - 'os-nosdn-ovs-ha'
+ - 'os-nosdn-fdio-noha'
+ - 'os-nosdn-fdio-ha'
+ - 'os-nosdn-kvm-ha'
+ - 'os-nosdn-kvm-noha'
+ - 'os-odl_l2-fdio-noha'
+ - 'os-odl_l2-fdio-ha'
+ - 'os-odl_netvirt-fdio-noha'
+ - 'os-odl_l2-sfc-noha'
+ - 'os-odl_l3-nofeature-noha'
+ - 'os-odl_l3-nofeature-ha'
+ - 'os-odl_l3-ovs-noha'
+ - 'os-odl_l3-ovs-ha'
+ - 'os-odl-bgpvpn-ha'
+ - 'os-odl-gluon-noha'
+ - 'os-odl_l3-fdio-noha'
+ - 'os-odl_l3-fdio-ha'
+ - 'os-odl_l3-fdio_dvr-noha'
+ - 'os-odl_l3-fdio_dvr-ha'
+ - 'os-odl_l3-csit-noha'
+ - 'os-onos-nofeature-ha'
+ - 'os-ovn-nofeature-noha'
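scenarios.yaml.hidden is the list the apex.yml.j2 loops iterate over, so the rendered apex.yml should contain one deploy/test stanza per scenario listed here. A rough consistency check — hypothetical, and permissive because it relies on substring matches:

# Verify every listed scenario shows up as a DEPLOY_SCENARIO in the rendered YAML
grep -oE "os-[A-Za-z0-9_-]+" jjb/apex/scenarios.yaml.hidden | sort -u | \
while read -r scen; do
  grep -q "DEPLOY_SCENARIO=${scen}" jjb/apex/apex.yml || echo "missing stanza for ${scen}"
done
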
diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml
index 38a729de6..55d8ff975 100644
--- a/jjb/armband/armband-ci-jobs.yml
+++ b/jjb/armband/armband-ci-jobs.yml
@@ -56,8 +56,12 @@
slave-label: arm-pod3
installer: fuel
<<: *danube
- - arm-pod3-2:
- slave-label: arm-pod3-2
+ - arm-pod4:
+ slave-label: arm-pod4
+ installer: fuel
+ <<: *danube
+ - arm-virtual1:
+ slave-label: arm-virtual1
installer: fuel
<<: *danube
#--------------------------------
@@ -71,8 +75,12 @@
slave-label: arm-pod3
installer: fuel
<<: *master
- - arm-pod3-2:
- slave-label: arm-pod3-2
+ - arm-pod4:
+ slave-label: arm-pod4
+ installer: fuel
+ <<: *master
+ - arm-virtual1:
+ slave-label: arm-virtual1
installer: fuel
<<: *master
#--------------------------------
@@ -174,23 +182,28 @@
build-step-failure-threshold: 'never'
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- # 1.dovetail only master by now, not sync with A/B/C branches
+ # 1.dovetail only master, based on D release
# 2.here the stream means the SUT stream, dovetail stream is defined in its own job
- # 3.only debug testsuite here(includes 3 basic testcase,
- # i.e. one tempest smoke ipv6, two vping from functest)
+      # 3.only proposed_tests testsuite here (refstack, ha, ipv6, bgpvpn)
# 4.not used for release criteria or compliance,
# only to debug the dovetail tool bugs with arm pods
- - trigger-builds:
- - project: 'dovetail-{installer}-{pod}-debug-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
+      # 5.only run against scenarios matching os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha
+ - conditional-step:
+ condition-kind: regex-match
+ regex: os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha
+ label: '{scenario}'
+ steps:
+ - trigger-builds:
+ - project: 'dovetail-{installer}-{pod}-proposed_tests-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
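The conditional-step above only triggers dovetail when the scenario matches the regex. The same gate can be reasoned about locally with bash's =~ operator — illustrative only; Jenkins' regex-match label condition is what actually runs:

# Which scenarios would trigger dovetail proposed_tests?
for scenario in os-nosdn-nofeature-ha os-odl_l2-bgpvpn-ha os-odl_l2-sfc-ha; do
  if [[ "${scenario}" =~ os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha ]]; then
    echo "${scenario}: dovetail proposed_tests triggered"
  else
    echo "${scenario}: dovetail skipped"
  fi
done
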
- job-template:
name: '{installer}-deploy-{pod}-daily-{stream}'
@@ -270,31 +283,31 @@
- trigger:
name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 0 * * 1'
+ - timed: ''
- trigger:
name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 0 * * 2'
+ - timed: ''
- trigger:
name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 0 * * 3'
+ - timed: ''
- trigger:
name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 0 * * 4'
+ - timed: ''
- trigger:
name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 0 * * 5'
+ - timed: ''
- trigger:
name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 0,20 * * 6'
+ - timed: ''
- trigger:
name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 0,20 * * 7'
+ - timed: ''
#----------------------------------------------------------------------
# Enea Armband CI Baremetal Triggers running against danube branch
@@ -302,62 +315,62 @@
- trigger:
name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-danube-trigger'
triggers:
- - timed: '0 4 * * 1,2,3,4,5'
+ - timed: '0 0,16 * * 2,4'
- trigger:
name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-danube-trigger'
triggers:
- - timed: '0 8 * * 1,2,3,4,5'
+ - timed: '0 0 * * 1,5,7'
- trigger:
name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-danube-trigger'
triggers:
- - timed: '0 12 * * 1,2,3,4,5'
+ - timed: '0 16 * * 1,5,7'
- trigger:
name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-danube-trigger'
triggers:
- - timed: '0 16 * * 1,2,3,4,5'
+ - timed: '0 8 * * 2,4,6'
- trigger:
name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-danube-trigger'
triggers:
- - timed: '0 20 * * 1,2,3,4,5'
+ - timed: '0 8 * * 1,3,5,7'
- trigger:
name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-danube-trigger'
triggers:
- - timed: '0 4,8 * * 6,7'
+ - timed: '0 0 * * 3,6'
- trigger:
name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-danube-trigger'
triggers:
- - timed: '0 12,16 * * 6,7'
+ - timed: '0 16 * * 3,6'
#---------------------------------------------------------------
# Enea Armband CI Virtual Triggers running against master branch
#---------------------------------------------------------------
- trigger:
name: 'fuel-os-odl_l2-nofeature-ha-armband-virtual-master-trigger'
triggers:
- - timed: '0 2 * * 1'
+ - timed: ''
- trigger:
name: 'fuel-os-nosdn-nofeature-ha-armband-virtual-master-trigger'
triggers:
- - timed: '0 2 * * 2'
+ - timed: ''
- trigger:
name: 'fuel-os-odl_l3-nofeature-ha-armband-virtual-master-trigger'
triggers:
- - timed: '0 2 * * 3'
+ - timed: ''
- trigger:
name: 'fuel-os-odl_l2-bgpvpn-ha-armband-virtual-master-trigger'
triggers:
- - timed: '0 2 * * 4'
+ - timed: ''
- trigger:
name: 'fuel-os-odl_l2-nofeature-noha-armband-virtual-master-trigger'
triggers:
- - timed: '0 2 * * 5'
+ - timed: ''
- trigger:
name: 'fuel-os-odl_l2-sfc-ha-armband-virtual-master-trigger'
triggers:
- - timed: '0 2 * * 6'
+ - timed: ''
- trigger:
name: 'fuel-os-odl_l2-sfc-noha-armband-virtual-master-trigger'
triggers:
- - timed: '0 2 * * 7'
+ - timed: ''
#--------------------------------------------------------------------
# Enea Armband CI Virtual Triggers running against danube branch
#--------------------------------------------------------------------
@@ -389,6 +402,71 @@
name: 'fuel-os-odl_l2-sfc-noha-armband-virtual-danube-trigger'
triggers:
- timed: ''
+
+#--------------------------------------------------------------------
+# Enea Armband Non CI Virtual Triggers running against danube branch
+#--------------------------------------------------------------------
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-ha-arm-virtual1-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-ha-arm-virtual1-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-ha-arm-virtual1-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-ha-arm-virtual1-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-noha-arm-virtual1-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-ha-arm-virtual1-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-noha-arm-virtual1-danube-trigger'
+ triggers:
+ - timed: ''
+
+#--------------------------------------------------------------------
+# Enea Armband Non CI Virtual Triggers running against master branch
+#--------------------------------------------------------------------
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-ha-arm-virtual1-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-ha-arm-virtual1-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-ha-arm-virtual1-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-ha-arm-virtual1-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-noha-arm-virtual1-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-ha-arm-virtual1-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-noha-arm-virtual1-master-trigger'
+ triggers:
+ - timed: ''
+
#----------------------------------------------------------
# Enea Armband POD 2 Triggers running against master branch
#----------------------------------------------------------
@@ -517,61 +595,61 @@
# Enea Armband POD 3 Triggers running against master branch (aarch64 slave)
#--------------------------------------------------------------------------
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-arm-pod3-2-master-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-arm-pod4-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-arm-pod3-2-master-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-arm-pod4-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-arm-pod3-2-master-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-arm-pod4-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod3-2-master-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod4-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-arm-pod3-2-master-trigger'
+ name: 'fuel-os-odl_l2-nofeature-noha-arm-pod4-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-arm-pod3-2-master-trigger'
+ name: 'fuel-os-odl_l2-sfc-ha-arm-pod4-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-arm-pod3-2-master-trigger'
+ name: 'fuel-os-odl_l2-sfc-noha-arm-pod4-master-trigger'
triggers:
- timed: ''
#--------------------------------------------------------------------------
# Enea Armband POD 3 Triggers running against danube branch (aarch64 slave)
#--------------------------------------------------------------------------
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-arm-pod3-2-danube-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-arm-pod4-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-arm-pod3-2-danube-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-arm-pod4-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-arm-pod3-2-danube-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-arm-pod4-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod3-2-danube-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod4-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-arm-pod3-2-danube-trigger'
+ name: 'fuel-os-odl_l2-nofeature-noha-arm-pod4-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-arm-pod3-2-danube-trigger'
+ name: 'fuel-os-odl_l2-sfc-ha-arm-pod4-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-arm-pod3-2-danube-trigger'
+ name: 'fuel-os-odl_l2-sfc-noha-arm-pod4-danube-trigger'
triggers:
- timed: ''
diff --git a/jjb/armband/armband-deploy.sh b/jjb/armband/armband-deploy.sh
index 2e5aa3924..e445e0850 100755
--- a/jjb/armband/armband-deploy.sh
+++ b/jjb/armband/armband-deploy.sh
@@ -33,10 +33,10 @@ fi
# set deployment parameters
export TMPDIR=${WORKSPACE}/tmpdir
-# arm-pod3-2 is an aarch64 jenkins slave for the same POD as the
+# arm-pod4 is an aarch64 jenkins slave for the same POD as the
# x86 jenkins slave arm-pod3; therefore we use the same pod name
# to deploy the pod from both jenkins slaves
-if [[ "${NODE_NAME}" == "arm-pod3-2" ]]; then
+if [[ "${NODE_NAME}" == "arm-pod4" ]]; then
NODE_NAME="arm-pod3"
fi
diff --git a/jjb/barometer/barometer.yml b/jjb/barometer/barometer.yml
index 9ec30e809..68b8a04c0 100644
--- a/jjb/barometer/barometer.yml
+++ b/jjb/barometer/barometer.yml
@@ -126,21 +126,16 @@
disabled: '{obj:disabled}'
- concurrent: true
+ concurrent: false
properties:
- logrotate-default
- - throttle:
- enabled: true
- max-total: 3
- max-per-node: 2
- option: 'project'
parameters:
- project-parameter:
project: '{project}'
branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
+ - 'opnfv-build-centos-defaults'
scm:
- git-scm
@@ -151,7 +146,6 @@
builders:
- shell: |
pwd
- cd src
- ./install_build_deps.sh
- make clobber
- make
+ cd ci
+ ./install_dependencies.sh
+ ./build_rpm.sh
diff --git a/jjb/bottlenecks/bottlenecks-run-suite.sh b/jjb/bottlenecks/bottlenecks-run-suite.sh
index 0df659af2..e6f8d1ba5 100644
--- a/jjb/bottlenecks/bottlenecks-run-suite.sh
+++ b/jjb/bottlenecks/bottlenecks-run-suite.sh
@@ -2,6 +2,7 @@
#set -e
[[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
BOTTLENECKS_IMAGE=opnfv/bottlenecks
+REPORT="True"
if [[ $SUITE_NAME == rubbos || $SUITE_NAME == vstf ]]; then
echo "Bottlenecks: to pull image $BOTTLENECKS_IMAGE:${DOCKER_TAG}"
@@ -53,11 +54,11 @@ else
echo "Bottlenecks: pulling tutum/influxdb for yardstick"
docker pull tutum/influxdb:0.13
sleep 5
- docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE
+ docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE $REPORT
elif [[ $SUITE_NAME == posca_stress_ping ]]; then
TEST_CASE=posca_factor_ping
sleep 5
- docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE
+ docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE $REPORT
fi
echo "Bottlenecks: cleaning up docker-compose images and dockers"
diff --git a/jjb/ci_gate_security/anteater-report-to-gerrit.sh b/jjb/ci_gate_security/anteater-report-to-gerrit.sh
new file mode 100644
index 000000000..71c5a0679
--- /dev/null
+++ b/jjb/ci_gate_security/anteater-report-to-gerrit.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+set -o errexit
+set -o pipefail
+export PATH=$PATH:/usr/local/bin/
+EXITSTATUS=0
+
+# This Log should always exist
+if [[ -e securityaudit.log ]] ; then
+ echo -e "\nposting security audit report to gerrit...\n"
+
+ #check if log has errors
+ if grep ERROR securityaudit.log; then
+ EXITSTATUS=1
+ fi
+
+ cat securityaudit.log | awk -F"ERROR - " '{print $2}' > shortlog
+
+ ssh -p 29418 gerrit.opnfv.org \
+ "gerrit review -p $GERRIT_PROJECT \
+ -m \"$(cat shortlog)\" \
+ $GERRIT_PATCHSET_REVISION \
+ --notify NONE"
+
+ exit $EXITSTATUS
+fi
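
The script above keys everything off the "ERROR - " field separator; a minimal, self-contained illustration of that extraction (the sample log lines and file name are hypothetical, not produced by this patch):

# Illustration only: reproduce the shortlog extraction on made-up anteater output,
# assuming log entries of the form "<timestamp> - <LEVEL> - <message>".
printf '%s\n' \
  '2017-06-01 12:00:01 - ERROR - Blacklisted function os.system found in ci/build.sh' \
  '2017-06-01 12:00:02 - INFO - Scan finished' > sample-securityaudit.log

# Mirrors the EXITSTATUS check: grep succeeds only if an ERROR entry exists.
grep ERROR sample-securityaudit.log && echo "would set EXITSTATUS=1"

# Same filter as above: keep only the text after "ERROR - ";
# non-matching lines produce empty lines in the shortlog, as in the real script.
awk -F"ERROR - " '{print $2}' sample-securityaudit.log
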
diff --git a/jjb/ci_gate_security/anteater-security-audit.sh b/jjb/ci_gate_security/anteater-security-audit.sh
new file mode 100644
index 000000000..d5c0e407c
--- /dev/null
+++ b/jjb/ci_gate_security/anteater-security-audit.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+cd $WORKSPACE
+echo "Generating patchset file to list changed files"
+git diff HEAD^1 --name-only | sed "s#^#/home/opnfv/anteater/$PROJECT/#" > $WORKSPACE/patchset
+echo "Changed files are"
+echo "--------------------------------------------------------"
+cat $WORKSPACE/patchset
+echo "--------------------------------------------------------"
+
+vols="-v $WORKSPACE:/home/opnfv/anteater/$PROJECT"
+envs="-e PROJECT=$PROJECT"
+
+echo "Pulling releng-anteater docker image"
+echo "--------------------------------------------------------"
+docker pull opnfv/releng-anteater
+echo "--------------------------------------------------------"
+
+cmd="sudo docker run --privileged=true -id $envs $vols opnfv/releng-anteater /bin/bash"
+echo "Running docker command $cmd"
+container_id=$($cmd)
+echo "Container ID is $container_id"
+cmd="anteater --project $PROJECT --patchset /home/opnfv/anteater/$PROJECT/patchset"
+echo "Executing command inside container"
+echo "$cmd"
+echo "--------------------------------------------------------"
+docker exec $container_id $cmd > $WORKSPACE/securityaudit.log 2>&1
+exit_code=$?
+echo "--------------------------------------------------------"
+echo "Stopping docker container with ID $container_id"
+docker stop $container_id
+cat securityaudit.log
+exit 0
diff --git a/jjb/securityaudit/opnfv-security-audit.yml b/jjb/ci_gate_security/opnfv-ci-gate-security.yml
index 732df8925..e2ad03eae 100644
--- a/jjb/securityaudit/opnfv-security-audit.yml
+++ b/jjb/ci_gate_security/opnfv-ci-gate-security.yml
@@ -1,5 +1,5 @@
########################
-# Job configuration for opnfv-lint
+# Job configuration for opnfv-anteater (security audit)
########################
- project:
@@ -25,9 +25,17 @@
disabled: '{obj:disabled}'
parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'ericsson-build3'
+ description: 'Slave label on Jenkins'
- project-parameter:
project: $GERRIT_PROJECT
branch: '{branch}'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+      description: "Used for overriding the GIT URL from the global Jenkins configuration when the job runs on non-LF hardware."
scm:
- git-scm-gerrit
@@ -47,59 +55,33 @@
comment-contains-value: 'reverify'
projects:
- project-compare-type: 'REG_EXP'
- project-pattern: 'sandbox'
+ project-pattern: 'sandbox|releng'
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
file-paths:
- compare-type: ANT
- pattern: '**/*.py'
- skip-vote:
- successful: true
- failed: true
- unstable: true
- notbuilt: true
+ pattern: '**'
+ skip-vote:
+ successful: true
+ failed: true
+ unstable: true
+ notbuilt: true
builders:
- - security-audit-python-code
+ - anteater-security-audit
- report-security-audit-result-to-gerrit
########################
# builder macros
########################
- builder:
- name: security-audit-python-code
+ name: anteater-security-audit
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
- set -o xtrace
- export PATH=$PATH:/usr/local/bin/
+ - shell:
+ !include-raw: ./anteater-security-audit.sh
- # this is where the security/license audit script will be executed
- echo "Hello World!"
- builder:
name: report-security-audit-result-to-gerrit
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
- set -o xtrace
- export PATH=$PATH:/usr/local/bin/
-
- # If no violations were found, no lint log will exist.
- if [[ -e securityaudit.log ]] ; then
- echo -e "\nposting security audit report to gerrit...\n"
-
- cat securityaudit.log
- echo
-
- ssh -p 29418 gerrit.opnfv.org \
- "gerrit review -p $GERRIT_PROJECT \
- -m \"$(cat securityaudit.log)\" \
- $GERRIT_PATCHSET_REVISION \
- --notify NONE"
-
- exit 1
- fi
+ - shell:
+ !include-raw: ./anteater-report-to-gerrit.sh
diff --git a/jjb/compass4nfv/compass-build.sh b/jjb/compass4nfv/compass-build.sh
index 093debba7..673a9f106 100644
--- a/jjb/compass4nfv/compass-build.sh
+++ b/jjb/compass4nfv/compass-build.sh
@@ -24,7 +24,16 @@ then
fi
cd $WORKSPACE/
-./build.sh --iso-dir $BUILD_DIRECTORY/ --iso-name compass.iso -c $CACHE_DIRECTORY
+
+if [[ "$BRANCH" == 'stable/danube' ]]; then
+ ./build.sh --iso-dir $BUILD_DIRECTORY/ --iso-name compass.iso -c $CACHE_DIRECTORY
+ OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/compass.iso | cut -d' ' -f1)
+ OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso
+else
+ ./build.sh --tar-dir $BUILD_DIRECTORY/ --tar-name compass.tar.gz -c $CACHE_DIRECTORY
+ OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/compass.tar.gz | cut -d' ' -f1)
+ OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.tar.gz
+fi
# list the build artifacts
ls -al $BUILD_DIRECTORY
@@ -34,8 +43,8 @@ ls -al $BUILD_DIRECTORY
echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
- echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/compass.iso | cut -d' ' -f1)"
+ echo "OPNFV_ARTIFACT_URL=$OPNFV_ARTIFACT_URL"
+ echo "OPNFV_ARTIFACT_SHA512SUM=$OPNFV_ARTIFACT_SHA512SUM"
echo "OPNFV_BUILD_URL=$BUILD_URL"
) > $BUILD_DIRECTORY/opnfv.properties
echo
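
The branch check above decides the artifact format; the same stable/danube-keeps-ISO, everything-else-uses-tarball rule reappears in compass-download-artifact.sh and compass-upload-artifact.sh below. A small hypothetical helper (the function name is illustrative, not something this patch adds) states the convention in isolation:

# Sketch only: map a branch name to the compass artifact file type.
artifact_filetype() {
    local branch="${1:-$BRANCH}"
    if [[ "$branch" == 'stable/danube' ]]; then
        echo 'iso'
    else
        echo 'tar.gz'
    fi
}

# e.g. FILETYPE=$(artifact_filetype "$BRANCH")
#      gsutil cp $BUILD_DIRECTORY/compass.$FILETYPE gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE
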
diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml
index 237f8944d..0c9f64d9a 100644
--- a/jjb/compass4nfv/compass-ci-jobs.yml
+++ b/jjb/compass4nfv/compass-ci-jobs.yml
@@ -11,11 +11,13 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
+ openstack-version: ocata
danube: &danube
stream: danube
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
disabled: false
+ openstack-version: newton
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
#--------------------------------
@@ -23,19 +25,19 @@
#--------------------------------
pod:
- baremetal:
- slave-label: compass-baremetal
+ slave-label: compass-baremetal-master
os-version: 'xenial'
<<: *master
- virtual:
- slave-label: compass-virtual
+ slave-label: compass-virtual-master
os-version: 'xenial'
<<: *master
- baremetal:
- slave-label: compass-baremetal
+ slave-label: compass-baremetal-branch
os-version: 'xenial'
<<: *danube
- virtual:
- slave-label: compass-virtual
+ slave-label: compass-virtual-branch
os-version: 'xenial'
<<: *danube
#--------------------------------
@@ -107,6 +109,7 @@
wrappers:
- build-name:
name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+ - fix-workspace-permissions
parameters:
- project-parameter:
@@ -133,6 +136,7 @@
predefined-parameters: |
DEPLOY_SCENARIO={scenario}
COMPASS_OS_VERSION={os-version}
+ COMPASS_OPENSTACK_VERSION={openstack-version}
same-node: true
block: true
- trigger-builds:
@@ -157,19 +161,25 @@
build-step-failure-threshold: 'never'
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- #dovetail only master by now, not sync with A/B/C branches
- #here the stream means the SUT stream, dovetail stream is defined in its own job
- - trigger-builds:
- - project: 'dovetail-compass-{pod}-debug-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
+      # dovetail only runs on master for now, not synced with the A/B/C branches
+      # here the stream means the SUT stream; the dovetail stream is defined in its own job
+      # only run on os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha scenarios
+ - conditional-step:
+ condition-kind: regex-match
+ regex: os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha
+ label: '{scenario}'
+ steps:
+ - trigger-builds:
+ - project: 'dovetail-compass-{pod}-proposed_tests-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
- job-template:
name: 'compass-deploy-{pod}-daily-{stream}'
@@ -195,8 +205,9 @@
- build-name:
name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
- timeout:
- timeout: 120
+ timeout: 150
abort: true
+ - fix-workspace-permissions
parameters:
- project-parameter:
@@ -211,11 +222,6 @@
scm:
- git-scm
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
-
builders:
- description-setter:
description: "POD: $NODE_NAME"
@@ -238,10 +244,6 @@
name: GS_URL
default: '$GS_BASE{gs-pathname}'
description: "URL to Google Storage."
- - choice:
- name: COMPASS_OPENSTACK_VERSION
- choices:
- - 'newton'
########################
# trigger macros
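
To see which scenarios the conditional-step above lets through to the dovetail proposed_tests job, here is a quick shell approximation (illustrative only; the Jenkins plugin may treat the regex as a full match, but the outcome is the same for these names):

# Which DEPLOY_SCENARIO values match os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha?
for scenario in os-nosdn-nofeature-ha os-odl_l2-bgpvpn-ha os-odl_l2-sfc-ha os-nosdn-nofeature-noha; do
    if [[ "$scenario" =~ os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha ]]; then
        echo "$scenario -> dovetail triggered"
    else
        echo "$scenario -> skipped"
    fi
done
# Expected: the first two trigger dovetail; the sfc and noha scenarios are skipped.
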
diff --git a/jjb/compass4nfv/compass-deploy.sh b/jjb/compass4nfv/compass-deploy.sh
index 534e17e62..2668ccdf8 100644
--- a/jjb/compass4nfv/compass-deploy.sh
+++ b/jjb/compass4nfv/compass-deploy.sh
@@ -23,7 +23,11 @@ fi
echo 1 > /proc/sys/vm/drop_caches
export CONFDIR=$WORKSPACE/deploy/conf
-export ISO_URL=file://$BUILD_DIRECTORY/compass.iso
+if [[ "$BRANCH" = 'stable/danube' ]]; then
+ export ISO_URL=file://$BUILD_DIRECTORY/compass.iso
+else
+ export ISO_URL=file://$BUILD_DIRECTORY/compass.tar.gz
+fi
cd $WORKSPACE
diff --git a/jjb/compass4nfv/compass-dovetail-jobs.yml b/jjb/compass4nfv/compass-dovetail-jobs.yml
index 30c80e648..b46c73221 100644
--- a/jjb/compass4nfv/compass-dovetail-jobs.yml
+++ b/jjb/compass4nfv/compass-dovetail-jobs.yml
@@ -55,6 +55,7 @@
wrappers:
- build-name:
name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+ - fix-workspace-permissions
triggers:
- '{auto-trigger-name}'
@@ -98,7 +99,7 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'dovetail-compass-{pod}-debug-weekly-{stream}'
+ - project: 'dovetail-compass-{pod}-proposed_tests-weekly-{stream}'
current-parameters: false
predefined-parameters:
DEPLOY_SCENARIO={scenario}
@@ -135,8 +136,9 @@
- build-name:
name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
- timeout:
- timeout: 120
+ timeout: 150
abort: true
+ - fix-workspace-permissions
parameters:
- project-parameter:
@@ -151,11 +153,6 @@
scm:
- git-scm
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
-
builders:
- description-setter:
description: "POD: $NODE_NAME"
diff --git a/jjb/compass4nfv/compass-download-artifact.sh b/jjb/compass4nfv/compass-download-artifact.sh
index 5a63c4aeb..f8915643f 100644
--- a/jjb/compass4nfv/compass-download-artifact.sh
+++ b/jjb/compass4nfv/compass-download-artifact.sh
@@ -18,12 +18,18 @@ curl -s -o $BUILD_DIRECTORY/latest.properties http://$GS_URL/latest.properties
# source the file so we get OPNFV vars
source $BUILD_DIRECTORY/latest.properties
-# download the file
-curl -s -o $BUILD_DIRECTORY/compass.iso http://$OPNFV_ARTIFACT_URL > gsutil.iso.log 2>&1
-
-# list the file
-ls -al $BUILD_DIRECTORY/compass.iso
+if [[ "$BRANCH" == 'stable/danube' ]]; then
+ # download the file
+ curl -s -o $BUILD_DIRECTORY/compass.iso http://$OPNFV_ARTIFACT_URL > gsutil.iso.log 2>&1
+ # list the file
+ ls -al $BUILD_DIRECTORY/compass.iso
+else
+ # download the file
+ curl -s -o $BUILD_DIRECTORY/compass.tar.gz http://$OPNFV_ARTIFACT_URL > gsutil.tar.gz.log 2>&1
+ # list the file
+ ls -al $BUILD_DIRECTORY/compass.tar.gz
+fi
echo
echo "--------------------------------------------------------"
-echo "Done!" \ No newline at end of file
+echo "Done!"
diff --git a/jjb/compass4nfv/compass-project-jobs.yml b/jjb/compass4nfv/compass-project-jobs.yml
index f962518e0..59482459e 100644
--- a/jjb/compass4nfv/compass-project-jobs.yml
+++ b/jjb/compass4nfv/compass-project-jobs.yml
@@ -125,7 +125,7 @@
description: "URL to Google Storage."
- string:
name: PPA_REPO
- default: "http://205.177.226.237:9999{ppa-pathname}"
+ default: "http://artifacts.opnfv.org/compass4nfv/package{ppa-pathname}"
- string:
name: PPA_CACHE
default: "$WORKSPACE/work/repo/"
diff --git a/jjb/compass4nfv/compass-upload-artifact.sh b/jjb/compass4nfv/compass-upload-artifact.sh
index 73b7f07fa..87a9334b2 100644
--- a/jjb/compass4nfv/compass-upload-artifact.sh
+++ b/jjb/compass4nfv/compass-upload-artifact.sh
@@ -7,6 +7,11 @@ echo "Uploading the $INSTALLER_TYPE artifact. This could take some time..."
echo "--------------------------------------------------------"
echo
+if [[ "$BRANCH" == 'stable/danube' ]]; then
+ FILETYPE='iso'
+else
+ FILETYPE='tar.gz'
+fi
# source the opnfv.properties to get ARTIFACT_VERSION
source $BUILD_DIRECTORY/opnfv.properties
@@ -23,16 +28,16 @@ signiso () {
time gpg2 -vvv --batch --yes --no-tty \
--default-key opnfv-helpdesk@rt.linuxfoundation.org \
--passphrase besteffort \
- --detach-sig $BUILD_DIRECTORY/compass.iso
+ --detach-sig $BUILD_DIRECTORY/compass.$FILETYPE
-gsutil cp $BUILD_DIRECTORY/compass.iso.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig
+gsutil cp $BUILD_DIRECTORY/compass.$FILETYPE.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE.sig
echo "ISO signature Upload Complete!"
}
signiso
# upload artifact and additional files to google storage
-gsutil cp $BUILD_DIRECTORY/compass.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1
+gsutil cp $BUILD_DIRECTORY/compass.$FILETYPE gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE > gsutil.$FILETYPE.log 2>&1
gsutil cp $BUILD_DIRECTORY/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
gsutil cp $BUILD_DIRECTORY/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
@@ -44,19 +49,19 @@ gsutil -m setmeta \
gsutil -m setmeta \
-h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1
+ gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE > /dev/null 2>&1
# disabled errexit due to gsutil setmeta complaints
# BadRequestException: 400 Invalid argument
# check if we uploaded the file successfully to see if things are fine
-gsutil ls gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1
+gsutil ls gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE > /dev/null 2>&1
if [[ $? -ne 0 ]]; then
echo "Problem while uploading artifact!"
- echo "Check log $WORKSPACE/gsutil.iso.log on the machine where this build is done."
+ echo "Check log $WORKSPACE/gsutil.$FILETYPE.log on the machine where this build is done."
exit 1
fi
echo
echo "--------------------------------------------------------"
echo "Done!"
-echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
+echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE"
diff --git a/jjb/compass4nfv/compass-verify-jobs.yml b/jjb/compass4nfv/compass-verify-jobs.yml
index 14279e649..258315844 100644
--- a/jjb/compass4nfv/compass-verify-jobs.yml
+++ b/jjb/compass4nfv/compass-verify-jobs.yml
@@ -13,11 +13,15 @@
gs-pathname: ''
ppa-pathname: '/{stream}'
disabled: false
+ openstack-version: 'ocata'
+ branch-type: 'master'
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
ppa-pathname: '/{stream}'
disabled: false
+ openstack-version: 'newton'
+ branch-type: 'branch'
distro:
- 'xenial':
@@ -67,14 +71,15 @@
- 'compass-os-.*?-virtual-daily-.*?'
block-level: 'NODE'
- scm:
- - git-scm-gerrit
-
wrappers:
- ssh-agent-wrapper
- timeout:
- timeout: 120
+ timeout: 150
fail: true
+ - fix-workspace-permissions
+
+ scm:
+ - git-scm-gerrit
triggers:
- gerrit:
@@ -108,7 +113,7 @@
- project-parameter:
project: '{project}'
branch: '{branch}'
- - 'compass-virtual-defaults'
+ - 'compass-virtual-{branch-type}-defaults'
- '{installer}-defaults'
- 'compass-verify-defaults':
installer: '{installer}'
@@ -143,6 +148,7 @@
current-parameters: true
predefined-parameters: |
COMPASS_OS_VERSION={os-version}
+ COMPASS_OPENSTACK_VERSION={openstack-version}
node-parameters: true
kill-phase-on: FAILURE
abort-all-job: true
@@ -188,14 +194,15 @@
- 'compass-os-.*?-virtual-daily-.*?'
block-level: 'NODE'
- scm:
- - git-scm-gerrit
-
wrappers:
- ssh-agent-wrapper
- timeout:
- timeout: 120
+ timeout: 150
fail: true
+ - fix-workspace-permissions
+
+ scm:
+ - git-scm-gerrit
triggers:
- gerrit:
@@ -227,7 +234,7 @@
- project-parameter:
project: '{project}'
branch: '{branch}'
- - 'compass-virtual-defaults'
+ - 'compass-virtual-{branch-type}-defaults'
- '{installer}-defaults'
- 'compass-verify-defaults':
installer: '{installer}'
@@ -287,14 +294,15 @@
- 'functest-compass-virtual.*'
block-level: 'NODE'
- scm:
- - git-scm-gerrit
-
wrappers:
- ssh-agent-wrapper
- timeout:
- timeout: 120
+ timeout: 150
fail: true
+ - fix-workspace-permissions
+
+ scm:
+ - git-scm-gerrit
builders:
- description-setter:
@@ -339,15 +347,11 @@
description: "URL to Google Storage."
- string:
name: PPA_REPO
- default: "http://205.177.226.237:9999{ppa-pathname}"
+ default: "http://artifacts.opnfv.org/compass4nfv/package{ppa-pathname}"
- string:
name: PPA_CACHE
default: "$WORKSPACE/work/repo/"
- choice:
- name: COMPASS_OPENSTACK_VERSION
- choices:
- - 'newton'
- - choice:
name: COMPASS_OS_VERSION
choices:
- 'xenial'
diff --git a/jjb/cperf/cperf-ci-jobs.yml b/jjb/cperf/cperf-ci-jobs.yml
index f6e068530..dc209d644 100644
--- a/jjb/cperf/cperf-ci-jobs.yml
+++ b/jjb/cperf/cperf-ci-jobs.yml
@@ -162,7 +162,7 @@
-v of_port:6653"
robot_suite="/home/opnfv/repos/odl_test/csit/suites/openflowplugin/Performance/010_Cbench.robot"
- docker run -ti -v /tmp:/tmp opnfv/cperf:$DOCKER_TAG ${robot_cmd} ${robot_suite}
+ docker run -i -v /tmp:/tmp opnfv/cperf:$DOCKER_TAG ${robot_cmd} ${robot_suite}
- builder:
name: cperf-cleanup
diff --git a/jjb/daisy4nfv/daisy-deploy.sh b/jjb/daisy4nfv/daisy-deploy.sh
index b512e3f60..785f3a571 100755
--- a/jjb/daisy4nfv/daisy-deploy.sh
+++ b/jjb/daisy4nfv/daisy-deploy.sh
@@ -6,7 +6,7 @@ echo "--------------------------------------------------------"
echo "This is $INSTALLER_TYPE deploy job!"
echo "--------------------------------------------------------"
-DEPLOY_SCENARIO=${DEPLOY_SCENARIO:-"os-nosdn-nofeature-ha"}
+DEPLOY_SCENARIO=${DEPLOY_SCENARIO:-"os-nosdn-nofeature-noha"}
BRIDGE=${BRIDGE:-pxebr}
LAB_NAME=${NODE_NAME/-*}
POD_NAME=${NODE_NAME/*-}
@@ -29,7 +29,7 @@ git clone ssh://jenkins-zte@gerrit.opnfv.org:29418/securedlab --quiet \
cp -r securedlab/labs .
DEPLOY_COMMAND="sudo ./ci/deploy/deploy.sh -b $BASE_DIR \
- -l $LAB_NAME -p $POD_NAME -B $BRIDGE"
+ -l $LAB_NAME -p $POD_NAME -B $BRIDGE -s $DEPLOY_SCENARIO"
# log info to console
echo """
diff --git a/jjb/daisy4nfv/daisy-project-jobs.yml b/jjb/daisy4nfv/daisy-project-jobs.yml
index e631ee9b9..0a9d43d25 100644
--- a/jjb/daisy4nfv/daisy-project-jobs.yml
+++ b/jjb/daisy4nfv/daisy-project-jobs.yml
@@ -71,7 +71,7 @@
project: '{project}'
branch: '{branch}'
- 'opnfv-build-centos-defaults'
- - 'daisy-defaults'
+ - '{installer}-defaults'
- '{installer}-project-parameter':
gs-pathname: '{gs-pathname}'
@@ -157,7 +157,8 @@
- project-parameter:
project: '{project}'
branch: '{branch}'
- - 'daisy-defaults'
+ - '{installer}-defaults'
+ - '{slave-label}-defaults'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -165,8 +166,6 @@
- string:
name: DEPLOY_SCENARIO
default: 'os-nosdn-nofeature-ha'
- - 'daisy-defaults'
- - '{slave-label}-defaults'
- '{installer}-project-parameter':
gs-pathname: '{gs-pathname}'
diff --git a/jjb/daisy4nfv/daisy4nfv-basic.sh b/jjb/daisy4nfv/daisy4nfv-basic.sh
index 04b9b7bfa..87f5482e0 100755
--- a/jjb/daisy4nfv/daisy4nfv-basic.sh
+++ b/jjb/daisy4nfv/daisy4nfv-basic.sh
@@ -4,4 +4,3 @@ echo "--------------------------------------------------------"
echo "This is diasy4nfv basic job!"
echo "--------------------------------------------------------"
-sudo rm -rf /home/jenkins-ci/opnfv/slave_root/workspace/daisy4nfv-verify-build-master/*
diff --git a/jjb/daisy4nfv/daisy4nfv-build.sh b/jjb/daisy4nfv/daisy4nfv-build.sh
index 375d80733..925f68e18 100755
--- a/jjb/daisy4nfv/daisy4nfv-build.sh
+++ b/jjb/daisy4nfv/daisy4nfv-build.sh
@@ -1,5 +1,9 @@
#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
+
echo "--------------------------------------------------------"
echo "This is diasy4nfv build job!"
echo "--------------------------------------------------------"
diff --git a/jjb/daisy4nfv/daisy4nfv-download-artifact.sh b/jjb/daisy4nfv/daisy4nfv-download-artifact.sh
index 1cc0443ad..a64c80e5c 100755
--- a/jjb/daisy4nfv/daisy4nfv-download-artifact.sh
+++ b/jjb/daisy4nfv/daisy4nfv-download-artifact.sh
@@ -57,12 +57,18 @@ fi
# log info to console
echo "Downloading the $INSTALLER_TYPE artifact using URL http://$OPNFV_ARTIFACT_URL"
-echo "This could take some time..."
+echo "This could take some time... Now the time is $(date -u)"
echo "--------------------------------------------------------"
echo
# download the file
-curl -L -s -o $WORKSPACE/opnfv.bin http://$OPNFV_ARTIFACT_URL > gsutil.bin.log 2>&1
+if [[ "$NODE_NAME" =~ (zte) ]] && [ -x "$(command -v aria2c)" ]; then
+ DOWNLOAD_CMD="aria2c -x 3 --allow-overwrite=true -d $WORKSPACE -o opnfv.bin"
+else
+ DOWNLOAD_CMD="curl -L -s -o $WORKSPACE/opnfv.bin"
+fi
+
+$DOWNLOAD_CMD http://$OPNFV_ARTIFACT_URL > gsutil.bin.log 2>&1
# list the file
ls -al $WORKSPACE/opnfv.bin
diff --git a/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml b/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml
index 9e7b867af..561ffbe24 100644
--- a/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml
+++ b/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml
@@ -54,6 +54,11 @@
enabled: true
max-total: 4
option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - '{alias}-merge-(master|danube)'
+ block-level: 'NODE'
scm:
- git-scm
@@ -148,7 +153,7 @@
- build-blocker:
use-build-blocker: true
blocking-jobs:
- - '{alias}-merge-(master|danube)'
+ - '{alias}-merge-{phase}-.*'
block-level: 'NODE'
scm:
@@ -164,6 +169,7 @@
- project-parameter:
project: '{project}'
branch: '{branch}'
+ - '{installer}-defaults'
- '{slave-label}-defaults'
- '{alias}-merge-defaults':
gs-pathname: '{gs-pathname}'
diff --git a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
index a0ec2ebd7..dff0ff0a4 100644
--- a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
+++ b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
@@ -47,8 +47,14 @@
enabled: true
max-total: 4
option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - '{installer}-merge-build-.*'
+ block-level: 'NODE'
+
scm:
- - git-scm
+ - git-scm-gerrit
wrappers:
- ssh-agent-wrapper
- timeout:
@@ -103,8 +109,13 @@
name: unit
condition: SUCCESSFUL
projects:
- - name: '{alias}-verify-{name}-{stream}'
- current-parameters: true
+ - name: '{alias}-verify-unit-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ BRANCH=$BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
node-parameters: false
kill-phase-on: FAILURE
abort-all-job: true
@@ -133,8 +144,15 @@
enabled: true
max-total: 6
option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - '{installer}-merge-build-.*'
+ - '{alias}-verify-build-.*'
+ block-level: 'NODE'
+
scm:
- - git-scm
+ - git-scm-gerrit
wrappers:
- ssh-agent-wrapper
- timeout:
@@ -144,9 +162,11 @@
- project-parameter:
project: '{project}'
branch: '{branch}'
+ - '{installer}-defaults'
- '{slave-label}-defaults'
- '{alias}-verify-defaults':
gs-pathname: '{gs-pathname}'
+
builders:
- description-setter:
description: "Built on $NODE_NAME"
diff --git a/jjb/doctor/doctor.yml b/jjb/doctor/doctor.yml
index 3401b6d33..eb230b59d 100644
--- a/jjb/doctor/doctor.yml
+++ b/jjb/doctor/doctor.yml
@@ -145,7 +145,7 @@
# functest-suite-parameter
- string:
name: FUNCTEST_SUITE_NAME
- default: '{project}'
+ default: 'doctor-notification'
- string:
name: TESTCASE_OPTIONS
default: '-e INSPECTOR_TYPE={inspector} -e PROFILER_TYPE={profiler} -v $WORKSPACE:/home/opnfv/repos/doctor'
diff --git a/jjb/dovetail/dovetail-artifacts-upload.sh b/jjb/dovetail/dovetail-artifacts-upload.sh
index b23decad1..f1a9e7222 100755
--- a/jjb/dovetail/dovetail-artifacts-upload.sh
+++ b/jjb/dovetail/dovetail-artifacts-upload.sh
@@ -52,7 +52,7 @@ echo "signature Upload Complete!"
upload () {
# log info to console
-echo "Uploading to artifact. This could take some time..."
+echo "Uploading ${STORE_FILE_NAME} to artifact. This could take some time..."
echo
cd $WORKSPACE
diff --git a/jjb/dovetail/dovetail-artifacts-upload.yml b/jjb/dovetail/dovetail-artifacts-upload.yml
index 3d9af5ed7..0c8efbe0d 100644
--- a/jjb/dovetail/dovetail-artifacts-upload.yml
+++ b/jjb/dovetail/dovetail-artifacts-upload.yml
@@ -19,6 +19,8 @@
- 'dovetail'
- 'functest'
- 'yardstick'
+ - 'testapi'
+ - 'mongo'
#############################################
# job template
@@ -55,7 +57,8 @@
builders:
- 'dovetail-builder-artifacts-upload'
- - 'dovetail-workspace-cleanup'
+ - 'dovetail-upload-artifacts-cache-cleanup'
+ - 'dovetail-images-cleanup'
####################
# parameter macros
@@ -94,7 +97,7 @@
!include-raw: ./dovetail-artifacts-upload.sh
- builder:
- name: dovetail-workspace-cleanup
+ name: dovetail-upload-artifacts-cache-cleanup
builders:
- shell: |
#!/bin/bash
@@ -104,27 +107,8 @@
/bin/rm -rf $CACHE_DIR
- # Remove previous running containers if exist
- if [[ -n "$(docker ps -a | grep $DOCKER_REPO_NAME)" ]]; then
- echo "Removing existing $DOCKER_REPO_NAME containers..."
- docker ps -a | grep $DOCKER_REPO_NAME | awk '{print $1}' | xargs docker rm -f
- t=60
- # Wait max 60 sec for containers to be removed
- while [[ $t -gt 0 ]] && [[ -n "$(docker ps| grep $DOCKER_REPO_NAME)" ]]; do
- sleep 1
- let t=t-1
- done
- fi
-
- # Remove existing images if exist
- if [[ -n "$(docker images | grep $DOCKER_REPO_NAME)" ]]; then
- echo "Docker images to remove:"
- docker images | head -1 && docker images | grep $DOCKER_REPO_NAME
- image_tags=($(docker images | grep $DOCKER_REPO_NAME | awk '{print $2}'))
- for tag in "${image_tags[@]}"; do
- if [[ -n "$(docker images|grep $DOCKER_REPO_NAME|grep $tag)" ]]; then
- echo "Removing docker image $DOCKER_REPO_NAME:$tag..."
- docker rmi -f $DOCKER_REPO_NAME:$tag
- fi
- done
- fi
+- builder:
+ name: dovetail-images-cleanup
+ builders:
+ - shell:
+ !include-raw: ./dovetail-cleanup.sh
diff --git a/jjb/dovetail/dovetail-ci-jobs.yml b/jjb/dovetail/dovetail-ci-jobs.yml
index 869048088..682948d8b 100644
--- a/jjb/dovetail/dovetail-ci-jobs.yml
+++ b/jjb/dovetail/dovetail-ci-jobs.yml
@@ -137,10 +137,41 @@
SUT: fuel
auto-trigger-name: 'daily-trigger-disabled'
<<: *master
+ - arm-virtual1:
+ slave-label: '{pod}'
+ SUT: fuel
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
+ - zte-pod1:
+ slave-label: zte-pod1
+ SUT: fuel
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
+ - zte-pod2:
+ slave-label: zte-pod2
+ SUT: fuel
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
+ - zte-pod3:
+ slave-label: zte-pod3
+ SUT: fuel
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
+ - zte-pod1:
+ slave-label: zte-pod1
+ SUT: fuel
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *danube
+ - zte-pod3:
+ slave-label: zte-pod3
+ SUT: fuel
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *danube
#--------------------------------
testsuite:
- 'debug'
- 'compliance_set'
+ - 'proposed_tests'
jobs:
- 'dovetail-{SUT}-{pod}-{testsuite}-{stream}'
diff --git a/jjb/dovetail/dovetail-cleanup.sh b/jjb/dovetail/dovetail-cleanup.sh
index 22b2ba2ce..0ee789a97 100755
--- a/jjb/dovetail/dovetail-cleanup.sh
+++ b/jjb/dovetail/dovetail-cleanup.sh
@@ -2,8 +2,8 @@
[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-#clean up dependent project docker images, which has no containers and image tag None
-clean_images=(opnfv/functest opnfv/yardstick)
+# clean up dependent project docker images, which has no containers and image tag None
+clean_images=(opnfv/functest opnfv/yardstick opnfv/testapi mongo)
for clean_image in "${clean_images[@]}"; do
echo "Removing image $image_id, which has no containers and image tag is None"
dangling_images=($(docker images -f "dangling=true" | grep ${clean_image} | awk '{print $3}'))
@@ -14,7 +14,7 @@ for clean_image in "${clean_images[@]}"; do
fi
done
-echo "Remove containers with image dovetail:<None>..."
+echo "Remove containers with image opnfv/dovetail:<None>..."
dangling_images=($(docker images -f "dangling=true" | grep opnfv/dovetail | awk '{print $3}'))
if [[ -n ${dangling_images} ]]; then
for image_id in "${dangling_images[@]}"; do
@@ -24,7 +24,7 @@ if [[ -n ${dangling_images} ]]; then
done
fi
-echo "Cleaning up dovetail docker containers/images..."
+echo "Cleaning up dovetail docker containers..."
if [[ ! -z $(docker ps -a | grep opnfv/dovetail) ]]; then
echo "Removing existing opnfv/dovetail containers..."
docker ps -a | grep opnfv/dovetail | awk '{print $1}' | xargs docker rm -f >${redirect}
diff --git a/jjb/dovetail/dovetail-run.sh b/jjb/dovetail/dovetail-run.sh
index 5161a3c7c..dce7e5862 100755
--- a/jjb/dovetail/dovetail-run.sh
+++ b/jjb/dovetail/dovetail-run.sh
@@ -6,9 +6,19 @@
set -e
[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
+DOVETAIL_HOME=${WORKSPACE}/cvp
+if [ -d ${DOVETAIL_HOME} ]; then
+ sudo rm -rf ${DOVETAIL_HOME}/*
+else
+ sudo mkdir -p ${DOVETAIL_HOME}
+fi
+
+DOVETAIL_CONFIG=${DOVETAIL_HOME}/pre_config
+sudo mkdir -p ${DOVETAIL_CONFIG}
+
sshkey=""
# The path of openrc.sh is defined in fetch_os_creds.sh
-OPENRC=$WORKSPACE/opnfv-openrc.sh
+OPENRC=${DOVETAIL_CONFIG}/env_config.sh
if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
instack_mac=$(sudo virsh domiflist undercloud | grep default | \
grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
@@ -32,31 +42,78 @@ if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FOR
sudo iptables -I FORWARD -j RETURN
fi
+releng_repo=${WORKSPACE}/releng
+[ -d ${releng_repo} ] && sudo rm -rf ${releng_repo}
+git clone https://gerrit.opnfv.org/gerrit/releng ${releng_repo} >/dev/null
+
if [[ ${INSTALLER_TYPE} != 'joid' ]]; then
- releng_repo=${WORKSPACE}/releng
- [ -d ${releng_repo} ] && sudo rm -rf ${releng_repo}
- git clone https://gerrit.opnfv.org/gerrit/releng ${releng_repo} >/dev/null
- ${releng_repo}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} >${redirect}
+ sudo /bin/bash ${releng_repo}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} >${redirect}
fi
if [[ -f $OPENRC ]]; then
echo "INFO: openstack credentials path is $OPENRC"
cat $OPENRC
else
- echo "ERROR: file $OPENRC does not exist."
+    echo "ERROR: cannot find file $OPENRC. Please check whether it exists."
+ sudo ls -al ${DOVETAIL_CONFIG}
exit 1
fi
+sudo pip install virtualenv
+
+cd ${releng_repo}/modules
+sudo virtualenv venv
+source venv/bin/activate
+sudo pip install -e ./ >/dev/null
+sudo pip install netaddr
+
+if [[ ${INSTALLER_TYPE} == compass ]]; then
+ options="-u root -p root"
+elif [[ ${INSTALLER_TYPE} == fuel ]]; then
+ options="-u root -p r00tme"
+else
+    echo "Generating pod.yaml is not supported for ${INSTALLER_TYPE} yet."
+ echo "HA test cases may not run properly."
+fi
+
+cmd="sudo python ${releng_repo}/utils/create_pod_file.py -t ${INSTALLER_TYPE} \
+ -i ${INSTALLER_IP} ${options} -f ${DOVETAIL_CONFIG}/pod.yaml"
+echo ${cmd}
+${cmd}
+
+deactivate
+
+cd ${WORKSPACE}
+
+if [ -f ${DOVETAIL_CONFIG}/pod.yaml ]; then
+ echo "file ${DOVETAIL_CONFIG}/pod.yaml:"
+ cat ${DOVETAIL_CONFIG}/pod.yaml
+else
+    echo "ERROR: cannot find file ${DOVETAIL_CONFIG}/pod.yaml. Please check whether it exists."
+ sudo ls -al ${DOVETAIL_CONFIG}
+ echo "HA test cases may not run properly."
+fi
+
+ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
+
+if [ "$INSTALLER_TYPE" == "fuel" ]; then
+ echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
+ sshpass -p r00tme sudo scp $ssh_options root@${INSTALLER_IP}:~/.ssh/id_rsa ${DOVETAIL_CONFIG}/id_rsa
+fi
+
+# the sdnvpn test case needs this image downloaded before it runs
+sudo wget -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_CONFIG}
+
opts="--privileged=true -id"
-results_envs="-v /var/run/docker.sock:/var/run/docker.sock \
- -v /home/opnfv/dovetail/results:/home/opnfv/dovetail/results"
-openrc_volume="-v ${OPENRC}:${OPENRC}"
+
+docker_volume="-v /var/run/docker.sock:/var/run/docker.sock"
+dovetail_home_volume="-v ${DOVETAIL_HOME}:${DOVETAIL_HOME}"
# Pull the image with correct tag
echo "Dovetail: Pulling image opnfv/dovetail:${DOCKER_TAG}"
docker pull opnfv/dovetail:$DOCKER_TAG >$redirect
-cmd="docker run ${opts} ${results_envs} ${openrc_volume} \
+cmd="docker run ${opts} -e DOVETAIL_HOME=${DOVETAIL_HOME} ${docker_volume} ${dovetail_home_volume} \
${sshkey} opnfv/dovetail:${DOCKER_TAG} /bin/bash"
echo "Dovetail: running docker run command: ${cmd}"
${cmd} >${redirect}
@@ -78,13 +135,13 @@ if [ $(docker ps | grep "opnfv/dovetail:${DOCKER_TAG}" | wc -l) == 0 ]; then
fi
list_cmd="dovetail list ${TESTSUITE}"
-run_cmd="dovetail run --openrc ${OPENRC} --testsuite ${TESTSUITE} -d"
+run_cmd="dovetail run --testsuite ${TESTSUITE} -d"
echo "Container exec command: ${list_cmd}"
docker exec $container_id ${list_cmd}
echo "Container exec command: ${run_cmd}"
docker exec $container_id ${run_cmd}
-sudo cp -r ${DOVETAIL_REPO_DIR}/results ./
+sudo cp -r ${DOVETAIL_HOME}/results ./
# To make sure the file owner is the current user, for the copied results files in the above line
# if not, there will be error when next time to wipe workspace
# CURRENT_USER=${SUDO_USER:-$USER}
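
For readability, the inputs the job stages under DOVETAIL_HOME before starting the container (as used above) can be summarized and sanity-checked with a short sketch; the check itself is illustrative and not part of the patch:

# Expected layout after the setup steps above:
#   cvp/pre_config/env_config.sh  - OpenStack credentials written by fetch_os_creds.sh
#   cvp/pre_config/pod.yaml       - node description generated by create_pod_file.py
#   cvp/pre_config/id_rsa         - jump-server key (fuel only)
#   cvp/pre_config/*.img          - image pre-fetched for the sdnvpn test case
#   cvp/results/                  - written by the dovetail run inside the container
DOVETAIL_CONFIG="${WORKSPACE}/cvp/pre_config"
for f in env_config.sh pod.yaml; do
    [ -f "${DOVETAIL_CONFIG}/${f}" ] || echo "WARN: ${DOVETAIL_CONFIG}/${f} is missing"
done
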
diff --git a/jjb/dovetail/dovetail-weekly-jobs.yml b/jjb/dovetail/dovetail-weekly-jobs.yml
index 915feb5e8..700657d68 100644
--- a/jjb/dovetail/dovetail-weekly-jobs.yml
+++ b/jjb/dovetail/dovetail-weekly-jobs.yml
@@ -46,6 +46,7 @@
testsuite:
- 'debug'
- 'compliance_set'
+ - 'proposed_tests'
loop:
- 'weekly':
diff --git a/jjb/fuel/fuel-build.sh b/jjb/fuel/fuel-build.sh
index e1a4c0267..2c0d12a80 100755
--- a/jjb/fuel/fuel-build.sh
+++ b/jjb/fuel/fuel-build.sh
@@ -7,6 +7,15 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
+
+# disable Fuel iso build for master branch
+if [[ "$BRANCH" == 'master' ]]; then
+ touch $WORKSPACE/.noupload
+ echo "--------------------------------------------------------"
+ echo "Done!"
+ exit 0
+fi
+
set -o errexit
set -o nounset
set -o pipefail
diff --git a/jjb/fuel/fuel-daily-jobs.yml b/jjb/fuel/fuel-daily-jobs.yml
index 32abad624..68677089d 100644
--- a/jjb/fuel/fuel-daily-jobs.yml
+++ b/jjb/fuel/fuel-daily-jobs.yml
@@ -189,6 +189,28 @@
build-step-failure-threshold: 'never'
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
+      # 1. dovetail only has a master branch, based on the D release
+      # 2. here the stream means the SUT stream; the dovetail stream is defined in its own job
+      # 3. only the debug testsuite here (refstack, ha, ipv6, bgpvpn)
+      # 4. not used for release criteria or compliance,
+      #    only to debug dovetail tool bugs with bgpvpn
+      # 5. only run against the os-odl_l2-bgpvpn-ha scenario (regex used here, can be extended to more scenarios later)
+ - conditional-step:
+ condition-kind: regex-match
+ regex: os-odl_l2-bgpvpn-ha
+ label: '{scenario}'
+ steps:
+ - trigger-builds:
+ - project: 'dovetail-fuel-{pod}-proposed_tests-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
publishers:
- email:
@@ -271,6 +293,10 @@
name: GS_URL
default: artifacts.opnfv.org/$PROJECT{gs-pathname}
description: "URL to Google Storage."
+ - string:
+ name: SSH_KEY
+ default: "/tmp/mcp.rsa"
+ description: "Path to private SSH key to access environment nodes. For MCP deployments only."
########################
# trigger macros
########################
@@ -281,19 +307,19 @@
- trigger:
name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '5 20 * * *'
+ - timed: '' # '5 20 * * *'
- trigger:
name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '5 23 * * *'
+ - timed: '' # '5 23 * * *'
- trigger:
name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '5 2 * * *'
+ - timed: '' # '5 2 * * *'
- trigger:
name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '5 5 * * *'
+ - timed: '' # '5 5 * * *'
- trigger:
name: 'fuel-os-onos-sfc-ha-baremetal-daily-master-trigger'
triggers:
@@ -305,23 +331,23 @@
- trigger:
name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '5 11 * * *'
+ - timed: '' # '5 11 * * *'
- trigger:
name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '5 14 * * *'
+ - timed: '' # '5 14 * * *'
- trigger:
name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '5 17 * * *'
+ - timed: '' # '5 17 * * *'
- trigger:
name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '30 12 * * *'
+ - timed: '' # '30 12 * * *'
- trigger:
name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '30 8 * * *'
+ - timed: '' # '30 8 * * *'
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
@@ -515,11 +541,11 @@
- trigger:
name: 'fuel-os-odl_l2-nofeature-noha-virtual-daily-master-trigger'
triggers:
- - timed: '35 15 * * *'
+ - timed: '' # '35 15 * * *'
- trigger:
name: 'fuel-os-odl_l3-nofeature-noha-virtual-daily-master-trigger'
triggers:
- - timed: '5 18 * * *'
+ - timed: '' # '5 18 * * *'
- trigger:
name: 'fuel-os-onos-sfc-noha-virtual-daily-master-trigger'
triggers:
@@ -531,27 +557,27 @@
- trigger:
name: 'fuel-os-odl_l2-sfc-noha-virtual-daily-master-trigger'
triggers:
- - timed: '35 1 * * *'
+ - timed: '' # '35 1 * * *'
- trigger:
name: 'fuel-os-odl_l2-bgpvpn-noha-virtual-daily-master-trigger'
triggers:
- - timed: '5 4 * * *'
+ - timed: '' # '5 4 * * *'
- trigger:
name: 'fuel-os-nosdn-kvm-noha-virtual-daily-master-trigger'
triggers:
- - timed: '35 6 * * *'
+ - timed: '' # '35 6 * * *'
- trigger:
name: 'fuel-os-nosdn-ovs-noha-virtual-daily-master-trigger'
triggers:
- - timed: '5 9 * * *'
+ - timed: '' # '5 9 * * *'
- trigger:
name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-master-trigger'
triggers:
- - timed: '30 16 * * *'
+ - timed: '' # '30 16 * * *'
- trigger:
name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-virtual-daily-master-trigger'
triggers:
- - timed: '30 20 * * *'
+ - timed: '' # '30 20 * * *'
#-----------------------------------------------
# Triggers for job running on fuel-virtual against danube branch
#-----------------------------------------------
diff --git a/jjb/fuel/fuel-deploy.sh b/jjb/fuel/fuel-deploy.sh
index f5bbd1818..2fb5c71e4 100755
--- a/jjb/fuel/fuel-deploy.sh
+++ b/jjb/fuel/fuel-deploy.sh
@@ -12,16 +12,18 @@ set -o pipefail
export TERM="vt220"
-# source the file so we get OPNFV vars
-source latest.properties
+if [[ "$BRANCH" != 'master' ]]; then
+ # source the file so we get OPNFV vars
+ source latest.properties
-# echo the info about artifact that is used during the deployment
-echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
+ # echo the info about artifact that is used during the deployment
+ echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
+fi
if [[ "$JOB_NAME" =~ "merge" ]]; then
# set simplest scenario for virtual deploys to run for merges
DEPLOY_SCENARIO="os-nosdn-nofeature-ha"
-else
+elif [[ "$BRANCH" != 'master' ]]; then
# for none-merge deployments
# checkout the commit that was used for building the downloaded artifact
# to make sure the ISO and deployment mechanism uses same versions
@@ -75,7 +77,7 @@ echo "--------------------------------------------------------"
echo "Scenario: $DEPLOY_SCENARIO"
echo "Lab: $LAB_NAME"
echo "POD: $POD_NAME"
-echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
+[[ "$BRANCH" != 'master' ]] && echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
echo
echo "Starting the deployment using $INSTALLER_TYPE. This could take some time..."
echo "--------------------------------------------------------"
diff --git a/jjb/fuel/fuel-download-artifact.sh b/jjb/fuel/fuel-download-artifact.sh
index 8cc552e8d..c3b8253de 100755
--- a/jjb/fuel/fuel-download-artifact.sh
+++ b/jjb/fuel/fuel-download-artifact.sh
@@ -10,6 +10,9 @@
set -o errexit
set -o pipefail
+# disable Fuel ISO download for master branch
+[[ "$BRANCH" == 'master' ]] && exit 0
+
# use the proxy URL instead of the normal URL, since googleusercontent.com may be blocked randomly
[[ "$NODE_NAME" =~ (zte) ]] && GS_URL=${GS_BASE_PROXY%%/*}/$GS_URL
diff --git a/jjb/functest/functest-daily-jobs.yml b/jjb/functest/functest-daily-jobs.yml
index e8d14321f..8de092d29 100644
--- a/jjb/functest/functest-daily-jobs.yml
+++ b/jjb/functest/functest-daily-jobs.yml
@@ -80,20 +80,20 @@
installer: compass
<<: *danube
# apex CI PODs
- - apex-verify-master:
- slave-label: '{pod}'
+ - virtual:
+ slave-label: apex-virtual-master
installer: apex
<<: *master
- - apex-daily-master:
- slave-label: '{pod}'
+ - baremetal:
+ slave-label: apex-baremetal-master
installer: apex
<<: *master
- - apex-verify-danube:
- slave-label: '{pod}'
+ - virtual:
+ slave-label: apex-virtual-danube
installer: apex
<<: *danube
- - apex-daily-danube:
- slave-label: '{pod}'
+ - baremetal:
+ slave-label: apex-baremetal-danube
installer: apex
<<: *danube
# armband CI PODs
@@ -158,7 +158,11 @@
slave-label: '{pod}'
installer: fuel
<<: *master
- - arm-pod3-2:
+ - arm-pod4:
+ slave-label: '{pod}'
+ installer: fuel
+ <<: *master
+ - arm-virtual1:
slave-label: '{pod}'
installer: fuel
<<: *master
@@ -190,7 +194,11 @@
slave-label: '{pod}'
installer: fuel
<<: *danube
- - arm-pod3-2:
+ - arm-pod4:
+ slave-label: '{pod}'
+ installer: fuel
+ <<: *danube
+ - arm-virtual1:
slave-label: '{pod}'
installer: fuel
<<: *danube
@@ -294,6 +302,7 @@
- 'vims'
- 'multisite'
- 'parser'
+ - 'opera_vims'
- string:
name: TESTCASE_OPTIONS
default: ''
@@ -347,6 +356,8 @@
- 'functest-cleanup'
- 'set-functest-env'
- 'functest-suite'
+ - 'functest-store-results'
+ - 'functest-exit'
- builder:
name: functest-daily
@@ -365,7 +376,10 @@
name: set-functest-env
builders:
- shell:
- !include-raw: ./set-functest-env.sh
+ !include-raw:
+ - ./functest-env-presetup.sh
+ - ../../utils/fetch_os_creds.sh
+ - ./set-functest-env.sh
- builder:
name: functest-store-results
diff --git a/jjb/functest/functest-env-presetup.sh b/jjb/functest/functest-env-presetup.sh
new file mode 100755
index 000000000..7a9b09d2c
--- /dev/null
+++ b/jjb/functest/functest-env-presetup.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+set -o errexit
+set -o nounset
+set -o pipefail
+
+# Fetch INSTALLER_IP for APEX deployments
+if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
+ echo "Gathering IP information for Apex installer VM"
+ ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
+ if sudo virsh list | grep undercloud; then
+ echo "Installer VM detected"
+ undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
+ grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
+ export INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
+ export sshkey_vol="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
+ sudo scp $ssh_options root@${INSTALLER_IP}:/home/stack/stackrc ${HOME}/stackrc
+ export stackrc_vol="-v ${HOME}/stackrc:/home/opnfv/functest/conf/stackrc"
+
+        if sudo iptables -C FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect:-/dev/null}; then
+ sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
+ fi
+        if sudo iptables -C FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect:-/dev/null}; then
+ sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
+ fi
+ echo "Installer ip is ${INSTALLER_IP}"
+ else
+ echo "No available installer VM exists and no credentials provided...exiting"
+ exit 1
+ fi
+fi
+
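
The undercloud IP discovery above is a two-step text pipeline; running it against made-up virsh/arp output (the sample MAC and IP are hypothetical) shows what each stage yields:

# "virsh domiflist undercloud" prints something like:
#   vnet0   network   default   virtio   00:16:3e:4a:61:4f
echo "vnet0   network   default   virtio   00:16:3e:4a:61:4f" | \
    grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+"
# -> 00:16:3e:4a:61:4f  (the undercloud MAC)

# "/usr/sbin/arp -e" contains a line like:
#   192.168.122.45   ether   00:16:3e:4a:61:4f   C   virbr0
echo "192.168.122.45   ether   00:16:3e:4a:61:4f   C   virbr0" | \
    grep "00:16:3e:4a:61:4f" | awk '{print $1}'
# -> 192.168.122.45  (exported as INSTALLER_IP)
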
diff --git a/jjb/functest/functest-loop.sh b/jjb/functest/functest-loop.sh
index 893c428a2..676890644 100755
--- a/jjb/functest/functest-loop.sh
+++ b/jjb/functest/functest-loop.sh
@@ -1,15 +1,9 @@
#!/bin/bash
set +e
-branch=${GIT_BRANCH##*/}
[[ "$PUSH_RESULTS_TO_DB" == "true" ]] && flags+="-r"
-if [[ "$BRANCH" =~ 'brahmaputra' ]]; then
- cmd="${FUNCTEST_REPO_DIR}/docker/run_tests.sh -s ${flags}"
-elif [[ "$BRANCH" =~ 'colorado' ]]; then
- cmd="python ${FUNCTEST_REPO_DIR}/ci/run_tests.py -t all ${flags}"
-else
- cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/run_tests.py -t all ${flags}"
-fi
+cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/run_tests.py -t all ${flags}"
+
container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1)
docker exec $container_id $cmd
diff --git a/jjb/functest/functest-project-jobs.yml b/jjb/functest/functest-project-jobs.yml
index 14ad73a91..7036f20c0 100644
--- a/jjb/functest/functest-project-jobs.yml
+++ b/jjb/functest/functest-project-jobs.yml
@@ -88,4 +88,4 @@
name: functest-unit-tests-and-docs-build
builders:
- shell: |
- $WORKSPACE/run_unit_tests.sh
+ cd $WORKSPACE && tox
diff --git a/jjb/functest/functest-suite.sh b/jjb/functest/functest-suite.sh
index 228cc3da4..5d1ed28f5 100755
--- a/jjb/functest/functest-suite.sh
+++ b/jjb/functest/functest-suite.sh
@@ -15,4 +15,7 @@ for test in ${tests[@]}; do
let global_ret_val+=$?
done
-exit $global_ret_val
+ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
+echo ${global_ret_val}>${ret_val_file}
+
+exit 0
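
Because the suite script now always exits 0, the real result travels through the return_value file and is consumed by the 'functest-exit' builder added to the job list in functest-daily-jobs.yml above. That builder is not shown in this patch; a hypothetical sketch of what such a step might do, assuming the same BRANCH-derived results path:

# Illustration only: turn the stored suite result back into the build's exit code.
ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
if [ -f "${ret_val_file}" ]; then
    exit "$(cat "${ret_val_file}")"
fi
exit 1   # no return_value file means the suite never reported a result
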
diff --git a/jjb/functest/functest-weekly-jobs.yml b/jjb/functest/functest-weekly-jobs.yml
index f44f7b8aa..59d24cc87 100644
--- a/jjb/functest/functest-weekly-jobs.yml
+++ b/jjb/functest/functest-weekly-jobs.yml
@@ -115,7 +115,10 @@
- shell:
!include-raw: ./functest-cleanup.sh
- shell:
- !include-raw: ./set-functest-env.sh
+ !include-raw:
+ - ./functest-env-presetup.sh
+ - ../../utils/fetch_os_creds.sh
+ - ./set-functest-env.sh
- shell:
!include-raw: ./functest-loop.sh
- shell:
diff --git a/jjb/functest/set-functest-env.sh b/jjb/functest/set-functest-env.sh
index 05e3d5792..558e2487d 100755
--- a/jjb/functest/set-functest-env.sh
+++ b/jjb/functest/set-functest-env.sh
@@ -1,54 +1,19 @@
#!/bin/bash
set -e
-[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-# LAB_CONFIG is used only for joid
+set +u
+set +o pipefail
+[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
+# Prepare OpenStack credentials volume
if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
- # If production lab then creds may be retrieved dynamically
- # creds are on the jumphost, always in the same folder
rc_file_vol="-v $LAB_CONFIG/admin-openrc:/home/opnfv/functest/conf/openstack.creds"
- # If dev lab, credentials may not be the default ones, just provide a path to put them into docker
- # replace the default one by the customized one provided by jenkins config
-fi
-
-if [[ ${RC_FILE_PATH} != '' ]] && [[ -f ${RC_FILE_PATH} ]] ; then
- echo "Credentials file detected: ${RC_FILE_PATH}"
- # volume if credentials file path is given to Functest
- rc_file_vol="-v ${RC_FILE_PATH}:/home/opnfv/functest/conf/openstack.creds"
- RC_FLAG=1
-fi
-
-
-if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
- ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
- if sudo virsh list | grep undercloud; then
- echo "Installer VM detected"
- undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
- grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
- INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
- sshkey_vol="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
- sudo scp $ssh_options root@${INSTALLER_IP}:/home/stack/stackrc ${HOME}/stackrc
- stackrc_vol="-v ${HOME}/stackrc:/home/opnfv/functest/conf/stackrc"
-
- if sudo iptables -C FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
- sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
- fi
- if sudo iptables -C FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
- sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
- fi
- elif [[ "$RC_FLAG" == 1 ]]; then
- echo "No available installer VM, but credentials provided...continuing"
- else
- echo "No available installer VM exists and no credentials provided...exiting"
- exit 1
- fi
-
+else
+ rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/home/opnfv/functest/conf/openstack.creds"
fi
-
# Set iptables rule to allow forwarding return traffic for container
if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FORWARD | awk 'NR==3' | grep RETURN 2> ${redirect}; then
sudo iptables -I FORWARD -j RETURN
@@ -59,6 +24,14 @@ DEPLOY_TYPE=baremetal
echo "Functest: Start Docker and prepare environment"
+if [ "$BRANCH" != 'stable/danube' ]; then
+ echo "Functest: Download images that will be used by test cases"
+ images_dir="${HOME}/opnfv/functest/images"
+ chmod +x ${WORKSPACE}/functest/ci/download_images.sh
+ ${WORKSPACE}/functest/ci/download_images.sh ${images_dir} 2> ${redirect}
+ images_vol="-v ${images_dir}:/home/opnfv/functest/images"
+fi
+
dir_result="${HOME}/opnfv/functest/results/${BRANCH##*/}"
mkdir -p ${dir_result}
sudo rm -rf ${dir_result}/*
@@ -70,8 +43,21 @@ envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
-e NODE_NAME=${NODE_NAME} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
-e BUILD_TAG=${BUILD_TAG} -e CI_DEBUG=${CI_DEBUG} -e DEPLOY_TYPE=${DEPLOY_TYPE}"
+if [[ ${INSTALLER_TYPE} == 'compass' && ${DEPLOY_SCENARIO} == *'os-nosdn-openo-ha'* ]]; then
+ ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
+ openo_msb_port=${openo_msb_port:-80}
+    openo_msb_endpoint="$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${INSTALLER_IP} \
+ 'mysql -ucompass -pcompass -Dcompass -e "select package_config from cluster;" \
+ | sed s/,/\\n/g | grep openo_ip | cut -d \" -f 4'):$openo_msb_port"
-volumes="${results_vol} ${sshkey_vol} ${stackrc_vol} ${rc_file_vol}"
+    envs=${envs}" -e OPENO_MSB_ENDPOINT=${openo_msb_endpoint}"
+fi
+
+if [ "$BRANCH" != 'stable/danube' ]; then
+ volumes="${images_vol} ${results_vol} ${sshkey_vol} ${stackrc_vol} ${rc_file_vol}"
+else
+ volumes="${results_vol} ${sshkey_vol} ${stackrc_vol} ${rc_file_vol}"
+fi
HOST_ARCH=$(uname -m)
FUNCTEST_IMAGE="opnfv/functest"
@@ -79,7 +65,7 @@ if [ "$HOST_ARCH" = "aarch64" ]; then
FUNCTEST_IMAGE="${FUNCTEST_IMAGE}_${HOST_ARCH}"
fi
-echo "Functest: Pulling image ${FUNCTEST_IMAGE}:${DOCKER_TAG}"
+echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE}:${DOCKER_TAG}"
docker pull ${FUNCTEST_IMAGE}:$DOCKER_TAG >/dev/null
cmd="sudo docker run --privileged=true -id ${envs} ${volumes} \
@@ -103,12 +89,8 @@ if [ $(docker ps | grep "${FUNCTEST_IMAGE}:${DOCKER_TAG}" | wc -l) == 0 ]; then
echo "The container ${FUNCTEST_IMAGE} with ID=${container_id} has not been properly started. Exiting..."
exit 1
fi
-if [[ "$BRANCH" =~ 'brahmaputra' ]]; then
- cmd="${FUNCTEST_REPO_DIR}/docker/prepare_env.sh"
-elif [[ "$BRANCH" =~ 'colorado' ]]; then
- cmd="python ${FUNCTEST_REPO_DIR}/ci/prepare_env.py start"
-else
- cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/prepare_env.py start"
-fi
+
+cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/prepare_env.py start"
+
echo "Executing command inside the docker: ${cmd}"
docker exec ${container_id} ${cmd}
diff --git a/jjb/global/installer-params.yml b/jjb/global/installer-params.yml
index fc9f34a48..e9f48aea1 100644
--- a/jjb/global/installer-params.yml
+++ b/jjb/global/installer-params.yml
@@ -38,6 +38,10 @@
default: '10.20.0.2'
description: 'IP of the installer'
- string:
+ name: SALT_MASTER_IP
+ default: '192.168.10.100'
+ description: 'IP of the salt master (for mcp deployments)'
+ - string:
name: INSTALLER_TYPE
default: fuel
description: 'Installer used for deploying OPNFV on this POD'
@@ -95,12 +99,16 @@
parameters:
- string:
name: INSTALLER_IP
- default: '10.20.0.2'
+ default: '10.20.7.3'
description: 'IP of the installer'
- string:
name: INSTALLER_TYPE
default: daisy
description: 'Installer used for deploying OPNFV on this POD'
+ - string:
+ name: BRIDGE
+ default: 'br7'
+ description: 'PXE bridge for booting the Daisy installer VM'
- parameter:
name: 'infra-defaults'
diff --git a/jjb/global/releng-macros.yml b/jjb/global/releng-macros.yml
index ced335cb9..5341db464 100644
--- a/jjb/global/releng-macros.yml
+++ b/jjb/global/releng-macros.yml
@@ -170,7 +170,7 @@
- pre-scm-buildstep:
- shell: |
#!/bin/bash
- sudo chown -R $USER $WORKSPACE || exit 1
+ sudo chown -R $USER:$USER $WORKSPACE || exit 1
- builder:
name: build-html-and-pdf-docs-output
diff --git a/jjb/global/slave-params.yml b/jjb/global/slave-params.yml
index 1905a098a..57442225c 100644
--- a/jjb/global/slave-params.yml
+++ b/jjb/global/slave-params.yml
@@ -4,11 +4,11 @@
# use of the new labels are in place
#####################################################
- parameter:
- name: 'apex-daily-master-defaults'
+ name: 'apex-baremetal-master-defaults'
parameters:
- label:
name: SLAVE_LABEL
- default: 'apex-daily-master'
+ default: 'apex-baremetal-master'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -25,11 +25,11 @@
default-slaves:
- lf-pod1
- parameter:
- name: 'apex-daily-danube-defaults'
+ name: 'apex-baremetal-danube-defaults'
parameters:
- label:
name: SLAVE_LABEL
- default: 'apex-daily-danube'
+ default: 'apex-baremetal-danube'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -46,11 +46,11 @@
default-slaves:
- lf-pod1
- parameter:
- name: 'apex-verify-master-defaults'
+ name: 'apex-virtual-master-defaults'
parameters:
- label:
name: SLAVE_LABEL
- default: 'apex-verify-master'
+ default: 'apex-virtual-master'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -63,19 +63,18 @@
name: SLAVE_NAME
description: 'Slave name on Jenkins'
allowed-slaves:
- - intel-virtual3
- - intel-virtual4
- - intel-virtual5
+ - lf-virtual2
+ - lf-virtual3
default-slaves:
- - intel-virtual3
- - intel-virtual4
- - intel-virtual5
+ - lf-virtual2
+ - lf-virtual3
+
- parameter:
- name: 'apex-verify-danube-defaults'
+ name: 'apex-virtual-danube-defaults'
parameters:
- label:
name: SLAVE_LABEL
- default: 'apex-verify-danube'
+ default: 'apex-virtual-danube'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -88,13 +87,9 @@
name: SLAVE_NAME
description: 'Slave name on Jenkins'
allowed-slaves:
- - intel-virtual3
- - intel-virtual4
- - intel-virtual5
+ - lf-pod3
default-slaves:
- - intel-virtual3
- - intel-virtual4
- - intel-virtual5
+ - lf-pod3
- parameter:
name: 'lf-pod1-defaults'
parameters:
@@ -113,6 +108,24 @@
name: SSH_KEY
default: /root/.ssh/id_rsa
description: 'SSH key to use for Apex'
+- parameter:
+ name: 'lf-pod3-defaults'
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - lf-pod3
+ default-slaves:
+ - lf-pod3
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
#####################################################
# Parameters for CI baremetal PODs
#####################################################
@@ -141,6 +154,26 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- parameter:
+ name: 'compass-baremetal-master-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'compass-baremetal-master'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+ name: 'compass-baremetal-branch-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'compass-baremetal-branch'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+- parameter:
name: 'fuel-baremetal-defaults'
parameters:
- label:
@@ -192,6 +225,10 @@
name: SLAVE_LABEL
default: 'daisy-baremetal'
- string:
+ name: INSTALLER_IP
+ default: '10.20.11.2'
+ description: 'IP of the installer'
+ - string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
@@ -223,6 +260,26 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- parameter:
+ name: 'compass-virtual-master-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'compass-virtual-master'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+ name: 'compass-virtual-branch-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'compass-virtual-branch'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+- parameter:
name: 'fuel-virtual-defaults'
parameters:
- label:
@@ -271,6 +328,14 @@
name: SLAVE_LABEL
default: 'daisy-virtual'
- string:
+ name: INSTALLER_IP
+ default: '10.20.11.2'
+ description: 'IP of the installer'
+ - string:
+ name: BRIDGE
+ default: 'daisy1'
+ description: 'PXE bridge for booting the Daisy installer VM'
+ - string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
@@ -580,7 +645,7 @@
description: 'Git URL to use on this Jenkins Slave'
- string:
name: INSTALLER_IP
- default: '10.20.7.2'
+ default: '10.20.7.3'
description: 'IP of the installer'
- string:
name: BRIDGE
@@ -747,15 +812,33 @@
default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
description: 'Base URI to the configuration directory'
- parameter:
- name: 'arm-pod3-2-defaults'
+ name: 'arm-pod4-defaults'
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - arm-pod4
+ default-slaves:
+ - arm-pod4
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: LAB_CONFIG_URL
+ default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
+ description: 'Base URI to the configuration directory'
+- parameter:
+ name: 'arm-virtual1-defaults'
parameters:
- node:
name: SLAVE_NAME
description: 'Slave name on Jenkins'
allowed-slaves:
- - arm-pod3-2
+ - arm-virtual1
default-slaves:
- - arm-pod3-2
+ - arm-virtual1
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
diff --git a/jjb/joid/joid-daily-jobs.yml b/jjb/joid/joid-daily-jobs.yml
index 7dc718950..13ea9b308 100644
--- a/jjb/joid/joid-daily-jobs.yml
+++ b/jjb/joid/joid-daily-jobs.yml
@@ -164,7 +164,7 @@
# 4.not used for release criteria or compliance,
# only to debug the dovetail tool bugs with joid
#- trigger-builds:
- # - project: 'dovetail-joid-{pod}-debug-{stream}'
+ # - project: 'dovetail-joid-{pod}-proposed_tests-{stream}'
# current-parameters: false
# predefined-parameters:
# DEPLOY_SCENARIO={scenario}
diff --git a/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh b/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
index 56fb4f9c1..91b6f4481 100755
--- a/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
+++ b/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
@@ -1,5 +1,4 @@
#!/bin/bash
-set -o errexit
set -o nounset
if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
@@ -51,6 +50,13 @@ else
#Uploading logs to artifacts
echo "Uploading artifacts for future debugging needs...."
gsutil cp -r $WORKSPACE/build_output/log-*.tar.gz $GS_LOG_LOCATION > $WORKSPACE/gsutil.log 2>&1
+ # verify the logs uploaded by the cyclictest daily test job
+ gsutil ls $GS_LOG_LOCATION > /dev/null 2>&1
+ if [[ $? -ne 0 ]]; then
+ echo "Problem while uploading logs to artifacts!"
+ echo "Check log $WORKSPACE/gsutil.log on $NODE_NAME"
+ exit 1
+ fi
else
echo "No test logs/artifacts available for uploading"
fi
@@ -58,7 +64,7 @@ else
fi
# upload metadata file for the artifacts built by daily job
-if [[ "$JOB_TYPE" == "daily" ]]; then
+if [[ "$JOB_TYPE" == "daily" && "$PHASE" == "build" ]]; then
gsutil cp $WORKSPACE/opnfv.properties $GS_UPLOAD_LOCATION/opnfv.properties > $WORKSPACE/gsutil.log 2>&1
gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > $WORKSPACE/gsutil.log 2>&1
gsutil -m setmeta -r \
@@ -67,10 +73,13 @@ if [[ "$JOB_TYPE" == "daily" ]]; then
gs://$GS_URL/latest.properties > /dev/null 2>&1
fi
-gsutil ls $GS_UPLOAD_LOCATION > /dev/null 2>&1
-if [[ $? -ne 0 ]]; then
- echo "Problem while uploading artifacts!"
- echo "Check log $WORKSPACE/gsutil.log on $NODE_NAME"
- exit 1
+# verify the artifacts uploaded by the verify/daily build job
+if [[ "$PHASE" == "build" ]]; then
+ gsutil ls $GS_UPLOAD_LOCATION > /dev/null 2>&1
+ if [[ $? -ne 0 ]]; then
+ echo "Problem while uploading artifacts!"
+ echo "Check log $WORKSPACE/gsutil.log on $NODE_NAME"
+ exit 1
+ fi
fi
echo "Uploaded artifacts!"
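The upload-then-verify pattern added above can be read in isolation as follows (a sketch only; the bucket path is a placeholder, and $WORKSPACE/$NODE_NAME are provided by Jenkins). Note that dropping set -o errexit at the top of the script is what allows a failed gsutil call to be reported instead of silently aborting the job.

# Sketch of the upload/verification flow used by kvmfornfv-upload-artifact.sh.
GS_UPLOAD_LOCATION="gs://example-bucket/kvmfornfv/sample"   # placeholder bucket
PHASE=${PHASE:-build}

gsutil cp "$WORKSPACE"/build_output/*.tar.gz "$GS_UPLOAD_LOCATION" > "$WORKSPACE/gsutil.log" 2>&1

# Only the build phase produces artifacts, so only then is the upload checked.
if [[ "$PHASE" == "build" ]]; then
    if ! gsutil ls "$GS_UPLOAD_LOCATION" > /dev/null 2>&1; then
        echo "Problem while uploading artifacts!"
        echo "Check log $WORKSPACE/gsutil.log on $NODE_NAME"
        exit 1
    fi
fi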
diff --git a/jjb/kvmfornfv/kvmfornfv.yml b/jjb/kvmfornfv/kvmfornfv.yml
index 8d607f985..e5b56bf9b 100644
--- a/jjb/kvmfornfv/kvmfornfv.yml
+++ b/jjb/kvmfornfv/kvmfornfv.yml
@@ -11,7 +11,7 @@
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: false
+ disabled: true
#####################################
# patch verification phases
#####################################
@@ -34,7 +34,8 @@
- 'kvmfornfv-verify-{phase}-{stream}'
- 'kvmfornfv-merge-{stream}'
- 'kvmfornfv-daily-{stream}'
- - 'kvmfornfv-{testname}-daily-{phase}-{stream}'
+ - 'kvmfornfv-daily-build-{stream}'
+ - 'kvmfornfv-{testname}-daily-test-{stream}'
#####################################
# job templates
#####################################
@@ -209,7 +210,7 @@
name: cyclictest-build
condition: SUCCESSFUL
projects:
- - name: 'kvmfornfv-cyclictest-daily-build-{stream}'
+ - name: 'kvmfornfv-daily-build-{stream}'
current-parameters: false
node-parameters: false
git-revision: true
@@ -226,16 +227,6 @@
kill-phase-on: FAILURE
abort-all-job: true
- multijob:
- name: packetforward-build
- condition: SUCCESSFUL
- projects:
- - name: 'kvmfornfv-packet_forward-daily-build-{stream}'
- current-parameters: false
- node-parameters: false
- git-revision: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
name: packetforward-test
condition: SUCCESSFUL
projects:
@@ -247,7 +238,7 @@
abort-all-job: true
- job-template:
- name: 'kvmfornfv-{testname}-daily-{phase}-{stream}'
+ name: 'kvmfornfv-daily-build-{stream}'
disabled: '{obj:disabled}'
@@ -265,7 +256,39 @@
- project-parameter:
project: '{project}'
branch: '{branch}'
- - '{slave-label}-defaults'
+ - 'opnfv-build-ubuntu-defaults'
+ - 'kvmfornfv-defaults':
+ gs-pathname: '{gs-pathname}'
+ - string:
+ name: PHASE
+ default: 'build'
+ description: "Execution of the kvmfornfv daily 'build' job."
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - '{project}-daily-build-macro'
+
+- job-template:
+ name: 'kvmfornfv-{testname}-daily-test-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ concurrent: false
+
+ scm:
+ - git-scm
+
+ wrappers:
+ - ssh-agent-wrapper
+ - timeout:
+ timeout: 360
+ fail: true
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'intel-pod10-defaults'
- 'kvmfornfv-defaults':
gs-pathname: '{gs-pathname}'
- string:
@@ -274,13 +297,13 @@
description: "Daily job to execute kvmfornfv '{testname}' testcase."
- string:
name: PHASE
- default: '{phase}'
- description: "Execution of kvmfornfv daily '{phase}' job ."
+ default: 'test'
+ description: "Execution of the kvmfornfv daily 'test' job."
builders:
- description-setter:
description: "Built on $NODE_NAME"
- - '{project}-{testname}-daily-{phase}-macro'
+ - '{project}-{testname}-daily-test-macro'
#####################################
# builder macros
#####################################
@@ -299,7 +322,7 @@
- shell:
!include-raw: ./kvmfornfv-test.sh
- builder:
- name: 'kvmfornfv-cyclictest-daily-build-macro'
+ name: 'kvmfornfv-daily-build-macro'
builders:
- shell:
!include-raw: ./kvmfornfv-build.sh
@@ -315,13 +338,6 @@
- shell:
!include-raw: ./kvmfornfv-upload-artifact.sh
- builder:
- name: 'kvmfornfv-packet_forward-daily-build-macro'
- builders:
- - shell:
- !include-raw: ./kvmfornfv-build.sh
- - shell:
- !include-raw: ./kvmfornfv-upload-artifact.sh
-- builder:
name: 'kvmfornfv-packet_forward-daily-test-macro'
builders:
- shell:
diff --git a/jjb/opera/opera-daily-jobs.yml b/jjb/opera/opera-daily-jobs.yml
index 5d2cc03f3..596d3771f 100644
--- a/jjb/opera/opera-daily-jobs.yml
+++ b/jjb/opera/opera-daily-jobs.yml
@@ -6,30 +6,32 @@
#####################################
# branch definitions
#####################################
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
+ master: &master
+ stream: master
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
#####################################
-# patch verification phases
+# pod definitions
#####################################
- phase:
- - 'basic'
- - 'deploy'
+ pod:
+ - virtual:
+ slave-label: 'huawei-virtual7'
+ os-version: 'xenial'
+ <<: *master
#####################################
# jobs
#####################################
jobs:
- - 'opera-daily-{stream}'
- - 'opera-daily-{phase}-{stream}'
+ - 'opera-{pod}-daily-{stream}'
+
#####################################
# job templates
#####################################
- job-template:
- name: 'opera-daily-{stream}'
+ name: 'opera-{pod}-daily-{stream}'
project-type: multijob
@@ -62,86 +64,35 @@
- project-parameter:
project: '{project}'
branch: '{branch}'
- - 'huawei-virtual7-defaults'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: os-nosdn-openo-ha
+ - '{slave-label}-defaults'
builders:
- description-setter:
description: "Built on $NODE_NAME"
- multijob:
- name: basic
+ name: deploy
condition: SUCCESSFUL
projects:
- - name: 'opera-daily-basic-{stream}'
- current-parameters: true
+ - name: 'compass-deploy-{pod}-daily-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-openo-ha
+ COMPASS_OS_VERSION=xenial
node-parameters: true
kill-phase-on: FAILURE
abort-all-job: true
- multijob:
- name: deploy
+ name: functest
condition: SUCCESSFUL
projects:
- - name: 'compass-deploy-virtual-daily-{stream}'
+ - name: 'functest-compass-{pod}-suite-{stream}'
current-parameters: false
predefined-parameters: |
DEPLOY_SCENARIO=os-nosdn-openo-ha
- COMPASS_OS_VERSION=xenial
+ FUNCTEST_SUITE_NAME=opera_vims
node-parameters: true
- kill-phase-on: FAILURE
+ kill-phase-on: NEVER
abort-all-job: true
-# - multijob:
-# name: functest
-# condition: SUCCESSFUL
-# projects:
-# - name: 'functest-compass-baremetal-suite-{stream}'
-# current-parameters: false
-# predefined-parameters:
-# FUNCTEST_SUITE_NAME=opera
-# node-parameters: true
-# kill-phase-on: NEVER
-# abort-all-job: true
-
-- job-template:
- name: 'opera-daily-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 120
- fail: true
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-daily-{phase}-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'opera-daily-basic-macro'
- builders:
- - shell: |
- #!/bin/bash
- echo "Hello world!"
-
-- builder:
- name: 'opera-daily-deploy-macro'
- builders:
- - shell: |
- #!/bin/bash
- echo "Hello world!"
-
diff --git a/jjb/qtip/helpers/validate-deploy.sh b/jjb/qtip/helpers/validate-deploy.sh
index 9f3dbe414..af8f8c200 100644
--- a/jjb/qtip/helpers/validate-deploy.sh
+++ b/jjb/qtip/helpers/validate-deploy.sh
@@ -1,6 +1,6 @@
#!/bin/bash
##############################################################################
-# Copyright (c) 2016 ZTE and others.
+# Copyright (c) 2017 ZTE and others.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
@@ -8,40 +8,20 @@
##############################################################################
set -e
-envs="INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP}
--e NODE_NAME=${NODE_NAME} -e CI_DEBUG=${CI_DEBUG}"
-ramfs=/tmp/qtip/ramfs
-cfg_dir=$(dirname $ramfs)
-dir_imgstore="${HOME}/imgstore"
-ramfs_volume="$ramfs:/mnt/ramfs"
-
echo "--------------------------------------------------------"
echo "POD: $NODE_NAME"
-echo "INSTALLER: $INSTALLER_TYPE"
echo "Scenario: $DEPLOY_SCENARIO"
+echo "INSTALLER: $INSTALLER_TYPE"
+echo "INSTALLER_IP: $INSTALLER_IP"
echo "--------------------------------------------------------"
echo "Qtip: Pulling docker image: opnfv/qtip:${DOCKER_TAG}"
-docker pull opnfv/qtip:$DOCKER_TAG
-
-# use ramfs to fix docker socket connection issue with overlay mode in centos
-if [ ! -d $ramfs ]; then
- mkdir -p $ramfs
-fi
+docker pull opnfv/qtip:$DOCKER_TAG >/dev/null
-if [ ! -z "$(df $ramfs | tail -n -1 | grep $ramfs)" ]; then
- sudo mount -t tmpfs -o size=32M tmpfs $ramfs
-fi
-
-# enable contro path in docker
-cat <<EOF > ${cfg_dir}/ansible.cfg
-[defaults]
-callback_whitelist = profile_tasks
-[ssh_connection]
-control_path=/mnt/ramfs/ansible-ssh-%%h-%%p-%%r
-EOF
+envs="INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP}
+-e POD_NAME=${NODE_NAME} -e SCENARIO=${DEPLOY_SCENARIO}"
-cmd=" docker run -id -e $envs -v ${ramfs_volume} opnfv/qtip:${DOCKER_TAG} /bin/bash"
+cmd=" docker run -id -e $envs opnfv/qtip:${DOCKER_TAG} /bin/bash"
echo "Qtip: Running docker command: ${cmd}"
${cmd}
@@ -49,14 +29,12 @@ container_id=$(docker ps | grep "opnfv/qtip:${DOCKER_TAG}" | awk '{print $1}' |
if [ $(docker ps | grep 'opnfv/qtip' | wc -l) == 0 ]; then
echo "The container opnfv/qtip with ID=${container_id} has not been properly started. Exiting..."
exit 1
-else
- echo "The container ID is: ${container_id}"
- QTIP_REPO=/home/opnfv/repos/qtip
- docker cp ${cfg_dir}/ansible.cfg ${container_id}:/home/opnfv/.ansible.cfg
-# TODO(zhihui_wu): use qtip cli to execute benchmark test in the future
- docker exec -t ${container_id} bash -c "cd ${QTIP_REPO}/qtip/runner/ &&
- python runner.py -d /home/opnfv/qtip/results/ -b all"
-
fi
+echo "The container ID is: ${container_id}"
+QTIP_REPO=/home/opnfv/repos/qtip
+
+docker exec -t ${container_id} bash -c "bash ${QTIP_REPO}/tests/ci/run_ci.sh"
+
echo "Qtip done!"
+exit 0
\ No newline at end of file
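The container lifecycle in the rewritten validate-deploy.sh reduces to start, health-check, exec; a sketch with illustrative values (the run_ci.sh path mirrors the call above):

# Sketch of the qtip container start/check/exec sequence (illustrative values).
DOCKER_TAG=${DOCKER_TAG:-latest}
envs="INSTALLER_TYPE=fuel -e INSTALLER_IP=10.20.0.2 -e POD_NAME=zte-pod1 -e SCENARIO=os-nosdn-nofeature-ha"

docker pull opnfv/qtip:${DOCKER_TAG} >/dev/null
docker run -id -e $envs opnfv/qtip:${DOCKER_TAG} /bin/bash

# Pick the newest matching container and verify it is actually running.
container_id=$(docker ps | grep "opnfv/qtip:${DOCKER_TAG}" | awk '{print $1}' | head -1)
if [ -z "${container_id}" ]; then
    echo "The opnfv/qtip container has not been properly started. Exiting..."
    exit 1
fi
docker exec -t ${container_id} bash -c "bash /home/opnfv/repos/qtip/tests/ci/run_ci.sh"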
diff --git a/jjb/releng/opnfv-docker-arm.yml b/jjb/releng/opnfv-docker-arm.yml
index ba540ed76..417fc702c 100644
--- a/jjb/releng/opnfv-docker-arm.yml
+++ b/jjb/releng/opnfv-docker-arm.yml
@@ -18,6 +18,11 @@
receivers: >
cristina.pauna@enea.com
alexandru.avadanii@enea.com
+ dovetail-arm-receivers: &dovetail-arm-receivers
+ receivers: >
+ cristina.pauna@enea.com
+ alexandru.avadanii@enea.com
+ alexandru.nemes@enea.com
other-receivers: &other-receivers
receivers: ''
@@ -26,6 +31,9 @@
- 'functest':
<<: *master
<<: *functest-arm-receivers
+ - 'dovetail':
+ <<: *master
+ <<: *dovetail-arm-receivers
# projects with jobs for stable
jobs:
diff --git a/jjb/releng/opnfv-docker.sh b/jjb/releng/opnfv-docker.sh
index 5d73a9d70..2aa52adc5 100644
--- a/jjb/releng/opnfv-docker.sh
+++ b/jjb/releng/opnfv-docker.sh
@@ -75,14 +75,11 @@ echo "Current branch: $BRANCH"
if [[ "$BRANCH" == "master" ]]; then
DOCKER_TAG="latest"
+elif [[ -n "${RELEASE_VERSION-}" ]]; then
+ DOCKER_TAG=${BRANCH##*/}.${RELEASE_VERSION}
+ # e.g. danube.1.0, danube.2.0, danube.3.0
else
- if [[ -n "${RELEASE_VERSION-}" ]]; then
- release=${BRANCH##*/}
- DOCKER_TAG=${release}.${RELEASE_VERSION}
- # e.g. colorado.1.0, colorado.2.0, colorado.3.0
- else
- DOCKER_TAG="stable"
- fi
+ DOCKER_TAG="stable"
fi
# Start the build
@@ -90,6 +87,9 @@ echo "Building docker image: $DOCKER_REPO_NAME:$DOCKER_TAG"
echo "--------------------------------------------------------"
echo
if [[ $DOCKER_REPO_NAME == *"dovetail"* ]]; then
+ if [[ -n "${RELEASE_VERSION-}" ]]; then
+ DOCKER_TAG=${RELEASE_VERSION}
+ fi
cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG -f $DOCKERFILE ."
else
cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG --build-arg BRANCH=$BRANCH
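The flattened branch/tag logic above behaves like this small helper (a sketch; example values only, and note that dovetail images subsequently override the tag with the bare RELEASE_VERSION):

# Sketch of the DOCKER_TAG selection after the refactoring above.
get_docker_tag() {
    local branch=$1 release_version=$2
    if [[ "$branch" == "master" ]]; then
        echo "latest"
    elif [[ -n "$release_version" ]]; then
        # e.g. stable/danube + 1.0 -> danube.1.0
        echo "${branch##*/}.${release_version}"
    else
        echo "stable"
    fi
}

get_docker_tag "master" ""            # -> latest
get_docker_tag "stable/danube" "1.0"  # -> danube.1.0
get_docker_tag "stable/danube" ""     # -> stable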
diff --git a/jjb/releng/opnfv-docker.yml b/jjb/releng/opnfv-docker.yml
index 3b7ec3478..5fe0eb913 100644
--- a/jjb/releng/opnfv-docker.yml
+++ b/jjb/releng/opnfv-docker.yml
@@ -25,6 +25,9 @@
project:
# projects with jobs for master
+ - 'releng-anteater':
+ <<: *master
+ <<: *other-receivers
- 'bottlenecks':
<<: *master
<<: *other-receivers
diff --git a/jjb/releng/opnfv-lint.yml b/jjb/releng/opnfv-lint.yml
index 166aea8f9..8c231c3e8 100644
--- a/jjb/releng/opnfv-lint.yml
+++ b/jjb/releng/opnfv-lint.yml
@@ -53,7 +53,7 @@
comment-contains-value: 'reverify'
projects:
- project-compare-type: 'REG_EXP'
- project-pattern: 'functest|sdnvpn|qtip|daisy|sfc|escalator|releng'
+ project-pattern: 'functest|sdnvpn|qtip|daisy|sfc|escalator|releng|pharos|octopus|securedlab'
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
diff --git a/jjb/releng/testapi-automate.yml b/jjb/releng/testapi-automate.yml
index 8f3ae0c23..dd76538a3 100644
--- a/jjb/releng/testapi-automate.yml
+++ b/jjb/releng/testapi-automate.yml
@@ -258,8 +258,7 @@
name: 'testapi-automate-docker-deploy-macro'
builders:
- shell: |
- echo 'disable TestAPI update temporarily due to frequent change'
-# bash ./jjb/releng/testapi-docker-deploy.sh
+ bash ./jjb/releng/testapi-docker-deploy.sh
################################
# job publishers
diff --git a/jjb/securedlab/check-jinja2.sh b/jjb/securedlab/check-jinja2.sh
new file mode 100755
index 000000000..4c1927d77
--- /dev/null
+++ b/jjb/securedlab/check-jinja2.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+set +x
+set -o errexit
+for lab_configs in $(find labs/ -name 'pod*.yaml'); do
+ while IFS= read -r jinja_templates; do
+ echo "./utils/generate_config.py -y $lab_configs -j $jinja_templates"
+ ./utils/generate_config.py -y $lab_configs -j $jinja_templates
+ done < <(find installers/ -name '*.j2')
+done
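Run from the root of a securedlab checkout, the nested find loop above expands to one generate_config.py call per lab/template pair, for example (paths are illustrative):

# Illustrative expansion of the loop in check-jinja2.sh; real paths depend on the repo layout.
./utils/generate_config.py -y labs/lf/pod1.yaml -j installers/fuel/pod_config.yaml.j2
./utils/generate_config.py -y labs/lf/pod1.yaml -j installers/joid/pod_config.yaml.j2
# Any rendering failure aborts the job because the script runs with 'set -o errexit'.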
diff --git a/jjb/securedlab/check-jinja2.yml b/jjb/securedlab/check-jinja2.yml
new file mode 100644
index 000000000..1e85536e7
--- /dev/null
+++ b/jjb/securedlab/check-jinja2.yml
@@ -0,0 +1,80 @@
+########################
+# Job configuration to validate jinja2 files
+########################
+- project:
+
+ name: validate-templates
+
+ project: 'securedlab'
+
+ jobs:
+ - 'validate-jinja2-templates-{stream}'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ disabled: false
+ - danube:
+ branch: 'stable/{stream}'
+ disabled: false
+
+########################
+# job templates
+########################
+
+- job-template:
+ name: 'validate-jinja2-templates-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ concurrent: true
+
+ parameters:
+ - project-parameter:
+ project: $GERRIT_PROJECT
+ branch: '{branch}'
+ - node:
+ name: SLAVE_NAME
+ description: Slave to execute the jinja2 template test
+ default-slaves:
+ - lf-build1
+ allowed-multiselect: true
+ ignore-offline-nodes: true
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'REG_EXP'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: ANT
+ pattern: 'utils/generate_config.yml'
+ - compare-type: ANT
+ pattern: '**/*.jinja2'
+ - compare-type: ANT
+ pattern: '**/*.yaml'
+ builders:
+ - check-jinja
+
+- builder:
+ name: check-jinja
+ builders:
+ - shell:
+ !include-raw-escape: ./check-jinja2.sh
diff --git a/jjb/storperf/storperf.yml b/jjb/storperf/storperf.yml
index 709a1ebab..be53b27b4 100644
--- a/jjb/storperf/storperf.yml
+++ b/jjb/storperf/storperf.yml
@@ -13,10 +13,12 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
+ docker-tag: 'latest'
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
disabled: false
+ docker-tag: 'stable'
- job-template:
name: 'storperf-verify-{stream}'
@@ -149,12 +151,57 @@
project: '{project}'
branch: '{branch}'
- 'intel-pod9-defaults'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-nosdn-nofeature-noha'
+ - string:
+ name: DOCKER_TAG
+ default: '{docker-tag}'
+ description: 'Tag to pull docker image'
+ - choice:
+ name: DISK_TYPE
+ choices:
+ - 'SSD'
+ - 'HDD'
+ default: 'HDD'
+ description: 'The type of hard disk that Cinder uses'
+ - string:
+ name: AGENT_COUNT
+ description: 'The number of slave agents to start. Defaults to the cinder node count'
+ - string:
+ name: VOLUME_SIZE
+ default: '4'
+ description: 'Size of Cinder volume (in GB)'
+ - string:
+ name: WORKLOADS
+ default: 'wr,rr,rw'
+ description: 'Workloads to run'
+ - string:
+ name: BLOCK_SIZES
+ default: '2048,16384'
+ description: 'Block sizes for VM I/O operations'
+ - string:
+ name: QUEUE_DEPTHS
+ default: '1,4'
+ description: 'Number of simultaneous I/O operations to keep active'
+ - string:
+ name: STEADY_STATE_SAMPLES
+ default: '10'
+ description: 'Number of samples to use (1 per minute) to measure steady state'
+ - string:
+ name: DEADLINE
+ description: 'Maximum run time in minutes if steady state cannot be found. Defaults to 3 times steady state samples'
+ - choice:
+ name: TEST_CASE
+ choices:
+ - 'snia_steady_state'
+ description: 'The test case to run'
scm:
- git-scm
triggers:
- - timed: 'H H * * *'
+ - timed: '0 18 * * *'
builders:
- shell: |
diff --git a/jjb/xci/bifrost-provision.sh b/jjb/xci/bifrost-provision.sh
index 4724c2ee5..b37da9059 100755
--- a/jjb/xci/bifrost-provision.sh
+++ b/jjb/xci/bifrost-provision.sh
@@ -82,13 +82,13 @@ sudo -E ./scripts/destroy-env.sh
# provision VMs for the flavor
cd /opt/bifrost
-sudo -E ./scripts/bifrost-provision.sh
+./scripts/bifrost-provision.sh
# list the provisioned VMs
cd /opt/bifrost
source env-vars
ironic node-list
-virsh list
+sudo -H -E virsh list
echo "OpenStack nodes are provisioned!"
# here we have to do something in order to capture what was the working sha1
diff --git a/jjb/xci/bifrost-verify-jobs.yml b/jjb/xci/bifrost-verify-jobs.yml
index 806829620..319f8eb28 100644
--- a/jjb/xci/bifrost-verify-jobs.yml
+++ b/jjb/xci/bifrost-verify-jobs.yml
@@ -12,17 +12,17 @@
project:
- 'openstack':
project-repo: 'https://git.openstack.org/openstack/bifrost'
- clone-location: '/opt/bifrost'
+ clone-location: '$WORKSPACE/bifrost'
- 'opnfv':
project-repo: 'https://gerrit.opnfv.org/gerrit/releng'
- clone-location: '/opt/releng'
+ clone-location: '$WORKSPACE/releng'
#--------------------------------
# distros
#--------------------------------
distro:
- - 'trusty':
+ - 'xenial':
disabled: false
- dib-os-release: 'trusty'
+ dib-os-release: 'xenial'
dib-os-element: 'ubuntu-minimal'
dib-os-packages: 'vlan,vim,less,bridge-utils,language-pack-en,iputils-ping,rsyslog,curl'
extra-dib-elements: 'openssh-server'
diff --git a/jjb/xci/bifrost-verify.sh b/jjb/xci/bifrost-verify.sh
index f596d7527..b522b8969 100755
--- a/jjb/xci/bifrost-verify.sh
+++ b/jjb/xci/bifrost-verify.sh
@@ -89,38 +89,38 @@ function cleanup_and_upload() {
}
# check distro to see if we support it
-if [[ ! "$DISTRO" =~ (trusty|centos7|suse) ]]; then
+if [[ ! "$DISTRO" =~ (xenial|centos7|suse) ]]; then
echo "Distro $DISTRO is not supported!"
exit 1
fi
# remove previously cloned repos
-sudo /bin/rm -rf /opt/bifrost /opt/releng
+/bin/rm -rf $WORKSPACE/bifrost $WORKSPACE/releng
# Fix up permissions
fix_ownership
# clone all the repos first and checkout the patch afterwards
-sudo git clone https://git.openstack.org/openstack/bifrost /opt/bifrost
-sudo git clone https://gerrit.opnfv.org/gerrit/releng /opt/releng
+git clone https://git.openstack.org/openstack/bifrost $WORKSPACE/bifrost
+git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng
# checkout the patch
cd $CLONE_LOCATION
-sudo git fetch $PROJECT_REPO $GERRIT_REFSPEC && sudo git checkout FETCH_HEAD
+git fetch $PROJECT_REPO $GERRIT_REFSPEC && git checkout FETCH_HEAD
# combine opnfv and upstream scripts/playbooks
-sudo /bin/cp -rf /opt/releng/prototypes/bifrost/* /opt/bifrost/
+/bin/cp -rf $WORKSPACE/releng/prototypes/bifrost/* $WORKSPACE/bifrost/
# cleanup remnants of previous deployment
-cd /opt/bifrost
-sudo -E ./scripts/destroy-env.sh
+cd $WORKSPACE/bifrost
+sudo -H -E ./scripts/destroy-env.sh
# provision 3 VMs; xcimaster, controller, and compute
-cd /opt/bifrost
-sudo -E ./scripts/bifrost-provision.sh
+cd $WORKSPACE/bifrost
+./scripts/bifrost-provision.sh
# list the provisioned VMs
-cd /opt/bifrost
+cd $WORKSPACE/bifrost
source env-vars
ironic node-list
-virsh list
+sudo -H -E virsh list
diff --git a/jjb/xci/xci-deploy.sh b/jjb/xci/xci-deploy.sh
index b007b852f..8ad637805 100755
--- a/jjb/xci/xci-deploy.sh
+++ b/jjb/xci/xci-deploy.sh
@@ -54,7 +54,7 @@ fi
# proceed with the deployment
cd $WORKSPACE/prototypes/xci
-sudo -E ./xci-deploy.sh
+./xci-deploy.sh
if [[ "$JOB_NAME" =~ "periodic" && "$OPENSTACK_OSA_VERSION" == "master" ]]; then
# if we arrived here without failing, it means we have something we can pin
diff --git a/jjb/yardstick/yardstick-ci-jobs.yml b/jjb/yardstick/yardstick-daily-jobs.yml
index 1f2f3122c..5ff36f842 100644
--- a/jjb/yardstick/yardstick-ci-jobs.yml
+++ b/jjb/yardstick/yardstick-daily-jobs.yml
@@ -182,6 +182,16 @@
installer: fuel
auto-trigger-name: 'daily-trigger-disabled'
<<: *danube
+ - arm-virtual1:
+ slave-label: '{pod}'
+ installer: fuel
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
+ - arm-virtual1:
+ slave-label: '{pod}'
+ installer: fuel
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *danube
- orange-pod2:
slave-label: '{pod}'
installer: joid
@@ -338,6 +348,13 @@
default: '-i 104.197.68.199:8086'
description: 'Arguments to use in order to choose the backend DB'
- parameter:
+ name: 'yardstick-params-arm-virtual1'
+ parameters:
+ - string:
+ name: YARDSTICK_DB_BACKEND
+ default: '-i 104.197.68.199:8086'
+ description: 'Arguments to use in order to choose the backend DB'
+- parameter:
name: 'yardstick-params-joid-baremetal'
parameters:
- string:
diff --git a/jjb/yardstick/yardstick-project-jobs.yml b/jjb/yardstick/yardstick-project-jobs.yml
index bbfa152a2..643c1f932 100644
--- a/jjb/yardstick/yardstick-project-jobs.yml
+++ b/jjb/yardstick/yardstick-project-jobs.yml
@@ -58,6 +58,13 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+
+ wrappers:
+ - ssh-agent-wrapper
+ - timeout:
+ timeout: 30
+ fail: true
+
builders:
- yardstick-unit-tests-and-docs-build
@@ -93,6 +100,12 @@
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ wrappers:
+ - ssh-agent-wrapper
+ - timeout:
+ timeout: 30
+ fail: true
+
builders:
- yardstick-unit-tests-and-docs-build