summaryrefslogtreecommitdiffstats
path: root/jjb
diff options
context:
space:
mode:
Diffstat (limited to 'jjb')
-rwxr-xr-xjjb/apex/apex-build.sh59
-rwxr-xr-xjjb/apex/apex-deploy-baremetal.sh84
-rwxr-xr-xjjb/apex/apex-deploy-virtual.sh151
-rwxr-xr-xjjb/apex/apex-gs-cleanup.sh19
-rwxr-xr-xjjb/apex/apex-upload-artifact.sh37
-rwxr-xr-xjjb/apex/apex-workspace-cleanup.sh7
-rw-r--r--jjb/apex/apex.yml372
-rw-r--r--jjb/fuel/fuel-ci-jobs.yml360
-rw-r--r--[-rwxr-xr-x]jjb/fuel/fuel-project-jobs.yml2
-rw-r--r--jjb/functest/functest-ci-jobs.yml111
-rw-r--r--jjb/opnfv/slave-params.yml38
-rw-r--r--jjb/parser/parser.yml13
-rw-r--r--jjb/releng-macros.yaml5
-rw-r--r--jjb/yardstick/yardstick-ci-jobs.yml169
14 files changed, 628 insertions, 799 deletions
diff --git a/jjb/apex/apex-build.sh b/jjb/apex/apex-build.sh
new file mode 100755
index 000000000..ca1821abf
--- /dev/null
+++ b/jjb/apex/apex-build.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
+# log info to console
+echo "Starting the build of Apex using OpenStack Master packages. This will take some time..."
+echo "---------------------------------------------------------------------------------------"
+echo
+# create the cache directory if it doesn't exist
+[[ -d $CACHE_DIRECTORY ]] || mkdir -p $CACHE_DIRECTORY
+# set OPNFV_ARTIFACT_VERSION
+if echo $BUILD_TAG | grep "apex-verify" 1> /dev/null; then
+ if echo $GERRIT_BRANCH | grep "brahmaputra" 1> /dev/null; then
+ export OPNFV_ARTIFACT_VERSION=brahmaputra-dev${BUILD_NUMBER}
+ export BUILD_ARGS="-v $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY"
+ else
+ export OPNFV_ARTIFACT_VERSION=dev${BUILD_NUMBER}
+ export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY"
+ fi
+elif [ "$ARTIFACT_VERSION" == "daily" ]; then
+ if echo $GERRIT_BRANCH | grep "brahmaputra" 1> /dev/null; then
+ export OPNFV_ARTIFACT_VERSION=brahmaputra-$(date -u +"%Y-%m-%d")
+ export BUILD_ARGS="-v $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY"
+ else
+ export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d")
+ export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY --iso"
+ fi
+else
+ export OPNFV_ARTIFACT_VERSION=${ARTIFACT_VERSION}
+ if echo $GERRIT_BRANCH | grep "brahmaputra" 1> /dev/null; then
+ export BUILD_ARGS="-v $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY"
+ else
+ export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY --iso"
+ fi
+fi
+# clean for stable but doesn't matter for master
+if echo $GERRIT_BRANCH | grep "brahmaputra" 1> /dev/null; then
+ sudo opnfv-clean
+fi
+# start the build
+cd $WORKSPACE/ci
+./build.sh $BUILD_ARGS
+RPM_VERSION=$(grep Version: $BUILD_DIRECTORY/opnfv-apex.spec | awk '{ print $2 }')-$(echo $OPNFV_ARTIFACT_VERSION | tr -d '_-')
+# list the contents of BUILD_OUTPUT directory
+ls -al $BUILD_DIRECTORY
+# save information regarding artifact into file
+(
+ echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
+ echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
+ echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
+ echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
+ echo "OPNFV_ARTIFACT_MD5SUM=$(md5sum $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
+ echo "OPNFV_SRPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.src.rpm"
+ echo "OPNFV_RPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.noarch.rpm"
+ echo "OPNFV_RPM_MD5SUM=$(md5sum $BUILD_DIRECTORY/noarch/opnfv-apex-$RPM_VERSION.noarch.rpm | cut -d' ' -f1)"
+ echo "OPNFV_BUILD_URL=$BUILD_URL"
+) > $WORKSPACE/opnfv.properties
+echo "--------------------------------------------------------"
+echo "Done!"
diff --git a/jjb/apex/apex-deploy-baremetal.sh b/jjb/apex/apex-deploy-baremetal.sh
new file mode 100755
index 000000000..efb6561d7
--- /dev/null
+++ b/jjb/apex/apex-deploy-baremetal.sh
@@ -0,0 +1,84 @@
+#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
+
+# log info to console
+echo "Starting the Apex baremetal deployment."
+echo "--------------------------------------------------------"
+echo
+
+if [[ ! "$ARTIFACT_NAME" == "latest" ]]; then
+ # if artifact name is passed the pull a
+ # specific artifact from artifacts.opnfv.org
+ RPM_INSTALL_PATH=$GS_URL/$ARTIFACT_NAME
+else
+ if [[ $BUILD_DIRECTORY == *apex-build* ]]; then
+ BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY
+ echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY"
+ fi
+ if [[ -f ${BUILD_DIRECTORY}/../opnfv.properties ]]; then
+ # if opnfv.properties exists then use the
+ # local build. Source the file so we get local OPNFV vars
+ source ${BUILD_DIRECTORY}/../opnfv.properties
+ RPM_INSTALL_PATH=${BUILD_DIRECTORY}/$(basename $OPNFV_RPM_URL)
+ else
+ # no opnfv.properties means use the latest from artifacts.opnfv.org
+ # get the latest.properties to get the link to the latest artifact
+ curl -s -o $WORKSPACE/opnfv.properties http://$GS_URL/latest.properties
+ [[ -f opnfv.properties ]] || exit 1
+ # source the file so we get OPNFV vars
+ source opnfv.properties
+ RPM_INSTALL_PATH=$OPNFV_RPM_URL
+ fi
+fi
+
+if [ ! -e "$RPM_INSTALL_PATH" ]; then
+ RPM_INSTALL_PATH=http://${OPNFV_RPM_URL}
+fi
+
+RPM_LIST=$RPM_INSTALL_PATH
+for pkg in common undercloud; do
+ RPM_LIST+=" ${RPM_INSTALL_PATH/opnfv-apex/opnfv-apex-${pkg}}"
+done
+
+# update / install the new rpm
+if rpm -q opnfv-apex > /dev/null; then
+ if [ $(basename $OPNFV_RPM_URL) == $(rpm -q opnfv-apex).rpm ]; then
+ echo "RPM is already installed"
+ elif sudo yum update -y $RPM_LIST | grep "does not update installed package"; then
+ if ! sudo yum downgrade -y $RPM_LIST; then
+ sudo yum remove -y opnfv-undercloud opnfv-common
+ sudo yum downgrade -y $RPM_INSTALL_PATH
+ fi
+ fi
+else
+ sudo yum install -y $RPM_LIST;
+fi
+
+# cleanup environment before we start
+sudo opnfv-clean
+# initiate baremetal deployment
+if [ -e /etc/opnfv-apex/network_settings.yaml ]; then
+ if [ -n "$DEPLOY_SCENARIO" ]; then
+ echo "Deploy Scenario set to ${DEPLOY_SCENARIO}"
+ if [ -e /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml ]; then
+ sudo opnfv-deploy -i /root/inventory/pod_settings.yaml \
+ -d /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml \
+ -n /root/network/network_settings.yaml --debug
+ else
+ echo "File does not exist /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml"
+ exit 1
+ fi
+ else
+ echo "Deploy scenario not set!"
+ exit 1
+ fi
+else
+ echo "File /etc/opnfv-apex/network_settings.yaml does not exist!"
+ exit 1
+fi
+
+echo
+echo "--------------------------------------------------------"
+echo "Done!"
diff --git a/jjb/apex/apex-deploy-virtual.sh b/jjb/apex/apex-deploy-virtual.sh
new file mode 100755
index 000000000..4d9b03088
--- /dev/null
+++ b/jjb/apex/apex-deploy-virtual.sh
@@ -0,0 +1,151 @@
+#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
+
+# log info to console
+echo "Starting the Apex virtual deployment."
+echo "--------------------------------------------------------"
+echo
+
+if [[ $BUILD_DIRECTORY == *verify-master* ]]; then
+ cd $WORKSPACE/../${BUILD_DIRECTORY/build_output/}
+ WORKSPACE=$(pwd)
+ echo "WORKSPACE modified to $WORKSPACE"
+ cd $WORKSPACE/ci
+elif [[ ! "$ARTIFACT_NAME" == "latest" ]]; then
+ # if artifact name is passed the pull a
+ # specific artifact from artifacts.opnfv.org
+ RPM_INSTALL_PATH=$GS_URL
+ RPM_LIST=$RPM_INSTALL_PATH/$ARTIFACT_NAME
+else
+ if [[ $BUILD_DIRECTORY == *verify* ]]; then
+ BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY
+ echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY"
+ elif [[ $BUILD_DIRECTORY == *apex-build* ]]; then
+ BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY
+ echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY"
+ fi
+
+ if [[ -f ${BUILD_DIRECTORY}/../opnfv.properties ]]; then
+ # if opnfv.properties exists then use the
+ # local build. Source the file so we get local OPNFV vars
+ source ${BUILD_DIRECTORY}/../opnfv.properties
+ RPM_INSTALL_PATH=${BUILD_DIRECTORY}/noarch
+ RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
+ else
+ if [[ $BUILD_DIRECTORY == *verify* ]]; then
+ echo "BUILD_DIRECTORY is from a verify job, so will not use latest from URL"
+ echo "Check that the slave has opnfv.properties in $BUILD_DIRECTORY"
+ exit 1
+ elif [[ $BUILD_DIRECTORY == *apex-build* ]]; then
+ echo "BUILD_DIRECTORY is from a daily job, so will not use latest from URL"
+ echo "Check that the slave has opnfv.properties in $BUILD_DIRECTORY"
+ exit 1
+ fi
+ # no opnfv.properties means use the latest from artifacts.opnfv.org
+ # get the latest.properties to get the link to the latest artifact
+ curl -s -o $WORKSPACE/opnfv.properties http://$GS_URL/latest.properties
+ [[ -f opnfv.properties ]] || exit 1
+ # source the file so we get OPNFV vars
+ source opnfv.properties
+ RPM_INSTALL_PATH=$(echo $OPNFV_RPM_URL | sed 's/'"$(basename $OPNFV_RPM_URL)"'//')
+ RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
+ fi
+fi
+
+if [ -z "$DEPLOY_SCENARIO" ]; then
+ echo "Deploy scenario not set!"
+ exit 1
+fi
+
+# use local build for verify
+if [[ $BUILD_DIRECTORY == *verify-master* ]]; then
+ if [ ! -e "${WORKSPACE}/build/lib" ]; then ln -s ${WORKSPACE}/lib ${WORKSPACE}/build/lib; fi
+ DEPLOY_CMD="CONFIG=${WORKSPACE}/build RESOURCES=${WORKSPACE}/build/images/ ./deploy.sh -c ${WORKSPACE}/build -r ${WORKSPACE}/build/images/"
+ DEPLOY_FILE="${WORKSPACE}/config/deploy/${DEPLOY_SCENARIO}.yaml"
+ NETWORK_FILE="${WORKSPACE}/config/network/network_settings.yaml"
+ # Make sure python34 is installed
+ if ! rpm -q python34 > /dev/null; then
+ sudo yum install -y epel-release
+ if ! sudo yum install -y python34; then
+ echo "Failed to install python34"
+ exit 1
+ fi
+ fi
+ if ! rpm -q python34-PyYAML > /dev/null; then
+ sudo yum install -y epel-release
+ if ! sudo yum install -y python34-PyYAML; then
+ echo "Failed to install python34-PyYAML"
+ exit 1
+ fi
+ fi
+ if ! rpm -q python34-setuptools > /dev/null; then
+ if ! sudo yum install -y python34-setuptools; then
+ echo "Failed to install python34-setuptools"
+ exit 1
+ fi
+ fi
+ if [ -z ${PYTHONPATH:-} ]; then
+ export PYTHONPATH=${WORKSPACE}/lib/python
+ else
+ export PYTHONPATH=$PYTHONPATH:${WORKSPACE}/lib/python
+ fi
+else
+ VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-[0-9]{8}')
+ for pkg in common undercloud opendaylight-sfc onos; do
+ RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}.noarch.rpm"
+ done
+
+ # update / install the new rpm
+ if rpm -q opnfv-apex > /dev/null; then
+ INSTALLED_RPMS=$(rpm -qa | grep apex)
+ for x in $INSTALLED_RPMS; do
+ INSTALLED_RPM_VER=$(echo $x | grep -Eo '[0-9]+\.[0-9]+-[0-9]{8}')
+ # Does each RPM's version match the version required for deployment
+ if [ "$INSTALLED_RPM_VER" == "$VERSION_EXTENSION" ]; then
+ echo "RPM $x is already installed"
+ else
+ echo "RPM $x does not match version $VERSION_EXTENSION"
+ echo "Will upgrade/downgrade RPMs..."
+ # Try to upgrade/downgrade RPMS
+ if sudo yum update -y $RPM_LIST | grep "does not update installed package"; then
+ if ! sudo yum downgrade -y $RPM_LIST; then
+ sudo yum remove -y opnfv-apex-undercloud opnfv-apex-common opnfv-apex-opendaylight-sfc opnfv-apex-onos
+ if ! sudo yum downgrade -y $RPM_LIST; then
+ echo "Unable to downgrade RPMs: $RPM_LIST"
+ exit 1
+ fi
+ fi
+ fi
+ break
+ fi
+ done
+ else
+ sudo yum install -y $RPM_LIST;
+ fi
+ DEPLOY_CMD=opnfv-deploy
+ DEPLOY_FILE="/etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml"
+ NETWORK_FILE="/etc/opnfv-apex/network_settings.yaml"
+ export RESOURCES="/var/opt/opnfv/images"
+ export CONFIG="/var/opt/opnfv"
+fi
+
+if [ "$OPNFV_CLEAN" == 'yes' ]; then
+ if [[ $BUILD_DIRECTORY == *verify-master* ]]; then
+ sudo CONFIG=${WORKSPACE}/build ./clean.sh
+ else
+ sudo opnfv-clean
+ fi
+fi
+# initiate virtual deployment
+echo "Deploy Scenario set to ${DEPLOY_SCENARIO}"
+if [ -e $DEPLOY_FILE ]; then
+ sudo $DEPLOY_CMD -v -d ${DEPLOY_FILE} -n $NETWORK_FILE --debug
+else
+ echo "File does not exist /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml"
+ exit 1
+fi
+echo
+echo "--------------------------------------------------------"
+echo "Done!"
diff --git a/jjb/apex/apex-gs-cleanup.sh b/jjb/apex/apex-gs-cleanup.sh
new file mode 100755
index 000000000..1629aa85e
--- /dev/null
+++ b/jjb/apex/apex-gs-cleanup.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
+
+# log info to console
+echo "Cleaning Google Storage"
+echo "-----------------------"
+echo
+
+thirty_days_ago=$(date -d "30 days ago" +"%Y%m%d")
+
+for i in $(gsutil ls gs://$GS_URL/*201?*); do
+ filedate=$(date -d "$(echo $i | grep -Eo 201[0-9]-?[0-9][0-9]-?[0-9][0-9])" +"%Y%m%d")
+ if [ $filedate -lt $thirty_days_ago ]; then
+ # gsutil indicates what it is removing so no need for output here
+ gsutil rm $i
+ fi
+done
diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh
new file mode 100755
index 000000000..d148258c0
--- /dev/null
+++ b/jjb/apex/apex-upload-artifact.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
+
+# log info to console
+echo "Uploading the Apex artifact. This could take some time..."
+echo "--------------------------------------------------------"
+echo
+
+# source the opnfv.properties to get ARTIFACT_VERSION
+source $WORKSPACE/opnfv.properties
+
+# upload artifact and additional files to google storage
+gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1
+RPM_INSTALL_PATH=$BUILD_DIRECTORY/noarch
+RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
+VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//')
+for pkg in common undercloud opendaylight-sfc onos; do
+ RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
+done
+SRPM_INSTALL_PATH=$BUILD_DIRECTORY
+SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL)
+for pkg in common undercloud opendaylight-sfc onos; do
+ SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
+done
+for artifact in $RPM_LIST $SRPM_LIST; do
+ gsutil cp $artifact gs://$GS_URL/$(basename $artifact) > gsutil.iso.log 2>&1
+done
+gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
+gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
+
+echo
+echo "--------------------------------------------------------"
+echo "Done!"
+echo "ISO Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
+echo "RPM Artifact is available as http://$GS_URL/$(basename $OPNFV_RPM_URL)"
diff --git a/jjb/apex/apex-workspace-cleanup.sh b/jjb/apex/apex-workspace-cleanup.sh
new file mode 100755
index 000000000..d2f71a562
--- /dev/null
+++ b/jjb/apex/apex-workspace-cleanup.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
+
+# delete everything that is in $WORKSPACE
+sudo /bin/rm -rf $WORKSPACE
diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index 115e75bf8..ed06113d5 100644
--- a/jjb/apex/apex.yml
+++ b/jjb/apex/apex.yml
@@ -732,386 +732,38 @@
- builder:
name: 'apex-build'
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o nounset
- set -o pipefail
- # log info to console
- echo "Starting the build of Apex using OpenStack Master packages. This will take some time..."
- echo "---------------------------------------------------------------------------------------"
- echo
- # create the cache directory if it doesn't exist
- [[ -d $CACHE_DIRECTORY ]] || mkdir -p $CACHE_DIRECTORY
- # set OPNFV_ARTIFACT_VERSION
- if echo $BUILD_TAG | grep "apex-verify" 1> /dev/null; then
- if echo $GERRIT_BRANCH | grep "brahmaputra" 1> /dev/null; then
- export OPNFV_ARTIFACT_VERSION=brahmaputra-dev${BUILD_NUMBER}
- export BUILD_ARGS="-v $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY"
- else
- export OPNFV_ARTIFACT_VERSION=dev${BUILD_NUMBER}
- export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY"
- fi
- elif [ "$ARTIFACT_VERSION" == "daily" ]; then
- if echo $GERRIT_BRANCH | grep "brahmaputra" 1> /dev/null; then
- export OPNFV_ARTIFACT_VERSION=brahmaputra-$(date -u +"%Y-%m-%d")
- export BUILD_ARGS="-v $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY"
- else
- export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d")
- export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY --iso"
- fi
- else
- export OPNFV_ARTIFACT_VERSION=${ARTIFACT_VERSION}
- if echo $GERRIT_BRANCH | grep "brahmaputra" 1> /dev/null; then
- export BUILD_ARGS="-v $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY"
- else
- export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY --iso"
- fi
- fi
- # clean for stable but doesn't matter for master
- if echo $GERRIT_BRANCH | grep "brahmaputra" 1> /dev/null; then
- sudo opnfv-clean
- fi
- # start the build
- cd $WORKSPACE/ci
- ./build.sh $BUILD_ARGS
- RPM_VERSION=$(grep Version: $BUILD_DIRECTORY/opnfv-apex.spec | awk '{ print $2 }')-$(echo $OPNFV_ARTIFACT_VERSION | tr -d '_-')
- # list the contents of BUILD_OUTPUT directory
- ls -al $BUILD_DIRECTORY
- # save information regarding artifact into file
- (
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
- echo "OPNFV_ARTIFACT_MD5SUM=$(md5sum $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
- echo "OPNFV_SRPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.src.rpm"
- echo "OPNFV_RPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.noarch.rpm"
- echo "OPNFV_RPM_MD5SUM=$(md5sum $BUILD_DIRECTORY/noarch/opnfv-apex-$RPM_VERSION.noarch.rpm | cut -d' ' -f1)"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
- ) > $WORKSPACE/opnfv.properties
- echo "--------------------------------------------------------"
- echo "Done!"
+ - shell:
+ !include-raw: ./apex-build.sh
- builder:
name: 'apex-workspace-cleanup'
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o nounset
- set -o pipefail
-
- # delete everything that is in $WORKSPACE
- sudo /bin/rm -rf $WORKSPACE
+ - shell:
+ !include-raw: ./apex-workspace-cleanup.sh
- builder:
name: 'apex-upload-artifact'
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o nounset
- set -o pipefail
-
- # log info to console
- echo "Uploading the Apex artifact. This could take some time..."
- echo "--------------------------------------------------------"
- echo
-
- # source the opnfv.properties to get ARTIFACT_VERSION
- source $WORKSPACE/opnfv.properties
-
- # upload artifact and additional files to google storage
- gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1
- RPM_INSTALL_PATH=$BUILD_DIRECTORY/noarch
- RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
- VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//')
- for pkg in common undercloud opendaylight-sfc onos; do
- RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
- done
- SRPM_INSTALL_PATH=$BUILD_DIRECTORY
- SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL)
- for pkg in common undercloud opendaylight-sfc onos; do
- SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
- done
- for artifact in $RPM_LIST $SRPM_LIST; do
- gsutil cp $artifact gs://$GS_URL/$(basename $artifact) > gsutil.iso.log 2>&1
- done
- gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
- gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
-
- echo
- echo "--------------------------------------------------------"
- echo "Done!"
- echo "ISO Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
- echo "RPM Artifact is available as http://$GS_URL/$(basename $OPNFV_RPM_URL)"
+ - shell:
+ !include-raw: ./apex-upload-artifact.sh
- builder:
name: 'apex-gs-cleanup'
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o nounset
- set -o pipefail
-
- # log info to console
- echo "Cleaning Google Storage"
- echo "-----------------------"
- echo
-
- thirty_days_ago=$(date -d "30 days ago" +"%Y%m%d")
-
- for i in $(gsutil ls gs://$GS_URL/*201?*); do
- filedate=$(date -d "$(echo $i | grep -Eo 201[0-9]-?[0-9][0-9]-?[0-9][0-9])" +"%Y%m%d")
- if [ $filedate -lt $thirty_days_ago ]; then
- # gsutil indicates what it is removing so no need for output here
- gsutil rm $i
- fi
- done
+ - shell:
+ !include-raw: ./apex-gs-cleanup.sh
- builder:
name: 'apex-deploy-virtual'
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o nounset
- set -o pipefail
-
- # log info to console
- echo "Starting the Apex virtual deployment."
- echo "--------------------------------------------------------"
- echo
-
- if [[ $BUILD_DIRECTORY == *verify-master* ]]; then
- cd $WORKSPACE/../${BUILD_DIRECTORY/build_output/}
- WORKSPACE=$(pwd)
- echo "WORKSPACE modified to $WORKSPACE"
- cd $WORKSPACE/ci
- elif [[ ! "$ARTIFACT_NAME" == "latest" ]]; then
- # if artifact name is passed the pull a
- # specific artifact from artifacts.opnfv.org
- RPM_INSTALL_PATH=$GS_URL
- RPM_LIST=$RPM_INSTALL_PATH/$ARTIFACT_NAME
- else
- if [[ $BUILD_DIRECTORY == *verify* ]]; then
- BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY
- echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY"
- elif [[ $BUILD_DIRECTORY == *apex-build* ]]; then
- BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY
- echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY"
- fi
-
- if [[ -f ${BUILD_DIRECTORY}/../opnfv.properties ]]; then
- # if opnfv.properties exists then use the
- # local build. Source the file so we get local OPNFV vars
- source ${BUILD_DIRECTORY}/../opnfv.properties
- RPM_INSTALL_PATH=${BUILD_DIRECTORY}/noarch
- RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
- else
- if [[ $BUILD_DIRECTORY == *verify* ]]; then
- echo "BUILD_DIRECTORY is from a verify job, so will not use latest from URL"
- echo "Check that the slave has opnfv.properties in $BUILD_DIRECTORY"
- exit 1
- elif [[ $BUILD_DIRECTORY == *apex-build* ]]; then
- echo "BUILD_DIRECTORY is from a daily job, so will not use latest from URL"
- echo "Check that the slave has opnfv.properties in $BUILD_DIRECTORY"
- exit 1
- fi
- # no opnfv.properties means use the latest from artifacts.opnfv.org
- # get the latest.properties to get the link to the latest artifact
- curl -s -o $WORKSPACE/opnfv.properties http://$GS_URL/latest.properties
- [[ -f opnfv.properties ]] || exit 1
- # source the file so we get OPNFV vars
- source opnfv.properties
- RPM_INSTALL_PATH=$(echo $OPNFV_RPM_URL | sed 's/'"$(basename $OPNFV_RPM_URL)"'//')
- RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
- fi
- fi
-
- if [ -z "$DEPLOY_SCENARIO" ]; then
- echo "Deploy scenario not set!"
- exit 1
- fi
-
- # use local build for verify
- if [[ $BUILD_DIRECTORY == *verify-master* ]]; then
- if [ ! -e "${WORKSPACE}/build/lib" ]; then ln -s ${WORKSPACE}/lib ${WORKSPACE}/build/lib; fi
- DEPLOY_CMD="CONFIG=${WORKSPACE}/build RESOURCES=${WORKSPACE}/build/images/ ./deploy.sh -c ${WORKSPACE}/build -r ${WORKSPACE}/build/images/"
- DEPLOY_FILE="${WORKSPACE}/config/deploy/${DEPLOY_SCENARIO}.yaml"
- NETWORK_FILE="${WORKSPACE}/config/network/network_settings.yaml"
- # Make sure python34 is installed
- if ! rpm -q python34 > /dev/null; then
- sudo yum install -y epel-release
- if ! sudo yum install -y python34; then
- echo "Failed to install python34"
- exit 1
- fi
- fi
- if ! rpm -q python34-PyYAML > /dev/null; then
- sudo yum install -y epel-release
- if ! sudo yum install -y python34-PyYAML; then
- echo "Failed to install python34-PyYAML"
- exit 1
- fi
- fi
- if ! rpm -q python34-setuptools > /dev/null; then
- if ! sudo yum install -y python34-setuptools; then
- echo "Failed to install python34-setuptools"
- exit 1
- fi
- fi
- if [ -z ${PYTHONPATH:-} ]; then
- export PYTHONPATH=${WORKSPACE}/lib/python
- else
- export PYTHONPATH=$PYTHONPATH:${WORKSPACE}/lib/python
- fi
- else
- VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | sed 's/opnfv-apex-//')
- for pkg in common undercloud opendaylight-sfc onos; do
- RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
- done
-
- # update / install the new rpm
- if rpm -q opnfv-apex > /dev/null; then
- INSTALLED_RPMS=$(rpm -qa | grep apex)
- for x in $INSTALLED_RPMS; do
- INSTALLED_RPM_VER=$(echo $x | sed 's/opnfv-apex-//').rpm
- # Does each RPM's version match the version required for deployment
- if [ "$INSTALLED_RPM_VER" == "$VERSION_EXTENSION" ]; then
- echo "RPM $x is already installed"
- else
- echo "RPM $x does not match $VERSION_EXTENSION"
- echo "Will upgrade/downgrade RPMs..."
- # Try to upgrade/downgrade RPMS
- if sudo yum update -y $RPM_LIST | grep "does not update installed package"; then
- if ! sudo yum downgrade -y $RPM_LIST; then
- sudo yum remove -y opnfv-apex-undercloud opnfv-apex-common opnfv-apex-opendaylight-sfc opnfv-apex-onos
- sudo yum downgrade -y $RPM_INSTALL_PATH
- fi
- fi
- break
- fi
- done
- else
- sudo yum install -y $RPM_LIST;
- fi
- DEPLOY_CMD=opnfv-deploy
- DEPLOY_FILE="/etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml"
- NETWORK_FILE="/etc/opnfv-apex/network_settings.yaml"
- export RESOURCES="/var/opt/opnfv/images"
- export CONFIG="/var/opt/opnfv"
- fi
-
- if [ "$OPNFV_CLEAN" == 'yes' ]; then
- if [[ $BUILD_DIRECTORY == *verify-master* ]]; then
- sudo CONFIG=${WORKSPACE}/build ./clean.sh
- else
- sudo opnfv-clean
- fi
- fi
- # initiate virtual deployment
- echo "Deploy Scenario set to ${DEPLOY_SCENARIO}"
- if [ -e $DEPLOY_FILE ]; then
- sudo $DEPLOY_CMD -v -d ${DEPLOY_FILE} -n $NETWORK_FILE --debug
- else
- echo "File does not exist /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml"
- exit 1
- fi
- echo
- echo "--------------------------------------------------------"
- echo "Done!"
+ - shell:
+ !include-raw: ./apex-deploy-virtual.sh
- builder:
name: 'apex-deploy-baremetal'
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o nounset
- set -o pipefail
-
- # log info to console
- echo "Starting the Apex baremetal deployment."
- echo "--------------------------------------------------------"
- echo
-
- if [[ ! "$ARTIFACT_NAME" == "latest" ]]; then
- # if artifact name is passed the pull a
- # specific artifact from artifacts.opnfv.org
- RPM_INSTALL_PATH=$GS_URL/$ARTIFACT_NAME
- else
- if [[ $BUILD_DIRECTORY == *apex-build* ]]; then
- BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY
- echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY"
- fi
- if [[ -f ${BUILD_DIRECTORY}/../opnfv.properties ]]; then
- # if opnfv.properties exists then use the
- # local build. Source the file so we get local OPNFV vars
- source ${BUILD_DIRECTORY}/../opnfv.properties
- RPM_INSTALL_PATH=${BUILD_DIRECTORY}/$(basename $OPNFV_RPM_URL)
- else
- # no opnfv.properties means use the latest from artifacts.opnfv.org
- # get the latest.properties to get the link to the latest artifact
- curl -s -o $WORKSPACE/opnfv.properties http://$GS_URL/latest.properties
- [[ -f opnfv.properties ]] || exit 1
- # source the file so we get OPNFV vars
- source opnfv.properties
- RPM_INSTALL_PATH=$OPNFV_RPM_URL
- fi
- fi
-
- if [ ! -e "$RPM_INSTALL_PATH" ]; then
- RPM_INSTALL_PATH=http://${OPNFV_RPM_URL}
- fi
-
- RPM_LIST=$RPM_INSTALL_PATH
- for pkg in common undercloud; do
- RPM_LIST+=" ${RPM_INSTALL_PATH/opnfv-apex/opnfv-apex-${pkg}}"
- done
-
- # update / install the new rpm
- if rpm -q opnfv-apex > /dev/null; then
- if [ $(basename $OPNFV_RPM_URL) == $(rpm -q opnfv-apex).rpm ]; then
- echo "RPM is already installed"
- elif sudo yum update -y $RPM_LIST | grep "does not update installed package"; then
- if ! sudo yum downgrade -y $RPM_LIST; then
- sudo yum remove -y opnfv-undercloud opnfv-common
- sudo yum downgrade -y $RPM_INSTALL_PATH
- fi
- fi
- else
- sudo yum install -y $RPM_LIST;
- fi
-
- # cleanup environment before we start
- sudo opnfv-clean
- # initiate baremetal deployment
- if [ -e /etc/opnfv-apex/network_settings.yaml ]; then
- if [ -n "$DEPLOY_SCENARIO" ]; then
- echo "Deploy Scenario set to ${DEPLOY_SCENARIO}"
- if [ -e /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml ]; then
- sudo opnfv-deploy -i /root/inventory/pod_settings.yaml \
- -d /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml \
- -n /root/network/network_settings.yaml --debug
- else
- echo "File does not exist /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml"
- exit 1
- fi
- else
- echo "Deploy scenario not set!"
- exit 1
- fi
- else
- echo "File /etc/opnfv-apex/network_settings.yaml does not exist!"
- exit 1
- fi
-
- echo
- echo "--------------------------------------------------------"
- echo "Done!"
+ - shell:
+ !include-raw: ./apex-deploy-baremetal.sh
#######################
# trigger macros
diff --git a/jjb/fuel/fuel-ci-jobs.yml b/jjb/fuel/fuel-ci-jobs.yml
index f6c21e72e..5d2b91db8 100644
--- a/jjb/fuel/fuel-ci-jobs.yml
+++ b/jjb/fuel/fuel-ci-jobs.yml
@@ -18,32 +18,35 @@
stream: brahmaputra
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ auto-trigger-name: 'daily-trigger-disabled'
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
#--------------------------------
-# brahmaputra
+# CI PODs
#--------------------------------
pod:
- - lf-pod2:
- <<: *brahmaputra
- - ericsson-pod1:
- <<: *brahmaputra
- - ericsson-pod2:
+ - baremetal:
+ slave-label: fuel-baremetal
+ <<: *master
+ - virtual:
+ slave-label: fuel-virtual
+ <<: *master
+ - baremetal:
+ slave-label: fuel-baremetal
<<: *brahmaputra
- virtual:
+ slave-label: fuel-virtual
<<: *brahmaputra
-#--------------------------------
-# master
-#--------------------------------
+
+# just in case if things go wrong
- lf-pod2:
+ slave-label: '{pod}'
<<: *master
- - ericsson-pod1:
- <<: *master
- - ericsson-pod2:
- <<: *master
- - virtual:
- <<: *master
+#--------------------------------
+# None-CI PODs
+#--------------------------------
- zte-pod1:
+ slave-label: zte-pod1
<<: *master
#--------------------------------
# scenarios
@@ -51,44 +54,23 @@
scenario:
# HA scenarios
- 'os-nosdn-nofeature-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger'
+ auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-odl_l2-nofeature-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger'
+ auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-odl_l3-nofeature-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger'
+ auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-onos-nofeature-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger'
+ auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-odl_l2-bgpvpn-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger'
+ auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-odl_l2-sfc-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger'
+ auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-nosdn-kvm-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger'
+ auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-nosdn-ovs-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger'
+ auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-nosdn-kvm_ovs-ha':
- auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- - 'os-nosdn-vlan-ha':
- auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- # NOHA scenarios
- - 'os-odl_l2-nofeature-noha':
- auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- - 'os-odl_l3-nofeature-noha':
- auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- - 'os-onos-nofeature-noha':
- auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- - 'os-nosdn-nofeature-noha':
- auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- - 'os-nosdn-kvm-noha':
- auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- - 'os-nosdn-kvm_ovs-noha':
- auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- - 'os-nosdn-ovs-noha':
- auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- - 'os-odl_l2-bgpvpn-noha':
- auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- - 'os-odl_l2-sfc-noha':
- auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+ auto-trigger-name: 'daily-trigger-disabled'
jobs:
- 'fuel-{scenario}-{pod}-daily-{stream}'
@@ -105,7 +87,7 @@
properties:
- throttle:
enabled: true
- max-total: 1
+ max-total: 4
max-per-node: 1
- build-blocker:
use-build-blocker: true
@@ -124,7 +106,7 @@
- project-parameter:
project: '{project}'
- '{installer}-defaults'
- - '{pod}-defaults':
+ - '{slave-label}-defaults':
installer: '{installer}'
- string:
name: DEPLOY_SCENARIO
@@ -184,7 +166,7 @@
- project-parameter:
project: '{project}'
- '{installer}-defaults'
- - '{pod}-defaults':
+ - '{slave-label}-defaults':
installer: '{installer}'
- string:
name: DEPLOY_SCENARIO
@@ -236,352 +218,154 @@
# os-nosdn-nofeature-ha trigger
# CI PODs
#-----------------------------------------------
-# LF POD2 Triggers running against master branch
+# Triggers for job running on fuel-baremetal against master branch
#-----------------------------------------------
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-lf-pod2-master-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- timed: '0 0 * * *'
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-lf-pod2-master-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- timed: '0 3 * * *'
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-lf-pod2-master-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- timed: '0 6 * * *'
- trigger:
- name: 'fuel-os-onos-nofeature-ha-lf-pod2-master-trigger'
+ name: 'fuel-os-onos-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- timed: '0 9 * * *'
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-lf-pod2-master-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-master-trigger'
triggers:
- timed: '0 12 * * *'
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-lf-pod2-master-trigger'
+ name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-master-trigger'
triggers:
- timed: '0 15 * * *'
- trigger:
- name: 'fuel-os-nosdn-kvm-ha-lf-pod2-master-trigger'
+ name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-master-trigger'
triggers:
- timed: '0 18 * * *'
- trigger:
- name: 'fuel-os-nosdn-ovs-ha-lf-pod2-master-trigger'
+ name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-master-trigger'
triggers:
- timed: '0 21 * * *'
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-lf-pod2-master-trigger'
+ name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-master-trigger'
triggers:
- timed: ''
#-----------------------------------------------
-# Ericsson POD2 Triggers running against brahmaputra branch
+# Triggers for job running on lf-pod2 against master branch
#-----------------------------------------------
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-ericsson-pod2-brahmaputra-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-lf-pod2-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-ericsson-pod2-brahmaputra-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-lf-pod2-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-ericsson-pod2-brahmaputra-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-lf-pod2-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-nofeature-ha-ericsson-pod2-brahmaputra-trigger'
+ name: 'fuel-os-onos-nofeature-ha-lf-pod2-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-ericsson-pod2-brahmaputra-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-lf-pod2-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-ericsson-pod2-brahmaputra-trigger'
+ name: 'fuel-os-odl_l2-sfc-ha-lf-pod2-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-ha-ericsson-pod2-brahmaputra-trigger'
+ name: 'fuel-os-nosdn-kvm-ha-lf-pod2-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-ovs-ha-ericsson-pod2-brahmaputra-trigger'
+ name: 'fuel-os-nosdn-ovs-ha-lf-pod2-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-ericsson-pod2-brahmaputra-trigger'
+ name: 'fuel-os-nosdn-kvm-noha-lf-pod2-daily-master-trigger'
triggers:
- timed: ''
-
#-----------------------------------------------
-# ZTE POD1 Triggers running against master branch
+# Triggers for job running on fuel-virtual against master branch
#-----------------------------------------------
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-master-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-virtual-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-master-trigger'
- triggers:
- - timed: '0 12 * * *'
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-master-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-virtual-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-nofeature-ha-zte-pod1-master-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-virtual-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-master-trigger'
+ name: 'fuel-os-onos-nofeature-ha-virtual-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-zte-pod1-master-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-virtual-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-ha-zte-pod1-master-trigger'
+ name: 'fuel-os-odl_l2-sfc-ha-virtual-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-ovs-ha-zte-pod1-master-trigger'
- triggers:
- - timed: ''
-#-----------------------------------------------
-# Triggers for other PODs
-#-----------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-ericsson-pod1-master-trigger'
+ name: 'fuel-os-nosdn-kvm-ha-virtual-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-ericsson-pod2-master-trigger'
+ name: 'fuel-os-nosdn-ovs-ha-virtual-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-lf-pod2-brahmaputra-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-ericsson-pod1-brahmaputra-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-virtual-brahmaputra-trigger'
- triggers:
- - timed: ''
-
-# os-nosdn-kvm-ha trigger
-- trigger:
- name: 'fuel-os-nosdn-kvm-ha-ericsson-pod1-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm-ha-ericsson-pod2-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm-ha-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm-ha-lf-pod2-brahmaputra-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm-ha-ericsson-pod1-brahmaputra-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm-ha-virtual-brahmaputra-trigger'
- triggers:
- - timed: ''
-
-# os-odl_l2-nofeature-ha trigger
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-ericsson-pod1-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-ericsson-pod2-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-lf-pod2-brahmaputra-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-ericsson-pod1-brahmaputra-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-virtual-brahmaputra-trigger'
- triggers:
- - timed: ''
-
-# os-odl_l3-nofeature-ha trigger
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-ericsson-pod1-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-ericsson-pod2-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-lf-pod2-brahmaputra-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-ericsson-pod1-brahmaputra-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-virtual-brahmaputra-trigger'
- triggers:
- - timed: ''
-
-# os-onos-nofeature-ha trigger
-- trigger:
- name: 'fuel-os-onos-nofeature-ha-ericsson-pod1-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-nofeature-ha-ericsson-pod2-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-nofeature-ha-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-nofeature-ha-lf-pod2-brahmaputra-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-nofeature-ha-ericsson-pod1-brahmaputra-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-nofeature-ha-virtual-brahmaputra-trigger'
- triggers:
- - timed: ''
-
-
-# os-nosdn-ovs-ha trigger
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-ericsson-pod1-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-ericsson-pod2-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-lf-pod2-brahmaputra-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-ericsson-pod1-brahmaputra-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-virtual-brahmaputra-trigger'
- triggers:
- - timed: ''
-
-# os-odl_l2-bgpvpn-ha trigger
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-ericsson-pod1-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-ericsson-pod2-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-lf-pod2-brahmaputra-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-ericsson-pod1-brahmaputra-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-virtual-brahmaputra-trigger'
- triggers:
- - timed: ''
-
-# os-odl_l2-sfc-ha trigger
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-ericsson-pod1-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-ericsson-pod2-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-lf-pod2-brahmaputra-trigger'
+ name: 'fuel-os-nosdn-kvm-noha-virtual-daily-master-trigger'
triggers:
- timed: ''
+#-----------------------------------------------
+# ZTE POD1 Triggers running against master branch
+#-----------------------------------------------
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-ericsson-pod1-brahmaputra-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-virtual-brahmaputra-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-daily-master-trigger'
triggers:
- - timed: ''
-
-# os-nosdn-kvm-ha trigger
+ - timed: '0 12 * * *'
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-ericsson-pod1-master-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-ericsson-pod2-master-trigger'
+ name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-virtual-master-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-lf-pod2-brahmaputra-trigger'
+ name: 'fuel-os-odl_l2-sfc-ha-zte-pod1-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-ericsson-pod1-brahmaputra-trigger'
+ name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-virtual-brahmaputra-trigger'
+ name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-master-trigger'
triggers:
- timed: ''
diff --git a/jjb/fuel/fuel-project-jobs.yml b/jjb/fuel/fuel-project-jobs.yml
index eee588d24..4e59c01d8 100755..100644
--- a/jjb/fuel/fuel-project-jobs.yml
+++ b/jjb/fuel/fuel-project-jobs.yml
@@ -232,7 +232,7 @@
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'virtual-defaults':
+ - 'fuel-virtual-defaults':
installer: '{installer}'
- '{installer}-defaults'
- fuel-project-parameter:
diff --git a/jjb/functest/functest-ci-jobs.yml b/jjb/functest/functest-ci-jobs.yml
index 5db40a3d7..1ccaa1b10 100644
--- a/jjb/functest/functest-ci-jobs.yml
+++ b/jjb/functest/functest-ci-jobs.yml
@@ -22,90 +22,119 @@
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
#--------------------------------
-# brahmaputra
+# Installers using labels
+# CI PODs
+# This section should only contain the installers
+# that have been switched using labels for slaves
#--------------------------------
pod:
- - lf-pod1:
- installer: apex
- <<: *brahmaputra
- - intel-pod5:
- installer: joid
- <<: *brahmaputra
- - orange-pod2:
- installer: joid
- <<: *brahmaputra
- - huawei-pod1:
- installer: compass
- <<: *brahmaputra
- - ericsson-pod1:
- installer: fuel
- <<: *brahmaputra
- - ericsson-pod2:
- installer: fuel
- <<: *brahmaputra
- - lf-pod2:
+ - baremetal:
+ slave-label: fuel-baremetal
installer: fuel
- <<: *brahmaputra
+ <<: *master
- virtual:
+ slave-label: fuel-virtual
+ installer: fuel
+ <<: *master
+ - baremetal:
+ slave-label: fuel-baremetal
installer: fuel
- <<: *brahmaputra
- - huawei-virtual:
- installer: compass
<<: *brahmaputra
- virtual:
- installer: joid
- <<: *brahmaputra
- - arm-pod1:
+ slave-label: fuel-virtual
installer: fuel
<<: *brahmaputra
-#--------------------------------
-# master
-#--------------------------------
+
+# just in case things go wrong
- lf-pod2:
+ slave-label: fuel-baremetal
installer: fuel
<<: *master
- - intel-pod6:
- installer: joid
+#--------------------------------
+# Installers not using labels
+# CI PODs
+# This section should only contain the installers
+# that have not been switched using labels for slaves
+#--------------------------------
+ - intel-pod7:
+ slave-label: '{pod}'
+ installer: apex
<<: *master
+ - lf-pod1:
+ slave-label: '{pod}'
+ installer: apex
+ <<: *brahmaputra
- intel-pod8:
+ slave-label: '{pod}'
installer: compass
<<: *master
- - intel-pod7:
- installer: apex
- <<: *master
- - ericsson-pod1:
- installer: fuel
+ - huawei-pod1:
+ slave-label: '{pod}'
+ installer: compass
+ <<: *brahmaputra
+ - intel-pod6:
+ slave-label: '{pod}'
+ installer: joid
<<: *master
+ - intel-pod5:
+ slave-label: '{pod}'
+ installer: joid
+ <<: *brahmaputra
+#--------------------------------
+# Non-CI PODs
+#--------------------------------
+ - orange-pod2:
+ slave-label: '{pod}'
+ installer: joid
+ <<: *brahmaputra
- orange-pod5:
+ slave-label: '{pod}'
installer: fuel
<<: *master
- orange-pod2:
+ slave-label: '{pod}'
installer: joid
<<: *master
- - virtual:
- installer: fuel
- <<: *master
- huawei-virtual:
+ slave-label: '{pod}'
installer: compass
<<: *master
- virtual:
+ slave-label: '{pod}'
installer: joid
<<: *master
- huawei-pod2:
+ slave-label: '{pod}'
installer: compass
<<: *master
- huawei-pod1:
+ slave-label: '{pod}'
installer: compass
<<: *master
- nokia-pod1:
+ slave-label: '{pod}'
installer: apex
<<: *master
- arm-pod1:
+ slave-label: '{pod}'
installer: fuel
<<: *master
- zte-pod1:
+ slave-label: '{pod}'
installer: fuel
<<: *master
+ - arm-pod1:
+ slave-label: '{pod}'
+ installer: fuel
+ <<: *brahmaputra
+ - huawei-virtual:
+ slave-label: '{pod}'
+ installer: compass
+ <<: *brahmaputra
+ - virtual:
+ slave-label: '{pod}'
+ installer: joid
+ <<: *brahmaputra
#--------------------------------
testsuite:
@@ -138,7 +167,7 @@
parameters:
- project-parameter:
project: '{project}'
- - '{pod}-defaults'
+ - '{slave-label}-defaults'
- '{installer}-defaults'
- 'functest-{testsuite}-parameter'
- string:
diff --git a/jjb/opnfv/slave-params.yml b/jjb/opnfv/slave-params.yml
index 7216f7cbc..ddeeaab09 100644
--- a/jjb/opnfv/slave-params.yml
+++ b/jjb/opnfv/slave-params.yml
@@ -1,4 +1,24 @@
- parameter:
+ name: 'fuel-baremetal-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'fuel-baremetal'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+ name: 'fuel-virtual-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'fuel-virtual'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+- parameter:
name: 'lf-pod1-defaults'
parameters:
- node:
@@ -32,14 +52,9 @@
default-slaves:
- lf-pod2
- string:
- name: INSTALLER_VERSION
- default: stable
- description: 'Version of the installer to deploy'
- - string:
name: GIT_BASE
default: ssh://gerrit.opnfv.org:29418/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
-
- parameter:
name: 'ericsson-pod1-defaults'
parameters:
@@ -51,18 +66,9 @@
default-slaves:
- ericsson-pod1
- string:
- name: INSTALLER_VERSION
- default: latest
- description: 'Version of the installer to deploy'
- - string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: POD_CONF_DIR
- default: $WORKSPACE/deploy/templates/hardware_environment/conf/ericsson_montreal_lab/pod1
- description: 'Directory where POD configuration files are located.'
-
- parameter:
name: 'ericsson-pod2-defaults'
parameters:
@@ -74,10 +80,6 @@
default-slaves:
- ericsson-pod2
- string:
- name: INSTALLER_VERSION
- default: latest
- description: 'Version of the installer to deploy'
- - string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
diff --git a/jjb/parser/parser.yml b/jjb/parser/parser.yml
index 5e603f026..2ab1af833 100644
--- a/jjb/parser/parser.yml
+++ b/jjb/parser/parser.yml
@@ -58,4 +58,15 @@
builders:
- shell: |
- echo "Nothing to verify!"
+ #!/bin/bash
+ set -o errexit
+ set -o pipefail
+ set -o xtrace
+ export PATH=$PATH:/usr/local/bin/
+
+ # pep8 check parser/tosca2heat/tosca-parser
+ echo "Running tox -e pep8 on tosca2heat ..."
+ cd $WORKSPACE/parser/tosca2heat/tosca-parser
+ tox -e pep8
+ cd $WORKSPACE/parser/tosca2heat/heat-translator
+ tox -e pep8
diff --git a/jjb/releng-macros.yaml b/jjb/releng-macros.yaml
index d7f9be109..cdbb0bac3 100644
--- a/jjb/releng-macros.yaml
+++ b/jjb/releng-macros.yaml
@@ -57,6 +57,11 @@
fail: true
- trigger:
+ name: 'daily-trigger-disabled'
+ triggers:
+ - timed: ''
+
+- trigger:
name: 'brahmaputra-trigger-daily-disabled'
triggers:
- timed: ''
diff --git a/jjb/yardstick/yardstick-ci-jobs.yml b/jjb/yardstick/yardstick-ci-jobs.yml
index 55a07a520..c6630d7bc 100644
--- a/jjb/yardstick/yardstick-ci-jobs.yml
+++ b/jjb/yardstick/yardstick-ci-jobs.yml
@@ -22,152 +22,148 @@
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
#--------------------------------
-# brahmaputra
+# Installers using labels
+# CI PODs
+# This section should only contain the installers
+# that have been switched using labels for slaves
#--------------------------------
pod:
- - lf-pod1:
- installer: apex
- suite: daily
- auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- <<: *brahmaputra
- - lf-pod2:
+ - baremetal:
+ slave-label: fuel-baremetal
installer: fuel
- suite: daily
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- <<: *brahmaputra
- - intel-pod5:
- installer: joid
- suite: daily
- auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- <<: *brahmaputra
- - orange-pod2:
- installer: joid
- suite: daily
- auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- <<: *brahmaputra
- - huawei-pod1:
- installer: compass
- suite: daily
- auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- <<: *brahmaputra
- - ericsson-pod1:
+ <<: *master
+ - virtual:
+ slave-label: fuel-virtual
installer: fuel
- suite: daily
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- <<: *brahmaputra
- - ericsson-pod2:
+ <<: *master
+ - baremetal:
+ slave-label: fuel-baremetal
installer: fuel
- suite: daily
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
<<: *brahmaputra
- virtual:
+ slave-label: fuel-virtual
installer: fuel
- suite: daily
- auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- <<: *brahmaputra
- - huawei-virtual:
- installer: compass
- suite: daily
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
<<: *brahmaputra
- - virtual:
- installer: joid
- suite: daily
+
+# just in case things go wrong
+ - lf-pod2:
+ slave-label: '{pod}'
+ installer: fuel
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- <<: *brahmaputra
+ <<: *master
#--------------------------------
-# master
+# Installers not using labels
+# CI PODs
+# This section should only contain the installers
+# that have not been switched using labels for slaves
#--------------------------------
- lf-pod1:
+ slave-label: '{pod}'
installer: apex
- suite: daily
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
<<: *master
- - lf-pod2:
- installer: fuel
- suite: daily
+ - lf-pod1:
+ slave-label: '{pod}'
+ installer: apex
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- <<: *master
- - ericsson-pod1:
- installer: fuel
- suite: daily
+ <<: *brahmaputra
+ - intel-pod8:
+ slave-label: '{pod}'
+ installer: compass
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
<<: *master
- - ericsson-pod1:
- installer: fuel
- suite: vtcdaily
- auto-trigger-name: 'yardstick-vtcdaily-ericsson-pod1-trigger'
- <<: *master
- - ericsson-pod2:
- installer: fuel
- suite: daily
+ - huawei-pod1:
+ slave-label: '{pod}'
+ installer: compass
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- <<: *master
+ <<: *brahmaputra
- intel-pod6:
+ slave-label: '{pod}'
installer: joid
- suite: daily
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
<<: *master
- - intel-pod8:
- installer: compass
- suite: daily
+ - intel-pod5:
+ slave-label: '{pod}'
+ installer: joid
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
- <<: *master
+ <<: *brahmaputra
+#--------------------------------
+# Non-CI PODs
+#--------------------------------
+ - orange-pod2:
+ slave-label: '{pod}'
+ installer: joid
+ auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+ <<: *brahmaputra
- zte-pod1:
+ slave-label: '{pod}'
installer: fuel
- suite: daily
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
<<: *master
- orange-pod2:
+ slave-label: '{pod}'
installer: joid
- suite: daily
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
<<: *master
- huawei-pod1:
+ slave-label: '{pod}'
installer: compass
suite: daily
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
<<: *master
- huawei-pod2:
+ slave-label: '{pod}'
installer: compass
- suite: daily
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
<<: *master
- - virtual:
- installer: fuel
- suite: daily
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
<<: *master
- huawei-virtual:
+ slave-label: '{pod}'
installer: compass
- suite: daily
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
<<: *master
- huawei-pod3:
+ slave-label: '{pod}'
installer: compass
- suite: daily
auto-trigger-name: 'yardstick-daily-huawei-pod3-trigger'
<<: *master
- huawei-pod4:
+ slave-label: '{pod}'
installer: compass
- suite: daily
auto-trigger-name: 'yardstick-daily-huawei-pod4-trigger'
<<: *master
- virtual:
+ slave-label: '{pod}'
installer: joid
- suite: daily
auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
<<: *master
+ - huawei-virtual:
+ slave-label: '{pod}'
+ installer: compass
+ auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+ <<: *brahmaputra
+ - virtual:
+ slave-label: '{pod}'
+ installer: joid
+ auto-trigger-name: 'brahmaputra-trigger-daily-disabled'
+ <<: *brahmaputra
#--------------------------------
+ testsuite:
+ - 'daily'
jobs:
- - 'yardstick-{installer}-{pod}-{suite}-{stream}'
+ - 'yardstick-{installer}-{pod}-{testsuite}-{stream}'
################################
# job templates
################################
- job-template:
- name: 'yardstick-{installer}-{pod}-{suite}-{stream}'
+ name: 'yardstick-{installer}-{pod}-{testsuite}-{stream}'
disabled: false
@@ -191,9 +187,9 @@
parameters:
- project-parameter:
project: '{project}'
- - '{pod}-defaults'
+ - '{slave-label}-defaults'
- '{installer}-defaults'
- - 'yardstick-params-{pod}'
+ - 'yardstick-params-{slave-label}'
- string:
name: DEPLOY_SCENARIO
default: 'os-odl_l2-nofeature-ha'
@@ -203,7 +199,7 @@
description: 'Tag to pull docker image'
- string:
name: YARDSTICK_SUITE_NAME
- default: opnfv_${{NODE_NAME}}_{suite}.yaml
+ default: opnfv_${{NODE_NAME}}_{testsuite}.yaml
description: 'Path to test suite'
- string:
name: CI_DEBUG
@@ -219,7 +215,7 @@
builders:
- 'yardstick-cleanup'
#- 'yardstick-fetch-os-creds'
- - 'yardstick-{suite}'
+ - 'yardstick-{testsuite}'
publishers:
- email:
@@ -277,9 +273,8 @@
name: YARDSTICK_DB_BACKEND
default: ''
description: 'Arguments to use in order to choose the backend DB'
-
- parameter:
- name: 'yardstick-params-ericsson-pod1'
+ name: 'yardstick-params-lf-pod1'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
@@ -287,15 +282,14 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-ericsson-pod2'
+ name: 'yardstick-params-lf-pod2'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
default: '-i 104.197.68.199:8086'
description: 'Arguments to use in order to choose the backend DB'
-
- parameter:
- name: 'yardstick-params-lf-pod1'
+ name: 'yardstick-params-fuel-baremetal'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
@@ -303,11 +297,11 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-lf-pod2'
+ name: 'yardstick-params-fuel-virtual'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
+ default: ''
description: 'Arguments to use in order to choose the backend DB'
- parameter:
@@ -379,11 +373,6 @@
#######################
# trigger for PODs to only run yardstick test suites
- trigger:
- name: 'yardstick-vtcdaily-ericsson-pod1-trigger'
- triggers:
- - timed: '0 1 * * *'
-
-- trigger:
name: 'yardstick-daily-huawei-pod3-trigger'
triggers:
- timed: '0 1 * * *'