Diffstat (limited to 'jjb')
-rwxr-xr-x | jjb/apex/apex-deploy-baremetal.sh        |  84
-rwxr-xr-x | jjb/apex/apex-deploy-virtual.sh          | 151
-rwxr-xr-x | jjb/apex/apex-deploy.sh                  | 168
-rw-r--r-- | jjb/apex/apex.yml                        |  13
-rw-r--r-- | jjb/armband/armband-ci-jobs.yml          |   2
-rw-r--r-- | jjb/compass4nfv/compass-ci-jobs.yml      |  93
-rw-r--r-- | jjb/compass4nfv/compass-project-jobs.yml |   4
-rw-r--r-- | jjb/functest/functest-ci-jobs.yml        |  38
-rw-r--r-- | jjb/opnfv/slave-params.yml               |  47
-rw-r--r-- | jjb/qtip/qtip-ci-jobs.yml                |   4
-rw-r--r-- | jjb/releng-macros.yaml                   |   5
-rw-r--r-- | jjb/yardstick/yardstick-ci-jobs.yml      |  84
12 files changed, 290 insertions, 403 deletions
diff --git a/jjb/apex/apex-deploy-baremetal.sh b/jjb/apex/apex-deploy-baremetal.sh deleted file mode 100755 index efb6561d7..000000000 --- a/jjb/apex/apex-deploy-baremetal.sh +++ /dev/null @@ -1,84 +0,0 @@ -#!/bin/bash -set -o errexit -set -o nounset -set -o pipefail - -# log info to console -echo "Starting the Apex baremetal deployment." -echo "--------------------------------------------------------" -echo - -if [[ ! "$ARTIFACT_NAME" == "latest" ]]; then - # if artifact name is passed the pull a - # specific artifact from artifacts.opnfv.org - RPM_INSTALL_PATH=$GS_URL/$ARTIFACT_NAME -else - if [[ $BUILD_DIRECTORY == *apex-build* ]]; then - BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY - echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY" - fi - if [[ -f ${BUILD_DIRECTORY}/../opnfv.properties ]]; then - # if opnfv.properties exists then use the - # local build. Source the file so we get local OPNFV vars - source ${BUILD_DIRECTORY}/../opnfv.properties - RPM_INSTALL_PATH=${BUILD_DIRECTORY}/$(basename $OPNFV_RPM_URL) - else - # no opnfv.properties means use the latest from artifacts.opnfv.org - # get the latest.properties to get the link to the latest artifact - curl -s -o $WORKSPACE/opnfv.properties http://$GS_URL/latest.properties - [[ -f opnfv.properties ]] || exit 1 - # source the file so we get OPNFV vars - source opnfv.properties - RPM_INSTALL_PATH=$OPNFV_RPM_URL - fi -fi - -if [ ! -e "$RPM_INSTALL_PATH" ]; then - RPM_INSTALL_PATH=http://${OPNFV_RPM_URL} -fi - -RPM_LIST=$RPM_INSTALL_PATH -for pkg in common undercloud; do - RPM_LIST+=" ${RPM_INSTALL_PATH/opnfv-apex/opnfv-apex-${pkg}}" -done - -# update / install the new rpm -if rpm -q opnfv-apex > /dev/null; then - if [ $(basename $OPNFV_RPM_URL) == $(rpm -q opnfv-apex).rpm ]; then - echo "RPM is already installed" - elif sudo yum update -y $RPM_LIST | grep "does not update installed package"; then - if ! sudo yum downgrade -y $RPM_LIST; then - sudo yum remove -y opnfv-undercloud opnfv-common - sudo yum downgrade -y $RPM_INSTALL_PATH - fi - fi -else - sudo yum install -y $RPM_LIST; -fi - -# cleanup environment before we start -sudo opnfv-clean -# initiate baremetal deployment -if [ -e /etc/opnfv-apex/network_settings.yaml ]; then - if [ -n "$DEPLOY_SCENARIO" ]; then - echo "Deploy Scenario set to ${DEPLOY_SCENARIO}" - if [ -e /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml ]; then - sudo opnfv-deploy -i /root/inventory/pod_settings.yaml \ - -d /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml \ - -n /root/network/network_settings.yaml --debug - else - echo "File does not exist /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml" - exit 1 - fi - else - echo "Deploy scenario not set!" - exit 1 - fi -else - echo "File /etc/opnfv-apex/network_settings.yaml does not exist!" - exit 1 -fi - -echo -echo "--------------------------------------------------------" -echo "Done!" diff --git a/jjb/apex/apex-deploy-virtual.sh b/jjb/apex/apex-deploy-virtual.sh deleted file mode 100755 index 4d9b03088..000000000 --- a/jjb/apex/apex-deploy-virtual.sh +++ /dev/null @@ -1,151 +0,0 @@ -#!/bin/bash -set -o errexit -set -o nounset -set -o pipefail - -# log info to console -echo "Starting the Apex virtual deployment." -echo "--------------------------------------------------------" -echo - -if [[ $BUILD_DIRECTORY == *verify-master* ]]; then - cd $WORKSPACE/../${BUILD_DIRECTORY/build_output/} - WORKSPACE=$(pwd) - echo "WORKSPACE modified to $WORKSPACE" - cd $WORKSPACE/ci -elif [[ ! 
"$ARTIFACT_NAME" == "latest" ]]; then - # if artifact name is passed the pull a - # specific artifact from artifacts.opnfv.org - RPM_INSTALL_PATH=$GS_URL - RPM_LIST=$RPM_INSTALL_PATH/$ARTIFACT_NAME -else - if [[ $BUILD_DIRECTORY == *verify* ]]; then - BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY - echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY" - elif [[ $BUILD_DIRECTORY == *apex-build* ]]; then - BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY - echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY" - fi - - if [[ -f ${BUILD_DIRECTORY}/../opnfv.properties ]]; then - # if opnfv.properties exists then use the - # local build. Source the file so we get local OPNFV vars - source ${BUILD_DIRECTORY}/../opnfv.properties - RPM_INSTALL_PATH=${BUILD_DIRECTORY}/noarch - RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL) - else - if [[ $BUILD_DIRECTORY == *verify* ]]; then - echo "BUILD_DIRECTORY is from a verify job, so will not use latest from URL" - echo "Check that the slave has opnfv.properties in $BUILD_DIRECTORY" - exit 1 - elif [[ $BUILD_DIRECTORY == *apex-build* ]]; then - echo "BUILD_DIRECTORY is from a daily job, so will not use latest from URL" - echo "Check that the slave has opnfv.properties in $BUILD_DIRECTORY" - exit 1 - fi - # no opnfv.properties means use the latest from artifacts.opnfv.org - # get the latest.properties to get the link to the latest artifact - curl -s -o $WORKSPACE/opnfv.properties http://$GS_URL/latest.properties - [[ -f opnfv.properties ]] || exit 1 - # source the file so we get OPNFV vars - source opnfv.properties - RPM_INSTALL_PATH=$(echo $OPNFV_RPM_URL | sed 's/'"$(basename $OPNFV_RPM_URL)"'//') - RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL) - fi -fi - -if [ -z "$DEPLOY_SCENARIO" ]; then - echo "Deploy scenario not set!" - exit 1 -fi - -# use local build for verify -if [[ $BUILD_DIRECTORY == *verify-master* ]]; then - if [ ! -e "${WORKSPACE}/build/lib" ]; then ln -s ${WORKSPACE}/lib ${WORKSPACE}/build/lib; fi - DEPLOY_CMD="CONFIG=${WORKSPACE}/build RESOURCES=${WORKSPACE}/build/images/ ./deploy.sh -c ${WORKSPACE}/build -r ${WORKSPACE}/build/images/" - DEPLOY_FILE="${WORKSPACE}/config/deploy/${DEPLOY_SCENARIO}.yaml" - NETWORK_FILE="${WORKSPACE}/config/network/network_settings.yaml" - # Make sure python34 is installed - if ! rpm -q python34 > /dev/null; then - sudo yum install -y epel-release - if ! sudo yum install -y python34; then - echo "Failed to install python34" - exit 1 - fi - fi - if ! rpm -q python34-PyYAML > /dev/null; then - sudo yum install -y epel-release - if ! sudo yum install -y python34-PyYAML; then - echo "Failed to install python34-PyYAML" - exit 1 - fi - fi - if ! rpm -q python34-setuptools > /dev/null; then - if ! 
sudo yum install -y python34-setuptools; then - echo "Failed to install python34-setuptools" - exit 1 - fi - fi - if [ -z ${PYTHONPATH:-} ]; then - export PYTHONPATH=${WORKSPACE}/lib/python - else - export PYTHONPATH=$PYTHONPATH:${WORKSPACE}/lib/python - fi -else - VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-[0-9]{8}') - for pkg in common undercloud opendaylight-sfc onos; do - RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}.noarch.rpm" - done - - # update / install the new rpm - if rpm -q opnfv-apex > /dev/null; then - INSTALLED_RPMS=$(rpm -qa | grep apex) - for x in $INSTALLED_RPMS; do - INSTALLED_RPM_VER=$(echo $x | grep -Eo '[0-9]+\.[0-9]+-[0-9]{8}') - # Does each RPM's version match the version required for deployment - if [ "$INSTALLED_RPM_VER" == "$VERSION_EXTENSION" ]; then - echo "RPM $x is already installed" - else - echo "RPM $x does not match version $VERSION_EXTENSION" - echo "Will upgrade/downgrade RPMs..." - # Try to upgrade/downgrade RPMS - if sudo yum update -y $RPM_LIST | grep "does not update installed package"; then - if ! sudo yum downgrade -y $RPM_LIST; then - sudo yum remove -y opnfv-apex-undercloud opnfv-apex-common opnfv-apex-opendaylight-sfc opnfv-apex-onos - if ! sudo yum downgrade -y $RPM_LIST; then - echo "Unable to downgrade RPMs: $RPM_LIST" - exit 1 - fi - fi - fi - break - fi - done - else - sudo yum install -y $RPM_LIST; - fi - DEPLOY_CMD=opnfv-deploy - DEPLOY_FILE="/etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml" - NETWORK_FILE="/etc/opnfv-apex/network_settings.yaml" - export RESOURCES="/var/opt/opnfv/images" - export CONFIG="/var/opt/opnfv" -fi - -if [ "$OPNFV_CLEAN" == 'yes' ]; then - if [[ $BUILD_DIRECTORY == *verify-master* ]]; then - sudo CONFIG=${WORKSPACE}/build ./clean.sh - else - sudo opnfv-clean - fi -fi -# initiate virtual deployment -echo "Deploy Scenario set to ${DEPLOY_SCENARIO}" -if [ -e $DEPLOY_FILE ]; then - sudo $DEPLOY_CMD -v -d ${DEPLOY_FILE} -n $NETWORK_FILE --debug -else - echo "File does not exist /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml" - exit 1 -fi -echo -echo "--------------------------------------------------------" -echo "Done!" diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh new file mode 100755 index 000000000..4706d45a8 --- /dev/null +++ b/jjb/apex/apex-deploy.sh @@ -0,0 +1,168 @@ +#!/bin/bash +set -o errexit +set -o nounset +set -o pipefail + +APEX_PKGS="common undercloud opendaylight-sfc onos" + +# log info to console +echo "Starting the Apex virtual deployment." +echo "--------------------------------------------------------" +echo + +if ! rpm -q wget > /dev/null; then + sudo yum -y install wget +fi + +if [[ $BUILD_DIRECTORY == *verify* ]]; then + # Build is from a verify, use local build artifacts (not RPMs) + cd $WORKSPACE/../${BUILD_DIRECTORY} + WORKSPACE=$(pwd) + echo "WORKSPACE modified to $WORKSPACE" + cd $WORKSPACE/ci +elif [[ ! "$ARTIFACT_NAME" == "latest" ]]; then + # if artifact name is passed the pull a + # specific artifact from artifacts.opnfv.org + # artifact specified should be opnfv-apex-<version>.noarch.rpm + RPM_INSTALL_PATH=$GS_URL + RPM_LIST=$RPM_INSTALL_PATH/$ARTIFACT_NAME +else + # Use latest RPMS + if [[ $BUILD_DIRECTORY == *apex-build* ]]; then + # Triggered from a daily so RPMS should be in local directory + BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY + echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY" + + if [[ -f ${BUILD_DIRECTORY}/../opnfv.properties ]]; then + # if opnfv.properties exists then use the + # local build. 
Source the file so we get local OPNFV vars + source ${BUILD_DIRECTORY}/../opnfv.properties + RPM_INSTALL_PATH=${BUILD_DIRECTORY}/noarch + RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL) + else + echo "BUILD_DIRECTORY is from a daily job, so will not use latest from URL" + echo "Check that the slave has opnfv.properties in $BUILD_DIRECTORY" + exit 1 + fi + else + # use the latest from artifacts.opnfv.org + # get the latest.properties to get the link to the latest artifact + if ! wget -O $WORKSPACE/opnfv.properties http://$GS_URL/latest.properties; then + echo "ERROR: Unable to find latest.properties at ${GS_URL}...exiting" + exit 1 + fi + # source the file so we get OPNFV vars + source opnfv.properties + RPM_INSTALL_PATH=$(echo $OPNFV_RPM_URL | sed 's/'"$(basename $OPNFV_RPM_URL)"'//') + RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL) + fi +fi + +if [ -z "$DEPLOY_SCENARIO" ]; then + echo "Deploy scenario not set!" + exit 1 +fi + +# use local build for verify +if [[ "$BUILD_DIRECTORY" == *verify* ]]; then + if [ ! -e "${WORKSPACE}/build/lib" ]; then + ln -s ${WORKSPACE}/lib ${WORKSPACE}/build/lib + fi + DEPLOY_SETTINGS_DIR="${WORKSPACE}/config/deploy" + NETWORK_SETTINGS_DIR="${WORKSPACE}/config/network" + DEPLOY_CMD="$(pwd)/deploy.sh" + RESOURCES="${WORKSPACE}/build/images/" + CONFIG="${WORKSPACE}/build" + LIB="${WORKSPACE}/lib" + # Make sure python34 deps are installed + for dep_pkg in epel-release python34 python34-PyYAML python34-setuptools; do + if ! rpm -q ${dep_pkg} > /dev/null; then + if ! sudo yum install -y ${dep_pkg}; then + echo "Failed to install ${dep_pkg}" + exit 1 + fi + fi + done + + if [ -z ${PYTHONPATH:-} ]; then + export PYTHONPATH=${WORKSPACE}/lib/python + else + export PYTHONPATH=$PYTHONPATH:${WORKSPACE}/lib/python + fi +# use RPMs +else + # find version of RPM + VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-[0-9]{8}') + # build RPM List which already includes base Apex RPM + for pkg in ${APEX_PKGS}; do + RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}.noarch.rpm" + done + + # remove old / install new RPMs + if rpm -q opnfv-apex > /dev/null; then + INSTALLED_RPMS=$(rpm -qa | grep apex) + if [ -n "$INSTALLED_RPMS" ]; then + sudo yum remove -y ${INSTALLED_RPMS} + fi + fi + + if ! sudo yum install -y $RPM_LIST; then + echo "Unable to install new RPMs: $RPM_LIST" + exit 1 + fi + + DEPLOY_CMD=opnfv-deploy + DEPLOY_SETTINGS_DIR="/etc/opnfv-apex/" + NETWORK_SETTINGS_DIR="/etc/opnfv-apex/" + RESOURCES="/var/opt/opnfv/images" + CONFIG="/var/opt/opnfv" + LIB="/var/opt/opnfv/lib" +fi + +# set env vars to deploy cmd +DEPLOY_CMD="CONFIG=${CONFIG} RESOURCES=${RESOURCES} LIB=${LIB} ${DEPLOY_CMD}" + +if [ "$OPNFV_CLEAN" == 'yes' ]; then + if [[ "$BUILD_DIRECTORY" == *verify* ]]; then + sudo CONFIG=${CONFIG} LIB=${LIB} ./clean.sh + else + sudo CONFIG=${CONFIG} LIB=${LIB} opnfv-clean + fi +fi + +echo "Deploy Scenario set to ${DEPLOY_SCENARIO}" +DEPLOY_FILE="${DEPLOY_SETTINGS_DIR}/${DEPLOY_SCENARIO}.yaml" + +if [ ! -e "$DEPLOY_FILE" ]; then + echo "ERROR: Required settings file missing: Deploy settings file ${DEPLOY_FILE}" +fi + +if [[ "$JOB_NAME" == *virtual* ]]; then + # settings for virtual deployment + NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings.yaml" + DEPLOY_CMD="${DEPLOY_CMD} -v" +else + # settings for bare metal deployment + NETWORK_FILE="/root/network/network_settings.yaml" + INVENTORY_FILE="/root/inventory/pod_settings.yaml" + + if [ ! 
-e "$INVENTORY_FILE" ]; then + echo "ERROR: Required settings file missing: Inventory settings file ${INVENTORY_FILE}" + fi + # include inventory file for bare metal deployment + DEPLOY_CMD="${DEPLOY_CMD} -i ${INVENTORY_FILE}" +fi + +# Check that network settings file exists +if [ ! -e "$NETWORK_FILE" ]; then + echo "ERROR: Required settings file missing for Network Settings" + echo "Network settings file: ${NETWORK_FILE}" + exit 1 +fi + +# start deployment +sudo ${DEPLOY_CMD} -d ${DEPLOY_FILE} -n ${NETWORK_FILE} --debug + +echo +echo "--------------------------------------------------------" +echo "Done!" diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml index ed06113d5..c121d635f 100644 --- a/jjb/apex/apex.yml +++ b/jjb/apex/apex.yml @@ -412,7 +412,7 @@ max-total: 10 builders: - - 'apex-deploy-virtual' + - 'apex-deploy' - 'apex-workspace-cleanup' - job-template: @@ -453,7 +453,7 @@ builders: - - 'apex-deploy-baremetal' + - 'apex-deploy' - 'apex-workspace-cleanup' # Brahmaputra Daily @@ -754,16 +754,11 @@ !include-raw: ./apex-gs-cleanup.sh - builder: - name: 'apex-deploy-virtual' + name: 'apex-deploy' builders: - shell: - !include-raw: ./apex-deploy-virtual.sh + !include-raw: ./apex-deploy.sh -- builder: - name: 'apex-deploy-baremetal' - builders: - - shell: - !include-raw: ./apex-deploy-baremetal.sh ####################### # trigger macros diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml index 031c972b3..7d392414f 100644 --- a/jjb/armband/armband-ci-jobs.yml +++ b/jjb/armband/armband-ci-jobs.yml @@ -39,7 +39,7 @@ # NOHA scenarios - 'os-odl_l2-nofeature-noha': - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' jobs: - 'armband-{scenario}-{pod}-daily-{stream}' diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml index 40f5445dd..7ce926890 100644 --- a/jjb/compass4nfv/compass-ci-jobs.yml +++ b/jjb/compass4nfv/compass-ci-jobs.yml @@ -14,33 +14,35 @@ stream: brahmaputra branch: 'stable/{stream}' gs-pathname: '/{stream}' - #-------------------------------- # POD, INSTALLER, AND BRANCH MAPPING #-------------------------------- - pod: -#-------------------------------- -# brahmaputra +# CI PODs #-------------------------------- - - huawei-pod1: + pod: + - baremetal: + slave-label: compass-baremetal + os-version: 'trusty' + <<: *master + - virtual: + slave-label: compass-virtual + os-version: 'trusty' + <<: *master + - baremetal: + slave-label: compass-baremetal os-version: 'trusty' <<: *brahmaputra - - huawei-virtual: + - virtual: + slave-label: compass-virtual os-version: 'trusty' <<: *brahmaputra #-------------------------------- # master #-------------------------------- - huawei-pod2: + slave-label: '{pod}' os-version: 'centos7' <<: *master -# - intel-pod8: - - huawei-pod1: - os-version: 'trusty' - <<: *master - - huawei-virtual: - os-version: 'trusty' - <<: *master scenario: - 'os-nosdn-nofeature-ha': @@ -95,7 +97,7 @@ - string: name: DEPLOY_SCENARIO default: '{scenario}' - - '{pod}-defaults' + - '{slave-label}-defaults' - '{installer}-defaults' triggers: @@ -164,7 +166,7 @@ - compass-ci-parameter: installer: '{installer}' gs-pathname: '{gs-pathname}' - - '{pod}-defaults' + - '{slave-label}-defaults' - '{installer}-defaults' scm: @@ -235,105 +237,84 @@ - timed: '' - trigger: - name: 'compass-os-nosdn-nofeature-ha-huawei-pod1-master-trigger' + name: 'compass-os-nosdn-nofeature-ha-baremetal-master-trigger' triggers: - timed: '0 3 * * *' - trigger: - name: 
'compass-os-odl_l2-nofeature-ha-huawei-pod1-master-trigger' + name: 'compass-os-odl_l2-nofeature-ha-baremetal-master-trigger' triggers: - timed: '0 21 * * *' - trigger: - name: 'compass-os-odl_l3-nofeature-ha-huawei-pod1-master-trigger' + name: 'compass-os-odl_l3-nofeature-ha-baremetal-master-trigger' triggers: - timed: '' - trigger: - name: 'compass-os-onos-nofeature-ha-huawei-pod1-master-trigger' + name: 'compass-os-onos-nofeature-ha-baremetal-master-trigger' triggers: - timed: '0 15 * * *' - trigger: - name: 'compass-os-ocl-nofeature-ha-huawei-pod1-master-trigger' + name: 'compass-os-ocl-nofeature-ha-baremetal-master-trigger' triggers: - timed: '0 9 * * *' - trigger: - name: 'compass-os-nosdn-nofeature-ha-huawei-pod1-brahmaputra-trigger' - triggers: - - timed: '' -- trigger: - name: 'compass-os-odl_l2-nofeature-ha-huawei-pod1-brahmaputra-trigger' - triggers: - - timed: '' -- trigger: - name: 'compass-os-odl_l3-nofeature-ha-huawei-pod1-brahmaputra-trigger' - triggers: - - timed: '' -- trigger: - name: 'compass-os-onos-nofeature-ha-huawei-pod1-brahmaputra-trigger' - triggers: - - timed: '' -- trigger: - name: 'compass-os-ocl-nofeature-ha-huawei-pod1-brahmaputra-trigger' - triggers: - - timed: '' - -- trigger: - name: 'compass-os-nosdn-nofeature-ha-intel-pod8-master-trigger' + name: 'compass-os-nosdn-nofeature-ha-baremetal-brahmaputra-trigger' triggers: - timed: '' - trigger: - name: 'compass-os-odl_l2-nofeature-ha-intel-pod8-master-trigger' + name: 'compass-os-odl_l2-nofeature-ha-baremetal-brahmaputra-trigger' triggers: - timed: '' - trigger: - name: 'compass-os-odl_l3-nofeature-ha-intel-pod8-master-trigger' + name: 'compass-os-odl_l3-nofeature-ha-baremetal-brahmaputra-trigger' triggers: - timed: '' - trigger: - name: 'compass-os-onos-nofeature-ha-intel-pod8-master-trigger' + name: 'compass-os-onos-nofeature-ha-baremetal-brahmaputra-trigger' triggers: - timed: '' - trigger: - name: 'compass-os-ocl-nofeature-ha-intel-pod8-master-trigger' + name: 'compass-os-ocl-nofeature-ha-baremetal-brahmaputra-trigger' triggers: - timed: '' - trigger: - name: 'compass-os-nosdn-nofeature-ha-huawei-virtual-master-trigger' + name: 'compass-os-nosdn-nofeature-ha-virtual-master-trigger' triggers: - timed: '0 3 * * *' - trigger: - name: 'compass-os-odl_l2-nofeature-ha-huawei-virtual-master-trigger' + name: 'compass-os-odl_l2-nofeature-ha-virtual-master-trigger' triggers: - timed: '0 21 * * *' - trigger: - name: 'compass-os-odl_l3-nofeature-ha-huawei-virtual-master-trigger' + name: 'compass-os-odl_l3-nofeature-ha-virtual-master-trigger' triggers: - timed: '' - trigger: - name: 'compass-os-onos-nofeature-ha-huawei-virtual-master-trigger' + name: 'compass-os-onos-nofeature-ha-virtual-master-trigger' triggers: - timed: '0 15 * * *' - trigger: - name: 'compass-os-ocl-nofeature-ha-huawei-virtual-master-trigger' + name: 'compass-os-ocl-nofeature-ha-virtual-master-trigger' triggers: - timed: '0 9 * * *' - trigger: - name: 'compass-os-nosdn-nofeature-ha-huawei-virtual-brahmaputra-trigger' + name: 'compass-os-nosdn-nofeature-ha-virtual-brahmaputra-trigger' triggers: - timed: '' - trigger: - name: 'compass-os-odl_l2-nofeature-ha-huawei-virtual-brahmaputra-trigger' + name: 'compass-os-odl_l2-nofeature-ha-virtual-brahmaputra-trigger' triggers: - timed: '' - trigger: - name: 'compass-os-odl_l3-nofeature-ha-huawei-virtual-brahmaputra-trigger' + name: 'compass-os-odl_l3-nofeature-ha-virtual-brahmaputra-trigger' triggers: - timed: '' - trigger: - name: 'compass-os-onos-nofeature-ha-huawei-virtual-brahmaputra-trigger' + name: 
'compass-os-onos-nofeature-ha-virtual-brahmaputra-trigger' triggers: - timed: '' - trigger: - name: 'compass-os-ocl-nofeature-ha-huawei-virtual-brahmaputra-trigger' + name: 'compass-os-ocl-nofeature-ha-virtual-brahmaputra-trigger' triggers: - timed: '' diff --git a/jjb/compass4nfv/compass-project-jobs.yml b/jjb/compass4nfv/compass-project-jobs.yml index 920923a34..9db709dfe 100644 --- a/jjb/compass4nfv/compass-project-jobs.yml +++ b/jjb/compass4nfv/compass-project-jobs.yml @@ -6,7 +6,7 @@ project: 'compass4nfv' - pod: 'huawei-virtual' + pod: 'compass-virtual' stream: - master: @@ -119,7 +119,7 @@ - build-blocker: use-build-blocker: true blocking-jobs: - - 'compass-deploy-huawei-virtual-daily-.*?' + - 'compass-deploy-virtual-daily-.*?' block-level: 'NODE' scm: diff --git a/jjb/functest/functest-ci-jobs.yml b/jjb/functest/functest-ci-jobs.yml index 24673612b..d7e37292a 100644 --- a/jjb/functest/functest-ci-jobs.yml +++ b/jjb/functest/functest-ci-jobs.yml @@ -69,6 +69,23 @@ slave-label: joid-virtual installer: joid <<: *brahmaputra +# compass CI PODs + - baremetal: + slave-label: compass-baremetal + installer: compass + <<: *master + - virtual: + slave-label: compass-virtual + installer: compass + <<: *master + - baremetal: + slave-label: compass-baremetal + installer: compass + <<: *brahmaputra + - virtual: + slave-label: compass-virtual + installer: compass + <<: *brahmaputra #-------------------------------- # Installers not using labels # CI PODs @@ -83,14 +100,6 @@ slave-label: '{pod}' installer: apex <<: *brahmaputra - - intel-pod8: - slave-label: '{pod}' - installer: compass - <<: *master - - huawei-pod1: - slave-label: '{pod}' - installer: compass - <<: *brahmaputra #-------------------------------- # None-CI PODs #-------------------------------- @@ -106,18 +115,10 @@ slave-label: '{pod}' installer: joid <<: *master - - huawei-virtual: - slave-label: '{pod}' - installer: compass - <<: *master - huawei-pod2: slave-label: '{pod}' installer: compass <<: *master - - huawei-pod1: - slave-label: '{pod}' - installer: compass - <<: *master - nokia-pod1: slave-label: '{pod}' installer: apex @@ -134,10 +135,6 @@ slave-label: '{pod}' installer: fuel <<: *brahmaputra - - huawei-virtual: - slave-label: '{pod}' - installer: compass - <<: *brahmaputra #-------------------------------- testsuite: @@ -263,7 +260,6 @@ - 'set-functest-env' - 'functest-suite' - - builder: name: functest-suite builders: diff --git a/jjb/opnfv/slave-params.yml b/jjb/opnfv/slave-params.yml index d89af419a..02d74abff 100644 --- a/jjb/opnfv/slave-params.yml +++ b/jjb/opnfv/slave-params.yml @@ -47,6 +47,26 @@ default: https://gerrit.opnfv.org/gerrit/$PROJECT description: 'Git URL to use on this Jenkins Slave' - parameter: + name: 'compass-baremetal-defaults' + parameters: + - label: + name: SLAVE_LABEL + default: 'compass-baremetal' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' +- parameter: + name: 'compass-virtual-defaults' + parameters: + - label: + name: SLAVE_LABEL + default: 'compass-virtual' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' +- parameter: name: 'lf-pod1-defaults' parameters: - node: @@ -68,7 +88,6 @@ name: SSH_KEY default: /root/.ssh/id_rsa description: 'SSH key to use for Apex' - - parameter: name: 'lf-pod2-defaults' parameters: @@ -304,21 +323,6 @@ description: 'Git URL to use on this Jenkins Slave' - parameter: - name: 
'huawei-virtual-defaults' - parameters: - - label: - name: SLAVE_LABEL - default: 'huawei-deploy-vm' - - string: - name: INSTALLER_VERSION - default: stable - description: 'Version of the installer to deploy' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on this Jenkins Slave' - -- parameter: name: 'huawei-pod3-defaults' parameters: - node: @@ -508,17 +512,6 @@ description: 'Git URL to use on this Jenkins Slave' - parameter: - name: 'virtual-defaults' - parameters: - - label: - name: SLAVE_LABEL - default: '{installer}-deploy-virtual' - - string: - name: GIT_BASE - default: https://gerrit.opnfv.org/gerrit/$PROJECT - description: 'Git URL to use on these Jenkins Slaves' - -- parameter: name: 'ericsson-build-defaults' parameters: - label: diff --git a/jjb/qtip/qtip-ci-jobs.yml b/jjb/qtip/qtip-ci-jobs.yml index 39d856ea7..ee321eb5b 100644 --- a/jjb/qtip/qtip-ci-jobs.yml +++ b/jjb/qtip/qtip-ci-jobs.yml @@ -29,7 +29,7 @@ <<: *brahmaputra - orange-pod2: installer: joid - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' <<: *brahmaputra #-------------------------------- # master @@ -37,7 +37,7 @@ - juniper-pod1: installer: joid <<: *master - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' # - zte-pod1: #would be confirmed with the ZTE lab by tomorrow # installer: fuel # <<: *master diff --git a/jjb/releng-macros.yaml b/jjb/releng-macros.yaml index cdbb0bac3..d8866bdb0 100644 --- a/jjb/releng-macros.yaml +++ b/jjb/releng-macros.yaml @@ -62,11 +62,6 @@ - timed: '' - trigger: - name: 'brahmaputra-trigger-daily-disabled' - triggers: - - timed: '' - -- trigger: name: 'brahmaputra-trigger-daily-enabled' triggers: - timed: '0 2 * * *' diff --git a/jjb/yardstick/yardstick-ci-jobs.yml b/jjb/yardstick/yardstick-ci-jobs.yml index 3d59e61ce..48c335a03 100644 --- a/jjb/yardstick/yardstick-ci-jobs.yml +++ b/jjb/yardstick/yardstick-ci-jobs.yml @@ -32,51 +32,73 @@ - baremetal: slave-label: fuel-baremetal installer: fuel - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' <<: *master - virtual: slave-label: fuel-virtual installer: fuel - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' <<: *master - baremetal: slave-label: fuel-baremetal installer: fuel - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' <<: *brahmaputra - virtual: slave-label: fuel-virtual installer: fuel - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' <<: *brahmaputra # just in case if things go wrong - lf-pod2: slave-label: '{pod}' installer: fuel - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' <<: *master # joid CI PODs - baremetal: slave-label: joid-baremetal installer: joid - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' <<: *master - virtual: slave-label: joid-virtual installer: joid - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' <<: *master - baremetal: slave-label: joid-baremetal installer: joid - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' <<: *brahmaputra - virtual: slave-label: joid-virtual installer: joid - auto-trigger-name: 
'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' + <<: *brahmaputra + +# compass CI PODs + - baremetal: + slave-label: compass-baremetal + installer: compass + auto-trigger-name: 'daily-trigger-disabled' + <<: *master + - virtual: + slave-label: compass-virtual + installer: compass + auto-trigger-name: 'daily-trigger-disabled' + <<: *master + - baremetal: + slave-label: compass-baremetal + installer: compass + auto-trigger-name: 'daily-trigger-disabled' + <<: *brahmaputra + - virtual: + slave-label: compass-virtual + installer: compass + auto-trigger-name: 'daily-trigger-disabled' <<: *brahmaputra #-------------------------------- # Installers not using labels @@ -87,22 +109,12 @@ - lf-pod1: slave-label: '{pod}' installer: apex - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' <<: *master - lf-pod1: slave-label: '{pod}' installer: apex - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - <<: *brahmaputra - - intel-pod8: - slave-label: '{pod}' - installer: compass - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - <<: *master - - huawei-pod1: - slave-label: '{pod}' - installer: compass - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' <<: *brahmaputra #-------------------------------- # None-CI PODs @@ -110,35 +122,22 @@ - orange-pod2: slave-label: '{pod}' installer: joid - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' <<: *brahmaputra - zte-pod1: slave-label: '{pod}' installer: fuel - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' <<: *master - orange-pod2: slave-label: '{pod}' installer: joid - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - <<: *master - - huawei-pod1: - slave-label: '{pod}' - installer: compass - suite: daily - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' <<: *master - huawei-pod2: slave-label: '{pod}' installer: compass - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - <<: *master - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - <<: *master - - huawei-virtual: - slave-label: '{pod}' - installer: compass - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' <<: *master - huawei-pod3: slave-label: '{pod}' @@ -150,11 +149,6 @@ installer: compass auto-trigger-name: 'yardstick-daily-huawei-pod4-trigger' <<: *master - - huawei-virtual: - slave-label: '{pod}' - installer: compass - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - <<: *brahmaputra #-------------------------------- testsuite: - 'daily' @@ -305,7 +299,7 @@ description: 'Arguments to use in order to choose the backend DB' - parameter: - name: 'yardstick-params-huawei-pod1' + name: 'yardstick-params-compass-baremetal' parameters: - string: name: YARDSTICK_DB_BACKEND @@ -345,7 +339,7 @@ description: 'Arguments to use in order to choose the backend DB' - parameter: - name: 'yardstick-params-huawei-virtual' + name: 'yardstick-params-compass-virtual' parameters: - string: name: YARDSTICK_DB_BACKEND |
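
The new jjb/apex/apex-deploy.sh above derives a single version suffix from the base Apex RPM name and reuses it for the opnfv-apex-common, -undercloud, -opendaylight-sfc and -onos sub-packages, so the whole install list comes from one build. Below is a minimal sketch of that list-building step, not the job script itself; the default OPNFV_RPM_URL is an illustrative placeholder, and dirname stands in for the sed call the script uses to strip the basename (the result differs only by a trailing slash).

#!/bin/bash
# Sketch: rebuild the Apex RPM install list the way apex-deploy.sh does.
# The default URL is a made-up example, not a real artifact.
set -o errexit -o nounset -o pipefail

APEX_PKGS="common undercloud opendaylight-sfc onos"
OPNFV_RPM_URL=${OPNFV_RPM_URL:-http://artifacts.opnfv.org/apex/opnfv-apex-3.0-20160302.noarch.rpm}

RPM_INSTALL_PATH=$(dirname "$OPNFV_RPM_URL")
RPM_LIST="${RPM_INSTALL_PATH}/$(basename "$OPNFV_RPM_URL")"

# Recover the version suffix (e.g. 3.0-20160302) from the base RPM name
# and reuse it so every sub-package RPM matches the same build.
VERSION_EXTENSION=$(basename "$RPM_LIST" | grep -Eo '[0-9]+\.[0-9]+-[0-9]{8}')

for pkg in ${APEX_PKGS}; do
    RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}.noarch.rpm"
done

echo "RPMs that would be installed:"
printf '  %s\n' ${RPM_LIST}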
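
The same script now serves both the virtual and the bare-metal jobs by branching on JOB_NAME instead of shipping two near-identical scripts. A minimal sketch of that dispatch follows; the default JOB_NAME and DEPLOY_SCENARIO values are placeholders, and unlike the job script this sketch exits whenever any settings file is missing (the script in the diff only exits when the network settings file is absent).

#!/bin/bash
# Sketch: how the consolidated apex-deploy.sh picks virtual vs. bare-metal
# arguments from JOB_NAME. Defaults below are placeholders for illustration.
set -o errexit -o nounset -o pipefail

JOB_NAME=${JOB_NAME:-apex-deploy-virtual-example}          # normally injected by Jenkins
DEPLOY_SCENARIO=${DEPLOY_SCENARIO:-os-nosdn-nofeature-ha}
DEPLOY_SETTINGS_DIR=${DEPLOY_SETTINGS_DIR:-/etc/opnfv-apex}
NETWORK_SETTINGS_DIR=${NETWORK_SETTINGS_DIR:-/etc/opnfv-apex}
DEPLOY_CMD="CONFIG=/var/opt/opnfv RESOURCES=/var/opt/opnfv/images LIB=/var/opt/opnfv/lib opnfv-deploy"

DEPLOY_FILE="${DEPLOY_SETTINGS_DIR}/${DEPLOY_SCENARIO}.yaml"

if [[ "$JOB_NAME" == *virtual* ]]; then
    # virtual deployments use the packaged network settings and add -v
    NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings.yaml"
    DEPLOY_CMD="${DEPLOY_CMD} -v"
else
    # bare-metal deployments also pass the pod inventory file with -i
    NETWORK_FILE="/root/network/network_settings.yaml"
    INVENTORY_FILE="/root/inventory/pod_settings.yaml"
    [ -e "$INVENTORY_FILE" ] || { echo "ERROR: missing ${INVENTORY_FILE}"; exit 1; }
    DEPLOY_CMD="${DEPLOY_CMD} -i ${INVENTORY_FILE}"
fi

# Both the deploy scenario file and the network settings file must exist.
for f in "$DEPLOY_FILE" "$NETWORK_FILE"; do
    [ -e "$f" ] || { echo "ERROR: missing settings file ${f}"; exit 1; }
done

echo "Would run: sudo ${DEPLOY_CMD} -d ${DEPLOY_FILE} -n ${NETWORK_FILE} --debug"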