Diffstat (limited to 'jjb')
-rwxr-xr-x   jjb/3rd_party_ci/install-netvirt.sh        |  24
-rw-r--r--   jjb/3rd_party_ci/odl-netvirt.yml           |  29
-rwxr-xr-x   jjb/apex/apex-build.sh                     |   3
-rwxr-xr-x   jjb/apex/apex-deploy.sh                    |  11
-rw-r--r--   jjb/apex/apex-snapshot-create.sh           |  93
-rw-r--r--   jjb/apex/apex-snapshot-deploy.sh           | 154
-rwxr-xr-x   jjb/apex/apex-upload-artifact.sh           |  12
-rw-r--r--   jjb/apex/apex.yml                          |  65
-rw-r--r--   jjb/compass4nfv/compass-dovetail-jobs.yml  | 209
-rw-r--r--   jjb/opera/opera-daily-jobs.yml             |   3
10 files changed, 577 insertions, 26 deletions
diff --git a/jjb/3rd_party_ci/install-netvirt.sh b/jjb/3rd_party_ci/install-netvirt.sh
index f111d4847..c9aa4c501 100755
--- a/jjb/3rd_party_ci/install-netvirt.sh
+++ b/jjb/3rd_party_ci/install-netvirt.sh
@@ -2,14 +2,28 @@ set -e
 if [ -z ${WORKSPACE} ]; then
-  echo "WORKSPACE is unset. Please do so."
+  echo "WORKSPACE is unset. Please set."
   exit 1
 fi
 # wipe the WORKSPACE
 /bin/rm -rf $WORKSPACE/*
+set -o errexit
+set -o nounset
+set -o pipefail
+
+SNAP_CACHE=$HOME/snap_cache
 # clone opnfv sdnvpn repo
 git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
-. $WORKSPACE/sdnvpn/odl-pipeline/odl-pipeline-common.sh
-pushd $LIB
-./odl_reinstaller.sh --cloner-info $CLONER_INFO --odl-artifact $NETVIRT_ARTIFACT
-popd
\ No newline at end of file
+
+if [ ! -f "$NETVIRT_ARTIFACT" ]; then
+  echo "ERROR: ${NETVIRT_ARTIFACT} specified as NetVirt Artifact, but file does not exist"
+  exit 1
+fi
+
+# TODO (trozet) snapshot should have already been unpacked into cache folder
+# but we really should check the cache here, and not use a single cache folder
+# for when we support multiple jobs on a single slave
+pushd sdnvpn/odl-pipeline/lib > /dev/null
+./odl_reinstaller.sh --pod-config ${SNAP_CACHE}/node.yaml \
+  --odl-artifact ${NETVIRT_ARTIFACT} --ssh-key-file ${SNAP_CACHE}/id_rsa
+popd > /dev/null
diff --git a/jjb/3rd_party_ci/odl-netvirt.yml b/jjb/3rd_party_ci/odl-netvirt.yml
index f07c37298..6e25425ba 100644
--- a/jjb/3rd_party_ci/odl-netvirt.yml
+++ b/jjb/3rd_party_ci/odl-netvirt.yml
@@ -17,11 +17,11 @@
 #####################################
     phase:
         - 'create-apex-vms':
-            slave-label: 'ericsson-virtual5'
+            slave-label: 'odl-netvirt-virtual-intel'
         - 'install-netvirt':
-            slave-label: 'odl-netvirt-virtual'
+            slave-label: 'odl-netvirt-virtual-intel'
         - 'postprocess':
-            slave-label: 'odl-netvirt-virtual'
+            slave-label: 'odl-netvirt-virtual-intel'
 #####################################
 # jobs
 #####################################
@@ -61,10 +61,10 @@
         - gerrit:
             server-name: 'git.opendaylight.org'
             trigger-on:
-                - comment-added-contains-event:
-                    comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : SUCCESS'
-                - comment-added-contains-event:
-                    comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : UNSTABLE'
+                # - comment-added-contains-event:
+                #     comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : SUCCESS'
+                # - comment-added-contains-event:
+                #     comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : UNSTABLE'
                 - comment-added-contains-event:
                     comment-contains-value: 'opnfv-test'
             projects:
@@ -117,10 +117,10 @@
             name: functest
             condition: SUCCESSFUL
             projects:
-                - name: 'functest-netvirt-virtual-daily-{stream}'
+                - name: 'functest-netvirt-virtual-suite-{stream}'
                   predefined-parameters: |
-                      RC_FILE_PATH=/home/jenkins/cloner-info/overcloudrc
-                      DEPLOY_SCENARIO=os-odl_l2-bgpvpn-noha
+                      DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
+                      FUNCTEST_SUITE_NAME=healthcheck
                   node-parameters: true
                   kill-phase-on: FAILURE
                   abort-all-job: false
@@ -140,7 +140,7 @@
                       NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
                   node-parameters: true
                   kill-phase-on: FAILURE
-                  abort-all-job: true
+                  abort-all-job: false
 
 - job-template:
     name: 'odl-netvirt-verify-virtual-{phase}-{stream}'
@@ -159,8 +159,9 @@
         - build-blocker:
            use-build-blocker: true
            blocking-jobs:
-                - 'odl-netvirt-verify-virtual-install-.*'
-                - 'functest-netvirt-virtual-daily-.*'
+                - 'odl-netvirt-verify-virtual-create-apex-vms-.*'
+                - 'odl-netvirt-verify-virtual-install-netvirt-.*'
+                - 'functest-netvirt-virtual-suite-.*'
                 - 'odl-netvirt-verify-virtual-postprocess-.*'
            block-level: 'NODE'
@@ -192,7 +193,7 @@
     name: 'netvirt-verify-create-apex-vms-builder'
     builders:
         - shell:
-            !include-raw: ./create-apex-vms.sh
+            !include-raw: ../apex/apex-snapshot-deploy.sh
 - builder:
     name: 'netvirt-verify-install-netvirt-builder'
     builders:
diff --git a/jjb/apex/apex-build.sh b/jjb/apex/apex-build.sh
index ee1dfb5d3..220d02435 100755
--- a/jjb/apex/apex-build.sh
+++ b/jjb/apex/apex-build.sh
@@ -12,6 +12,9 @@ echo
 if echo $BUILD_TAG | grep "apex-verify" 1> /dev/null; then
   export OPNFV_ARTIFACT_VERSION=dev${BUILD_NUMBER}
   export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY"
+elif echo $BUILD_TAG | grep "csit" 1> /dev/null; then
+  export OPNFV_ARTIFACT_VERSION=csit${BUILD_NUMBER}
+  export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY"
 elif [ "$ARTIFACT_VERSION" == "daily" ]; then
   export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d")
   export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --iso"
diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh
index 8d5c4cb13..9535e7fb7 100755
--- a/jjb/apex/apex-deploy.sh
+++ b/jjb/apex/apex-deploy.sh
@@ -15,7 +15,7 @@ if ! rpm -q wget > /dev/null; then
   sudo yum -y install wget
 fi
 
-if [[ $BUILD_DIRECTORY == *verify* ]]; then
+if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *csit* ]]; then
   # Build is from a verify, use local build artifacts (not RPMs)
   cd $WORKSPACE/../${BUILD_DIRECTORY}
   WORKSPACE=$(pwd)
@@ -64,8 +64,8 @@ if [ -z "$DEPLOY_SCENARIO" ]; then
   exit 1
 fi
 
-# use local build for verify
-if [[ "$BUILD_DIRECTORY" == *verify* ]]; then
+# use local build for verify and csit promote
+if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *csit* ]]; then
   if [ ! -e "${WORKSPACE}/build/lib" ]; then
     ln -s ${WORKSPACE}/lib ${WORKSPACE}/build/lib
   fi
@@ -144,7 +144,7 @@ if [ "$OPNFV_CLEAN" == 'yes' ]; then
   else
     clean_opts=''
   fi
-  if [[ "$BUILD_DIRECTORY" == *verify* ]]; then
+  if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *csit* ]]; then
     sudo CONFIG=${CONFIG} LIB=${LIB} ./clean.sh ${clean_opts}
   else
     sudo CONFIG=${CONFIG} LIB=${LIB} opnfv-clean ${clean_opts}
@@ -172,6 +172,9 @@ if [[ "$JOB_NAME" == *virtual* ]]; then
     NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings.yaml"
   fi
   DEPLOY_CMD="${DEPLOY_CMD} -v"
+  if [[ "$JOB_NAME" == *csit* ]]; then
+    DEPLOY_CMD="${DEPLOY_CMD} -e csit-environment.yaml --virtual-computes 2"
+  fi
 else
   # settings for bare metal deployment
   if [ "$IPV6_FLAG" == "True" ]; then
diff --git a/jjb/apex/apex-snapshot-create.sh b/jjb/apex/apex-snapshot-create.sh
new file mode 100644
index 000000000..09c6a1197
--- /dev/null
+++ b/jjb/apex/apex-snapshot-create.sh
@@ -0,0 +1,93 @@
+#!/usr/bin/env bash
+##############################################################################
+# Copyright (c) 2016 Tim Rozet (Red Hat) and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+set -o errexit
+set -o nounset
+set -o pipefail
+
+SSH_OPTIONS=(-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o LogLevel=error)
+
+echo "Creating Apex snapshot..."
+echo "-------------------------"
+echo
+
+# create tmp directory
+tmp_dir=$(pwd)/.tmp
+mkdir -p ${tmp_dir}
+
+# TODO(trozet) remove this after fix goes in for tripleo_inspector to copy these
+pushd ${tmp_dir} > /dev/null
+echo "Copying overcloudrc and ssh key from Undercloud..."
+# Store overcloudrc
+UNDERCLOUD=$(sudo virsh domifaddr undercloud | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]')
+scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:overcloudrc ./
+# Copy out ssh key of stack from undercloud
+scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:.ssh/id_rsa ./
+popd > /dev/null
+
+echo "Gathering introspection information"
+git clone https://gerrit.opnfv.org/gerrit/sdnvpn.git
+pushd sdnvpn/odl-pipeline/lib > /dev/null
+./tripleo_introspector.sh --out-file ${tmp_dir}/node.yaml
+popd > /dev/null
+
+echo "Shutting down nodes"
+# Shut down nodes
+nodes=$(sudo virsh list | grep -Eo "baremetal[0-9]")
+for node in $nodes; do
+  sudo virsh shutdown ${node} --mode acpi
+done
+
+for node in $nodes; do
+  count=0
+  while [ "$count" -lt 10 ]; do
+    sleep 10
+    if sudo virsh list | grep ${node}; then
+      echo "Waiting for $node to shutdown, try $count"
+    else
+      break
+    fi
+    count=$((count+1))
+  done
+
+  if [ "$count" -ge 10 ]; then
+    echo "Node $node failed to shutdown"
+    exit 1
+  fi
+done
+
+echo "Gathering virsh definitions"
+# copy qcow2s, virsh definitions
+for node in $nodes; do
+  cp -f /var/lib/libvirt/images/${node}.qcow2 ./
+  sudo virsh dumpxml ${node} > ${node}.xml
+done
+
+# copy virsh net definitions
+for net in admin api external storage tenant; do
+  sudo virsh net-dumpxml ${net} > ${net}.xml
+done
+
+# tar up artifacts
+DATE=`date +%Y-%m-%d`
+tar czf ../apex-csit-snap-${DATE}.tar.gz .
+popd > /dev/null
+rm -rf ./.tmp
+echo "Snapshot saved as apex-csit-snap-${DATE}.tar.gz"
+
+# update opnfv properties file
+curl -O -L http://$GS_URL/snapshot.properties
+sed -i '/^OPNFV_SNAP_URL=/{h;s#=.*#='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#};${x;/^$/{s##OPNFV_SNAP_URL='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#;H};x}' snapshot.properties
+snap_sha=$(sha512sum apex-csit-snap-${DATE}.tar.gz | cut -d' ' -f1)
+sed -i '/^OPNFV_SNAP_SHA512SUM=/{h;s/=.*/='${snap_sha}'/};${x;/^$/{s//OPNFV_SNAP_SHA512SUM='${snap_sha}'/;H};x}' snapshot.properties
+echo "OPNFV_SNAP_URL=$GS_URL/apex-csit-snap-${DATE}.tar.gz"
+echo "OPNFV_SNAP_SHA512SUM=$(sha512sum apex-csit-snap-${DATE}.tar.gz | cut -d' ' -f1)"
+echo "Updated properties file: "
+cat snapshot.properties
diff --git a/jjb/apex/apex-snapshot-deploy.sh b/jjb/apex/apex-snapshot-deploy.sh
new file mode 100644
index 000000000..3bb65a0b3
--- /dev/null
+++ b/jjb/apex/apex-snapshot-deploy.sh
@@ -0,0 +1,154 @@
+#!/usr/bin/env bash
+##############################################################################
+# Copyright (c) 2016 Tim Rozet (Red Hat) and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+set -o errexit
+set -o nounset
+set -o pipefail
+
+SSH_OPTIONS=(-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o LogLevel=error)
+SNAP_CACHE=$HOME/snap_cache
+
+
+echo "Deploying Apex snapshot..."
+echo "--------------------------"
+echo
+
+echo "Cleaning server"
+git clone https://gerrit.opnfv.org/gerrit/apex.git
+pushd apex/ci > /dev/null
+sudo CONFIG=../build/ LIB=../lib ./clean.sh
+popd > /dev/null
+
+echo "Downloading latest snapshot properties file"
+if ! wget -O $WORKSPACE/opnfv.properties http://$GS_URL/snapshot.properties; then
+  echo "ERROR: Unable to find snapshot.properties at ${GS_URL}...exiting"
+  exit 1
+fi
+
+# find latest check sum
+latest_snap_checksum=$(cat opnfv.properties | grep OPNFV_SNAP_SHA512SUM | awk -F "=" '{print $2}')
+if [ -z "$latest_snap_checksum" ]; then
+  echo "ERROR: checksum of latest snapshot from snapshot.properties is null!"
+  exit 1
+fi
+
+local_snap_checksum=""
+
+# check snap cache directory exists
+if [ -d "$SNAP_CACHE" ]; then
+  latest_snap=$(ls -Art | grep tar.gz | tail -n 1)
+  if [ -n "$latest_snap" ]; then
+    local_snap_checksum=$(sha512sum ${latest_snap} | cut -d' ' -f1)
+  fi
+else
+  mkdir -p ${SNAP_CACHE}
+fi
+
+# compare check sum and download latest snap if not up to date
+if [ "$local_snap_checksum" -ne "$latest_snap_checksum" ]; then
+  snap_url=$(cat opnfv.properties | grep OPNFV_SNAP_URL | awk -F "=" '{print $2}')
+  if [ -z "$snap_url" ]; then
+    echo "ERROR: Snap URL from snapshot.properties is null!"
+    exit 1
+  fi
+  echo "INFO: SHA mismatch, will download latest snapshot"
+  wget --directory-prefix=${SNAP_CACHE}/ ${snap_url}
+  snap_tar=$(basename ${snap_url})
+else
+  snap_tar=${latest_snap}
+fi
+
+echo "INFO: Snapshot to be used is ${snap_tar}"
+
+# create tmp directory and unpack snap
+mkdir -p ./tmp
+pushd ./tmp > /dev/null
+tar xvf ${snap_tar}
+
+# create each network
+virsh_networks=$(ls *.xml | grep -v baremetal)
+
+if [ -z "$virsh_networks" ]; then
+  echo "ERROR: no virsh networks found in snapshot unpack"
+  exit 1
+fi
+
+for network_def in ${virsh_networks}; do
+  sudo virsh net-create ${network_def}
+  network=$(echo ${network_def} | awk -F '.' '{print $1}')
+  if ! sudo virsh net-list | grep ${network}; then
+    sudo virsh net-start ${network}
+  fi
+  echo "Checking if OVS bridge is missing for network: ${network}"
+  if ! ovs-vsctl show | grep "br-${network}"; then
+    ovs-vsctl add-br br-${network}
+    echo "OVS Bridge created: br-${network}"
+    if [ "br-${network}" == 'br-admin' ]; then
+      echo "Configuring IP 192.0.2.99 on br-admin"
+      sudo ip addr add 192.0.2.99/24 dev br-admin
+      sudo ip link set up dev br-admin
+    elif [ "br-${network}" == 'br-external' ]; then
+      echo "Configuring IP 192.168.37.99 on br-external"
+      sudo ip addr add 192.168.37.99/24 dev br-external
+      sudo ip link set up dev br-external
+    fi
+  fi
+done
+
+echo "Virsh networks up: $(virsh net-list)"
+echo "Bringing up Overcloud VMs..."
+virsh_vm_defs=$(ls baremetal*.xml)
+
+if [ -z "$virsh_vm_defs" ]; then
+  echo "ERROR: no virsh VMs found in snapshot unpack"
+  exit 1
+fi
+
+for node_def in ${virsh_vm_defs}; do
+  sudo virsh define ${node_def}
+  node=$(echo ${node_def} | awk -F '.' '{print $1}')
+  sudo cp -f ${node}.qcow2 /var/lib/libvirt/images/
+  sudo virsh start ${node}
+  echo "Node: ${node} started"
+done
+
+echo "Checking overcloudrc"
+if ! stat overcloudrc; then
+  echo "ERROR: overcloudrc does not exist in snap unpack"
+  exit 1
+fi
+
+# copy overcloudrc for functest
+mkdir -p $HOME/cloner-info
+cp -f overcloudrc $HOME/cloner-info/
+
+admin_controller_ip=$(cat overcloudrc | grep -Eo "192.0.2.[0-9]+")
+netvirt_url="http://${admin_controller_ip}:8081/restconf/operational/network-topology:network-topology/topology/netvirt:1"
+
+source overcloudrc
+counter=1
+while [ "$counter" -le 10 ]; do
+  if curl --fail ${admin_controller_ip}:80; then
+    echo "Overcloud Horizon is up...Checking if OpenDaylight NetVirt is up..."
+    if curl --fail ${netvirt_url} > /dev/null; then
+      echo "OpenDaylight is up. Overcloud deployment complete"
+      exit 0
+    else
+      echo "OpenDaylight not yet up, try ${counter}"
+    fi
+  else
+    echo "Horizon/Apache not yet up, try ${counter}"
+  fi
+  counter=$((counter+1))
+  sleep 60
+done
+
+echo "ERROR: Deployment not up after 10 minutes...exiting."
+exit 1
diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh
index 64f13f4e6..89fd5ed36 100755
--- a/jjb/apex/apex-upload-artifact.sh
+++ b/jjb/apex/apex-upload-artifact.sh
@@ -73,7 +73,17 @@ gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION
 gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log
 }
 
-if gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then
+uploadsnap () {
+  # Uploads snapshot artifact and updated properties file
+  echo "Uploading snapshot artifacts"
+  gsutil cp $WORKSPACE/apex-csit-snap-`date +%Y-%m-%d`.tar.gz gs://$GS_URL/ > gsutil.iso.log
+  gsutil cp $WORKSPACE/snapshot.properties gs://$GS_URL/snapshot.properties > gsutil.latest.log
+  echo "Upload complete for Snapshot"
+}
+
+if grep csit $WORKSPACE; then
+  uploadsnap
+elif gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then
   echo "Signing Key avaliable"
   signiso
   uploadiso
diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index 9f9fffb70..fcf08ed86 100644
--- a/jjb/apex/apex.yml
+++ b/jjb/apex/apex.yml
@@ -12,6 +12,7 @@
         - 'apex-daily-colorado'
         - 'apex-build-colorado'
         - 'apex-deploy-baremetal-os-odl_l2-fdio-ha-colorado'
+        - 'apex-csit-promote-daily-{stream}'
 
 # stream:    branch with - in place of / (eg. stable-arno)
 # branch:    branch (eg. stable/arno)
@@ -37,11 +38,12 @@
         - 'os-odl_l2-sfc-noha'
         - 'os-odl_l3-nofeature-ha'
         - 'os-odl-bgpvpn-ha'
-        - 'os-odl-gluon-ha'
+        - 'os-odl-gluon-noha'
         - 'os-odl_l3-fdio-noha'
         - 'os-odl_l3-fdio-ha'
         - 'os-odl_l3-fdio_dvr-noha'
         - 'os-odl_l3-fdio_dvr-ha'
+        - 'os-odl_l3-csit-noha'
         - 'os-onos-nofeature-ha'
 
     platform:
@@ -234,6 +236,7 @@
             blocking-jobs:
                 - 'apex-daily.*'
                 - 'apex-verify.*'
+                - 'apex-csit.*'
 
     builders:
         - trigger-builds:
@@ -485,6 +488,7 @@
                 - 'apex-deploy.*'
                 - 'apex-build.*'
                 - 'apex-runner.*'
+                - 'apex-csit.*'
 
     triggers:
         - 'apex-{stream}'
@@ -711,6 +715,65 @@
                 failure-threshold: 'never'
                 unstable-threshold: 'FAILURE'
 
+# CSIT promote
+- job-template:
+    name: 'apex-csit-promote-daily-{stream}'
+
+    # Job template for promoting CSIT Snapshots
+    #
+    # Required Variables:
+    #     stream:    branch with - in place of / (eg. stable)
+    #     branch:    branch (eg. stable)
+    node: '{daily-slave}'
+
+    disabled: false
+
+    scm:
+        - git-scm
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - apex-parameter:
+            gs-pathname: '{gs-pathname}'
+
+    properties:
+        - build-blocker:
+            use-build-blocker: true
+            block-level: 'NODE'
+            blocking-jobs:
+                - 'apex-verify.*'
+                - 'apex-deploy.*'
+                - 'apex-build.*'
+                - 'apex-runner.*'
+                - 'apex-daily.*'
+
+    triggers:
+        - timed: '0 12 * * 0'
+
+    builders:
+        - 'apex-build'
+        - trigger-builds:
+            - project: 'apex-deploy-virtual-os-odl_l3-csit-noha-{stream}'
+              predefined-parameters: |
+                BUILD_DIRECTORY=apex-csit-promote-daily-{stream}
+                OPNFV_CLEAN=yes
+              git-revision: false
+              block: true
+              same-node: true
+        - trigger-builds:
+            - project: 'functest-apex-{daily-slave}-suite-{stream}'
+              predefined-parameters: |
+                DEPLOY_SCENARIO=os-odl_l3-nofeature-noha
+                FUNCTEST_SUITE_NAME=tempest_smoke_serial
+              block: true
+              same-node: true
+        - shell:
+            !include-raw-escape: ./apex-snapshot-create.sh
+        - shell:
+            !include-raw-escape: ./apex-upload-artifact.sh
+
 - job-template:
     name: 'apex-gs-clean-{stream}'
diff --git a/jjb/compass4nfv/compass-dovetail-jobs.yml b/jjb/compass4nfv/compass-dovetail-jobs.yml
new file mode 100644
index 000000000..3337cd0ca
--- /dev/null
+++ b/jjb/compass4nfv/compass-dovetail-jobs.yml
@@ -0,0 +1,209 @@
+- project:
+
+    name: 'compass-dovetail-jobs'
+    installer: 'compass'
+    project: 'compass4nfv'
+#----------------------------------
+# BRANCH ANCHORS
+#----------------------------------
+    colorado: &colorado
+        stream: colorado
+        branch: 'stable/{stream}'
+        gs-pathname: '/{stream}'
+        disabled: false
+        dovetail-branch: master
+#------------------------------------
+# POD, INSTALLER, AND BRANCH MAPPING
+#------------------------------------
+# CI PODs
+#------------------------------------
+    pod:
+        - baremetal:
+            slave-label: compass-baremetal
+            os-version: 'trusty'
+            <<: *colorado
+#-----------------------------------
+# scenarios
+#-----------------------------------
+    scenario:
+        - 'os-nosdn-nofeature-ha':
+            disabled: false
+            auto-trigger-name: 'weekly-trigger-disabled'
+
+    jobs:
+        - 'compass-{scenario}-{pod}-weekly-{stream}'
+        - 'compass-deploy-{pod}-weekly-{stream}'
+
+########################
+# job templates
+########################
+- job-template:
+    name: 'compass-{scenario}-{pod}-weekly-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    concurrent: false
+
+    properties:
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - 'compass-os-.*?-{pod}-daily-.*?'
+                - 'compass-os-.*?-{pod}-weekly-.*?'
+            block-level: 'NODE'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+    triggers:
+        - '{auto-trigger-name}'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - compass-ci-parameter:
+            installer: '{installer}'
+            gs-pathname: '{gs-pathname}'
+        - string:
+            name: DEPLOY_SCENARIO
+            default: '{scenario}'
+        - '{slave-label}-defaults'
+        - '{installer}-defaults'
+
+    triggers:
+        - '{auto-trigger-name}'
+
+    builders:
+        - description-setter:
+            description: "POD: $NODE_NAME"
+        - trigger-builds:
+            - project: 'compass-deploy-{pod}-weekly-{stream}'
+              current-parameters: false
+              predefined-parameters: |
+                DEPLOY_SCENARIO={scenario}
+                COMPASS_OS_VERSION={os-version}
+              same-node: true
+              block: true
+        - trigger-builds:
+            - project: 'dovetail-compass-{pod}-compliance_set-weekly-{stream}'
+              current-parameters: false
+              predefined-parameters:
+                DEPLOY_SCENARIO={scenario}
+              block: true
+              same-node: true
+              block-thresholds:
+                build-step-failure-threshold: 'never'
+                failure-threshold: 'never'
+                unstable-threshold: 'FAILURE'
+        - trigger-builds:
+            - project: 'dovetail-compass-{pod}-debug-weekly-{stream}'
+              current-parameters: false
+              predefined-parameters:
+                DEPLOY_SCENARIO={scenario}
+              block: true
+              same-node: true
+              block-thresholds:
+                build-step-failure-threshold: 'never'
+                failure-threshold: 'never'
+                unstable-threshold: 'FAILURE'
+        - trigger-builds:
+            - project: 'dovetail-compass-{pod}-proposed_tests-weekly-{stream}'
+              current-parameters: false
+              predefined-parameters:
+                DEPLOY_SCENARIO={scenario}
+              block: true
+              same-node: true
+              block-thresholds:
+                build-step-failure-threshold: 'never'
+                failure-threshold: 'never'
+                unstable-threshold: 'FAILURE'
+
+- job-template:
+    name: 'compass-deploy-{pod}-weekly-{stream}'
+
+    disabled: false
+
+    concurrent: true
+
+    properties:
+        - logrotate-default
+        - throttle:
+            enabled: true
+            max-total: 4
+            max-per-node: 1
+            option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - 'compass-deploy-{pod}-daily-.*?'
+                - 'compass-deploy-{pod}-weekly-.*'
+                - 'compass-verify-deploy-.*?'
+            block-level: 'NODE'
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+        - timeout:
+            timeout: 120
+            abort: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+            branch: '{branch}'
+        - compass-ci-parameter:
+            installer: '{installer}'
+            gs-pathname: '{gs-pathname}'
+        - '{slave-label}-defaults'
+        - '{installer}-defaults'
+
+    scm:
+        - git-scm
+
+    wrappers:
+        - build-name:
+            name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+
+    builders:
+        - description-setter:
+            description: "POD: $NODE_NAME"
+        - shell:
+            !include-raw-escape: ./compass-download-artifact.sh
+        - shell:
+            !include-raw-escape: ./compass-deploy.sh
+
+    publishers:
+        - archive:
+            artifacts: 'ansible.log'
+            allow-empty: 'true'
+            fingerprint: true
+
+########################
+# parameter macros
+########################
+- parameter:
+    name: compass-dovetail-parameter
+    parameters:
+        - string:
+            name: BUILD_DIRECTORY
+            default: $WORKSPACE/build_output
+            description: "Directory where the build artifact will be located upon the completion of the build."
+        - string:
+            name: GS_URL
+            default: '$GS_BASE{gs-pathname}'
+            description: "URL to Google Storage."
+        - choice:
+            name: COMPASS_OPENSTACK_VERSION
+            choices:
+                - 'mitaka'
+
+########################
+# trigger macros
+########################
+- trigger:
+    name: 'compass-os-nosdn-nofeature-ha-baremetal-weekly-colorado-trigger'
+    triggers:
+        - timed: ''
diff --git a/jjb/opera/opera-daily-jobs.yml b/jjb/opera/opera-daily-jobs.yml
index a990072fa..47aa2a43d 100644
--- a/jjb/opera/opera-daily-jobs.yml
+++ b/jjb/opera/opera-daily-jobs.yml
@@ -64,7 +64,8 @@
             branch: '{branch}'
         - 'huawei-virtual7-defaults'
         - 'compass-defaults'
-        - 'opera-compass-parameter'
+        - 'opera-compass-parameter':
+            gs-pathname: '{gs-pathname}'
 
     builders:
         - description-setter: