23 files changed, 670 insertions, 835 deletions
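The apex portion of this change moves each inline "shell: |" builder in jjb/apex/apex.yml into a standalone script that Jenkins Job Builder pulls in with !include-raw. A minimal sketch of the resulting builder macro, abridged from the apex-build hunk in the diff below (the workspace-cleanup, upload-artifact, gs-cleanup and deploy builders follow the same shape; this is a simplified excerpt, not the verbatim hunk):

- builder:
    name: 'apex-build'
    builders:
        - shell:
            !include-raw: ./apex-build.sh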
diff --git a/jjb/apex/apex-build.sh b/jjb/apex/apex-build.sh
new file mode 100755
index 000000000..ca1821abf
--- /dev/null
+++ b/jjb/apex/apex-build.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
+# log info to console
+echo "Starting the build of Apex using OpenStack Master packages. This will take some time..."
+echo "---------------------------------------------------------------------------------------"
+echo
+# create the cache directory if it doesn't exist
+[[ -d $CACHE_DIRECTORY ]] || mkdir -p $CACHE_DIRECTORY
+# set OPNFV_ARTIFACT_VERSION
+if echo $BUILD_TAG | grep "apex-verify" 1> /dev/null; then
+  if echo $GERRIT_BRANCH | grep "brahmaputra" 1> /dev/null; then
+    export OPNFV_ARTIFACT_VERSION=brahmaputra-dev${BUILD_NUMBER}
+    export BUILD_ARGS="-v $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY"
+  else
+    export OPNFV_ARTIFACT_VERSION=dev${BUILD_NUMBER}
+    export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY"
+  fi
+elif [ "$ARTIFACT_VERSION" == "daily" ]; then
+  if echo $GERRIT_BRANCH | grep "brahmaputra" 1> /dev/null; then
+    export OPNFV_ARTIFACT_VERSION=brahmaputra-$(date -u +"%Y-%m-%d")
+    export BUILD_ARGS="-v $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY"
+  else
+    export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d")
+    export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY --iso"
+  fi
+else
+  export OPNFV_ARTIFACT_VERSION=${ARTIFACT_VERSION}
+  if echo $GERRIT_BRANCH | grep "brahmaputra" 1> /dev/null; then
+    export BUILD_ARGS="-v $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY"
+  else
+    export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY --iso"
+  fi
+fi
+# clean for stable but doesn't matter for master
+if echo $GERRIT_BRANCH | grep "brahmaputra" 1> /dev/null; then
+  sudo opnfv-clean
+fi
+# start the build
+cd $WORKSPACE/ci
+./build.sh $BUILD_ARGS
+RPM_VERSION=$(grep Version: $BUILD_DIRECTORY/opnfv-apex.spec | awk '{ print $2 }')-$(echo $OPNFV_ARTIFACT_VERSION | tr -d '_-')
+# list the contents of BUILD_OUTPUT directory
+ls -al $BUILD_DIRECTORY
+# save information regarding artifact into file
+(
+  echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
+  echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
+  echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
+  echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
+  echo "OPNFV_ARTIFACT_MD5SUM=$(md5sum $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
+  echo "OPNFV_SRPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.src.rpm"
+  echo "OPNFV_RPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.noarch.rpm"
+  echo "OPNFV_RPM_MD5SUM=$(md5sum $BUILD_DIRECTORY/noarch/opnfv-apex-$RPM_VERSION.noarch.rpm | cut -d' ' -f1)"
+  echo "OPNFV_BUILD_URL=$BUILD_URL"
+) > $WORKSPACE/opnfv.properties
+echo "--------------------------------------------------------"
+echo "Done!"
diff --git a/jjb/apex/apex-deploy-baremetal.sh b/jjb/apex/apex-deploy-baremetal.sh
new file mode 100755
index 000000000..efb6561d7
--- /dev/null
+++ b/jjb/apex/apex-deploy-baremetal.sh
@@ -0,0 +1,84 @@
+#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
+
+# log info to console
+echo "Starting the Apex baremetal deployment."
+echo "--------------------------------------------------------"
+echo
+
+if [[ ! "$ARTIFACT_NAME" == "latest" ]]; then
+  # if artifact name is passed the pull a
+  # specific artifact from artifacts.opnfv.org
+  RPM_INSTALL_PATH=$GS_URL/$ARTIFACT_NAME
+else
+  if [[ $BUILD_DIRECTORY == *apex-build* ]]; then
+    BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY
+    echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY"
+  fi
+  if [[ -f ${BUILD_DIRECTORY}/../opnfv.properties ]]; then
+    # if opnfv.properties exists then use the
+    # local build. Source the file so we get local OPNFV vars
+    source ${BUILD_DIRECTORY}/../opnfv.properties
+    RPM_INSTALL_PATH=${BUILD_DIRECTORY}/$(basename $OPNFV_RPM_URL)
+  else
+    # no opnfv.properties means use the latest from artifacts.opnfv.org
+    # get the latest.properties to get the link to the latest artifact
+    curl -s -o $WORKSPACE/opnfv.properties http://$GS_URL/latest.properties
+    [[ -f opnfv.properties ]] || exit 1
+    # source the file so we get OPNFV vars
+    source opnfv.properties
+    RPM_INSTALL_PATH=$OPNFV_RPM_URL
+  fi
+fi
+
+if [ ! -e "$RPM_INSTALL_PATH" ]; then
+  RPM_INSTALL_PATH=http://${OPNFV_RPM_URL}
+fi
+
+RPM_LIST=$RPM_INSTALL_PATH
+for pkg in common undercloud; do
+  RPM_LIST+=" ${RPM_INSTALL_PATH/opnfv-apex/opnfv-apex-${pkg}}"
+done
+
+# update / install the new rpm
+if rpm -q opnfv-apex > /dev/null; then
+  if [ $(basename $OPNFV_RPM_URL) == $(rpm -q opnfv-apex).rpm ]; then
+    echo "RPM is already installed"
+  elif sudo yum update -y $RPM_LIST | grep "does not update installed package"; then
+    if ! sudo yum downgrade -y $RPM_LIST; then
+      sudo yum remove -y opnfv-undercloud opnfv-common
+      sudo yum downgrade -y $RPM_INSTALL_PATH
+    fi
+  fi
+else
+  sudo yum install -y $RPM_LIST;
+fi
+
+# cleanup environment before we start
+sudo opnfv-clean
+# initiate baremetal deployment
+if [ -e /etc/opnfv-apex/network_settings.yaml ]; then
+  if [ -n "$DEPLOY_SCENARIO" ]; then
+    echo "Deploy Scenario set to ${DEPLOY_SCENARIO}"
+    if [ -e /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml ]; then
+      sudo opnfv-deploy -i /root/inventory/pod_settings.yaml \
+        -d /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml \
+        -n /root/network/network_settings.yaml --debug
+    else
+      echo "File does not exist /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml"
+      exit 1
+    fi
+  else
+    echo "Deploy scenario not set!"
+    exit 1
+  fi
+else
+  echo "File /etc/opnfv-apex/network_settings.yaml does not exist!"
+  exit 1
+fi
+
+echo
+echo "--------------------------------------------------------"
+echo "Done!"
diff --git a/jjb/apex/apex-deploy-virtual.sh b/jjb/apex/apex-deploy-virtual.sh
new file mode 100755
index 000000000..4d9b03088
--- /dev/null
+++ b/jjb/apex/apex-deploy-virtual.sh
@@ -0,0 +1,151 @@
+#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
+
+# log info to console
+echo "Starting the Apex virtual deployment."
+echo "--------------------------------------------------------"
+echo
+
+if [[ $BUILD_DIRECTORY == *verify-master* ]]; then
+  cd $WORKSPACE/../${BUILD_DIRECTORY/build_output/}
+  WORKSPACE=$(pwd)
+  echo "WORKSPACE modified to $WORKSPACE"
+  cd $WORKSPACE/ci
+elif [[ !
"$ARTIFACT_NAME" == "latest" ]]; then + # if artifact name is passed the pull a + # specific artifact from artifacts.opnfv.org + RPM_INSTALL_PATH=$GS_URL + RPM_LIST=$RPM_INSTALL_PATH/$ARTIFACT_NAME +else + if [[ $BUILD_DIRECTORY == *verify* ]]; then + BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY + echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY" + elif [[ $BUILD_DIRECTORY == *apex-build* ]]; then + BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY + echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY" + fi + + if [[ -f ${BUILD_DIRECTORY}/../opnfv.properties ]]; then + # if opnfv.properties exists then use the + # local build. Source the file so we get local OPNFV vars + source ${BUILD_DIRECTORY}/../opnfv.properties + RPM_INSTALL_PATH=${BUILD_DIRECTORY}/noarch + RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL) + else + if [[ $BUILD_DIRECTORY == *verify* ]]; then + echo "BUILD_DIRECTORY is from a verify job, so will not use latest from URL" + echo "Check that the slave has opnfv.properties in $BUILD_DIRECTORY" + exit 1 + elif [[ $BUILD_DIRECTORY == *apex-build* ]]; then + echo "BUILD_DIRECTORY is from a daily job, so will not use latest from URL" + echo "Check that the slave has opnfv.properties in $BUILD_DIRECTORY" + exit 1 + fi + # no opnfv.properties means use the latest from artifacts.opnfv.org + # get the latest.properties to get the link to the latest artifact + curl -s -o $WORKSPACE/opnfv.properties http://$GS_URL/latest.properties + [[ -f opnfv.properties ]] || exit 1 + # source the file so we get OPNFV vars + source opnfv.properties + RPM_INSTALL_PATH=$(echo $OPNFV_RPM_URL | sed 's/'"$(basename $OPNFV_RPM_URL)"'//') + RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL) + fi +fi + +if [ -z "$DEPLOY_SCENARIO" ]; then + echo "Deploy scenario not set!" + exit 1 +fi + +# use local build for verify +if [[ $BUILD_DIRECTORY == *verify-master* ]]; then + if [ ! -e "${WORKSPACE}/build/lib" ]; then ln -s ${WORKSPACE}/lib ${WORKSPACE}/build/lib; fi + DEPLOY_CMD="CONFIG=${WORKSPACE}/build RESOURCES=${WORKSPACE}/build/images/ ./deploy.sh -c ${WORKSPACE}/build -r ${WORKSPACE}/build/images/" + DEPLOY_FILE="${WORKSPACE}/config/deploy/${DEPLOY_SCENARIO}.yaml" + NETWORK_FILE="${WORKSPACE}/config/network/network_settings.yaml" + # Make sure python34 is installed + if ! rpm -q python34 > /dev/null; then + sudo yum install -y epel-release + if ! sudo yum install -y python34; then + echo "Failed to install python34" + exit 1 + fi + fi + if ! rpm -q python34-PyYAML > /dev/null; then + sudo yum install -y epel-release + if ! sudo yum install -y python34-PyYAML; then + echo "Failed to install python34-PyYAML" + exit 1 + fi + fi + if ! rpm -q python34-setuptools > /dev/null; then + if ! 
sudo yum install -y python34-setuptools; then + echo "Failed to install python34-setuptools" + exit 1 + fi + fi + if [ -z ${PYTHONPATH:-} ]; then + export PYTHONPATH=${WORKSPACE}/lib/python + else + export PYTHONPATH=$PYTHONPATH:${WORKSPACE}/lib/python + fi +else + VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-[0-9]{8}') + for pkg in common undercloud opendaylight-sfc onos; do + RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}.noarch.rpm" + done + + # update / install the new rpm + if rpm -q opnfv-apex > /dev/null; then + INSTALLED_RPMS=$(rpm -qa | grep apex) + for x in $INSTALLED_RPMS; do + INSTALLED_RPM_VER=$(echo $x | grep -Eo '[0-9]+\.[0-9]+-[0-9]{8}') + # Does each RPM's version match the version required for deployment + if [ "$INSTALLED_RPM_VER" == "$VERSION_EXTENSION" ]; then + echo "RPM $x is already installed" + else + echo "RPM $x does not match version $VERSION_EXTENSION" + echo "Will upgrade/downgrade RPMs..." + # Try to upgrade/downgrade RPMS + if sudo yum update -y $RPM_LIST | grep "does not update installed package"; then + if ! sudo yum downgrade -y $RPM_LIST; then + sudo yum remove -y opnfv-apex-undercloud opnfv-apex-common opnfv-apex-opendaylight-sfc opnfv-apex-onos + if ! sudo yum downgrade -y $RPM_LIST; then + echo "Unable to downgrade RPMs: $RPM_LIST" + exit 1 + fi + fi + fi + break + fi + done + else + sudo yum install -y $RPM_LIST; + fi + DEPLOY_CMD=opnfv-deploy + DEPLOY_FILE="/etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml" + NETWORK_FILE="/etc/opnfv-apex/network_settings.yaml" + export RESOURCES="/var/opt/opnfv/images" + export CONFIG="/var/opt/opnfv" +fi + +if [ "$OPNFV_CLEAN" == 'yes' ]; then + if [[ $BUILD_DIRECTORY == *verify-master* ]]; then + sudo CONFIG=${WORKSPACE}/build ./clean.sh + else + sudo opnfv-clean + fi +fi +# initiate virtual deployment +echo "Deploy Scenario set to ${DEPLOY_SCENARIO}" +if [ -e $DEPLOY_FILE ]; then + sudo $DEPLOY_CMD -v -d ${DEPLOY_FILE} -n $NETWORK_FILE --debug +else + echo "File does not exist /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml" + exit 1 +fi +echo +echo "--------------------------------------------------------" +echo "Done!" diff --git a/jjb/apex/apex-gs-cleanup.sh b/jjb/apex/apex-gs-cleanup.sh new file mode 100755 index 000000000..1629aa85e --- /dev/null +++ b/jjb/apex/apex-gs-cleanup.sh @@ -0,0 +1,19 @@ +#!/bin/bash +set -o errexit +set -o nounset +set -o pipefail + +# log info to console +echo "Cleaning Google Storage" +echo "-----------------------" +echo + +thirty_days_ago=$(date -d "30 days ago" +"%Y%m%d") + +for i in $(gsutil ls gs://$GS_URL/*201?*); do + filedate=$(date -d "$(echo $i | grep -Eo 201[0-9]-?[0-9][0-9]-?[0-9][0-9])" +"%Y%m%d") + if [ $filedate -lt $thirty_days_ago ]; then + # gsutil indicates what it is removing so no need for output here + gsutil rm $i + fi +done diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh new file mode 100755 index 000000000..d148258c0 --- /dev/null +++ b/jjb/apex/apex-upload-artifact.sh @@ -0,0 +1,37 @@ +#!/bin/bash +set -o errexit +set -o nounset +set -o pipefail + +# log info to console +echo "Uploading the Apex artifact. This could take some time..." 
+echo "--------------------------------------------------------" +echo + +# source the opnfv.properties to get ARTIFACT_VERSION +source $WORKSPACE/opnfv.properties + +# upload artifact and additional files to google storage +gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1 +RPM_INSTALL_PATH=$BUILD_DIRECTORY/noarch +RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL) +VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//') +for pkg in common undercloud opendaylight-sfc onos; do + RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}" +done +SRPM_INSTALL_PATH=$BUILD_DIRECTORY +SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL) +for pkg in common undercloud opendaylight-sfc onos; do + SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}" +done +for artifact in $RPM_LIST $SRPM_LIST; do + gsutil cp $artifact gs://$GS_URL/$(basename $artifact) > gsutil.iso.log 2>&1 +done +gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1 +gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1 + +echo +echo "--------------------------------------------------------" +echo "Done!" +echo "ISO Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso" +echo "RPM Artifact is available as http://$GS_URL/$(basename $OPNFV_RPM_URL)" diff --git a/jjb/apex/apex-workspace-cleanup.sh b/jjb/apex/apex-workspace-cleanup.sh new file mode 100755 index 000000000..d2f71a562 --- /dev/null +++ b/jjb/apex/apex-workspace-cleanup.sh @@ -0,0 +1,7 @@ +#!/bin/bash +set -o errexit +set -o nounset +set -o pipefail + +# delete everything that is in $WORKSPACE +sudo /bin/rm -rf $WORKSPACE diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml index 115e75bf8..ed06113d5 100644 --- a/jjb/apex/apex.yml +++ b/jjb/apex/apex.yml @@ -732,386 +732,38 @@ - builder: name: 'apex-build' builders: - - shell: | - #!/bin/bash - set -o errexit - set -o nounset - set -o pipefail - # log info to console - echo "Starting the build of Apex using OpenStack Master packages. This will take some time..." 
- echo "---------------------------------------------------------------------------------------" - echo - # create the cache directory if it doesn't exist - [[ -d $CACHE_DIRECTORY ]] || mkdir -p $CACHE_DIRECTORY - # set OPNFV_ARTIFACT_VERSION - if echo $BUILD_TAG | grep "apex-verify" 1> /dev/null; then - if echo $GERRIT_BRANCH | grep "brahmaputra" 1> /dev/null; then - export OPNFV_ARTIFACT_VERSION=brahmaputra-dev${BUILD_NUMBER} - export BUILD_ARGS="-v $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY" - else - export OPNFV_ARTIFACT_VERSION=dev${BUILD_NUMBER} - export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY" - fi - elif [ "$ARTIFACT_VERSION" == "daily" ]; then - if echo $GERRIT_BRANCH | grep "brahmaputra" 1> /dev/null; then - export OPNFV_ARTIFACT_VERSION=brahmaputra-$(date -u +"%Y-%m-%d") - export BUILD_ARGS="-v $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY" - else - export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d") - export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY --iso" - fi - else - export OPNFV_ARTIFACT_VERSION=${ARTIFACT_VERSION} - if echo $GERRIT_BRANCH | grep "brahmaputra" 1> /dev/null; then - export BUILD_ARGS="-v $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY" - else - export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c file://$CACHE_DIRECTORY --iso" - fi - fi - # clean for stable but doesn't matter for master - if echo $GERRIT_BRANCH | grep "brahmaputra" 1> /dev/null; then - sudo opnfv-clean - fi - # start the build - cd $WORKSPACE/ci - ./build.sh $BUILD_ARGS - RPM_VERSION=$(grep Version: $BUILD_DIRECTORY/opnfv-apex.spec | awk '{ print $2 }')-$(echo $OPNFV_ARTIFACT_VERSION | tr -d '_-') - # list the contents of BUILD_OUTPUT directory - ls -al $BUILD_DIRECTORY - # save information regarding artifact into file - ( - echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION" - echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)" - echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)" - echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso" - echo "OPNFV_ARTIFACT_MD5SUM=$(md5sum $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)" - echo "OPNFV_SRPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.src.rpm" - echo "OPNFV_RPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.noarch.rpm" - echo "OPNFV_RPM_MD5SUM=$(md5sum $BUILD_DIRECTORY/noarch/opnfv-apex-$RPM_VERSION.noarch.rpm | cut -d' ' -f1)" - echo "OPNFV_BUILD_URL=$BUILD_URL" - ) > $WORKSPACE/opnfv.properties - echo "--------------------------------------------------------" - echo "Done!" + - shell: + !include-raw: ./apex-build.sh - builder: name: 'apex-workspace-cleanup' builders: - - shell: | - #!/bin/bash - set -o errexit - set -o nounset - set -o pipefail - - # delete everything that is in $WORKSPACE - sudo /bin/rm -rf $WORKSPACE + - shell: + !include-raw: ./apex-workspace-cleanup.sh - builder: name: 'apex-upload-artifact' builders: - - shell: | - #!/bin/bash - set -o errexit - set -o nounset - set -o pipefail - - # log info to console - echo "Uploading the Apex artifact. This could take some time..." 
- echo "--------------------------------------------------------" - echo - - # source the opnfv.properties to get ARTIFACT_VERSION - source $WORKSPACE/opnfv.properties - - # upload artifact and additional files to google storage - gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1 - RPM_INSTALL_PATH=$BUILD_DIRECTORY/noarch - RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL) - VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//') - for pkg in common undercloud opendaylight-sfc onos; do - RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}" - done - SRPM_INSTALL_PATH=$BUILD_DIRECTORY - SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL) - for pkg in common undercloud opendaylight-sfc onos; do - SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}" - done - for artifact in $RPM_LIST $SRPM_LIST; do - gsutil cp $artifact gs://$GS_URL/$(basename $artifact) > gsutil.iso.log 2>&1 - done - gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1 - gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1 - - echo - echo "--------------------------------------------------------" - echo "Done!" - echo "ISO Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso" - echo "RPM Artifact is available as http://$GS_URL/$(basename $OPNFV_RPM_URL)" + - shell: + !include-raw: ./apex-upload-artifact.sh - builder: name: 'apex-gs-cleanup' builders: - - shell: | - #!/bin/bash - set -o errexit - set -o nounset - set -o pipefail - - # log info to console - echo "Cleaning Google Storage" - echo "-----------------------" - echo - - thirty_days_ago=$(date -d "30 days ago" +"%Y%m%d") - - for i in $(gsutil ls gs://$GS_URL/*201?*); do - filedate=$(date -d "$(echo $i | grep -Eo 201[0-9]-?[0-9][0-9]-?[0-9][0-9])" +"%Y%m%d") - if [ $filedate -lt $thirty_days_ago ]; then - # gsutil indicates what it is removing so no need for output here - gsutil rm $i - fi - done + - shell: + !include-raw: ./apex-gs-cleanup.sh - builder: name: 'apex-deploy-virtual' builders: - - shell: | - #!/bin/bash - set -o errexit - set -o nounset - set -o pipefail - - # log info to console - echo "Starting the Apex virtual deployment." - echo "--------------------------------------------------------" - echo - - if [[ $BUILD_DIRECTORY == *verify-master* ]]; then - cd $WORKSPACE/../${BUILD_DIRECTORY/build_output/} - WORKSPACE=$(pwd) - echo "WORKSPACE modified to $WORKSPACE" - cd $WORKSPACE/ci - elif [[ ! "$ARTIFACT_NAME" == "latest" ]]; then - # if artifact name is passed the pull a - # specific artifact from artifacts.opnfv.org - RPM_INSTALL_PATH=$GS_URL - RPM_LIST=$RPM_INSTALL_PATH/$ARTIFACT_NAME - else - if [[ $BUILD_DIRECTORY == *verify* ]]; then - BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY - echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY" - elif [[ $BUILD_DIRECTORY == *apex-build* ]]; then - BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY - echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY" - fi - - if [[ -f ${BUILD_DIRECTORY}/../opnfv.properties ]]; then - # if opnfv.properties exists then use the - # local build. 
Source the file so we get local OPNFV vars - source ${BUILD_DIRECTORY}/../opnfv.properties - RPM_INSTALL_PATH=${BUILD_DIRECTORY}/noarch - RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL) - else - if [[ $BUILD_DIRECTORY == *verify* ]]; then - echo "BUILD_DIRECTORY is from a verify job, so will not use latest from URL" - echo "Check that the slave has opnfv.properties in $BUILD_DIRECTORY" - exit 1 - elif [[ $BUILD_DIRECTORY == *apex-build* ]]; then - echo "BUILD_DIRECTORY is from a daily job, so will not use latest from URL" - echo "Check that the slave has opnfv.properties in $BUILD_DIRECTORY" - exit 1 - fi - # no opnfv.properties means use the latest from artifacts.opnfv.org - # get the latest.properties to get the link to the latest artifact - curl -s -o $WORKSPACE/opnfv.properties http://$GS_URL/latest.properties - [[ -f opnfv.properties ]] || exit 1 - # source the file so we get OPNFV vars - source opnfv.properties - RPM_INSTALL_PATH=$(echo $OPNFV_RPM_URL | sed 's/'"$(basename $OPNFV_RPM_URL)"'//') - RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL) - fi - fi - - if [ -z "$DEPLOY_SCENARIO" ]; then - echo "Deploy scenario not set!" - exit 1 - fi - - # use local build for verify - if [[ $BUILD_DIRECTORY == *verify-master* ]]; then - if [ ! -e "${WORKSPACE}/build/lib" ]; then ln -s ${WORKSPACE}/lib ${WORKSPACE}/build/lib; fi - DEPLOY_CMD="CONFIG=${WORKSPACE}/build RESOURCES=${WORKSPACE}/build/images/ ./deploy.sh -c ${WORKSPACE}/build -r ${WORKSPACE}/build/images/" - DEPLOY_FILE="${WORKSPACE}/config/deploy/${DEPLOY_SCENARIO}.yaml" - NETWORK_FILE="${WORKSPACE}/config/network/network_settings.yaml" - # Make sure python34 is installed - if ! rpm -q python34 > /dev/null; then - sudo yum install -y epel-release - if ! sudo yum install -y python34; then - echo "Failed to install python34" - exit 1 - fi - fi - if ! rpm -q python34-PyYAML > /dev/null; then - sudo yum install -y epel-release - if ! sudo yum install -y python34-PyYAML; then - echo "Failed to install python34-PyYAML" - exit 1 - fi - fi - if ! rpm -q python34-setuptools > /dev/null; then - if ! sudo yum install -y python34-setuptools; then - echo "Failed to install python34-setuptools" - exit 1 - fi - fi - if [ -z ${PYTHONPATH:-} ]; then - export PYTHONPATH=${WORKSPACE}/lib/python - else - export PYTHONPATH=$PYTHONPATH:${WORKSPACE}/lib/python - fi - else - VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | sed 's/opnfv-apex-//') - for pkg in common undercloud opendaylight-sfc onos; do - RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}" - done - - # update / install the new rpm - if rpm -q opnfv-apex > /dev/null; then - INSTALLED_RPMS=$(rpm -qa | grep apex) - for x in $INSTALLED_RPMS; do - INSTALLED_RPM_VER=$(echo $x | sed 's/opnfv-apex-//').rpm - # Does each RPM's version match the version required for deployment - if [ "$INSTALLED_RPM_VER" == "$VERSION_EXTENSION" ]; then - echo "RPM $x is already installed" - else - echo "RPM $x does not match $VERSION_EXTENSION" - echo "Will upgrade/downgrade RPMs..." - # Try to upgrade/downgrade RPMS - if sudo yum update -y $RPM_LIST | grep "does not update installed package"; then - if ! 
sudo yum downgrade -y $RPM_LIST; then - sudo yum remove -y opnfv-apex-undercloud opnfv-apex-common opnfv-apex-opendaylight-sfc opnfv-apex-onos - sudo yum downgrade -y $RPM_INSTALL_PATH - fi - fi - break - fi - done - else - sudo yum install -y $RPM_LIST; - fi - DEPLOY_CMD=opnfv-deploy - DEPLOY_FILE="/etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml" - NETWORK_FILE="/etc/opnfv-apex/network_settings.yaml" - export RESOURCES="/var/opt/opnfv/images" - export CONFIG="/var/opt/opnfv" - fi - - if [ "$OPNFV_CLEAN" == 'yes' ]; then - if [[ $BUILD_DIRECTORY == *verify-master* ]]; then - sudo CONFIG=${WORKSPACE}/build ./clean.sh - else - sudo opnfv-clean - fi - fi - # initiate virtual deployment - echo "Deploy Scenario set to ${DEPLOY_SCENARIO}" - if [ -e $DEPLOY_FILE ]; then - sudo $DEPLOY_CMD -v -d ${DEPLOY_FILE} -n $NETWORK_FILE --debug - else - echo "File does not exist /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml" - exit 1 - fi - echo - echo "--------------------------------------------------------" - echo "Done!" + - shell: + !include-raw: ./apex-deploy-virtual.sh - builder: name: 'apex-deploy-baremetal' builders: - - shell: | - #!/bin/bash - set -o errexit - set -o nounset - set -o pipefail - - # log info to console - echo "Starting the Apex baremetal deployment." - echo "--------------------------------------------------------" - echo - - if [[ ! "$ARTIFACT_NAME" == "latest" ]]; then - # if artifact name is passed the pull a - # specific artifact from artifacts.opnfv.org - RPM_INSTALL_PATH=$GS_URL/$ARTIFACT_NAME - else - if [[ $BUILD_DIRECTORY == *apex-build* ]]; then - BUILD_DIRECTORY=$WORKSPACE/../$BUILD_DIRECTORY - echo "BUILD DIRECTORY modified to $BUILD_DIRECTORY" - fi - if [[ -f ${BUILD_DIRECTORY}/../opnfv.properties ]]; then - # if opnfv.properties exists then use the - # local build. Source the file so we get local OPNFV vars - source ${BUILD_DIRECTORY}/../opnfv.properties - RPM_INSTALL_PATH=${BUILD_DIRECTORY}/$(basename $OPNFV_RPM_URL) - else - # no opnfv.properties means use the latest from artifacts.opnfv.org - # get the latest.properties to get the link to the latest artifact - curl -s -o $WORKSPACE/opnfv.properties http://$GS_URL/latest.properties - [[ -f opnfv.properties ]] || exit 1 - # source the file so we get OPNFV vars - source opnfv.properties - RPM_INSTALL_PATH=$OPNFV_RPM_URL - fi - fi - - if [ ! -e "$RPM_INSTALL_PATH" ]; then - RPM_INSTALL_PATH=http://${OPNFV_RPM_URL} - fi - - RPM_LIST=$RPM_INSTALL_PATH - for pkg in common undercloud; do - RPM_LIST+=" ${RPM_INSTALL_PATH/opnfv-apex/opnfv-apex-${pkg}}" - done - - # update / install the new rpm - if rpm -q opnfv-apex > /dev/null; then - if [ $(basename $OPNFV_RPM_URL) == $(rpm -q opnfv-apex).rpm ]; then - echo "RPM is already installed" - elif sudo yum update -y $RPM_LIST | grep "does not update installed package"; then - if ! 
sudo yum downgrade -y $RPM_LIST; then - sudo yum remove -y opnfv-undercloud opnfv-common - sudo yum downgrade -y $RPM_INSTALL_PATH - fi - fi - else - sudo yum install -y $RPM_LIST; - fi - - # cleanup environment before we start - sudo opnfv-clean - # initiate baremetal deployment - if [ -e /etc/opnfv-apex/network_settings.yaml ]; then - if [ -n "$DEPLOY_SCENARIO" ]; then - echo "Deploy Scenario set to ${DEPLOY_SCENARIO}" - if [ -e /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml ]; then - sudo opnfv-deploy -i /root/inventory/pod_settings.yaml \ - -d /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml \ - -n /root/network/network_settings.yaml --debug - else - echo "File does not exist /etc/opnfv-apex/${DEPLOY_SCENARIO}.yaml" - exit 1 - fi - else - echo "Deploy scenario not set!" - exit 1 - fi - else - echo "File /etc/opnfv-apex/network_settings.yaml does not exist!" - exit 1 - fi - - echo - echo "--------------------------------------------------------" - echo "Done!" + - shell: + !include-raw: ./apex-deploy-baremetal.sh ####################### # trigger macros diff --git a/jjb/fuel/fuel-ci-jobs.yml b/jjb/fuel/fuel-ci-jobs.yml index f6c21e72e..5d2b91db8 100644 --- a/jjb/fuel/fuel-ci-jobs.yml +++ b/jjb/fuel/fuel-ci-jobs.yml @@ -18,32 +18,35 @@ stream: brahmaputra branch: 'stable/{stream}' gs-pathname: '/{stream}' + auto-trigger-name: 'daily-trigger-disabled' #-------------------------------- # POD, INSTALLER, AND BRANCH MAPPING #-------------------------------- -# brahmaputra +# CI PODs #-------------------------------- pod: - - lf-pod2: - <<: *brahmaputra - - ericsson-pod1: - <<: *brahmaputra - - ericsson-pod2: + - baremetal: + slave-label: fuel-baremetal + <<: *master + - virtual: + slave-label: fuel-virtual + <<: *master + - baremetal: + slave-label: fuel-baremetal <<: *brahmaputra - virtual: + slave-label: fuel-virtual <<: *brahmaputra -#-------------------------------- -# master -#-------------------------------- + +# just in case if things go wrong - lf-pod2: + slave-label: '{pod}' <<: *master - - ericsson-pod1: - <<: *master - - ericsson-pod2: - <<: *master - - virtual: - <<: *master +#-------------------------------- +# None-CI PODs +#-------------------------------- - zte-pod1: + slave-label: zte-pod1 <<: *master #-------------------------------- # scenarios @@ -51,44 +54,23 @@ scenario: # HA scenarios - 'os-nosdn-nofeature-ha': - auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger' + auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - 'os-odl_l2-nofeature-ha': - auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger' + auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - 'os-odl_l3-nofeature-ha': - auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger' + auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - 'os-onos-nofeature-ha': - auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger' + auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - 'os-odl_l2-bgpvpn-ha': - auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger' + auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - 'os-odl_l2-sfc-ha': - auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger' + auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - 'os-nosdn-kvm-ha': - auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger' + auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger' - 'os-nosdn-ovs-ha': - auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger' + auto-trigger-name: 
'fuel-{scenario}-{pod}-daily-{stream}-trigger' - 'os-nosdn-kvm_ovs-ha': - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - - 'os-nosdn-vlan-ha': - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - # NOHA scenarios - - 'os-odl_l2-nofeature-noha': - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - - 'os-odl_l3-nofeature-noha': - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - - 'os-onos-nofeature-noha': - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - - 'os-nosdn-nofeature-noha': - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - - 'os-nosdn-kvm-noha': - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - - 'os-nosdn-kvm_ovs-noha': - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - - 'os-nosdn-ovs-noha': - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - - 'os-odl_l2-bgpvpn-noha': - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - - 'os-odl_l2-sfc-noha': - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + auto-trigger-name: 'daily-trigger-disabled' jobs: - 'fuel-{scenario}-{pod}-daily-{stream}' @@ -105,7 +87,7 @@ properties: - throttle: enabled: true - max-total: 1 + max-total: 4 max-per-node: 1 - build-blocker: use-build-blocker: true @@ -124,7 +106,7 @@ - project-parameter: project: '{project}' - '{installer}-defaults' - - '{pod}-defaults': + - '{slave-label}-defaults': installer: '{installer}' - string: name: DEPLOY_SCENARIO @@ -184,7 +166,7 @@ - project-parameter: project: '{project}' - '{installer}-defaults' - - '{pod}-defaults': + - '{slave-label}-defaults': installer: '{installer}' - string: name: DEPLOY_SCENARIO @@ -236,352 +218,154 @@ # os-nosdn-nofeature-ha trigger # CI PODs #----------------------------------------------- -# LF POD2 Triggers running against master branch +# Triggers for job running on fuel-baremetal against master branch #----------------------------------------------- - trigger: - name: 'fuel-os-nosdn-nofeature-ha-lf-pod2-master-trigger' + name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-master-trigger' triggers: - timed: '0 0 * * *' - trigger: - name: 'fuel-os-odl_l2-nofeature-ha-lf-pod2-master-trigger' + name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-master-trigger' triggers: - timed: '0 3 * * *' - trigger: - name: 'fuel-os-odl_l3-nofeature-ha-lf-pod2-master-trigger' + name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-master-trigger' triggers: - timed: '0 6 * * *' - trigger: - name: 'fuel-os-onos-nofeature-ha-lf-pod2-master-trigger' + name: 'fuel-os-onos-nofeature-ha-baremetal-daily-master-trigger' triggers: - timed: '0 9 * * *' - trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-lf-pod2-master-trigger' + name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-master-trigger' triggers: - timed: '0 12 * * *' - trigger: - name: 'fuel-os-odl_l2-sfc-ha-lf-pod2-master-trigger' + name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-master-trigger' triggers: - timed: '0 15 * * *' - trigger: - name: 'fuel-os-nosdn-kvm-ha-lf-pod2-master-trigger' + name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-master-trigger' triggers: - timed: '0 18 * * *' - trigger: - name: 'fuel-os-nosdn-ovs-ha-lf-pod2-master-trigger' + name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-master-trigger' triggers: - timed: '0 21 * * *' - trigger: - name: 'fuel-os-nosdn-kvm-noha-lf-pod2-master-trigger' + name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-master-trigger' triggers: - timed: '' #----------------------------------------------- -# Ericsson POD2 Triggers running against brahmaputra branch +# Triggers for job running on lf-pod2 
against master branch #----------------------------------------------- - trigger: - name: 'fuel-os-nosdn-nofeature-ha-ericsson-pod2-brahmaputra-trigger' + name: 'fuel-os-nosdn-nofeature-ha-lf-pod2-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-nofeature-ha-ericsson-pod2-brahmaputra-trigger' + name: 'fuel-os-odl_l2-nofeature-ha-lf-pod2-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l3-nofeature-ha-ericsson-pod2-brahmaputra-trigger' + name: 'fuel-os-odl_l3-nofeature-ha-lf-pod2-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-onos-nofeature-ha-ericsson-pod2-brahmaputra-trigger' + name: 'fuel-os-onos-nofeature-ha-lf-pod2-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-ericsson-pod2-brahmaputra-trigger' + name: 'fuel-os-odl_l2-bgpvpn-ha-lf-pod2-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-sfc-ha-ericsson-pod2-brahmaputra-trigger' + name: 'fuel-os-odl_l2-sfc-ha-lf-pod2-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-kvm-ha-ericsson-pod2-brahmaputra-trigger' + name: 'fuel-os-nosdn-kvm-ha-lf-pod2-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-ovs-ha-ericsson-pod2-brahmaputra-trigger' + name: 'fuel-os-nosdn-ovs-ha-lf-pod2-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-kvm-noha-ericsson-pod2-brahmaputra-trigger' + name: 'fuel-os-nosdn-kvm-noha-lf-pod2-daily-master-trigger' triggers: - timed: '' - #----------------------------------------------- -# ZTE POD1 Triggers running against master branch +# Triggers for job running on fuel-virtual against master branch #----------------------------------------------- - trigger: - name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-master-trigger' + name: 'fuel-os-nosdn-nofeature-ha-virtual-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-master-trigger' - triggers: - - timed: '0 12 * * *' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-master-trigger' + name: 'fuel-os-odl_l2-nofeature-ha-virtual-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-onos-nofeature-ha-zte-pod1-master-trigger' + name: 'fuel-os-odl_l3-nofeature-ha-virtual-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-master-trigger' + name: 'fuel-os-onos-nofeature-ha-virtual-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-sfc-ha-zte-pod1-master-trigger' + name: 'fuel-os-odl_l2-bgpvpn-ha-virtual-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-kvm-ha-zte-pod1-master-trigger' + name: 'fuel-os-odl_l2-sfc-ha-virtual-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-ovs-ha-zte-pod1-master-trigger' - triggers: - - timed: '' -#----------------------------------------------- -# Triggers for other PODs -#----------------------------------------------- -- trigger: - name: 'fuel-os-nosdn-nofeature-ha-ericsson-pod1-master-trigger' + name: 'fuel-os-nosdn-kvm-ha-virtual-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-nofeature-ha-ericsson-pod2-master-trigger' + name: 'fuel-os-nosdn-ovs-ha-virtual-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-nofeature-ha-virtual-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-nofeature-ha-lf-pod2-brahmaputra-trigger' - triggers: - - 
timed: '' -- trigger: - name: 'fuel-os-nosdn-nofeature-ha-ericsson-pod1-brahmaputra-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-nofeature-ha-virtual-brahmaputra-trigger' - triggers: - - timed: '' - -# os-nosdn-kvm-ha trigger -- trigger: - name: 'fuel-os-nosdn-kvm-ha-ericsson-pod1-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-kvm-ha-ericsson-pod2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-kvm-ha-virtual-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-kvm-ha-lf-pod2-brahmaputra-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-kvm-ha-ericsson-pod1-brahmaputra-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-kvm-ha-virtual-brahmaputra-trigger' - triggers: - - timed: '' - -# os-odl_l2-nofeature-ha trigger -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-ericsson-pod1-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-ericsson-pod2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-virtual-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-lf-pod2-brahmaputra-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-ericsson-pod1-brahmaputra-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-nofeature-ha-virtual-brahmaputra-trigger' - triggers: - - timed: '' - -# os-odl_l3-nofeature-ha trigger -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-ericsson-pod1-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-ericsson-pod2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-virtual-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-lf-pod2-brahmaputra-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-ericsson-pod1-brahmaputra-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l3-nofeature-ha-virtual-brahmaputra-trigger' - triggers: - - timed: '' - -# os-onos-nofeature-ha trigger -- trigger: - name: 'fuel-os-onos-nofeature-ha-ericsson-pod1-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-onos-nofeature-ha-ericsson-pod2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-onos-nofeature-ha-virtual-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-onos-nofeature-ha-lf-pod2-brahmaputra-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-onos-nofeature-ha-ericsson-pod1-brahmaputra-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-onos-nofeature-ha-virtual-brahmaputra-trigger' - triggers: - - timed: '' - - -# os-nosdn-ovs-ha trigger -- trigger: - name: 'fuel-os-nosdn-ovs-ha-ericsson-pod1-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-ovs-ha-ericsson-pod2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-ovs-ha-virtual-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-ovs-ha-lf-pod2-brahmaputra-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-ovs-ha-ericsson-pod1-brahmaputra-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-nosdn-ovs-ha-virtual-brahmaputra-trigger' - triggers: - - timed: '' - -# os-odl_l2-bgpvpn-ha trigger -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-ericsson-pod1-master-trigger' - triggers: - - 
timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-ericsson-pod2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-virtual-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-lf-pod2-brahmaputra-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-ericsson-pod1-brahmaputra-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-bgpvpn-ha-virtual-brahmaputra-trigger' - triggers: - - timed: '' - -# os-odl_l2-sfc-ha trigger -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-ericsson-pod1-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-ericsson-pod2-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-virtual-master-trigger' - triggers: - - timed: '' -- trigger: - name: 'fuel-os-odl_l2-sfc-ha-lf-pod2-brahmaputra-trigger' + name: 'fuel-os-nosdn-kvm-noha-virtual-daily-master-trigger' triggers: - timed: '' +#----------------------------------------------- +# ZTE POD1 Triggers running against master branch +#----------------------------------------------- - trigger: - name: 'fuel-os-odl_l2-sfc-ha-ericsson-pod1-brahmaputra-trigger' + name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-odl_l2-sfc-ha-virtual-brahmaputra-trigger' + name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-daily-master-trigger' triggers: - - timed: '' - -# os-nosdn-kvm-ha trigger + - timed: '0 12 * * *' - trigger: - name: 'fuel-os-nosdn-kvm-noha-ericsson-pod1-master-trigger' + name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-kvm-noha-ericsson-pod2-master-trigger' + name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-kvm-noha-virtual-master-trigger' + name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-kvm-noha-lf-pod2-brahmaputra-trigger' + name: 'fuel-os-odl_l2-sfc-ha-zte-pod1-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-kvm-noha-ericsson-pod1-brahmaputra-trigger' + name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-master-trigger' triggers: - timed: '' - trigger: - name: 'fuel-os-nosdn-kvm-noha-virtual-brahmaputra-trigger' + name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-master-trigger' triggers: - timed: '' diff --git a/jjb/fuel/fuel-project-jobs.yml b/jjb/fuel/fuel-project-jobs.yml index eee588d24..4e59c01d8 100755..100644 --- a/jjb/fuel/fuel-project-jobs.yml +++ b/jjb/fuel/fuel-project-jobs.yml @@ -232,7 +232,7 @@ project: '{project}' - gerrit-parameter: branch: '{branch}' - - 'virtual-defaults': + - 'fuel-virtual-defaults': installer: '{installer}' - '{installer}-defaults' - fuel-project-parameter: diff --git a/jjb/functest/functest-ci-jobs.yml b/jjb/functest/functest-ci-jobs.yml index 5db40a3d7..1ccaa1b10 100644 --- a/jjb/functest/functest-ci-jobs.yml +++ b/jjb/functest/functest-ci-jobs.yml @@ -22,90 +22,119 @@ #-------------------------------- # POD, INSTALLER, AND BRANCH MAPPING #-------------------------------- -# brahmaputra +# Installers using labels +# CI PODs +# This section should only contain the installers +# that have been switched using labels for slaves #-------------------------------- pod: - - lf-pod1: - installer: apex - <<: *brahmaputra - - intel-pod5: - installer: joid - <<: *brahmaputra - - orange-pod2: - installer: joid - <<: *brahmaputra - 
- huawei-pod1: - installer: compass - <<: *brahmaputra - - ericsson-pod1: - installer: fuel - <<: *brahmaputra - - ericsson-pod2: - installer: fuel - <<: *brahmaputra - - lf-pod2: + - baremetal: + slave-label: fuel-baremetal installer: fuel - <<: *brahmaputra + <<: *master - virtual: + slave-label: fuel-virtual + installer: fuel + <<: *master + - baremetal: + slave-label: fuel-baremetal installer: fuel - <<: *brahmaputra - - huawei-virtual: - installer: compass <<: *brahmaputra - virtual: - installer: joid - <<: *brahmaputra - - arm-pod1: + slave-label: fuel-virtual installer: fuel <<: *brahmaputra -#-------------------------------- -# master -#-------------------------------- + +# just in case if things go wrong - lf-pod2: + slave-label: fuel-baremetal installer: fuel <<: *master - - intel-pod6: - installer: joid +#-------------------------------- +# Installers not using labels +# CI PODs +# This section should only contain the installers +# that have not been switched using labels for slaves +#-------------------------------- + - intel-pod7: + slave-label: '{pod}' + installer: apex <<: *master + - lf-pod1: + slave-label: '{pod}' + installer: apex + <<: *brahmaputra - intel-pod8: + slave-label: '{pod}' installer: compass <<: *master - - intel-pod7: - installer: apex - <<: *master - - ericsson-pod1: - installer: fuel + - huawei-pod1: + slave-label: '{pod}' + installer: compass + <<: *brahmaputra + - intel-pod6: + slave-label: '{pod}' + installer: joid <<: *master + - intel-pod5: + slave-label: '{pod}' + installer: joid + <<: *brahmaputra +#-------------------------------- +# None-CI PODs +#-------------------------------- + - orange-pod2: + slave-label: '{pod}' + installer: joid + <<: *brahmaputra - orange-pod5: + slave-label: '{pod}' installer: fuel <<: *master - orange-pod2: + slave-label: '{pod}' installer: joid <<: *master - - virtual: - installer: fuel - <<: *master - huawei-virtual: + slave-label: '{pod}' installer: compass <<: *master - virtual: + slave-label: '{pod}' installer: joid <<: *master - huawei-pod2: + slave-label: '{pod}' installer: compass <<: *master - huawei-pod1: + slave-label: '{pod}' installer: compass <<: *master - nokia-pod1: + slave-label: '{pod}' installer: apex <<: *master - arm-pod1: + slave-label: '{pod}' installer: fuel <<: *master - zte-pod1: + slave-label: '{pod}' installer: fuel <<: *master + - arm-pod1: + slave-label: '{pod}' + installer: fuel + <<: *brahmaputra + - huawei-virtual: + slave-label: '{pod}' + installer: compass + <<: *brahmaputra + - virtual: + slave-label: '{pod}' + installer: joid + <<: *brahmaputra #-------------------------------- testsuite: @@ -138,7 +167,7 @@ parameters: - project-parameter: project: '{project}' - - '{pod}-defaults' + - '{slave-label}-defaults' - '{installer}-defaults' - 'functest-{testsuite}-parameter' - string: diff --git a/jjb/opnfv/slave-params.yml b/jjb/opnfv/slave-params.yml index 7216f7cbc..ddeeaab09 100644 --- a/jjb/opnfv/slave-params.yml +++ b/jjb/opnfv/slave-params.yml @@ -1,4 +1,24 @@ - parameter: + name: 'fuel-baremetal-defaults' + parameters: + - label: + name: SLAVE_LABEL + default: 'fuel-baremetal' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' +- parameter: + name: 'fuel-virtual-defaults' + parameters: + - label: + name: SLAVE_LABEL + default: 'fuel-virtual' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' +- parameter: name: 
'lf-pod1-defaults' parameters: - node: @@ -32,14 +52,9 @@ default-slaves: - lf-pod2 - string: - name: INSTALLER_VERSION - default: stable - description: 'Version of the installer to deploy' - - string: name: GIT_BASE default: ssh://gerrit.opnfv.org:29418/$PROJECT description: 'Git URL to use on this Jenkins Slave' - - parameter: name: 'ericsson-pod1-defaults' parameters: @@ -51,18 +66,9 @@ default-slaves: - ericsson-pod1 - string: - name: INSTALLER_VERSION - default: latest - description: 'Version of the installer to deploy' - - string: name: GIT_BASE default: https://gerrit.opnfv.org/gerrit/$PROJECT description: 'Git URL to use on this Jenkins Slave' - - string: - name: POD_CONF_DIR - default: $WORKSPACE/deploy/templates/hardware_environment/conf/ericsson_montreal_lab/pod1 - description: 'Directory where POD configuration files are located.' - - parameter: name: 'ericsson-pod2-defaults' parameters: @@ -74,10 +80,6 @@ default-slaves: - ericsson-pod2 - string: - name: INSTALLER_VERSION - default: latest - description: 'Version of the installer to deploy' - - string: name: GIT_BASE default: https://gerrit.opnfv.org/gerrit/$PROJECT description: 'Git URL to use on this Jenkins Slave' diff --git a/jjb/parser/parser.yml b/jjb/parser/parser.yml index 5e603f026..2ab1af833 100644 --- a/jjb/parser/parser.yml +++ b/jjb/parser/parser.yml @@ -58,4 +58,15 @@ builders: - shell: | - echo "Nothing to verify!" + #/bin/bash + set -o errexit + set -o pipefail + set -o xtrace + export PATH=$PATH:/usr/local/bin/ + + # pep8 check parser/tosca2heat/tosca-parser + echo "Running tox -e pep8 on tosca2heat ..." + cd $WORKSPACE/parser/tosca2heat/tosca-parser + tox -e pep8 + cd $WORKSPACE/parser/tosca2heat/heat-translator + tox -e pep8 diff --git a/jjb/releng-macros.yaml b/jjb/releng-macros.yaml index d7f9be109..cdbb0bac3 100644 --- a/jjb/releng-macros.yaml +++ b/jjb/releng-macros.yaml @@ -57,6 +57,11 @@ fail: true - trigger: + name: 'daily-trigger-disabled' + triggers: + - timed: '' + +- trigger: name: 'brahmaputra-trigger-daily-disabled' triggers: - timed: '' diff --git a/jjb/yardstick/yardstick-ci-jobs.yml b/jjb/yardstick/yardstick-ci-jobs.yml index 55a07a520..c6630d7bc 100644 --- a/jjb/yardstick/yardstick-ci-jobs.yml +++ b/jjb/yardstick/yardstick-ci-jobs.yml @@ -22,152 +22,148 @@ #-------------------------------- # POD, INSTALLER, AND BRANCH MAPPING #-------------------------------- -# brahmaputra +# Installers using labels +# CI PODs +# This section should only contain the installers +# that have been switched using labels for slaves #-------------------------------- pod: - - lf-pod1: - installer: apex - suite: daily - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - <<: *brahmaputra - - lf-pod2: + - baremetal: + slave-label: fuel-baremetal installer: fuel - suite: daily auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - <<: *brahmaputra - - intel-pod5: - installer: joid - suite: daily - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - <<: *brahmaputra - - orange-pod2: - installer: joid - suite: daily - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - <<: *brahmaputra - - huawei-pod1: - installer: compass - suite: daily - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - <<: *brahmaputra - - ericsson-pod1: + <<: *master + - virtual: + slave-label: fuel-virtual installer: fuel - suite: daily auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - <<: *brahmaputra - - ericsson-pod2: + <<: *master + - baremetal: + slave-label: fuel-baremetal installer: fuel - suite: daily 
auto-trigger-name: 'brahmaputra-trigger-daily-disabled' <<: *brahmaputra - virtual: + slave-label: fuel-virtual installer: fuel - suite: daily - auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - <<: *brahmaputra - - huawei-virtual: - installer: compass - suite: daily auto-trigger-name: 'brahmaputra-trigger-daily-disabled' <<: *brahmaputra - - virtual: - installer: joid - suite: daily + +# just in case if things go wrong + - lf-pod2: + slave-label: '{pod}' + installer: fuel auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - <<: *brahmaputra + <<: *master #-------------------------------- -# master +# Installers not using labels +# CI PODs +# This section should only contain the installers +# that have not been switched using labels for slaves #-------------------------------- - lf-pod1: + slave-label: '{pod}' installer: apex - suite: daily auto-trigger-name: 'brahmaputra-trigger-daily-disabled' <<: *master - - lf-pod2: - installer: fuel - suite: daily + - lf-pod1: + slave-label: '{pod}' + installer: apex auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - <<: *master - - ericsson-pod1: - installer: fuel - suite: daily + <<: *brahmaputra + - intel-pod8: + slave-label: '{pod}' + installer: compass auto-trigger-name: 'brahmaputra-trigger-daily-disabled' <<: *master - - ericsson-pod1: - installer: fuel - suite: vtcdaily - auto-trigger-name: 'yardstick-vtcdaily-ericsson-pod1-trigger' - <<: *master - - ericsson-pod2: - installer: fuel - suite: daily + - huawei-pod1: + slave-label: '{pod}' + installer: compass auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - <<: *master + <<: *brahmaputra - intel-pod6: + slave-label: '{pod}' installer: joid - suite: daily auto-trigger-name: 'brahmaputra-trigger-daily-disabled' <<: *master - - intel-pod8: - installer: compass - suite: daily + - intel-pod5: + slave-label: '{pod}' + installer: joid auto-trigger-name: 'brahmaputra-trigger-daily-disabled' - <<: *master + <<: *brahmaputra +#-------------------------------- +# None-CI PODs +#-------------------------------- + - orange-pod2: + slave-label: '{pod}' + installer: joid + auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + <<: *brahmaputra - zte-pod1: + slave-label: '{pod}' installer: fuel - suite: daily auto-trigger-name: 'brahmaputra-trigger-daily-disabled' <<: *master - orange-pod2: + slave-label: '{pod}' installer: joid - suite: daily auto-trigger-name: 'brahmaputra-trigger-daily-disabled' <<: *master - huawei-pod1: + slave-label: '{pod}' installer: compass suite: daily auto-trigger-name: 'brahmaputra-trigger-daily-disabled' <<: *master - huawei-pod2: + slave-label: '{pod}' installer: compass - suite: daily auto-trigger-name: 'brahmaputra-trigger-daily-disabled' <<: *master - - virtual: - installer: fuel - suite: daily auto-trigger-name: 'brahmaputra-trigger-daily-disabled' <<: *master - huawei-virtual: + slave-label: '{pod}' installer: compass - suite: daily auto-trigger-name: 'brahmaputra-trigger-daily-disabled' <<: *master - huawei-pod3: + slave-label: '{pod}' installer: compass - suite: daily auto-trigger-name: 'yardstick-daily-huawei-pod3-trigger' <<: *master - huawei-pod4: + slave-label: '{pod}' installer: compass - suite: daily auto-trigger-name: 'yardstick-daily-huawei-pod4-trigger' <<: *master - virtual: + slave-label: '{pod}' installer: joid - suite: daily auto-trigger-name: 'brahmaputra-trigger-daily-disabled' <<: *master + - huawei-virtual: + slave-label: '{pod}' + installer: compass + auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + <<: *brahmaputra 
+ - virtual: + slave-label: '{pod}' + installer: joid + auto-trigger-name: 'brahmaputra-trigger-daily-disabled' + <<: *brahmaputra #-------------------------------- + testsuite: + - 'daily' jobs: - - 'yardstick-{installer}-{pod}-{suite}-{stream}' + - 'yardstick-{installer}-{pod}-{testsuite}-{stream}' ################################ # job templates ################################ - job-template: - name: 'yardstick-{installer}-{pod}-{suite}-{stream}' + name: 'yardstick-{installer}-{pod}-{testsuite}-{stream}' disabled: false @@ -191,9 +187,9 @@ parameters: - project-parameter: project: '{project}' - - '{pod}-defaults' + - '{slave-label}-defaults' - '{installer}-defaults' - - 'yardstick-params-{pod}' + - 'yardstick-params-{slave-label}' - string: name: DEPLOY_SCENARIO default: 'os-odl_l2-nofeature-ha' @@ -203,7 +199,7 @@ description: 'Tag to pull docker image' - string: name: YARDSTICK_SUITE_NAME - default: opnfv_${{NODE_NAME}}_{suite}.yaml + default: opnfv_${{NODE_NAME}}_{testsuite}.yaml description: 'Path to test suite' - string: name: CI_DEBUG @@ -219,7 +215,7 @@ builders: - 'yardstick-cleanup' #- 'yardstick-fetch-os-creds' - - 'yardstick-{suite}' + - 'yardstick-{testsuite}' publishers: - email: @@ -277,9 +273,8 @@ name: YARDSTICK_DB_BACKEND default: '' description: 'Arguments to use in order to choose the backend DB' - - parameter: - name: 'yardstick-params-ericsson-pod1' + name: 'yardstick-params-lf-pod1' parameters: - string: name: YARDSTICK_DB_BACKEND @@ -287,15 +282,14 @@ description: 'Arguments to use in order to choose the backend DB' - parameter: - name: 'yardstick-params-ericsson-pod2' + name: 'yardstick-params-lf-pod2' parameters: - string: name: YARDSTICK_DB_BACKEND default: '-i 104.197.68.199:8086' description: 'Arguments to use in order to choose the backend DB' - - parameter: - name: 'yardstick-params-lf-pod1' + name: 'yardstick-params-fuel-baremetal' parameters: - string: name: YARDSTICK_DB_BACKEND @@ -303,11 +297,11 @@ description: 'Arguments to use in order to choose the backend DB' - parameter: - name: 'yardstick-params-lf-pod2' + name: 'yardstick-params-fuel-virtual' parameters: - string: name: YARDSTICK_DB_BACKEND - default: '-i 104.197.68.199:8086' + default: '' description: 'Arguments to use in order to choose the backend DB' - parameter: @@ -379,11 +373,6 @@ ####################### # trigger for PODs to only run yardstick test suites - trigger: - name: 'yardstick-vtcdaily-ericsson-pod1-trigger' - triggers: - - timed: '0 1 * * *' - -- trigger: name: 'yardstick-daily-huawei-pod3-trigger' triggers: - timed: '0 1 * * *' diff --git a/utils/test/result_collection_api/dashboard/bottlenecks2Dashboard.py b/utils/test/result_collection_api/dashboard/bottlenecks2Dashboard.py index 9a7e4ce1b..2e106bec8 100755 --- a/utils/test/result_collection_api/dashboard/bottlenecks2Dashboard.py +++ b/utils/test/result_collection_api/dashboard/bottlenecks2Dashboard.py @@ -67,7 +67,7 @@ def format_rubbos_for_dashboard(results): #new_element = [] #for each_result in results: # throughput_data = [record['throughput'] for record in each_result['details']] - # new_element.append({'x': each_result['creation_date'], + # new_element.append({'x': each_result['start_date'], # 'y': max(throughput_data)}) #test_data.append({'name': "Rubbos max throughput", diff --git a/utils/test/result_collection_api/dashboard/doctor2Dashboard.py b/utils/test/result_collection_api/dashboard/doctor2Dashboard.py index eba35b57b..38b23abb4 100644 --- a/utils/test/result_collection_api/dashboard/doctor2Dashboard.py +++ 
diff --git a/utils/test/result_collection_api/dashboard/bottlenecks2Dashboard.py b/utils/test/result_collection_api/dashboard/bottlenecks2Dashboard.py
index 9a7e4ce1b..2e106bec8 100755
--- a/utils/test/result_collection_api/dashboard/bottlenecks2Dashboard.py
+++ b/utils/test/result_collection_api/dashboard/bottlenecks2Dashboard.py
@@ -67,7 +67,7 @@ def format_rubbos_for_dashboard(results):
     #new_element = []
     #for each_result in results:
     #    throughput_data = [record['throughput'] for record in each_result['details']]
-    #    new_element.append({'x': each_result['creation_date'],
+    #    new_element.append({'x': each_result['start_date'],
     #                        'y': max(throughput_data)})

     #test_data.append({'name': "Rubbos max throughput",
diff --git a/utils/test/result_collection_api/dashboard/doctor2Dashboard.py b/utils/test/result_collection_api/dashboard/doctor2Dashboard.py
index eba35b57b..38b23abb4 100644
--- a/utils/test/result_collection_api/dashboard/doctor2Dashboard.py
+++ b/utils/test/result_collection_api/dashboard/doctor2Dashboard.py
@@ -78,7 +78,7 @@ def format_doctor_notification_case_for_dashboard(results):
     # consider only seconds => 09
     for data in results:
         t = data['details']['duration']
-        new_element.append({'x': data['creation_date'],
+        new_element.append({'x': data['start_date'],
                             'y': t})

     test_data.append({'name': "doctor-notification duration ",
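All of the dashboard converters touched below follow the same shape once the switch is applied: every plotted point takes its x value from the result's start_date instead of the server-stamped creation_date. A minimal sketch of that pattern; the function name and the 'info'/'data_set' wrapper are assumptions for illustration, only the start_date key comes from this patch.

    # Minimal sketch of a dashboard formatter after this change.
    def format_duration_for_dashboard(results):
        new_element = []
        for data in results:
            # x axis keyed on the reported start_date of the run
            new_element.append({'x': data['start_date'],
                                'y': data['details']['duration']})
        return [{'name': "case duration",
                 'info': {'type': "graph",
                          'xlabel': 'time',
                          'ylabel': 'duration (s)'},
                 'data_set': new_element}]

    sample = [{'start_date': '2016-05-23 07:16:09', 'details': {'duration': 120}}]
    print(format_duration_for_dashboard(sample))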
diff --git a/utils/test/result_collection_api/dashboard/functest2Dashboard.py b/utils/test/result_collection_api/dashboard/functest2Dashboard.py
index 379b93279..86521b984 100644
--- a/utils/test/result_collection_api/dashboard/functest2Dashboard.py
+++ b/utils/test/result_collection_api/dashboard/functest2Dashboard.py
@@ -89,7 +89,7 @@ def format_vIMS_for_dashboard(results):

     new_element = []
     for data in results:
-        new_element.append({'x': data['creation_date'],
+        new_element.append({'x': data['start_date'],
                             'y1': data['details']['orchestrator']['duration'],
                             'y2': data['details']['vIMS']['duration'],
                             'y3': data['details']['sig_test']['duration']})
@@ -127,7 +127,7 @@
         except:
             nbTests = 0

-        new_element.append({'x': data['creation_date'],
+        new_element.append({'x': data['start_date'],
                             'y1': nbTests,
                             'y2': nbFailures,
                             'y3': nbSkipped})
@@ -181,7 +181,7 @@ def format_Tempest_for_dashboard(results):
     # ********************************
     new_element = []
     for data in results:
-        new_element.append({'x': data['creation_date'],
+        new_element.append({'x': data['start_date'],
                             'y': data['details']['duration']})

     test_data.append({'name': "Tempest duration",
@@ -194,7 +194,7 @@
     # ***************************************
     new_element = []
     for data in results:
-        new_element.append({'x': data['creation_date'],
+        new_element.append({'x': data['start_date'],
                             'y1': data['details']['tests'],
                             'y2': data['details']['failures']})

@@ -229,7 +229,7 @@
         except:
             success_rate = 0

-        new_element.append({'x': data['creation_date'],
+        new_element.append({'x': data['start_date'],
                             'y1': success_rate})

     test_data.append({'name': "Tempest success rate",
@@ -257,7 +257,7 @@
         for odl in odl_results:
             if (odl['test_status']['@status'] == "FAIL"):
                 nbFailures += 1
-        new_element.append({'x': data['creation_date'],
+        new_element.append({'x': data['start_date'],
                             'y1': len(odl_results),
                             'y2': nbFailures})

@@ -288,7 +288,7 @@
             new_duration = int(datetime.timedelta(hours=int(h),
                                                   minutes=int(m),
                                                   seconds=int(s)).total_seconds())
-            new_element.append({'x': data['creation_date'],
+            new_element.append({'x': data['start_date'],
                                 'y': new_duration})

     test_data.append({'name': "ONOS FUNCvirNet duration ",
@@ -307,7 +307,7 @@
         for onos in onos_results:
             if (onos['Case result'] == "FAIL"):
                 nbFailures += 1
-        new_element.append({'x': data['creation_date'],
+        new_element.append({'x': data['start_date'],
                             'y1': len(onos_results),
                             'y2': nbFailures})

@@ -331,7 +331,7 @@
             new_duration = int(datetime.timedelta(hours=int(h),
                                                   minutes=int(m),
                                                   seconds=int(s)).total_seconds())
-            new_element.append({'x': data['creation_date'],
+            new_element.append({'x': data['start_date'],
                                 'y': new_duration})

     test_data.append({'name': "ONOS FUNCvirNetL3 duration",
@@ -350,7 +350,7 @@
         for onos in onos_results:
             if (onos['Case result'] == "FAIL"):
                 nbFailures += 1
-        new_element.append({'x': data['creation_date'],
+        new_element.append({'x': data['start_date'],
                             'y1': len(onos_results),
                             'y2': nbFailures})

@@ -373,7 +373,7 @@ def format_Rally_for_dashboard(results):
     new_element = []
     for data in results:
         summary_cursor = len(data['details']) - 1
-        new_element.append({'x': data['creation_date'],
+        new_element.append({'x': data['start_date'],
                             'y': int(data['details'][summary_cursor]['summary']['duration'])})

     test_data.append({'name': "rally duration",
@@ -386,7 +386,7 @@
     # ********************************
     new_element = []
     for data in results:
-        new_element.append({'x': data['creation_date'],
+        new_element.append({'x': data['start_date'],
                             'y': float(data['details'][summary_cursor]['summary']['nb success'])})

     test_data.append({'name': "rally success rate",
@@ -408,7 +408,7 @@ def format_vPing_for_dashboard(results):
     # ********************************
     new_element = []
     for data in results:
-        new_element.append({'x': data['creation_date'],
+        new_element.append({'x': data['start_date'],
                             'y': data['details']['duration']})

     test_data.append({'name': "vPing duration",
@@ -445,7 +445,7 @@ def format_vPing_userdata_for_dashboard(results):
     # ********************************
     new_element = []
     for data in results:
-        new_element.append({'x': data['creation_date'],
+        new_element.append({'x': data['start_date'],
                             'y': data['details']['duration']})

     test_data.append({'name': "vPing_userdata duration",
diff --git a/utils/test/result_collection_api/dashboard/qtip2Dashboard.py b/utils/test/result_collection_api/dashboard/qtip2Dashboard.py
index 0112945b1..6ceccd374 100644
--- a/utils/test/result_collection_api/dashboard/qtip2Dashboard.py
+++ b/utils/test/result_collection_api/dashboard/qtip2Dashboard.py
@@ -93,7 +93,7 @@ def _get_results(db_url, testcase):
                 rawresults = datajson['test_results'][x]['details']
                 index = rawresults['index']

-                resultarray[str(datajson['test_results'][x]['creation_date'])]=index
+                resultarray[str(datajson['test_results'][x]['start_date'])]=index

     return resultarray
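The ONOS hunks above keep converting an "H:M:S" duration string into seconds before plotting it against start_date. Pulled out of its surrounding loop, that conversion is just the following; the helper name and sample value are illustrative, the timedelta arithmetic is the same as in the diff.

    # Standalone version of the duration conversion used in the ONOS hunks.
    import datetime

    def duration_to_seconds(duration):
        # "H:M:S" -> total seconds as an int
        h, m, s = duration.split(':')
        return int(datetime.timedelta(hours=int(h),
                                      minutes=int(m),
                                      seconds=int(s)).total_seconds())

    print(duration_to_seconds('0:05:30'))  # 330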
diff --git a/utils/test/result_collection_api/dashboard/vsperf2Dashboard.py b/utils/test/result_collection_api/dashboard/vsperf2Dashboard.py
index 323d3915c..5a6882da4 100755
--- a/utils/test/result_collection_api/dashboard/vsperf2Dashboard.py
+++ b/utils/test/result_collection_api/dashboard/vsperf2Dashboard.py
@@ -79,7 +79,7 @@ def format_common_for_dashboard(case, results):
     # ********************************
     new_element = []
     for data in results:
-        new_element.append({'x': data['creation_date'],
+        new_element.append({'x': data['start_date'],
                             'y1': data['details']['64'],
                             'y2': data['details']['128'],
                             'y3': data['details']['512'],
@@ -105,8 +105,8 @@
 import os

 def _test():
-    ans = [{'creation_date': '2015-09-12', 'project_name': 'vsperf', 'version': 'ovs_master', 'pod_name': 'pod1-vsperf', 'case_name': 'tput_ovsdpdk', 'installer': 'build_sie', 'details': {'64': '26.804', '1024': '1097.284', '512': '178.137', '1518': '12635.860', '128': '100.564'}},
-           {'creation_date': '2015-09-33', 'project_name': 'vsperf', 'version': 'ovs_master', 'pod_name': 'pod1-vsperf', 'case_name': 'tput_ovsdpdk', 'installer': 'build_sie', 'details': {'64': '16.804', '1024': '1087.284', '512': '168.137', '1518': '12625.860', '128': '99.564'}}]
+    ans = [{'start_date': '2015-09-12', 'project_name': 'vsperf', 'version': 'ovs_master', 'pod_name': 'pod1-vsperf', 'case_name': 'tput_ovsdpdk', 'installer': 'build_sie', 'details': {'64': '26.804', '1024': '1097.284', '512': '178.137', '1518': '12635.860', '128': '100.564'}},
+           {'start_date': '2015-09-33', 'project_name': 'vsperf', 'version': 'ovs_master', 'pod_name': 'pod1-vsperf', 'case_name': 'tput_ovsdpdk', 'installer': 'build_sie', 'details': {'64': '16.804', '1024': '1087.284', '512': '168.137', '1518': '12625.860', '128': '99.564'}}]

     result = format_vsperf_for_dashboard("pvp_cont_ovsdpdkcuse", ans)
     print result
diff --git a/utils/test/result_collection_api/dashboard/yardstick2Dashboard.py b/utils/test/result_collection_api/dashboard/yardstick2Dashboard.py
index 20b086496..4f022d5b9 100644
--- a/utils/test/result_collection_api/dashboard/yardstick2Dashboard.py
+++ b/utils/test/result_collection_api/dashboard/yardstick2Dashboard.py
@@ -84,7 +84,7 @@ def format_Ping_for_dashboard(results):
                    if "benchmark" in record]
         avg_rtt = sum(records) / len(records)

-        new_element.append({'x': data['creation_date'],
+        new_element.append({'x': data['start_date'],
                             'y': avg_rtt})

     test_data.append({'name': "ping duration",
diff --git a/utils/test/result_collection_api/resources/handlers.py b/utils/test/result_collection_api/resources/handlers.py
index 6747767ea..435334341 100644
--- a/utils/test/result_collection_api/resources/handlers.py
+++ b/utils/test/result_collection_api/resources/handlers.py
@@ -620,9 +620,6 @@ class TestResultsHandler(GenericApiHandler):
                 "Could not find testcase [{}] "
                 .format(result.case_name))

-        # convert payload to object
-        result.creation_date = datetime.now()
-
         _id = yield self.db.results.insert(result.format(), check_keys=False)
         self.finish_request(self._create_response(_id))
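With the handler above no longer stamping creation_date at insert time, the timestamps have to arrive in the payload itself. A rough sketch of what a reporter might send; the endpoint URL, pod/case values, and the extra payload keys are assumptions for illustration, only start_date and stop_date come from this patch.

    # Hypothetical client-side report with explicit start/stop timestamps.
    import datetime
    import json
    import urllib.request

    start = datetime.datetime.now()
    # ... run the test case ...
    stop = datetime.datetime.now()

    payload = {
        "project_name": "functest",          # illustrative values
        "case_name": "vPing",
        "pod_name": "example-pod",
        "installer": "fuel",
        "version": "master",
        "start_date": str(start),
        "stop_date": str(stop),
        "details": {"duration": (stop - start).total_seconds(), "status": "OK"},
    }

    # Assumed endpoint, for illustration only.
    req = urllib.request.Request("http://testresults.example.org/api/v1/results",
                                 data=json.dumps(payload).encode(),
                                 headers={"Content-Type": "application/json"})
    urllib.request.urlopen(req)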
diff --git a/utils/test/result_collection_api/resources/result_models.py b/utils/test/result_collection_api/resources/result_models.py
index 795621ab1..15684e229 100644
--- a/utils/test/result_collection_api/resources/result_models.py
+++ b/utils/test/result_collection_api/resources/result_models.py
@@ -6,7 +6,8 @@ class ResultCreateRequest(object):
                  case_name=None,
                  installer=None,
                  version=None,
-                 description=None,
+                 start_date=None,
+                 stop_date=None,
                  details=None,
                  build_tag=None,
                  scenario=None,
@@ -17,7 +18,8 @@
         self.case_name = case_name
         self.installer = installer
         self.version = version
-        self.description = description
+        self.start_date = start_date
+        self.stop_date = stop_date
         self.details = details
         self.build_tag = build_tag
         self.scenario = scenario
@@ -31,7 +33,8 @@
             "case_name": self.case_name,
             "installer": self.installer,
             "version": self.version,
-            "description": self.description,
+            "start_date": self.start_date,
+            "stop_date": self.stop_date,
             "details": self.details,
             "build_tag": self.build_tag,
             "scenario": self.scenario,
@@ -50,8 +53,8 @@
         self.pod_name = None
         self.installer = None
         self.version = None
-        self.description = None
-        self.creation_date = None
+        self.start_date = None
+        self.stop_date = None
         self.details = None
         self.build_tag = None
         self.scenario = None
@@ -70,7 +73,8 @@
         t.pod_name = a_dict.get('pod_name')
         t.project_name = a_dict.get('project_name')
         t.description = a_dict.get('description')
-        t.creation_date = str(a_dict.get('creation_date'))
+        t.start_date = str(a_dict.get('start_date'))
+        t.stop_date = str(a_dict.get('stop_date'))
         t.details = a_dict.get('details')
         t.version = a_dict.get('version')
         t.installer = a_dict.get('installer')
@@ -98,7 +102,8 @@
             "project_name": self.project_name,
             "pod_name": self.pod_name,
             "description": self.description,
-            "creation_date": str(self.creation_date),
+            "start_date": str(self.start_date),
+            "stop_date": str(self.stop_date),
             "version": self.version,
             "installer": self.installer,
             "details": self.details,
@@ -115,7 +120,8 @@
             "project_name": self.project_name,
             "pod_name": self.pod_name,
             "description": self.description,
-            "creation_date": str(self.creation_date),
+            "start_date": str(self.start_date),
+            "stop_date": str(self.stop_date),
             "version": self.version,
             "installer": self.installer,
             "details": self.details,
diff --git a/utils/test/result_collection_api/tests/unit/test_result.py b/utils/test/result_collection_api/tests/unit/test_result.py
index 40b7dacf1..9c5093ed1 100644
--- a/utils/test/result_collection_api/tests/unit/test_result.py
+++ b/utils/test/result_collection_api/tests/unit/test_result.py
@@ -47,8 +47,10 @@ class TestResultBase(TestBase):
         self.version = 'C'
         self.build_tag = 'v3.0'
         self.scenario = 'odl-l2'
-        self.criteria = '10s'
+        self.criteria = 'passed'
         self.trust_indicator = 0.7
+        self.start_date = "2016-05-23 07:16:09.477097"
+        self.stop_date = "2016-05-23 07:16:19.477097"
         super(TestResultBase, self).setUp()
         self.details = Details(timestart='0', duration='9s', status='OK')
         self.req_d = ResultCreateRequest(pod_name=self.pod,
@@ -56,7 +58,8 @@
                                          case_name=self.case,
                                          installer=self.installer,
                                          version=self.version,
-                                         description='vping use ssh',
+                                         start_date=self.start_date,
+                                         stop_date=self.stop_date,
                                          details=self.details.format(),
                                          build_tag=self.build_tag,
                                          scenario=self.scenario,
@@ -84,7 +87,6 @@
         self.assertEqual(result.case_name, req.case_name)
         self.assertEqual(result.installer, req.installer)
         self.assertEqual(result.version, req.version)
-        self.assertEqual(result.description, req.description)
         details_req = Details.from_dict(req.details)
         details_res = Details.from_dict(result.details)
         self.assertEqual(details_res.duration, details_req.duration)
@@ -94,7 +96,8 @@
         self.assertEqual(result.scenario, req.scenario)
         self.assertEqual(result.criteria, req.criteria)
         self.assertEqual(result.trust_indicator, req.trust_indicator)
-        self.assertIsNotNone(result.creation_date)
+        self.assertIsNotNone(result.start_date)
+        self.assertIsNotNone(result.stop_date)
         self.assertIsNotNone(result._id)
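Taken together, the model and test changes replace the single server-side creation_date with a client-supplied start/stop pair, which also makes the run duration derivable from the stored record itself. A small illustrative helper using the timestamp format from the test fixture above; the function name and dict access are assumptions, not part of the patch.

    # Derive a run duration from the new start_date/stop_date fields.
    from datetime import datetime

    FMT = "%Y-%m-%d %H:%M:%S.%f"  # format used by the unit-test fixture

    def run_duration(result):
        start = datetime.strptime(result["start_date"], FMT)
        stop = datetime.strptime(result["stop_date"], FMT)
        return (stop - start).total_seconds()

    print(run_duration({"start_date": "2016-05-23 07:16:09.477097",
                        "stop_date": "2016-05-23 07:16:19.477097"}))  # 10.0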