39 files changed, 943 insertions, 567 deletions
diff --git a/.gitignore b/.gitignore
index 96a76e33f..024dfac4b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,3 +5,24 @@
 /releng/
 .idea
 *.py[cod]
+
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+.venv/
+venv/
+ENV/
diff --git a/jjb/3rd_party_ci/create-apex-vms.sh b/jjb/3rd_party_ci/create-apex-vms.sh
new file mode 100755
index 000000000..a076dd084
--- /dev/null
+++ b/jjb/3rd_party_ci/create-apex-vms.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -e
+
+# wipe the WORKSPACE
+/bin/rm -rf $WORKSPACE/*
+
+# clone opnfv sdnvpn repo
+git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
+. $WORKSPACE/sdnvpn/odl-pipeline/odl-pipeline-common.sh
+pushd $LIB
+./test_environment.sh --env-number $APEX_ENV_NUMBER --cloner-info $CLONER_INFO --snapshot-disks $SNAPSHOT_DISKS --vjump-hosts $VIRTUAL_JUMPHOSTS
+popd
diff --git a/jjb/3rd_party_ci/download-netvirt-artifact.sh b/jjb/3rd_party_ci/download-netvirt-artifact.sh
new file mode 100755
index 000000000..0a48e3aec
--- /dev/null
+++ b/jjb/3rd_party_ci/download-netvirt-artifact.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+set -e
+
+# wipe the WORKSPACE
+/bin/rm -rf $WORKSPACE/*
+
+echo "Attempting to fetch the artifact location from ODL Jenkins"
+CHANGE_DETAILS_URL="https://git.opendaylight.org/gerrit/changes/netvirt~master~$GERRIT_CHANGE_ID/detail"
+# due to limitation with the Jenkins Gerrit Trigger, we need to use Gerrit REST API to get the change details
+ODL_JOB_URL=$(curl -s $CHANGE_DETAILS_URL | grep netvirt-patch-test-current-carbon | tail -1 | \
+    sed 's/\\n//g' | awk '{print $6}')
+NETVIRT_ARTIFACT_URL="${ODL_JOB_URL}org.opendaylight.integration\$distribution-karaf/artifact/org.opendaylight.integration/distribution-karaf/0.6.0-SNAPSHOT/distribution-karaf-0.6.0-SNAPSHOT.tar.gz"
+echo -e "URL to artifact is\n\t$NETVIRT_ARTIFACT_URL"
+
+echo "Downloading the artifact. This could take time..."
+wget -q -O $NETVIRT_ARTIFACT $NETVIRT_ARTIFACT_URL
+if [[ $? -ne 0 ]]; then
+    echo "The artifact does not exist! Probably removed due to ODL Jenkins artifact retention policy."
+    echo "Rerun netvirt-patch-test-current-carbon to get artifact rebuilt."
+    exit 1
+fi
+echo "Download complete"
+ls -al $NETVIRT_ARTIFACT
diff --git a/jjb/3rd_party_ci/functest-netvirt.sh b/jjb/3rd_party_ci/functest-netvirt.sh
new file mode 100755
index 000000000..adffaf42d
--- /dev/null
+++ b/jjb/3rd_party_ci/functest-netvirt.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+set -e
+
+# wipe the WORKSPACE
+/bin/rm -rf $WORKSPACE/*
+
+echo "Hello World"
diff --git a/jjb/3rd_party_ci/install-netvirt.sh b/jjb/3rd_party_ci/install-netvirt.sh
new file mode 100755
index 000000000..96c4b9634
--- /dev/null
+++ b/jjb/3rd_party_ci/install-netvirt.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+set -e
+# clone opnfv sdnvpn repo
+git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
+. $WORKSPACE/sdnvpn/odl-pipeline/odl-pipeline-common.sh
+pushd $LIB
+./odl_reinstaller.sh --cloner-info $CLONER_INFO --odl-artifact $NETVIRT_ARTIFACT
+popd
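A note on the exit-status check in download-netvirt-artifact.sh above: the script runs under `set -e`, so a failing `wget` terminates it before the `if [[ $? -ne 0 ]]` test is ever evaluated, and the hint about re-running the ODL job is never printed. Below is a minimal sketch, not part of the change itself, of the same check written so it stays reachable under `set -e`; it reuses only the script's own variables ($NETVIRT_ARTIFACT, $NETVIRT_ARTIFACT_URL).

```bash
#!/bin/bash
set -e

# Testing the command directly in the 'if' condition keeps the failure
# branch reachable: bash does not apply errexit to commands whose exit
# status is being tested.
if ! wget -q -O "$NETVIRT_ARTIFACT" "$NETVIRT_ARTIFACT_URL"; then
    echo "The artifact does not exist! Probably removed due to ODL Jenkins artifact retention policy."
    echo "Rerun netvirt-patch-test-current-carbon to get artifact rebuilt."
    exit 1
fi
```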
\ No newline at end of file diff --git a/jjb/3rd_party_ci/odl-netvirt.yml b/jjb/3rd_party_ci/odl-netvirt.yml new file mode 100644 index 000000000..3dd4c0b44 --- /dev/null +++ b/jjb/3rd_party_ci/odl-netvirt.yml @@ -0,0 +1,212 @@ +- project: + name: 'netvirt' + + project: 'netvirt' + + installer: 'apex' +##################################### +# branch definitions +##################################### + stream: + - master: + branch: '{stream}' + gs-pathname: '' + disabled: false +##################################### +# patch verification phases +##################################### + phase: + - 'create-apex-vms': + slave-label: 'ericsson-virtual5' + - 'install-netvirt': + slave-label: 'odl-netvirt-virtual' + - 'functest': + slave-label: 'odl-netvirt-virtual' + - 'postprocess': + slave-label: 'odl-netvirt-virtual' +##################################### +# jobs +##################################### + jobs: + - 'odl-netvirt-verify-virtual-{stream}' + - 'odl-netvirt-verify-virtual-{phase}-{stream}' +##################################### +# job templates +##################################### +- job-template: + name: 'odl-netvirt-verify-virtual-{stream}' + + project-type: multijob + + disabled: '{obj:disabled}' + + concurrent: true + + properties: + - throttle: + enabled: true + max-total: 5 + max-per-node: 1 + option: 'project' + + parameters: + - project-parameter: + project: '{project}' + - gerrit-parameter: + branch: '{branch}' + - string: + name: NETVIRT_ARTIFACT + default: $WORKSPACE/distribution-karaf.tar.gz + - 'odl-netvirt-virtual-defaults' + + triggers: + - gerrit: + server-name: 'git.opendaylight.org' + trigger-on: + - comment-added-contains-event: + comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : SUCCESS' + - comment-added-contains-event: + comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : UNSTABLE' + - comment-added-contains-event: + comment-contains-value: 'opnfv-test' + projects: + - project-compare-type: 'ANT' + project-pattern: '{project}' + branches: + - branch-compare-type: 'ANT' + branch-pattern: '**/{branch}' + readable-message: true + + builders: + - description-setter: + description: "Built on $NODE_NAME" + - multijob: + name: create-apex-vms + condition: SUCCESSFUL + projects: + - name: 'odl-netvirt-verify-virtual-create-apex-vms-{stream}' + current-parameters: false + predefined-parameters: | + GERRIT_BRANCH=$GERRIT_BRANCH + GERRIT_REFSPEC=$GERRIT_REFSPEC + GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER + GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID + GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER + GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION + NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT + APEX_ENV_NUMBER=$APEX_ENV_NUMBER + node-parameters: false + kill-phase-on: FAILURE + abort-all-job: true + - multijob: + name: install-netvirt + condition: SUCCESSFUL + projects: + - name: 'odl-netvirt-verify-virtual-install-netvirt-{stream}' + current-parameters: false + predefined-parameters: | + GERRIT_BRANCH=$GERRIT_BRANCH + GERRIT_REFSPEC=$GERRIT_REFSPEC + GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER + GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID + GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER + GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION + NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT + node-parameters: true + kill-phase-on: FAILURE + abort-all-job: true + - multijob: + name: functest + condition: SUCCESSFUL + projects: + - name: 'functest-netvirt-virtual-suite-{stream}' + 
predefined-parameters: | + FUNCTEST_SUITE_NAME=vping_userdata,bgpvpn + RC_FILE_PATH=/home/jenkins/cloner-info/overcloudrc + node-parameters: true + kill-phase-on: FAILURE + abort-all-job: false + - multijob: + name: postprocess + condition: ALWAYS + projects: + - name: 'odl-netvirt-verify-virtual-postprocess-{stream}' + current-parameters: false + predefined-parameters: | + GERRIT_BRANCH=$GERRIT_BRANCH + GERRIT_REFSPEC=$GERRIT_REFSPEC + GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER + GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID + GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER + GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION + NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT + node-parameters: true + kill-phase-on: FAILURE + abort-all-job: true + +- job-template: + name: 'odl-netvirt-verify-virtual-{phase}-{stream}' + + disabled: '{obj:disabled}' + + concurrent: true + + properties: + - throttle: + enabled: true + max-total: 5 + max-per-node: 1 + option: 'project' + - build-blocker: + use-build-blocker: true + blocking-jobs: + - 'odl-netvirt-verify-virtual-install-.*' + - 'odl-netvirt-verify-virtual-functest-.*' + - 'odl-netvirt-verify-virtual-postprocess-.*' + block-level: 'NODE' + + wrappers: + - ssh-agent-credentials: + users: + - '{ssh-credentials}' + - timeout: + timeout: 360 + fail: true + + parameters: + - project-parameter: + project: '{project}' + - gerrit-parameter: + branch: '{branch}' + - '{slave-label}-defaults' + - '{installer}-defaults' + + builders: + - description-setter: + description: "Built on $NODE_NAME" + - '{project}-verify-{phase}-builder' +##################################### +# builder macros +##################################### +- builder: + name: 'netvirt-verify-create-apex-vms-builder' + builders: + - shell: + !include-raw: ./create-apex-vms.sh +- builder: + name: 'netvirt-verify-install-netvirt-builder' + builders: + - shell: + !include-raw: ./download-netvirt-artifact.sh + - shell: + !include-raw: ./install-netvirt.sh +- builder: + name: 'netvirt-verify-functest-builder' + builders: + - shell: + !include-raw: ./functest-netvirt.sh +- builder: + name: 'netvirt-verify-postprocess-builder' + builders: + - shell: + !include-raw: ./postprocess-netvirt.sh diff --git a/jjb/3rd_party_ci/postprocess-netvirt.sh b/jjb/3rd_party_ci/postprocess-netvirt.sh new file mode 100755 index 000000000..adffaf42d --- /dev/null +++ b/jjb/3rd_party_ci/postprocess-netvirt.sh @@ -0,0 +1,7 @@ +#!/bin/bash +set -e + +# wipe the WORKSPACE +/bin/rm -rf $WORKSPACE/* + +echo "Hello World" diff --git a/jjb/apex/apex-build.sh b/jjb/apex/apex-build.sh index e3e3f6194..ee1dfb5d3 100755 --- a/jjb/apex/apex-build.sh +++ b/jjb/apex/apex-build.sh @@ -23,7 +23,7 @@ fi # start the build cd $WORKSPACE/ci ./build.sh $BUILD_ARGS -RPM_VERSION=$(grep Version: $BUILD_DIRECTORY/rpm_specs/opnfv-apex.spec | awk '{ print $2 }')-$(echo $OPNFV_ARTIFACT_VERSION | tr -d '_-') +RPM_VERSION=$(grep Version: $WORKSPACE/build/rpm_specs/opnfv-apex.spec | awk '{ print $2 }')-$(echo $OPNFV_ARTIFACT_VERSION | tr -d '_-') # list the contents of BUILD_OUTPUT directory echo "Build Directory is ${BUILD_DIRECTORY}" echo "Build Directory Contents:" diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh index bbceb4b88..8d5c4cb13 100755 --- a/jjb/apex/apex-deploy.sh +++ b/jjb/apex/apex-deploy.sh @@ -72,7 +72,7 @@ if [[ "$BUILD_DIRECTORY" == *verify* ]]; then DEPLOY_SETTINGS_DIR="${WORKSPACE}/config/deploy" NETWORK_SETTINGS_DIR="${WORKSPACE}/config/network" DEPLOY_CMD="$(pwd)/deploy.sh" - RESOURCES="${WORKSPACE}/build/images/" + 
RESOURCES="${WORKSPACE}/.build/" CONFIG="${WORKSPACE}/build" LIB="${WORKSPACE}/lib" # Make sure python34 deps are installed diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh index f54e4c55a..64f13f4e6 100755 --- a/jjb/apex/apex-upload-artifact.sh +++ b/jjb/apex/apex-upload-artifact.sh @@ -11,6 +11,8 @@ echo # source the opnfv.properties to get ARTIFACT_VERSION source $WORKSPACE/opnfv.properties +BUILD_DIRECTORY=${WORKSPACE}/.build + # clone releng repository echo "Cloning releng repository..." [ -d releng ] && rm -rf releng diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml index 6011fe790..d8784c678 100644 --- a/jjb/apex/apex.yml +++ b/jjb/apex/apex.yml @@ -16,38 +16,10 @@ - master: branch: 'master' gs-pathname: '' - block-stream: 'colorado' - slave: 'lf-pod1' - verify-slave: 'apex-verify-master' - daily-slave: 'apex-daily-master' - - colorado: - branch: 'stable/colorado' - gs-pathname: '/colorado' - block-stream: 'master' - slave: 'lf-pod1' - verify-slave: 'apex-verify-colorado' - daily-slave: 'apex-daily-colorado' - disabled: false - - stream1: - - master: - branch: 'master' - gs-pathname: '' - block-stream: 'colorado' slave: 'lf-pod1' verify-slave: 'apex-verify-master' daily-slave: 'apex-daily-master' - stream2: - - colorado: - branch: 'stable/colorado' - gs-pathname: '/colorado' - block-stream: 'master' - slave: 'lf-pod1' - verify-slave: 'apex-verify-colorado' - daily-slave: 'apex-daily-colorado' - disabled: false - project: 'apex' scenario: @@ -56,15 +28,12 @@ - 'os-nosdn-nofeature-ha-ipv6' - 'os-nosdn-ovs-noha' - 'os-nosdn-fdio-noha' - - 'os-odl_l2-nofeature-ha' - - 'os-odl_l2-bgpvpn-ha' + - 'os-odl_l3-bgpvpn-ha' - 'os-odl_l2-fdio-noha' - 'os-odl_l2-fdio-ha' - 'os-odl_l2-sfc-noha' - 'os-odl_l3-nofeature-ha' - 'os-onos-nofeature-ha' - - 'os-onos-sfc-ha' - - 'os-ocl-nofeature-ha' platform: - 'baremetal' @@ -216,7 +185,7 @@ block: true same-node: true - trigger-builds: - - project: 'apex-deploy-virtual-os-odl_l2-nofeature-ha-{stream}' + - project: 'apex-deploy-virtual-os-odl_l3-nofeature-ha-{stream}' predefined-parameters: | BUILD_DIRECTORY=apex-verify-{stream} OPNFV_CLEAN=yes @@ -226,7 +195,7 @@ - trigger-builds: - project: 'functest-apex-{verify-slave}-suite-{stream}' predefined-parameters: | - DEPLOY_SCENARIO=os-odl_l2-nofeature-ha + DEPLOY_SCENARIO=os-odl_l3-nofeature-ha FUNCTEST_SUITE_NAME=healthcheck block: true same-node: true @@ -393,7 +362,7 @@ - trigger-builds: - project: 'apex-deploy-virtual-os-nosdn-nofeature-noha-{stream}' predefined-parameters: | - BUILD_DIRECTORY=apex-build-{stream}/build + BUILD_DIRECTORY=apex-build-{stream}/.build OPNFV_CLEAN=yes git-revision: false same-node: true @@ -539,7 +508,7 @@ - trigger-builds: - project: 'apex-deploy-baremetal-os-nosdn-nofeature-ha-{stream}' predefined-parameters: | - BUILD_DIRECTORY=apex-build-{stream}/build + BUILD_DIRECTORY=apex-build-{stream}/.build OPNFV_CLEAN=yes git-revision: true same-node: true @@ -567,39 +536,9 @@ failure-threshold: 'never' unstable-threshold: 'FAILURE' - trigger-builds: - - project: 'apex-deploy-baremetal-os-odl_l2-nofeature-ha-{stream}' - predefined-parameters: | - BUILD_DIRECTORY=apex-build-{stream}/build - OPNFV_CLEAN=yes - git-revision: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - block: true - - trigger-builds: - - project: 'functest-apex-{daily-slave}-daily-{stream}' - predefined-parameters: - DEPLOY_SCENARIO=os-odl_l2-nofeature-ha - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - 
failure-threshold: 'never' - unstable-threshold: 'FAILURE' - - trigger-builds: - - project: 'yardstick-apex-{slave}-daily-{stream}' - predefined-parameters: - DEPLOY_SCENARIO=os-odl_l2-nofeature-ha - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' - - trigger-builds: - project: 'apex-deploy-baremetal-os-odl_l3-nofeature-ha-{stream}' predefined-parameters: | - BUILD_DIRECTORY=apex-build-{stream}/build + BUILD_DIRECTORY=apex-build-{stream}/.build OPNFV_CLEAN=yes git-revision: true same-node: true @@ -626,226 +565,6 @@ build-step-failure-threshold: 'never' failure-threshold: 'never' unstable-threshold: 'FAILURE' - - trigger-builds: - - project: 'apex-deploy-baremetal-os-onos-nofeature-ha-{stream}' - predefined-parameters: | - BUILD_DIRECTORY=apex-build-{stream}/build - OPNFV_CLEAN=yes - git-revision: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - block: true - - trigger-builds: - - project: 'functest-apex-{daily-slave}-daily-{stream}' - predefined-parameters: - DEPLOY_SCENARIO=os-onos-nofeature-ha - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' - - trigger-builds: - - project: 'yardstick-apex-{slave}-daily-{stream}' - predefined-parameters: - DEPLOY_SCENARIO=os-onos-nofeature-ha - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' - - trigger-builds: - - project: 'apex-deploy-baremetal-os-odl_l2-bgpvpn-ha-{stream}' - predefined-parameters: | - BUILD_DIRECTORY=apex-build-{stream}/build - OPNFV_CLEAN=yes - git-revision: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - block: true - - trigger-builds: - - project: 'functest-apex-{daily-slave}-daily-{stream}' - predefined-parameters: - DEPLOY_SCENARIO=os-odl_l2-bgpvpn-ha - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' - - trigger-builds: - - project: 'yardstick-apex-{slave}-daily-{stream}' - predefined-parameters: - DEPLOY_SCENARIO=os-odl_l2-bgpvpn-ha - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' - - trigger-builds: - - project: 'apex-deploy-baremetal-os-onos-sfc-ha-{stream}' - predefined-parameters: | - BUILD_DIRECTORY=apex-build-{stream}/build - OPNFV_CLEAN=yes - git-revision: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - block: true - - trigger-builds: - - project: 'functest-apex-{daily-slave}-daily-{stream}' - predefined-parameters: - DEPLOY_SCENARIO=os-onos-sfc-ha - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' - - trigger-builds: - - project: 'yardstick-apex-{slave}-daily-{stream}' - predefined-parameters: - DEPLOY_SCENARIO=os-onos-sfc-ha - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' - - trigger-builds: - - project: 'apex-deploy-baremetal-os-odl_l2-sfc-noha-{stream}' - predefined-parameters: | - BUILD_DIRECTORY=apex-build-{stream}/build - OPNFV_CLEAN=yes - git-revision: true - same-node: true - block-thresholds: 
- build-step-failure-threshold: 'never' - block: true - - trigger-builds: - - project: 'functest-apex-{daily-slave}-daily-{stream}' - predefined-parameters: - DEPLOY_SCENARIO=os-odl_l2-sfc-noha - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' - - trigger-builds: - - project: 'yardstick-apex-{slave}-daily-{stream}' - predefined-parameters: - DEPLOY_SCENARIO=os-odl_l2-sfc-noha - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' - - trigger-builds: - - project: 'apex-deploy-baremetal-os-odl_l2-fdio-ha-{stream}' - predefined-parameters: | - BUILD_DIRECTORY=apex-build-{stream}/build - OPNFV_CLEAN=yes - git-revision: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - block: true - - trigger-builds: - - project: 'functest-apex-{daily-slave}-daily-{stream}' - predefined-parameters: - DEPLOY_SCENARIO=os-odl_l2-fdio-ha - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' - - trigger-builds: - - project: 'yardstick-apex-{slave}-daily-{stream}' - predefined-parameters: - DEPLOY_SCENARIO=os-odl_l2-fdio-ha - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' - - trigger-builds: - - project: 'apex-deploy-baremetal-os-nosdn-fdio-noha-{stream}' - predefined-parameters: | - BUILD_DIRECTORY=apex-build-{stream}/build - OPNFV_CLEAN=yes - git-revision: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - block: true - - trigger-builds: - - project: 'functest-apex-{daily-slave}-daily-{stream}' - predefined-parameters: - DEPLOY_SCENARIO=os-nosdn-fdio-noha - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' - - trigger-builds: - - project: 'yardstick-apex-{slave}-daily-{stream}' - predefined-parameters: - DEPLOY_SCENARIO=os-nosdn-fdio-noha - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' - - trigger-builds: - - project: 'apex-deploy-virtual-os-nosdn-nofeature-ha-ipv6-{stream}' - predefined-parameters: | - BUILD_DIRECTORY=apex-build-{stream}/build - OPNFV_CLEAN=yes - git-revision: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - block: true - - trigger-builds: - - project: 'apex-deploy-baremetal-os-nosdn-ovs-noha-{stream}' - predefined-parameters: | - BUILD_DIRECTORY=apex-build-{stream}/build - OPNFV_CLEAN=yes - git-revision: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - block: true - - trigger-builds: - - project: 'functest-apex-{daily-slave}-daily-{stream}' - predefined-parameters: - DEPLOY_SCENARIO=os-nosdn-ovs-noha - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' - - trigger-builds: - - project: 'yardstick-apex-{slave}-daily-{stream}' - predefined-parameters: - DEPLOY_SCENARIO=os-nosdn-ovs-noha - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' - job-template: name: 
'apex-gs-clean-{stream}' @@ -952,16 +671,7 @@ - trigger: name: 'apex-master' triggers: - - timed: '0 3 * * 0' - - timed: '0 3 * * 4' -- trigger: - name: 'apex-colorado' - triggers: - - timed: '0 3 * * 1' - - timed: '0 3 * * 2' - - timed: '0 3 * * 3' - - timed: '0 3 * * 5' - - timed: '0 3 * * 6' + - timed: '0 3 * * *' - trigger: name: 'apex-gs-clean-{stream}' triggers: diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml index b1cd9bf75..77718e181 100644 --- a/jjb/armband/armband-ci-jobs.yml +++ b/jjb/armband/armband-ci-jobs.yml @@ -160,6 +160,23 @@ build-step-failure-threshold: 'never' failure-threshold: 'never' unstable-threshold: 'FAILURE' + # 1.dovetail only master by now, not sync with A/B/C branches + # 2.here the stream means the SUT stream, dovetail stream is defined in its own job + # 3.only debug testsuite here(includes 3 basic testcase, + # i.e. one tempest smoke ipv6, two vping from functest) + # 4.not used for release criteria or compliance, + # only to debug the dovetail tool bugs with arm pods + - trigger-builds: + - project: 'dovetail-{installer}-{pod}-debug-{stream}' + current-parameters: false + predefined-parameters: + DEPLOY_SCENARIO={scenario} + block: true + same-node: true + block-thresholds: + build-step-failure-threshold: 'never' + failure-threshold: 'never' + unstable-threshold: 'FAILURE' - job-template: name: '{installer}-deploy-{pod}-daily-{stream}' diff --git a/jjb/fastpathmetrics/fastpathmetrics.yml b/jjb/barometer/barometer.yml index 6be6a52c3..c763f3001 100644 --- a/jjb/fastpathmetrics/fastpathmetrics.yml +++ b/jjb/barometer/barometer.yml @@ -3,14 +3,14 @@ # They will only be enabled on request by projects! ################################################### - project: - name: fastpathmetrics + name: barometer project: '{name}' jobs: - - 'fastpathmetrics-verify-{stream}' - - 'fastpathmetrics-merge-{stream}' - - 'fastpathmetrics-daily-{stream}' + - 'barometer-verify-{stream}' + - 'barometer-merge-{stream}' + - 'barometer-daily-{stream}' stream: - master: @@ -23,7 +23,7 @@ disabled: false - job-template: - name: 'fastpathmetrics-verify-{stream}' + name: 'barometer-verify-{stream}' disabled: '{obj:disabled}' @@ -71,7 +71,7 @@ make - job-template: - name: 'fastpathmetrics-merge-{stream}' + name: 'barometer-merge-{stream}' project-type: freestyle @@ -120,11 +120,12 @@ - shell: | pwd cd src - make clobber - make + ./install_build_deps.sh + sudo make clobber + sudo make - job-template: - name: 'fastpathmetrics-daily-{stream}' + name: 'barometer-daily-{stream}' project-type: freestyle @@ -159,5 +160,6 @@ - shell: | pwd cd src - make clobber - make + ./install_build_deps.sh + sudo make clobber + sudo make diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml index b70bfc67b..0d97fef75 100644 --- a/jjb/compass4nfv/compass-ci-jobs.yml +++ b/jjb/compass4nfv/compass-ci-jobs.yml @@ -10,10 +10,10 @@ stream: master branch: '{stream}' gs-pathname: '' - colorado: &colorado - stream: colorado - branch: 'stable/{stream}' - gs-pathname: '/{stream}' +# colorado: &colorado +# stream: colorado +# branch: 'stable/{stream}' +# gs-pathname: '/{stream}' #-------------------------------- # POD, INSTALLER, AND BRANCH MAPPING #-------------------------------- @@ -28,14 +28,14 @@ slave-label: compass-virtual os-version: 'trusty' <<: *master - - baremetal: - slave-label: compass-baremetal - os-version: 'trusty' - <<: *colorado - - virtual: - slave-label: compass-virtual - os-version: 'trusty' - <<: *colorado +# - baremetal: +# 
slave-label: compass-baremetal +# os-version: 'trusty' +# <<: *colorado +# - virtual: +# slave-label: compass-virtual +# os-version: 'trusty' +# <<: *colorado #-------------------------------- # master #-------------------------------- @@ -48,35 +48,27 @@ - 'os-nosdn-nofeature-ha': disabled: false auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger' - openstack-os-version: '' - 'os-odl_l2-nofeature-ha': disabled: false auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger' - openstack-os-version: '' - 'os-odl_l3-nofeature-ha': disabled: false auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger' - openstack-os-version: '' - 'os-onos-nofeature-ha': disabled: false auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger' - openstack-os-version: '' - 'os-ocl-nofeature-ha': disabled: false auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger' - openstack-os-version: '' - 'os-onos-sfc-ha': disabled: false auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger' - openstack-os-version: '' - 'os-odl_l2-moon-ha': disabled: false auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger' - openstack-os-version: 'xenial' - 'os-nosdn-kvm-ha': disabled: false auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger' - openstack-os-version: '' jobs: - 'compass-{scenario}-{pod}-daily-{stream}' @@ -131,7 +123,6 @@ predefined-parameters: | DEPLOY_SCENARIO={scenario} COMPASS_OS_VERSION={os-version} - COMPASS_OS_VERSION_OPTION={openstack-os-version} same-node: true block: true - trigger-builds: @@ -159,7 +150,7 @@ #dovetail only master by now, not sync with A/B/C branches #here the stream means the SUT stream, dovetail stream is defined in its own job - trigger-builds: - - project: 'dovetail-compass-{pod}-basic-{stream}' + - project: 'dovetail-compass-{pod}-debug-{stream}' current-parameters: false predefined-parameters: DEPLOY_SCENARIO={scenario} @@ -250,11 +241,6 @@ - 'mitaka' - 'newton' - 'liberty' - - choice: - name: COMPASS_OS_VERSION_OPTION - choices: - - '' - - 'xenial' ######################## # trigger macros diff --git a/jjb/compass4nfv/compass-deploy.sh b/jjb/compass4nfv/compass-deploy.sh index 256717953..c8d6159a2 100644 --- a/jjb/compass4nfv/compass-deploy.sh +++ b/jjb/compass4nfv/compass-deploy.sh @@ -29,19 +29,9 @@ cd $WORKSPACE export OS_VERSION=${COMPASS_OS_VERSION} export OPENSTACK_VERSION=${COMPASS_OPENSTACK_VERSION} -if [[ "${COMPASS_OS_VERSION_OPTION}" = "xenial" ]] && [[ "${OPENSTACK_VERSION}" = "mitaka" ]]; then - export OPENSTACK_VERSION=${OPENSTACK_VERSION}_${COMPASS_OS_VERSION_OPTION} - export OS_VERSION=${COMPASS_OS_VERSION_OPTION} -fi -if [[ "${OPENSTACK_VERSION}" = "newton" ]]; then - export OS_VERSION="xenial" - export OPENSTACK_VERSION=${OPENSTACK_VERSION}_${OS_VERSION} -fi - if [[ "${DEPLOY_SCENARIO}" =~ "-ocl" ]]; then export NETWORK_CONF_FILE=network_ocl.yml - export OPENSTACK_VERSION=liberty elif [[ "${DEPLOY_SCENARIO}" =~ "-onos" ]]; then export NETWORK_CONF_FILE=network_onos.yml else @@ -57,7 +47,11 @@ else export DHA_CONF=$CONFDIR/hardware_environment/$NODE_NAME/${DEPLOY_SCENARIO}.yml fi -./deploy.sh --dha ${DHA_CONF} --network ${NETWORK_CONF} +export DHA=${DHA_CONF} +export NETWORK=${NETWORK_CONF} + +source ./ci/deploy_ci.sh + if [ $? -ne 0 ]; then echo "depolyment failed!" 
deploy_ret=1 diff --git a/jjb/compass4nfv/compass-verify-jobs.yml b/jjb/compass4nfv/compass-verify-jobs.yml index 938214485..69cc958a8 100644 --- a/jjb/compass4nfv/compass-verify-jobs.yml +++ b/jjb/compass4nfv/compass-verify-jobs.yml @@ -138,21 +138,20 @@ current-parameters: true predefined-parameters: | COMPASS_OS_VERSION={os-version} - COMPASS_OS_VERSION_OPTION={openstack-os-version} node-parameters: true kill-phase-on: FAILURE abort-all-job: true - - multijob: - name: smoke-test - condition: SUCCESSFUL - projects: - - name: 'functest-compass-virtual-suite-{stream}' - current-parameters: true - predefined-parameters: - FUNCTEST_SUITE_NAME=healthcheck - node-parameters: true - kill-phase-on: NEVER - abort-all-job: true +# - multijob: +# name: smoke-test +# condition: SUCCESSFUL +# projects: +# - name: 'functest-compass-virtual-suite-{stream}' +# current-parameters: true +# predefined-parameters: +# FUNCTEST_SUITE_NAME=healthcheck +# node-parameters: true +# kill-phase-on: NEVER +# abort-all-job: true - job-template: name: 'compass-verify-{phase}-{distro}-{stream}' diff --git a/jjb/doctor/doctor.yml b/jjb/doctor/doctor.yml index 7a5bd22f2..f9ee3de79 100644 --- a/jjb/doctor/doctor.yml +++ b/jjb/doctor/doctor.yml @@ -96,6 +96,10 @@ default: '{docker-tag}' description: 'Tag to pull docker image' - string: + name: CLEAN_DOCKER_IMAGES + default: 'false' + description: 'Remove downloaded docker images (opnfv/functest:*)' + - string: name: DEPLOY_SCENARIO default: 'os-nosdn-nofeature-ha' description: 'Scenario to deploy and test' @@ -162,7 +166,12 @@ builders: - 'functest-suite-builder' - shell: | - cp $HOME/opnfv/functest/results/{stream}/{project}.log $WORKSPACE/tests/ + functest_log="$HOME/opnfv/functest/results/{stream}/{project}.log" + to_be_archived="$WORKSPACE/tests/functest-{project}.log" + cp $functest_log $to_be_archived + # NOTE: checking the test result, as the previous job could return + # 0 regardless the result of doctor test scenario. 
+ grep -e ' OK$' $functest_log || exit 1 publishers: - archive: diff --git a/jjb/dovetail/dovetail-ci-jobs.yml b/jjb/dovetail/dovetail-ci-jobs.yml index 29212005d..0c7dbe33a 100644 --- a/jjb/dovetail/dovetail-ci-jobs.yml +++ b/jjb/dovetail/dovetail-ci-jobs.yml @@ -102,6 +102,27 @@ SUT: apex auto-trigger-name: 'daily-trigger-disabled' <<: *colorado +#armband CI PODs + - armband_baremetal: + slave-label: armband-baremetal + SUT: fuel + auto-trigger-name: 'daily-trigger-disabled' + <<: *master + - armband_virtual: + slave-label: armband-virtual + SUT: fuel + auto-trigger-name: 'daily-trigger-disabled' + <<: *master + - armband_baremetal: + slave-label: armband-baremetal + SUT: fuel + auto-trigger-name: 'daily-trigger-disabled' + <<: *colorado + - armband_virtual: + slave-label: armband-virtual + SUT: fuel + auto-trigger-name: 'daily-trigger-disabled' + <<: *colorado #-------------------------------- # None-CI PODs #-------------------------------- @@ -110,9 +131,21 @@ SUT: compass auto-trigger-name: 'daily-trigger-disabled' <<: *master + - arm-pod2: + slave-label: '{pod}' + SUT: fuel + auto-trigger-name: 'daily-trigger-disabled' + <<: *master + - arm-pod3: + slave-label: '{pod}' + SUT: fuel + auto-trigger-name: 'daily-trigger-disabled' + <<: *master #-------------------------------- testsuite: - - 'basic' + - 'debug' + - 'proposed_tests' + - 'compliance_set' jobs: - 'dovetail-{SUT}-{pod}-{testsuite}-{stream}' @@ -159,6 +192,14 @@ name: CI_DEBUG default: 'true' description: "Show debug output information" + - string: + name: TESTSUITE + default: '{testsuite}' + description: "dovetail testsuite to run" + - string: + name: DOVETAIL_REPO_DIR + default: "/home/opnfv/dovetail" + description: "Directory where the dovetail repository is cloned" scm: - git-scm: @@ -170,7 +211,7 @@ - description-setter: description: "POD: $NODE_NAME" - 'dovetail-cleanup' - - 'dovetail-{testsuite}' + - 'dovetail-run' publishers: - archive: @@ -182,11 +223,12 @@ # builder macros ######################## - builder: - name: dovetail-basic + name: dovetail-run builders: - shell: !include-raw: ./dovetail-run.sh + - builder: name: dovetail-fetch-os-creds builders: diff --git a/jjb/dovetail/dovetail-cleanup.sh b/jjb/dovetail/dovetail-cleanup.sh index 297222bb3..f215278db 100755 --- a/jjb/dovetail/dovetail-cleanup.sh +++ b/jjb/dovetail/dovetail-cleanup.sh @@ -1,20 +1,21 @@ #!/bin/bash + [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null" echo "Cleaning up docker containers/images..." # Remove previous running containers if exist if [[ ! -z $(docker ps -a | grep opnfv/dovetail) ]]; then echo "Removing existing opnfv/dovetail containers..." - docker ps -a | grep opnfv/dovetail | awk '{print $1}' | xargs docker rm -f >$redirect + docker ps -a | grep opnfv/dovetail | awk '{print $1}' | xargs docker rm -f >${redirect} fi # Remove existing images if exist if [[ ! -z $(docker images | grep opnfv/dovetail) ]]; then echo "Docker images to remove:" - docker images | head -1 && docker images | grep opnfv/dovetail + docker images | head -1 && docker images | grep opnfv/dovetail >${redirect} image_tags=($(docker images | grep opnfv/dovetail | awk '{print $2}')) for tag in "${image_tags[@]}"; do echo "Removing docker image opnfv/dovetail:$tag..." 
-        docker rmi opnfv/dovetail:$tag >$redirect
+        docker rmi opnfv/dovetail:$tag >${redirect}
     done
 fi
diff --git a/jjb/dovetail/dovetail-run.sh b/jjb/dovetail/dovetail-run.sh
index 098b7db0c..4082c34fe 100755
--- a/jjb/dovetail/dovetail-run.sh
+++ b/jjb/dovetail/dovetail-run.sh
@@ -22,7 +22,7 @@ if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
 elif [[ ${INSTALLER_TYPE} == 'joid' ]]; then
     # If production lab then creds may be retrieved dynamically
     # creds are on the jumphost, always in the same folder
-    labconfig="-v $LAB_CONFIG/admin-openrc:/home/opnfv/openrc"
+    labconfig="-v $LAB_CONFIG/admin-openrc:/home/opnfv/functest/conf/openstack.creds"
     # If dev lab, credentials may not be the default ones, just provide a path to put them into docker
     # replace the default one by the customized one provided by jenkins config
 fi
@@ -32,26 +32,45 @@ if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FOR
     sudo iptables -I FORWARD -j RETURN
 fi
 
-opts="--privileged=true --rm"
+opts="--privileged=true -id"
 envs="-e CI_DEBUG=${CI_DEBUG} \
      -e INSTALLER_TYPE=${INSTALLER_TYPE} \
      -e INSTALLER_IP=${INSTALLER_IP} \
      -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
-     -e DEPLOY_TYPE=${DEPLOY_TYPE} \
-     -v /var/run/docker.sock:/var/run/docker.sock \
-     -v /home/opnfv/dovetail/results:/home/opnfv/dovetail/results"
+     -e DEPLOY_TYPE=${DEPLOY_TYPE}"
+results_envs="-v /var/run/docker.sock:/var/run/docker.sock \
+              -v /home/opnfv/dovetail/results:/home/opnfv/dovetail/results"
 
 # Pull the image with correct tag
 echo "Dovetail: Pulling image opnfv/dovetail:${DOCKER_TAG}"
 docker pull opnfv/dovetail:$DOCKER_TAG >$redirect
 
-# Run docker
-echo "Dovetail: docker running..."
-sudo docker run ${opts} ${envs} ${labconfig} ${sshkey} opnfv/dovetail:${DOCKER_TAG} \
-"/home/opnfv/dovetail/dovetail/run.py"
+cmd="sudo docker run ${opts} ${envs} ${results_envs} ${labconfig} ${sshkey} \
+     opnfv/dovetail:${DOCKER_TAG} /bin/bash"
+echo "Dovetail: running docker run command: ${cmd}"
+${cmd} >${redirect}
+sleep 5
+container_id=$(docker ps | grep "opnfv/dovetail:${DOCKER_TAG}" | awk '{print $1}' | head -1)
+echo "Container ID=${container_id}"
+if [ -z ${container_id} ]; then
+    echo "Cannot find opnfv/dovetail container ID ${container_id}. Please check if it is existing."
+    docker ps -a
+    exit 1
+fi
+echo "COntainer Start: docker start ${container_id}"
+docker start ${container_id}
+sleep 5
+docker ps >${redirect}
+if [ $(docker ps | grep "opnfv/dovetail:${DOCKER_TAG}" | wc -l) == 0 ]; then
+    echo "The container opnfv/dovetail with ID=${container_id} has not been properly started. Exiting..."
+    exit 1
+fi
+
+exec_cmd="python ${DOVETAIL_REPO_DIR}/dovetail/run.py --testsuite ${TESTSUITE} -d true"
+echo "Container exec command: ${exec_cmd}"
+docker exec ${container_id} ${exec_cmd}
 
-echo "Dovetail: store results..."
-sudo cp -r /home/opnfv/dovetail/results ./ +sudo cp -r ${DOVETAIL_REPO_DIR}/results ./ #To make sure the file owner is jenkins, for the copied results files in the above line #if not, there will be error when next time to wipe workspace sudo chown -R jenkins:jenkins ${WORKSPACE}/results diff --git a/jjb/functest/functest-ci-jobs.yml b/jjb/functest/functest-ci-jobs.yml index 1cded61fc..a578fcecd 100644 --- a/jjb/functest/functest-ci-jobs.yml +++ b/jjb/functest/functest-ci-jobs.yml @@ -113,6 +113,11 @@ slave-label: armband-virtual installer: fuel <<: *colorado +# netvirt 3rd party ci + - virtual: + slave-label: odl-netvirt-virtual + installer: netvirt + <<: *master #-------------------------------- # None-CI PODs #-------------------------------- @@ -224,6 +229,10 @@ name: DOCKER_TAG default: '{docker-tag}' description: 'Tag to pull docker image' + - string: + name: CLEAN_DOCKER_IMAGES + default: 'false' + description: 'Remove downloaded docker images (opnfv/functest:*)' - functest-parameter: gs-pathname: '{gs-pathname}' diff --git a/jjb/functest/functest-cleanup.sh b/jjb/functest/functest-cleanup.sh index 4bedfe7de..3c4c7f965 100755 --- a/jjb/functest/functest-cleanup.sh +++ b/jjb/functest/functest-cleanup.sh @@ -10,7 +10,7 @@ if [[ ! -z $(docker ps -a | grep opnfv/functest) ]]; then fi # Remove existing images if exist -if [[ ! -z $(docker images | grep opnfv/functest) ]]; then +if [[ $CLEAN_DOCKER_IMAGES ]] && [[ ! -z $(docker images | grep opnfv/functest) ]]; then echo "Docker images to remove:" docker images | head -1 && docker images | grep opnfv/functest >${redirect} image_tags=($(docker images | grep opnfv/functest | awk '{print $2}')) diff --git a/jjb/functest/functest-suite.sh b/jjb/functest/functest-suite.sh index df286569f..a30fb5973 100755 --- a/jjb/functest/functest-suite.sh +++ b/jjb/functest/functest-suite.sh @@ -8,7 +8,7 @@ if [[ ${branch} == *"brahmaputra"* ]]; then elif [[ ${branch} == *"colorado"* ]]; then cmd="python ${FUNCTEST_REPO_DIR}/ci/run_tests.py -t $FUNCTEST_SUITE_NAME" else - cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/run_tests.py -t $FUNCTEST_SUITE_NAME" + cmd="functest testcase run $FUNCTEST_SUITE_NAME" fi container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1) docker exec $container_id $cmd diff --git a/jjb/global/installer-params.yml b/jjb/global/installer-params.yml index 55f4769b7..610072d1b 100644 --- a/jjb/global/installer-params.yml +++ b/jjb/global/installer-params.yml @@ -100,3 +100,22 @@ name: INSTALLER_TYPE default: infra description: 'Installer used for deploying OPNFV on this POD' +- parameter: + name: 'netvirt-defaults' + parameters: + - string: + name: INSTALLER_IP + default: '192.168.X.X' + description: 'IP of the installer' + - string: + name: INSTALLER_TYPE + default: apex + description: 'Installer used for deploying OPNFV on this POD' + - string: + name: DEPLOY_SCENARIO + default: 'none' + description: 'Scenario to deploy and test' + - string: + name: EXTERNAL_NETWORK + default: 'external' + description: 'external network for test' diff --git a/jjb/global/slave-params.yml b/jjb/global/slave-params.yml index dd0c97996..7eec70918 100644 --- a/jjb/global/slave-params.yml +++ b/jjb/global/slave-params.yml @@ -670,6 +670,26 @@ name: GIT_BASE default: https://gerrit.opnfv.org/gerrit/$PROJECT description: 'Git URL to use on this Jenkins Slave' +- parameter: + name: 'ericsson-virtual5-defaults' + parameters: + - label: + name: SLAVE_LABEL + default: 'ericsson-virtual5' + - string: + name: GIT_BASE + default: 
https://git.opendaylight.org/gerrit/p/$PROJECT.git + description: 'Git URL to use on this Jenkins Slave' +- parameter: + name: 'odl-netvirt-virtual-defaults' + parameters: + - label: + name: SLAVE_LABEL + default: 'odl-netvirt-virtual' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' ##################################################### # These slaves are just dummy slaves for sandbox jobs ##################################################### diff --git a/jjb/infra/bifrost-verify-jobs.yml b/jjb/infra/bifrost-verify-jobs.yml index be3cf9a0e..422a42274 100644 --- a/jjb/infra/bifrost-verify-jobs.yml +++ b/jjb/infra/bifrost-verify-jobs.yml @@ -36,8 +36,7 @@ disabled: false dib-os-release: '42.2' dib-os-element: 'opensuse-minimal' - # python-xml is needed until https://review.openstack.org/#/c/400150/ is merged - dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl,python-xml' + dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl' extra-dib-elements: 'openssh-server' vm-disk: '30' vm-memory: '4096' diff --git a/jjb/infra/bifrost-verify.sh b/jjb/infra/bifrost-verify.sh index ded4ed463..9fbb1d0d9 100755 --- a/jjb/infra/bifrost-verify.sh +++ b/jjb/infra/bifrost-verify.sh @@ -17,6 +17,9 @@ function fix_ownership() { if [ -z "${JOB_URL+x}" ]; then echo "Not running as part of Jenkins. Handle the logs manually." else + # Make sure cache exists + [[ ! -d ${HOME}/.cache ]] && mkdir ${HOME}/.cache + sudo chown -R jenkins:jenkins $WORKSPACE sudo chown -R jenkins:jenkins ${HOME}/.cache fi diff --git a/jjb/kvmfornfv/kvmfornfv.yml b/jjb/kvmfornfv/kvmfornfv.yml index 4cba6d1d3..1c2bf5053 100644 --- a/jjb/kvmfornfv/kvmfornfv.yml +++ b/jjb/kvmfornfv/kvmfornfv.yml @@ -24,12 +24,8 @@ # patch verification phases ##################################### testname: - - 'Idle_Idle': - trigger_time: '@midnight' - - 'Stress_Idle': - trigger_time: '@midnight+3hours' - - 'Packet_Forwarding': - trigger_time: '@midnight+6hours' + - 'cyclictest' + - 'packet_forward' ##################################### # patch verification phases ##################################### @@ -37,7 +33,7 @@ - 'kvmfornfv-verify-{stream}' - 'kvmfornfv-verify-{phase}-{stream}' - 'kvmfornfv-merge-{stream}' - - 'kvmfornfv-{testname}-daily-{stream}' + - 'kvmfornfv-daily-{stream}' - 'kvmfornfv-{testname}-daily-{phase}-{stream}' ##################################### # job templates @@ -186,7 +182,7 @@ !include-raw: ./kvmfornfv-build.sh - job-template: - name: 'kvmfornfv-{testname}-daily-{stream}' + name: 'kvmfornfv-daily-{stream}' project-type: multijob @@ -210,7 +206,7 @@ choosing-strategy: 'default' triggers: - - timed: '{trigger_time}' + - timed: '@midnight' builders: - description-setter: @@ -219,7 +215,7 @@ name: build condition: SUCCESSFUL projects: - - name: 'kvmfornfv-Idle_Idle-daily-build-{stream}' + - name: 'kvmfornfv-{testname}-daily-build-{stream}' current-parameters: false node-parameters: false git-revision: true @@ -229,56 +225,13 @@ name: build condition: SUCCESSFUL projects: - - name: 'kvmfornfv-Stress_Idle-daily-build-{stream}' - current-parameters: false - node-parameters: false - git-revision: true - kill-phase-on: FAILURE - abort-all-job: true - - multijob: - name: build - condition: SUCCESSFUL - projects: - - name: 'kvmfornfv-Packet_Forwarding-daily-build-{stream}' - current-parameters: false - node-parameters: false - git-revision: true - kill-phase-on: FAILURE - abort-all-job: true - - - multijob: - name: build - condition: 
SUCCESSFUL - projects: - - name: 'kvmfornfv-Idle_Idle-daily-test-{stream}' + - name: 'kvmfornfv-{testname}-daily-test-{stream}' current-parameters: false node-parameters: false git-revision: true kill-phase-on: FAILURE abort-all-job: true - - multijob: - name: test - condition: SUCCESSFUL - projects: - - name: 'kvmfornfv-Stress_Idle-daily-test-{stream}' - current-parameters: false - node-parameters: false - git-revision: true - kill-phase-on: FAILURE - abort-all-job: true - - multijob: - name: build - condition: SUCCESSFUL - projects: - - name: 'kvmfornfv-Packet_Forwarding-daily-test-{stream}' - current-parameters: false - node-parameters: false - git-revision: true - kill-phase-on: FAILURE - abort-all-job: true - - - job-template: name: 'kvmfornfv-{testname}-daily-{phase}-{stream}' @@ -307,35 +260,15 @@ - '{slave-label}-defaults' - 'kvmfornfv-defaults': gs-pathname: '{gs-pathname}' + - string: + name: TEST_NAME + default: '{testname}' + description: "Daily job to execute kvmfornfv '{testname}' testcase." builders: - description-setter: description: "Built on $NODE_NAME" - '{project}-{testname}-daily-{phase}-macro' -######################## -# parameter macros -######################## -- parameter: - name: 'kvmfornfv-Idle_Idle-daily-test-{stream}' - parameters: - - string: - name: TEST_NAME - default: 'idle_idle' - description: "Daily job to run cyclictest without applying any stress" -- parameter: - name: 'kvmfornfv-Stress_Idle-daily-test-{stream}' - parameters: - - string: - name: TEST_NAME - default: 'stress_idle' - description: "Daily job to run cyclictest with stress applied" -- parameter: - name: 'kvmfornfv-Packet_Forwarding-daily-test-{stream}' - parameters: - - string: - name: TEST_NAME - default: 'packet_forward' - description: "Daily job to run packet forwarding test cases" ##################################### # builder macros ##################################### @@ -354,48 +287,33 @@ - shell: !include-raw: ./kvmfornfv-test.sh - builder: - name: 'kvmfornfv-Idle_Idle-daily-build-macro' + name: 'kvmfornfv-cyclictest-daily-build-macro' builders: - shell: !include-raw: ./kvmfornfv-build.sh - shell: !include-raw: ./kvmfornfv-upload-artifact.sh - builder: - name: 'kvmfornfv-Stress_Idle-daily-build-macro' - builders: - - shell: - !include-raw: ./kvmfornfv-build.sh - - shell: - !include-raw: ./kvmfornfv-upload-artifact.sh -- builder: - name: 'kvmfornfv-Packet_Forwarding-daily-build-macro' - builders: - - shell: - !include-raw: ./kvmfornfv-build.sh - - shell: - !include-raw: ./kvmfornfv-upload-artifact.sh -- builder: - name: 'kvmfornfv-Idle_Idle-daily-test-macro' + name: 'kvmfornfv-cyclictest-daily-test-macro' builders: - shell: !include-raw: ./kvmfornfv-download-artifact.sh - shell: !include-raw: ./kvmfornfv-test.sh - builder: - name: 'kvmfornfv-Stress_Idle-daily-test-macro' + name: 'kvmfornfv-packet_forward-daily-build-macro' builders: - shell: - !include-raw: ./kvmfornfv-download-artifact.sh + !include-raw: ./kvmfornfv-build.sh - shell: - !include-raw: ./kvmfornfv-test.sh + !include-raw: ./kvmfornfv-upload-artifact.sh - builder: - name: 'kvmfornfv-Packet_Forwarding-daily-test-macro' + name: 'kvmfornfv-packet_forward-daily-test-macro' builders: - shell: !include-raw: ./kvmfornfv-download-artifact.sh - shell: !include-raw: ./kvmfornfv-test.sh - ##################################### # parameter macros ##################################### diff --git a/jjb/qtip/qtip-ci-jobs.yml b/jjb/qtip/qtip-ci-jobs.yml index cca8cee4c..ac9854365 100644 --- a/jjb/qtip/qtip-ci-jobs.yml +++ 
b/jjb/qtip/qtip-ci-jobs.yml @@ -2,7 +2,7 @@ # job configuration for qtip #################################### - project: - name: qtip-ci-jobs + name: qtip project: 'qtip' @@ -20,22 +20,6 @@ # master #-------------------------------- pod: - - dell-pod1: - installer: compass - auto-trigger-name: 'daily-trigger-disabled' - <<: *master - - orange-pod2: - installer: joid - auto-trigger-name: 'daily-trigger-disabled' - <<: *master - - juniper-pod1: - installer: joid - <<: *master - auto-trigger-name: 'daily-trigger-disabled' - - zte-pod1: - installer: fuel - auto-trigger-name: 'daily-trigger-disabled' - <<: *master - zte-pod2: installer: fuel auto-trigger-name: 'qtip-daily-zte-pod2-trigger' @@ -80,12 +64,14 @@ - '{auto-trigger-name}' builders: + - description-setter: + description: "POD: $NODE_NAME" - 'qtip-cleanup' - 'qtip-daily-ci' publishers: - email: - recipients: nauman.ahad@xflowresearch.com, mofassir.arif@xflowresearch.com, vikram@nvirters.com, zhang.yujunz@zte.com.cn + recipients: wu.zhihui1@zte.com.cn, zhang.yujunz@zte.com.cn ########################### #biuilder macros @@ -105,22 +91,6 @@ ################# #trigger macros ################# - -#- trigger: -# name: 'qtip-daily-dell-pod1-trigger' -# triggers: -# - timed: '0 3 * * *' - -#- trigger: -# name: 'qtip-daily-juniper-pod1-trigger' -# triggers: -# - timed : '0 0 * * *' - -#- trigger: -# name: 'qtip-dailty-orange-pod2-trigger' -# triggers: -# - timed : ' 0 0 * * *' - - trigger: name: 'qtip-daily-zte-pod2-trigger' triggers: diff --git a/jjb/qtip/qtip-project-jobs.yml b/jjb/qtip/qtip-project-jobs.yml index 8b406fea5..8798fd115 100644 --- a/jjb/qtip/qtip-project-jobs.yml +++ b/jjb/qtip/qtip-project-jobs.yml @@ -1,22 +1,20 @@ - project: - name: qtip + name: qtip-project-jobs - project: '{name}' + project: 'qtip' jobs: - 'qtip-verify-{stream}' -# only master branch is enabled at the moment to keep no of jobs sane stream: - master: branch: '{stream}' gs-pathname: '' disabled: false - - colorado: - branch: 'stable/{stream}' - gs-pathname: '/{stream}' - disabled: false +################################ +## job templates +################################# - job-template: name: 'qtip-verify-{stream}' @@ -59,6 +57,14 @@ pattern: 'docs/**|.gitignore' builders: + - qtip-unit-tests-and-docs-build + +################################ +## job builders +################################# +- builder: + name: qtip-unit-tests-and-docs-build + builders: - shell: | #!/bin/bash set -o errexit diff --git a/jjb/releng/opnfv-docker.sh b/jjb/releng/opnfv-docker.sh index 6f8d81a91..e26727abf 100644 --- a/jjb/releng/opnfv-docker.sh +++ b/jjb/releng/opnfv-docker.sh @@ -70,7 +70,7 @@ else release=$(echo $branch|sed 's/.*\///') DOCKER_TAG=${release}.${RELEASE_VERSION} # e.g. 
colorado.1.0, colorado.2.0, colorado.3.0 - else: + else DOCKER_TAG="stable" fi fi diff --git a/jjb/securityscanning/opnfv-security-scan.yml b/jjb/securityscanning/opnfv-security-scan.yml new file mode 100644 index 000000000..546f4e7e5 --- /dev/null +++ b/jjb/securityscanning/opnfv-security-scan.yml @@ -0,0 +1,109 @@ +######################## +# Job configuration for opnfv-lint +######################## +- project: + + name: anteaterfw + + project: anteaterfw + + jobs: + - 'opnfv-security-scan-verify-{stream}' + + stream: + - master: + branch: '{stream}' + gs-pathname: '' + disabled: false + +######################## +# job templates +######################## +- job-template: + name: 'opnfv-security-scan-verify-{stream}' + + disabled: '{obj:disabled}' + + parameters: + - project-parameter: + project: $GERRIT_PROJECT + - gerrit-parameter: + branch: '{branch}' + + scm: + - gerrit-trigger-scm: + credentials-id: '{ssh-credentials}' + refspec: '$GERRIT_REFSPEC' + choosing-strategy: 'gerrit' + + triggers: + - gerrit: + server-name: 'gerrit.opnfv.org' + trigger-on: + - patchset-created-event: + exclude-drafts: 'false' + exclude-trivial-rebase: 'false' + exclude-no-code-change: 'false' + - draft-published-event + - comment-added-contains-event: + comment-contains-value: 'recheck' + - comment-added-contains-event: + comment-contains-value: 'reverify' + projects: + - project-compare-type: 'REG_EXP' + project-pattern: 'sandbox' + branches: + - branch-compare-type: 'ANT' + branch-pattern: '**/{branch}' + file-paths: + - compare-type: ANT + pattern: '**/*.py' + skip-vote: + successful: true + failed: true + unstable: true + notbuilt: true + + builders: + - security-scan-python-code + - report-security-scan-result-to-gerrit +######################## +# builder macros +######################## +- builder: + name: security-scan-python-code + builders: + - shell: | + #!/bin/bash + set -o errexit + set -o pipefail + set -o xtrace + export PATH=$PATH:/usr/local/bin/ + + # this is where the security/license scan script will be executed + echo "Hello World!" +- builder: + name: report-security-scan-result-to-gerrit + builders: + - shell: | + #!/bin/bash + set -o errexit + set -o pipefail + set -o xtrace + export PATH=$PATH:/usr/local/bin/ + + # If no violations were found, no lint log will exist. 
+ if [[ -e securityscan.log ]] ; then + echo -e "\nposting security scan report to gerrit...\n" + + cat securityscan.log + echo + + ssh -p 29418 gerrit.opnfv.org \ + "gerrit review -p $GERRIT_PROJECT \ + -m \"$(cat securityscan.log)\" \ + $GERRIT_PATCHSET_REVISION \ + --notify NONE" + + exit 1 + fi diff --git a/modules/opnfv/installer_adapters/fuel/FuelAdapter.py b/modules/opnfv/installer_adapters/fuel/FuelAdapter.py index 6f079404c..8ed8f8937 100644 --- a/modules/opnfv/installer_adapters/fuel/FuelAdapter.py +++ b/modules/opnfv/installer_adapters/fuel/FuelAdapter.py @@ -213,7 +213,7 @@ class FuelAdapter: else: target_ip = ip - installer_jumphost = { + installer_proxy = { 'ip': self.installer_ip, 'username': self.installer_user, 'password': self.installer_password @@ -221,7 +221,7 @@ class FuelAdapter: controller_conn = ssh_utils.get_ssh_client( target_ip, user, - jumphost=installer_jumphost) + proxy=installer_proxy) self.logger.debug("Fetching %s from %s" % (remote_path, target_ip)) diff --git a/modules/opnfv/utils/SSHUtils.py b/modules/opnfv/utils/SSHUtils.py index 6c794c274..16e34c3e5 100644 --- a/modules/opnfv/utils/SSHUtils.py +++ b/modules/opnfv/utils/SSHUtils.py @@ -16,16 +16,16 @@ import os logger = OPNFVLogger.Logger('SSHUtils').getLogger() -def get_ssh_client(hostname, username, password=None, jumphost=None): +def get_ssh_client(hostname, username, password=None, proxy=None): client = None try: - if jumphost is None: + if proxy is None: client = paramiko.SSHClient() else: - client = JumpHostHopClient() - client.configure_jump_host(jumphost['ip'], - jumphost['username'], - jumphost['password']) + client = ProxyHopClient() + client.configure_jump_host(proxy['ip'], + proxy['username'], + proxy['password']) if client is None: raise Exception('Could not connect to client') @@ -62,31 +62,30 @@ def put_file(ssh_conn, src, dest): return None -class JumpHostHopClient(paramiko.SSHClient): +class ProxyHopClient(paramiko.SSHClient): ''' - Connect to a remote server using a jumphost hop + Connect to a remote server using a proxy hop ''' - def __init__(self, *args, **kwargs): - self.logger = OPNFVLogger.Logger("JumpHostHopClient").getLogger() - self.jumphost_ssh = None - self.jumphost_transport = None - self.jumphost_channel = None - self.jumphost_ip = None - self.jumphost_ssh_key = None + self.logger = OPNFVLogger.Logger("ProxyHopClient").getLogger() + self.proxy_ssh = None + self.proxy_transport = None + self.proxy_channel = None + self.proxy_ip = None + self.proxy_ssh_key = None self.local_ssh_key = os.path.join(os.getcwd(), 'id_rsa') - super(JumpHostHopClient, self).__init__(*args, **kwargs) + super(ProxyHopClient, self).__init__(*args, **kwargs) def configure_jump_host(self, jh_ip, jh_user, jh_pass, jh_ssh_key='/root/.ssh/id_rsa'): - self.jumphost_ip = jh_ip - self.jumphost_ssh_key = jh_ssh_key - self.jumphost_ssh = paramiko.SSHClient() - self.jumphost_ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - self.jumphost_ssh.connect(jh_ip, - username=jh_user, - password=jh_pass) - self.jumphost_transport = self.jumphost_ssh.get_transport() + self.proxy_ip = jh_ip + self.proxy_ssh_key = jh_ssh_key + self.proxy_ssh = paramiko.SSHClient() + self.proxy_ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + self.proxy_ssh.connect(jh_ip, + username=jh_user, + password=jh_pass) + self.proxy_transport = self.proxy_ssh.get_transport() def connect(self, hostname, port=22, username='root', password=None, pkey=None, key_filename=None, timeout=None, allow_agent=True, @@ -94,28 +93,28 @@ class 
JumpHostHopClient(paramiko.SSHClient): gss_kex=False, gss_deleg_creds=True, gss_host=None, banner_timeout=None): try: - if self.jumphost_ssh is None: + if self.proxy_ssh is None: raise Exception('You must configure the jump ' 'host before calling connect') - get_file_res = get_file(self.jumphost_ssh, - self.jumphost_ssh_key, + get_file_res = get_file(self.proxy_ssh, + self.proxy_ssh_key, self.local_ssh_key) if get_file_res is None: raise Exception('Could\'t fetch SSH key from jump host') - jumphost_key = (paramiko.RSAKey - .from_private_key_file(self.local_ssh_key)) + proxy_key = (paramiko.RSAKey + .from_private_key_file(self.local_ssh_key)) - self.jumphost_channel = self.jumphost_transport.open_channel( + self.proxy_channel = self.proxy_transport.open_channel( "direct-tcpip", (hostname, 22), - (self.jumphost_ip, 22)) + (self.proxy_ip, 22)) self.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - super(JumpHostHopClient, self).connect(hostname, - username=username, - pkey=jumphost_key, - sock=self.jumphost_channel) + super(ProxyHopClient, self).connect(hostname, + username=username, + pkey=proxy_key, + sock=self.proxy_channel) os.remove(self.local_ssh_key) except Exception, e: self.logger.error(e) diff --git a/modules/opnfv/utils/ovs_logger.py b/modules/opnfv/utils/ovs_logger.py new file mode 100644 index 000000000..3159609f1 --- /dev/null +++ b/modules/opnfv/utils/ovs_logger.py @@ -0,0 +1,118 @@ +############################################################################## +# Copyright (c) 2015 Ericsson AB and others. +# Author: George Paraskevopoulos (geopar@intracom-telecom.com) +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## + +import opnfv.utils.OPNFVLogger as OPNFVLogger +import os +import time +import shutil + +logger = OPNFVLogger.Logger('ovs_logger').getLogger() + + +class OVSLogger(object): + def __init__(self, basedir, ft_resdir): + self.ovs_dir = basedir + self.ft_resdir = ft_resdir + self.__mkdir_p(self.ovs_dir) + self.__mkdir_p(self.ft_resdir) + + def __mkdir_p(self, dirpath): + if not os.path.exists(dirpath): + os.makedirs(dirpath) + + def __ssh_host(self, ssh_conn, host_prefix='10.20.0'): + try: + _, stdout, _ = ssh_conn.exec_command('hostname -I') + hosts = stdout.readline().strip().split(' ') + found_host = [h for h in hosts if h.startswith(host_prefix)][0] + return found_host + except Exception, e: + logger.error(e) + + def __dump_to_file(self, operation, host, text, timestamp=None): + ts = (timestamp if timestamp is not None + else time.strftime("%Y%m%d-%H%M%S")) + dumpdir = os.path.join(self.ovs_dir, ts) + self.__mkdir_p(dumpdir) + fname = '{0}_{1}'.format(operation, host) + with open(os.path.join(dumpdir, fname), 'w') as f: + f.write(text) + + def __remote_cmd(self, ssh_conn, cmd): + try: + _, stdout, stderr = ssh_conn.exec_command(cmd) + errors = stderr.readlines() + if len(errors) > 0: + host = self.__ssh_host(ssh_conn) + logger.error(''.join(errors)) + raise Exception('Could not execute {0} in {1}' + .format(cmd, host)) + output = ''.join(stdout.readlines()) + return output + except Exception, e: + logger.error('[__remote_command(ssh_client, {0})]: {1}' + .format(cmd, e)) + return None + + def create_artifact_archive(self): + shutil.make_archive(self.ovs_dir, + 'zip', + 
root_dir=os.path.dirname(self.ovs_dir), + base_dir=self.ovs_dir) + shutil.copy2('{0}.zip'.format(self.ovs_dir), self.ft_resdir) + + def ofctl_dump_flows(self, ssh_conn, br='br-int', + choose_table=None, timestamp=None): + try: + cmd = 'ovs-ofctl -OOpenFlow13 dump-flows {0}'.format(br) + if choose_table is not None: + cmd = '{0} table={1}'.format(cmd, choose_table) + output = self.__remote_cmd(ssh_conn, cmd) + operation = 'ofctl_dump_flows' + host = self.__ssh_host(ssh_conn) + self.__dump_to_file(operation, host, output, timestamp=timestamp) + return output + except Exception, e: + logger.error('[ofctl_dump_flows(ssh_client, {0}, {1})]: {2}' + .format(br, choose_table, e)) + return None + + def vsctl_show(self, ssh_conn, timestamp=None): + try: + cmd = 'ovs-vsctl show' + output = self.__remote_cmd(ssh_conn, cmd) + operation = 'vsctl_show' + host = self.__ssh_host(ssh_conn) + self.__dump_to_file(operation, host, output, timestamp=timestamp) + return output + except Exception, e: + logger.error('[vsctl_show(ssh_client)]: {0}'.format(e)) + return None + + def dump_ovs_logs(self, controller_clients, compute_clients, + related_error=None, timestamp=None): + if timestamp is None: + timestamp = time.strftime("%Y%m%d-%H%M%S") + + for controller_client in controller_clients: + self.ofctl_dump_flows(controller_client, + timestamp=timestamp) + self.vsctl_show(controller_client, + timestamp=timestamp) + + for compute_client in compute_clients: + self.ofctl_dump_flows(compute_client, + timestamp=timestamp) + self.vsctl_show(compute_client, + timestamp=timestamp) + + if related_error is not None: + dumpdir = os.path.join(self.ovs_dir, timestamp) + with open(os.path.join(dumpdir, 'error'), 'w') as f: + f.write(related_error) diff --git a/prototypes/bifrost/scripts/test-bifrost-deployment.sh b/prototypes/bifrost/scripts/test-bifrost-deployment.sh index 6e751fed6..90f014c74 100755 --- a/prototypes/bifrost/scripts/test-bifrost-deployment.sh +++ b/prototypes/bifrost/scripts/test-bifrost-deployment.sh @@ -62,11 +62,11 @@ export DIB_DEV_USER_PASSWORD=devuser export DIB_OS_RELEASE=${DIB_OS_RELEASE:-trusty} export DIB_OS_ELEMENT=${DIB_OS_ELEMENT:-ubuntu-minimal} -# for centos 7: "openssh-server,vim,less,bridge-utils,iputils,rsyslog,curl" -export DIB_OS_PACKAGES=${DIB_OS_PACKAGES:-"openssh-server,vlan,vim,less,bridge-utils,language-pack-en,iputils-ping,rsyslog,curl"} +# for centos 7: "vim,less,bridge-utils,iputils,rsyslog,curl" +export DIB_OS_PACKAGES=${DIB_OS_PACKAGES:-"vlan,vim,less,bridge-utils,language-pack-en,iputils-ping,rsyslog,curl"} # Additional dib elements -export EXTRA_DIB_ELEMENTS=${EXTRA_DIB_ELEMENTS:-} +export EXTRA_DIB_ELEMENTS=${EXTRA_DIB_ELEMENTS:-"openssh-server"} # Source Ansible # NOTE(TheJulia): Ansible stable-1.9 source method tosses an error deep diff --git a/utils/test/testapi/deployment/deploy.py b/utils/test/testapi/deployment/deploy.py new file mode 100644 index 000000000..748bd34f8 --- /dev/null +++ b/utils/test/testapi/deployment/deploy.py @@ -0,0 +1,40 @@ +import argparse +import os + +from jinja2 import Environment, FileSystemLoader + +env = Environment(loader=FileSystemLoader('./')) +docker_compose_yml = './docker-compose.yml' +docker_compose_template = './docker-compose.yml.template' + + +def render_docker_compose(port, swagger_url): + vars = { + "expose_port": port, + "swagger_url": swagger_url, + } + template = env.get_template(docker_compose_template) + yml = template.render(vars=vars) + + with open(docker_compose_yml, 'w') as f: + f.write(yml) + f.close() + + +def main(args): 
+    render_docker_compose(args.expose_port, args.swagger_url)
+    os.system('docker-compose -f {} up -d'.format(docker_compose_yml))
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='Deploy opnfv testapi with docker-compose')
+    parser.add_argument('-p', '--expose-port',
+                        type=int,
+                        required=False,
+                        default=8000,
+                        help='testapi exposed port')
+    parser.add_argument('-su', '--swagger-url',
+                        type=str,
+                        required=True,
+                        help='testapi exposed swagger-url')
+    main(parser.parse_args())
diff --git a/utils/test/testapi/deployment/docker-compose.yml.template b/utils/test/testapi/deployment/docker-compose.yml.template
new file mode 100644
index 000000000..5b131f747
--- /dev/null
+++ b/utils/test/testapi/deployment/docker-compose.yml.template
@@ -0,0 +1,15 @@
+version: '2'
+services:
+  mongo:
+    image: mongo:3.2.1
+    container_name: opnfv-mongo
+  testapi:
+    image: opnfv/testapi:latest
+    container_name: opnfv-testapi
+    environment:
+      - mongodb_url=mongodb://mongo:27017/
+      - swagger_url={{ vars.swagger_url }}
+    ports:
+      - "{{ vars.expose_port }}:8000"
+    links:
+      - mongo
diff --git a/utils/test/testapi/opnfv_testapi/resources/testcase_models.py b/utils/test/testapi/opnfv_testapi/resources/testcase_models.py
index e1f2fb650..0ed705f0f 100644
--- a/utils/test/testapi/opnfv_testapi/resources/testcase_models.py
+++ b/utils/test/testapi/opnfv_testapi/resources/testcase_models.py
@@ -11,10 +11,23 @@ from opnfv_testapi.tornado_swagger import swagger
 
 @swagger.model()
 class TestcaseCreateRequest(object):
-    def __init__(self, name, url=None, description=None):
+    def __init__(self, name, url=None, description=None,
+                 tier=None, ci_loop=None, criteria=None,
+                 blocking=None, dependencies=None, run=None,
+                 domains=None, tags=None, version=None):
         self.name = name
         self.url = url
         self.description = description
+        self.tier = tier
+        self.ci_loop = ci_loop
+        self.criteria = criteria
+        self.blocking = blocking
+        self.dependencies = dependencies
+        self.run = run
+        self.domains = domains
+        self.tags = tags
+        self.version = version
+        self.trust = "Silver"
 
     def format(self):
         return {
@@ -26,29 +39,66 @@ class TestcaseCreateRequest(object):
 
 @swagger.model()
 class TestcaseUpdateRequest(object):
-    def __init__(self, name=None, description=None, project_name=None):
+    def __init__(self, name=None, description=None, project_name=None,
+                 tier=None, ci_loop=None, criteria=None,
+                 blocking=None, dependencies=None, run=None,
+                 domains=None, tags=None, version=None, trust=None):
         self.name = name
         self.description = description
         self.project_name = project_name
+        self.tier = tier
+        self.ci_loop = ci_loop
+        self.criteria = criteria
+        self.blocking = blocking
+        self.dependencies = dependencies
+        self.run = run
+        self.domains = domains
+        self.tags = tags
+        self.version = version
+        self.trust = trust
 
     def format(self):
         return {
             "name": self.name,
             "description": self.description,
             "project_name": self.project_name,
+            "tier": self.tier,
+            "ci_loop": self.ci_loop,
+            "criteria": self.criteria,
+            "blocking": self.blocking,
+            "dependencies": self.dependencies,
+            "run": self.run,
+            "domains": self.domains,
+            "tags": self.tags,
+            "version": self.version,
+            "trust": self.trust
        }
 
 
 @swagger.model()
 class Testcase(object):
     def __init__(self, _id=None, name=None, project_name=None,
-                 description=None, url=None, creation_date=None):
+                 description=None, url=None, creation_date=None,
+                 tier=None, ci_loop=None, criteria=None,
+                 blocking=None, dependencies=None, run=None,
+                 domains=None, tags=None, version=None,
+                 trust=None):
         self._id = None
         self.name = None
         self.project_name = None
         self.description = None
         self.url = None
         self.creation_date = None
+        self.tier = None
+        self.ci_loop = None
+        self.criteria = None
+        self.blocking = None
+        self.dependencies = None
+        self.run = None
+        self.domains = None
+        self.tags = None
+        self.version = None
+        self.trust = None
 
     @staticmethod
     def from_dict(a_dict):
@@ -63,6 +113,16 @@ class Testcase(object):
         t.name = a_dict.get('name')
         t.description = a_dict.get('description')
         t.url = a_dict.get('url')
+        t.tier = a_dict.get('tier')
+        t.ci_loop = a_dict.get('ci_loop')
+        t.criteria = a_dict.get('criteria')
+        t.blocking = a_dict.get('blocking')
+        t.dependencies = a_dict.get('dependencies')
+        t.run = a_dict.get('run')
+        t.domains = a_dict.get('domains')
+        t.tags = a_dict.get('tags')
+        t.version = a_dict.get('version')
+        t.trust = a_dict.get('trust')
 
         return t
 
@@ -72,7 +132,17 @@
             "description": self.description,
             "project_name": self.project_name,
             "creation_date": str(self.creation_date),
-            "url": self.url
+            "url": self.url,
+            "tier": self.tier,
+            "ci_loop": self.ci_loop,
+            "criteria": self.criteria,
+            "blocking": self.blocking,
+            "dependencies": self.dependencies,
+            "run": self.run,
+            "domains": self.domains,
+            "tags": self.tags,
+            "version": self.version,
+            "trust": self.trust
         }
 
     def format_http(self):
@@ -83,6 +153,16 @@
             "description": self.description,
             "creation_date": str(self.creation_date),
             "url": self.url,
+            "tier": self.tier,
+            "ci_loop": self.ci_loop,
+            "criteria": self.criteria,
+            "blocking": self.blocking,
+            "dependencies": self.dependencies,
+            "run": self.run,
+            "domains": self.domains,
+            "tags": self.tags,
+            "version": self.version,
+            "trust": self.trust
         }
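
A rough usage sketch of the pieces introduced above: the proxy argument now accepted by get_ssh_client() and the new OVSLogger helper. This is only an illustrative example; the import paths are assumed from the modules/opnfv layout, and the addresses, credentials and directories are placeholders rather than values taken from this change.

    # Hypothetical usage sketch (not part of this change)
    import opnfv.utils.SSHUtils as ssh_utils          # assumed import path
    from opnfv.utils.ovs_logger import OVSLogger      # assumed import path

    # Placeholder jump-host (installer) credentials
    installer_proxy = {
        'ip': '10.20.0.2',
        'username': 'root',
        'password': 'r00tme'
    }

    # Reach a controller node by hopping through the installer node,
    # the same way FuelAdapter now passes proxy=installer_proxy
    controller = ssh_utils.get_ssh_client('10.20.0.3', 'root',
                                          proxy=installer_proxy)

    # Dump the OpenFlow tables and 'ovs-vsctl show' output from the
    # controller, then archive the dumps next to the functest results
    ovs = OVSLogger('/tmp/ovs-logs', '/home/opnfv/functest/results')
    ovs.dump_ovs_logs([controller], [], related_error=None)
    ovs.create_artifact_archive()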