26 files changed, 897 insertions, 72 deletions
diff --git a/.gitignore b/.gitignore
index 96a76e33f..024dfac4b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,3 +5,24 @@
 /releng/
 .idea
 *.py[cod]
+
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+.venv/
+venv/
+ENV/
diff --git a/jjb/3rd_party_ci/create-apex-vms.sh b/jjb/3rd_party_ci/create-apex-vms.sh
new file mode 100755
index 000000000..a076dd084
--- /dev/null
+++ b/jjb/3rd_party_ci/create-apex-vms.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -e
+
+# wipe the WORKSPACE
+/bin/rm -rf $WORKSPACE/*
+
+# clone opnfv sdnvpn repo
+git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
+. $WORKSPACE/sdnvpn/odl-pipeline/odl-pipeline-common.sh
+pushd $LIB
+./test_environment.sh --env-number $APEX_ENV_NUMBER --cloner-info $CLONER_INFO --snapshot-disks $SNAPSHOT_DISKS --vjump-hosts $VIRTUAL_JUMPHOSTS
+popd
diff --git a/jjb/3rd_party_ci/download-netvirt-artifact.sh b/jjb/3rd_party_ci/download-netvirt-artifact.sh
new file mode 100755
index 000000000..0a48e3aec
--- /dev/null
+++ b/jjb/3rd_party_ci/download-netvirt-artifact.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+set -e
+
+# wipe the WORKSPACE
+/bin/rm -rf $WORKSPACE/*
+
+echo "Attempting to fetch the artifact location from ODL Jenkins"
+CHANGE_DETAILS_URL="https://git.opendaylight.org/gerrit/changes/netvirt~master~$GERRIT_CHANGE_ID/detail"
+# due to limitation with the Jenkins Gerrit Trigger, we need to use Gerrit REST API to get the change details
+ODL_JOB_URL=$(curl -s $CHANGE_DETAILS_URL | grep netvirt-patch-test-current-carbon | tail -1 | \
+    sed 's/\\n//g' | awk '{print $6}')
+NETVIRT_ARTIFACT_URL="${ODL_JOB_URL}org.opendaylight.integration\$distribution-karaf/artifact/org.opendaylight.integration/distribution-karaf/0.6.0-SNAPSHOT/distribution-karaf-0.6.0-SNAPSHOT.tar.gz"
+echo -e "URL to artifact is\n\t$NETVIRT_ARTIFACT_URL"
+
+echo "Downloading the artifact. This could take time..."
+wget -q -O $NETVIRT_ARTIFACT $NETVIRT_ARTIFACT_URL
+if [[ $? -ne 0 ]]; then
+    echo "The artifact does not exist! Probably removed due to ODL Jenkins artifact retention policy."
+    echo "Rerun netvirt-patch-test-current-carbon to get artifact rebuilt."
+    exit 1
+fi
+echo "Download complete"
+ls -al $NETVIRT_ARTIFACT
diff --git a/jjb/3rd_party_ci/functest-netvirt.sh b/jjb/3rd_party_ci/functest-netvirt.sh
new file mode 100755
index 000000000..adffaf42d
--- /dev/null
+++ b/jjb/3rd_party_ci/functest-netvirt.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+set -e
+
+# wipe the WORKSPACE
+/bin/rm -rf $WORKSPACE/*
+
+echo "Hello World"
diff --git a/jjb/3rd_party_ci/install-netvirt.sh b/jjb/3rd_party_ci/install-netvirt.sh
new file mode 100755
index 000000000..96c4b9634
--- /dev/null
+++ b/jjb/3rd_party_ci/install-netvirt.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+set -e
+# clone opnfv sdnvpn repo
+git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
+. $WORKSPACE/sdnvpn/odl-pipeline/odl-pipeline-common.sh
+pushd $LIB
+./odl_reinstaller.sh --cloner-info $CLONER_INFO --odl-artifact $NETVIRT_ARTIFACT
+popd
\ No newline at end of file diff --git a/jjb/3rd_party_ci/odl-netvirt.yml b/jjb/3rd_party_ci/odl-netvirt.yml new file mode 100644 index 000000000..3dd4c0b44 --- /dev/null +++ b/jjb/3rd_party_ci/odl-netvirt.yml @@ -0,0 +1,212 @@ +- project: + name: 'netvirt' + + project: 'netvirt' + + installer: 'apex' +##################################### +# branch definitions +##################################### + stream: + - master: + branch: '{stream}' + gs-pathname: '' + disabled: false +##################################### +# patch verification phases +##################################### + phase: + - 'create-apex-vms': + slave-label: 'ericsson-virtual5' + - 'install-netvirt': + slave-label: 'odl-netvirt-virtual' + - 'functest': + slave-label: 'odl-netvirt-virtual' + - 'postprocess': + slave-label: 'odl-netvirt-virtual' +##################################### +# jobs +##################################### + jobs: + - 'odl-netvirt-verify-virtual-{stream}' + - 'odl-netvirt-verify-virtual-{phase}-{stream}' +##################################### +# job templates +##################################### +- job-template: + name: 'odl-netvirt-verify-virtual-{stream}' + + project-type: multijob + + disabled: '{obj:disabled}' + + concurrent: true + + properties: + - throttle: + enabled: true + max-total: 5 + max-per-node: 1 + option: 'project' + + parameters: + - project-parameter: + project: '{project}' + - gerrit-parameter: + branch: '{branch}' + - string: + name: NETVIRT_ARTIFACT + default: $WORKSPACE/distribution-karaf.tar.gz + - 'odl-netvirt-virtual-defaults' + + triggers: + - gerrit: + server-name: 'git.opendaylight.org' + trigger-on: + - comment-added-contains-event: + comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : SUCCESS' + - comment-added-contains-event: + comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : UNSTABLE' + - comment-added-contains-event: + comment-contains-value: 'opnfv-test' + projects: + - project-compare-type: 'ANT' + project-pattern: '{project}' + branches: + - branch-compare-type: 'ANT' + branch-pattern: '**/{branch}' + readable-message: true + + builders: + - description-setter: + description: "Built on $NODE_NAME" + - multijob: + name: create-apex-vms + condition: SUCCESSFUL + projects: + - name: 'odl-netvirt-verify-virtual-create-apex-vms-{stream}' + current-parameters: false + predefined-parameters: | + GERRIT_BRANCH=$GERRIT_BRANCH + GERRIT_REFSPEC=$GERRIT_REFSPEC + GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER + GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID + GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER + GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION + NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT + APEX_ENV_NUMBER=$APEX_ENV_NUMBER + node-parameters: false + kill-phase-on: FAILURE + abort-all-job: true + - multijob: + name: install-netvirt + condition: SUCCESSFUL + projects: + - name: 'odl-netvirt-verify-virtual-install-netvirt-{stream}' + current-parameters: false + predefined-parameters: | + GERRIT_BRANCH=$GERRIT_BRANCH + GERRIT_REFSPEC=$GERRIT_REFSPEC + GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER + GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID + GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER + GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION + NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT + node-parameters: true + kill-phase-on: FAILURE + abort-all-job: true + - multijob: + name: functest + condition: SUCCESSFUL + projects: + - name: 'functest-netvirt-virtual-suite-{stream}' + 
predefined-parameters: | + FUNCTEST_SUITE_NAME=vping_userdata,bgpvpn + RC_FILE_PATH=/home/jenkins/cloner-info/overcloudrc + node-parameters: true + kill-phase-on: FAILURE + abort-all-job: false + - multijob: + name: postprocess + condition: ALWAYS + projects: + - name: 'odl-netvirt-verify-virtual-postprocess-{stream}' + current-parameters: false + predefined-parameters: | + GERRIT_BRANCH=$GERRIT_BRANCH + GERRIT_REFSPEC=$GERRIT_REFSPEC + GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER + GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID + GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER + GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION + NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT + node-parameters: true + kill-phase-on: FAILURE + abort-all-job: true + +- job-template: + name: 'odl-netvirt-verify-virtual-{phase}-{stream}' + + disabled: '{obj:disabled}' + + concurrent: true + + properties: + - throttle: + enabled: true + max-total: 5 + max-per-node: 1 + option: 'project' + - build-blocker: + use-build-blocker: true + blocking-jobs: + - 'odl-netvirt-verify-virtual-install-.*' + - 'odl-netvirt-verify-virtual-functest-.*' + - 'odl-netvirt-verify-virtual-postprocess-.*' + block-level: 'NODE' + + wrappers: + - ssh-agent-credentials: + users: + - '{ssh-credentials}' + - timeout: + timeout: 360 + fail: true + + parameters: + - project-parameter: + project: '{project}' + - gerrit-parameter: + branch: '{branch}' + - '{slave-label}-defaults' + - '{installer}-defaults' + + builders: + - description-setter: + description: "Built on $NODE_NAME" + - '{project}-verify-{phase}-builder' +##################################### +# builder macros +##################################### +- builder: + name: 'netvirt-verify-create-apex-vms-builder' + builders: + - shell: + !include-raw: ./create-apex-vms.sh +- builder: + name: 'netvirt-verify-install-netvirt-builder' + builders: + - shell: + !include-raw: ./download-netvirt-artifact.sh + - shell: + !include-raw: ./install-netvirt.sh +- builder: + name: 'netvirt-verify-functest-builder' + builders: + - shell: + !include-raw: ./functest-netvirt.sh +- builder: + name: 'netvirt-verify-postprocess-builder' + builders: + - shell: + !include-raw: ./postprocess-netvirt.sh diff --git a/jjb/3rd_party_ci/postprocess-netvirt.sh b/jjb/3rd_party_ci/postprocess-netvirt.sh new file mode 100755 index 000000000..adffaf42d --- /dev/null +++ b/jjb/3rd_party_ci/postprocess-netvirt.sh @@ -0,0 +1,7 @@ +#!/bin/bash +set -e + +# wipe the WORKSPACE +/bin/rm -rf $WORKSPACE/* + +echo "Hello World" diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh index f54e4c55a..64f13f4e6 100755 --- a/jjb/apex/apex-upload-artifact.sh +++ b/jjb/apex/apex-upload-artifact.sh @@ -11,6 +11,8 @@ echo # source the opnfv.properties to get ARTIFACT_VERSION source $WORKSPACE/opnfv.properties +BUILD_DIRECTORY=${WORKSPACE}/.build + # clone releng repository echo "Cloning releng repository..." [ -d releng ] && rm -rf releng diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml index 9befaba83..d8784c678 100644 --- a/jjb/apex/apex.yml +++ b/jjb/apex/apex.yml @@ -606,7 +606,7 @@ description: "Artifact version type" - string: name: BUILD_DIRECTORY - default: $WORKSPACE/.build + default: $WORKSPACE/build description: "Directory where the build artifact will be located upon the completion of the build." 
- string: name: CACHE_DIRECTORY diff --git a/jjb/fastpathmetrics/fastpathmetrics.yml b/jjb/barometer/barometer.yml index 6be6a52c3..c763f3001 100644 --- a/jjb/fastpathmetrics/fastpathmetrics.yml +++ b/jjb/barometer/barometer.yml @@ -3,14 +3,14 @@ # They will only be enabled on request by projects! ################################################### - project: - name: fastpathmetrics + name: barometer project: '{name}' jobs: - - 'fastpathmetrics-verify-{stream}' - - 'fastpathmetrics-merge-{stream}' - - 'fastpathmetrics-daily-{stream}' + - 'barometer-verify-{stream}' + - 'barometer-merge-{stream}' + - 'barometer-daily-{stream}' stream: - master: @@ -23,7 +23,7 @@ disabled: false - job-template: - name: 'fastpathmetrics-verify-{stream}' + name: 'barometer-verify-{stream}' disabled: '{obj:disabled}' @@ -71,7 +71,7 @@ make - job-template: - name: 'fastpathmetrics-merge-{stream}' + name: 'barometer-merge-{stream}' project-type: freestyle @@ -120,11 +120,12 @@ - shell: | pwd cd src - make clobber - make + ./install_build_deps.sh + sudo make clobber + sudo make - job-template: - name: 'fastpathmetrics-daily-{stream}' + name: 'barometer-daily-{stream}' project-type: freestyle @@ -159,5 +160,6 @@ - shell: | pwd cd src - make clobber - make + ./install_build_deps.sh + sudo make clobber + sudo make diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml index 330c6dd17..0d97fef75 100644 --- a/jjb/compass4nfv/compass-ci-jobs.yml +++ b/jjb/compass4nfv/compass-ci-jobs.yml @@ -10,10 +10,10 @@ stream: master branch: '{stream}' gs-pathname: '' - colorado: &colorado - stream: colorado - branch: 'stable/{stream}' - gs-pathname: '/{stream}' +# colorado: &colorado +# stream: colorado +# branch: 'stable/{stream}' +# gs-pathname: '/{stream}' #-------------------------------- # POD, INSTALLER, AND BRANCH MAPPING #-------------------------------- @@ -28,14 +28,14 @@ slave-label: compass-virtual os-version: 'trusty' <<: *master - - baremetal: - slave-label: compass-baremetal - os-version: 'trusty' - <<: *colorado - - virtual: - slave-label: compass-virtual - os-version: 'trusty' - <<: *colorado +# - baremetal: +# slave-label: compass-baremetal +# os-version: 'trusty' +# <<: *colorado +# - virtual: +# slave-label: compass-virtual +# os-version: 'trusty' +# <<: *colorado #-------------------------------- # master #-------------------------------- diff --git a/jjb/dovetail/dovetail-run.sh b/jjb/dovetail/dovetail-run.sh index a41a989fb..4082c34fe 100755 --- a/jjb/dovetail/dovetail-run.sh +++ b/jjb/dovetail/dovetail-run.sh @@ -66,7 +66,7 @@ if [ $(docker ps | grep "opnfv/dovetail:${DOCKER_TAG}" | wc -l) == 0 ]; then exit 1 fi -exec_cmd="python ${DOVETAIL_REPO_DIR}/dovetail/run.py --testsuite ${TESTSUITE}" +exec_cmd="python ${DOVETAIL_REPO_DIR}/dovetail/run.py --testsuite ${TESTSUITE} -d true" echo "Container exec command: ${exec_cmd}" docker exec ${container_id} ${exec_cmd} diff --git a/jjb/functest/functest-ci-jobs.yml b/jjb/functest/functest-ci-jobs.yml index 915828ca7..a578fcecd 100644 --- a/jjb/functest/functest-ci-jobs.yml +++ b/jjb/functest/functest-ci-jobs.yml @@ -113,6 +113,11 @@ slave-label: armband-virtual installer: fuel <<: *colorado +# netvirt 3rd party ci + - virtual: + slave-label: odl-netvirt-virtual + installer: netvirt + <<: *master #-------------------------------- # None-CI PODs #-------------------------------- @@ -226,7 +231,7 @@ description: 'Tag to pull docker image' - string: name: CLEAN_DOCKER_IMAGES - default: 'true' + default: 'false' description: 'Remove 
downloaded docker images (opnfv/functest:*)' - functest-parameter: gs-pathname: '{gs-pathname}' diff --git a/jjb/functest/functest-suite.sh b/jjb/functest/functest-suite.sh index df286569f..a30fb5973 100755 --- a/jjb/functest/functest-suite.sh +++ b/jjb/functest/functest-suite.sh @@ -8,7 +8,7 @@ if [[ ${branch} == *"brahmaputra"* ]]; then elif [[ ${branch} == *"colorado"* ]]; then cmd="python ${FUNCTEST_REPO_DIR}/ci/run_tests.py -t $FUNCTEST_SUITE_NAME" else - cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/run_tests.py -t $FUNCTEST_SUITE_NAME" + cmd="functest testcase run $FUNCTEST_SUITE_NAME" fi container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1) docker exec $container_id $cmd diff --git a/jjb/global/installer-params.yml b/jjb/global/installer-params.yml index 55f4769b7..610072d1b 100644 --- a/jjb/global/installer-params.yml +++ b/jjb/global/installer-params.yml @@ -100,3 +100,22 @@ name: INSTALLER_TYPE default: infra description: 'Installer used for deploying OPNFV on this POD' +- parameter: + name: 'netvirt-defaults' + parameters: + - string: + name: INSTALLER_IP + default: '192.168.X.X' + description: 'IP of the installer' + - string: + name: INSTALLER_TYPE + default: apex + description: 'Installer used for deploying OPNFV on this POD' + - string: + name: DEPLOY_SCENARIO + default: 'none' + description: 'Scenario to deploy and test' + - string: + name: EXTERNAL_NETWORK + default: 'external' + description: 'external network for test' diff --git a/jjb/global/slave-params.yml b/jjb/global/slave-params.yml index dd0c97996..7eec70918 100644 --- a/jjb/global/slave-params.yml +++ b/jjb/global/slave-params.yml @@ -670,6 +670,26 @@ name: GIT_BASE default: https://gerrit.opnfv.org/gerrit/$PROJECT description: 'Git URL to use on this Jenkins Slave' +- parameter: + name: 'ericsson-virtual5-defaults' + parameters: + - label: + name: SLAVE_LABEL + default: 'ericsson-virtual5' + - string: + name: GIT_BASE + default: https://git.opendaylight.org/gerrit/p/$PROJECT.git + description: 'Git URL to use on this Jenkins Slave' +- parameter: + name: 'odl-netvirt-virtual-defaults' + parameters: + - label: + name: SLAVE_LABEL + default: 'odl-netvirt-virtual' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: 'Git URL to use on this Jenkins Slave' ##################################################### # These slaves are just dummy slaves for sandbox jobs ##################################################### diff --git a/jjb/netready/netready-gluon-build.sh b/jjb/netready/netready-gluon-build.sh new file mode 100755 index 000000000..141e84cbd --- /dev/null +++ b/jjb/netready/netready-gluon-build.sh @@ -0,0 +1,42 @@ +#!/bin/bash +set -o errexit +set -o nounset +set -o pipefail + +echo "Building Gluon packages." 
+echo "------------------------" +echo + +OPNFV_ARTIFACT_VERSION=$(echo $(date -u +"%Y%m%d")) + +# build all packages +cd $WORKSPACE/ci +./build-gluon-packages.sh + +# list the contents of BUILD_OUTPUT directory +echo "Build Directory is ${BUILD_DIRECTORY}" +echo "Build Directory Contents:" +echo "---------------------------------------" +ls -alR $BUILD_DIRECTORY + +# get version infos from Gluon from spec +GLUON_VERSION=$(grep Version: $BUILD_DIRECTORY/rpm_specs/gluon.spec | awk '{ print $2 }') +GLUON_RELEASE=$(grep 'define release' $BUILD_DIRECTORY/rpm_specs/gluon.spec | awk '{ print $3 }')_$OPNFV_ARTIFACT_VERSION + +ARTIFACT_NAME=gluon-$GLUON_VERSION-$GLUON_RELEASE.noarch.rpm +ARTIFACT_PATH=$BUILD_DIRECTORY/noarch/$ARTIFACT_NAME + +echo "Writing opnfv.properties file" +# save information regarding artifact into file +( + echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION" + echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)" + echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)" + echo "OPNFV_ARTIFACT_URL=$GS_URL/$ARTIFACT_NAME" + echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $ARTIFACT_PATH | cut -d' ' -f1)" + echo "OPNFV_BUILD_URL=$BUILD_URL" + echo "ARTIFACT_LIST=$ARTIFACT_PATH" +) > $WORKSPACE/opnfv.properties + +echo "---------------------------------------" +echo "Done!" diff --git a/jjb/netready/netready-upload-gluon-packages.sh b/jjb/netready/netready-upload-gluon-packages.sh new file mode 100755 index 000000000..7c1e33727 --- /dev/null +++ b/jjb/netready/netready-upload-gluon-packages.sh @@ -0,0 +1,27 @@ +#!/bin/bash +set -o errexit +set -o nounset +set -o pipefail + +echo "Uploading Gluon packages" +echo "--------------------------------------------------------" +echo + +source $WORKSPACE/opnfv.properties + +for artifact in $ARTIFACT_LIST; do + echo "Uploading artifact: ${artifact}" + gsutil cp $artifact gs://$GS_URL/$(basename $artifact) > gsutil.$(basename $artifact).log + echo "Upload complete for ${artifact}" +done + +gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log +gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.properties.log + +echo "--------------------------------------------------------" +echo "Upload done!" + +echo "Artifacts are not available as:" +for artifact in $ARTIFACT_LIST; do + echo "http://$GS_URL/$(basename $artifact)" +done diff --git a/jjb/netready/netready.yml b/jjb/netready/netready.yml index cc6769e92..eff7b8701 100644 --- a/jjb/netready/netready.yml +++ b/jjb/netready/netready.yml @@ -5,16 +5,13 @@ jobs: - 'netready-verify-{stream}' + - 'netready-build-gluon-packages-daily-{stream}' stream: - master: branch: '{stream}' gs-pathname: '' disabled: false - - colorado: - branch: 'stable/{stream}' - gs-pathname: '/{stream}' - disabled: false - job-template: name: 'netready-verify-{stream}' @@ -58,3 +55,63 @@ builders: - shell: | echo "Nothing to verify!" 
+ + + +- job-template: + name: 'netready-build-gluon-packages-daily-{stream}' + + disabled: false + + concurrent: true + + parameters: + - project-parameter: + project: '{project}' + - gerrit-parameter: + branch: '{branch}' + - 'opnfv-build-ubuntu-defaults' + - 'netready-parameter': + gs-pathname: '{gs-pathname}' + + scm: + - git-scm: + credentials-id: '{ssh-credentials}' + refspec: '' + branch: '{branch}' + + builders: + - 'netready-gluon-build' + + triggers: + - timed: '@midnight' + + +######################## +# builder macros +######################## + +- builder: + name: 'netready-gluon-build' + builders: + - shell: + !include-raw: ./netready-gluon-build.sh + - shell: + !include-raw: ./netready-upload-gluon-packages.sh + + +######################## +# parameter macros +######################## + +- parameter: + name: netready-parameter + parameters: + - string: + name: BUILD_DIRECTORY + default: $WORKSPACE/build + description: "Directory where the build artifact will be located upon the completion of the build." + - string: + name: GS_URL + default: artifacts.opnfv.org/$PROJECT{gs-pathname} + description: "URL to Google Storage." diff --git a/jjb/securityscanning/opnfv-security-scan.yml b/jjb/securityscanning/opnfv-security-scan.yml new file mode 100644 index 000000000..546f4e7e5 --- /dev/null +++ b/jjb/securityscanning/opnfv-security-scan.yml @@ -0,0 +1,109 @@ +######################## +# Job configuration for opnfv-lint +######################## +- project: + + name: anteaterfw + + project: anteaterfw + + jobs: + - 'opnfv-security-scan-verify-{stream}' + + stream: + - master: + branch: '{stream}' + gs-pathname: '' + disabled: false + +######################## +# job templates +######################## +- job-template: + name: 'opnfv-security-scan-verify-{stream}' + + disabled: '{obj:disabled}' + + parameters: + - project-parameter: + project: $GERRIT_PROJECT + - gerrit-parameter: + branch: '{branch}' + + scm: + - gerrit-trigger-scm: + credentials-id: '{ssh-credentials}' + refspec: '$GERRIT_REFSPEC' + choosing-strategy: 'gerrit' + + triggers: + - gerrit: + server-name: 'gerrit.opnfv.org' + trigger-on: + - patchset-created-event: + exclude-drafts: 'false' + exclude-trivial-rebase: 'false' + exclude-no-code-change: 'false' + - draft-published-event + - comment-added-contains-event: + comment-contains-value: 'recheck' + - comment-added-contains-event: + comment-contains-value: 'reverify' + projects: + - project-compare-type: 'REG_EXP' + project-pattern: 'sandbox' + branches: + - branch-compare-type: 'ANT' + branch-pattern: '**/{branch}' + file-paths: + - compare-type: ANT + pattern: '**/*.py' + skip-vote: + successful: true + failed: true + unstable: true + notbuilt: true + + builders: + - security-scan-python-code + - report-security-scan-result-to-gerrit +######################## +# builder macros +######################## +- builder: + name: security-scan-python-code + builders: + - shell: | + #!/bin/bash + set -o errexit + set -o pipefail + set -o xtrace + export PATH=$PATH:/usr/local/bin/ + + # this is where the security/license scan script will be executed + echo "Hello World!" +- builder: + name: report-security-scan-result-to-gerrit + builders: + - shell: | + #!/bin/bash + set -o errexit + set -o pipefail + set -o xtrace + export PATH=$PATH:/usr/local/bin/ + + # If no violations were found, no lint log will exist. 
+ if [[ -e securityscan.log ]] ; then + echo -e "\nposting security scan report to gerrit...\n" + + cat securityscan.log + echo + + ssh -p 29418 gerrit.opnfv.org \ + "gerrit review -p $GERRIT_PROJECT \ + -m \"$(cat securityscan.log)\" \ + $GERRIT_PATCHSET_REVISION \ + --notify NONE" + + exit 1 + fi diff --git a/modules/opnfv/installer_adapters/fuel/FuelAdapter.py b/modules/opnfv/installer_adapters/fuel/FuelAdapter.py index 6f079404c..8ed8f8937 100644 --- a/modules/opnfv/installer_adapters/fuel/FuelAdapter.py +++ b/modules/opnfv/installer_adapters/fuel/FuelAdapter.py @@ -213,7 +213,7 @@ class FuelAdapter: else: target_ip = ip - installer_jumphost = { + installer_proxy = { 'ip': self.installer_ip, 'username': self.installer_user, 'password': self.installer_password @@ -221,7 +221,7 @@ class FuelAdapter: controller_conn = ssh_utils.get_ssh_client( target_ip, user, - jumphost=installer_jumphost) + proxy=installer_proxy) self.logger.debug("Fetching %s from %s" % (remote_path, target_ip)) diff --git a/modules/opnfv/utils/SSHUtils.py b/modules/opnfv/utils/SSHUtils.py index 6c794c274..16e34c3e5 100644 --- a/modules/opnfv/utils/SSHUtils.py +++ b/modules/opnfv/utils/SSHUtils.py @@ -16,16 +16,16 @@ import os logger = OPNFVLogger.Logger('SSHUtils').getLogger() -def get_ssh_client(hostname, username, password=None, jumphost=None): +def get_ssh_client(hostname, username, password=None, proxy=None): client = None try: - if jumphost is None: + if proxy is None: client = paramiko.SSHClient() else: - client = JumpHostHopClient() - client.configure_jump_host(jumphost['ip'], - jumphost['username'], - jumphost['password']) + client = ProxyHopClient() + client.configure_jump_host(proxy['ip'], + proxy['username'], + proxy['password']) if client is None: raise Exception('Could not connect to client') @@ -62,31 +62,30 @@ def put_file(ssh_conn, src, dest): return None -class JumpHostHopClient(paramiko.SSHClient): +class ProxyHopClient(paramiko.SSHClient): ''' - Connect to a remote server using a jumphost hop + Connect to a remote server using a proxy hop ''' - def __init__(self, *args, **kwargs): - self.logger = OPNFVLogger.Logger("JumpHostHopClient").getLogger() - self.jumphost_ssh = None - self.jumphost_transport = None - self.jumphost_channel = None - self.jumphost_ip = None - self.jumphost_ssh_key = None + self.logger = OPNFVLogger.Logger("ProxyHopClient").getLogger() + self.proxy_ssh = None + self.proxy_transport = None + self.proxy_channel = None + self.proxy_ip = None + self.proxy_ssh_key = None self.local_ssh_key = os.path.join(os.getcwd(), 'id_rsa') - super(JumpHostHopClient, self).__init__(*args, **kwargs) + super(ProxyHopClient, self).__init__(*args, **kwargs) def configure_jump_host(self, jh_ip, jh_user, jh_pass, jh_ssh_key='/root/.ssh/id_rsa'): - self.jumphost_ip = jh_ip - self.jumphost_ssh_key = jh_ssh_key - self.jumphost_ssh = paramiko.SSHClient() - self.jumphost_ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - self.jumphost_ssh.connect(jh_ip, - username=jh_user, - password=jh_pass) - self.jumphost_transport = self.jumphost_ssh.get_transport() + self.proxy_ip = jh_ip + self.proxy_ssh_key = jh_ssh_key + self.proxy_ssh = paramiko.SSHClient() + self.proxy_ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + self.proxy_ssh.connect(jh_ip, + username=jh_user, + password=jh_pass) + self.proxy_transport = self.proxy_ssh.get_transport() def connect(self, hostname, port=22, username='root', password=None, pkey=None, key_filename=None, timeout=None, allow_agent=True, @@ -94,28 +93,28 @@ class 
JumpHostHopClient(paramiko.SSHClient): gss_kex=False, gss_deleg_creds=True, gss_host=None, banner_timeout=None): try: - if self.jumphost_ssh is None: + if self.proxy_ssh is None: raise Exception('You must configure the jump ' 'host before calling connect') - get_file_res = get_file(self.jumphost_ssh, - self.jumphost_ssh_key, + get_file_res = get_file(self.proxy_ssh, + self.proxy_ssh_key, self.local_ssh_key) if get_file_res is None: raise Exception('Could\'t fetch SSH key from jump host') - jumphost_key = (paramiko.RSAKey - .from_private_key_file(self.local_ssh_key)) + proxy_key = (paramiko.RSAKey + .from_private_key_file(self.local_ssh_key)) - self.jumphost_channel = self.jumphost_transport.open_channel( + self.proxy_channel = self.proxy_transport.open_channel( "direct-tcpip", (hostname, 22), - (self.jumphost_ip, 22)) + (self.proxy_ip, 22)) self.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - super(JumpHostHopClient, self).connect(hostname, - username=username, - pkey=jumphost_key, - sock=self.jumphost_channel) + super(ProxyHopClient, self).connect(hostname, + username=username, + pkey=proxy_key, + sock=self.proxy_channel) os.remove(self.local_ssh_key) except Exception, e: self.logger.error(e) diff --git a/modules/opnfv/utils/ovs_logger.py b/modules/opnfv/utils/ovs_logger.py new file mode 100644 index 000000000..3159609f1 --- /dev/null +++ b/modules/opnfv/utils/ovs_logger.py @@ -0,0 +1,118 @@ +############################################################################## +# Copyright (c) 2015 Ericsson AB and others. +# Author: George Paraskevopoulos (geopar@intracom-telecom.com) +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## + +import opnfv.utils.OPNFVLogger as OPNFVLogger +import os +import time +import shutil + +logger = OPNFVLogger.Logger('ovs_logger').getLogger() + + +class OVSLogger(object): + def __init__(self, basedir, ft_resdir): + self.ovs_dir = basedir + self.ft_resdir = ft_resdir + self.__mkdir_p(self.ovs_dir) + self.__mkdir_p(self.ft_resdir) + + def __mkdir_p(self, dirpath): + if not os.path.exists(dirpath): + os.makedirs(dirpath) + + def __ssh_host(self, ssh_conn, host_prefix='10.20.0'): + try: + _, stdout, _ = ssh_conn.exec_command('hostname -I') + hosts = stdout.readline().strip().split(' ') + found_host = [h for h in hosts if h.startswith(host_prefix)][0] + return found_host + except Exception, e: + logger.error(e) + + def __dump_to_file(self, operation, host, text, timestamp=None): + ts = (timestamp if timestamp is not None + else time.strftime("%Y%m%d-%H%M%S")) + dumpdir = os.path.join(self.ovs_dir, ts) + self.__mkdir_p(dumpdir) + fname = '{0}_{1}'.format(operation, host) + with open(os.path.join(dumpdir, fname), 'w') as f: + f.write(text) + + def __remote_cmd(self, ssh_conn, cmd): + try: + _, stdout, stderr = ssh_conn.exec_command(cmd) + errors = stderr.readlines() + if len(errors) > 0: + host = self.__ssh_host(ssh_conn) + logger.error(''.join(errors)) + raise Exception('Could not execute {0} in {1}' + .format(cmd, host)) + output = ''.join(stdout.readlines()) + return output + except Exception, e: + logger.error('[__remote_command(ssh_client, {0})]: {1}' + .format(cmd, e)) + return None + + def create_artifact_archive(self): + shutil.make_archive(self.ovs_dir, + 'zip', + 
root_dir=os.path.dirname(self.ovs_dir), + base_dir=self.ovs_dir) + shutil.copy2('{0}.zip'.format(self.ovs_dir), self.ft_resdir) + + def ofctl_dump_flows(self, ssh_conn, br='br-int', + choose_table=None, timestamp=None): + try: + cmd = 'ovs-ofctl -OOpenFlow13 dump-flows {0}'.format(br) + if choose_table is not None: + cmd = '{0} table={1}'.format(cmd, choose_table) + output = self.__remote_cmd(ssh_conn, cmd) + operation = 'ofctl_dump_flows' + host = self.__ssh_host(ssh_conn) + self.__dump_to_file(operation, host, output, timestamp=timestamp) + return output + except Exception, e: + logger.error('[ofctl_dump_flows(ssh_client, {0}, {1})]: {2}' + .format(br, choose_table, e)) + return None + + def vsctl_show(self, ssh_conn, timestamp=None): + try: + cmd = 'ovs-vsctl show' + output = self.__remote_cmd(ssh_conn, cmd) + operation = 'vsctl_show' + host = self.__ssh_host(ssh_conn) + self.__dump_to_file(operation, host, output, timestamp=timestamp) + return output + except Exception, e: + logger.error('[vsctl_show(ssh_client)]: {0}'.format(e)) + return None + + def dump_ovs_logs(self, controller_clients, compute_clients, + related_error=None, timestamp=None): + if timestamp is None: + timestamp = time.strftime("%Y%m%d-%H%M%S") + + for controller_client in controller_clients: + self.ofctl_dump_flows(controller_client, + timestamp=timestamp) + self.vsctl_show(controller_client, + timestamp=timestamp) + + for compute_client in compute_clients: + self.ofctl_dump_flows(compute_client, + timestamp=timestamp) + self.vsctl_show(compute_client, + timestamp=timestamp) + + if related_error is not None: + dumpdir = os.path.join(self.ovs_dir, timestamp) + with open(os.path.join(dumpdir, 'error'), 'w') as f: + f.write(related_error) diff --git a/utils/test/testapi/deployment/deploy.py b/utils/test/testapi/deployment/deploy.py new file mode 100644 index 000000000..748bd34f8 --- /dev/null +++ b/utils/test/testapi/deployment/deploy.py @@ -0,0 +1,40 @@ +import argparse +import os + +from jinja2 import Environment, FileSystemLoader + +env = Environment(loader=FileSystemLoader('./')) +docker_compose_yml = './docker-compose.yml' +docker_compose_template = './docker-compose.yml.template' + + +def render_docker_compose(port, swagger_url): + vars = { + "expose_port": port, + "swagger_url": swagger_url, + } + template = env.get_template(docker_compose_template) + yml = template.render(vars=vars) + + with open(docker_compose_yml, 'w') as f: + f.write(yml) + f.close() + + +def main(args): + render_docker_compose(args.expose_port, args.swagger_url) + os.system('docker-compose -f {} up -d'.format(docker_compose_yml)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Backup MongoDBs') + parser.add_argument('-p', '--expose-port', + type=int, + required=False, + default=8000, + help='testapi exposed port') + parser.add_argument('-su', '--swagger-url', + type=str, + required=True, + help='testapi exposed swagger-url') + main(parser.parse_args()) diff --git a/utils/test/testapi/deployment/docker-compose.yml.template b/utils/test/testapi/deployment/docker-compose.yml.template new file mode 100644 index 000000000..5b131f747 --- /dev/null +++ b/utils/test/testapi/deployment/docker-compose.yml.template @@ -0,0 +1,15 @@ +version: '2' +services: + mongo: + image: mongo:3.2.1 + container_name: opnfv-mongo + testapi: + image: opnfv/testapi:latest + container_name: opnfv-testapi + environment: + - mongodb_url=mongodb://mongo:27017/ + - swagger_url={{ vars.swagger_url }} + ports: + - "{{ vars.expose_port }}:8000" 
+ links: + - mongo diff --git a/utils/test/testapi/opnfv_testapi/resources/testcase_models.py b/utils/test/testapi/opnfv_testapi/resources/testcase_models.py index e1f2fb650..0ed705f0f 100644 --- a/utils/test/testapi/opnfv_testapi/resources/testcase_models.py +++ b/utils/test/testapi/opnfv_testapi/resources/testcase_models.py @@ -11,10 +11,23 @@ from opnfv_testapi.tornado_swagger import swagger @swagger.model() class TestcaseCreateRequest(object): - def __init__(self, name, url=None, description=None): + def __init__(self, name, url=None, description=None, + tier=None, ci_loop=None, criteria=None, + blocking=None, dependencies=None, run=None, + domains=None, tags=None, version=None): self.name = name self.url = url self.description = description + self.tier = tier + self.ci_loop = ci_loop + self.criteria = criteria + self.blocking = blocking + self.dependencies = dependencies + self.run = run + self.domains = domains + self.tags = tags + self.version = version + self.trust = "Silver" def format(self): return { @@ -26,29 +39,66 @@ class TestcaseCreateRequest(object): @swagger.model() class TestcaseUpdateRequest(object): - def __init__(self, name=None, description=None, project_name=None): + def __init__(self, name=None, description=None, project_name=None, + tier=None, ci_loop=None, criteria=None, + blocking=None, dependencies=None, run=None, + domains=None, tags=None, version=None, trust=None): self.name = name self.description = description self.project_name = project_name + self.tier = tier + self.ci_loop = ci_loop + self.criteria = criteria + self.blocking = blocking + self.dependencies = dependencies + self.run = run + self.domains = domains + self.tags = tags + self.version = version + self.trust = trust def format(self): return { "name": self.name, "description": self.description, "project_name": self.project_name, + "tier": self.tier, + "ci_loop": self.ci_loop, + "criteria": self.criteria, + "blocking": self.blocking, + "dependencies": self.dependencies, + "run": self.run, + "domains": self.domains, + "tags": self.tags, + "version": self.version, + "trust": self.trust } @swagger.model() class Testcase(object): def __init__(self, _id=None, name=None, project_name=None, - description=None, url=None, creation_date=None): + description=None, url=None, creation_date=None, + tier=None, ci_loop=None, criteria=None, + blocking=None, dependencies=None, run=None, + domains=None, tags=None, version=None, + trust=None): self._id = None self.name = None self.project_name = None self.description = None self.url = None self.creation_date = None + self.tier=None + self.ci_loop=None + self.criteria=None + self.blocking=None + self.dependencies=None + self.run=None + self.domains=None + self.tags=None + self.version=None + self.trust=None @staticmethod def from_dict(a_dict): @@ -63,6 +113,16 @@ class Testcase(object): t.name = a_dict.get('name') t.description = a_dict.get('description') t.url = a_dict.get('url') + t.tier = a_dict.get('tier') + t.ci_loop = a_dict.get('ci_loop') + t.criteria = a_dict.get('criteria') + t.blocking = a_dict.get('blocking') + t.dependencies = a_dict.get('dependencies') + t.run = a_dict.get('run') + t.domains = a_dict.get('domains') + t.tags = a_dict.get('tags') + t.version = a_dict.get('version') + t.trust = a_dict.get('trust') return t @@ -72,7 +132,17 @@ class Testcase(object): "description": self.description, "project_name": self.project_name, "creation_date": str(self.creation_date), - "url": self.url + "url": self.url, + "tier": self.tier, + "ci_loop": self.ci_loop, + 
"criteria": self.criteria, + "blocking": self.blocking, + "dependencies": self.dependencies, + "run": self.run, + "domains": self.domains, + "tags": self.tags, + "version": self.version, + "trust": self.trust } def format_http(self): @@ -83,6 +153,16 @@ class Testcase(object): "description": self.description, "creation_date": str(self.creation_date), "url": self.url, + "tier": self.tier, + "ci_loop": self.ci_loop, + "criteria": self.criteria, + "blocking": self.blocking, + "dependencies": self.dependencies, + "run": self.run, + "domains": self.domains, + "tags": self.tags, + "version": self.version, + "trust": self.trust } |