20 files changed, 924 insertions, 414 deletions
diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml index 20a7dbcdd..70731b568 100644 --- a/jjb/apex/apex.yml +++ b/jjb/apex/apex.yml @@ -9,6 +9,9 @@ - 'apex-deploy-virtual-{scenario}-{stream}' - 'apex-deploy-baremetal-{scenario}-{stream}' - 'apex-daily-{stream}' + - 'apex-daily-colorado' + - 'apex-build-colorado' + - 'apex-deploy-baremetal-os-odl_l2-fdio-ha-colorado' # stream: branch with - in place of / (eg. stable-arno) # branch: branch (eg. stable/arno) @@ -548,6 +551,160 @@ failure-threshold: 'never' unstable-threshold: 'FAILURE' + +# Colorado Build +- job-template: + name: 'apex-build-colorado' + + # Job template for builds + # + # Required Variables: + # stream: branch with - in place of / (eg. stable) + # branch: branch (eg. stable) + node: 'apex-daily-colorado' + + disabled: false + + concurrent: true + + parameters: + - project-parameter: + project: '{project}' + - apex-parameter: + gs-pathname: '/colorado' + - gerrit-parameter: + branch: 'stable/colorado' + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT + description: "Used for overriding the GIT URL coming from parameters macro." + + scm: + - git-scm + + properties: + - build-blocker: + use-build-blocker: true + block-level: 'NODE' + blocking-jobs: + - 'apex-deploy.*' + - throttle: + max-per-node: 1 + max-total: 10 + option: 'project' + + builders: + - 'apex-build' + - 'apex-upload-artifact' + + +# Colorado FDIO Deploy +- job-template: + name: 'apex-deploy-baremetal-os-odl_l2-fdio-ha-colorado' + + # Job template for baremetal deployment + # + # Required Variables: + # stream: branch with - in place of / (eg. stable) + # branch: branch (eg. stable) + node: 'lf-pod1' + + disabled: false + + scm: + - git-scm + + parameters: + - project-parameter: + project: '{project}' + - gerrit-parameter: + branch: 'stable/colorado' + - apex-parameter: + gs-pathname: '/colorado' + - string: + name: DEPLOY_SCENARIO + default: 'os-odl_l2-fdio-ha' + description: "Scenario to deploy with." + + properties: + - build-blocker: + use-build-blocker: true + block-level: 'NODE' + blocking-jobs: + - 'apex-verify.*' + - 'apex-deploy.*' + - 'apex-build.*' + + + builders: + - 'apex-deploy' + - 'apex-workspace-cleanup' + +# Colorado FDIO Daily +- job-template: + name: 'apex-daily-colorado' + + # Job template for daily build + # + # Required Variables: + # stream: branch with - in place of / (eg. stable) + # branch: branch (eg. 
stable) + node: 'apex-daily-colorado' + + disabled: false + + scm: + - git-scm + + parameters: + - project-parameter: + project: '{project}' + - gerrit-parameter: + branch: 'stable/colorado' + - apex-parameter: + gs-pathname: '/colorado' + + properties: + - build-blocker: + use-build-blocker: true + block-level: 'NODE' + blocking-jobs: + - 'apex-verify.*' + - 'apex-deploy.*' + - 'apex-build.*' + - 'apex-runner.*' + + triggers: + - 'apex-colorado' + + builders: + - trigger-builds: + - project: 'apex-build-colorado' + git-revision: true + current-parameters: true + same-node: true + block: true + - trigger-builds: + - project: 'apex-deploy-baremetal-os-odl_l2-fdio-ha-colorado' + predefined-parameters: | + BUILD_DIRECTORY=apex-build-colorado/.build + OPNFV_CLEAN=yes + git-revision: true + same-node: true + block-thresholds: + build-step-failure-threshold: 'never' + block: true + - trigger-builds: + - project: 'functest-apex-apex-daily-colorado-daily-colorado' + predefined-parameters: + DEPLOY_SCENARIO=os-odl_l2-fdio-ha + block: true + same-node: true + block-thresholds: + build-step-failure-threshold: 'never' + failure-threshold: 'never' + unstable-threshold: 'FAILURE' + - job-template: name: 'apex-gs-clean-{stream}' @@ -655,6 +812,10 @@ triggers: - timed: '0 3 * * *' - trigger: + name: 'apex-colorado' + triggers: + - timed: '0 12 * * *' +- trigger: name: 'apex-gs-clean-{stream}' triggers: - timed: '0 2 * * *' diff --git a/jjb/dovetail/dovetail-artifacts-upload.sh b/jjb/dovetail/dovetail-artifacts-upload.sh new file mode 100755 index 000000000..94e4129fc --- /dev/null +++ b/jjb/dovetail/dovetail-artifacts-upload.sh @@ -0,0 +1,71 @@ +#!/bin/bash +# SPDX-license-identifier: Apache-2.0 +############################################################################## +# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others. +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## +set -o pipefail + +echo "dovetail: pull and save the images" + +[[ -d ${CACHE_DIR} ]] || mkdir -p ${CACHE_DIR} + +cd ${CACHE_DIR} +sudo docker pull ${DOCKER_REPO_NAME}:${DOCKER_TAG} +sudo docker save -o ${STORE_FILE_NAME} ${DOCKER_REPO_NAME}:${DOCKER_TAG} + +importkey () { +# clone releng repository +echo "Cloning releng repository..." +[ -d releng ] && rm -rf releng +git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null +#this is where we import the siging key +if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then + source $WORKSPACE/releng/utils/gpg_import_key.sh +fi +} + +sign () { +gpg2 -vvv --batch --yes --no-tty \ + --default-key opnfv-helpdesk@rt.linuxfoundation.org \ + --passphrase besteffort \ + --detach-sig ${CACHE_DIR}/${STORE_FILE_NAME} + +gsutil cp ${CACHE_DIR}/${STORE_FILE_NAME}.sig ${STORE_URL}/${STORE_FILE_NAME}.sig +echo "signature Upload Complete!" +} + +upload () { +# log info to console +echo "Uploading to artifact. This could take some time..." 
+echo + +cd $WORKSPACE +# upload artifact and additional files to google storage +gsutil cp ${CACHE_DIR}/${STORE_FILE_NAME} ${STORE_URL}/${STORE_FILE_NAME} + +gsutil -m setmeta \ + -h "Cache-Control:private, max-age=0, no-transform" \ + ${STORE_URL}/${STORE_FILE_NAME} > /dev/null 2>&1 + +# disabled errexit due to gsutil setmeta complaints +# BadRequestException: 400 Invalid argument +# check if we uploaded the file successfully to see if things are fine +gsutil ls ${STORE_URL}/${STORE_FILE_NAME} > /dev/null 2>&1 +if [[ $? -ne 0 ]]; then + echo "Problem while uploading artifact!" + exit 1 +fi + +echo "dovetail: uploading Done!" +echo +echo "--------------------------------------------------------" +echo +} + +importkey +sign +upload diff --git a/jjb/dovetail/dovetail-artifacts-upload.yml b/jjb/dovetail/dovetail-artifacts-upload.yml new file mode 100644 index 000000000..dc2ae5aa2 --- /dev/null +++ b/jjb/dovetail/dovetail-artifacts-upload.yml @@ -0,0 +1,130 @@ +############################################ +# dovetail upload artifacts job +############################################ +- project: + name: dovetail-artifacts-upload + + project: 'dovetail' + + jobs: + - 'dovetail-{image}-artifacts-upload-{stream}' + + stream: + - master: + branch: '{stream}' + gs-pathname: '' + disabled: false + + image: + - 'dovetail' + - 'functest' + - 'yardstick' + +############################################# +# job template +############################################# + +- job-template: + name: 'dovetail-{image}-artifacts-upload-{stream}' + + + disabled: '{obj:disabled}' + + concurrent: true + + properties: + - throttle: + enabled: true + max-total: 1 + max-per-node: 1 + option: 'project' + + parameters: + - project-parameter: + project: '{project}' + - gerrit-parameter: + branch: '{branch}' + - 'opnfv-build-ubuntu-defaults' + - dovetail-parameter: + gs-pathname: '{gs-pathname}' + image: '{image}' + branch: '{branch}' + + scm: + - git-scm + + builders: + - 'dovetail-builder-artifacts-upload' + - 'dovetail-workspace-cleanup' + +#################### +# parameter macros +#################### +- parameter: + name: dovetail-parameter + parameters: + - string: + name: CACHE_DIR + default: $WORKSPACE/cache{gs-pathname} + description: "the cache to store packages downloaded" + - string: + name: STORE_URL + default: gs://artifacts.opnfv.org/dovetail{gs-pathname} + description: "LF artifacts url for storage of dovetail packages" + - string: + name: DOCKER_REPO_NAME + default: opnfv/{image} + description: "docker repo name" + - string: + name: DOCKER_TAG + default: latest + description: "docker image tag of which will be uploaded to artifacts" + - string: + name: STORE_FILE_NAME + default: image_{image}_{branch}_$BUILD_ID.docker + description: "stored file name" + +#################################### +#builders for dovetail project +#################################### +- builder: + name: dovetail-builder-artifacts-upload + builders: + - shell: + !include-raw: ./dovetail-artifacts-upload.sh + +- builder: + name: dovetail-workspace-cleanup + builders: + - shell: | + #!/bin/bash + set -o errexit + + echo "Dovetail: cleanup cache used for storage downloaded packages" + + /bin/rm -rf $CACHE_DIR + + # Remove previous running containers if exist + if [[ -n "$(docker ps -a | grep $DOCKER_REPO_NAME)" ]]; then + echo "Removing existing $DOCKER_REPO_NAME containers..." 
+ docker ps -a | grep $DOCKER_REPO_NAME | awk '{print $1}' | xargs docker rm -f + t=60 + # Wait max 60 sec for containers to be removed + while [[ $t -gt 0 ]] && [[ -n "$(docker ps| grep $DOCKER_REPO_NAME)" ]]; do + sleep 1 + let t=t-1 + done + fi + + # Remove existing images if exist + if [[ -n "$(docker images | grep $DOCKER_REPO_NAME)" ]]; then + echo "Docker images to remove:" + docker images | head -1 && docker images | grep $DOCKER_REPO_NAME + image_tags=($(docker images | grep $DOCKER_REPO_NAME | awk '{print $2}')) + for tag in "${image_tags[@]}"; do + if [[ -n "$(docker images|grep $DOCKER_REPO_NAME|grep $tag)" ]]; then + echo "Removing docker image $DOCKER_REPO_NAME:$tag..." + docker rmi -f $DOCKER_REPO_NAME:$tag + fi + done + fi diff --git a/jjb/dovetail/dovetail-ci-jobs.yml b/jjb/dovetail/dovetail-ci-jobs.yml index f41325020..2b6ab7621 100644 --- a/jjb/dovetail/dovetail-ci-jobs.yml +++ b/jjb/dovetail/dovetail-ci-jobs.yml @@ -103,22 +103,22 @@ auto-trigger-name: 'daily-trigger-disabled' <<: *colorado #armband CI PODs - - armband_baremetal: + - armband-baremetal: slave-label: armband-baremetal SUT: fuel auto-trigger-name: 'daily-trigger-disabled' <<: *master - - armband_virtual: + - armband-virtual: slave-label: armband-virtual SUT: fuel auto-trigger-name: 'daily-trigger-disabled' <<: *master - - armband_baremetal: + - armband-baremetal: slave-label: armband-baremetal SUT: fuel auto-trigger-name: 'daily-trigger-disabled' <<: *colorado - - armband_virtual: + - armband-virtual: slave-label: armband-virtual SUT: fuel auto-trigger-name: 'daily-trigger-disabled' diff --git a/jjb/dovetail/dovetail-project-jobs.yml b/jjb/dovetail/dovetail-project-jobs.yml index 826684919..904841396 100644 --- a/jjb/dovetail/dovetail-project-jobs.yml +++ b/jjb/dovetail/dovetail-project-jobs.yml @@ -93,25 +93,21 @@ #builders for dovetail project ############################### - builder: - name: dovetail-unit-tests + name: dovetail-hello-world builders: - shell: | #!/bin/bash set -o errexit - set -o pipefail - echo "Running unit tests..." - cd $WORKSPACE - virtualenv $WORKSPACE/dovetail_venv - source $WORKSPACE/dovetail_venv/bin/activate + echo "hello world" - #packages installation - easy_install -U setuptools - easy_install -U pip - pip install -r unittests/requirements.txt - pip install -e . - #unit tests - /bin/bash $WORKSPACE/unittests/unittest.sh +- builder: + name: dovetail-unit-tests + builders: + - shell: | + #!/bin/bash + set -o errexit + set -o pipefail - deactivate + tox diff --git a/jjb/functest/functest-cleanup.sh b/jjb/functest/functest-cleanup.sh index a1ae67d99..f8140e058 100755 --- a/jjb/functest/functest-cleanup.sh +++ b/jjb/functest/functest-cleanup.sh @@ -3,19 +3,34 @@ [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null" echo "Cleaning up docker containers/images..." +FUNCTEST_IMAGE=opnfv/functest +# Remove containers along with image opnfv/functest:<none> +dangling_images=($(docker images -f "dangling=true" | grep $FUNCTEST_IMAGE | awk '{print $1}')) +if [[ -n ${dangling_images} ]]; then + echo " Removing $FUNCTEST_IMAGE:<none> images and their containers..." + for image_id in "${dangling_images[@]}"; do + echo " Removing image_id: $image_id and its containers" + docker ps -a | grep $image_id | awk '{print $1}'| xargs docker rm -f >${redirect} + docker rmi $image_id >${redirect} + done +fi + # Remove previous running containers if exist -if [[ ! -z $(docker ps -a | grep opnfv/functest) ]]; then - echo "Removing existing opnfv/functest containers..." 
- docker ps -a | grep opnfv/functest | awk '{print $1}' | xargs docker rm -f >${redirect} +functest_containers=$(docker ps -a | grep $FUNCTEST_IMAGE | awk '{print $1}') +if [[ -n ${functest_containers} ]]; then + echo " Removing existing $FUNCTEST_IMAGE containers..." + docker rm -f $functest_containers >${redirect} fi # Remove existing images if exist -if [[ $CLEAN_DOCKER_IMAGES == true ]] && [[ ! -z $(docker images | grep opnfv/functest) ]]; then - echo "Docker images to remove:" - docker images | head -1 && docker images | grep opnfv/functest >${redirect} - image_tags=($(docker images | grep opnfv/functest | awk '{print $2}')) - for tag in "${image_tags[@]}"; do - echo "Removing docker image opnfv/functest:$tag..." - docker rmi opnfv/functest:$tag >/dev/null - done +if [[ $CLEAN_DOCKER_IMAGES == true ]]; then + functest_image_tags=($(docker images | grep $FUNCTEST_IMAGE | awk '{print $2}')) + if [[ -n ${functest_image_tags} ]]; then + echo " Docker images to be removed:" >${redirect} + (docker images | head -1 && docker images | grep $FUNCTEST_IMAGE) >${redirect} + for tag in "${functest_image_tags[@]}"; do + echo " Removing docker image $FUNCTEST_IMAGE:$tag..." + docker rmi $FUNCTEST_IMAGE:$tag >${redirect} + done + fi fi diff --git a/jjb/global/slave-params.yml b/jjb/global/slave-params.yml index 7eec70918..30ebb950f 100644 --- a/jjb/global/slave-params.yml +++ b/jjb/global/slave-params.yml @@ -425,6 +425,19 @@ name: GIT_BASE default: https://gerrit.opnfv.org/gerrit/$PROJECT - parameter: + name: 'huawei-pod7-defaults' + parameters: + - node: + name: SLAVE_NAME + description: 'Slave name on Jenkins' + allowed-slaves: + - huawei-pod7 + default-slaves: + - huawei-pod7 + - string: + name: GIT_BASE + default: https://gerrit.opnfv.org/gerrit/$PROJECT +- parameter: name: 'zte-pod1-defaults' parameters: - node: diff --git a/jjb/infra/bifrost-verify-jobs.yml b/jjb/infra/bifrost-verify-jobs.yml index 422a42274..baf33ea3a 100644 --- a/jjb/infra/bifrost-verify-jobs.yml +++ b/jjb/infra/bifrost-verify-jobs.yml @@ -132,10 +132,6 @@ timeout: 10 wipe-workspace: true - triggers: - - '{project}-gerrit-trigger': - branch: '{branch}' - builders: - description-setter: description: "Built on $NODE_NAME" diff --git a/jjb/kvmfornfv/kvmfornfv.yml b/jjb/kvmfornfv/kvmfornfv.yml index a708c8af2..33c73f9a7 100644 --- a/jjb/kvmfornfv/kvmfornfv.yml +++ b/jjb/kvmfornfv/kvmfornfv.yml @@ -201,20 +201,40 @@ - description-setter: description: "Built on $NODE_NAME" - multijob: - name: build + name: cyclictest-build condition: SUCCESSFUL projects: - - name: 'kvmfornfv-{testname}-daily-build-{stream}' + - name: 'kvmfornfv-cyclictest-daily-build-{stream}' current-parameters: false node-parameters: false git-revision: true kill-phase-on: FAILURE abort-all-job: true - multijob: - name: build + name: cyclictest-test + condition: SUCCESSFUL + projects: + - name: 'kvmfornfv-cyclictest-daily-test-{stream}' + current-parameters: false + node-parameters: false + git-revision: true + kill-phase-on: FAILURE + abort-all-job: true + - multijob: + name: packetforward-build + condition: SUCCESSFUL + projects: + - name: 'kvmfornfv-packet_forward-daily-build-{stream}' + current-parameters: false + node-parameters: false + git-revision: true + kill-phase-on: FAILURE + abort-all-job: true + - multijob: + name: packetforward-test condition: SUCCESSFUL projects: - - name: 'kvmfornfv-{testname}-daily-test-{stream}' + - name: 'kvmfornfv-packet_forward-daily-test-{stream}' current-parameters: false node-parameters: false git-revision: true diff 
--git a/jjb/opera/opera-daily-jobs.yml b/jjb/opera/opera-daily-jobs.yml new file mode 100644 index 000000000..556d59fcb --- /dev/null +++ b/jjb/opera/opera-daily-jobs.yml @@ -0,0 +1,143 @@ +- project: + name: 'opera-daily-jobs' + + project: 'opera' + +##################################### +# branch definitions +##################################### + stream: + - master: + branch: '{stream}' + gs-pathname: '' + disabled: false + +##################################### +# patch verification phases +##################################### + phase: + - 'basic' + - 'deploy' + +##################################### +# jobs +##################################### + jobs: + - 'opera-daily-{stream}' + - 'opera-daily-{phase}-{stream}' +##################################### +# job templates +##################################### +- job-template: + name: 'opera-daily-{stream}' + + project-type: multijob + + disabled: '{obj:disabled}' + + concurrent: false + + properties: + - throttle: + enabled: true + max-total: 1 + max-per-node: 1 + option: 'project' + + scm: + - git-scm + + wrappers: + - ssh-agent-wrapper + + - timeout: + timeout: 120 + fail: true + + triggers: + - timed: '@midnight' + + parameters: + - project-parameter: + project: '{project}' + - gerrit-parameter: + branch: '{branch}' + - 'huawei-pod7-defaults' + + builders: + - description-setter: + description: "Built on $NODE_NAME" + - multijob: + name: basic + condition: SUCCESSFUL + projects: + - name: 'opera-daily-basic-{stream}' + current-parameters: true + node-parameters: true + kill-phase-on: FAILURE + abort-all-job: true + - multijob: + name: deploy + condition: SUCCESSFUL + projects: + - name: 'opera-daily-deploy-{stream}' + current-parameters: true + node-parameters: true + kill-phase-on: FAILURE + abort-all-job: true +# - multijob: +# name: functest +# condition: SUCCESSFUL +# projects: +# - name: 'functest-compass-baremetal-suite-{stream}' +# current-parameters: true +# predefined-parameters: +# FUNCTEST_SUITE_NAME=opera +# node-parameters: true +# kill-phase-on: NEVER +# abort-all-job: true + +- job-template: + name: 'opera-daily-{phase}-{stream}' + + disabled: '{obj:disabled}' + + concurrent: true + + properties: + - throttle: + enabled: true + max-per-node: 1 + option: 'project' + + scm: + - git-scm + + wrappers: + - ssh-agent-wrapper + - timeout: + timeout: 120 + fail: true + + builders: + - description-setter: + description: "Built on $NODE_NAME" + - '{project}-daily-{phase}-macro' + +##################################### +# builder macros +##################################### +- builder: + name: 'opera-daily-basic-macro' + builders: + - shell: | + #!/bin/bash + echo "Hello world!" + +- builder: + name: 'opera-daily-deploy-macro' + builders: + - shell: | + #!/bin/bash + echo "Hello world!" 
+ diff --git a/jjb/opera/opera-project-jobs.yml b/jjb/opera/opera-project-jobs.yml new file mode 100644 index 000000000..19f066b5f --- /dev/null +++ b/jjb/opera/opera-project-jobs.yml @@ -0,0 +1,57 @@ +- project: + + name: opera-project + + project: 'opera' + + stream: + - master: + branch: '{stream}' + gs-pathname: '' + + jobs: + - 'opera-build-{stream}' + +######################## +# job templates +######################## +- job-template: + name: 'opera-build-{stream}' + + concurrent: true + + properties: + - throttle: + enabled: true + max-total: 1 + max-per-node: 1 + option: 'project' + + parameters: + - project-parameter: + project: '{project}' + - 'opnfv-build-ubuntu-defaults' + - gerrit-parameter: + branch: '{branch}' + + scm: + - git-scm + + triggers: + - timed: 'H 23 * * *' + + builders: + - 'opera-build-macro' + +##################################### +# builder macros +##################################### +- builder: + name: 'opera-build-macro' + builders: + - shell: | + #!/bin/bash + + echo "Hello world!" + + diff --git a/jjb/opera/opera-verify-jobs.yml b/jjb/opera/opera-verify-jobs.yml new file mode 100644 index 000000000..0e9dba01d --- /dev/null +++ b/jjb/opera/opera-verify-jobs.yml @@ -0,0 +1,155 @@ +- project: + name: 'opera-verify-jobs' + + project: 'opera' + +##################################### +# branch definitions +##################################### + stream: + - master: + branch: '{stream}' + gs-pathname: '' + disabled: false + +##################################### +# patch verification phases +##################################### + phase: + - 'basic' + - 'deploy' + +##################################### +# jobs +##################################### + jobs: + - 'opera-verify-{stream}' + - 'opera-verify-{phase}-{stream}' +##################################### +# job templates +##################################### +- job-template: + name: 'opera-verify-{stream}' + + project-type: multijob + + disabled: '{obj:disabled}' + + concurrent: true + + properties: + - throttle: + enabled: true + max-total: 1 + max-per-node: 1 + option: 'project' + + scm: + - git-scm-gerrit + + wrappers: + - ssh-agent-wrapper + - timeout: + timeout: 120 + fail: true + + triggers: + - gerrit: + server-name: 'gerrit.opnfv.org' + trigger-on: + - patchset-created-event: + exclude-drafts: 'false' + exclude-trivial-rebase: 'false' + exclude-no-code-change: 'false' + - draft-published-event + - comment-added-contains-event: + comment-contains-value: 'recheck' + - comment-added-contains-event: + comment-contains-value: 'reverify' + projects: + - project-compare-type: 'ANT' + project-pattern: '{project}' + branches: + - branch-compare-type: 'ANT' + branch-pattern: '**/{branch}' + file-paths: + - compare-type: ANT + pattern: '**/*' + forbidden-file-paths: + - compare-type: ANT + pattern: 'docs/**' + readable-message: true + + parameters: + - project-parameter: + project: '{project}' + - gerrit-parameter: + branch: '{branch}' + - 'huawei-pod7-defaults' + + builders: + - description-setter: + description: "Built on $NODE_NAME" + - multijob: + name: basic + condition: SUCCESSFUL + projects: + - name: 'opera-verify-basic-{stream}' + current-parameters: true + node-parameters: true + kill-phase-on: FAILURE + abort-all-job: true + - multijob: + name: deploy + condition: SUCCESSFUL + projects: + - name: 'opera-verify-deploy-{stream}' + current-parameters: true + node-parameters: true + kill-phase-on: FAILURE + abort-all-job: true + +- job-template: + name: 'opera-verify-{phase}-{stream}' + + disabled: 
'{obj:disabled}' + + concurrent: true + + properties: + - throttle: + enabled: true + max-per-node: 1 + option: 'project' + + scm: + - git-scm-gerrit + + wrappers: + - ssh-agent-wrapper + - timeout: + timeout: 120 + fail: true + + builders: + - description-setter: + description: "Built on $NODE_NAME" + - '{project}-verify-{phase}-macro' + +##################################### +# builder macros +##################################### +- builder: + name: 'opera-verify-basic-macro' + builders: + - shell: | + #!/bin/bash + echo "Hello world!" + +- builder: + name: 'opera-verify-deploy-macro' + builders: + - shell: | + #!/bin/bash + echo "Hello world!" + diff --git a/utils/push-test-logs.sh b/utils/push-test-logs.sh index 87cee78bf..586afce1b 100644 --- a/utils/push-test-logs.sh +++ b/utils/push-test-logs.sh @@ -24,7 +24,8 @@ node_list=(\ 'lf-pod1' 'lf-pod2' 'intel-pod2' 'intel-pod3' \ 'intel-pod5' 'intel-pod6' 'intel-pod7' 'intel-pod8' \ 'ericsson-pod2' 'ericsson-pod3' 'ericsson-pod4' \ -'ericsson-virtual2' 'ericsson-virtual3' 'ericsson-virtual4' 'ericsson-virtual5' \ +'ericsson-virtual1' 'ericsson-virtual2' 'ericsson-virtual3' \ +'ericsson-virtual4' 'ericsson-virtual5' \ 'arm-pod1' 'arm-pod3' \ 'huawei-pod1' 'huawei-pod2' 'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4') diff --git a/utils/test/dashboard.tar.gz b/utils/test/dashboard.tar.gz Binary files differdeleted file mode 100644 index ef85f90da..000000000 --- a/utils/test/dashboard.tar.gz +++ /dev/null diff --git a/utils/test/reporting/functest/testCase.py b/utils/test/reporting/functest/testCase.py index 876beb267..8d90fc861 100644 --- a/utils/test/reporting/functest/testCase.py +++ b/utils/test/reporting/functest/testCase.py @@ -33,15 +33,17 @@ class TestCase(object): 'vims': 'vIMS', 'doctor': 'Doctor', 'promise': 'Promise', - 'moon': 'moon', - 'copper': 'copper', - 'security_scan': 'security', - 'multisite': 'multisite', - 'domino': 'domino', - 'odl-sfc': 'SFC', - 'onos_sfc': 'SFC', - 'parser': 'parser' - } + 'moon': 'Moon', + 'copper': 'Copper', + 'security_scan': 'Security', + 'multisite':'Multisite', + 'domino':'Domino', + 'odl-sfc':'SFC', + 'onos_sfc':'SFC', + 'parser':'Parser', + 'connection_check':'Health (connection)', + 'api_check':'Health (api)', + 'snaps_smoke':'SNAPS' } try: self.displayName = display_name_matrix[self.name] except: @@ -126,14 +128,17 @@ class TestCase(object): 'vims': 'vims', 'doctor': 'doctor-notification', 'promise': 'promise', - 'moon': 'moon', + 'moon': 'moon_authentication', 'copper': 'copper-notification', 'security_scan': 'security', - 'multisite': 'multisite', - 'domino': 'domino-multinode', - 'odl-sfc': 'functest-odl-sfc', - 'onos_sfc': 'onos_sfc', - 'parser': 'parser-basics' + 'multisite':'multisite', + 'domino':'domino-multinode', + 'odl-sfc':'functest-odl-sfc', + 'onos_sfc':'onos_sfc', + 'parser':'parser-basics', + 'connection_check':'connection_check', + 'api_check':'api_check', + 'snaps_smoke':'snaps_smoke' } try: return test_match_matrix[self.name] @@ -142,3 +147,4 @@ class TestCase(object): def getDisplayName(self): return self.displayName + diff --git a/utils/test/testapi/opnfv_testapi/resources/models.py b/utils/test/testapi/opnfv_testapi/resources/models.py index e79308b53..c85c1d5b1 100644 --- a/utils/test/testapi/opnfv_testapi/resources/models.py +++ b/utils/test/testapi/opnfv_testapi/resources/models.py @@ -12,60 +12,87 @@ # feng.xiaowei@zte.com.cn add CreateResponse 5-19-2016
# feng.xiaowei@zte.com.cn mv TestCase to testcase_models.py 5-20-2016
# feng.xiaowei@zte.com.cn mv TestResult to result_models.py 5-23-2016
+# feng.xiaowei@zte.com.cn add ModelBase 12-20-2016
##############################################################################
+import copy
+
from opnfv_testapi.tornado_swagger import swagger
-@swagger.model()
-class CreateResponse(object):
- def __init__(self, href=''):
- self.href = href
+class ModelBase(object):
+
+ def _format(self, excludes):
+ new_obj = copy.deepcopy(self)
+ dicts = new_obj.__dict__
+ print self, self.__class__
+ for k in dicts.keys():
+ if k in excludes:
+ del dicts[k]
+ elif dicts[k]:
+ if hasattr(dicts[k], 'format'):
+ dicts[k] = dicts[k].format()
+ elif isinstance(dicts[k], list):
+ hs = []
+ for h in dicts[k]:
+ hs.append(h.format())
+ dicts[k] = hs
+ elif not isinstance(dicts[k], (str, int, float, dict)):
+ dicts[k] = str(dicts[k])
+ return dicts
+
+ def format(self):
+ return self._format(['_id'])
+
+ def format_http(self):
+ return self._format([])
@staticmethod
- def from_dict(res_dict):
- if res_dict is None:
+ def attr_parser():
+ return {}
+
+ @classmethod
+ def from_dict(cls, a_dict):
+ if a_dict is None:
return None
- res = CreateResponse()
- res.href = res_dict.get('href')
- return res
+ attr_parser = cls.attr_parser()
+ t = cls()
+ for k, v in a_dict.iteritems():
+ value = v
+ if isinstance(v, dict) and k in attr_parser:
+ value = attr_parser[k].from_dict(v)
+ elif isinstance(v, list) and k in attr_parser:
+ value = []
+ for item in v:
+ value.append(attr_parser[k].from_dict(item))
- def format(self):
- return {'href': self.href}
+ t.__setattr__(k, value)
+
+ return t
+
+
+class CreateResponse(ModelBase):
+ def __init__(self, href=''):
+ self.href = href
@swagger.model()
-class Versions(object):
+class Versions(ModelBase):
"""
@property versions:
@ptype versions: C{list} of L{Version}
"""
+
def __init__(self):
self.versions = list()
@staticmethod
- def from_dict(res_dict):
- if res_dict is None:
- return None
-
- res = Versions()
- for version in res_dict.get('versions'):
- res.versions.append(Version.from_dict(version))
- return res
+ def attr_parser():
+ return {'versions': Version}
@swagger.model()
-class Version(object):
+class Version(ModelBase):
def __init__(self, version=None, description=None):
self.version = version
self.description = description
-
- @staticmethod
- def from_dict(a_dict):
- if a_dict is None:
- return None
-
- ver = Version()
- ver.version = a_dict.get('version')
- ver.description = str(a_dict.get('description'))
- return ver
diff --git a/utils/test/testapi/opnfv_testapi/resources/pod_models.py b/utils/test/testapi/opnfv_testapi/resources/pod_models.py index 7231806f6..26a9e6788 100644 --- a/utils/test/testapi/opnfv_testapi/resources/pod_models.py +++ b/utils/test/testapi/opnfv_testapi/resources/pod_models.py @@ -6,8 +6,10 @@ # which accompanies this distribution, and is available at # http://www.apache.org/licenses/LICENSE-2.0 ############################################################################## +import models from opnfv_testapi.tornado_swagger import swagger + # name: name of the POD e.g. zte-1 # mode: metal or virtual # details: any detail @@ -15,58 +17,29 @@ from opnfv_testapi.tornado_swagger import swagger @swagger.model() -class PodCreateRequest(object): +class PodCreateRequest(models.ModelBase): def __init__(self, name, mode='', details='', role=""): self.name = name self.mode = mode self.details = details self.role = role - def format(self): - return { - "name": self.name, - "mode": self.mode, - "details": self.details, - "role": self.role, - } - @swagger.model() -class Pod(PodCreateRequest): +class Pod(models.ModelBase): def __init__(self, name='', mode='', details='', role="", _id='', create_date=''): - super(Pod, self).__init__(name, mode, details, role) + self.name = name + self.mode = mode + self.details = details + self.role = role self._id = _id self.creation_date = create_date - @staticmethod - def from_dict(pod_dict): - if pod_dict is None: - return None - - p = Pod() - p._id = pod_dict.get('_id') - p.creation_date = str(pod_dict.get('creation_date')) - p.name = pod_dict.get('name') - p.mode = pod_dict.get('mode') - p.details = pod_dict.get('details') - p.role = pod_dict.get('role') - return p - - def format(self): - f = super(Pod, self).format() - f['creation_date'] = str(self.creation_date) - return f - - def format_http(self): - f = self.format() - f['_id'] = str(self._id) - return f - @swagger.model() -class Pods(object): +class Pods(models.ModelBase): """ @property pods: @ptype pods: C{list} of L{Pod} @@ -75,11 +48,5 @@ class Pods(object): self.pods = list() @staticmethod - def from_dict(res_dict): - if res_dict is None: - return None - - res = Pods() - for pod in res_dict.get('pods'): - res.pods.append(Pod.from_dict(pod)) - return res + def attr_parser(): + return {'pods': Pod} diff --git a/utils/test/testapi/opnfv_testapi/resources/project_models.py b/utils/test/testapi/opnfv_testapi/resources/project_models.py index f70630cda..f7323c1c4 100644 --- a/utils/test/testapi/opnfv_testapi/resources/project_models.py +++ b/utils/test/testapi/opnfv_testapi/resources/project_models.py @@ -6,37 +6,26 @@ # which accompanies this distribution, and is available at # http://www.apache.org/licenses/LICENSE-2.0 ############################################################################## +import models from opnfv_testapi.tornado_swagger import swagger @swagger.model() -class ProjectCreateRequest(object): +class ProjectCreateRequest(models.ModelBase): def __init__(self, name, description=''): self.name = name self.description = description - def format(self): - return { - "name": self.name, - "description": self.description, - } - @swagger.model() -class ProjectUpdateRequest(object): +class ProjectUpdateRequest(models.ModelBase): def __init__(self, name='', description=''): self.name = name self.description = description - def format(self): - return { - "name": self.name, - "description": self.description, - } - @swagger.model() -class Project(object): +class Project(models.ModelBase): def 
__init__(self, name=None, _id=None, description=None, create_date=None): self._id = _id @@ -44,38 +33,9 @@ class Project(object): self.description = description self.creation_date = create_date - @staticmethod - def from_dict(res_dict): - - if res_dict is None: - return None - - t = Project() - t._id = res_dict.get('_id') - t.creation_date = res_dict.get('creation_date') - t.name = res_dict.get('name') - t.description = res_dict.get('description') - - return t - - def format(self): - return { - "name": self.name, - "description": self.description, - "creation_date": str(self.creation_date) - } - - def format_http(self): - return { - "_id": str(self._id), - "name": self.name, - "description": self.description, - "creation_date": str(self.creation_date), - } - @swagger.model() -class Projects(object): +class Projects(models.ModelBase): """ @property projects: @ptype projects: C{list} of L{Project} @@ -84,11 +44,5 @@ class Projects(object): self.projects = list() @staticmethod - def from_dict(res_dict): - if res_dict is None: - return None - - res = Projects() - for project in res_dict.get('projects'): - res.projects.append(Project.from_dict(project)) - return res + def attr_parser(): + return {'projects': Project} diff --git a/utils/test/testapi/opnfv_testapi/resources/result_models.py b/utils/test/testapi/opnfv_testapi/resources/result_models.py index f73f5c612..30119020b 100644 --- a/utils/test/testapi/opnfv_testapi/resources/result_models.py +++ b/utils/test/testapi/opnfv_testapi/resources/result_models.py @@ -6,11 +6,12 @@ # which accompanies this distribution, and is available at # http://www.apache.org/licenses/LICENSE-2.0 ############################################################################## +import models from opnfv_testapi.tornado_swagger import swagger @swagger.model() -class TIHistory(object): +class TIHistory(models.ModelBase): """ @ptype step: L{float} """ @@ -18,22 +19,9 @@ class TIHistory(object): self.date = date self.step = step - def format(self): - return { - "date": self.date, - "step": self.step - } - - @staticmethod - def from_dict(a_dict): - if a_dict is None: - return None - - return TIHistory(a_dict.get('date'), a_dict.get('step')) - @swagger.model() -class TI(object): +class TI(models.ModelBase): """ @property histories: trust_indicator update histories @ptype histories: C{list} of L{TIHistory} @@ -43,31 +31,13 @@ class TI(object): self.current = current self.histories = list() - def format(self): - hs = [] - for h in self.histories: - hs.append(h.format()) - - return { - "current": self.current, - "histories": hs - } - @staticmethod - def from_dict(a_dict): - t = TI() - if a_dict: - t.current = a_dict.get('current') - if 'histories' in a_dict.keys(): - for history in a_dict.get('histories', None): - t.histories.append(TIHistory.from_dict(history)) - else: - t.histories = [] - return t + def attr_parser(): + return {'histories': TIHistory} @swagger.model() -class ResultCreateRequest(object): +class ResultCreateRequest(models.ModelBase): """ @property trust_indicator: @ptype trust_indicator: L{TI} @@ -98,25 +68,9 @@ class ResultCreateRequest(object): self.criteria = criteria self.trust_indicator = trust_indicator if trust_indicator else TI(0) - def format(self): - return { - "pod_name": self.pod_name, - "project_name": self.project_name, - "case_name": self.case_name, - "installer": self.installer, - "version": self.version, - "start_date": self.start_date, - "stop_date": self.stop_date, - "details": self.details, - "build_tag": self.build_tag, - "scenario": 
self.scenario, - "criteria": self.criteria, - "trust_indicator": self.trust_indicator.format() - } - @swagger.model() -class ResultUpdateRequest(object): +class ResultUpdateRequest(models.ModelBase): """ @property trust_indicator: @ptype trust_indicator: L{TI} @@ -124,14 +78,9 @@ class ResultUpdateRequest(object): def __init__(self, trust_indicator=None): self.trust_indicator = trust_indicator - def format(self): - return { - "trust_indicator": self.trust_indicator.format(), - } - @swagger.model() -class TestResult(object): +class TestResult(models.ModelBase): """ @property trust_indicator: used for long duration test case @ptype trust_indicator: L{TI} @@ -156,76 +105,19 @@ class TestResult(object): self.trust_indicator = trust_indicator @staticmethod - def from_dict(a_dict): - - if a_dict is None: - return None - - t = TestResult() - t._id = a_dict.get('_id') - t.case_name = a_dict.get('case_name') - t.pod_name = a_dict.get('pod_name') - t.project_name = a_dict.get('project_name') - t.start_date = str(a_dict.get('start_date')) - t.stop_date = str(a_dict.get('stop_date')) - t.details = a_dict.get('details') - t.version = a_dict.get('version') - t.installer = a_dict.get('installer') - t.build_tag = a_dict.get('build_tag') - t.scenario = a_dict.get('scenario') - t.criteria = a_dict.get('criteria') - t.trust_indicator = TI.from_dict(a_dict.get('trust_indicator')) - return t - - def format(self): - return { - "case_name": self.case_name, - "project_name": self.project_name, - "pod_name": self.pod_name, - "start_date": str(self.start_date), - "stop_date": str(self.stop_date), - "version": self.version, - "installer": self.installer, - "details": self.details, - "build_tag": self.build_tag, - "scenario": self.scenario, - "criteria": self.criteria, - "trust_indicator": self.trust_indicator.format() - } - - def format_http(self): - return { - "_id": str(self._id), - "case_name": self.case_name, - "project_name": self.project_name, - "pod_name": self.pod_name, - "start_date": str(self.start_date), - "stop_date": str(self.stop_date), - "version": self.version, - "installer": self.installer, - "details": self.details, - "build_tag": self.build_tag, - "scenario": self.scenario, - "criteria": self.criteria, - "trust_indicator": self.trust_indicator.format() - } + def attr_parser(): + return {'trust_indicator': TI} @swagger.model() -class TestResults(object): +class TestResults(models.ModelBase): """ - @property results: + @property rgit esults: @ptype results: C{list} of L{TestResult} """ def __init__(self): self.results = list() @staticmethod - def from_dict(a_dict): - if a_dict is None: - return None - - res = TestResults() - for result in a_dict.get('results'): - res.results.append(TestResult.from_dict(result)) - return res + def attr_parser(): + return {'results': TestResult} diff --git a/utils/test/testapi/opnfv_testapi/resources/testcase_models.py b/utils/test/testapi/opnfv_testapi/resources/testcase_models.py index 0ed705f0f..8cc3c6c6a 100644 --- a/utils/test/testapi/opnfv_testapi/resources/testcase_models.py +++ b/utils/test/testapi/opnfv_testapi/resources/testcase_models.py @@ -6,11 +6,12 @@ # which accompanies this distribution, and is available at # http://www.apache.org/licenses/LICENSE-2.0 ############################################################################## +import models from opnfv_testapi.tornado_swagger import swagger @swagger.model() -class TestcaseCreateRequest(object): +class TestcaseCreateRequest(models.ModelBase): def __init__(self, name, url=None, description=None, 
tier=None, ci_loop=None, criteria=None, blocking=None, dependencies=None, run=None, @@ -29,16 +30,9 @@ class TestcaseCreateRequest(object): self.version = version self.trust = "Silver" - def format(self): - return { - "name": self.name, - "description": self.description, - "url": self.url, - } - @swagger.model() -class TestcaseUpdateRequest(object): +class TestcaseUpdateRequest(models.ModelBase): def __init__(self, name=None, description=None, project_name=None, tier=None, ci_loop=None, criteria=None, blocking=None, dependencies=None, run=None, @@ -57,26 +51,9 @@ class TestcaseUpdateRequest(object): self.version = version self.trust = trust - def format(self): - return { - "name": self.name, - "description": self.description, - "project_name": self.project_name, - "tier": self.tier, - "ci_loop": self.ci_loop, - "criteria": self.criteria, - "blocking": self.blocking, - "dependencies": self.dependencies, - "run": self.run, - "domains": self.domains, - "tags": self.tags, - "version": self.version, - "trust": self.trust - } - @swagger.model() -class Testcase(object): +class Testcase(models.ModelBase): def __init__(self, _id=None, name=None, project_name=None, description=None, url=None, creation_date=None, tier=None, ci_loop=None, criteria=None, @@ -89,85 +66,20 @@ class Testcase(object): self.description = None self.url = None self.creation_date = None - self.tier=None - self.ci_loop=None - self.criteria=None - self.blocking=None - self.dependencies=None - self.run=None - self.domains=None - self.tags=None - self.version=None - self.trust=None - - @staticmethod - def from_dict(a_dict): - - if a_dict is None: - return None - - t = Testcase() - t._id = a_dict.get('_id') - t.project_name = a_dict.get('project_name') - t.creation_date = a_dict.get('creation_date') - t.name = a_dict.get('name') - t.description = a_dict.get('description') - t.url = a_dict.get('url') - t.tier = a_dict.get('tier') - t.ci_loop = a_dict.get('ci_loop') - t.criteria = a_dict.get('criteria') - t.blocking = a_dict.get('blocking') - t.dependencies = a_dict.get('dependencies') - t.run = a_dict.get('run') - t.domains = a_dict.get('domains') - t.tags = a_dict.get('tags') - t.version = a_dict.get('version') - t.trust = a_dict.get('trust') - - return t - - def format(self): - return { - "name": self.name, - "description": self.description, - "project_name": self.project_name, - "creation_date": str(self.creation_date), - "url": self.url, - "tier": self.tier, - "ci_loop": self.ci_loop, - "criteria": self.criteria, - "blocking": self.blocking, - "dependencies": self.dependencies, - "run": self.run, - "domains": self.domains, - "tags": self.tags, - "version": self.version, - "trust": self.trust - } - - def format_http(self): - return { - "_id": str(self._id), - "name": self.name, - "project_name": self.project_name, - "description": self.description, - "creation_date": str(self.creation_date), - "url": self.url, - "tier": self.tier, - "ci_loop": self.ci_loop, - "criteria": self.criteria, - "blocking": self.blocking, - "dependencies": self.dependencies, - "run": self.run, - "domains": self.domains, - "tags": self.tags, - "version": self.version, - "trust": self.trust - } + self.tier = None + self.ci_loop = None + self.criteria = None + self.blocking = None + self.dependencies = None + self.run = None + self.domains = None + self.tags = None + self.version = None + self.trust = None @swagger.model() -class Testcases(object): +class Testcases(models.ModelBase): """ @property testcases: @ptype testcases: C{list} of L{Testcase} @@ 
-176,11 +88,5 @@ class Testcases(object): self.testcases = list() @staticmethod - def from_dict(res_dict): - if res_dict is None: - return None - - res = Testcases() - for testcase in res_dict.get('testcases'): - res.testcases.append(Testcase.from_dict(testcase)) - return res + def attr_parser(): + return {'testcases': Testcase}
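
The testapi changes above replace the per-class from_dict()/format() boilerplate in pod_models.py, project_models.py, result_models.py and testcase_models.py with a shared ModelBase. The sketch below is an illustrative condensation of that pattern, not the opnfv_testapi module itself: it shows how attr_parser() drives nested deserialization and how format() recurses into nested models, using a simplified Pod/Pods pair. It deliberately omits details of the real _format() (the deepcopy, the '_id' exclusion, format_http(), stringification of non-primitive values) and uses .items() so it runs under both Python 2 and 3.

# Illustrative sketch of the ModelBase pattern introduced by this change.
# Simplified relative to opnfv_testapi.resources.models; names Pod/Pods mirror
# the real models but are reduced to two attributes for brevity.


class ModelBase(object):
    @staticmethod
    def attr_parser():
        # Subclasses override this to map attribute name -> nested model class.
        return {}

    def format(self):
        # Generic serialization: recurse into nested models and lists of models,
        # pass primitive values through unchanged.  (The real _format() also
        # drops excluded keys such as '_id' and stringifies other object types.)
        result = {}
        for key, value in self.__dict__.items():
            if hasattr(value, 'format'):
                result[key] = value.format()
            elif isinstance(value, list):
                result[key] = [item.format() if hasattr(item, 'format') else item
                               for item in value]
            else:
                result[key] = value
        return result

    @classmethod
    def from_dict(cls, a_dict):
        # Generic deserialization: attr_parser() says which keys hold nested
        # model dicts (or lists of them); everything else is set as-is.
        if a_dict is None:
            return None
        parser = cls.attr_parser()
        obj = cls()
        for key, value in a_dict.items():
            if key in parser and isinstance(value, dict):
                value = parser[key].from_dict(value)
            elif key in parser and isinstance(value, list):
                value = [parser[key].from_dict(item) for item in value]
            setattr(obj, key, value)
        return obj


class Pod(ModelBase):
    def __init__(self, name='', mode=''):
        self.name = name
        self.mode = mode


class Pods(ModelBase):
    def __init__(self):
        self.pods = []

    @staticmethod
    def attr_parser():
        # 'pods' entries should be deserialized as Pod instances.
        return {'pods': Pod}


if __name__ == '__main__':
    pods = Pods.from_dict({'pods': [{'name': 'zte-1', 'mode': 'metal'}]})
    print(pods.pods[0].name)   # -> zte-1
    print(pods.format())       # -> {'pods': [{'name': 'zte-1', 'mode': 'metal'}]}

Centralizing (de)serialization this way is what lets the pod, project, result and testcase models in this diff shrink to plain attribute declarations plus, where needed, an attr_parser() override.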