52 files changed, 1591 insertions, 686 deletions
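Most of what follows is Jenkins Job Builder (JJB) YAML. A quick way to sanity-check such templates before a Jenkins master picks them up is jenkins-jobs test, which only expands the YAML to job XML; a minimal sketch, assuming Jenkins Job Builder is installed and run from a releng checkout (the include list mirrors verify-sandbox-jobs.sh below, output directory illustrative):

    # Expand the templates to XML without touching a live Jenkins;
    # fails on YAML/template errors.
    jenkins-jobs test -r jjb/releng-defaults.yaml:jjb/releng-macros.yaml:jjb-sandbox \
        -o job_output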
diff --git a/jjb-sandbox/releng/releng-sandbox-jobs.yml b/jjb-sandbox/releng/releng-sandbox-jobs.yml
new file mode 100644
index 000000000..ee35f4299
--- /dev/null
+++ b/jjb-sandbox/releng/releng-sandbox-jobs.yml
@@ -0,0 +1,77 @@
+- project:
+    name: 'releng-sandbox-jobs'
+    jobs:
+        - 'releng-deploy-sandbox'
+        - 'releng-clear-jenkins-jobs'
+
+    project: 'releng'
+
+- job-template:
+    name: 'releng-deploy-sandbox'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: 'master'
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    triggers:
+        - gerrit:
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'redeploy'
+            projects:
+                - project-compare-type: 'ANT'
+                  project-pattern: 'releng'
+                  branches:
+                      - branch-compare-type: 'ANT'
+                        branch-pattern: '**/master'
+                  file-paths:
+                      - compare-type: ANT
+                        pattern: jjb-sandbox/**
+                      - compare-type: ANT
+                        pattern: utils/**
+
+    builders:
+        - shell:
+            !include-raw-escape: verify-sandbox-jobs.sh
+        - shell: |
+            #! /bin/bash
+            jenkins-jobs update -r jjb-sandbox
+
+    publishers:
+        - archive-artifacts:
+            artifacts: 'job_output/*'
+
+- job-template:
+    name: 'releng-clear-jenkins-jobs'
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: 'master'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: ''
+            choosing-strategy: 'default'
+
+    triggers:
+        - timed: '@weekly'
+
+    builders:
+        - shell: |
+            #! /bin/bash
+            jenkins-jobs delete -r -p jjb-sandbox -x jjb-sandbox/releng
diff --git a/jjb-sandbox/releng/verify-sandbox-jobs.sh b/jjb-sandbox/releng/verify-sandbox-jobs.sh
new file mode 100755
index 000000000..8f67e742b
--- /dev/null
+++ b/jjb-sandbox/releng/verify-sandbox-jobs.sh
@@ -0,0 +1,21 @@
+#! /bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+# Test for non-ASCII characters; they can pass the JJB test below and still end up breaking things in production.
+for x in $(find . -name '*.yml'); do
+
+    if LC_ALL=C grep -q '[^[:print:][:space:]]' "$x"; then
+        echo "file $x contains non-ascii characters"
+        exit 1
+    fi
+
+done
+
+jenkins-jobs test -r jjb/releng-defaults.yaml:jjb/releng-macros.yaml:jjb/opnfv/installer-params.yml:jjb/opnfv/slave-params.yml:jjb-sandbox \
+    -o job_output
diff --git a/jjb/daisy4nfv/daisy4nfv-basic.sh b/jjb/daisy4nfv/daisy4nfv-basic.sh
new file mode 100755
index 000000000..87f5482e0
--- /dev/null
+++ b/jjb/daisy4nfv/daisy4nfv-basic.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+echo "--------------------------------------------------------"
+echo "This is daisy4nfv basic job!"
+echo "--------------------------------------------------------"
+
diff --git a/jjb/daisy4nfv/daisy4nfv-build.sh b/jjb/daisy4nfv/daisy4nfv-build.sh
new file mode 100755
index 000000000..9eae8481b
--- /dev/null
+++ b/jjb/daisy4nfv/daisy4nfv-build.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+echo "--------------------------------------------------------"
+echo "This is daisy4nfv build job!"
+echo "--------------------------------------------------------"
+
diff --git a/jjb/daisy4nfv/daisy4nfv-smoke-test.sh b/jjb/daisy4nfv/daisy4nfv-smoke-test.sh
new file mode 100755
index 000000000..bd6eb7ee0
--- /dev/null
+++ b/jjb/daisy4nfv/daisy4nfv-smoke-test.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+echo "--------------------------------------------------------"
+echo "This is daisy4nfv smoke test job!"
+echo "--------------------------------------------------------"
+
diff --git a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
new file mode 100644
index 000000000..6444cf8ec
--- /dev/null
+++ b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
@@ -0,0 +1,228 @@
+- project:
+    name: 'daisy4nfv-verify-jobs'
+
+    project: 'daisy4nfv'
+
+    installer: 'daisy4nfv'
+#####################################
+# branch definitions
+#####################################
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+            disabled: false
+#####################################
+# patch verification phases
+#####################################
+    phase:
+        - 'basic':
+            slave-label: 'opnfv-build'
+        - 'build':
+            slave-label: 'opnfv-build-ubuntu'
+        - 'deploy-virtual':
+            slave-label: 'opnfv-build'
+        - 'smoke-test':
+            slave-label: 'opnfv-build'
+#####################################
+# jobs
+#####################################
+    jobs:
+        - 'daisy4nfv-verify-{stream}'
+        - 'daisy4nfv-verify-{phase}-{stream}'
+#####################################
+# job templates
+#####################################
+- job-template:
+    name: 'daisy4nfv-verify-{stream}'
+
+    project-type: multijob
+
+    disabled: false
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 4
+            option: 'project'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    wrappers:
+        - ssh-agent-credentials:
+            users:
+                - '{ssh-credentials}'
+        - timeout:
+            timeout: 360
+            fail: true
+
+    triggers:
+        - gerrit:
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+                - project-compare-type: 'ANT'
+                  project-pattern: '{project}'
+                  branches:
+                      - branch-compare-type: 'ANT'
+                        branch-pattern: '**/{branch}'
+                  forbidden-file-paths:
+                      - compare-type: ANT
+                        pattern: 'docs/**|.gitignore'
+            readable-message: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - 'opnfv-build-defaults'
+        - 'daisy4nfv-verify-defaults':
+            gs-pathname: '{gs-pathname}'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - multijob:
+            name: basic
+            condition: SUCCESSFUL
+            projects:
+                - name: 'daisy4nfv-verify-basic-{stream}'
+                  current-parameters: false
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: build
+            condition: SUCCESSFUL
+            projects:
+                - name: 'daisy4nfv-verify-build-{stream}'
+                  current-parameters: false
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: deploy-virtual
+            condition: SUCCESSFUL
+            projects:
+                - name: 'daisy4nfv-verify-deploy-virtual-{stream}'
+                  current-parameters: false
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: smoke-test
+            condition: SUCCESSFUL
+            projects:
+                - name: 'daisy4nfv-verify-smoke-test-{stream}'
+                  current-parameters: false
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+
+- job-template:
+    name: 'daisy4nfv-verify-{phase}-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 6
+            option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - 'daisy4nfv-verify-deploy-.*'
+                - 'daisy4nfv-verify-test-.*'
+            block-level: 'NODE'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    wrappers:
+        - ssh-agent-credentials:
+            users:
+                - '{ssh-credentials}'
+        - timeout:
+            timeout: 360
+            fail: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - '{slave-label}-defaults'
+        - 'daisy4nfv-verify-defaults':
+            gs-pathname: '{gs-pathname}'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - '{project}-verify-{phase}-macro'
+#####################################
+# builder macros
+#####################################
+- builder:
+    name: 'daisy4nfv-verify-basic-macro'
+    builders:
+        - shell:
+            !include-raw: ./daisy4nfv-basic.sh
+
+- builder:
+    name: 'daisy4nfv-verify-build-macro'
+    builders:
+        - shell:
+            !include-raw: ./daisy4nfv-build.sh
+
+- builder:
+    name: 'daisy4nfv-verify-deploy-virtual-macro'
+    builders:
+        - shell:
+            !include-raw: ./daisy4nfv-virtual-deploy.sh
+
+- builder:
+    name: 'daisy4nfv-verify-smoke-test-macro'
+    builders:
+        - shell: |
+            #!/bin/bash
+
+            echo "Not activated!"
+#####################################
+# parameter macros
+#####################################
+- parameter:
+    name: 'daisy4nfv-verify-defaults'
+    parameters:
+        - string:
+            name: BUILD_DIRECTORY
+            default: $WORKSPACE/build_output
+            description: "Directory where the build artifact will be located upon the completion of the build."
+        - string:
+            name: CACHE_DIRECTORY
+            default: $HOME/opnfv/cache/$INSTALLER_TYPE
+            description: "Directory where the cache to be used during the build is located."
+        - string:
+            name: GS_URL
+            default: artifacts.opnfv.org/$PROJECT{gs-pathname}
+            description: "URL to Google Storage."
diff --git a/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh b/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh
new file mode 100755
index 000000000..8936be6c4
--- /dev/null
+++ b/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+echo "--------------------------------------------------------"
+echo "This is daisy4nfv virtual deploy job!"
+echo "--------------------------------------------------------"
+
diff --git a/jjb/doctor/doctor.yml b/jjb/doctor/doctor.yml
index f93ac9bef..4958ca2b5 100644
--- a/jjb/doctor/doctor.yml
+++ b/jjb/doctor/doctor.yml
@@ -136,9 +136,17 @@
               TESTCASE_OPTIONS=-e INSPECTOR_TYPE=congress -v $WORKSPACE:$HOME/opnfv/repos/doctor
             block: true
             same-node: true
+
+    publishers:
+        - postbuildscript:
+            builders:
+                - functest-copy-suite-log:
+                    suite: '{project}'
+        - archive:
+            artifacts: '{project}.log'
+
+- builder:
+    name: functest-copy-suite-log
+    builders:
         - shell: |
-            logfile=$HOME/opnfv/functest/results/{stream}/doctor.log
-            echo
-            echo "[$logfile]"
-            echo
-            [ -e $logfile ] && cat $logfile
+            cp $HOME/opnfv/functest/results/${{GIT_BRANCH##*/}}/{suite}.log $WORKSPACE/
diff --git a/jjb/dovetail/dovetail-ci-jobs.yml b/jjb/dovetail/dovetail-ci-jobs.yml
index 91362ef3a..1dd1795cb 100644
--- a/jjb/dovetail/dovetail-ci-jobs.yml
+++ b/jjb/dovetail/dovetail-ci-jobs.yml
@@ -17,12 +17,14 @@
     master: &master
         stream: master
         branch: '{stream}'
+        dovetail-branch: '{stream}'
        gs-pathname: ''
        docker-tag: 'latest'
    colorado: &colorado
        stream: colorado
        branch: 'stable/{stream}'
-       gs-pathname: '{stream}'
+       dovetail-branch: master
+       gs-pathname: '/{stream}'
        docker-tag: 'latest'

#-----------------------------------
@@ -131,7 +133,7 @@
        - string:
            name: DOCKER_TAG
            default: '{docker-tag}'
-           description: 'Tag to pull docker image'
+           description: 'Tag to pull dovetail docker image'
        - string:
            name: CI_DEBUG
            default: 'true'
@@ -141,7 +143,7 @@
        - git-scm:
            credentials-id: '{ssh-credentials}'
            refspec: ''
-           branch: '{branch}'
+           branch: '{dovetail-branch}'

    builders:
        - description-setter:
@@ -149,6 +151,12 @@
        - 'dovetail-cleanup'
        - 'dovetail-{testsuite}'

+    publishers:
+        - archive:
+            artifacts: 'results/**/*'
+            allow-empty: true
+            fingerprint: true
+
########################
# builder macros
########################
diff --git a/jjb/dovetail/dovetail-run.sh b/jjb/dovetail/dovetail-run.sh
index 5653d3e9e..3f7a47bee 100755
--- a/jjb/dovetail/dovetail-run.sh
+++ b/jjb/dovetail/dovetail-run.sh
@@ -42,7 +42,14 @@ echo "Dovetail: Pulling image opnfv/dovetail:${DOCKER_TAG}"
 docker pull opnfv/dovetail:$DOCKER_TAG >$redirect
 
 # Run docker
+echo "Dovetail: docker running..."
 sudo docker run ${opts} ${envs} ${labconfig} ${sshkey} opnfv/dovetail:${DOCKER_TAG} \
 "/home/opnfv/dovetail/scripts/run.py"
 
+echo "Dovetail: store results..."
+sudo cp -r /home/opnfv/dovetail/results ./
+# Make sure the results files copied above are owned by jenkins;
+# otherwise the next attempt to wipe the workspace will fail.
+sudo chown -R jenkins:jenkins ${WORKSPACE}/results
+
 echo "Dovetail: done!"
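A note on the log path in jjb/doctor/doctor.yml above: JJB treats single braces as template parameters, so the doubled braces in ${{GIT_BRANCH##*/}} are escapes that reach the shell as an ordinary parameter expansion stripping everything up to the last slash; a small sketch of the expansion, with an illustrative branch value:

    # JJB renders ${{GIT_BRANCH##*/}} as ${GIT_BRANCH##*/} in the final script.
    GIT_BRANCH=origin/stable/colorado
    echo "${GIT_BRANCH##*/}"   # prints: colorado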
diff --git a/jjb/fastpathmetrics/fastpathmetrics.yml b/jjb/fastpathmetrics/fastpathmetrics.yml index 504e07f25..40df385d8 100644 --- a/jjb/fastpathmetrics/fastpathmetrics.yml +++ b/jjb/fastpathmetrics/fastpathmetrics.yml @@ -18,7 +18,7 @@ gs-pathname: '' disabled: false - colorado: - branch: '{stream}' + branch: 'stable/{stream}' gs-pathname: '/{stream}' disabled: false diff --git a/jjb/fuel/fuel-ci-jobs.yml b/jjb/fuel/fuel-daily-jobs.yml index 1c7946a87..1c7946a87 100644 --- a/jjb/fuel/fuel-ci-jobs.yml +++ b/jjb/fuel/fuel-daily-jobs.yml diff --git a/jjb/fuel/fuel-plugin-verify-jobs.yml b/jjb/fuel/fuel-plugin-verify-jobs.yml new file mode 100644 index 000000000..affc705f3 --- /dev/null +++ b/jjb/fuel/fuel-plugin-verify-jobs.yml @@ -0,0 +1,226 @@ +- project: + name: 'fuel-plugin-verify-jobs' + + project: 'fuel-plugin' + + installer: 'fuel' +##################################### +# branch definitions +##################################### + stream: + - master: + branch: '{stream}' + gs-pathname: '' + disabled: false +##################################### +# patch verification phases +##################################### + phase: + - 'build': + slave-label: 'opnfv-build-ubuntu' + - 'test': + slave-label: 'opnfv-build-ubuntu' +##################################### +# jobs +##################################### + jobs: + - 'fuel-verify-plugin-{stream}' + - 'fuel-verify-plugin-{phase}-{stream}' +##################################### +# job templates +##################################### +- job-template: + name: 'fuel-verify-plugin-{stream}' + + project-type: multijob + + disabled: '{obj:disabled}' + + concurrent: true + + properties: + - throttle: + enabled: true + max-total: 4 + option: 'project' + + # the url to plugin repo can essentially become a variable if + # the plugin name is injected to env by gerrit plugin + scm: + - git: + url: 'https://git.openstack.org/openstack/fuel-plugin-bgpvpn' + refspec: '$GERRIT_REFSPEC' + branches: + - 'origin/$GERRIT_BRANCH' + skip-tag: true + choosing-strategy: 'gerrit' + timeout: 10 + wipe-workspace: true + + wrappers: + - ssh-agent-credentials: + users: + - '{ssh-credentials}' + - timeout: + timeout: 360 + fail: true + + triggers: + - gerrit: + server-name: 'review.openstack.org' + silent-start: false + skip-vote: + successful: true + failed: true + unstable: true + notbuilt: true + escape-quotes: true + trigger-on: + - patchset-created-event: + exclude-drafts: 'false' + exclude-trivial-rebase: 'false' + exclude-no-code-change: 'false' + - comment-added-contains-event: + comment-contains-value: 'recheck' + - comment-added-contains-event: + comment-contains-value: 'reverify' + projects: + - project-compare-type: 'PLAIN' + project-pattern: 'openstack/fuel-plugin-bgpvpn' + branches: + - branch-compare-type: 'ANT' + branch-pattern: '**/{branch}' + forbidden-file-paths: + - compare-type: ANT + pattern: 'README.md|.gitignore|.gitreview' + readable-message: true + + parameters: + - project-parameter: + project: '{project}' + - gerrit-parameter: + branch: '{branch}' + - 'opnfv-build-defaults' + - 'fuel-verify-plugin-defaults': + gs-pathname: '{gs-pathname}' + + builders: + - description-setter: + description: "Built on $NODE_NAME" + - multijob: + name: build + condition: SUCCESSFUL + projects: + - name: 'fuel-verify-plugin-build-{stream}' + current-parameters: false + predefined-parameters: | + GERRIT_BRANCH=$GERRIT_BRANCH + GERRIT_REFSPEC=$GERRIT_REFSPEC + GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER + GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE 
+ node-parameters: false + kill-phase-on: FAILURE + abort-all-job: true + - multijob: + name: test + condition: SUCCESSFUL + projects: + - name: 'fuel-verify-plugin-test-{stream}' + current-parameters: false + predefined-parameters: | + GERRIT_BRANCH=$GERRIT_BRANCH + GERRIT_REFSPEC=$GERRIT_REFSPEC + GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER + GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE + node-parameters: false + kill-phase-on: FAILURE + abort-all-job: true + +- job-template: + name: 'fuel-verify-plugin-{phase}-{stream}' + + disabled: '{obj:disabled}' + + concurrent: true + + properties: + - throttle: + enabled: true + max-total: 6 + option: 'project' + - build-blocker: + use-build-blocker: true + blocking-jobs: + - 'fuel-verify-plugin-test-.*' + block-level: 'NODE' + + # the url to plugin repo can essentially become a variable if + # the plugin name is injected to env by gerrit plugin + scm: + - git: + url: 'https://git.openstack.org/openstack/fuel-plugin-bgpvpn' + refspec: '$GERRIT_REFSPEC' + branches: + - 'origin/$GERRIT_BRANCH' + skip-tag: true + choosing-strategy: 'gerrit' + timeout: 10 + wipe-workspace: true + + wrappers: + - ssh-agent-credentials: + users: + - '{ssh-credentials}' + - timeout: + timeout: 360 + fail: true + parameters: + - project-parameter: + project: '{project}' + - gerrit-parameter: + branch: '{branch}' + - '{slave-label}-defaults' + - '{installer}-defaults' + - 'fuel-verify-plugin-defaults': + gs-pathname: '{gs-pathname}' + + builders: + - description-setter: + description: "Built on $NODE_NAME" + - 'fuel-verify-plugin-{phase}-macro' +##################################### +# builder macros +##################################### +- builder: + name: 'fuel-verify-plugin-build-macro' + builders: + - shell: | + #!/bin/bash + + echo "Not activated!" + +- builder: + name: 'fuel-verify-plugin-test-macro' + builders: + - shell: | + #!/bin/bash + + echo "Not activated!" +##################################### +# parameter macros +##################################### +- parameter: + name: 'fuel-verify-plugin-defaults' + parameters: + - string: + name: BUILD_DIRECTORY + default: $WORKSPACE/build_output + description: "Directory where the build artifact will be located upon the completion of the build." + - string: + name: CACHE_DIRECTORY + default: $HOME/opnfv/cache/$INSTALLER_TYPE + description: "Directory where the cache to be used during the build is located." + - string: + name: GS_URL + default: artifacts.opnfv.org/$PROJECT{gs-pathname} + description: "URL to Google Storage." 
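The multijob template above sets current-parameters: false, so the only Gerrit context the build/test phase jobs receive is the four GERRIT_* values passed through predefined-parameters. A phase builder can then pick up the exact patch under review the same way bifrost-verify.sh does later in this change; a sketch, with the repo URL taken from the templates above:

    # Fetch and check out the patch set the parent multijob was triggered for.
    git fetch https://git.openstack.org/openstack/fuel-plugin-bgpvpn "$GERRIT_REFSPEC"
    git checkout FETCH_HEAD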
diff --git a/jjb/infra/bifrost-verify-jobs.yml b/jjb/infra/bifrost-verify-jobs.yml new file mode 100644 index 000000000..17796a832 --- /dev/null +++ b/jjb/infra/bifrost-verify-jobs.yml @@ -0,0 +1,178 @@ +- project: + name: 'openstack-bifrost-verify' +#-------------------------------- +# branches +#-------------------------------- + stream: + - master: + branch: '{stream}' +#-------------------------------- +# projects +#-------------------------------- + project: + - 'openstack': + project-repo: 'https://git.openstack.org/openstack/bifrost' + clone-location: '/opt/bifrost' + - 'opnfv': + project-repo: 'https://gerrit.opnfv.org/gerrit/releng' + clone-location: '/opt/releng' +#-------------------------------- +# distros +#-------------------------------- + distro: + - 'trusty': + disabled: false + dib-os-release: 'trusty' + dib-os-element: 'ubuntu-minimal' + dib-os-packages: 'openssh-server,vlan,vim,less,bridge-utils,language-pack-en,iputils-ping,rsyslog,curl' + - 'centos7': + disabled: false + dib-os-release: '7' + dib-os-element: 'centos-minimal' + dib-os-packages: 'openssh-server,vim,less,bridge-utils,iputils,rsyslog,curl' + - 'suse': + disabled: true + dib-os-release: 'suse' + dib-os-element: 'suse' + dib-os-packages: '' +#-------------------------------- +# type +#-------------------------------- + type: + - virtual +#-------------------------------- +# jobs +#-------------------------------- + jobs: + - '{project}-bifrost-verify-{distro}-{type}-{stream}' +#-------------------------------- +# job templates +#-------------------------------- +- job-template: + name: '{project}-bifrost-verify-{distro}-{type}-{stream}' + + disabled: '{obj:disabled}' + + concurrent: false + + properties: + - build-blocker: + use-build-blocker: true + blocking-jobs: + - '.*-bifrost-verify.*-{type}' + block-level: 'NODE' + + parameters: + - string: + name: PROJECT + default: '{project}' + - string: + name: PROJECT_REPO + default: '{project-repo}' + - string: + name: CLONE_LOCATION + default: '{clone-location}' + - string: + name: DISTRO + default: '{distro}' + - string: + name: DIB_OS_RELEASE + default: '{dib-os-release}' + - string: + name: DIB_OS_ELEMENT + default: '{dib-os-element}' + - string: + name: DIB_OS_PACKAGES + default: '{dib-os-packages}' + - string: + name: CLEAN_DIB_IMAGES + default: 'true' + - label: + name: SLAVE_LABEL + default: 'infra-{type}-{distro}' + + scm: + - git: + url: '$PROJECT_REPO' + refspec: '$GERRIT_REFSPEC' + branches: + - 'origin/$GERRIT_BRANCH' + skip-tag: true + choosing-strategy: 'gerrit' + timeout: 10 + wipe-workspace: true + + triggers: + - '{project}-gerrit-trigger': + branch: '{branch}' + + builders: + - description-setter: + description: "Built on $NODE_NAME" + - shell: + !include-raw-escape: ./bifrost-verify.sh + + publishers: + - email: + recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com zhang.jun3g@zte.com.cn +#-------------------------------- +# trigger macros +#-------------------------------- +- trigger: + name: 'openstack-gerrit-trigger' + triggers: + - gerrit: + server-name: 'review.openstack.org' + silent-start: true + skip-vote: + successful: true + failed: true + unstable: true + notbuilt: true + escape-quotes: true + trigger-on: + - patchset-created-event: + exclude-drafts: 'false' + exclude-trivial-rebase: 'false' + exclude-no-code-change: 'false' + - comment-added-contains-event: + comment-contains-value: 'recheck' + projects: + - project-compare-type: 'PLAIN' + project-pattern: 'openstack/bifrost' + 
branches: + - branch-compare-type: 'ANT' + branch-pattern: '**/{branch}' + forbidden-file-paths: + - compare-type: ANT + pattern: 'doc/**' + - compare-type: ANT + pattern: 'releasenotes/**' + readable-message: true +- trigger: + name: 'opnfv-gerrit-trigger' + triggers: + - gerrit: + server-name: 'gerrit.opnfv.org' + trigger-on: + - patchset-created-event: + exclude-drafts: 'false' + exclude-trivial-rebase: 'false' + exclude-no-code-change: 'false' + - draft-published-event + - comment-added-contains-event: + comment-contains-value: 'recheck' + - comment-added-contains-event: + comment-contains-value: 'reverify' + projects: + - project-compare-type: 'ANT' + project-pattern: 'releng' + branches: + - branch-compare-type: 'ANT' + branch-pattern: '**/{branch}' + file-paths: + - compare-type: ANT + pattern: 'prototypes/bifrost/**' + - compare-type: ANT + pattern: 'jjb/infra/**' + readable-message: true diff --git a/jjb/infra/openstack-bifrost-verify.sh b/jjb/infra/bifrost-verify.sh index c17cb8861..f7920a36f 100755 --- a/jjb/infra/openstack-bifrost-verify.sh +++ b/jjb/infra/bifrost-verify.sh @@ -17,43 +17,42 @@ function fix_ownership() { if [ -z "${JOB_URL+x}" ]; then echo "Not running as part of Jenkins. Handle the logs manually." else - chown -R jenkins:jenkins $WORKSPACE + sudo chown -R jenkins:jenkins $WORKSPACE + sudo chown -R jenkins:jenkins ${HOME}/.cache fi } # check distro to see if we support it -# we will have centos and suse supported in future -case "$DISTRO" in - trusty) - #start the test - echo "Starting provisioning of 3 VMs" - ;; - *) - echo "Distro $DISTRO is not supported!" - exit 1 -esac +if [[ ! "$DISTRO" =~ (trusty|centos7|suse) ]]; then + echo "Distro $DISTRO is not supported!" + exit 1 +fi # remove previously cloned repos -/bin/rm -rf /opt/bifrost /opt/puppet-infracloud /opt/stack /opt/releng +sudo /bin/rm -rf /opt/bifrost /opt/puppet-infracloud /opt/stack /opt/releng -# clone upstream bifrost repo and checkout the patch to verify -git clone https://git.openstack.org/openstack/bifrost /opt/bifrost -cd /opt/bifrost -git fetch https://git.openstack.org/openstack/bifrost $GERRIT_REFSPEC && git checkout FETCH_HEAD +# Fix up permissions +fix_ownership + +# clone all the repos first and checkout the patch afterwards +sudo git clone https://git.openstack.org/openstack/bifrost /opt/bifrost +sudo git clone https://git.openstack.org/openstack-infra/puppet-infracloud /opt/puppet-infracloud +sudo git clone https://gerrit.opnfv.org/gerrit/releng /opt/releng -# clone puppet-infracloud -git clone https://git.openstack.org/openstack-infra/puppet-infracloud /opt/puppet-infracloud +# checkout the patch +cd $CLONE_LOCATION +sudo git fetch $PROJECT_REPO $GERRIT_REFSPEC && sudo git checkout FETCH_HEAD # combine opnfv and upstream scripts/playbooks -cp -R $WORKSPACE/prototypes/bifrost/* /opt/bifrost/ +sudo /bin/cp -rf /opt/releng/prototypes/bifrost/* /opt/bifrost/ # cleanup remnants of previous deployment cd /opt/bifrost -./scripts/destroy-env.sh +sudo -E ./scripts/destroy-env.sh # provision 3 VMs; jumphost, controller, and compute cd /opt/bifrost -./scripts/test-bifrost-deployment.sh +sudo -E ./scripts/test-bifrost-deployment.sh # list the provisioned VMs cd /opt/bifrost diff --git a/jjb/infra/infra-daily-jobs.yml b/jjb/infra/infra-daily-jobs.yml deleted file mode 100644 index d779d56d5..000000000 --- a/jjb/infra/infra-daily-jobs.yml +++ /dev/null @@ -1,166 +0,0 @@ -- project: - name: 'infra-daily-jobs' - - project: 'releng' - - installer: 'infra' -#-------------------------------- -# BRANCH 
ANCHORS -#-------------------------------- - master: &master - stream: master - branch: '{stream}' - gs-pathname: '' -#-------------------------------- -# CI Slaves -#-------------------------------- - pod: - - virtual: - slave-label: infra-virtual-trusty - <<: *master -#-------------------------------- -# phases -#-------------------------------- - phase: - - 'provision' - - 'deploy' - - 'smoketest' -#-------------------------------- -# scenarios -#-------------------------------- - scenario: - - 'os-nosdn-nofeature-noha' -#-------------------------------- -# jobs -#-------------------------------- - jobs: - - 'infra-{scenario}-{pod}-daily-{stream}' - - 'infra-{phase}-{pod}-daily-{stream}' -######################## -# job templates -######################## -- job-template: - name: 'infra-{scenario}-{pod}-daily-{stream}' - - concurrent: false - - properties: - - build-blocker: - use-build-blocker: true - blocking-jobs: - - 'infra-os-.*?-{pod}-daily-{stream}' - block-level: 'NODE' - - wrappers: - - build-name: - name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO' - - triggers: - - timed: 'H */3 * * *' - - parameters: - - project-parameter: - project: '{project}' - - '{installer}-defaults' - - '{slave-label}-defaults' - - string: - name: DEPLOY_SCENARIO - default: '{scenario}' - - builders: - - description-setter: - description: "Built on $NODE_NAME" - - trigger-builds: - - project: 'infra-provision-{pod}-daily-{stream}' - current-parameters: false - predefined-parameters: - DEPLOY_SCENARIO={scenario} - same-node: true - block: true - - trigger-builds: - - project: 'infra-deploy-{pod}-daily-{stream}' - current-parameters: false - predefined-parameters: - DEPLOY_SCENARIO={scenario} - same-node: true - block: true - - trigger-builds: - - project: 'infra-smoketest-{pod}-daily-{stream}' - current-parameters: false - predefined-parameters: - DEPLOY_SCENARIO={scenario} - block: true - same-node: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' - - publishers: - - email: - recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com zhang.jun3g@zte.com.cn - -- job-template: - name: 'infra-{phase}-{pod}-daily-{stream}' - - concurrent: false - - properties: - - build-blocker: - use-build-blocker: true - blocking-jobs: - - 'infra-provision-{pod}-daily-{stream}' - - 'infra-deploy-{pod}-daily-{stream}' - - 'infra-smoketest-{pod}-daily-{stream}' - block-level: 'NODE' - - parameters: - - project-parameter: - project: '{project}' - - '{installer}-defaults' - - '{slave-label}-defaults' - - string: - name: DEPLOY_SCENARIO - default: 'os-nosdn-nofeature-noha' - - string: - name: CLEAN_DIB_IMAGES - default: 'false' - - scm: - - git-scm: - credentials-id: '{ssh-credentials}' - refspec: '' - branch: '{branch}' - - wrappers: - - build-name: - name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO' - - builders: - - description-setter: - description: "Built on $NODE_NAME" - - 'infra-{phase}-daily-builder' -##################################### -# builder macros -##################################### -- builder: - name: 'infra-provision-daily-builder' - builders: - - shell: | - #!/bin/bash - - sudo $WORKSPACE/jjb/infra/infra-provision.sh -- builder: - name: 'infra-deploy-daily-builder' - builders: - - shell: | - #!/bin/bash - - sudo $WORKSPACE/jjb/infra/infra-deploy.sh -- builder: - name: 'infra-smoketest-daily-builder' - builders: - - shell: | - #!/bin/bash - - sudo 
$WORKSPACE/jjb/infra/infra-smoketest.sh diff --git a/jjb/infra/infra-deploy.sh b/jjb/infra/infra-deploy.sh deleted file mode 100755 index 35ef9a1d0..000000000 --- a/jjb/infra/infra-deploy.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -echo "Not activated!" diff --git a/jjb/infra/infra-provision.sh b/jjb/infra/infra-provision.sh deleted file mode 100755 index 45ed3b928..000000000 --- a/jjb/infra/infra-provision.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash -set -xe - -if [[ $(whoami) != "root" ]]; then - echo "Error: This script must be run as root!" - exit 1 -fi - -# remove previously cloned repos -/bin/rm -rf /opt/bifrost /opt/puppet-infracloud /opt/releng - -# clone upstream repos -git clone https://git.openstack.org/openstack/bifrost /opt/bifrost -git clone https://git.openstack.org/openstack-infra/puppet-infracloud /opt/puppet-infracloud - -# clone opnfv releng repo -git clone https://gerrit.opnfv.org/gerrit/releng /opt/releng - -# combine opnfv and upstream scripts/playbooks -cp -R /opt/releng/prototypes/bifrost/* /opt/bifrost/ - -# cleanup remnants of previous deployment -cd /opt/bifrost -./scripts/destroy-env.sh - -# provision 3 VMs; jumphost, controller, and compute -cd /opt/bifrost -./scripts/test-bifrost-deployment.sh - -# list the provisioned VMs -cd /opt/bifrost -source env-vars -ironic node-list -virsh list diff --git a/jjb/infra/infra-smoketest.sh b/jjb/infra/infra-smoketest.sh deleted file mode 100755 index 35ef9a1d0..000000000 --- a/jjb/infra/infra-smoketest.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -echo "Not activated!" diff --git a/jjb/infra/openstack-bifrost-verify-jobs.yml b/jjb/infra/openstack-bifrost-verify-jobs.yml deleted file mode 100644 index 8afe47cd1..000000000 --- a/jjb/infra/openstack-bifrost-verify-jobs.yml +++ /dev/null @@ -1,111 +0,0 @@ -- project: - name: 'openstack-bifrost-verify' - - project: 'releng' -#-------------------------------- -# branches -#-------------------------------- - stream: - - master: - branch: '{stream}' -#-------------------------------- -# distros -# jobs for centos7 and suse can be enabled once the support is there -#-------------------------------- - distro: - - 'trusty': - slave-label: infra-virtual-trusty - disabled: false - - 'centos7': - slave-label: infra-virtual-trusty - disabled: true - - 'suse': - slave-label: infra-virtual-trusty - disabled: true -#-------------------------------- -# jobs -#-------------------------------- - jobs: - - 'openstack-bifrost-verify-{distro}-{stream}' -#-------------------------------- -# job templates -#-------------------------------- -- job-template: - name: 'openstack-bifrost-verify-{distro}-{stream}' - - concurrent: false - - disabled: '{obj:disabled}' - - properties: - - build-blocker: - use-build-blocker: true - blocking-jobs: - - 'infra-os-.*?-daily-.*' - block-level: 'NODE' - - parameters: - - project-parameter: - project: '{project}' - - string: - name: DISTRO - default: '{distro}' - - string: - name: CLEAN_DIB_IMAGES - default: 'true' - - '{slave-label}-defaults' - - scm: - - git-scm: - credentials-id: '{ssh-credentials}' - refspec: '' - branch: '{branch}' - - triggers: - - gerrit: - server-name: 'review.openstack.org' - silent-start: true - skip-vote: - successful: true - failed: true - unstable: true - notbuilt: true - escape-quotes: true - trigger-on: - - patchset-created-event: - exclude-drafts: 'false' - exclude-trivial-rebase: 'false' - exclude-no-code-change: 'false' - - comment-added-contains-event: - comment-contains-value: 'recheck' - projects: - - 
project-compare-type: 'PLAIN' - project-pattern: 'openstack/bifrost' - branches: - - branch-compare-type: 'ANT' - branch-pattern: '**/master' - forbidden-file-paths: - - compare-type: ANT - pattern: 'doc/**' - - compare-type: ANT - pattern: 'releasenotes/**' - readable-message: true - - builders: - - description-setter: - description: "Built on $NODE_NAME" - - 'openstack-bifrost-verify-builder' - - publishers: - - email: - recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com zhang.jun3g@zte.com.cn -##################################### -# builder macros -##################################### -- builder: - name: 'openstack-bifrost-verify-builder' - builders: - - shell: | - #!/bin/bash - - sudo -E $WORKSPACE/jjb/infra/openstack-bifrost-verify.sh diff --git a/jjb/joid/joid-ci-jobs.yml b/jjb/joid/joid-daily-jobs.yml index 6d0370983..6d0370983 100644 --- a/jjb/joid/joid-ci-jobs.yml +++ b/jjb/joid/joid-daily-jobs.yml diff --git a/jjb/multisite/multisite.yml b/jjb/multisite/multisite.yml index 21b973093..24c03fd4a 100644 --- a/jjb/multisite/multisite.yml +++ b/jjb/multisite/multisite.yml @@ -113,17 +113,6 @@ - project: 'functest-fuel-virtual-suite-{stream}' current-parameters: true predefined-parameters: - FUNCTEST_SUITE_NAME=healthcheck - same-node: true - block: true - block-thresholds: - build-step-failure-threshold: 'never' - failure-threshold: 'never' - unstable-threshold: 'FAILURE' - - trigger-builds: - - project: 'functest-fuel-virtual-suite-{stream}' - current-parameters: true - predefined-parameters: FUNCTEST_SUITE_NAME=multisite same-node: true block: true diff --git a/jjb/opnfv/opnfv-docker.sh b/jjb/opnfv/opnfv-docker.sh index c5edf7cc3..e637f7b32 100644 --- a/jjb/opnfv/opnfv-docker.sh +++ b/jjb/opnfv/opnfv-docker.sh @@ -59,7 +59,7 @@ if [[ "$UPDATE_LATEST_STABLE" == "true" ]]; then echo "ERROR: The image $DOCKER_REPO_NAME with tag $STABLE_TAG does not exist." exit 1 fi - docker tag -f $DOCKER_REPO_NAME:$STABLE_TAG $DOCKER_REPO_NAME:latest_stable + docker tag $DOCKER_REPO_NAME:$STABLE_TAG $DOCKER_REPO_NAME:latest_stable echo "Pushing $DOCKER_REPO_NAME:latest_stable ..." docker push $DOCKER_REPO_NAME:latest_stable exit 0 @@ -67,22 +67,9 @@ fi # cd to directory where Dockerfile is located -if [[ "$DOCKER_REPO_NAME" == "opnfv/bottlenecks" ]]; then - cd $WORKSPACE/ci/docker -elif [[ "$DOCKER_REPO_NAME" == "opnfv/cperf" ]]; then - cd $WORKSPACE/docker -elif [[ "$DOCKER_REPO_NAME" == "opnfv/dovetail" ]]; then - cd $WORKSPACE/docker -elif [[ "$DOCKER_REPO_NAME" == "opnfv/functest" ]]; then - cd $WORKSPACE/docker -elif [[ "$DOCKER_REPO_NAME" == "opnfv/qtip" ]]; then - cd $WORKSPACE/docker -elif [[ "$DOCKER_REPO_NAME" == "opnfv/storperf" ]]; then - cd $WORKSPACE/docker -elif [[ "$DOCKER_REPO_NAME" == "opnfv/yardstick" ]]; then - cd $WORKSPACE/tests/ci/docker/yardstick-ci -else - echo "ERROR: DOCKER_REPO_NAME parameter not valid: $DOCKER_REPO_NAME" +cd $WORKSPACE/docker +if [ ! -f ./Dockerfile ]; then + echo "ERROR: Dockerfile not found." exit 1 fi @@ -119,7 +106,7 @@ else fi echo "Creating tag '$DOCKER_TAG'..." 
-docker tag -f $DOCKER_REPO_NAME:$DOCKER_BRANCH_TAG $DOCKER_REPO_NAME:$DOCKER_TAG
+docker tag $DOCKER_REPO_NAME:$DOCKER_BRANCH_TAG $DOCKER_REPO_NAME:$DOCKER_TAG
 
 # list the images
 echo "Available images are:"
diff --git a/jjb/opnfv/opnfv-docs.yml b/jjb/opnfv/opnfv-docs.yml
index 743657334..0ac8aa7e0 100644
--- a/jjb/opnfv/opnfv-docs.yml
+++ b/jjb/opnfv/opnfv-docs.yml
@@ -14,11 +14,13 @@
    stream:
        - master:
            branch: '{stream}'
+           doc-version: ''
            gs-pathname: ''
            disabled: false
        - colorado:
            branch: 'stable/{stream}'
-           gs-pathname: '/{stream}'
+           doc-version: '2.0'
+           gs-pathname: '/{stream}/{doc-version}'
            disabled: false

########################
@@ -82,7 +84,7 @@
        - string:
            name: GS_URL
            default: '$GS_BASE{gs-pathname}'
-           description: "Directory where the build artifact will be located upon the completion of the build."
+           description: "Directory where the build artifact will be located upon the completion of the build."

    scm:
        - gerrit-trigger-scm:
diff --git a/jjb/opnfv/opnfv-utils.yml b/jjb/opnfv/opnfv-utils.yml
new file mode 100644
index 000000000..94a99d451
--- /dev/null
+++ b/jjb/opnfv/opnfv-utils.yml
@@ -0,0 +1,40 @@
+- project:
+
+    name: opnfv-utils
+
+    jobs:
+        - 'prune-docker-images'
+########################
+# job templates
+########################
+- job-template:
+    name: 'prune-docker-images'
+
+    disabled: false
+
+    concurrent: true
+
+    parameters:
+        - node:
+            name: SLAVE_NAME
+            description: Slaves to prune docker images
+            default-slaves:
+                - arm-build1
+                - arm-build2
+                - ericsson-build4
+                - ericsson-build5
+                - lf-build2
+            allowed-multiselect: true
+            ignore-offline-nodes: true
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - shell: |
+            #!/bin/bash
+
+            (docker ps -q; docker ps -aq) | sort | uniq -u | xargs --no-run-if-empty docker rm
+            docker images -f dangling=true -q | xargs --no-run-if-empty docker rmi
+
+    triggers:
+        - timed: '@midnight'
diff --git a/jjb/opnfv/slave-params.yml b/jjb/opnfv/slave-params.yml
index ff24e9e6d..6cbaba4a5 100644
--- a/jjb/opnfv/slave-params.yml
+++ b/jjb/opnfv/slave-params.yml
@@ -239,16 +239,6 @@
            name: GIT_BASE
            default: https://gerrit.opnfv.org/gerrit/$PROJECT
            description: 'Git URL to use on this Jenkins Slave'
-- parameter:
-    name: 'infra-virtual-trusty-defaults'
-    parameters:
-        - label:
-            name: SLAVE_LABEL
-            default: 'infra-virtual-trusty'
-        - string:
-            name: GIT_BASE
-            default: https://gerrit.opnfv.org/gerrit/$PROJECT
-            description: 'Git URL to use on this Jenkins Slave'
#####################################################
# Parameters for build slaves
#####################################################
diff --git a/jjb/qtip/qtip-cleanup.sh b/jjb/qtip/qtip-cleanup.sh
index b923aa2a8..95babb318 100644
--- a/jjb/qtip/qtip-cleanup.sh
+++ b/jjb/qtip/qtip-cleanup.sh
@@ -6,15 +6,12 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
-echo "Cleaning up QTIP docker containers/images..."
-
 # Remove previous running containers if exist
 if [[ ! -z $(docker ps -a | grep opnfv/qtip) ]]; then
     echo "Removing existing opnfv/qtip containers..."
-    running_containers=$(docker ps | grep opnfv/qtip | awk '{print $1}')
-    docker stop ${running_containers}
-    all_containers=$(docker ps -a | grep opnfv/qtip | awk '{print $1}')
-    docker rm ${all_containers}
+    # Workaround: stopping the qtip container sometimes throws an error.
+    # To keep the CI job unblocked, remove the container by force without stopping it.
+ docker rm -f $(docker ps -a | grep opnfv/qtip | awk '{print $1}') fi # Remove existing images if exist @@ -27,4 +24,3 @@ if [[ ! -z $(docker images | grep opnfv/qtip) ]]; then docker rmi opnfv/qtip:$tag done fi - diff --git a/jjb/releng/releng-ci-jobs.yml b/jjb/releng/releng-ci-jobs.yml index ac323a3c1..2d88449e8 100644 --- a/jjb/releng/releng-ci-jobs.yml +++ b/jjb/releng/releng-ci-jobs.yml @@ -2,7 +2,6 @@ name: builder-jobs jobs: - 'builder-verify-jjb' - - 'builder-sandbox' - 'builder-merge' - 'artifacts-api' @@ -102,55 +101,6 @@ jenkins-jobs update -r --delete-old jjb/ - job-template: - name: 'builder-sandbox' - - # Upload all jjb jobs to sandbox instance, excluding jobs jjb - # builder jobs - - parameters: - - project-parameter: - project: '{project}' - - gerrit-parameter: - branch: 'master' - - scm: - - gerrit-trigger-scm: - credentials-id: '{ssh-credentials}' - refspec: '' - choosing-strategy: 'default' - - triggers: - - gerrit: - trigger-on: - - change-merged-event - - comment-added-contains-event: - comment-contains-value: 'remerge' - projects: - - project-compare-type: 'ANT' - project-pattern: 'releng' - branches: - - branch-compare-type: 'ANT' - branch-pattern: '**/sandbox' - file-paths: - - compare-type: ANT - pattern: jjb/** - - compare-type: ANT - pattern: utils/** - - builders: - - shell: - !include-raw: verify-releng.sh - - shell: | - #!/bin/bash - source /opt/virtualenv/jenkins-job-builder/bin/activate - cd /opt/jenkins-ci/releng - git pull - cp /etc/jenkins_jobs/jenkins_jobs.ini jenkins_sandbox.ini - sed -i 's/url=.*/url=https:\/\/sandbox.opnfv.org\//g' jenkins_sandbox.ini - jenkins-jobs --conf jenkins_sandbox.ini update -r -x jjb/releng --delete-old jjb - rm -f jenkins_sandbox.ini - -- job-template: name: 'artifacts-api' # Generate and upload the JSON file to used for artifacts site diff --git a/prototypes/bifrost/playbooks/roles/bifrost-prepare-for-test-dynamic/defaults/main.yml b/prototypes/bifrost/playbooks/roles/bifrost-prepare-for-test-dynamic/defaults/main.yml deleted file mode 100644 index 69eb787e7..000000000 --- a/prototypes/bifrost/playbooks/roles/bifrost-prepare-for-test-dynamic/defaults/main.yml +++ /dev/null @@ -1,4 +0,0 @@ ---- -node_ssh_pause: 10 -wait_timeout: 1900 -multinode_testing: false diff --git a/prototypes/bifrost/playbooks/test-bifrost-infracloud.yaml b/prototypes/bifrost/playbooks/test-bifrost-infracloud.yaml index b4dffdccf..d650f1056 100644 --- a/prototypes/bifrost/playbooks/test-bifrost-infracloud.yaml +++ b/prototypes/bifrost/playbooks/test-bifrost-infracloud.yaml @@ -67,13 +67,18 @@ - role: ironic-enroll-dynamic - { role: ironic-inspect-node, when: inspect_nodes | default('false') | bool == true } - hosts: baremetal + name: "Create configuration drive files" + become: no + connection: local + roles: + - role: bifrost-configdrives-dynamic +- hosts: baremetal vars: multinode_testing: "{{ inventory_dhcp | bool == true }}" - name: "Create configuration drive files and deploy machines." + name: "Deploy machines." 
become: no connection: local + serial: 1 roles: - - role: bifrost-configdrives-dynamic - role: bifrost-deploy-nodes-dynamic - role: bifrost-prepare-for-test-dynamic - serial: 1 diff --git a/prototypes/bifrost/scripts/destroy-env.sh b/prototypes/bifrost/scripts/destroy-env.sh index 72ade5b14..674645777 100755 --- a/prototypes/bifrost/scripts/destroy-env.sh +++ b/prototypes/bifrost/scripts/destroy-env.sh @@ -37,8 +37,9 @@ rm -rf /var/log/libvirt/baremetal_logs/*.log CLEAN_DIB_IMAGES=${CLEAN_DIB_IMAGES:-false} if [ $CLEAN_DIB_IMAGES = "true" ]; then - rm -rf /httpboot - rm -rf /tftpboot + rm -rf /httpboot /tftpboot + mkdir /httpboot /tftpboot + chmod -R 755 /httpboot /tftpboot fi # remove VM disk images diff --git a/prototypes/bifrost/scripts/test-bifrost-deployment.sh b/prototypes/bifrost/scripts/test-bifrost-deployment.sh index fb49afc42..773697efe 100755 --- a/prototypes/bifrost/scripts/test-bifrost-deployment.sh +++ b/prototypes/bifrost/scripts/test-bifrost-deployment.sh @@ -18,6 +18,7 @@ ENABLE_VENV="false" USE_DHCP="false" USE_VENV="false" BUILD_IMAGE=true +PROVISION_WAIT_TIMEOUT=${PROVISION_WAIT_TIMEOUT:-2400} # Set defaults for ansible command-line options to drive the different # tests. @@ -114,7 +115,8 @@ ${ANSIBLE} -vvvv \ -e download_ipa=${DOWNLOAD_IPA} \ -e create_ipa_image=${CREATE_IPA_IMAGE} \ -e write_interfaces_file=${WRITE_INTERFACES_FILE} \ - -e ipv4_gateway=192.168.122.1 + -e ipv4_gateway=192.168.122.1 \ + -e wait_timeout=${PROVISION_WAIT_TIMEOUT} EXITCODE=$? if [ $EXITCODE != 0 ]; then diff --git a/prototypes/puppet-infracloud/hiera/common.yaml b/prototypes/puppet-infracloud/hiera/common.yaml index 1fcde2f75..5a758afe4 100644 --- a/prototypes/puppet-infracloud/hiera/common.yaml +++ b/prototypes/puppet-infracloud/hiera/common.yaml @@ -76,5 +76,90 @@ hosts: compute00.opnfvlocal: ip: 192.168.122.4 -# br-eth0 for debian, br_ens3 for RHEL -bridge_name: br-eth0 +# settings for bifrost +bridge_name: br_opnfv +ironic_db_password: pass +bifrost_mysql_password: pass +bifrost_ssh_private_key: | + -----BEGIN RSA PRIVATE KEY----- + MIIEowIBAAKCAQEAvwr2LbfJQuKZDOQse+DQHX84c9LCHvQfy0pu15JkiLM5dUtx + hLr/5fxSzblubS4WkNZVsGTtUp51f8yoQyltqquGlVfUf0GO+PCLaRp0arhli0Rl + sAGatI12amnrVap82jINiKQRO+UnF97z2hiB35Zxko4jSaPOOiL48DEKowZHL2Ja + jjUt6dXcaNotXNaKZpcxz92gdZhFOPU8BrJ/mI9k9u6QI/4qLG/WzW4frHLigA1t + OrZ3Nnu3tloWNsS1lh71KRfEv46VD8tCAZfXqJtjdH4Z4AUO++CLF/K4zXhIoFqU + Wf8aS64YzoaAfnJ+jUwKs92dVjuFtbEk+t2YLQIDAQABAoIBAQCAr++YaD6oUV9r + caANaiiGVhY+3u9oTmXEWMVFbRVPh/riaglzsUuDLm7QqWIbJXqJ4fcitTmv95GK + nt+RLizzVEt5+gnoFs8qHU6rY+ibos6z+0TMRKhjiw8DK4oc0JT9nc3EB1CcmgW1 + bLeyZ+PEKuEiKaDXkAHw43HwyfgyS3Lc90TSaLj3P7egsBuhx1Yy+wgyiPQ/bF0b + OBLHHK+nwYLGAq25n/+zA7XAndc2OQd4KzUJcvjyND+IMYnzEbeFH36UcFqbvgGu + nR55yIrCxsxcJhhT2slMNtg/xCmo3Jzz1kNBtwbNBik4/5Lkckny0xhQl+h7vz9U + +cKjwfK5AoGBAPSy/JHMeQ5/rzbA5LAZhVa/Yc4B5datkwLNg6mh4CzMabJs8AKd + de05XB/Nq6Hfp8Aa7zLt2GIb3iqF6w/y+j8YAXS2KQD8/HDs2/9Oxr512kfssk5D + dcpTqeIFetzM9pqnctVXBGlbz0QLeL+lT3kXY00+CBm6LjEv8dsPxZr3AoGBAMfd + nDnTjUVZ+sRpTBDM3MhKLMETxNWNDaozL+SgpYQwtKlSTfQVdFcM66a8qCFjQFsc + /6AjL0bjCA5u859IoQ4ValD0vgkyLHdEN0P1Grf3MK8kjOW1A1s1i2FY6U0z9AM2 + zsUCA9bB5A9wwxwofoa8VkaDpVSMITbakVoNxJj7AoGAImcft2fmBTHScoJAJLoR + 0xZpK8t8gug4aQZ34luN5v5+RcWnINb+g3GzEA2cec+2B/5BbwmdiH2eiJ/3YnCo + 2kIHwl7x+N+Ypk/GxmhO7Owo2j/e+b3mS6HjmpFmqrBuY2PzcyceyalMxKZQPbGC + MOYm4e88uFFCuUuiV0gqYhUCgYBmSFhCE6yxeCnoSEbgNicq7SLYMIjEDOqYVpfE + 9h2ed9qM6IzyQ+SFBBy4+MVGSOfPeRis2DTCnz8pO8i7lEyvy2/cPFPgmue8pZFu + 2smwqfUlPJxKlgdArzdEO18x3kubNXo9whk614EiEcAX8fVGeK3iak665Pe+fb5z + 
Cqa47wKBgDp3/dgtMneoePKNefy4a9vp5y4XKviC6GOrr0xpEM2ptZ+I7mUJcACN
+  KbaW0dPgtS1cApelmF73IAJRYbKMW7lQzql61IoGw4pGTIMPKerqRs/hTWYPZiSG
+  QHWf3iTV5uQr6cSRoUgkAUHVw2KTGad41RAhDp352iakZuNNBFga
+  -----END RSA PRIVATE KEY-----
+bifrost_ssh_public_key: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC/CvYtt8lC4pkM5Cx74NAdfzhz0sIe9B/LSm7XkmSIszl1S3GEuv/l/FLNuW5tLhaQ1lWwZO1SnnV/zKhDKW2qq4aVV9R/QY748ItpGnRquGWLRGWwAZq0jXZqaetVqnzaMg2IpBE75ScX3vPaGIHflnGSjiNJo846IvjwMQqjBkcvYlqONS3p1dxo2i1c1opmlzHP3aB1mEU49TwGsn+Yj2T27pAj/iosb9bNbh+scuKADW06tnc2e7e2WhY2xLWWHvUpF8S/jpUPy0IBl9eom2N0fhngBQ774IsX8rjNeEigWpRZ/xpLrhjOhoB+cn6NTAqz3Z1WO4W1sST63Zgt yolanda@trasto
+infracloud_vlan: 415
+infracloud_gateway_ip: 172.30.13.1
+default_network_interface: eno3
+dhcp_static_mask: 255.255.255.128
+dhcp_pool_start: 10.20.0.130
+dhcp_pool_end: 10.20.0.254
+network_interface: eno1
+ipv4_nameserver: 8.8.8.8
+ipv4_subnet_mask: 255.255.255.0
+ipv4_gateway: 172.30.13.1
+ironic_inventory:
+  controller000.opnfvlocal:
+    driver: agent_ipmitool
+    driver_info:
+      power:
+        ipmi_address: 172.30.8.90
+        ipmi_username: admin
+        provisioning_ipv4_address: 10.20.0.130
+    ipv4_address: 172.30.13.66
+    ansible_ssh_host: 172.30.13.66
+    ipv4_gateway: 172.30.13.1
+    ipv4_interface_mac: 00:1e:67:f9:9b:35
+    name: controller000.opnfvlocal
+    nics:
+      - mac: a4:bf:01:01:a9:fc
+      - mac: 00:1e:67:f6:9b:35
+    properties:
+      cpu_arch: x86_64
+      cpus: '44'
+      disk_size: '1800'
+      ram: '65536'
+    uuid: 00a22849-2442-e511-906e-0012795d96dd
+  compute000.opnfvlocal:
+    driver: agent_ipmitool
+    driver_info:
+      power:
+        ipmi_address: 172.30.8.91
+        ipmi_username: admin
+        provisioning_ipv4_address: 10.20.0.131
+    ipv4_address: 172.30.13.67
+    ansible_ssh_host: 172.30.13.67
+    ipv4_gateway: 172.30.13.1
+    ipv4_subnet_mask: 255.255.255.0
+    name: compute000.opnfvlocal
+    nics:
+      - mac: a4:bf:01:01:a9:d4
+      - mac: 00:1e:67:f6:9b:37
+    properties:
+      cpu_arch: x86_64
+      cpus: '44'
+      disk_size: '1800'
+      ram: '65536'
+    uuid: 0051e926-f242-e511-906e-0012795d96dd
+ipmi_passwords: {'172.30.8.90': 'octopus', '172.30.8.91': 'octopus'}
diff --git a/prototypes/puppet-infracloud/manifests/site.pp b/prototypes/puppet-infracloud/manifests/site.pp
index 31c45767f..70901e696 100644
--- a/prototypes/puppet-infracloud/manifests/site.pp
+++ b/prototypes/puppet-infracloud/manifests/site.pp
@@ -73,7 +73,7 @@ node 'jumphost.opnfvlocal' {
   }
 }
 
-node 'baremetal.opnfvlocal' {
+node 'baremetal.opnfvlocal', 'lfpod5-jumpserver' {
   class { '::opnfv::server':
     iptables_public_udp_ports => [67, 69],
     sysadmins                 => hiera('sysadmins', []),
@@ -91,10 +91,12 @@ node 'baremetal.opnfvlocal' {
     vlan                      => hiera('infracloud_vlan'),
     gateway_ip                => hiera('infracloud_gateway_ip'),
     default_network_interface => hiera('default_network_interface'),
+    dhcp_static_mask          => hiera('dhcp_static_mask'),
     dhcp_pool_start           => hiera('dhcp_pool_start'),
     dhcp_pool_end             => hiera('dhcp_pool_end'),
     network_interface         => hiera('network_interface'),
     ipv4_nameserver           => hiera('ipv4_nameserver'),
     ipv4_subnet_mask          => hiera('ipv4_subnet_mask'),
+    bridge_name               => hiera('bridge_name'),
   }
 }
diff --git a/utils/jenkins-jnlp-connect.sh b/utils/jenkins-jnlp-connect.sh
index 4b710cab2..9ef4298ef 100755
--- a/utils/jenkins-jnlp-connect.sh
+++ b/utils/jenkins-jnlp-connect.sh
@@ -151,6 +151,7 @@ fi
 usage() {
 cat << EOF
+**this file must be copied to the jenkins home directory to work**
 jenkins-jnlp-connect.sh configures monit to keep slave connection up
 Checks for new versions of slave.jar
 run as root to create pid directory and create monit config.
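The PROVISION_WAIT_TIMEOUT added to prototypes/bifrost/scripts/test-bifrost-deployment.sh above uses the standard ${VAR:-default} idiom, so a caller can raise the Ansible wait_timeout without editing the script; a sketch, with an illustrative value and the same sudo -E invocation bifrost-verify.sh uses:

    # Give slow hardware an hour instead of the default 2400 seconds.
    export PROVISION_WAIT_TIMEOUT=3600
    cd /opt/bifrost && sudo -E ./scripts/test-bifrost-deployment.sh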
diff --git a/utils/push-test-logs.sh b/utils/push-test-logs.sh index f24d884f5..87cee78bf 100644 --- a/utils/push-test-logs.sh +++ b/utils/push-test-logs.sh @@ -23,7 +23,8 @@ dir_result="${HOME}/opnfv/$project/results/${branch}" node_list=(\ 'lf-pod1' 'lf-pod2' 'intel-pod2' 'intel-pod3' \ 'intel-pod5' 'intel-pod6' 'intel-pod7' 'intel-pod8' \ -'ericsson-pod2' \ +'ericsson-pod2' 'ericsson-pod3' 'ericsson-pod4' \ +'ericsson-virtual2' 'ericsson-virtual3' 'ericsson-virtual4' 'ericsson-virtual5' \ 'arm-pod1' 'arm-pod3' \ 'huawei-pod1' 'huawei-pod2' 'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4') diff --git a/utils/test/dashboard/dashboard/common/elastic_access.py b/utils/test/dashboard/dashboard/common/elastic_access.py index e90a17fa3..8c6494d39 100644 --- a/utils/test/dashboard/dashboard/common/elastic_access.py +++ b/utils/test/dashboard/dashboard/common/elastic_access.py @@ -5,60 +5,41 @@ import urllib3 http = urllib3.PoolManager() -def delete_request(url, creds, body=None): +def _request(method, url, creds=None, body=None): headers = urllib3.make_headers(basic_auth=creds) - http.request('DELETE', url, headers=headers, body=body) + return http.request(method, url, headers=headers, body=body) -def publish_json(json_ojb, creds, to): - json_dump = json.dumps(json_ojb) - if to == 'stdout': - print json_dump - return 200, None - else: - headers = urllib3.make_headers(basic_auth=creds) - result = http.request('POST', to, headers=headers, body=json_dump) - return result.status, result.data +def _post(url, creds=None, body=None): + return _request('POST', url, creds=creds, body=body) -def _get_nr_of_hits(elastic_json): - return elastic_json['hits']['total'] +def _get(url, creds=None, body=None): + return json.loads(_request('GET', url, creds=creds, body=body).data) -def get_elastic_docs(elastic_url, creds, body=None, field = '_source'): +def delete_docs(url, creds=None, body=None): + return _request('DELETE', url, creds=creds, body=body) - # 1. get the number of results - headers = urllib3.make_headers(basic_auth=creds) - elastic_json = json.loads(http.request('GET', elastic_url + '/_search?size=0', headers=headers, body=body).data) - print elastic_json - nr_of_hits = _get_nr_of_hits(elastic_json) - - # 2. 
get all results - elastic_json = json.loads(http.request('GET', elastic_url + '/_search?size={}'.format(nr_of_hits), headers=headers, body=body).data) - - elastic_docs = [] - for hit in elastic_json['hits']['hits']: - elastic_docs.append(hit[field]) - return elastic_docs - - -def get_elastic_docs_by_days(elastic_url, creds, days): - if days == 0: - body = '''{ - "query": { - "match_all": {} - } - }''' - elif days > 0: - body = '''{{ - "query" : {{ - "range" : {{ - "start_date" : {{ - "gte" : "now-{}d" - }} - }} - }} - }}'''.format(days) - else: - raise Exception('Update days must be non-negative') - return get_elastic_docs(elastic_url, creds, body) + +def publish_docs(url, creds=None, body=None): + result = _post(url, creds=creds, body=(json.dumps(body))) + return result.status, result.data + + +def _get_docs_nr(url, creds=None, body=None): + res_data = _get('{}/_search?size=0'.format(url), creds=creds, body=body) + print type(res_data), res_data + return res_data['hits']['total'] + + +def get_docs(url, creds=None, body=None, field='_source'): + + docs_nr = _get_docs_nr(url, creds=creds, body=body) + res_data = _get('{}/_search?size={}'.format(url, docs_nr), + creds=creds, body=body) + + docs = [] + for hit in res_data['hits']['hits']: + docs.append(hit[field]) + return docs diff --git a/utils/test/dashboard/dashboard/conf/config.py b/utils/test/dashboard/dashboard/conf/config.py index 2e0f1cabb..b868999a2 100644 --- a/utils/test/dashboard/dashboard/conf/config.py +++ b/utils/test/dashboard/dashboard/conf/config.py @@ -25,7 +25,6 @@ class APIConfig: self._default_config_location = "../etc/config.ini" self.elastic_url = 'http://localhost:9200' self.elastic_creds = None - self.destination = 'elasticsearch' self.kibana_url = None self.is_js = True self.js_path = None @@ -67,7 +66,6 @@ class APIConfig: # Linking attributes to keys from file with their sections obj.elastic_url = obj._get_str_parameter("elastic", "url") obj.elastic_creds = obj._get_str_parameter("elastic", "creds") - obj.destination = obj._get_str_parameter("output", "destination") obj.kibana_url = obj._get_str_parameter("kibana", "url") obj.is_js = obj._get_bool_parameter("kibana", "js") obj.js_path = obj._get_str_parameter("kibana", "js_path") @@ -77,12 +75,10 @@ class APIConfig: def __str__(self): return "elastic_url = %s \n" \ "elastic_creds = %s \n" \ - "destination = %s \n" \ "kibana_url = %s \n" \ "is_js = %s \n" \ "js_path = %s \n" % (self.elastic_url, - self.elastic_creds, - self.destination, - self.kibana_url, - self.is_js, - self.js_path) + self.elastic_creds, + self.kibana_url, + self.is_js, + self.js_path) diff --git a/utils/test/dashboard/dashboard/elastic2kibana/main.py b/utils/test/dashboard/dashboard/elastic2kibana/main.py index c1cbc308e..f16879b93 100644 --- a/utils/test/dashboard/dashboard/elastic2kibana/main.py +++ b/utils/test/dashboard/dashboard/elastic2kibana/main.py @@ -3,8 +3,10 @@ import json import urlparse import argparse +from jinja2 import PackageLoader, Environment -from common import logger_utils, elastic_access +from common import elastic_access +from common import logger_utils from conf import testcases from conf.config import APIConfig @@ -25,6 +27,9 @@ es_creds = CONF.elastic_creds _installers = {'fuel', 'apex', 'compass', 'joid'} +env = Environment(loader=PackageLoader('elastic2kibana', 'templates')) +env.filters['jsonify'] = json.dumps + class KibanaDashboard(dict): def __init__(self, project_name, case_name, family, installer, pod, scenarios, visualization): @@ -44,12 +49,12 @@ class 
KibanaDashboard(dict): def _create_visualizations(self): for scenario in self.scenarios: - self._kibana_visualizations.append(KibanaVisualization(self.project_name, - self.case_name, - self.installer, - self.pod, - scenario, - self.visualization)) + self._kibana_visualizations.append(Visualization(self.project_name, + self.case_name, + self.installer, + self.pod, + scenario, + self.visualization)) self._visualization_title = self._kibana_visualizations[0].vis_state_title @@ -57,7 +62,8 @@ class KibanaDashboard(dict): for visualization in self._kibana_visualizations: url = urlparse.urljoin(base_elastic_url, '/.kibana/visualization/{}'.format(visualization.id)) logger.debug("publishing visualization '{}'".format(url)) - elastic_access.publish_json(visualization, es_creds, url) + # logger.error("_publish_visualization: %s" % visualization) + elastic_access.publish_docs(url, es_creds, visualization) def _construct_panels(self): size_x = 6 @@ -135,98 +141,37 @@ class KibanaDashboard(dict): def _publish(self): url = urlparse.urljoin(base_elastic_url, '/.kibana/dashboard/{}'.format(self.id)) logger.debug("publishing dashboard '{}'".format(url)) - elastic_access.publish_json(self, es_creds, url) + elastic_access.publish_docs(url, es_creds, self) def publish(self): self._publish_visualizations() self._publish() -class KibanaSearchSourceJSON(dict): - """ - "filter": [ - {"match": {"installer": {"query": installer, "type": "phrase"}}}, - {"match": {"project_name": {"query": project_name, "type": "phrase"}}}, - {"match": {"case_name": {"query": case_name, "type": "phrase"}}} - ] - """ - - def __init__(self, project_name, case_name, installer, pod, scenario): - super(KibanaSearchSourceJSON, self).__init__() - self["filter"] = [ - {"match": {"project_name": {"query": project_name, "type": "phrase"}}}, - {"match": {"case_name": {"query": case_name, "type": "phrase"}}}, - {"match": {"installer": {"query": installer, "type": "phrase"}}}, - {"match": {"scenario": {"query": scenario, "type": "phrase"}}} - ] - if pod != 'all': - self["filter"].append({"match": {"pod_name": {"query": pod, "type": "phrase"}}}) - - -class VisualizationState(dict): +class VisStateBuilder(object): def __init__(self, visualization): - super(VisualizationState, self).__init__() - name = visualization.get('name') - fields = visualization.get('fields') - - if name == 'tests_failures': - mode = 'grouped' - metric_type = 'sum' - self['type'] = 'histogram' - else: - # duration or success_percentage - mode = 'stacked' - metric_type = 'avg' - self['type'] = 'line' - - self['params'] = { - "shareYAxis": True, - "addTooltip": True, - "addLegend": True, - "smoothLines": False, - "scale": "linear", - "interpolate": "linear", - "mode": mode, - "times": [], - "addTimeMarker": False, - "defaultYExtents": False, - "setYExtents": False, - "yAxis": {} - } + super(VisStateBuilder, self).__init__() + self.visualization = visualization - self['aggs'] = [] + def build(self): + name = self.visualization.get('name') + fields = self.visualization.get('fields') - i = 1 + aggs = [] + index = 1 for field in fields: - self['aggs'].append({ - "id": str(i), - "type": metric_type, - "schema": "metric", - "params": { - "field": field.get('field') - } - }) - i += 1 - - self['aggs'].append({ - "id": str(i), - "type": 'date_histogram', - "schema": "segment", - "params": { - "field": "start_date", - "interval": "auto", - "customInterval": "2h", - "min_doc_count": 1, - "extended_bounds": {} - } + aggs.append({ + "id": index, + "field": field.get("field") }) + index += 
1 - self['listeners'] = {} - self['title'] = ' '.join(['{} {}'.format(x['type'], x['params']['field']) for x in self['aggs'] - if x['schema'] == 'metric']) + template = env.get_template('{}.json'.format(name)) + vis = template.render(aggs=aggs) + return json.loads(vis) -class KibanaVisualization(dict): +class Visualization(object): def __init__(self, project_name, case_name, installer, pod, scenario, visualization): """ We need two things @@ -242,32 +187,35 @@ class KibanaVisualization(dict): :return: """ - super(KibanaVisualization, self).__init__() - vis_state = VisualizationState(visualization) - self.vis_state_title = vis_state['title'] - self['title'] = '{} {} {} {} {} {}'.format(project_name, - case_name, - self.vis_state_title, - installer, - pod, - scenario) - self.id = self['title'].replace(' ', '-').replace('/', '-') - self['visState'] = json.dumps(vis_state, separators=(',', ':')) - self['uiStateJSON'] = "{}" - self['description'] = "Kibana visualization for project_name '{}', case_name '{}', data '{}', installer '{}'," \ - " pod '{}' and scenario '{}'".format(project_name, - case_name, - self.vis_state_title, - installer, - pod, - scenario) - self['scenario'] = 1 - self['kibanaSavedObjectMeta'] = {"searchSourceJSON": json.dumps(KibanaSearchSourceJSON(project_name, - case_name, - installer, - pod, - scenario), - separators=(',', ':'))} + super(Visualization, self).__init__() + visState = VisStateBuilder(visualization).build() + self.vis_state_title = visState['title'] + + vis = { + "visState": json.dumps(visState), + "filters": { + "project_name": project_name, + "case_name": case_name, + "installer": installer, + "metric": self.vis_state_title, + "pod_name": pod, + "scenario": scenario + } + } + + template = env.get_template('visualization.json') + + self.visualization = json.loads(template.render(vis=vis)) + self._dumps(['visState', 'description', 'uiStateJSON']) + self._dumps_2deeps('kibanaSavedObjectMeta', 'searchSourceJSON') + self.id = self.visualization['title'].replace(' ', '-').replace('/', '-') + + def _dumps(self, items): + for key in items: + self.visualization[key] = json.dumps(self.visualization[key]) + + def _dumps_2deeps(self, key1, key2): + self.visualization[key1][key2] = json.dumps(self.visualization[key1][key2]) def _get_pods_and_scenarios(project_name, case_name, installer): @@ -286,7 +234,7 @@ def _get_pods_and_scenarios(project_name, case_name, installer): } }) - elastic_data = elastic_access.get_elastic_docs(urlparse.urljoin(base_elastic_url, '/test_results/mongo2elastic'), + elastic_data = elastic_access.get_docs(urlparse.urljoin(base_elastic_url, '/test_results/mongo2elastic'), es_creds, query_json) pods_and_scenarios = {} diff --git a/utils/test/dashboard/dashboard/elastic2kibana/templates/duration.json b/utils/test/dashboard/dashboard/elastic2kibana/templates/duration.json new file mode 100644 index 000000000..f50a668db --- /dev/null +++ b/utils/test/dashboard/dashboard/elastic2kibana/templates/duration.json @@ -0,0 +1,45 @@ +{% set aggs = aggs|default([]) -%} + +{ + "title": "duration", + "type": "line", + "listeners": {}, + "params": { + "addLegend": true, + "shareYAxis": true, + "addTooltip": true, + "smoothLines": false, + "scale": "linear", + "interpolate": "linear", + "times": [], + "addTimeMarker": false, + "defaultYExtents": false, + "setYExtents": false, + "yAxis": {}, + "mode": "stacked" + }, + "aggs": [ + {% for agg in aggs %} + { + "id": {{agg.id }}, + "type": "avg", + "schema": "metric", + "params": { + "field": "{{agg.field}}" + } + }, + 
{% endfor %} + { + "id": {{ aggs|length + 1 }}, + "type": "date_histogram", + "schema": "segment", + "params": { + "field": "start_date", + "interval": "auto", + "customInterval": "2h", + "min_doc_count": 1, + "extended_bounds": {} + } + } + ] +} diff --git a/utils/test/dashboard/dashboard/elastic2kibana/templates/success_percentage.json b/utils/test/dashboard/dashboard/elastic2kibana/templates/success_percentage.json new file mode 100644 index 000000000..993070844 --- /dev/null +++ b/utils/test/dashboard/dashboard/elastic2kibana/templates/success_percentage.json @@ -0,0 +1,45 @@ +{% set aggs = aggs|default([]) -%} + +{ + "title": "success_percentage", + "type": "line", + "listeners": {}, + "params": { + "addLegend": true, + "shareYAxis": true, + "addTooltip": true, + "smoothLines": false, + "scale": "linear", + "interpolate": "linear", + "times": [], + "addTimeMarker": false, + "defaultYExtents": false, + "setYExtents": false, + "yAxis": {}, + "mode": "stacked" + }, + "aggs": [ + {% for agg in aggs %} + { + "id": {{agg.id }}, + "type": "avg", + "schema": "metric", + "params": { + "field": "{{agg.field}}" + } + }, + {% endfor %} + { + "id": {{ aggs|length + 1 }}, + "type": "date_histogram", + "schema": "segment", + "params": { + "field": "start_date", + "interval": "auto", + "customInterval": "2h", + "min_doc_count": 1, + "extended_bounds": {} + } + } + ] +} diff --git a/utils/test/dashboard/dashboard/elastic2kibana/templates/tests_failures.json b/utils/test/dashboard/dashboard/elastic2kibana/templates/tests_failures.json new file mode 100644 index 000000000..01f9ba89e --- /dev/null +++ b/utils/test/dashboard/dashboard/elastic2kibana/templates/tests_failures.json @@ -0,0 +1,45 @@ +{% set aggs = aggs|default([]) -%} + +{ + "title": "tests_failures", + "type": "histogram", + "listeners": {}, + "params": { + "addLegend": true, + "shareYAxis": true, + "addTooltip": true, + "smoothLines": false, + "scale": "linear", + "interpolate": "linear", + "times": [], + "addTimeMarker": false, + "defaultYExtents": false, + "setYExtents": false, + "yAxis": {}, + "mode": "grouped" + }, + "aggs": [ + {% for agg in aggs %} + { + "id": {{agg.id }}, + "type": "sum", + "schema": "metric", + "params": { + "field": "{{agg.field}}" + } + }, + {% endfor %} + { + "id": {{ aggs|length + 1 }}, + "type": "date_histogram", + "schema": "segment", + "params": { + "field": "start_date", + "interval": "auto", + "customInterval": "2h", + "min_doc_count": 1, + "extended_bounds": {} + } + } + ] +} diff --git a/utils/test/dashboard/dashboard/elastic2kibana/templates/visualization.json b/utils/test/dashboard/dashboard/elastic2kibana/templates/visualization.json new file mode 100644 index 000000000..d51d4174e --- /dev/null +++ b/utils/test/dashboard/dashboard/elastic2kibana/templates/visualization.json @@ -0,0 +1,30 @@ +{% set vis = vis|default({}) -%} + + +{ + "description": "Kibana visualization for {{ vis.filters }}", + "kibanaSavedObjectMeta": { + "searchSourceJSON": { + "filter": [ + {% for key, value in vis.filters.iteritems() if key != "metric" and not (key == "pod_name" and value == "all") %} + { + "match": { + "{{ key }}": { + "query": "{{ value }}", + "type": "phrase" + } + } + } + {% if not loop.last %} + , + {% endif %} + {% endfor %} + ] + } + }, + "scenario": 1, + "title": "{{vis.filters.project_name}} {{vis.filters.case_name}} {{vis.filters.installer}} {{vis.filters.metric}} {{vis.filters.pod_name}} {{vis.filters.scenario}}", + "uiStateJSON": {}, + "visState": {{ vis.visState }} +} + diff --git 
a/utils/test/dashboard/dashboard/mongo2elastic/main.py b/utils/test/dashboard/dashboard/mongo2elastic/main.py index 25b5320d7..76efb14f0 100644 --- a/utils/test/dashboard/dashboard/mongo2elastic/main.py +++ b/utils/test/dashboard/dashboard/mongo2elastic/main.py @@ -38,12 +38,12 @@ tmp_docs_file = './mongo-{}.json'.format(uuid.uuid4()) class DocumentPublisher: - def __init__(self, doc, fmt, exist_docs, creds, to): + def __init__(self, doc, fmt, exist_docs, creds, elastic_url): self.doc = doc self.fmt = fmt self.creds = creds self.exist_docs = exist_docs - self.to = to + self.elastic_url = elastic_url self.is_formatted = True def format(self): @@ -64,7 +64,7 @@ class DocumentPublisher: self._publish() def _publish(self): - status, data = elastic_access.publish_json(self.doc, self.creds, self.to) + status, data = elastic_access.publish_docs(self.elastic_url, self.creds, self.doc) if status > 300: logger.error('Publish record[{}] failed, due to [{}]' .format(self.doc, json.loads(data)['error']['reason'])) @@ -163,14 +163,13 @@ class DocumentPublisher: class DocumentsPublisher: - def __init__(self, project, case, fmt, days, elastic_url, creds, to): + def __init__(self, project, case, fmt, days, elastic_url, creds): self.project = project self.case = case self.fmt = fmt self.days = days self.elastic_url = elastic_url self.creds = creds - self.to = to self.existed_docs = [] def export(self): @@ -200,7 +199,36 @@ class DocumentsPublisher: exit(-1) def get_existed_docs(self): - self.existed_docs = elastic_access.get_elastic_docs_by_days(self.elastic_url, self.creds, self.days) + if self.days == 0: + body = '''{{ + "query": {{ + "bool": {{ + "must": [ + {{ "match": {{ "project_name": "{}" }} }}, + {{ "match": {{ "case_name": "{}" }} }} + ] + }} + }} + }}'''.format(self.project, self.case) + elif self.days > 0: + body = '''{{ + "query": {{ + "bool": {{ + "must": [ + {{ "match": {{ "project_name": "{}" }} }}, + {{ "match": {{ "case_name": "{}" }} }} + ], + "filter": {{ + "range": {{ + "start_date": {{ "gte": "now-{}d" }} + }} + }} + }} + }} + }}'''.format(self.project, self.case, self.days) + else: + raise Exception('Update days must be non-negative') + self.existed_docs = elastic_access.get_docs(self.elastic_url, self.creds, body) return self def publish(self): @@ -211,7 +239,7 @@ class DocumentsPublisher: self.fmt, self.existed_docs, self.creds, - self.to).format().publish() + self.elastic_url).format().publish() finally: fdocs.close() self._remove() @@ -223,13 +251,9 @@ class DocumentsPublisher: def main(): base_elastic_url = urlparse.urljoin(CONF.elastic_url, '/test_results/mongo2elastic') - to = CONF.destination days = args.latest_days es_creds = CONF.elastic_creds - if to == 'elasticsearch': - to = base_elastic_url - for project, case_dicts in testcases.testcases_yaml.items(): for case_dict in case_dicts: case = case_dict.get('name') @@ -239,5 +263,4 @@ def main(): fmt, days, base_elastic_url, - es_creds, - to).export().get_existed_docs().publish() + es_creds).export().get_existed_docs().publish() diff --git a/utils/test/dashboard/etc/config.ini b/utils/test/dashboard/etc/config.ini index b94ac7b4f..1e67bd822 100644 --- a/utils/test/dashboard/etc/config.ini +++ b/utils/test/dashboard/etc/config.ini @@ -4,10 +4,6 @@ url = http://localhost:9200 creds = -[output] -# elasticsearch or console -destination = elasticsearch - [kibana] url = http://10.63.243.17:5601/app/kibana js = true diff --git a/utils/test/dashboard/kibana_cleanup.py b/utils/test/dashboard/kibana_cleanup.py index 9ce4994f5..ee0190049 
100644 --- a/utils/test/dashboard/kibana_cleanup.py +++ b/utils/test/dashboard/kibana_cleanup.py @@ -14,10 +14,10 @@ logger.addHandler(file_handler) def delete_all(url, es_creds): - ids = elastic_access.get_elastic_docs(url, es_creds, body=None, field='_id') + ids = elastic_access.get_docs(url, es_creds, body=None, field='_id') for id in ids: del_url = '/'.join([url, id]) - elastic_access.delete_request(del_url, es_creds) + elastic_access.delete_docs(del_url, es_creds) if __name__ == '__main__': diff --git a/utils/test/reporting/functest/default.css b/utils/test/reporting/css/default.css index 897c3b12b..7da5e277a 100644 --- a/utils/test/reporting/functest/default.css +++ b/utils/test/reporting/css/default.css @@ -75,3 +75,30 @@ h2 { font-weight: bold; color:rgb(128, 128, 128) } + +#power-gauge g.arc { + fill: steelblue; +} + +#power-gauge g.pointer { + fill: #e85116; + stroke: #b64011; +} + +#power-gauge g.label text { + text-anchor: middle; + font-size: 14px; + font-weight: bold; + fill: #666; +} + +#power-gauge path { + +} + +.axis path, +.axis line { + fill: none; + stroke: #000; + shape-rendering: crispEdges; +} diff --git a/utils/test/reporting/functest/reporting-status.py b/utils/test/reporting/functest/reporting-status.py index 90699bd61..9df699629 100755 --- a/utils/test/reporting/functest/reporting-status.py +++ b/utils/test/reporting/functest/reporting-status.py @@ -184,8 +184,13 @@ for version in conf.versions: scenario_criteria = conf.MAX_SCENARIO_CRITERIA s_score = str(scenario_score) + "/" + str(scenario_criteria) - s_score_percent = float( + s_score_percent = 0.0 + try: + s_score_percent = float( scenario_score) / float(scenario_criteria) * 100 + except (ZeroDivisionError, TypeError, ValueError): + logger.error("cannot calculate the score percent") + s_status = "KO" if scenario_score < scenario_criteria: logger.info(">>>> scenario not OK, score = %s/%s" % diff --git a/utils/test/reporting/functest/reportingConf.py b/utils/test/reporting/functest/reportingConf.py index e1c4b61a8..1c9a2ac9f 100644 --- a/utils/test/reporting/functest/reportingConf.py +++ b/utils/test/reporting/functest/reportingConf.py @@ -13,7 +13,6 @@ installers = ["apex", "compass", "fuel", "joid"] # list of test cases declared in testcases.yaml but that must not be # taken into account for the scoring blacklist = ["ovno", "security_scan"] -# versions = ["brahmaputra", "master"] versions = ["master", "colorado"] PERIOD = 10 MAX_SCENARIO_CRITERIA = 50 diff --git a/utils/test/reporting/functest/template/index-status-tmpl.html b/utils/test/reporting/functest/template/index-status-tmpl.html index 67c23491a..2beb9128e 100644 --- a/utils/test/reporting/functest/template/index-status-tmpl.html +++ b/utils/test/reporting/functest/template/index-status-tmpl.html @@ -3,17 +3,65 @@ <meta charset="utf-8"> <!-- Bootstrap core CSS --> <link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css" rel="stylesheet"> - <link href="default.css" rel="stylesheet"> + <link href="../../../css/default.css" rel="stylesheet"> <script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js"></script> <script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script> - <script type="text/javascript"> - $(document).ready(function (){ - $(".btn-more").click(function() { - $(this).hide(); - $(this).parent().find(".panel-default").show(); + <script type="text/javascript" src="http://d3js.org/d3.v2.min.js"></script> + <script type="text/javascript" 
src="../../../js/gauge.js"></script> + <script type="text/javascript" src="../../../js/trend.js"></script> + <script> + function onDocumentReady() { + // Gauge management + {% for scenario in scenario_stats.iteritems() -%} + var gaugeScenario{{loop.index}} = gauge('#gaugeScenario{{loop.index}}'); + {%- endfor %} + + // assign success rate to the gauge + function updateReadings() { + {% for scenario,iteration in scenario_stats.iteritems() -%} + gaugeScenario{{loop.index}}.update({{scenario_results[scenario].getScorePercent()}}); + {%- endfor %} + } + updateReadings(); + } + + // trend line management + d3.csv("./scenario_history.txt", function(data) { + // *************************************** + // Create the trend line + {% for scenario,iteration in scenario_stats.iteritems() -%} + // for scenario {{scenario}} + // Filter results + var trend{{loop.index}} = data.filter(function(row) { + return row["scenario"]=="{{scenario}}" && row["installer"]=="{{installer}}"; + }) + // Parse the date + trend{{loop.index}}.forEach(function(d) { + d.date = parseDate(d.date); + d.score = +d.score }); - }) - </script> + // Draw the trend line + var mytrend = trend("#trend_svg{{loop.index}}",trend{{loop.index}}) + // **************************************** + {%- endfor %} + }); + if ( !window.isLoaded ) { + window.addEventListener("load", function() { + onDocumentReady(); + }, false); + } else { + onDocumentReady(); + } +</script> +<script type="text/javascript"> +$(document).ready(function (){ + $(".btn-more").click(function() { + $(this).hide(); + $(this).parent().find(".panel-default").show(); + }); +}) +</script> + </head> <body> <div class="container"> @@ -40,41 +88,17 @@ <div class="panel-heading"><h4><b>List of last scenarios ({{version}}) run over the last {{period}} days </b></h4></div> <table class="table"> <tr> - <th width="60%">Scenario</th> + <th width="40%">Scenario</th> <th width="20%">Status</th> + <th width="20%">Trend</th> <th width="10%">Score</th> <th width="10%">Iteration</th> </tr> {% for scenario,iteration in scenario_stats.iteritems() -%} <tr class="tr-ok"> <td><a href={{scenario_results[scenario].getUrlLastRun()}}>{{scenario}}</a></td> - <td>{%if scenario_results[scenario].getScorePercent() < 8.3 -%} - <img src="../../img/gauge_0.png"> - {%elif scenario_results[scenario].getScorePercent() < 16.7 -%} - <img src="../../img/gauge_8.3.png"> - {%elif scenario_results[scenario].getScorePercent() < 25 -%} - <img src="../../img/gauge_16.7.png"> - {%elif scenario_results[scenario].getScorePercent() < 33.3 -%} - <img src="../../img/gauge_25.png"> - {%elif scenario_results[scenario].getScorePercent() < 41.7 -%} - <img src="../../img/gauge_33.3.png"> - {%elif scenario_results[scenario].getScorePercent() < 50 -%} - <img src="../../img/gauge_41.7.png"> - {%elif scenario_results[scenario].getScorePercent() < 58.3 -%} - <img src="../../img/gauge_50.png"> - {%elif scenario_results[scenario].getScorePercent() < 66.7 -%} - <img src="../../img/gauge_58.3.png"> - {%elif scenario_results[scenario].getScorePercent() < 75 -%} - <img src="../../img/gauge_66.7.png"> - {%elif scenario_results[scenario].getScorePercent() < 83.3 -%} - <img src="../../img/gauge_75.png"> - {%elif scenario_results[scenario].getScorePercent() < 91.7 -%} - <img src="../../img/gauge_83.3.png"> - {%elif scenario_results[scenario].getScorePercent() < 100 -%} - <img src="../../img/gauge_91.7.png"> - {%- else -%} - <img src="../../img/gauge_100.png"> - {%- endif %}</td> + <td><div id="gaugeScenario{{loop.index}}"></div></td> + <td><div 
id="trend_svg{{loop.index}}"></div></td> <td>{{scenario_results[scenario].getScore()}}</td> <td>{{iteration}}</td> </tr> diff --git a/utils/test/reporting/js/gauge.js b/utils/test/reporting/js/gauge.js new file mode 100644 index 000000000..4cad16c61 --- /dev/null +++ b/utils/test/reporting/js/gauge.js @@ -0,0 +1,165 @@ +// ****************************************** +// Gauge for reporting +// Each scenario has a score +// We use a gauge to indicate the trust level +// ****************************************** +var gauge = function(container) { + var that = {}; + var config = { + size : 150, + clipWidth : 250, + clipHeight : 100, + ringInset : 20, + ringWidth : 40, + + pointerWidth : 7, + pointerTailLength : 5, + pointerHeadLengthPercent : 0.8, + + minValue : 0, + maxValue : 100, + + minAngle : -90, + maxAngle : 90, + + transitionMs : 4000, + + majorTicks : 7, + labelFormat : d3.format(',g'), + labelInset : 10, + + arcColorFn : d3.interpolateHsl(d3.rgb('#ff0000'), d3.rgb('#00ff00')) + }; + + +var range = undefined; +var r = undefined; +var pointerHeadLength = undefined; +var value = 0; + +var svg = undefined; +var arc = undefined; +var scale = undefined; +var ticks = undefined; +var tickData = undefined; +var pointer = undefined; + +var donut = d3.layout.pie(); + +function deg2rad(deg) { + return deg * Math.PI / 180; +} + +function newAngle(d) { + var ratio = scale(d); + var newAngle = config.minAngle + (ratio * range); + return newAngle; +} + +function configure() { + range = config.maxAngle - config.minAngle; + r = config.size / 2; + pointerHeadLength = Math.round(r * config.pointerHeadLengthPercent); + + // a linear scale that maps domain values to a percent from 0..1 + scale = d3.scale.linear() + .range([0,1]) + .domain([config.minValue, config.maxValue]); + + ticks = scale.ticks(config.majorTicks); + tickData = d3.range(config.majorTicks).map(function() {return 1/config.majorTicks;}); + + arc = d3.svg.arc() + .innerRadius(r - config.ringWidth - config.ringInset) + .outerRadius(r - config.ringInset) + .startAngle(function(d, i) { + var ratio = d * i; + return deg2rad(config.minAngle + (ratio * range)); + }) + .endAngle(function(d, i) { + var ratio = d * (i+1); + return deg2rad(config.minAngle + (ratio * range)); + }); +} +that.configure = configure; + +function centerTranslation() { + return 'translate('+r +','+ r +')'; +} + +function isRendered() { + return (svg !== undefined); +} +that.isRendered = isRendered; + +function render(newValue) { + svg = d3.select(container) + .append('svg:svg') + .attr('class', 'gauge') + .attr('width', config.clipWidth) + .attr('height', config.clipHeight); + + var centerTx = centerTranslation(); + + var arcs = svg.append('g') + .attr('class', 'arc') + .attr('transform', centerTx); + + arcs.selectAll('path') + .data(tickData) + .enter().append('path') + .attr('fill', function(d, i) { + return config.arcColorFn(d * i); + }) + .attr('d', arc); + + var lg = svg.append('g') + .attr('class', 'label') + .attr('transform', centerTx); + lg.selectAll('text') + .data(ticks) + .enter().append('text') + .attr('transform', function(d) { + var ratio = scale(d); + var newAngle = config.minAngle + (ratio * range); + return 'rotate(' +newAngle +') translate(0,' +(config.labelInset - r) +')'; + }) + .text(config.labelFormat); + + var lineData = [ [config.pointerWidth / 2, 0], + [0, -pointerHeadLength], + [-(config.pointerWidth / 2), 0], + [0, config.pointerTailLength], + [config.pointerWidth / 2, 0] ]; + var pointerLine = d3.svg.line().interpolate('monotone'); + var pg = 
svg.append('g').data([lineData]) + .attr('class', 'pointer') + .attr('transform', centerTx); + + pointer = pg.append('path') + .attr('d', pointerLine/*function(d) { return pointerLine(d) +'Z';}*/ ) + .attr('transform', 'rotate(' +config.minAngle +')'); + + update(newValue === undefined ? 0 : newValue); +} +that.render = render; + +function update(newValue, newConfiguration) { + if ( newConfiguration !== undefined) { + configure(newConfiguration); + } + var ratio = scale(newValue); + var newAngle = config.minAngle + (ratio * range); + pointer.transition() + .duration(config.transitionMs) + .ease('elastic') + .attr('transform', 'rotate(' +newAngle +')'); +} +that.update = update; + +configure(); + +render(); + +return that; +}; diff --git a/utils/test/reporting/js/trend.js b/utils/test/reporting/js/trend.js new file mode 100644 index 000000000..ec48e75ef --- /dev/null +++ b/utils/test/reporting/js/trend.js @@ -0,0 +1,68 @@ +// ****************************************** +// Trend line for reporting +// based on scenario_history.txt +// where data looks like +// date,scenario,installer,detail,score +// 2016-09-22 13:12,os-nosdn-fdio-noha,apex,4/12,33.0 +// 2016-09-22 13:13,os-odl_l2-fdio-noha,apex,12/15,80.0 +// 2016-09-22 13:13,os-odl_l2-sfc-noha,apex,18/24,75.0 +// ..... +// ****************************************** +// Set the dimensions of the canvas / graph +var trend_margin = {top: 20, right: 30, bottom: 50, left: 40}, + trend_width = 300 - trend_margin.left - trend_margin.right, + trend_height = 130 - trend_margin.top - trend_margin.bottom; + +// Parse the date / time +var parseDate = d3.time.format("%Y-%m-%d %H:%M").parse; + +// Set the ranges +var trend_x = d3.time.scale().range([0, trend_width]); +var trend_y = d3.scale.linear().range([trend_height, 0]); + +// Define the axes +var trend_xAxis = d3.svg.axis().scale(trend_x) + .orient("bottom").ticks(2).tickFormat(d3.time.format("%m-%d")); + +var trend_yAxis = d3.svg.axis().scale(trend_y) + .orient("left").ticks(2); + +// Define the line +var valueline = d3.svg.line() + .x(function(d) { return trend_x(d.date); }) + .y(function(d) { return trend_y(d.score); }); + +var trend = function(container, trend_data) { + + var trend_svg = d3.select(container) + .append("svg") + .attr("width", trend_width + trend_margin.left + trend_margin.right) + .attr("height", trend_height + trend_margin.top + trend_margin.bottom) + .append("g") + .attr("transform", + "translate(" + trend_margin.left + "," + trend_margin.top + ")"); + + // Scale the range of the data + trend_x.domain(d3.extent(trend_data, function(d) { return d.date; })); + trend_y.domain([0, d3.max(trend_data, function(d) { return d.score; })]); + + // Add the X Axis + trend_svg.append("g") + .attr("class", "x axis") + .attr("transform", "translate(0," + trend_height + ")") + .call(trend_xAxis); + + // Add the Y Axis + trend_svg.append("g") + .attr("class", "y axis") + .call(trend_yAxis); + + // Add the valueline path. + trend_svg.append("path") + .attr("class", "line") + .attr("d", valueline(trend_data)) + .attr("stroke", "steelblue") + .attr("fill", "none"); + + return trend; +}
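
For reference on the elastic2kibana change above: VisStateBuilder now renders one of the new Jinja2 templates (duration.json, success_percentage.json, tests_failures.json) instead of assembling the visState dict by hand. A minimal standalone sketch of that flow, assuming a Jinja2 environment rooted at the new templates/ directory (the 'details.duration' field name below is illustrative, not taken from this change):

import json
from jinja2 import Environment, FileSystemLoader

# Assumed equivalent of the module-level `env` used by elastic2kibana/main.py;
# the loader path mirrors the templates/ directory added in this change.
env = Environment(loader=FileSystemLoader(
    'utils/test/dashboard/dashboard/elastic2kibana/templates'))

def build_vis_state(visualization):
    # Same shape as VisStateBuilder.build(): number the metric aggs 1..N;
    # the per-metric template itself appends the date_histogram agg as N+1.
    aggs = [{"id": index, "field": field.get('field')}
            for index, field in enumerate(visualization.get('fields'), start=1)]
    template = env.get_template('{}.json'.format(visualization.get('name')))
    return json.loads(template.render(aggs=aggs))

vis_state = build_vis_state({'name': 'duration',
                             'fields': [{'field': 'details.duration'}]})
print(vis_state['type'])  # "line", as set in duration.json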
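
Likewise, the inline query bodies that replace get_elastic_docs_by_days() in mongo2elastic/main.py are plain Elasticsearch JSON: days == 0 means no date filter, days > 0 adds a range filter on start_date, and negative values raise. A sketch of the same construction built as a dict rather than a format string; 'functest' and 'vping_ssh' are hypothetical example values:

import json

def build_existed_docs_query(project, case, days):
    # Mirrors DocumentsPublisher.get_existed_docs(): always match on
    # project_name and case_name; for days > 0 also filter on start_date.
    if days < 0:
        raise Exception('Update days must be non-negative')
    query = {
        "query": {
            "bool": {
                "must": [
                    {"match": {"project_name": project}},
                    {"match": {"case_name": case}}
                ]
            }
        }
    }
    if days > 0:
        query["query"]["bool"]["filter"] = {
            "range": {"start_date": {"gte": "now-{}d".format(days)}}
        }
    return json.dumps(query)

print(build_existed_docs_query('functest', 'vping_ssh', 7))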