-rwxr-xr-x  jjb/fuel/fuel-build.sh                                              |   7
-rwxr-xr-x  jjb/fuel/fuel-deploy.sh                                             |   5
-rwxr-xr-x  jjb/fuel/fuel-download-artifact.sh                                  |   9
-rwxr-xr-x  jjb/fuel/fuel-upload-artifact.sh                                    |   6
-rw-r--r--  jjb/fuel/fuel.yml                                                   |  89
-rw-r--r--  jjb/functest/functest-docker.sh                                     |  16
-rw-r--r--  jjb/functest/functest.yml                                           |  12
-rw-r--r--  jjb/genesis/genesis-opensteak.yml                                   | 219
-rwxr-xr-x  jjb/kvmfornfv/kvmfornfv-build.sh                                    |   9
-rw-r--r--  jjb/kvmfornfv/kvmfornfv.yml                                         |  57
-rwxr-xr-x  utils/fetch_os_creds.sh                                             |  10
-rwxr-xr-x  utils/jenkins-jnlp-connect.sh                                       |   6
-rw-r--r--  utils/test/result_collection_api/dashboard/functest2Dashboard.py    | 109
-rwxr-xr-x  utils/test/result_collection_api/dashboard/vsperf2Dashboard.py      | 121
-rw-r--r--  utils/test/result_collection_api/resources/handlers.py              |  21
15 files changed, 312 insertions, 384 deletions
diff --git a/jjb/fuel/fuel-build.sh b/jjb/fuel/fuel-build.sh
index 178a50c68..cffd8673b 100755
--- a/jjb/fuel/fuel-build.sh
+++ b/jjb/fuel/fuel-build.sh
@@ -12,7 +12,12 @@ echo
[[ -d $CACHE_DIRECTORY ]] || mkdir -p $CACHE_DIRECTORY
# set OPNFV_ARTIFACT_VERSION
-export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
+if [[ "$JOB_NAME" =~ "merge" ]]; then
+ echo "Building Fuel ISO for a merged change"
+ export OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
+else
+ export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
+fi
# start the build
cd $WORKSPACE/$INSTALLER/ci
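
The branch on $JOB_NAME above gives merged changes a stable, gerrit-based artifact version while all other builds keep the UTC timestamp. A minimal sketch of the same logic, runnable outside Jenkins with stand-in values for JOB_NAME and GERRIT_CHANGE_NUMBER:

#!/bin/bash
# Stand-in values; a real run gets these from the Jenkins environment.
JOB_NAME="fuel-merge-build-master"
GERRIT_CHANGE_NUMBER="1234"
if [[ "$JOB_NAME" =~ "merge" ]]; then
    OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"    # -> gerrit-1234
else
    OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")   # -> e.g. 2015-09-20_12-00-00
fi
echo "$OPNFV_ARTIFACT_VERSION"
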
diff --git a/jjb/fuel/fuel-deploy.sh b/jjb/fuel/fuel-deploy.sh
index bb0e2b15f..2ec519597 100755
--- a/jjb/fuel/fuel-deploy.sh
+++ b/jjb/fuel/fuel-deploy.sh
@@ -18,8 +18,8 @@ chmod a+x $HOME
chmod a+x $TMPDIR
# set CONFDIR, BRIDGE
-export CONFDIR=$WORKSPACE/fuel/deploy/templates/hardware_environment/conf/linux_foundation_lab/pod2
-export BRIDGE=pxebr
+CONFDIR=$WORKSPACE/fuel/deploy/templates/hardware_environment/conf/linux_foundation_lab/pod2
+BRIDGE=pxebr
# clone genesis repo and checkout the SR1 tag
echo "Cloning genesis repo"
@@ -42,6 +42,7 @@ echo
# start the deployment
echo "Issuing command"
echo "sudo $WORKSPACE/fuel/ci/deploy.sh -iso $WORKSPACE/opnfv.iso -dea $CONFDIR/dea.yaml -dha $CONFDIR/dha.yaml -s $TMPDIR -b $BRIDGE -nh"
+
sudo $WORKSPACE/fuel/ci/deploy.sh -iso $WORKSPACE/opnfv.iso -dea $CONFDIR/dea.yaml -dha $CONFDIR/dha.yaml -s $TMPDIR -b $BRIDGE -nh
echo
diff --git a/jjb/fuel/fuel-download-artifact.sh b/jjb/fuel/fuel-download-artifact.sh
index 6eb1ba463..05dc05e05 100755
--- a/jjb/fuel/fuel-download-artifact.sh
+++ b/jjb/fuel/fuel-download-artifact.sh
@@ -3,8 +3,13 @@ set -o errexit
set -o nounset
set -o pipefail
-# get the latest.properties file in order to get info regarding latest artifact
-curl -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
+if [[ "$JOB_NAME" =~ "merge" ]]; then
+ # get the properties file for the Fuel ISO built for a merged change
+ curl -s -o $WORKSPACE/latest.properties http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties
+else
+ # get the latest.properties file in order to get info regarding latest artifact
+ curl -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
+fi
# check if we got the file
[[ -f latest.properties ]] || exit 1
diff --git a/jjb/fuel/fuel-upload-artifact.sh b/jjb/fuel/fuel-upload-artifact.sh
index 3b700c649..2783f2cd0 100755
--- a/jjb/fuel/fuel-upload-artifact.sh
+++ b/jjb/fuel/fuel-upload-artifact.sh
@@ -14,7 +14,11 @@ source $WORKSPACE/opnfv.properties
# upload artifact and additional files to google storage
gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1
gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
-gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
+if [[ "$JOB_NAME" =~ "daily" ]]; then
+ gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
+elif [[ "$JOB_NAME" =~ "merge" ]]; then
+ echo "Uploaded Fuel ISO for a merged change"
+fi
echo
echo "--------------------------------------------------------"
diff --git a/jjb/fuel/fuel.yml b/jjb/fuel/fuel.yml
index 1f53454e3..e809592fc 100644
--- a/jjb/fuel/fuel.yml
+++ b/jjb/fuel/fuel.yml
@@ -11,7 +11,6 @@
jobs:
- 'fuel-verify-build-{stream}'
- - 'fuel-verify-virtual-deploy-{stream}'
- 'fuel-merge-build-{stream}'
- 'fuel-merge-virtual-deploy-{stream}'
- 'fuel-daily-{stream}'
@@ -92,78 +91,8 @@
builders:
- shell:
!include-raw ./fuel-build.sh
-# - shell:
-# !include-raw ./fuel-upload-artifact.sh
-# - shell:
-# !include-raw ./fuel-workspace-cleanup.sh
-
-- job-template:
- name: 'fuel-verify-virtual-deploy-{stream}'
-
- project-type: freestyle
-
- node: ericsson-build
-
- concurrent: true
-
- properties:
- - throttle:
- enabled: true
- max-total: 2
- max-per-node: 1
-
- logrotate:
- daysToKeep: 30
- numToKeep: 10
- artifactDaysToKeep: -1
- artifactNumToKeep: -1
-
- parameters:
- - project-parameter:
- project: '{project}'
- - gerrit-parameter:
- branch: '{branch}'
- - fuel-parameter:
- installer: '{installer}'
- gs-pathname: '{gs-pathname}'
-
- scm:
- - gerrit-trigger-scm:
- credentials-id: '{ssh-credentials}'
- refspec: '$GERRIT_REFSPEC'
- choosing-strategy: 'gerrit'
-
- wrappers:
- - ssh-agent-credentials:
- user: '{ssh-credentials}'
-
- triggers:
- - gerrit:
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- dependency-jobs: 'fuel-verify-build-{stream}'
-
- builders:
-# - shell:
-# !include-raw ./fuel-download-artifact.sh
- shell:
- !include-raw ./fuel-virtual-deploy.sh
-# - shell:
-# !include-raw ./fuel-workspace-cleanup.sh
+    - shell:
+        !include-raw ./fuel-workspace-cleanup.sh
- job-template:
name: 'fuel-merge-build-{stream}'
@@ -222,10 +151,10 @@
builders:
- shell:
!include-raw ./fuel-build.sh
-# - shell:
-# !include-raw ./fuel-upload-artifact.sh
-# - shell:
-# !include-raw ./fuel-workspace-cleanup.sh
+ - shell:
+ !include-raw ./fuel-upload-artifact.sh
+ - shell:
+ !include-raw ./fuel-workspace-cleanup.sh
- job-template:
name: 'fuel-merge-virtual-deploy-{stream}'
@@ -281,12 +210,12 @@
dependency-jobs: 'fuel-merge-build-{stream}'
builders:
-# - shell:
-# !include-raw ./fuel-download-artifact.sh
+ - shell:
+ !include-raw ./fuel-download-artifact.sh
- shell:
!include-raw ./fuel-virtual-deploy.sh
-# - shell:
-# !include-raw ./fuel-workspace-cleanup.sh
+ - shell:
+ !include-raw ./fuel-workspace-cleanup.sh
- job-template:
name: 'fuel-daily-{stream}'
diff --git a/jjb/functest/functest-docker.sh b/jjb/functest/functest-docker.sh
index 54315cad1..c73bcf99a 100644
--- a/jjb/functest/functest-docker.sh
+++ b/jjb/functest/functest-docker.sh
@@ -26,6 +26,7 @@ else
echo "Tag version to be build and pushed: $DOCKER_TAG"
fi
+
# Remove previous running containers if exist
if [[ ! -z $(docker ps -a | grep $DOCKER_IMAGE_NAME) ]]; then
echo "Removing existing $DOCKER_IMAGE_NAME containers..."
@@ -33,20 +34,21 @@ if [[ ! -z $(docker ps -a | grep $DOCKER_IMAGE_NAME) ]]; then
docker ps -a | grep $DOCKER_IMAGE_NAME | awk '{{print $1}}' | xargs docker rm
fi
-# list the images
-echo "Available images are:"
-docker images
# Remove existing images if exist
if [[ ! -z $(docker images | grep $DOCKER_IMAGE_NAME) ]]; then
- echo "Removing existing $DOCKER_IMAGE_NAME images..."
- docker images | grep $DOCKER_IMAGE_NAME | awk '{{print $3}}' \
- | xargs docker rmi -f
+ echo "Docker images to remove:"
+ docker images | head -1 && docker images | grep $DOCKER_IMAGE_NAME
+ image_tags=($(docker images | grep $DOCKER_IMAGE_NAME | awk '{{print $2}}'))
+ for tag in "${{image_tags[@]}}"; do
+ echo "Removing docker image $DOCKER_IMAGE_NAME:$tag..."
+ docker rmi $DOCKER_IMAGE_NAME:$tag
+ done
fi
# Start the build
-echo "Building of $DOCKER_IMAGE_NAME:$DOCKER_TAG..."
+echo "Building docker image: $DOCKER_IMAGE_NAME:$DOCKER_TAG..."
cd $WORKSPACE/docker
docker build -t $DOCKER_IMAGE_NAME:$DOCKER_TAG .
echo "Creating tag 'latest'..."
diff --git a/jjb/functest/functest.yml b/jjb/functest/functest.yml
index 598bcf2a5..4df779d8c 100644
--- a/jjb/functest/functest.yml
+++ b/jjb/functest/functest.yml
@@ -226,10 +226,10 @@
artifactNumToKeep: -1
builders:
+ - 'functest-cleanup'
- 'set-functest-env'
- 'functest-all'
- 'functest-store-results'
- - 'functest-cleanup'
- job-template:
name: functest-{installer}-{stream}
@@ -271,10 +271,10 @@
artifactNumToKeep: -1
builders:
+ - 'functest-cleanup'
- 'set-functest-env'
- 'functest-all'
- 'functest-store-results'
- - 'functest-cleanup'
- job-template:
name: functest-vims-{installer}-{stream}
@@ -541,8 +541,7 @@
echo "Functest: Start Docker and prepare environment"
envs="INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP}"
- docker ps -a | grep opnfv/functest | awk '{print $1}' | xargs docker rm -f &>/dev/null
- docker pull opnfv/functest
+ docker pull opnfv/functest:latest_stable
echo "Functest: Running docker run command: docker run -i -e $envs opnfv/functest /bin/bash &"
docker run -i -e $envs opnfv/functest /bin/bash &
docker ps -a
@@ -647,8 +646,7 @@
set +e
# cleanup: remove any docker containers leftovers
- echo "Removing the docker container..."
+ echo "Removing existing Functest Docker containers..."
docker rm -f $(docker ps | grep opnfv/functest | awk '{print $1}')
- echo "Removing the docker image..."
+ echo "Removing existing Functest Docker image..."
docker rmi -f $(docker images | grep opnfv/functest | awk '{print $3}')
-
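
Pinning the image to latest_stable (and running the cleanup before the test run instead of after) means the job always starts from a known-good Functest image. The pull itself is the plain Docker command shown in the hunk above:

#!/bin/bash
docker pull opnfv/functest:latest_stable
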
diff --git a/jjb/genesis/genesis-opensteak.yml b/jjb/genesis/genesis-opensteak.yml
deleted file mode 100644
index f2322354f..000000000
--- a/jjb/genesis/genesis-opensteak.yml
+++ /dev/null
@@ -1,219 +0,0 @@
-# this is the job configuration for bgs
-- project:
-
- name: genesis-opensteak
-
- installer:
- - opensteak
- jobs:
- - 'genesis-opensteak-verify'
- - 'genesis-opensteak-merge'
- - 'genesis-opensteak-daily-{stream}'
-
- # stream: branch with - in place of / (eg. stable-helium)
- # branch: branch (eg. stable/helium)
- stream:
- - master:
- branch: 'master'
-
- project: 'genesis'
-
-########################
-# job templates
-########################
-
-- job-template:
- name: 'genesis-opensteak-verify'
-
- project-type: freestyle
-
- node: ericsson-build
-
- logrotate:
- daysToKeep: 30
- numToKeep: 10
- artifactDaysToKeep: -1
- artifactNumToKeep: -1
-
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/genesis
- - project-parameter:
- project: '{project}'
- - gerrit-parameter:
- branch: 'master'
-
- scm:
- - gerrit-trigger-scm:
- credentials-id: '{ssh-credentials}'
- refspec: '$GERRIT_REFSPEC'
- choosing-strategy: 'gerrit'
-
- wrappers:
- - ssh-agent-credentials:
- user: '{ssh-credentials}'
-
- triggers:
- - gerrit:
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'genesis'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/master'
- file-paths:
- - compare-type: ANT
- pattern: 'common/**'
- - compare-type: ANT
- pattern: 'opensteak/**'
-
-
- builders:
- - 'opensteak-verify'
-
-- job-template:
- name: 'genesis-opensteak-merge'
-
- # builder-merge job to run JJB update
- #
- # This job's purpose is to update all the JJB
-
- project-type: freestyle
-
- node: ericsson-build
-
- logrotate:
- daysToKeep: 30
- numToKeep: 40
- artifactDaysToKeep: -1
- artifactNumToKeep: 5
-
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/genesis
- - project-parameter:
- project: '{project}'
- - gerrit-parameter:
- branch: 'master'
-
- scm:
- - gerrit-trigger-scm:
- credentials-id: '{ssh-credentials}'
- refspec: ''
- choosing-strategy: 'default'
-
- wrappers:
- - ssh-agent-credentials:
- user: '{ssh-credentials}'
-
- triggers:
- - gerrit:
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'genesis'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/master'
- file-paths:
- - compare-type: ANT
- pattern: 'common/**'
- - compare-type: ANT
- pattern: 'opensteak/**'
-
- builders:
- - 'opensteak-merge'
-
-- job-template:
- name: 'genesis-opensteak-daily-{stream}'
-
- project-type: freestyle
-
- node: ericsson-build
-
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: GS_URL
- default: 'artifacts.opnfv.org/genesis/opensteak'
- description: "URL to Google Storage."
- - string:
- name: INSTALLER
- default: 'opensteak'
- description: "Installer to use."
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/genesis
- - string:
- name: GERRIT_BRANCH
- default: origin/master
- description: "Branch to build, deploy and test."
- - string:
- name: GERRIT_REFSPEC
- default: refs/heads/master
- description: "Refspec to retrieve."
-
- scm:
- - git:
- skip-tag: true
- url: $GIT_BASE
- branches:
- - $GERRIT_BRANCH
- refspec: $GERRIT_REFSPEC
-
- triggers:
- - pollscm: '@midnight'
-
- logrotate:
- daysToKeep: 30
- numToKeep: 10
- artifactDaysToKeep: -1
- artifactNumToKeep: -1
-
- builders:
- - 'opensteak-daily-master'
-
-- builder:
- name: opensteak-verify
- builders:
- - shell: |
- #!/bin/bash
- echo "Hello World!"
-
-- builder:
- name: opensteak-merge
- builders:
- - shell: |
- #!/bin/bash
- echo "Hello World!"
-
-- builder:
- name: opensteak-daily-master
- builders:
- - shell: |
- #!/bin/bash
- echo "Hello World!"
diff --git a/jjb/kvmfornfv/kvmfornfv-build.sh b/jjb/kvmfornfv/kvmfornfv-build.sh
new file mode 100755
index 000000000..4e00a9d87
--- /dev/null
+++ b/jjb/kvmfornfv/kvmfornfv-build.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+# build output directory
+OUTPUT_DIR=$WORKSPACE/build_output
+mkdir -p $OUTPUT_DIR
+
+# start the build
+cd $WORKSPACE
+./ci/build.sh $OUTPUT_DIR
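
A hedged local equivalent of the new build step; the clone URL is assumed to follow the same gerrit pattern as the other OPNFV repos in this change, and the real job works from the Jenkins $WORKSPACE checkout instead:

#!/bin/bash
git clone https://gerrit.opnfv.org/gerrit/kvmfornfv   # assumed repo URL
cd kvmfornfv
mkdir -p build_output
./ci/build.sh build_output
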
diff --git a/jjb/kvmfornfv/kvmfornfv.yml b/jjb/kvmfornfv/kvmfornfv.yml
index 17f7cfefa..345edcaaf 100644
--- a/jjb/kvmfornfv/kvmfornfv.yml
+++ b/jjb/kvmfornfv/kvmfornfv.yml
@@ -1,9 +1,8 @@
- project:
name: kvmfornfv
jobs:
- - 'kvmfornfv-verify'
- - 'kvmfornfv-merge'
- - 'kvmfornfv-daily-{stream}'
+ - 'kvmfornfv-verify-{stream}'
+ - 'kvmfornfv-merge-{stream}'
# stream: branch with - in place of / (eg. stable-arno)
# branch: branch (eg. stable/arno)
@@ -14,7 +13,7 @@
project: 'kvmfornfv'
- job-template:
- name: 'kvmfornfv-verify'
+ name: 'kvmfornfv-verify-{stream}'
node: ericsson-build
@@ -67,16 +66,16 @@
builders:
- shell:
- echo "Hello World"
+ !include-raw ./kvmfornfv-build.sh
- job-template:
- name: 'kvmfornfv-merge'
+ name: 'kvmfornfv-merge-{stream}'
# builder-merge job to run JJB update
#
# This job's purpose is to update all the JJB
- node: master
+ node: ericsson-build
project-type: freestyle
@@ -121,46 +120,4 @@
builders:
- shell:
- echo "Hello World"
-
-
-- job-template:
- name: 'kvmfornfv-daily-{stream}'
-
- # Job template for daily builders
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: master
-
- disabled: true
-
- project-type: freestyle
-
- logrotate:
- daysToKeep: '{build-days-to-keep}'
- numToKeep: '{build-num-to-keep}'
- artifactDaysToKeep: '{build-artifact-days-to-keep}'
- artifactNumToKeep: '{build-artifact-num-to-keep}'
-
- parameters:
- - project-parameter:
- project: '{project}'
-
- scm:
- - git-scm:
- credentials-id: '{ssh-credentials}'
- refspec: ''
- branch: '{branch}'
-
- wrappers:
- - ssh-agent-credentials:
- user: '{ssh-credentials}'
-
- triggers:
- - timed: 'H H * * *'
-
- builders:
- - shell:
- echo "Hello World"
+ !include-raw ./kvmfornfv-build.sh
diff --git a/utils/fetch_os_creds.sh b/utils/fetch_os_creds.sh
index cefc85761..7a5f8121a 100755
--- a/utils/fetch_os_creds.sh
+++ b/utils/fetch_os_creds.sh
@@ -126,14 +126,12 @@ elif [ "$installer_type" == "foreman" ]; then
| grep $admin_ip | sed 's/ /\n/g' | grep ^http | head -1) &> /dev/null
elif [ "$installer_type" == "compass" ]; then
- #ip_compass="10.1.0.12"
verify_connectivity $installer_ip
-
- # controller_ip='10.1.0.222'
- controller_ip=$(sshpass -p'root' ssh 2>/dev/null -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no root@10.1.0.12 \
- 'mysql -ucompass -pcompass -Dcompass -e"select package_config from cluster;"' \
- | awk -F"," '{for(i=1;i<NF;i++)if($i~/\"ha_proxy\": {\"vip\":/)print $i}' \
+ controller_ip=$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${installer_ip} \
+ 'mysql -ucompass -pcompass -Dcompass -e"select * from cluster;"' \
+ | awk -F"," '{for(i=1;i<NF;i++)if($i~/\"host1\"/) {print $(i+1);break;}}' \
| grep -oP "\d+.\d+.\d+.\d+")
+
if [ -z $controller_ip ]; then
error "The controller $controller_ip is not up. Please check that the POD is correctly deployed."
fi
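
The controller address is now read from the cluster table on the Compass node itself rather than via a hard-coded helper host. The awk/grep extraction can be checked against a stand-in row; the real column layout of "select * from cluster" may differ:

#!/bin/bash
# Hypothetical excerpt of the query output; only the field following "host1" matters.
sample='..., "host1", "10.1.0.50", "host2", "10.1.0.51", ...'
echo "$sample" \
    | awk -F"," '{for(i=1;i<NF;i++)if($i~/\"host1\"/) {print $(i+1);break;}}' \
    | grep -oP "\d+\.\d+\.\d+\.\d+"
# prints 10.1.0.50
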
diff --git a/utils/jenkins-jnlp-connect.sh b/utils/jenkins-jnlp-connect.sh
index 03e47b8b2..d263b198a 100755
--- a/utils/jenkins-jnlp-connect.sh
+++ b/utils/jenkins-jnlp-connect.sh
@@ -78,7 +78,7 @@ makemonit () {
echo "Writing the following as monit config:"
cat << EOF | tee $monitconfdir/jenkins
check process jenkins with pidfile /var/run/$jenkinsuser/jenkins_jnlp_pid
-start program = "/bin/bash -c 'cd $dir; export started_monit=true; $0 $@'" as uid "$jenkinsuser" and gid "$jenkinsuser"
+start program = "/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'"
stop program = "/bin/bash -c '/bin/kill \$(/bin/cat /var/run/$jenkinsuser/jenkins_jnlp_pid)'"
EOF
}
@@ -87,7 +87,7 @@ if [[ -f $monitconfdir/jenkins ]]; then
#test for diff
if [[ "$(diff $monitconfdir/jenkins <(echo "\
check process jenkins with pidfile /var/run/$jenkinsuser/jenkins_jnlp_pid
-start program = \"/bin/bash -c 'cd $dir; export started_monit=true; $0 $@'\" as uid \"$jenkinsuser\" and gid \"$jenkinsuser\"
+start program = \"usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'\"
stop program = \" /bin/bash -c '/bin/kill \$(/bin/cat /var/run/$jenkinsuser/jenkins_jnlp_pid)'\"\
") )" ]]; then
echo "Updating monit config..."
@@ -169,7 +169,7 @@ do
s ) slave_secret="$OPTARG";;
h ) usage; exit;;
t ) started_monit=true
- skip_monit=true
+ skip_monit=true
run_in_foreground=true ;;
f ) test_firewall ;;
\? ) echo "Unknown option: -$OPTARG" >&2; exit 1;;
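
With the sudo-based start command, monit no longer relies on the "as uid ... and gid ..." clause. A sketch of the rendered stanza, using stand-in values for the jenkins user, working directory and script invocation:

#!/bin/bash
jenkinsuser="jenkins"                          # stand-in slave user
dir="/home/jenkins"                            # stand-in working directory
script="/opt/jenkins-jnlp-connect.sh -j ..."   # stand-in for "$0 $@"
cat <<EOF
check process jenkins with pidfile /var/run/$jenkinsuser/jenkins_jnlp_pid
start program = "/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $script'"
stop program = "/bin/bash -c '/bin/kill \$(/bin/cat /var/run/$jenkinsuser/jenkins_jnlp_pid)'"
EOF
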
diff --git a/utils/test/result_collection_api/dashboard/functest2Dashboard.py b/utils/test/result_collection_api/dashboard/functest2Dashboard.py
index 688f0c28c..bfb7c8729 100644
--- a/utils/test/result_collection_api/dashboard/functest2Dashboard.py
+++ b/utils/test/result_collection_api/dashboard/functest2Dashboard.py
@@ -21,7 +21,7 @@ def get_functest_cases():
get the list of the supported test cases
TODO: update the list when adding a new test case for the dashboard
"""
- return ["vPing", "Tempest", "odl", "Rally"]
+ return ["status", "vPing", "vIMS", "Tempest", "odl", "Rally"]
def format_functest_for_dashboard(case, results):
@@ -53,6 +53,113 @@ def check_functest_case_exist(case):
return True
+def format_status_for_dashboard(results):
+ test_data = [{'description': 'Functest status'}]
+
+ # define magic equation for the status....
+ # 5 suites: vPing, odl, Tempest, vIMS, Rally
+ # Which overall KPI make sense...
+
+ # TODO to be done and discussed
+ testcases = get_functest_cases()
+ test_data.append({'nb test suite(s) run': len(testcases)-1})
+ # test_data.append({'nb test suite(s) failed':1})
+ # test_data.append({'test suite run': ['vPing', 'tempest', 'vIMS' ]})
+ # test_data.append({'average Openstack Tempest failure rate (%)': 10})
+ # test_data.append({'average odl failure rate (%)': 10})
+
+ return test_data
+
+
+def format_vIMS_for_dashboard(results):
+ """
+ Post processing for the vIMS test case
+ """
+ test_data = [{'description': 'vIMS results for Dashboard'}]
+
+ # Graph 1: (duration_deployment_orchestrator,
+ # duration_deployment_vnf,
+ # duration_test) = f(time)
+ # ********************************
+ new_element = []
+
+ for data in results:
+ new_element.append({'x': data['creation_date'],
+ 'y1': data['details']['orchestrator']['duration'],
+ 'y2': data['details']['vIMS']['duration'],
+ 'y3': data['details']['sig_test']['duration']})
+
+    test_data.append({'name': "vIMS deployment/test durations",
+ 'info': {'type': "graph",
+ 'xlabel': 'time',
+                               'y1label': 'orchestration deployment duration',
+ 'y2label': 'vIMS deployment duration',
+ 'y3label': 'vIMS test duration'},
+ 'data_set': new_element})
+
+ # Graph 2: (Nb test, nb failure, nb skipped)=f(time)
+ # **************************************************
+ new_element = []
+
+ for data in results:
+ # Retrieve all the tests
+ nbTests = 0
+ nbFailures = 0
+ nbSkipped = 0
+ vIMS_test = data['details']['sig_test']['result']
+
+ for data_test in vIMS_test:
+ # Calculate nb of tests run and nb of tests failed
+ # vIMS_results = get_vIMSresults(vIMS_test)
+ # print vIMS_results
+ if data_test['result'] == "Passed":
+ nbTests += 1
+ elif data_test['result'] == "Failed":
+ nbFailures += 1
+ elif data_test['result'] == "Skipped":
+ nbSkipped += 1
+
+ new_element.append({'x': data['creation_date'],
+ 'y1': nbTests,
+ 'y2': nbFailures,
+ 'y3': nbSkipped})
+
+ test_data.append({'name': "vIMS nb tests passed/failed/skipped",
+ 'info': {'type': "graph",
+ 'xlabel': 'time',
+ 'y1label': 'Number of tests passed',
+ 'y2label': 'Number of tests failed',
+ 'y3label': 'Number of tests skipped'},
+ 'data_set': new_element})
+
+ # Graph 3: bar graph Summ(nb tests run), Sum (nb tests failed)
+ # ********************************************************
+ nbTests = 0
+ nbFailures = 0
+
+ for data in results:
+ vIMS_test = data['details']['sig_test']['result']
+
+ for data_test in vIMS_test:
+ nbTestsOK = 0
+ nbTestsKO = 0
+
+ if data_test['result'] == "Passed":
+ nbTestsOK += 1
+ elif data_test['result'] == "Failed":
+ nbTestsKO += 1
+
+ nbTests += nbTestsOK + nbTestsKO
+ nbFailures += nbTestsKO
+
+ test_data.append({'name': "Total number of tests run/failure tests",
+ 'info': {"type": "bar"},
+ 'data_set': [{'Run': nbTests,
+ 'Failed': nbFailures}]})
+
+ return test_data
+
+
def format_Tempest_for_dashboard(results):
"""
Post processing for the Tempest test case
diff --git a/utils/test/result_collection_api/dashboard/vsperf2Dashboard.py b/utils/test/result_collection_api/dashboard/vsperf2Dashboard.py
new file mode 100755
index 000000000..323d3915c
--- /dev/null
+++ b/utils/test/result_collection_api/dashboard/vsperf2Dashboard.py
@@ -0,0 +1,121 @@
+#!/usr/bin/python
+
+# Copyright 2015 Intel Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"),
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+def get_vsperf_cases():
+ """
+ get the list of the supported test cases
+ TODO: update the list when adding a new test case for the dashboard
+ """
+ return ["tput_ovsdpdk", "tput_ovs",
+ "b2b_ovsdpdk", "b2b_ovs",
+ "tput_mod_vlan_ovsdpdk", "tput_mod_vlan_ovs",
+ "cont_ovsdpdk", "cont_ovs",
+ "pvp_cont_ovsdpdkuser", "pvp_cont_ovsdpdkcuse", "pvp_cont_ovsvirtio",
+ "pvvp_cont_ovsdpdkuser", "pvvp_cont_ovsdpdkcuse", "pvvp_cont_ovsvirtio",
+ "scalability_ovsdpdk", "scalability_ovs",
+ "pvp_tput_ovsdpdkuser", "pvp_tput_ovsdpdkcuse", "pvp_tput_ovsvirtio",
+ "pvp_b2b_ovsdpdkuser", "pvp_b2b_ovsdpdkcuse", "pvp_b2b_ovsvirtio",
+ "pvvp_tput_ovsdpdkuser", "pvvp_tput_ovsdpdkcuse", "pvvp_tput_ovsvirtio",
+ "pvvp_b2b_ovsdpdkuser", "pvvp_b2b_ovsdpdkcuse", "pvvp_b2b_ovsvirtio",
+ "cpu_load_ovsdpdk", "cpu_load_ovs",
+ "mem_load_ovsdpdk", "mem_load_ovs"]
+
+
+def check_vsperf_case_exist(case):
+ """
+ check if the testcase exists
+ if the test case is not defined or not declared in the list
+ return False
+ """
+ vsperf_cases = get_vsperf_cases()
+
+ if (case is None or case not in vsperf_cases):
+ return False
+ else:
+ return True
+
+
+def format_vsperf_for_dashboard(case, results):
+ """
+ generic method calling the method corresponding to the test case
+ check that the testcase is properly declared first
+ then build the call to the specific method
+ """
+ if check_vsperf_case_exist(case):
+ res = format_common_for_dashboard(case, results)
+ else:
+ res = []
+        print "Test case not declared"
+ return res
+
+
+def format_common_for_dashboard(case, results):
+ """
+ Common post processing
+ """
+ test_data_description = case + " results for Dashboard"
+ test_data = [{'description': test_data_description}]
+
+ graph_name = ''
+ if "b2b" in case:
+ graph_name = "B2B frames"
+ else:
+ graph_name = "Rx frames per second"
+
+ # Graph 1: Rx fps = f(time)
+ # ********************************
+ new_element = []
+ for data in results:
+ new_element.append({'x': data['creation_date'],
+ 'y1': data['details']['64'],
+ 'y2': data['details']['128'],
+ 'y3': data['details']['512'],
+ 'y4': data['details']['1024'],
+ 'y5': data['details']['1518']})
+
+ test_data.append({'name': graph_name,
+ 'info': {'type': "graph",
+ 'xlabel': 'time',
+ 'y1label': 'frame size 64B',
+ 'y2label': 'frame size 128B',
+ 'y3label': 'frame size 512B',
+ 'y4label': 'frame size 1024B',
+ 'y5label': 'frame size 1518B'},
+ 'data_set': new_element})
+
+ return test_data
+
+
+
+
+############################ For local test ################################
+import os
+
+def _test():
+ ans = [{'creation_date': '2015-09-12', 'project_name': 'vsperf', 'version': 'ovs_master', 'pod_name': 'pod1-vsperf', 'case_name': 'tput_ovsdpdk', 'installer': 'build_sie', 'details': {'64': '26.804', '1024': '1097.284', '512': '178.137', '1518': '12635.860', '128': '100.564'}},
+ {'creation_date': '2015-09-33', 'project_name': 'vsperf', 'version': 'ovs_master', 'pod_name': 'pod1-vsperf', 'case_name': 'tput_ovsdpdk', 'installer': 'build_sie', 'details': {'64': '16.804', '1024': '1087.284', '512': '168.137', '1518': '12625.860', '128': '99.564'}}]
+
+ result = format_vsperf_for_dashboard("pvp_cont_ovsdpdkcuse", ans)
+ print result
+
+ result = format_vsperf_for_dashboard("b2b_ovsdpdk", ans)
+ print result
+
+ result = format_vsperf_for_dashboard("non_existing", ans)
+ print result
+
+if __name__ == '__main__':
+ _test()
diff --git a/utils/test/result_collection_api/resources/handlers.py b/utils/test/result_collection_api/resources/handlers.py
index 85c6172a5..be08c9791 100644
--- a/utils/test/result_collection_api/resources/handlers.py
+++ b/utils/test/result_collection_api/resources/handlers.py
@@ -719,6 +719,21 @@ class DashboardHandler(GenericApiHandler):
"error:Project name missing")
elif check_dashboard_ready_project(project_arg, "./dashboard"):
res = []
+
+ if case_arg is None:
+ raise HTTPError(
+ HTTP_NOT_FOUND,
+ "error:Test case missing for project " + project_arg)
+
+ # special case of status for project
+ if case_arg == "status":
+ del get_request["case_name"]
+ # retention time to be agreed
+ # last five days by default?
+ # TODO move to DB
+ period = datetime.now() - timedelta(days=5)
+ get_request["creation_date"] = {"$gte": period}
+
# fetching results
cursor = self.db.test_results.find(get_request)
while (yield cursor.fetch_next):
@@ -726,11 +741,7 @@ class DashboardHandler(GenericApiHandler):
cursor.next_object())
res.append(test_result.format_http())
- if case_arg is None:
- raise HTTPError(
- HTTP_NOT_FOUND,
- "error:Test case missing for project " + project_arg)
- elif check_dashboard_ready_case(project_arg, case_arg):
+ if check_dashboard_ready_case(project_arg, case_arg):
dashboard = get_dashboard_result(project_arg, case_arg, res)
else:
raise HTTPError(