summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--docs/etc/conf.py13
-rwxr-xr-xjjb/fuel/fuel-build.sh7
-rwxr-xr-xjjb/fuel/fuel-deploy.sh5
-rwxr-xr-xjjb/fuel/fuel-download-artifact.sh9
-rwxr-xr-xjjb/fuel/fuel-upload-artifact.sh6
-rw-r--r--jjb/fuel/fuel.yml89
-rw-r--r--jjb/functest/functest-docker.sh26
-rw-r--r--jjb/functest/functest.yml12
-rw-r--r--jjb/genesis/genesis-opensteak.yml219
-rwxr-xr-xjjb/kvmfornfv/kvmfornfv-build.sh9
-rw-r--r--jjb/kvmfornfv/kvmfornfv.yml57
-rwxr-xr-xutils/docu-build-new.sh19
-rwxr-xr-xutils/fetch_os_creds.sh10
-rwxr-xr-xutils/jenkins-jnlp-connect.sh6
-rw-r--r--utils/test/result_collection_api/dashboard/functest2Dashboard.py109
-rw-r--r--utils/test/result_collection_api/resources/handlers.py21
16 files changed, 214 insertions, 403 deletions
diff --git a/docs/etc/conf.py b/docs/etc/conf.py
index 486983f19..671965378 100644
--- a/docs/etc/conf.py
+++ b/docs/etc/conf.py
@@ -2,16 +2,9 @@ import datetime
import sys
import os
-try:
- __import__('imp').find_module('sphinx.ext.numfig')
- extensions = ['sphinx.ext.numfig']
-except ImportError:
- # 'pip install sphinx_numfig'
- extensions = ['sphinx_numfig']
+needs_sphinx = '1.3'
-# numfig:
-number_figures = True
-figure_caption_prefix = "Fig."
+numfig = True
source_suffix = '.rst'
master_doc = 'index'
@@ -31,4 +24,4 @@ latex_elements = {
project = u'Copper: Virtual Infrastructure Deployment Policies'
copyright = u'%s, OPNFV' % datetime.date.today().year
version = u'1.0.0'
-release = u'1.0.0' \ No newline at end of file
+release = u'1.0.0'
diff --git a/jjb/fuel/fuel-build.sh b/jjb/fuel/fuel-build.sh
index 178a50c68..cffd8673b 100755
--- a/jjb/fuel/fuel-build.sh
+++ b/jjb/fuel/fuel-build.sh
@@ -12,7 +12,12 @@ echo
[[ -d $CACHE_DIRECTORY ]] || mkdir -p $CACHE_DIRECTORY
# set OPNFV_ARTIFACT_VERSION
-export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
+if [[ "$JOB_NAME" =~ "merge" ]]; then
+ echo "Building Fuel ISO for a merged change"
+ export OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
+else
+ export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
+fi
# start the build
cd $WORKSPACE/$INSTALLER/ci
diff --git a/jjb/fuel/fuel-deploy.sh b/jjb/fuel/fuel-deploy.sh
index bb0e2b15f..2ec519597 100755
--- a/jjb/fuel/fuel-deploy.sh
+++ b/jjb/fuel/fuel-deploy.sh
@@ -18,8 +18,8 @@ chmod a+x $HOME
chmod a+x $TMPDIR
# set CONFDIR, BRIDGE
-export CONFDIR=$WORKSPACE/fuel/deploy/templates/hardware_environment/conf/linux_foundation_lab/pod2
-export BRIDGE=pxebr
+CONFDIR=$WORKSPACE/fuel/deploy/templates/hardware_environment/conf/linux_foundation_lab/pod2
+BRIDGE=pxebr
# clone genesis repo and checkout the SR1 tag
echo "Cloning genesis repo"
@@ -42,6 +42,7 @@ echo
# start the deployment
echo "Issuing command"
echo "sudo $WORKSPACE/fuel/ci/deploy.sh -iso $WORKSPACE/opnfv.iso -dea $CONFDIR/dea.yaml -dha $CONFDIR/dha.yaml -s $TMPDIR -b $BRIDGE -nh"
+
sudo $WORKSPACE/fuel/ci/deploy.sh -iso $WORKSPACE/opnfv.iso -dea $CONFDIR/dea.yaml -dha $CONFDIR/dha.yaml -s $TMPDIR -b $BRIDGE -nh
echo
diff --git a/jjb/fuel/fuel-download-artifact.sh b/jjb/fuel/fuel-download-artifact.sh
index 6eb1ba463..05dc05e05 100755
--- a/jjb/fuel/fuel-download-artifact.sh
+++ b/jjb/fuel/fuel-download-artifact.sh
@@ -3,8 +3,13 @@ set -o errexit
set -o nounset
set -o pipefail
-# get the latest.properties file in order to get info regarding latest artifact
-curl -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
+if [[ "$JOB_NAME" =~ "merge" ]]; then
+ # get the properties file for the Fuel ISO built for a merged change
+ curl -s -o $WORKSPACE/latest.properties http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties
+else
+ # get the latest.properties file in order to get info regarding latest artifact
+ curl -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
+fi
# check if we got the file
[[ -f latest.properties ]] || exit 1
diff --git a/jjb/fuel/fuel-upload-artifact.sh b/jjb/fuel/fuel-upload-artifact.sh
index 3b700c649..2783f2cd0 100755
--- a/jjb/fuel/fuel-upload-artifact.sh
+++ b/jjb/fuel/fuel-upload-artifact.sh
@@ -14,7 +14,11 @@ source $WORKSPACE/opnfv.properties
# upload artifact and additional files to google storage
gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1
gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
-gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
+if [[ "$JOB_NAME" =~ "daily" ]]; then
+ gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
+elif [[ "$JOB_NAME" =~ "merge" ]]; then
+ echo "Uploaded Fuel ISO for a merged change"
+fi
echo
echo "--------------------------------------------------------"
diff --git a/jjb/fuel/fuel.yml b/jjb/fuel/fuel.yml
index 1f53454e3..e809592fc 100644
--- a/jjb/fuel/fuel.yml
+++ b/jjb/fuel/fuel.yml
@@ -11,7 +11,6 @@
jobs:
- 'fuel-verify-build-{stream}'
- - 'fuel-verify-virtual-deploy-{stream}'
- 'fuel-merge-build-{stream}'
- 'fuel-merge-virtual-deploy-{stream}'
- 'fuel-daily-{stream}'
@@ -92,78 +91,8 @@
builders:
- shell:
!include-raw ./fuel-build.sh
-# - shell:
-# !include-raw ./fuel-upload-artifact.sh
-# - shell:
-# !include-raw ./fuel-workspace-cleanup.sh
-
-- job-template:
- name: 'fuel-verify-virtual-deploy-{stream}'
-
- project-type: freestyle
-
- node: ericsson-build
-
- concurrent: true
-
- properties:
- - throttle:
- enabled: true
- max-total: 2
- max-per-node: 1
-
- logrotate:
- daysToKeep: 30
- numToKeep: 10
- artifactDaysToKeep: -1
- artifactNumToKeep: -1
-
- parameters:
- - project-parameter:
- project: '{project}'
- - gerrit-parameter:
- branch: '{branch}'
- - fuel-parameter:
- installer: '{installer}'
- gs-pathname: '{gs-pathname}'
-
- scm:
- - gerrit-trigger-scm:
- credentials-id: '{ssh-credentials}'
- refspec: '$GERRIT_REFSPEC'
- choosing-strategy: 'gerrit'
-
- wrappers:
- - ssh-agent-credentials:
- user: '{ssh-credentials}'
-
- triggers:
- - gerrit:
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- dependency-jobs: 'fuel-verify-build-{stream}'
-
- builders:
-# - shell:
-# !include-raw ./fuel-download-artifact.sh
- shell:
- !include-raw ./fuel-virtual-deploy.sh
-# - shell:
-# !include-raw ./fuel-workspace-cleanup.sh
+ !include-raw ./fuel-workspace-cleanup.sh
- job-template:
name: 'fuel-merge-build-{stream}'
@@ -222,10 +151,10 @@
builders:
- shell:
!include-raw ./fuel-build.sh
-# - shell:
-# !include-raw ./fuel-upload-artifact.sh
-# - shell:
-# !include-raw ./fuel-workspace-cleanup.sh
+ - shell:
+ !include-raw ./fuel-upload-artifact.sh
+ - shell:
+ !include-raw ./fuel-workspace-cleanup.sh
- job-template:
name: 'fuel-merge-virtual-deploy-{stream}'
@@ -281,12 +210,12 @@
dependency-jobs: 'fuel-merge-build-{stream}'
builders:
-# - shell:
-# !include-raw ./fuel-download-artifact.sh
+ - shell:
+ !include-raw ./fuel-download-artifact.sh
- shell:
!include-raw ./fuel-virtual-deploy.sh
-# - shell:
-# !include-raw ./fuel-workspace-cleanup.sh
+ - shell:
+ !include-raw ./fuel-workspace-cleanup.sh
- job-template:
name: 'fuel-daily-{stream}'
diff --git a/jjb/functest/functest-docker.sh b/jjb/functest/functest-docker.sh
index 6f752c221..c73bcf99a 100644
--- a/jjb/functest/functest-docker.sh
+++ b/jjb/functest/functest-docker.sh
@@ -15,7 +15,8 @@ DOCKER_IMAGE_NAME="opnfv/functest"
cd $WORKSPACE
git clone https://gerrit.opnfv.org/gerrit/releng
-DOCKER_TAG=$($WORKSPACE/releng/utils/calculate_version.sh -t docker -n $DOCKER_IMAGE_NAME)
+DOCKER_TAG=$($WORKSPACE/releng/utils/calculate_version.sh -t docker \
+ -n $DOCKER_IMAGE_NAME)
ret_val=$?
if [ $ret_val -ne 0 ]; then
@@ -25,22 +26,29 @@ else
echo "Tag version to be build and pushed: $DOCKER_TAG"
fi
-# Remove previous running containers
-echo "Removing existing $DOCKER_IMAGE_NAME containers..."
+
+# Remove previous running containers if exist
if [[ ! -z $(docker ps -a | grep $DOCKER_IMAGE_NAME) ]]; then
- docker ps | grep $DOCKER_IMAGE_NAME | awk '{{print $1}}' | xargs docker stop &>/dev/null
- docker ps -a | grep $DOCKER_IMAGE_NAME | awk '{{print $1}}' | xargs docker rm &>/dev/null
+ echo "Removing existing $DOCKER_IMAGE_NAME containers..."
+ docker ps | grep $DOCKER_IMAGE_NAME | awk '{{print $1}}' | xargs docker stop
+ docker ps -a | grep $DOCKER_IMAGE_NAME | awk '{{print $1}}' | xargs docker rm
fi
-# Remove existing images
-echo "Removing existing $DOCKER_IMAGE_NAME images..."
+
+# Remove existing images if exist
if [[ ! -z $(docker images | grep $DOCKER_IMAGE_NAME) ]]; then
- docker images | grep $DOCKER_IMAGE_NAME | awk '{{print $3}}' | xargs docker rmi &>/dev/null
+ echo "Docker images to remove:"
+ docker images | head -1 && docker images | grep $DOCKER_IMAGE_NAME
+ image_tags=($(docker images | grep $DOCKER_IMAGE_NAME | awk '{{print $2}}'))
+ for tag in "${{image_tags[@]}}"; do
+ echo "Removing docker image $DOCKER_IMAGE_NAME:$tag..."
+ docker rmi $DOCKER_IMAGE_NAME:$tag
+ done
fi
# Start the build
-echo "Building of $DOCKER_IMAGE_NAME:$DOCKER_TAG..."
+echo "Building docker image: $DOCKER_IMAGE_NAME:$DOCKER_TAG..."
cd $WORKSPACE/docker
docker build -t $DOCKER_IMAGE_NAME:$DOCKER_TAG .
echo "Creating tag 'latest'..."
diff --git a/jjb/functest/functest.yml b/jjb/functest/functest.yml
index 598bcf2a5..4df779d8c 100644
--- a/jjb/functest/functest.yml
+++ b/jjb/functest/functest.yml
@@ -226,10 +226,10 @@
artifactNumToKeep: -1
builders:
+ - 'functest-cleanup'
- 'set-functest-env'
- 'functest-all'
- 'functest-store-results'
- - 'functest-cleanup'
- job-template:
name: functest-{installer}-{stream}
@@ -271,10 +271,10 @@
artifactNumToKeep: -1
builders:
+ - 'functest-cleanup'
- 'set-functest-env'
- 'functest-all'
- 'functest-store-results'
- - 'functest-cleanup'
- job-template:
name: functest-vims-{installer}-{stream}
@@ -541,8 +541,7 @@
echo "Functest: Start Docker and prepare environment"
envs="INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP}"
- docker ps -a | grep opnfv/functest | awk '{print $1}' | xargs docker rm -f &>/dev/null
- docker pull opnfv/functest
+ docker pull opnfv/functest:latest_stable
echo "Functest: Running docker run command: docker run -i -e $envs opnfv/functest /bin/bash &"
docker run -i -e $envs opnfv/functest /bin/bash &
docker ps -a
@@ -647,8 +646,7 @@
set +e
# cleanup: remove any docker containers leftovers
- echo "Removing the docker container..."
+ echo "Removing existing Functest Docker containers..."
docker rm -f $(docker ps | grep opnfv/functest | awk '{print $1}')
- echo "Removing the docker image..."
+ echo "Removing existing Functest Docker image..."
docker rmi -f $(docker images | grep opnfv/functest | awk '{print $3}')
-
diff --git a/jjb/genesis/genesis-opensteak.yml b/jjb/genesis/genesis-opensteak.yml
deleted file mode 100644
index f2322354f..000000000
--- a/jjb/genesis/genesis-opensteak.yml
+++ /dev/null
@@ -1,219 +0,0 @@
-# this is the job configuration for bgs
-- project:
-
- name: genesis-opensteak
-
- installer:
- - opensteak
- jobs:
- - 'genesis-opensteak-verify'
- - 'genesis-opensteak-merge'
- - 'genesis-opensteak-daily-{stream}'
-
- # stream: branch with - in place of / (eg. stable-helium)
- # branch: branch (eg. stable/helium)
- stream:
- - master:
- branch: 'master'
-
- project: 'genesis'
-
-########################
-# job templates
-########################
-
-- job-template:
- name: 'genesis-opensteak-verify'
-
- project-type: freestyle
-
- node: ericsson-build
-
- logrotate:
- daysToKeep: 30
- numToKeep: 10
- artifactDaysToKeep: -1
- artifactNumToKeep: -1
-
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/genesis
- - project-parameter:
- project: '{project}'
- - gerrit-parameter:
- branch: 'master'
-
- scm:
- - gerrit-trigger-scm:
- credentials-id: '{ssh-credentials}'
- refspec: '$GERRIT_REFSPEC'
- choosing-strategy: 'gerrit'
-
- wrappers:
- - ssh-agent-credentials:
- user: '{ssh-credentials}'
-
- triggers:
- - gerrit:
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'genesis'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/master'
- file-paths:
- - compare-type: ANT
- pattern: 'common/**'
- - compare-type: ANT
- pattern: 'opensteak/**'
-
-
- builders:
- - 'opensteak-verify'
-
-- job-template:
- name: 'genesis-opensteak-merge'
-
- # builder-merge job to run JJB update
- #
- # This job's purpose is to update all the JJB
-
- project-type: freestyle
-
- node: ericsson-build
-
- logrotate:
- daysToKeep: 30
- numToKeep: 40
- artifactDaysToKeep: -1
- artifactNumToKeep: 5
-
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/genesis
- - project-parameter:
- project: '{project}'
- - gerrit-parameter:
- branch: 'master'
-
- scm:
- - gerrit-trigger-scm:
- credentials-id: '{ssh-credentials}'
- refspec: ''
- choosing-strategy: 'default'
-
- wrappers:
- - ssh-agent-credentials:
- user: '{ssh-credentials}'
-
- triggers:
- - gerrit:
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'genesis'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/master'
- file-paths:
- - compare-type: ANT
- pattern: 'common/**'
- - compare-type: ANT
- pattern: 'opensteak/**'
-
- builders:
- - 'opensteak-merge'
-
-- job-template:
- name: 'genesis-opensteak-daily-{stream}'
-
- project-type: freestyle
-
- node: ericsson-build
-
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: GS_URL
- default: 'artifacts.opnfv.org/genesis/opensteak'
- description: "URL to Google Storage."
- - string:
- name: INSTALLER
- default: 'opensteak'
- description: "Installer to use."
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/genesis
- - string:
- name: GERRIT_BRANCH
- default: origin/master
- description: "Branch to build, deploy and test."
- - string:
- name: GERRIT_REFSPEC
- default: refs/heads/master
- description: "Refspec to retrieve."
-
- scm:
- - git:
- skip-tag: true
- url: $GIT_BASE
- branches:
- - $GERRIT_BRANCH
- refspec: $GERRIT_REFSPEC
-
- triggers:
- - pollscm: '@midnight'
-
- logrotate:
- daysToKeep: 30
- numToKeep: 10
- artifactDaysToKeep: -1
- artifactNumToKeep: -1
-
- builders:
- - 'opensteak-daily-master'
-
-- builder:
- name: opensteak-verify
- builders:
- - shell: |
- #!/bin/bash
- echo "Hello World!"
-
-- builder:
- name: opensteak-merge
- builders:
- - shell: |
- #!/bin/bash
- echo "Hello World!"
-
-- builder:
- name: opensteak-daily-master
- builders:
- - shell: |
- #!/bin/bash
- echo "Hello World!"
diff --git a/jjb/kvmfornfv/kvmfornfv-build.sh b/jjb/kvmfornfv/kvmfornfv-build.sh
new file mode 100755
index 000000000..4e00a9d87
--- /dev/null
+++ b/jjb/kvmfornfv/kvmfornfv-build.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+# build output directory
+OUTPUT_DIR=$WORKSPACE/build_output
+mkdir -p $OUTPUT_DIR
+
+# start the build
+cd $WORKSPACE
+./ci/build.sh $OUTPUT_DIR
diff --git a/jjb/kvmfornfv/kvmfornfv.yml b/jjb/kvmfornfv/kvmfornfv.yml
index 17f7cfefa..345edcaaf 100644
--- a/jjb/kvmfornfv/kvmfornfv.yml
+++ b/jjb/kvmfornfv/kvmfornfv.yml
@@ -1,9 +1,8 @@
- project:
name: kvmfornfv
jobs:
- - 'kvmfornfv-verify'
- - 'kvmfornfv-merge'
- - 'kvmfornfv-daily-{stream}'
+ - 'kvmfornfv-verify-{stream}'
+ - 'kvmfornfv-merge-{stream}'
# stream: branch with - in place of / (eg. stable-arno)
# branch: branch (eg. stable/arno)
@@ -14,7 +13,7 @@
project: 'kvmfornfv'
- job-template:
- name: 'kvmfornfv-verify'
+ name: 'kvmfornfv-verify-{stream}'
node: ericsson-build
@@ -67,16 +66,16 @@
builders:
- shell:
- echo "Hello World"
+ !include-raw ./kvmfornfv-build.sh
- job-template:
- name: 'kvmfornfv-merge'
+ name: 'kvmfornfv-merge-{stream}'
# builder-merge job to run JJB update
#
# This job's purpose is to update all the JJB
- node: master
+ node: ericsson-build
project-type: freestyle
@@ -121,46 +120,4 @@
builders:
- shell:
- echo "Hello World"
-
-
-- job-template:
- name: 'kvmfornfv-daily-{stream}'
-
- # Job template for daily builders
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: master
-
- disabled: true
-
- project-type: freestyle
-
- logrotate:
- daysToKeep: '{build-days-to-keep}'
- numToKeep: '{build-num-to-keep}'
- artifactDaysToKeep: '{build-artifact-days-to-keep}'
- artifactNumToKeep: '{build-artifact-num-to-keep}'
-
- parameters:
- - project-parameter:
- project: '{project}'
-
- scm:
- - git-scm:
- credentials-id: '{ssh-credentials}'
- refspec: ''
- branch: '{branch}'
-
- wrappers:
- - ssh-agent-credentials:
- user: '{ssh-credentials}'
-
- triggers:
- - timed: 'H H * * *'
-
- builders:
- - shell:
- echo "Hello World"
+ !include-raw ./kvmfornfv-build.sh
diff --git a/utils/docu-build-new.sh b/utils/docu-build-new.sh
index 00d046479..67a62e381 100755
--- a/utils/docu-build-new.sh
+++ b/utils/docu-build-new.sh
@@ -29,17 +29,22 @@ while read -d $'\n'; do
done < <(find docs/ -name 'index.rst' -printf '%h\n' | sort -u )
for dir in "${{directories[@]}}"; do
+ _name="${{dir##*/}}"
+ _build="${{dir}}/build"
+ _output="docs/output/${{_name}}"
echo
- echo "#############################"
- echo "Building DOCS in ${{dir##*/}}"
- echo "#############################"
+ echo "#################${{_name//?/#}}"
+ echo "Building DOCS in ${{_name}}"
+ echo "#################${{_name//?/#}}"
echo
- if [[ ! -d docs/output/"${{dir##*/}}/" ]]; then
- mkdir -p docs/output/"${{dir##*/}}/"
- fi
+ mkdir -p "${{_output}}"
+
+ sphinx-build -b html -E -c docs/etc "${{dir}}" "${{_output}}"
- sphinx-build -b html -E -c docs/etc/ ""$dir"/" docs/output/"${{dir##*/}}/"
+ sphinx-build -b latex -E -c docs/etc "${{dir}}" "${{_build}}"
+ make -C "${{_build}}" LATEXOPTS='--interaction=nonstopmode' all-pdf
+ mv "${{_build}}"/*.pdf "${{_output}}"
done
diff --git a/utils/fetch_os_creds.sh b/utils/fetch_os_creds.sh
index cefc85761..7a5f8121a 100755
--- a/utils/fetch_os_creds.sh
+++ b/utils/fetch_os_creds.sh
@@ -126,14 +126,12 @@ elif [ "$installer_type" == "foreman" ]; then
| grep $admin_ip | sed 's/ /\n/g' | grep ^http | head -1) &> /dev/null
elif [ "$installer_type" == "compass" ]; then
- #ip_compass="10.1.0.12"
verify_connectivity $installer_ip
-
- # controller_ip='10.1.0.222'
- controller_ip=$(sshpass -p'root' ssh 2>/dev/null -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no root@10.1.0.12 \
- 'mysql -ucompass -pcompass -Dcompass -e"select package_config from cluster;"' \
- | awk -F"," '{for(i=1;i<NF;i++)if($i~/\"ha_proxy\": {\"vip\":/)print $i}' \
+ controller_ip=$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${installer_ip} \
+ 'mysql -ucompass -pcompass -Dcompass -e"select * from cluster;"' \
+ | awk -F"," '{for(i=1;i<NF;i++)if($i~/\"host1\"/) {print $(i+1);break;}}' \
| grep -oP "\d+.\d+.\d+.\d+")
+
if [ -z $controller_ip ]; then
error "The controller $controller_ip is not up. Please check that the POD is correctly deployed."
fi
diff --git a/utils/jenkins-jnlp-connect.sh b/utils/jenkins-jnlp-connect.sh
index 03e47b8b2..d263b198a 100755
--- a/utils/jenkins-jnlp-connect.sh
+++ b/utils/jenkins-jnlp-connect.sh
@@ -78,7 +78,7 @@ makemonit () {
echo "Writing the following as monit config:"
cat << EOF | tee $monitconfdir/jenkins
check process jenkins with pidfile /var/run/$jenkinsuser/jenkins_jnlp_pid
-start program = "/bin/bash -c 'cd $dir; export started_monit=true; $0 $@'" as uid "$jenkinsuser" and gid "$jenkinsuser"
+start program = "/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'"
stop program = "/bin/bash -c '/bin/kill \$(/bin/cat /var/run/$jenkinsuser/jenkins_jnlp_pid)'"
EOF
}
@@ -87,7 +87,7 @@ if [[ -f $monitconfdir/jenkins ]]; then
#test for diff
if [[ "$(diff $monitconfdir/jenkins <(echo "\
check process jenkins with pidfile /var/run/$jenkinsuser/jenkins_jnlp_pid
-start program = \"/bin/bash -c 'cd $dir; export started_monit=true; $0 $@'\" as uid \"$jenkinsuser\" and gid \"$jenkinsuser\"
+start program = \"/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'\"
stop program = \" /bin/bash -c '/bin/kill \$(/bin/cat /var/run/$jenkinsuser/jenkins_jnlp_pid)'\"\
") )" ]]; then
echo "Updating monit config..."
@@ -169,7 +169,7 @@ do
s ) slave_secret="$OPTARG";;
h ) usage; exit;;
t ) started_monit=true
- skip_monit=true
+ skip_monit=true
run_in_foreground=true ;;
f ) test_firewall ;;
\? ) echo "Unknown option: -$OPTARG" >&2; exit 1;;
diff --git a/utils/test/result_collection_api/dashboard/functest2Dashboard.py b/utils/test/result_collection_api/dashboard/functest2Dashboard.py
index 688f0c28c..bfb7c8729 100644
--- a/utils/test/result_collection_api/dashboard/functest2Dashboard.py
+++ b/utils/test/result_collection_api/dashboard/functest2Dashboard.py
@@ -21,7 +21,7 @@ def get_functest_cases():
get the list of the supported test cases
TODO: update the list when adding a new test case for the dashboard
"""
- return ["vPing", "Tempest", "odl", "Rally"]
+ return ["status", "vPing", "vIMS", "Tempest", "odl", "Rally"]
def format_functest_for_dashboard(case, results):
@@ -53,6 +53,113 @@ def check_functest_case_exist(case):
return True
+def format_status_for_dashboard(results):
+ test_data = [{'description': 'Functest status'}]
+
+ # define magic equation for the status....
+ # 5 suites: vPing, odl, Tempest, vIMS, Rally
+ # Which overall KPI make sense...
+
+ # TODO to be done and discussed
+ testcases = get_functest_cases()
+ test_data.append({'nb test suite(s) run': len(testcases)-1})
+ # test_data.append({'nb test suite(s) failed':1})
+ # test_data.append({'test suite run': ['vPing', 'tempest', 'vIMS' ]})
+ # test_data.append({'average Openstack Tempest failure rate (%)': 10})
+ # test_data.append({'average odl failure rate (%)': 10})
+
+ return test_data
+
+
+def format_vIMS_for_dashboard(results):
+ """
+ Post processing for the vIMS test case
+ """
+ test_data = [{'description': 'vIMS results for Dashboard'}]
+
+ # Graph 1: (duration_deployment_orchestrator,
+ # duration_deployment_vnf,
+ # duration_test) = f(time)
+ # ********************************
+ new_element = []
+
+ for data in results:
+ new_element.append({'x': data['creation_date'],
+ 'y1': data['details']['orchestrator']['duration'],
+ 'y2': data['details']['vIMS']['duration'],
+ 'y3': data['details']['sig_test']['duration']})
+
+ test_data.append({'name': "Tempest nb tests/nb failures",
+ 'info': {'type': "graph",
+ 'xlabel': 'time',
+ 'y1label': 'orchestation deployment duration',
+ 'y2label': 'vIMS deployment duration',
+ 'y3label': 'vIMS test duration'},
+ 'data_set': new_element})
+
+ # Graph 2: (Nb test, nb failure, nb skipped)=f(time)
+ # **************************************************
+ new_element = []
+
+ for data in results:
+ # Retrieve all the tests
+ nbTests = 0
+ nbFailures = 0
+ nbSkipped = 0
+ vIMS_test = data['details']['sig_test']['result']
+
+ for data_test in vIMS_test:
+ # Calculate nb of tests run and nb of tests failed
+ # vIMS_results = get_vIMSresults(vIMS_test)
+ # print vIMS_results
+ if data_test['result'] == "Passed":
+ nbTests += 1
+ elif data_test['result'] == "Failed":
+ nbFailures += 1
+ elif data_test['result'] == "Skipped":
+ nbSkipped += 1
+
+ new_element.append({'x': data['creation_date'],
+ 'y1': nbTests,
+ 'y2': nbFailures,
+ 'y3': nbSkipped})
+
+ test_data.append({'name': "vIMS nb tests passed/failed/skipped",
+ 'info': {'type': "graph",
+ 'xlabel': 'time',
+ 'y1label': 'Number of tests passed',
+ 'y2label': 'Number of tests failed',
+ 'y3label': 'Number of tests skipped'},
+ 'data_set': new_element})
+
+ # Graph 3: bar graph Summ(nb tests run), Sum (nb tests failed)
+ # ********************************************************
+ nbTests = 0
+ nbFailures = 0
+
+ for data in results:
+ vIMS_test = data['details']['sig_test']['result']
+
+ for data_test in vIMS_test:
+ nbTestsOK = 0
+ nbTestsKO = 0
+
+ if data_test['result'] == "Passed":
+ nbTestsOK += 1
+ elif data_test['result'] == "Failed":
+ nbTestsKO += 1
+
+ nbTests += nbTestsOK + nbTestsKO
+ nbFailures += nbTestsKO
+
+ test_data.append({'name': "Total number of tests run/failure tests",
+ 'info': {"type": "bar"},
+ 'data_set': [{'Run': nbTests,
+ 'Failed': nbFailures}]})
+
+ return test_data
+
+
def format_Tempest_for_dashboard(results):
"""
Post processing for the Tempest test case
diff --git a/utils/test/result_collection_api/resources/handlers.py b/utils/test/result_collection_api/resources/handlers.py
index 85c6172a5..be08c9791 100644
--- a/utils/test/result_collection_api/resources/handlers.py
+++ b/utils/test/result_collection_api/resources/handlers.py
@@ -719,6 +719,21 @@ class DashboardHandler(GenericApiHandler):
"error:Project name missing")
elif check_dashboard_ready_project(project_arg, "./dashboard"):
res = []
+
+ if case_arg is None:
+ raise HTTPError(
+ HTTP_NOT_FOUND,
+ "error:Test case missing for project " + project_arg)
+
+ # special case of status for project
+ if case_arg == "status":
+ del get_request["case_name"]
+ # retention time to be agreed
+ # last five days by default?
+ # TODO move to DB
+ period = datetime.now() - timedelta(days=5)
+ get_request["creation_date"] = {"$gte": period}
+
# fetching results
cursor = self.db.test_results.find(get_request)
while (yield cursor.fetch_next):
@@ -726,11 +741,7 @@ class DashboardHandler(GenericApiHandler):
cursor.next_object())
res.append(test_result.format_http())
- if case_arg is None:
- raise HTTPError(
- HTTP_NOT_FOUND,
- "error:Test case missing for project " + project_arg)
- elif check_dashboard_ready_case(project_arg, case_arg):
+ if check_dashboard_ready_case(project_arg, case_arg):
dashboard = get_dashboard_result(project_arg, case_arg, res)
else:
raise HTTPError(