-rw-r--r--  jjb/armband/armband-ci-jobs.yml | 179
-rwxr-xr-x  jjb/armband/armband-deploy.sh | 7
-rw-r--r--  jjb/fuel/fuel-ci-jobs.yml | 14
-rwxr-xr-x  jjb/fuel/fuel-deploy.sh | 4
-rw-r--r--  jjb/fuel/fuel-project-jobs.yml | 4
-rw-r--r--  jjb/functest/functest-ci-jobs.yml | 32
-rw-r--r--  jjb/infra/infra-daily-jobs.yml | 11
-rwxr-xr-x  jjb/infra/infra-provision.sh | 2
-rwxr-xr-x  jjb/kvmfornfv/kvmfornfv-upload-artifact.sh | 22
-rw-r--r--  jjb/opnfv/opnfv-docker.sh | 7
-rw-r--r--  jjb/opnfv/slave-params.yml | 62
-rw-r--r--  jjb/qtip/qtip-ci-jobs.yml | 85
-rw-r--r--  jjb/qtip/qtip-cleanup.sh | 30
-rw-r--r--  jjb/qtip/qtip-daily-ci.sh | 38
-rw-r--r--  jjb/releng-macros.yaml | 21
-rw-r--r--  jjb/yardstick/yardstick-ci-jobs.yml | 49
-rw-r--r--  prototypes/bifrost/README.md | 2
-rwxr-xr-x  prototypes/bifrost/scripts/destroy-env.sh (renamed from prototypes/bifrost/scripts/destroy_env.sh) | 10
-rwxr-xr-x  prototypes/bifrost/scripts/test-bifrost-deployment.sh | 22
-rw-r--r--  prototypes/puppet-infracloud/README.md | 23
-rw-r--r--  prototypes/puppet-infracloud/hiera/common.yaml | 3
-rw-r--r--  prototypes/puppet-infracloud/manifests/site.pp | 11
-rw-r--r--  prototypes/puppet-infracloud/modules/opnfv/manifests/compute.pp | 8
-rw-r--r--  prototypes/puppet-infracloud/modules/opnfv/manifests/controller.pp | 8
-rw-r--r--  utils/push-test-logs.sh | 10
-rw-r--r--  utils/test/reporting/functest/index.html | 9
-rw-r--r--  utils/test/reporting/yardstick/index.html | 9
-rw-r--r--  utils/test/reporting/yardstick/reportingConf.py | 2
-rw-r--r--  utils/test/scripts/backup-db.sh | 12
-rw-r--r--  utils/test/scripts/create_kibana_dashboards.py | 548
-rw-r--r--  utils/test/scripts/mongo_to_elasticsearch.py | 42
-rw-r--r--  utils/test/scripts/testcases.yaml | 129
32 files changed, 676 insertions(+), 739 deletions(-)
diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml
index 6ea73e12a..f100a46a8 100644
--- a/jjb/armband/armband-ci-jobs.yml
+++ b/jjb/armband/armband-ci-jobs.yml
@@ -17,23 +17,44 @@
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
#--------------------------------
-# brahmaputra
+# CI POD's
+#--------------------------------
+# colorado
#--------------------------------
pod:
- - arm-pod1:
+ - armband-baremetal:
+ slave-label: armband-baremetal
installer: fuel
<<: *colorado
- - arm-pod2:
+ - armband-virtual:
+ slave-label: armband-virtual
installer: fuel
<<: *colorado
#--------------------------------
# master
#--------------------------------
- pod:
- - arm-pod1:
+ - armband-baremetal:
+ slave-label: armband-baremetal
installer: fuel
<<: *master
+ - armband-virtual:
+ slave-label: armband-virtual
+ installer: fuel
+ <<: *master
+#--------------------------------
+# NONE-CI POD's
+#--------------------------------
+# colorado
+#--------------------------------
+ - arm-pod2:
+ slave-label: arm-pod2
+ installer: fuel
+ <<: *colorado
+#--------------------------------
+# master
+#--------------------------------
- arm-pod2:
+ slave-label: arm-pod2
installer: fuel
<<: *master
#--------------------------------
@@ -42,27 +63,27 @@
scenario:
# HA scenarios
- 'os-nosdn-nofeature-ha':
- auto-trigger-name: 'armband-{installer}-{scenario}-{pod}-{stream}-trigger'
+ auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
- 'os-odl_l2-nofeature-ha':
- auto-trigger-name: 'armband-{installer}-{scenario}-{pod}-{stream}-trigger'
+ auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
- 'os-odl_l3-nofeature-ha':
- auto-trigger-name: 'armband-{installer}-{scenario}-{pod}-{stream}-trigger'
+ auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
- 'os-odl_l2-bgpvpn-ha':
- auto-trigger-name: 'armband-{installer}-{scenario}-{pod}-{stream}-trigger'
+ auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
# NOHA scenarios
- 'os-odl_l2-nofeature-noha':
- auto-trigger-name: 'armband-{installer}-{scenario}-{pod}-{stream}-trigger'
+ auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
jobs:
- - 'armband-{installer}-{scenario}-{pod}-daily-{stream}'
- - 'armband-{installer}-deploy-{pod}-daily-{stream}'
+ - '{installer}-{scenario}-{pod}-daily-{stream}'
+ - '{installer}-deploy-{pod}-daily-{stream}'
########################
# job templates
########################
- job-template:
- name: 'armband-{installer}-{scenario}-{pod}-daily-{stream}'
+ name: '{installer}-{scenario}-{pod}-daily-{stream}'
concurrent: false
@@ -75,7 +96,7 @@
- build-blocker:
use-build-blocker: true
blocking-jobs:
- - 'armband-{installer}-os-.*?-{pod}-daily-.*'
+ - '{installer}-os-.*?-{pod}-daily-.*'
block-level: 'NODE'
wrappers:
@@ -89,7 +110,7 @@
- project-parameter:
project: '{project}'
- '{installer}-defaults'
- - '{pod}-defaults':
+ - '{slave-label}-defaults':
installer: '{installer}'
- string:
name: DEPLOY_SCENARIO
@@ -99,7 +120,7 @@
builders:
- trigger-builds:
- - project: 'armband-{installer}-deploy-{pod}-daily-{stream}'
+ - project: '{installer}-deploy-{pod}-daily-{stream}'
current-parameters: false
predefined-parameters:
DEPLOY_SCENARIO={scenario}
@@ -116,9 +137,20 @@
build-step-failure-threshold: 'never'
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'yardstick-{installer}-{pod}-daily-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
- job-template:
- name: 'armband-{installer}-deploy-{pod}-daily-{stream}'
+ name: '{installer}-deploy-{pod}-daily-{stream}'
concurrent: false
@@ -131,15 +163,15 @@
- build-blocker:
use-build-blocker: true
blocking-jobs:
- - 'armband-{installer}-deploy-{pod}-daily-{stream}'
- - 'armband-{installer}-deploy-generic-daily-.*'
+ - '{installer}-deploy-{pod}-daily-{stream}'
+ - '{installer}-deploy-generic-daily-.*'
block-level: 'NODE'
parameters:
- project-parameter:
project: '{project}'
- '{installer}-defaults'
- - '{pod}-defaults':
+ - '{slave-label}-defaults':
installer: '{installer}'
- string:
name: DEPLOY_SCENARIO
@@ -190,96 +222,141 @@
# trigger macros
########################
# CI PODs
-#----------------------------------------------------------
-# Enea Armband POD 1 Triggers running against master branch
-#----------------------------------------------------------
+#-----------------------------------------------------------------
+# Enea Armband CI Baremetal Triggers running against master branch
+#-----------------------------------------------------------------
- trigger:
- name: 'armband-fuel-os-odl_l2-nofeature-ha-arm-pod1-master-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 3 * * 1,4'
+ - timed: '0 3,15 * * 1'
- trigger:
- name: 'armband-fuel-os-nosdn-nofeature-ha-arm-pod1-master-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 15 * * 1,4'
+ - timed: '0 3,15 * * 2'
- trigger:
- name: 'armband-fuel-os-odl_l3-nofeature-ha-arm-pod1-master-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 3 * * 2,5'
+ - timed: '0 3,15 * * 3'
- trigger:
- name: 'armband-fuel-os-odl_l2-bgpvpn-ha-arm-pod1-master-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 15 * * 2,5'
+ - timed: '0 3,15 * * 4'
- trigger:
- name: 'armband-fuel-os-odl_l2-nofeature-noha-arm-pod1-master-trigger'
+ name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 3 * * 3,6'
+ - timed: '0 3,15 * * 5'
+#----------------------------------------------------------------------
+# Enea Armband CI Baremetal Triggers running against colorado branch
+#----------------------------------------------------------------------
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-colorado-trigger'
+ triggers:
+ - timed: '0 4,16 * * 1'
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-colorado-trigger'
+ triggers:
+ - timed: '0 4,16 * * 2'
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-colorado-trigger'
+ triggers:
+ - timed: '0 4,16 * * 3'
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-colorado-trigger'
+ triggers:
+ - timed: '0 4,16 * * 4'
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-colorado-trigger'
+ triggers:
+ - timed: '0 4,16 * * 5'
#---------------------------------------------------------------
-# Enea Armband POD 1 Triggers running against brahmaputra branch
+# Enea Armband CI Virtual Triggers running against master branch
#---------------------------------------------------------------
- trigger:
- name: 'armband-fuel-os-odl_l2-nofeature-ha-arm-pod1-colorado-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-armband-virtual-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-ha-armband-virtual-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-ha-armband-virtual-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-ha-armband-virtual-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-noha-armband-virtual-master-trigger'
+ triggers:
+ - timed: ''
+#--------------------------------------------------------------------
+# Enea Armband CI Virtual Triggers running against colorado branch
+#--------------------------------------------------------------------
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-ha-armband-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'armband-fuel-os-nosdn-nofeature-ha-arm-pod1-colorado-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-armband-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'armband-fuel-os-odl_l3-nofeature-ha-arm-pod1-colorado-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-armband-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'armband-fuel-os-odl_l2-bgpvpn-ha-arm-pod1-colorado-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-armband-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'armband-fuel-os-odl_l2-nofeature-noha-arm-pod1-colorado-trigger'
+ name: 'fuel-os-odl_l2-nofeature-noha-armband-virtual-colorado-trigger'
triggers:
- timed: ''
#----------------------------------------------------------
# Enea Armband POD 2 Triggers running against master branch
#----------------------------------------------------------
-# No triggers for master for now
- trigger:
- name: 'armband-fuel-os-odl_l2-nofeature-ha-arm-pod2-master-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-arm-pod2-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'armband-fuel-os-nosdn-nofeature-ha-arm-pod2-master-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-arm-pod2-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'armband-fuel-os-odl_l3-nofeature-ha-arm-pod2-master-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-arm-pod2-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'armband-fuel-os-odl_l2-bgpvpn-ha-arm-pod2-master-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod2-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'armband-fuel-os-odl_l2-nofeature-noha-arm-pod2-master-trigger'
+ name: 'fuel-os-odl_l2-nofeature-noha-arm-pod2-master-trigger'
triggers:
- timed: ''
#---------------------------------------------------------------
-# Enea Armband POD 2 Triggers running against brahmaputra branch
+# Enea Armband POD 2 Triggers running against colorado branch
#---------------------------------------------------------------
- trigger:
- name: 'armband-fuel-os-odl_l2-nofeature-ha-arm-pod2-colorado-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-arm-pod2-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'armband-fuel-os-nosdn-nofeature-ha-arm-pod2-colorado-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-arm-pod2-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'armband-fuel-os-odl_l3-nofeature-ha-arm-pod2-colorado-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-arm-pod2-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'armband-fuel-os-odl_l2-bgpvpn-ha-arm-pod2-colorado-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod2-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'armband-fuel-os-odl_l2-nofeature-noha-arm-pod2-colorado-trigger'
+ name: 'fuel-os-odl_l2-nofeature-noha-arm-pod2-colorado-trigger'
triggers:
- timed: ''
diff --git a/jjb/armband/armband-deploy.sh b/jjb/armband/armband-deploy.sh
index 901f845bf..fb4c1ead5 100755
--- a/jjb/armband/armband-deploy.sh
+++ b/jjb/armband/armband-deploy.sh
@@ -50,6 +50,13 @@ if [[ $LAB_CONFIG_URL =~ ^(git|ssh):// ]]; then
echo "cloning $LAB_CONFIG_URL"
git clone --quiet --branch ${GIT_BRANCH##origin/} $LAB_CONFIG_URL lab-config
LAB_CONFIG_URL=file://${WORKSPACE}/lab-config
+
+ # Source local_env if present, which contains POD-specific config
+ local_env="${WORKSPACE}/lab-config/labs/$LAB_NAME/$POD_NAME/fuel/config/local_env"
+ if [ -e $local_env ]; then
+ echo "-- Sourcing local environment file"
+ source $local_env
+ fi
fi
# releng wants us to use nothing else but opnfv.iso for now. We comply.
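For reference, a minimal sketch of a POD-specific local_env file that this hook could source. The variable names are assumptions (they mirror the DEFAULT_BRIDGE and DEPLOY_TIMEOUT parameters dropped from slave-params.yml later in this change), not actual Enea lab-config content:

    #!/bin/bash
    # hypothetical labs/<LAB_NAME>/<POD_NAME>/fuel/config/local_env
    # Deploy-time overrides for this POD, picked up by armband-deploy.sh via 'source'.
    export DEFAULT_BRIDGE='admin_br0,public_br0'  # assumed: PXE/admin bridge list for this POD
    export DEPLOY_TIMEOUT='360'                   # assumed: deployment timeout in minutes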
diff --git a/jjb/fuel/fuel-ci-jobs.yml b/jjb/fuel/fuel-ci-jobs.yml
index 0d31c99b4..056b2cc0f 100644
--- a/jjb/fuel/fuel-ci-jobs.yml
+++ b/jjb/fuel/fuel-ci-jobs.yml
@@ -50,6 +50,12 @@
- zte-pod3:
slave-label: zte-pod3
<<: *master
+ - zte-pod1:
+ slave-label: zte-pod1
+ <<: *colorado
+ - zte-pod3:
+ slave-label: zte-pod3
+ <<: *colorado
#--------------------------------
# scenarios
#--------------------------------
@@ -175,7 +181,7 @@
publishers:
- email:
- recipients: peter.barabas@ericsson.com
+ recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
- job-template:
name: 'fuel-deploy-{pod}-daily-{stream}'
@@ -232,7 +238,7 @@
publishers:
- email:
- recipients: jonas.bjurel@ericsson.com stefan.k.berg@ericsson.com peter.barabas@ericsson.com
+ recipients: jonas.bjurel@ericsson.com stefan.k.berg@ericsson.com peter.barabas@ericsson.com fzhadaev@mirantis.com
########################
# parameter macros
@@ -802,7 +808,7 @@
- trigger:
name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-daily-colorado-trigger'
triggers:
- - timed: ''
+ - timed: '0 2 * * *'
- trigger:
name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-daily-colorado-trigger'
triggers:
@@ -979,7 +985,7 @@
- trigger:
name: 'fuel-os-nosdn-kvm-ha-zte-pod3-daily-colorado-trigger'
triggers:
- - timed: ''
+ - timed: '0 18 * * *'
- trigger:
name: 'fuel-os-nosdn-ovs-ha-zte-pod3-daily-colorado-trigger'
triggers:
diff --git a/jjb/fuel/fuel-deploy.sh b/jjb/fuel/fuel-deploy.sh
index 730f0d116..136aac861 100755
--- a/jjb/fuel/fuel-deploy.sh
+++ b/jjb/fuel/fuel-deploy.sh
@@ -121,7 +121,7 @@ export FUEL_MASTER_IP=10.20.0.2
export TACKER_SCRIPT_URL="https://git.opnfv.org/cgit/fuel/plain/prototypes/sfc_tacker/poc.tacker-up.sh?h=${GIT_BRANCH#*/}"
export CONTROLLER_NODE_IP=$(sshpass -pr00tme /usr/bin/ssh -o UserKnownHostsFile=/dev/null \
-o StrictHostKeyChecking=no root@$FUEL_MASTER_IP 'fuel node list' | \
- grep opendaylight | cut -d'|' -f5)
+ grep controller | head -1 | cut -d'|' -f5)
# we can't do much if we do not have the controller IP
if [[ ! "$CONTROLLER_NODE_IP" =~ "10.20.0" ]]; then
@@ -149,8 +149,6 @@ send "/bin/mkdir -p /root/sfc-poc && cd /root/sfc-poc\r"
expect "# "
send "git clone https://gerrit.opnfv.org/gerrit/fuel && cd fuel\r"
expect "# "
-send "git fetch https://gerrit.opnfv.org/gerrit/fuel refs/changes/97/10597/2 && git checkout FETCH_HEAD\r"
-expect "# "
send "/bin/bash /root/sfc-poc/fuel/prototypes/sfc_tacker/poc.tacker-up.sh\r"
expect "# "
send "exit\r"
diff --git a/jjb/fuel/fuel-project-jobs.yml b/jjb/fuel/fuel-project-jobs.yml
index cf893832b..588ab0cd5 100644
--- a/jjb/fuel/fuel-project-jobs.yml
+++ b/jjb/fuel/fuel-project-jobs.yml
@@ -79,7 +79,7 @@
publishers:
- email:
- recipients: jonas.bjurel@ericsson.com stefan.k.berg@ericsson.com
+ recipients: jonas.bjurel@ericsson.com stefan.k.berg@ericsson.com fzhadaev@mirantis.com
- job-template:
name: 'fuel-merge-build-{stream}'
@@ -218,7 +218,7 @@
publishers:
- email:
- recipients: jonas.bjurel@ericsson.com stefan.k.berg@ericsson.com
+ recipients: jonas.bjurel@ericsson.com stefan.k.berg@ericsson.com fzhadaev@mirantis.com
- job-template:
name: 'fuel-deploy-generic-daily-{stream}'
diff --git a/jjb/functest/functest-ci-jobs.yml b/jjb/functest/functest-ci-jobs.yml
index 3070c2535..0f0cabab3 100644
--- a/jjb/functest/functest-ci-jobs.yml
+++ b/jjb/functest/functest-ci-jobs.yml
@@ -96,6 +96,23 @@
slave-label: '{pod}'
installer: apex
<<: *colorado
+# armband CI PODs
+ - armband-baremetal:
+ slave-label: armband-baremetal
+ installer: fuel
+ <<: *master
+ - armband-virtual:
+ slave-label: armband-virtual
+ installer: fuel
+ <<: *master
+ - armband-baremetal:
+ slave-label: armband-baremetal
+ installer: fuel
+ <<: *colorado
+ - armband-virtual:
+ slave-label: armband-virtual
+ installer: fuel
+ <<: *colorado
#--------------------------------
# None-CI PODs
#--------------------------------
@@ -119,7 +136,7 @@
slave-label: '{pod}'
installer: apex
<<: *master
- - arm-pod1:
+ - arm-pod2:
slave-label: '{pod}'
installer: fuel
<<: *master
@@ -127,6 +144,10 @@
slave-label: '{pod}'
installer: fuel
<<: *master
+ - zte-pod1:
+ slave-label: '{pod}'
+ installer: fuel
+ <<: *colorado
- zte-pod2:
slave-label: '{pod}'
installer: fuel
@@ -135,7 +156,11 @@
slave-label: '{pod}'
installer: fuel
<<: *master
- - arm-pod1:
+ - zte-pod3:
+ slave-label: '{pod}'
+ installer: fuel
+ <<: *colorado
+ - arm-pod2:
slave-label: '{pod}'
installer: fuel
<<: *colorado
@@ -293,7 +318,6 @@
- 'functest-cleanup'
- 'set-functest-env'
- 'functest-suite'
- - 'functest-exit'
- builder:
name: functest-daily
@@ -335,4 +359,4 @@
name: functest-exit
builders:
- shell:
-      !include-raw: ./functest-exit.sh
\ No newline at end of file
+ !include-raw: ./functest-exit.sh
diff --git a/jjb/infra/infra-daily-jobs.yml b/jjb/infra/infra-daily-jobs.yml
index 64c2fc09c..a066e7db4 100644
--- a/jjb/infra/infra-daily-jobs.yml
+++ b/jjb/infra/infra-daily-jobs.yml
@@ -96,6 +96,10 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
+ publishers:
+ - email:
+ recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com zhang.jun3g@zte.com.cn
+
- job-template:
name: 'infra-{phase}-{pod}-daily-{stream}'
@@ -118,6 +122,9 @@
- string:
name: DEPLOY_SCENARIO
default: 'os-nosdn-nofeature-noha'
+ - string:
+ name: CLEAN_DIB_IMAGES
+ default: 'false'
scm:
- git-scm:
@@ -149,11 +156,11 @@
- shell: |
#!/bin/bash
- sudo $WORKSPACE/jjb/infra/infra-provision.sh
+ echo "Not activated!"
- builder:
name: 'infra-smoketest-daily-builder'
builders:
- shell: |
#!/bin/bash
- sudo $WORKSPACE/jjb/infra/infra-provision.sh
+ echo "Not activated!"
diff --git a/jjb/infra/infra-provision.sh b/jjb/infra/infra-provision.sh
index 5ddbaf968..45ed3b928 100755
--- a/jjb/infra/infra-provision.sh
+++ b/jjb/infra/infra-provision.sh
@@ -21,7 +21,7 @@ cp -R /opt/releng/prototypes/bifrost/* /opt/bifrost/
# cleanup remnants of previous deployment
cd /opt/bifrost
-./scripts/destroy_env.sh
+./scripts/destroy-env.sh
# provision 3 VMs; jumphost, controller, and compute
cd /opt/bifrost
diff --git a/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh b/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
index 327ea97e8..6f8fff3ff 100755
--- a/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
+++ b/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
@@ -11,6 +11,7 @@ fi
case "$JOB_TYPE" in
verify)
+ OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
GS_UPLOAD_LOCATION="gs://artifacts.opnfv.org/$PROJECT/review/$GERRIT_CHANGE_NUMBER"
echo "Removing outdated artifacts produced for the previous patch for the change $GERRIT_CHANGE_NUMBER"
gsutil ls $GS_UPLOAD_LOCATION > /dev/null 2>&1 && gsutil rm -r $GS_UPLOAD_LOCATION
@@ -26,11 +27,32 @@ case "$JOB_TYPE" in
exit 1
esac
+# save information regarding artifacts into file
+(
+ echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
+ echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
+ echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
+ echo "OPNFV_ARTIFACT_URL=$GS_UPLOAD_LOCATION"
+ echo "OPNFV_BUILD_URL=$BUILD_URL"
+) > $WORKSPACE/opnfv.properties
+source $WORKSPACE/opnfv.properties
+
+# upload artifacts
gsutil cp -r $WORKSPACE/build_output/* $GS_UPLOAD_LOCATION > $WORKSPACE/gsutil.log 2>&1
gsutil -m setmeta -r \
-h "Cache-Control:private, max-age=0, no-transform" \
$GS_UPLOAD_LOCATION > /dev/null 2>&1
+# upload metadata file for the artifacts built by daily job
+if [[ "$JOB_TYPE" == "daily" ]]; then
+ gsutil cp $WORKSPACE/opnfv.properties $GS_UPLOAD_LOCATION/opnfv.properties > $WORKSPACE/gsutil.log 2>&1
+ gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > $WORKSPACE/gsutil.log 2>&1
+ gsutil -m setmeta -r \
+ -h "Cache-Control:private, max-age=0, no-transform" \
+ $GS_UPLOAD_LOCATION/opnfv.properties \
+ gs://$GS_URL/latest.properties > /dev/null 2>&1
+fi
+
gsutil ls $GS_UPLOAD_LOCATION > /dev/null 2>&1
if [[ $? -ne 0 ]]; then
echo "Problem while uploading artifacts!"
diff --git a/jjb/opnfv/opnfv-docker.sh b/jjb/opnfv/opnfv-docker.sh
index ef4738482..07198c608 100644
--- a/jjb/opnfv/opnfv-docker.sh
+++ b/jjb/opnfv/opnfv-docker.sh
@@ -110,7 +110,12 @@ echo "Tag version to be build and pushed: $DOCKER_TAG"
# Start the build
echo "Building docker image: $DOCKER_REPO_NAME:$DOCKER_BRANCH_TAG"
-docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_BRANCH_TAG .
+if [[ $DOCKER_REPO_NAME == *"functest"* ]]; then
+ docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_BRANCH_TAG --build-arg BRANCH=$branch .
+else
+ docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_BRANCH_TAG .
+fi
+
echo "Creating tag '$DOCKER_TAG'..."
docker tag -f $DOCKER_REPO_NAME:$DOCKER_BRANCH_TAG $DOCKER_REPO_NAME:$DOCKER_TAG
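For illustration, on a functest build the new branch-aware path would expand to something like the following; the tag and branch values are assumptions, since in the job they come from $DOCKER_BRANCH_TAG and $branch:

    # hypothetical expansion for a functest stable/colorado build
    docker build --no-cache -t opnfv/functest:stable_colorado \
        --build-arg BRANCH=stable/colorado .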
diff --git a/jjb/opnfv/slave-params.yml b/jjb/opnfv/slave-params.yml
index 59348e454..c7ec6aa9d 100644
--- a/jjb/opnfv/slave-params.yml
+++ b/jjb/opnfv/slave-params.yml
@@ -151,6 +151,20 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- parameter:
+ name: 'armband-baremetal-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'armband-baremetal'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: LAB_CONFIG_URL
+ default: ssh://git@git.enea.com/pharos/lab-config
+ description: 'Base URI to the configuration directory'
+- parameter:
name: 'joid-baremetal-defaults'
parameters:
- label:
@@ -202,6 +216,20 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- parameter:
+ name: 'armband-virtual-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'armband-virtual'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: LAB_CONFIG_URL
+ default: ssh://git@git.enea.com/pharos/lab-config
+ description: 'Base URI to the configuration directory'
+- parameter:
name: 'joid-virtual-defaults'
parameters:
- label:
@@ -576,32 +604,6 @@
default: /root/.ssh/id_rsa
description: 'SSH key to use for Apex'
- parameter:
- name: 'arm-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - arm-pod1
- default-slaves:
- - arm-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: DEFAULT_BRIDGE
- default: 'admin6_br0,public6_br0'
- desciption: 'The bridge to use for Fuel PXE booting. It can be a comma sparated list of bridges, in which case the first is the PXE boot bridge, and all subsequent interfaces that will be added to the VM. If left empty, most deploy scripts will default to pxebr.'
- - string:
- name: DEPLOY_TIMEOUT
- default: '360'
- description: 'Deployment timeout in minutes'
- - string:
- name: LAB_CONFIG_URL
- default: ssh://git@git.enea.com/pharos/lab-config
- description: 'Base URI to the configuration directory'
-- parameter:
name: 'arm-pod2-defaults'
parameters:
- node:
@@ -616,14 +618,6 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- string:
- name: DEFAULT_BRIDGE
- default: 'admin_br0,public_br0'
- desciption: 'The bridge to use for Fuel PXE booting. It can be a comma sparated list of bridges, in which case the first is the PXE boot bridge, and all subsequent interfaces that will be added to the VM. If left empty, most deploy scripts will default to pxebr.'
- - string:
- name: DEPLOY_TIMEOUT
- default: '360'
- description: 'Deployment timeout in minutes'
- - string:
name: LAB_CONFIG_URL
default: ssh://git@git.enea.com/pharos/lab-config
description: 'Base URI to the configuration directory'
diff --git a/jjb/qtip/qtip-ci-jobs.yml b/jjb/qtip/qtip-ci-jobs.yml
index d454b0f07..d0d6b47a3 100644
--- a/jjb/qtip/qtip-ci-jobs.yml
+++ b/jjb/qtip/qtip-ci-jobs.yml
@@ -13,6 +13,7 @@
stream: master
branch: '{stream}'
gs-pathname: ''
+ docker-tag: 'latest'
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
#--------------------------------
@@ -60,6 +61,10 @@
- string:
name: DEPLOY_SCENARIO
default: 'os-nosdn-nofeature-ha'
+ - string:
+ name: DOCKER_TAG
+ default: '{docker-tag}'
+ description: 'Tag to pull docker image'
scm:
- git-scm:
@@ -72,9 +77,7 @@
builders:
- 'qtip-cleanup'
- - 'qtip-set-env'
- - 'qtip-run-suite'
- - 'qtip-pushtoDB'
+ - 'qtip-daily-ci'
publishers:
- email:
@@ -84,82 +87,16 @@
#biuilder macros
###########################
- builder:
- name: qtip-set-env
- builders:
- - shell: |
- #!/bin/bash
- echo "Qtip: Start Docker and prepare environment"
- envs="INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} -e NODE_NAME=${NODE_NAME}"
- suite="TEST_CASE=all"
- dir_imgstore="${HOME}/imgstore"
- img_volume="${dir_imgstore}:/home/opnfv/imgstore"
- docker pull opnfv/qtip:latest
- cmd=" docker run -id -e $envs -e $suite -v ${img_volume} opnfv/qtip:latest /bin/bash"
- echo "Qtip: Running docker run command: ${cmd}"
- ${cmd}
- docker ps -a
- container_id=$(docker ps | grep 'opnfv/qtip:latest' | awk '{print $1}' | head -1)
- if [ $(docker ps | grep 'opnfv/qtip' | wc -l) == 0 ]; then
- echo "The container opnfv/qtip with ID=${container_id} has not been properly started. Exiting..."
- exit 1
- fi
-- builder:
- name: qtip-run-suite
- builders:
- - shell: |
- #!/bin/bash
- container_id=$(docker ps | grep 'opnfv/qtip:latest' | awk '{print $1}' | head -1)
- if [[ ! -z ${container_id} ]]; then
- echo "The container ID is: ${container_id}"
- QTIP_REPO=/home/opnfv/repos/qtip
- docker exec -t ${container_id} $QTIP_REPO/docker/run_qtip.sh
- else
- echo "Container ID not available"
- fi
-
-- builder:
- name: qtip-pushtoDB
+ name: qtip-daily-ci
builders:
- - shell: |
- #!/bin/bash
-
- echo "Pushing available results to DB"
- echo "The container id is:"
- container_id=$(docker ps | grep 'opnfv/qtip:latest' | awk '{print $1}' | head -1)
- if [[ ! -z ${container_id} ]]; then
- echo "The condiner ID is: ${container_id}"
- QTIP_REPO=/home/opnfv/repos/qtip
- docker exec -t ${container_id} $QTIP_REPO/docker/push_db.sh
- else
- echo "Container ID not available"
- fi
+ - shell:
+ !include-raw: ./qtip-daily-ci.sh
- builder:
name: qtip-cleanup
builders:
- - shell: |
- #!/bin/bash
-
- echo "Cleaning up QTIP docker containers/images..."
- # Remove previous running containers if exist
- if [[ ! -z $(docker ps -a | grep opnfv/qtip) ]]; then
- echo "Removing existing opnfv/qtip containers..."
- running_containers=$(docker ps | grep opnfv/qtip | awk '{print $1}')
- docker stop ${running_containers}
- all_containers=$(docker ps -a | grep opnfv/qtip | awk '{print $1}')
- docker rm ${all_containers}
- fi
-
- # Remove existing images if exist
- if [[ ! -z $(docker images | grep opnfv/qtip) ]]; then
- echo "Docker images to remove:"
- docker images | head -1 && docker images | grep opnfv/qtip
- image_tags=($(docker images | grep opnfv/qtip | awk '{print $2}'))
- for tag in "${image_tags[@]}"; do
- echo "Removing docker image opnfv/qtip:$tag..."
- docker rmi opnfv/qtip:$tag
- done
- fi
+ - shell:
+ !include-raw: ./qtip-cleanup.sh
#################
#trigger macros
diff --git a/jjb/qtip/qtip-cleanup.sh b/jjb/qtip/qtip-cleanup.sh
new file mode 100644
index 000000000..b923aa2a8
--- /dev/null
+++ b/jjb/qtip/qtip-cleanup.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+##############################################################################
+# Copyright (c) 2016 ZTE and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+echo "Cleaning up QTIP docker containers/images..."
+
+# Remove previous running containers if exist
+if [[ ! -z $(docker ps -a | grep opnfv/qtip) ]]; then
+ echo "Removing existing opnfv/qtip containers..."
+ running_containers=$(docker ps | grep opnfv/qtip | awk '{print $1}')
+ docker stop ${running_containers}
+ all_containers=$(docker ps -a | grep opnfv/qtip | awk '{print $1}')
+ docker rm ${all_containers}
+fi
+
+# Remove existing images if exist
+if [[ ! -z $(docker images | grep opnfv/qtip) ]]; then
+ echo "Docker images to remove:"
+ docker images | head -1 && docker images | grep opnfv/qtip
+ image_tags=($(docker images | grep opnfv/qtip | awk '{print $2}'))
+ for tag in "${image_tags[@]}"; do
+ echo "Removing docker image opnfv/qtip:$tag..."
+ docker rmi opnfv/qtip:$tag
+ done
+fi
+
diff --git a/jjb/qtip/qtip-daily-ci.sh b/jjb/qtip/qtip-daily-ci.sh
new file mode 100644
index 000000000..4fdc04345
--- /dev/null
+++ b/jjb/qtip/qtip-daily-ci.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+##############################################################################
+# Copyright (c) 2016 ZTE and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+set -e
+
+envs="INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} -e NODE_NAME=${NODE_NAME}"
+suite="TEST_CASE=all"
+dir_imgstore="${HOME}/imgstore"
+img_volume="${dir_imgstore}:/home/opnfv/imgstore"
+
+echo "Qtip: Pulling docker image: opnfv/qtip:${DOCKER_TAG}"
+docker pull opnfv/qtip:$DOCKER_TAG
+
+cmd=" docker run -id -e $envs -e $suite -v ${img_volume} opnfv/qtip:${DOCKER_TAG} /bin/bash"
+echo "Qtip: Running docker command: ${cmd}"
+${cmd}
+
+container_id=$(docker ps | grep "opnfv/qtip:${DOCKER_TAG}" | awk '{print $1}' | head -1)
+if [ $(docker ps | grep 'opnfv/qtip' | wc -l) == 0 ]; then
+ echo "The container opnfv/qtip with ID=${container_id} has not been properly started. Exiting..."
+ exit 1
+else
+ echo "The container ID is: ${container_id}"
+ QTIP_REPO=/home/opnfv/repos/qtip
+
+ echo "Run Qtip test"
+ docker exec -t ${container_id} $QTIP_REPO/docker/run_qtip.sh
+
+ echo "Pushing available results to DB"
+ docker exec -t ${container_id} $QTIP_REPO/docker/push_db.sh
+fi
+
+echo "Qtip done!"
diff --git a/jjb/releng-macros.yaml b/jjb/releng-macros.yaml
index 8328aec03..2aa775fd6 100644
--- a/jjb/releng-macros.yaml
+++ b/jjb/releng-macros.yaml
@@ -47,6 +47,7 @@
- 'origin/$GERRIT_BRANCH'
skip-tag: true
choosing-strategy: '{choosing-strategy}'
+ timeout: 15
- wrapper:
name: build-timeout
@@ -208,12 +209,10 @@
mv docs_output "$local_path"
gsutil -m cp -r "$local_path" "gs://$gs_base"
- if gsutil ls "gs://$gs_path" | grep -e 'html$' > /dev/null 2>&1 ; then
- gsutil -m setmeta \
- -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- "gs://$gs_path"/**.html
- fi
+ gsutil -m setmeta \
+ -h "Content-Type:text/html" \
+ -h "Cache-Control:private, max-age=0, no-transform" \
+ "gs://$gs_path"/**.html > /dev/null 2>&1
echo "Document link(s):" >> gerrit_comment.txt
find "$local_path" | grep -e 'index.html$' -e 'pdf$' | \
@@ -244,12 +243,10 @@
mv docs_output "$local_path"
gsutil -m cp -r "$local_path" "gs://$GS_URL"
- if gsutil ls "gs://$gs_path" | grep -e 'html$' > /dev/null 2>&1 ; then
- gsutil -m setmeta \
- -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- "gs://$gs_path"/**.html
- fi
+ gsutil -m setmeta \
+ -h "Content-Type:text/html" \
+ -h "Cache-Control:private, max-age=0, no-transform" \
+ "gs://$gs_path"/**.html > /dev/null 2>&1
echo "Document link(s):" >> gerrit_comment.txt
find "$local_path" | grep -e 'index.html$' -e 'pdf$' | \
diff --git a/jjb/yardstick/yardstick-ci-jobs.yml b/jjb/yardstick/yardstick-ci-jobs.yml
index d9fb43555..dd88a52b6 100644
--- a/jjb/yardstick/yardstick-ci-jobs.yml
+++ b/jjb/yardstick/yardstick-ci-jobs.yml
@@ -49,6 +49,27 @@
installer: fuel
auto-trigger-name: 'daily-trigger-disabled'
<<: *colorado
+# armband CI PODs
+ - armband-baremetal:
+ slave-label: armband-baremetal
+ installer: fuel
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
+ - armband-virtual:
+ slave-label: armband-virtual
+ installer: fuel
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
+ - armband-baremetal:
+ slave-label: armband-baremetal
+ installer: fuel
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *colorado
+ - armband-virtual:
+ slave-label: armband-virtual
+ installer: fuel
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *colorado
# joid CI PODs
- baremetal:
slave-label: joid-baremetal
@@ -121,6 +142,11 @@
installer: fuel
auto-trigger-name: 'daily-trigger-disabled'
<<: *master
+ - zte-pod1:
+ slave-label: '{pod}'
+ installer: fuel
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *colorado
- zte-pod2:
slave-label: '{pod}'
installer: fuel
@@ -131,7 +157,12 @@
installer: fuel
auto-trigger-name: 'daily-trigger-disabled'
<<: *master
- - arm-pod1:
+ - zte-pod3:
+ slave-label: '{pod}'
+ installer: fuel
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *colorado
+ - arm-pod2:
slave-label: '{pod}'
installer: fuel
auto-trigger-name: 'daily-trigger-disabled'
@@ -272,6 +303,20 @@
default: ''
description: 'Arguments to use in order to choose the backend DB'
- parameter:
+ name: 'yardstick-params-armband-baremetal'
+ parameters:
+ - string:
+ name: YARDSTICK_DB_BACKEND
+ default: '-i 104.197.68.199:8086'
+ description: 'Arguments to use in order to choose the backend DB'
+- parameter:
+ name: 'yardstick-params-armband-virtual'
+ parameters:
+ - string:
+ name: YARDSTICK_DB_BACKEND
+ default: ''
+ description: 'Arguments to use in order to choose the backend DB'
+- parameter:
name: 'yardstick-params-joid-baremetal'
parameters:
- string:
@@ -365,7 +410,7 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-arm-pod1'
+ name: 'yardstick-params-arm-pod2'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
diff --git a/prototypes/bifrost/README.md b/prototypes/bifrost/README.md
index fffd1de3d..f50ffb217 100644
--- a/prototypes/bifrost/README.md
+++ b/prototypes/bifrost/README.md
@@ -24,7 +24,7 @@ Please follow that steps:
5. Run destroy script if you need to cleanup previous environment::
cd /opt/bifrost
- ./scripts/destroy_env.sh
+ ./scripts/destroy-env.sh
6. Run deployment script to spin up 3 vms with bifrost: jumphost, controller and compute::
diff --git a/prototypes/bifrost/scripts/destroy_env.sh b/prototypes/bifrost/scripts/destroy-env.sh
index 819048b53..4dffee62a 100755
--- a/prototypes/bifrost/scripts/destroy_env.sh
+++ b/prototypes/bifrost/scripts/destroy-env.sh
@@ -26,9 +26,13 @@ echo "removing leases"
echo "removing logs"
rm -rf /var/log/libvirt/baremetal_logs/*.log
-# clean up images
-rm -rf /httpboot/*
-rm -rf /tftpboot/*
+# clean up dib images only if requested explicitly
+if [ $CLEAN_DIB_IMAGES = "true" ]; then
+ rm -rf /httpboot/*
+ rm -rf /tftpboot/*
+fi
+
+# remove VM disk images
rm -rf /var/lib/libvirt/images/*.qcow2
echo "restarting services"
diff --git a/prototypes/bifrost/scripts/test-bifrost-deployment.sh b/prototypes/bifrost/scripts/test-bifrost-deployment.sh
index 66affe9a7..d796f3509 100755
--- a/prototypes/bifrost/scripts/test-bifrost-deployment.sh
+++ b/prototypes/bifrost/scripts/test-bifrost-deployment.sh
@@ -71,17 +71,17 @@ set -x -o nounset
cd $BIFROST_HOME/playbooks
# Syntax check of dynamic inventory test path
-${ANSIBLE} -vvvv \
- -i inventory/localhost \
- test-bifrost-create-vm.yaml \
- --syntax-check \
- --list-tasks
-${ANSIBLE} -vvvv \
- -i inventory/localhost \
- ${TEST_PLAYBOOK} \
- --syntax-check \
- --list-tasks \
- -e testing_user=${TESTING_USER}
+for task in syntax-check list-tasks; do
+ ${ANSIBLE} -vvvv \
+ -i inventory/localhost \
+ test-bifrost-create-vm.yaml \
+ --${task}
+ ${ANSIBLE} -vvvv \
+ -i inventory/localhost \
+ ${TEST_PLAYBOOK} \
+ --${task} \
+ -e testing_user=${TESTING_USER}
+done
# Create the test VMS
${ANSIBLE} -vvvv \
diff --git a/prototypes/puppet-infracloud/README.md b/prototypes/puppet-infracloud/README.md
index f3bd67279..37d575c33 100644
--- a/prototypes/puppet-infracloud/README.md
+++ b/prototypes/puppet-infracloud/README.md
@@ -11,7 +11,7 @@ Once all the hardware is provisioned, enter in controller and compute nodes and
2. Copy hiera to the right place::
- cp /opt/releng/prototypes/puppet-infracloud/hiera/common.yaml /var/lib/hiera/
+ cp /opt/releng/prototypes/puppet-infracloud/hiera/common.yaml /var/lib/hiera
3. Install modules::
@@ -21,7 +21,7 @@ Once all the hardware is provisioned, enter in controller and compute nodes and
4. Apply the infracloud manifest::
cd /opt/releng/prototypes/puppet-infracloud
- puppet apply --manifests/site.pp --modulepath=/etc/puppet/modules:/opt/releng/prototypes/puppet-infracloud/modules
+ puppet apply manifests/site.pp --modulepath=/etc/puppet/modules:/opt/releng/prototypes/puppet-infracloud/modules
5. Once you finish this operation on controller and compute nodes, you will have a functional OpenStack cloud.
@@ -31,7 +31,7 @@ In jumphost, follow that steps:
git clone https://gerrit.opnfv.org/gerrit/releng /opt/releng
-2. Create OpenStack clouds config directory:
+2. Create OpenStack clouds config directory::
mkdir -p /root/.config/openstack
@@ -39,14 +39,23 @@ In jumphost, follow that steps:
cp /opt/releng/prototypes/puppet-infracloud/creds/clouds.yaml /root/.config/openstack/
-4. Install openstack-client:
+4. Install python-dev package as the installation of python-openstackclient depends on it
+
+ apt-get install -y python-dev
+
+5. Install openstack-client. (version 3.2.0 is known to work)::
pip install python-openstackclient
-5. Export the desired cloud::
+6. Update /etc/hosts and add controller00::
+
+ 192.168.122.3 controller00
+ 192.168.122.3 controller00.opnfvlocal controller00
+
+7. Export the desired cloud::
export OS_CLOUD=opnfv
-6. Start using it::
+8. Start using it::
- openstack server list
+ openstack service list
diff --git a/prototypes/puppet-infracloud/hiera/common.yaml b/prototypes/puppet-infracloud/hiera/common.yaml
index 6c28f1972..7d6b44063 100644
--- a/prototypes/puppet-infracloud/hiera/common.yaml
+++ b/prototypes/puppet-infracloud/hiera/common.yaml
@@ -75,3 +75,6 @@ hosts:
ip: 192.168.122.3
compute00.opnfvlocal:
ip: 192.168.122.4
+
+# br-eth0 for debian, br-ens3 for RHEL
+bridge_name: br-eth0
diff --git a/prototypes/puppet-infracloud/manifests/site.pp b/prototypes/puppet-infracloud/manifests/site.pp
index e524918c6..1bbd282ee 100644
--- a/prototypes/puppet-infracloud/manifests/site.pp
+++ b/prototypes/puppet-infracloud/manifests/site.pp
@@ -30,7 +30,7 @@ node 'controller00.opnfvlocal' {
keystone_admin_token => hiera('keystone_admin_token'),
ssl_key_file_contents => hiera('ssl_key_file_contents'),
ssl_cert_file_contents => hiera('ssl_cert_file_contents'),
- br_name => 'br-eth0',
+ br_name => hiera('bridge_name'),
controller_public_address => $::fqdn,
neutron_subnet_cidr => '192.168.122.0/24',
neutron_subnet_gateway => '192.168.122.1',
@@ -55,9 +55,16 @@ node 'compute00.opnfvlocal' {
neutron_admin_password => hiera('neutron_admin_password'),
ssl_cert_file_contents => hiera('ssl_cert_file_contents'),
ssl_key_file_contents => hiera('ssl_key_file_contents'),
- br_name => 'br-eth0',
+ br_name => hiera('bridge_name'),
controller_public_address => 'controller00.opnfvlocal',
virt_type => 'qemu',
}
}
+node 'jumphost.opnfvlocal' {
+ class { 'opnfv::server':
+ sysadmins => hiera('sysadmins', []),
+ enable_unbound => false,
+ purge_apt_sources => false,
+ }
+}
diff --git a/prototypes/puppet-infracloud/modules/opnfv/manifests/compute.pp b/prototypes/puppet-infracloud/modules/opnfv/manifests/compute.pp
index ca548a5d5..77908c0b8 100644
--- a/prototypes/puppet-infracloud/modules/opnfv/manifests/compute.pp
+++ b/prototypes/puppet-infracloud/modules/opnfv/manifests/compute.pp
@@ -8,6 +8,14 @@ class opnfv::compute (
$controller_public_address,
$virt_type = 'kvm',
) {
+ # disable selinux if needed
+ if $::osfamily == 'RedHat' {
+ class { 'selinux':
+ mode => 'permissive',
+ before => Class['::infracloud::compute'],
+ }
+ }
+
class { '::infracloud::compute':
nova_rabbit_password => $nova_rabbit_password,
neutron_rabbit_password => $neutron_rabbit_password,
diff --git a/prototypes/puppet-infracloud/modules/opnfv/manifests/controller.pp b/prototypes/puppet-infracloud/modules/opnfv/manifests/controller.pp
index 7522692c1..4bae42cf7 100644
--- a/prototypes/puppet-infracloud/modules/opnfv/manifests/controller.pp
+++ b/prototypes/puppet-infracloud/modules/opnfv/manifests/controller.pp
@@ -30,6 +30,14 @@ class opnfv::controller (
$opnfv_password,
$opnfv_email = 'opnfvuser@gmail.com',
) {
+ # disable selinux if needed
+ if $::osfamily == 'RedHat' {
+ class { 'selinux':
+ mode => 'permissive',
+ before => Class['::infracloud::controller'],
+ }
+ }
+
class { '::infracloud::controller':
keystone_rabbit_password => $keystone_rabbit_password,
neutron_rabbit_password => $neutron_rabbit_password,
diff --git a/utils/push-test-logs.sh b/utils/push-test-logs.sh
index 24b3281e8..0fa882bc9 100644
--- a/utils/push-test-logs.sh
+++ b/utils/push-test-logs.sh
@@ -54,8 +54,14 @@ if [ -d "$dir_result" ]; then
if [ $? != 0 ]; then
echo "Not possible to push results to artifact: gsutil not installed.";
else
- echo "copy result files to artifact $project_artifact"
- gsutil -m cp -r "$dir_result" gs://artifacts.opnfv.org/"$project_artifact"/ >/dev/null 2>&1
+ echo "Uploading logs to artifact $project_artifact"
+ gsutil -m cp -r "$dir_result"/* gs://artifacts.opnfv.org/"$project_artifact"/ >/dev/null 2>&1
+ echo "Logs can be found in http://artifacts.opnfv.org/logs_${project}_${testbed}.html"
+ cd $dir_result
+ files=($(find . -name \* -print|sed 's/^\.//'|sed '/^\s*$/d'))
+ for f in ${files[@]}; do
+ echo "http://artifacts.opnfv.org/${project_artifact}${f}"
+ done
fi
fi
else
diff --git a/utils/test/reporting/functest/index.html b/utils/test/reporting/functest/index.html
index af4033567..bb1bce209 100644
--- a/utils/test/reporting/functest/index.html
+++ b/utils/test/reporting/functest/index.html
@@ -21,10 +21,11 @@
<h3 class="text-muted">Functest reporting page</h3>
<nav>
<ul class="nav nav-justified">
- <li class="active"><a href="#">Home</a></li>
- <li><a href="./index-status-apex.html">Status</a></li>
- <li><a href="./index-tempest-apex.html">Tempest</a></li>
- <li><a href="./index-vims-apex.html">vIMS</a></li>
+ <li class="active"><a href="http://testresults.opnfv.org/reporting/index.html">Home</a></li>
+ <li><a href="index-status-apex.html">Apex</a></li>
+ <li><a href="index-status-compass.html">Compass</a></li>
+ <li><a href="index-status-fuel.html">Fuel</a></li>
+ <li><a href="index-status-joid.html">Joid</a></li>
</ul>
</nav>
</div>
diff --git a/utils/test/reporting/yardstick/index.html b/utils/test/reporting/yardstick/index.html
index ec64bc848..488f1421d 100644
--- a/utils/test/reporting/yardstick/index.html
+++ b/utils/test/reporting/yardstick/index.html
@@ -21,8 +21,11 @@
<h3 class="text-muted">Yardstick reporting page</h3>
<nav>
<ul class="nav nav-justified">
- <li class="active"><a href="#">Home</a></li>
- <li><a href="./index-status-apex.html">Status</a></li>
+ <li class="active"><a href="http://testresults.opnfv.org/reporting/index.html">Home</a></li>
+ <li><a href="index-status-apex.html">Apex</a></li>
+ <li><a href="index-status-compass.html">Compass</a></li>
+ <li><a href="index-status-fuel.html">Fuel</a></li>
+ <li><a href="index-status-joid.html">Joid</a></li>
</ul>
</nav>
</div>
@@ -45,4 +48,4 @@
</div>
</div>
<div class="col-md-1"></div>
-</div>
\ No newline at end of file
+</div>
diff --git a/utils/test/reporting/yardstick/reportingConf.py b/utils/test/reporting/yardstick/reportingConf.py
index af95cc00f..9e34034e2 100644
--- a/utils/test/reporting/yardstick/reportingConf.py
+++ b/utils/test/reporting/yardstick/reportingConf.py
@@ -11,7 +11,7 @@
# ****************************************************
installers = ["apex", "compass", "fuel", "joid"]
-versions = ["master"]
+versions = ["master", "stable/colorado"]
# get data in the past 7 days
PERIOD = 7
diff --git a/utils/test/scripts/backup-db.sh b/utils/test/scripts/backup-db.sh
index aa36aa370..35c3fbe5a 100644
--- a/utils/test/scripts/backup-db.sh
+++ b/utils/test/scripts/backup-db.sh
@@ -18,16 +18,16 @@ echo "Create Directory for backup"
mkdir -p $TARGET_DIR
echo "Export results"
-mongoexport -db test_results_collection -c test_results --out $TARGET_DIR/results.json
+mongoexport --db test_results_collection -c results --out $TARGET_DIR/backup-results.json
echo "Export test cases"
-mongoexport --db test_results_collection -c test_cases --out $TARGET_DIR/backup-cases.json
+mongoexport --db test_results_collection -c testcases --out $TARGET_DIR/backup-cases.json
echo "Export projects"
-mongoexport --db test_results_collection -c test_projects --out $TARGET_DIR/backup-projects.json
+mongoexport --db test_results_collection -c projects --out $TARGET_DIR/backup-projects.json
echo "Export pods"
-mongoexport --db test_results_collection -c pod --out $TARGET_DIR/backup-pod.json
+mongoexport --db test_results_collection -c pods --out $TARGET_DIR/backup-pod.json
echo "Create tar.gz"
-tar -cvzf $TEST_RESULT_DB_BACKUP $TARGET_DIR
+#tar -cvzf $TEST_RESULT_DB_BACKUP $TARGET_DIR
echo "Delete temp directory"
-rm -Rf $TARGET_DIR
+#rm -Rf $TARGET_DIR
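For completeness, restoring from such a backup would use the matching mongoimport calls; this is a sketch based on the collection names above, not part of the script:

    mongoimport --db test_results_collection -c results --file $TARGET_DIR/backup-results.json
    mongoimport --db test_results_collection -c testcases --file $TARGET_DIR/backup-cases.json
    mongoimport --db test_results_collection -c projects --file $TARGET_DIR/backup-projects.json
    mongoimport --db test_results_collection -c pods --file $TARGET_DIR/backup-pod.json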
diff --git a/utils/test/scripts/create_kibana_dashboards.py b/utils/test/scripts/create_kibana_dashboards.py
index 73f4ed971..abb9471ac 100644
--- a/utils/test/scripts/create_kibana_dashboards.py
+++ b/utils/test/scripts/create_kibana_dashboards.py
@@ -1,431 +1,32 @@
#! /usr/bin/env python
+import json
import logging
+import urlparse
+
import argparse
+import yaml
+
import shared_utils
-import json
-import urlparse
logger = logging.getLogger('create_kibana_dashboards')
logger.setLevel(logging.DEBUG)
-file_handler = logging.FileHandler('/var/log/{}.log'.format('create_kibana_dashboards'))
+file_handler = logging.FileHandler('./{}.log'.format('create_kibana_dashboards'))
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
logger.addHandler(file_handler)
_installers = {'fuel', 'apex', 'compass', 'joid'}
-# see class VisualizationState for details on format
-_testcases = [
- ('functest', 'tempest_smoke_serial',
- [
- {
- "metrics": [
- {
- "type": "avg",
- "params": {
- "field": "details.duration"
- }
- }
- ],
- "type": "line",
- "metadata": {
- "label": "tempest_smoke_serial duration",
- "test_family": "VIM"
- }
- },
-
- {
- "metrics": [
- {
- "type": "sum",
- "params": {
- "field": "details.tests"
- }
- },
- {
- "type": "sum",
- "params": {
- "field": "details.failures"
- }
- }
- ],
- "type": "histogram",
- "metadata": {
- "label": "tempest_smoke_serial nr of tests/failures",
- "test_family": "VIM"
- }
- },
-
- {
- "metrics": [
- {
- "type": "avg",
- "params": {
- "field": "details.success_percentage"
- }
- }
- ],
- "type": "line",
- "metadata": {
- "label": "tempest_smoke_serial success percentage",
- "test_family": "VIM"
- }
- }
- ]
- ),
-
- ('functest', 'rally_sanity',
- [
- {
- "metrics": [
- {
- "type": "avg",
- "params": {
- "field": "details.duration"
- }
- }
- ],
- "type": "line",
- "metadata": {
- "label": "rally_sanity duration",
- "test_family": "VIM"
- }
- },
-
- {
- "metrics": [
- {
- "type": "avg",
- "params": {
- "field": "details.tests"
- }
- }
- ],
- "type": "histogram",
- "metadata": {
- "label": "rally_sanity nr of tests",
- "test_family": "VIM"
- }
- },
-
- {
- "metrics": [
- {
- "type": "avg",
- "params": {
- "field": "details.success_percentage"
- }
- }
- ],
- "type": "line",
- "metadata": {
- "label": "rally_sanity success percentage",
- "test_family": "VIM"
- }
- }
- ]
- ),
-
- ('functest', 'vping_ssh',
- [
- {
- "metrics": [
- {
- "type": "avg",
- "params": {
- "field": "details.duration"
- }
- }
- ],
- "type": "line",
- "metadata": {
- "label": "vPing duration",
- "test_family": "VIM"
- }
- }
- ]
- ),
-
- ('functest', 'vping_userdata',
- [
- {
- "metrics": [
- {
- "type": "avg",
- "params": {
- "field": "details.duration"
- }
- }
- ],
- "type": "line",
- "metadata": {
- "label": "vPing_userdata duration",
- "test_family": "VIM"
- }
- }
- ]
- ),
-
- ('functest', 'odl',
- [
- {
- "metrics": [
- {
- "type": "sum",
- "params": {
- "field": "details.tests"
- }
- },
- {
- "type": "sum",
- "params": {
- "field": "details.failures"
- }
- }
- ],
- "type": "histogram",
- "metadata": {
- "label": "ODL nr of tests/failures",
- "test_family": "Controller"
- }
- },
-
- {
- "metrics": [
- {
- "type": "avg",
- "params": {
- "field": "details.success_percentage"
- }
- }
- ],
- "type": "line",
- "metadata": {
- "label": "ODL success percentage",
- "test_family": "Controller"
- }
- }
- ]
- ),
-
- ('functest', 'onos',
- [
- {
- "metrics": [
- {
- "type": "avg",
- "params": {
- "field": "details.FUNCvirNet.duration"
- }
- }
- ],
- "type": "line",
- "metadata": {
- "label": "ONOS FUNCvirNet duration",
- "test_family": "Controller"
- }
- },
-
- {
- "metrics": [
- {
- "type": "sum",
- "params": {
- "field": "details.FUNCvirNet.tests"
- }
- },
- {
- "type": "sum",
- "params": {
- "field": "details.FUNCvirNet.failures"
- }
- }
- ],
- "type": "histogram",
- "metadata": {
- "label": "ONOS FUNCvirNet nr of tests/failures",
- "test_family": "Controller"
- }
- },
-
- {
- "metrics": [
- {
- "type": "avg",
- "params": {
- "field": "details.FUNCvirNetL3.duration"
- }
- }
- ],
- "type": "line",
- "metadata": {
- "label": "ONOS FUNCvirNetL3 duration",
- "test_family": "Controller"
- }
- },
-
- {
- "metrics": [
- {
- "type": "sum",
- "params": {
- "field": "details.FUNCvirNetL3.tests"
- }
- },
- {
- "type": "sum",
- "params": {
- "field": "details.FUNCvirNetL3.failures"
- }
- }
- ],
- "type": "histogram",
- "metadata": {
- "label": "ONOS FUNCvirNetL3 nr of tests/failures",
- "test_family": "Controller"
- }
- }
- ]
- ),
-
- ('functest', 'vims',
- [
- {
- "metrics": [
- {
- "type": "sum",
- "params": {
- "field": "details.sig_test.tests"
- }
- },
- {
- "type": "sum",
- "params": {
- "field": "details.sig_test.failures"
- }
- },
- {
- "type": "sum",
- "params": {
- "field": "details.sig_test.passed"
- }
- },
- {
- "type": "sum",
- "params": {
- "field": "details.sig_test.skipped"
- }
- }
- ],
- "type": "histogram",
- "metadata": {
- "label": "vIMS nr of tests/failures/passed/skipped",
- "test_family": "Features"
- }
- },
-
- {
- "metrics": [
- {
- "type": "avg",
- "params": {
- "field": "details.vIMS.duration"
- }
- },
- {
- "type": "avg",
- "params": {
- "field": "details.orchestrator.duration"
- }
- },
- {
- "type": "avg",
- "params": {
- "field": "details.sig_test.duration"
- }
- }
- ],
- "type": "histogram",
- "metadata": {
- "label": "vIMS/ochestrator/test duration",
- "test_family": "Features"
- }
- }
- ]
- ),
-
- ('promise', 'promise',
- [
- {
- "metrics": [
- {
- "type": "avg",
- "params": {
- "field": "details.duration"
- }
- }
- ],
- "type": "line",
- "metadata": {
- "label": "promise duration",
- "test_family": "Features"
- }
- },
-
- {
- "metrics": [
- {
- "type": "sum",
- "params": {
- "field": "details.tests"
- }
- },
- {
- "type": "sum",
- "params": {
- "field": "details.failures"
- }
- }
- ],
- "type": "histogram",
- "metadata": {
- "label": "promise nr of tests/failures",
- "test_family": "Features"
- }
- }
- ]
- ),
-
- ('doctor', 'doctor-notification',
- [
- {
- "metrics": [
- {
- "type": "avg",
- "params": {
- "field": "details.duration"
- }
- }
- ],
- "type": "line",
- "metadata": {
- "label": "doctor-notification duration",
- "test_family": "Features"
- }
- }
- ]
- )
-]
-
class KibanaDashboard(dict):
- def __init__(self, project_name, case_name, installer, pod, scenarios, visualization_detail):
+ def __init__(self, project_name, case_name, family, installer, pod, scenarios, visualization):
super(KibanaDashboard, self).__init__()
self.project_name = project_name
self.case_name = case_name
+ self.family = family
self.installer = installer
self.pod = pod
self.scenarios = scenarios
- self.visualization_detail = visualization_detail
+ self.visualization = visualization
self._visualization_title = None
self._kibana_visualizations = []
self._kibana_dashboard = None
@@ -439,7 +40,7 @@ class KibanaDashboard(dict):
self.installer,
self.pod,
scenario,
- self.visualization_detail))
+ self.visualization))
self._visualization_title = self._kibana_visualizations[0].vis_state_title
@@ -512,7 +113,15 @@ class KibanaDashboard(dict):
},
separators=(',', ':'))
}
- self['metadata'] = self.visualization_detail['metadata']
+
+ label = self.case_name
+ if 'label' in self.visualization:
+ label += " %s" % self.visualization.get('label')
+ label += " %s" % self.visualization.get('name')
+ self['metadata'] = {
+ "label": label,
+ "test_family": self.family
+ }
def _publish(self):
url = urlparse.urljoin(base_elastic_url, '/.kibana/dashboard/{}'.format(self.id))
@@ -546,58 +155,21 @@ class KibanaSearchSourceJSON(dict):
class VisualizationState(dict):
- def __init__(self, input_dict):
- """
- dict structure:
- {
- "metrics":
- [
- {
- "type": type, # default sum
- "params": {
- "field": field # mandatory, no default
- },
- {metric2}
- ],
- "segments":
- [
- {
- "type": type, # default date_histogram
- "params": {
- "field": field # default start_date
- },
- {segment2}
- ],
- "type": type, # default area
- "mode": mode, # default grouped for type 'histogram', stacked for other types
- "metadata": {
- "label": "tempest_smoke_serial duration",# mandatory, no default
- "test_family": "VIM" # mandatory, no default
- }
- }
-
- default modes:
- type histogram: grouped
- type area: stacked
-
- :param input_dict:
- :return:
- """
+ def __init__(self, visualization):
super(VisualizationState, self).__init__()
- metrics = input_dict['metrics']
- segments = [] if 'segments' not in input_dict else input_dict['segments']
-
- graph_type = 'area' if 'type' not in input_dict else input_dict['type']
- self['type'] = graph_type
-
- if 'mode' not in input_dict:
- if graph_type == 'histogram':
- mode = 'grouped'
- else:
- # default
- mode = 'stacked'
+ name = visualization.get('name')
+ fields = visualization.get('fields')
+
+ if name == 'tests_failures':
+ mode = 'grouped'
+ metric_type = 'sum'
+ self['type'] = 'histogram'
else:
- mode = input_dict['mode']
+ # duration or success_percentage
+ mode = 'stacked'
+ metric_type = 'avg'
+ self['type'] = 'line'
+
self['params'] = {
"shareYAxis": True,
"addTooltip": True,
@@ -616,35 +188,18 @@ class VisualizationState(dict):
self['aggs'] = []
i = 1
- for metric in metrics:
+ for field in fields:
self['aggs'].append({
"id": str(i),
- "type": 'sum' if 'type' not in metric else metric['type'],
+ "type": metric_type,
"schema": "metric",
"params": {
- "field": metric['params']['field']
+ "field": field.get('field')
}
})
i += 1
- if len(segments) > 0:
- for segment in segments:
- self['aggs'].append({
- "id": str(i),
- "type": 'date_histogram' if 'type' not in segment else segment['type'],
- "schema": "metric",
- "params": {
- "field": "start_date" if ('params' not in segment or 'field' not in segment['params'])
- else segment['params']['field'],
- "interval": "auto",
- "customInterval": "2h",
- "min_doc_count": 1,
- "extended_bounds": {}
- }
- })
- i += 1
- else:
- self['aggs'].append({
+ self['aggs'].append({
"id": str(i),
"type": 'date_histogram',
"schema": "segment",
@@ -663,7 +218,7 @@ class VisualizationState(dict):
class KibanaVisualization(dict):
- def __init__(self, project_name, case_name, installer, pod, scenario, detail):
+ def __init__(self, project_name, case_name, installer, pod, scenario, visualization):
"""
We need two things
1. filter created from
@@ -679,7 +234,7 @@ class KibanaVisualization(dict):
:return:
"""
super(KibanaVisualization, self).__init__()
- vis_state = VisualizationState(detail)
+ vis_state = VisualizationState(visualization)
self.vis_state_title = vis_state['title']
self['title'] = '{} {} {} {} {} {}'.format(project_name,
case_name,
@@ -752,13 +307,25 @@ def construct_dashboards():
:return: list of KibanaDashboards
"""
kibana_dashboards = []
- for project_name, case_name, visualization_details in _testcases:
- for installer in _installers:
- pods_and_scenarios = _get_pods_and_scenarios(project_name, case_name, installer)
- for visualization_detail in visualization_details:
- for pod, scenarios in pods_and_scenarios.iteritems():
- kibana_dashboards.append(KibanaDashboard(project_name, case_name, installer, pod, scenarios,
- visualization_detail))
+ with open('./testcases.yaml') as f:
+ testcases_yaml = yaml.safe_load(f)
+
+ for project, case_dicts in testcases_yaml.items():
+ for case in case_dicts:
+ case_name = case.get('name')
+ visualizations = case.get('visualizations')
+ family = case.get('test_family')
+ for installer in _installers:
+ pods_and_scenarios = _get_pods_and_scenarios(project, case_name, installer)
+ for visualization in visualizations:
+ for pod, scenarios in pods_and_scenarios.iteritems():
+ kibana_dashboards.append(KibanaDashboard(project,
+ case_name,
+ family,
+ installer,
+ pod,
+ scenarios,
+ visualization))
return kibana_dashboards
@@ -821,4 +388,3 @@ if __name__ == '__main__':
if generate_inputs:
generate_js_inputs(input_file_path, kibana_url, dashboards)
-
diff --git a/utils/test/scripts/mongo_to_elasticsearch.py b/utils/test/scripts/mongo_to_elasticsearch.py
index 2ffbc1713..ded58ef4c 100644
--- a/utils/test/scripts/mongo_to_elasticsearch.py
+++ b/utils/test/scripts/mongo_to_elasticsearch.py
@@ -1,13 +1,16 @@
#! /usr/bin/env python
-import logging
-import argparse
-import shared_utils
+import datetime
import json
-import urlparse
-import uuid
+import logging
import os
import subprocess
-import datetime
+import traceback
+import urlparse
+import uuid
+
+import argparse
+
+import shared_utils
logger = logging.getLogger('mongo_to_elasticsearch')
logger.setLevel(logging.DEBUG)
@@ -370,18 +373,21 @@ def modify_mongo_entry(testcase):
project = testcase['project_name']
case_name = testcase['case_name']
logger.info("Processing mongo test case '{}'".format(case_name))
- if project == 'functest':
- if case_name == 'rally_sanity':
- return modify_functest_rally(testcase)
- elif case_name.lower() == 'odl':
- return modify_functest_odl(testcase)
- elif case_name.lower() == 'onos':
- return modify_functest_onos(testcase)
- elif case_name.lower() == 'vims':
- return modify_functest_vims(testcase)
- elif case_name == 'tempest_smoke_serial':
- return modify_functest_tempest(testcase)
- return modify_default_entry(testcase)
+ try:
+ if project == 'functest':
+ if case_name == 'rally_sanity':
+ return modify_functest_rally(testcase)
+ elif case_name.lower() == 'odl':
+ return modify_functest_odl(testcase)
+ elif case_name.lower() == 'onos':
+ return modify_functest_onos(testcase)
+ elif case_name.lower() == 'vims':
+ return modify_functest_vims(testcase)
+ elif case_name == 'tempest_smoke_serial':
+ return modify_functest_tempest(testcase)
+ return modify_default_entry(testcase)
+ except Exception:
+ logger.error("Fail in modify testcase[%s]\nerror message: %s" % (testcase, traceback.format_exc()))
else:
return False
diff --git a/utils/test/scripts/testcases.yaml b/utils/test/scripts/testcases.yaml
new file mode 100644
index 000000000..12031ef5d
--- /dev/null
+++ b/utils/test/scripts/testcases.yaml
@@ -0,0 +1,129 @@
+functest:
+ -
+ name: tempest_smoke_serial
+ test_family: VIM
+ visualizations:
+ -
+ name: duration
+ fields:
+ - field: details.duration
+ -
+ name: tests_failures
+ fields:
+ - field: details.tests
+ - field: details.failures
+ -
+ name: success_percentage
+ fields:
+ - field: details.success_percentage
+ -
+ name: rally_sanity
+ test_family: VIM
+ visualizations:
+ -
+ name: duration
+ fields:
+ - field: details.duration
+ -
+ name: tests_failures
+ fields:
+ - field: details.tests
+ -
+ name: success_percentage
+ fields:
+ - field: details.success_percentage
+ -
+ name: vping_ssh
+ test_family: VIM
+ visualizations:
+ -
+ name: duration
+ fields:
+ - field: details.duration
+ -
+ name: vping_userdata
+ test_family: VIM
+ visualizations:
+ -
+ name: duration
+ fields:
+ - field: details.duration
+ -
+ name: odl
+ test_family: Controller
+ visualizations:
+ -
+ name: tests_failures
+ fields:
+ - field: details.tests
+ - field: details.failures
+ -
+ name: success_percentage
+ fields:
+ - field: details.success_percentage
+ -
+ name: onos
+ test_family: Controller
+ visualizations:
+ -
+ name: duration
+ label: FUNCvirNet
+ fields:
+ - field: details.FUNCvirNet.duration
+ -
+ name: duration
+ label: FUNCvirNetL3
+ fields:
+ - field: details.FUNCvirNetL3.duration
+ -
+ name: tests_failures
+ label: FUNCvirNet
+ fields:
+ - field: details.FUNCvirNet.tests
+ - field: details.FUNCvirNet.failures
+ -
+ name: tests_failures
+ label: FUNCvirNetL3
+ fields:
+ - field: details.FUNCvirNetL3.tests
+ - field: details.FUNCvirNetL3.failures
+ -
+ name: vims
+ test_family: Features
+ visualizations:
+ -
+ name: duration
+ fields:
+ - field: details.vIMS.duration
+ - field: details.orchestrator.duration
+ - field: details.sig_test.duration
+ -
+ name: tests_failures
+ fields:
+ - field: details.sig_test.tests
+ - field: details.sig_test.failures
+ - field: details.sig_test.passed
+ - field: details.sig_test.skipped
+promise:
+ -
+ name: promise
+ test_family: Features
+ visualizations:
+ -
+ name: duration
+ fields:
+ - field: details.duration
+ -
+ name: tests_failures
+ fields:
+ - field: details.tests
+ - field: details.failures
+doctor:
+ -
+ name: doctor-notification
+ test_family: Features
+ visualizations:
+ -
+ name: duration
+ fields:
+ - field: details.duration