-rw-r--r--  jjb-sandbox/releng/releng-sandbox-jobs.yml | 41
-rwxr-xr-x  jjb-sandbox/releng/verify-sandbox-jobs.sh | 2
-rw-r--r--  jjb/armband/armband-ci-jobs.yml | 57
-rwxr-xr-x  jjb/armband/armband-deploy.sh | 4
-rwxr-xr-x  jjb/armband/build.sh | 2
-rw-r--r--  jjb/compass4nfv/compass-project-jobs.yml | 22
-rwxr-xr-x  jjb/daisy4nfv/daisy4nfv-build.sh | 8
-rw-r--r--  jjb/daisy4nfv/daisy4nfv-verify-jobs.yml | 2
-rw-r--r--  jjb/dovetail/dovetail-ci-jobs.yml | 21
-rw-r--r--  jjb/dovetail/dovetail-project-jobs.yml | 35
-rwxr-xr-x  jjb/dovetail/dovetail-run.sh | 6
-rwxr-xr-x  jjb/fuel/fuel-build.sh | 2
-rwxr-xr-x  jjb/fuel/fuel-deploy.sh | 2
-rw-r--r--  jjb/infra/bifrost-verify-jobs.yml | 6
-rw-r--r--  jjb/opnfv/opnfv-utils.yml | 1
-rw-r--r--  jjb/opnfv/slave-params.yml | 6
-rw-r--r--  prototypes/puppet-infracloud/hiera/common_baremetal.yaml | 5
-rw-r--r--  prototypes/puppet-infracloud/manifests/site.pp | 1
-rw-r--r--  prototypes/puppet-infracloud/modules/opnfv/manifests/server.pp | 16
-rw-r--r--  utils/installer-adapter/ApexAdapter.py | 35
-rw-r--r--  utils/installer-adapter/CompassAdapter.py | 35
-rw-r--r--  utils/installer-adapter/FuelAdapter.py | 219
-rw-r--r--  utils/installer-adapter/InstallerHandler.py | 78
-rw-r--r--  utils/installer-adapter/JoidAdapter.py | 35
-rw-r--r--  utils/installer-adapter/RelengLogger.py | 52
-rw-r--r--  utils/installer-adapter/SSHUtils.py | 130
-rw-r--r--  utils/installer-adapter/__init__.py | 0
-rw-r--r--  utils/installer-adapter/example.py | 22
-rw-r--r--  utils/test/reporting/yardstick/reporting-status.py | 50
-rw-r--r--  utils/test/reporting/yardstick/reportingUtils.py | 21
-rw-r--r--  utils/test/reporting/yardstick/scenarioResult.py | 7
-rw-r--r--  utils/test/reporting/yardstick/template/index-status-tmpl.html | 63
32 files changed, 899 insertions, 87 deletions
diff --git a/jjb-sandbox/releng/releng-sandbox-jobs.yml b/jjb-sandbox/releng/releng-sandbox-jobs.yml
index ee35f4299..aa10a4327 100644
--- a/jjb-sandbox/releng/releng-sandbox-jobs.yml
+++ b/jjb-sandbox/releng/releng-sandbox-jobs.yml
@@ -2,12 +2,13 @@
name: 'releng-sandbox-jobs'
jobs:
- 'releng-deploy-sandbox'
- - 'releng-clear-jenkins-jobs'
project: 'releng'
+ node: 'releng-sandbox'
- job-template:
name: 'releng-deploy-sandbox'
+ node: '{node}'
parameters:
- project-parameter:
@@ -30,6 +31,13 @@
- draft-published-event
- comment-added-contains-event:
comment-contains-value: 'redeploy'
+ custom-url: '$BUILD_URL deploying to $JENKINS_URL'
+ silent-start: true
+ skip-vote:
+ successful: true
+ failed: true
+ unstable: true
+ notbuilt: true
projects:
- project-compare-type: 'ANT'
project-pattern: 'releng'
@@ -39,39 +47,16 @@
file-paths:
- compare-type: ANT
pattern: jjb-sandbox/**
- - compare-type: ANT
- pattern: utils/**
+
+ wrappers: ''
builders:
- shell:
!include-raw-escape: verify-sandbox-jobs.sh
- shell: |
- #! /bin/bash
- jenkins-jobs update -r jjb-sandbox
+ #!/bin/bash
+ jenkins-jobs update --delete-old -r jjb/releng-defaults.yaml:jjb/releng-macros.yaml:jjb/opnfv/installer-params.yml:jjb/opnfv/slave-params.yml:jjb-sandbox
publishers:
- archive-artifacts:
artifacts: 'job_output/*'
-
-- job-template:
- name: 'releng-clear-jenkins-jobs'
-
- parameters:
- - project-parameter:
- project: '{project}'
- - gerrit-parameter:
- branch: 'master'
-
- scm:
- - gerrit-trigger-scm:
- credentials-id: '{ssh-credentials}'
- refspec: ''
- choosing-strategy: 'default'
-
- triggers:
- - timed: '@weekly'
-
- builders:
- - shell: |
- #! /bin/bash
- jenkins-jobs delete -r -p jjb-sandbox -x jjb-sandbox/releng
diff --git a/jjb-sandbox/releng/verify-sandbox-jobs.sh b/jjb-sandbox/releng/verify-sandbox-jobs.sh
index 8f67e742b..599016106 100755
--- a/jjb-sandbox/releng/verify-sandbox-jobs.sh
+++ b/jjb-sandbox/releng/verify-sandbox-jobs.sh
@@ -1,4 +1,4 @@
-#! /bin/bash
+#!/bin/bash
# SPDX-license-identifier: Apache-2.0
##############################################################################
# Copyright (c) 2016 Linux Foundation and others.
diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml
index 024681841..d4fa5da94 100644
--- a/jjb/armband/armband-ci-jobs.yml
+++ b/jjb/armband/armband-ci-jobs.yml
@@ -70,10 +70,14 @@
auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
- 'os-odl_l2-bgpvpn-ha':
auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
+ - 'os-odl_l2-sfc-ha':
+ auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
# NOHA scenarios
- 'os-odl_l2-nofeature-noha':
auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
+ - 'os-odl_l2-sfc-noha':
+ auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
jobs:
- '{installer}-{scenario}-{pod}-daily-{stream}'
@@ -228,7 +232,7 @@
- trigger:
name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 0 * * 1,6'
+ - timed: '0 0 * * 1'
- trigger:
name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-master-trigger'
triggers:
@@ -236,7 +240,7 @@
- trigger:
name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-master-trigger'
triggers:
- - timed: '0 0 * * 3,7'
+ - timed: '0 0 * * 3'
- trigger:
name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-master-trigger'
triggers:
@@ -245,6 +249,15 @@
name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-master-trigger'
triggers:
- timed: '0 0 * * 5'
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-master-trigger'
+ triggers:
+ - timed: '0 0 * * 6'
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-master-trigger'
+ triggers:
+ - timed: '0 0 * * 7'
+
#----------------------------------------------------------------------
# Enea Armband CI Baremetal Triggers running against colorado branch
#----------------------------------------------------------------------
@@ -268,6 +281,14 @@
name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-colorado-trigger'
triggers:
- timed: '0 16 * * 3,5'
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-colorado-trigger'
+ triggers:
+ - timed: ''
#---------------------------------------------------------------
# Enea Armband CI Virtual Triggers running against master branch
#---------------------------------------------------------------
@@ -291,6 +312,14 @@
name: 'fuel-os-odl_l2-nofeature-noha-armband-virtual-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-ha-armband-virtual-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-noha-armband-virtual-master-trigger'
+ triggers:
+ - timed: ''
#--------------------------------------------------------------------
# Enea Armband CI Virtual Triggers running against colorado branch
#--------------------------------------------------------------------
@@ -314,6 +343,14 @@
name: 'fuel-os-odl_l2-nofeature-noha-armband-virtual-colorado-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-ha-armband-virtual-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-noha-armband-virtual-colorado-trigger'
+ triggers:
+ - timed: ''
#----------------------------------------------------------
# Enea Armband POD 2 Triggers running against master branch
#----------------------------------------------------------
@@ -337,6 +374,14 @@
name: 'fuel-os-odl_l2-nofeature-noha-arm-pod2-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-ha-arm-pod2-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-noha-arm-pod2-master-trigger'
+ triggers:
+ - timed: ''
#---------------------------------------------------------------
# Enea Armband POD 2 Triggers running against colorado branch
#---------------------------------------------------------------
@@ -360,3 +405,11 @@
name: 'fuel-os-odl_l2-nofeature-noha-arm-pod2-colorado-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-ha-arm-pod2-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-noha-arm-pod2-colorado-trigger'
+ triggers:
+ - timed: ''
diff --git a/jjb/armband/armband-deploy.sh b/jjb/armband/armband-deploy.sh
index 4041a6b14..c8e58afa8 100755
--- a/jjb/armband/armband-deploy.sh
+++ b/jjb/armband/armband-deploy.sh
@@ -12,6 +12,8 @@ set -o errexit
set -o nounset
set -o pipefail
+export TERM="vt220"
+
# source the file so we get OPNFV vars
source latest.properties
@@ -47,7 +49,7 @@ mkdir -p $TMPDIR
cd $WORKSPACE
if [[ $LAB_CONFIG_URL =~ ^(git|ssh):// ]]; then
- echo "cloning $LAB_CONFIG_URL"
+ echo "Cloning securedlab repo ${GIT_BRANCH##origin/}"
git clone --quiet --branch ${GIT_BRANCH##origin/} $LAB_CONFIG_URL lab-config
LAB_CONFIG_URL=file://${WORKSPACE}/lab-config
diff --git a/jjb/armband/build.sh b/jjb/armband/build.sh
index 300306f77..a058ca158 100755
--- a/jjb/armband/build.sh
+++ b/jjb/armband/build.sh
@@ -12,6 +12,8 @@ set -o errexit
set -o nounset
set -o pipefail
+export TERM="vt220"
+
echo "Host info: $(hostname) $(hostname -I)"
cd $WORKSPACE
diff --git a/jjb/compass4nfv/compass-project-jobs.yml b/jjb/compass4nfv/compass-project-jobs.yml
index bede7de46..4d799af8e 100644
--- a/jjb/compass4nfv/compass-project-jobs.yml
+++ b/jjb/compass4nfv/compass-project-jobs.yml
@@ -16,19 +16,28 @@
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ distro:
+ - 'trusty':
+ disabled: false
+ os-version: 'trusty'
+ openstack-os-version: ''
+ - 'centos7':
+ disabled: false
+ os-version: 'centos7'
+ openstack-os-version: ''
jobs:
- - 'compass-verify-{stream}'
+ - 'compass-verify-{distro}-{stream}'
- 'compass-build-iso-{stream}'
- 'compass-build-ppa-{stream}'
- - 'compass-verify-deploy-{stream}'
+ - 'compass-verify-deploy-{distro}-{stream}'
########################
# job templates
########################
- job-template:
- name: 'compass-verify-{stream}'
+ name: 'compass-verify-{distro}-{stream}'
disabled: false
@@ -92,8 +101,11 @@
builders:
- trigger-builds:
- - project: 'compass-verify-deploy-{stream}'
+ - project: 'compass-verify-deploy-{distro}-{stream}'
current-parameters: true
+ predefined-parameters: |
+ COMPASS_OS_VERSION={os-version}
+ COMPASS_OS_VERSION_OPTION={openstack-os-version}
same-node: true
block: true
- trigger-builds:
@@ -109,7 +121,7 @@
unstable-threshold: 'FAILURE'
- job-template:
- name: 'compass-verify-deploy-{stream}'
+ name: 'compass-verify-deploy-{distro}-{stream}'
concurrent: true
diff --git a/jjb/daisy4nfv/daisy4nfv-build.sh b/jjb/daisy4nfv/daisy4nfv-build.sh
index 9eae8481b..ec11db587 100755
--- a/jjb/daisy4nfv/daisy4nfv-build.sh
+++ b/jjb/daisy4nfv/daisy4nfv-build.sh
@@ -4,3 +4,11 @@ echo "--------------------------------------------------------"
echo "This is diasy4nfv build job!"
echo "--------------------------------------------------------"
+# build output directory
+OUTPUT_DIR=$WORKSPACE/build_output
+mkdir -p $OUTPUT_DIR
+
+# start the build
+cd $WORKSPACE
+./ci/build.sh $OUTPUT_DIR
+
diff --git a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
index 6444cf8ec..e81e300c9 100644
--- a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
+++ b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
@@ -19,7 +19,7 @@
- 'basic':
slave-label: 'opnfv-build'
- 'build':
- slave-label: 'opnfv-build-ubuntu'
+ slave-label: 'opnfv-build-centos'
- 'deploy-virtual':
slave-label: 'opnfv-build'
- 'smoke-test':
diff --git a/jjb/dovetail/dovetail-ci-jobs.yml b/jjb/dovetail/dovetail-ci-jobs.yml
index 1dd1795cb..29212005d 100644
--- a/jjb/dovetail/dovetail-ci-jobs.yml
+++ b/jjb/dovetail/dovetail-ci-jobs.yml
@@ -81,6 +81,27 @@
SUT: compass
auto-trigger-name: 'daily-trigger-disabled'
<<: *colorado
+#apex CI PODs
+ - apex-verify-master:
+ slave-label: '{pod}'
+ SUT: apex
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
+ - apex-daily-master:
+ slave-label: '{pod}'
+ SUT: apex
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
+ - apex-verify-colorado:
+ slave-label: '{pod}'
+ SUT: apex
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *colorado
+ - apex-daily-colorado:
+ slave-label: '{pod}'
+ SUT: apex
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *colorado
#--------------------------------
# None-CI PODs
#--------------------------------
diff --git a/jjb/dovetail/dovetail-project-jobs.yml b/jjb/dovetail/dovetail-project-jobs.yml
index bf0552251..41fd8cdcb 100644
--- a/jjb/dovetail/dovetail-project-jobs.yml
+++ b/jjb/dovetail/dovetail-project-jobs.yml
@@ -57,9 +57,7 @@
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
builders:
- - shell: |
- echo "dovetail: verify job"
- #unittest will be added future
+ - dovetail-unit-tests
- job-template:
name: 'dovetail-merge-{stream}'
@@ -93,6 +91,31 @@
branch-pattern: '**/{branch}'
builders:
- - shell: |
- echo "dovetail: merge"
- #unittest will be added future
+ - dovetail-unit-tests
+
+################################
+#builders for dovetail project
+###############################
+- builder:
+ name: dovetail-unit-tests
+ builders:
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o pipefail
+
+ echo "Running unit tests..."
+ cd $WORKSPACE
+ virtualenv $WORKSPACE/dovetail_venv
+ source $WORKSPACE/dovetail_venv/bin/activate
+
+ #packages installation
+ easy_install -U setuptools
+ easy_install -U pip
+ pip install -r unittests/requirements.txt
+ pip install -e .
+
+ #unit tests
+ /bin/bash $WORKSPACE/unittests/unittest.sh
+
+ deactivate
diff --git a/jjb/dovetail/dovetail-run.sh b/jjb/dovetail/dovetail-run.sh
index 3f7a47bee..098b7db0c 100755
--- a/jjb/dovetail/dovetail-run.sh
+++ b/jjb/dovetail/dovetail-run.sh
@@ -34,6 +34,10 @@ fi
opts="--privileged=true --rm"
envs="-e CI_DEBUG=${CI_DEBUG} \
+ -e INSTALLER_TYPE=${INSTALLER_TYPE} \
+ -e INSTALLER_IP=${INSTALLER_IP} \
+ -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
+ -e DEPLOY_TYPE=${DEPLOY_TYPE} \
-v /var/run/docker.sock:/var/run/docker.sock \
-v /home/opnfv/dovetail/results:/home/opnfv/dovetail/results"
@@ -44,7 +48,7 @@ docker pull opnfv/dovetail:$DOCKER_TAG >$redirect
# Run docker
echo "Dovetail: docker running..."
sudo docker run ${opts} ${envs} ${labconfig} ${sshkey} opnfv/dovetail:${DOCKER_TAG} \
-"/home/opnfv/dovetail/scripts/run.py"
+"/home/opnfv/dovetail/dovetail/run.py"
echo "Dovetail: store results..."
sudo cp -r /home/opnfv/dovetail/results ./
diff --git a/jjb/fuel/fuel-build.sh b/jjb/fuel/fuel-build.sh
index 7e36a0c53..c66dc3d8d 100755
--- a/jjb/fuel/fuel-build.sh
+++ b/jjb/fuel/fuel-build.sh
@@ -11,6 +11,8 @@ set -o errexit
set -o nounset
set -o pipefail
+export TERM="vt220"
+
cd $WORKSPACE
# remove the expired items from cache
diff --git a/jjb/fuel/fuel-deploy.sh b/jjb/fuel/fuel-deploy.sh
index ef47ff0e3..48b1dac2f 100755
--- a/jjb/fuel/fuel-deploy.sh
+++ b/jjb/fuel/fuel-deploy.sh
@@ -10,6 +10,8 @@
set -o nounset
set -o pipefail
+export TERM="vt220"
+
# source the file so we get OPNFV vars
source latest.properties
diff --git a/jjb/infra/bifrost-verify-jobs.yml b/jjb/infra/bifrost-verify-jobs.yml
index 17796a832..a2a57d468 100644
--- a/jjb/infra/bifrost-verify-jobs.yml
+++ b/jjb/infra/bifrost-verify-jobs.yml
@@ -123,12 +123,6 @@
triggers:
- gerrit:
server-name: 'review.openstack.org'
- silent-start: true
- skip-vote:
- successful: true
- failed: true
- unstable: true
- notbuilt: true
escape-quotes: true
trigger-on:
- patchset-created-event:
diff --git a/jjb/opnfv/opnfv-utils.yml b/jjb/opnfv/opnfv-utils.yml
index 94a99d451..717bb3cbc 100644
--- a/jjb/opnfv/opnfv-utils.yml
+++ b/jjb/opnfv/opnfv-utils.yml
@@ -19,7 +19,6 @@
name: SLAVE_NAME
description: Slaves to prune docker images
default-slaves:
- - arm-build1
- arm-build2
- ericsson-build4
- ericsson-build5
diff --git a/jjb/opnfv/slave-params.yml b/jjb/opnfv/slave-params.yml
index 4ffaff4ae..7eca41a6d 100644
--- a/jjb/opnfv/slave-params.yml
+++ b/jjb/opnfv/slave-params.yml
@@ -162,7 +162,7 @@
description: 'Git URL to use on this Jenkins Slave'
- string:
name: LAB_CONFIG_URL
- default: ssh://git@git.enea.com/pharos/lab-config
+ default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
description: 'Base URI to the configuration directory'
- parameter:
name: 'joid-baremetal-defaults'
@@ -227,7 +227,7 @@
description: 'Git URL to use on this Jenkins Slave'
- string:
name: LAB_CONFIG_URL
- default: ssh://git@git.enea.com/pharos/lab-config
+ default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
description: 'Base URI to the configuration directory'
- parameter:
name: 'joid-virtual-defaults'
@@ -608,7 +608,7 @@
description: 'Git URL to use on this Jenkins Slave'
- string:
name: LAB_CONFIG_URL
- default: ssh://git@git.enea.com/pharos/lab-config
+ default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
description: 'Base URI to the configuration directory'
- parameter:
name: 'intel-virtual6-defaults'
diff --git a/prototypes/puppet-infracloud/hiera/common_baremetal.yaml b/prototypes/puppet-infracloud/hiera/common_baremetal.yaml
index 5ea00831c..9825ed367 100644
--- a/prototypes/puppet-infracloud/hiera/common_baremetal.yaml
+++ b/prototypes/puppet-infracloud/hiera/common_baremetal.yaml
@@ -115,7 +115,7 @@ default_network_interface: eno3
dhcp_static_mask: 255.255.255.128
dhcp_pool_start: 10.20.0.130
dhcp_pool_end: 10.20.0.254
-network_interface: eno1
+network_interface: eth1
ipv4_nameserver: 8.8.8.8
ipv4_subnet_mask: 255.255.255.0
ipv4_gateway: 172.30.13.1
@@ -131,6 +131,7 @@ ironic_inventory:
ansible_ssh_host: 172.30.13.90
ipv4_gateway: 172.30.13.1
ipv4_interface_mac: 00:1e:67:f9:9b:35
+ ipv4_subnet_mask: 255.255.255.192
name: controller00.opnfvlocal
nics:
- mac: a4:bf:01:01:a9:fc
@@ -151,6 +152,7 @@ ironic_inventory:
ipv4_address: 172.30.13.91
ansible_ssh_host: 172.30.13.91
ipv4_gateway: 172.30.13.1
+ ipv4_interface_mac: 00:1e:67:f6:9b:37
ipv4_subnet_mask: 255.255.255.0
name: compute00.opnfvlocal
nics:
@@ -168,3 +170,4 @@ neutron_subnet_gateway: '172.30.13.1'
neutron_subnet_allocation_pools:
- 'start=172.30.13.100,end=172.30.13.254'
virt_type: 'kvm'
+dib_dev_user_password: devuser
diff --git a/prototypes/puppet-infracloud/manifests/site.pp b/prototypes/puppet-infracloud/manifests/site.pp
index f09bfe2e9..8cbfef8c7 100644
--- a/prototypes/puppet-infracloud/manifests/site.pp
+++ b/prototypes/puppet-infracloud/manifests/site.pp
@@ -96,5 +96,6 @@ node 'baremetal.opnfvlocal', 'lfpod5-jumpserver' {
ipv4_nameserver => hiera('ipv4_nameserver'),
ipv4_subnet_mask => hiera('ipv4_subnet_mask'),
bridge_name => hiera('bridge_name'),
+ dib_dev_user_password => hiera('dib_dev_user_password'),
}
}
diff --git a/prototypes/puppet-infracloud/modules/opnfv/manifests/server.pp b/prototypes/puppet-infracloud/modules/opnfv/manifests/server.pp
index c4bff0962..a1e7d5d0e 100644
--- a/prototypes/puppet-infracloud/modules/opnfv/manifests/server.pp
+++ b/prototypes/puppet-infracloud/modules/opnfv/manifests/server.pp
@@ -224,6 +224,20 @@ class opnfv::server (
}
}
- # add hosts entries
+ # ensure that we have non-pass sudo, and
+ # not require tty
+ file_line { 'sudo_rule_no_pw':
+ path => '/etc/sudoers',
+ line => '%wheel ALL=(ALL) NOPASSWD: ALL',
+ }
+ file_line { 'sudo_rule_notty':
+ path => '/etc/sudoers',
+ line => 'Defaults requiretty',
+ match => '.*requiretty.*',
+ match_for_absence => true,
+ ensure => absent,
+ multiple => true,
+ }
+
create_resources('host', hiera_hash('hosts'))
}
diff --git a/utils/installer-adapter/ApexAdapter.py b/utils/installer-adapter/ApexAdapter.py
new file mode 100644
index 000000000..bf451f3d2
--- /dev/null
+++ b/utils/installer-adapter/ApexAdapter.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2016 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+from SSHUtils import SSH_Connection
+
+
+class ApexAdapter:
+
+ def __init__(self, installer_ip):
+ self.installer_ip = installer_ip
+
+ def get_deployment_info(self):
+ pass
+
+ def get_nodes(self):
+ pass
+
+ def get_controller_ips(self):
+ pass
+
+ def get_compute_ips(self):
+ pass
+
+ def get_file_from_installer(self, origin, target, options=None):
+ pass
+
+ def get_file_from_controller(self, origin, target, ip=None, options=None):
+ pass
\ No newline at end of file
diff --git a/utils/installer-adapter/CompassAdapter.py b/utils/installer-adapter/CompassAdapter.py
new file mode 100644
index 000000000..b40a8d788
--- /dev/null
+++ b/utils/installer-adapter/CompassAdapter.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2016 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+from SSHUtils import SSH_Connection
+
+
+class CompassAdapter:
+
+ def __init__(self, installer_ip):
+ self.installer_ip = installer_ip
+
+ def get_deployment_info(self):
+ pass
+
+ def get_nodes(self):
+ pass
+
+ def get_controller_ips(self):
+ pass
+
+ def get_compute_ips(self):
+ pass
+
+ def get_file_from_installer(self, origin, target, options=None):
+ pass
+
+ def get_file_from_controller(self, origin, target, ip=None, options=None):
+ pass
\ No newline at end of file
diff --git a/utils/installer-adapter/FuelAdapter.py b/utils/installer-adapter/FuelAdapter.py
new file mode 100644
index 000000000..15f0e929f
--- /dev/null
+++ b/utils/installer-adapter/FuelAdapter.py
@@ -0,0 +1,219 @@
+##############################################################################
+# Copyright (c) 2016 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+from SSHUtils import SSH_Connection
+import RelengLogger as rl
+
+
+class FuelAdapter:
+
+ def __init__(self, installer_ip, user="root", password="r00tme"):
+ self.installer_ip = installer_ip
+ self.user = user
+ self.password = password
+ self.connection = SSH_Connection(
+ installer_ip, self.user, self.password, use_system_keys=False)
+ self.logger = rl.Logger("Handler").getLogger()
+
+ def runcmd_fuel_nodes(self):
+ output, error = self.connection.run_remote_cmd('fuel nodes')
+ if len(error) > 0:
+ self.logger.error("error %s" % error)
+ return error
+ return output
+
+ def runcmd_fuel_env(self):
+ output, error = self.connection.run_remote_cmd('fuel env')
+ if len(error) > 0:
+ self.logger.error("error %s" % error)
+ return error
+ return output
+
+ def get_clusters(self):
+ environments = []
+ output = self.runcmd_fuel_env()
+ lines = output.rsplit('\n')
+ if len(lines) < 2:
+ self.logger.info("No environments found in the deployment.")
+ return None
+ else:
+ fields = lines[0].rsplit(' | ')
+
+ index_id = -1
+ index_status = -1
+ index_name = -1
+ index_release_id = -1
+
+ for i in range(0, len(fields) - 1):
+ if "id" in fields[i]:
+ index_id = i
+ elif "status" in fields[i]:
+ index_status = i
+ elif "name" in fields[i]:
+ index_name = i
+ elif "release_id" in fields[i]:
+ index_release_id = i
+
+ # order env info
+ for i in range(2, len(lines) - 1):
+ fields = lines[i].rsplit(' | ')
+ dict = {"id": fields[index_id].strip(),
+ "status": fields[index_status].strip(),
+ "name": fields[index_name].strip(),
+ "release_id": fields[index_release_id].strip()}
+ environments.append(dict)
+
+ return environments
+
+ def get_nodes(self, options=None):
+ nodes = []
+ output = self.runcmd_fuel_nodes()
+ lines = output.rsplit('\n')
+ if len(lines) < 2:
+ self.logger.info("No nodes found in the deployment.")
+ return None
+ else:
+ # get fields indexes
+ fields = lines[0].rsplit(' | ')
+
+ index_id = -1
+ index_status = -1
+ index_name = -1
+ index_cluster = -1
+ index_ip = -1
+ index_mac = -1
+ index_roles = -1
+ index_online = -1
+
+ for i in range(0, len(fields) - 1):
+ if "id" in fields[i]:
+ index_id = i
+ elif "status" in fields[i]:
+ index_status = i
+ elif "name" in fields[i]:
+ index_name = i
+ elif "cluster" in fields[i]:
+ index_cluster = i
+ elif "ip" in fields[i]:
+ index_ip = i
+ elif "mac" in fields[i]:
+ index_mac = i
+ elif "roles " in fields[i]:
+ index_roles = i
+ elif "online" in fields[i]:
+ index_online = i
+
+ # order nodes info
+ for i in range(2, len(lines) - 1):
+ fields = lines[i].rsplit(' | ')
+ dict = {"id": fields[index_id].strip(),
+ "status": fields[index_status].strip(),
+ "name": fields[index_name].strip(),
+ "cluster": fields[index_cluster].strip(),
+ "ip": fields[index_ip].strip(),
+ "mac": fields[index_mac].strip(),
+ "roles": fields[index_roles].strip(),
+ "online": fields[index_online].strip()}
+ if options and options['cluster']:
+ if fields[index_cluster].strip() == options['cluster']:
+ nodes.append(dict)
+ else:
+ nodes.append(dict)
+
+ return nodes
+
+ def get_controller_ips(self, options):
+ nodes = self.get_nodes(options=options)
+ controllers = []
+ for node in nodes:
+ if "controller" in node["roles"]:
+ controllers.append(node['ip'])
+ return controllers
+
+ def get_compute_ips(self, options=None):
+ nodes = self.get_nodes(options=options)
+ computes = []
+ for node in nodes:
+ if "compute" in node["roles"]:
+ computes.append(node['ip'])
+ return computes
+
+ def get_deployment_info(self):
+ str = "Deployment details:\n"
+ str += "\tInstaller: Fuel\n"
+ str += "\tScenario: Unknown\n"
+ sdn = "None"
+ clusters = self.get_clusters()
+ str += "\tN.Clusters: %s\n" % len(clusters)
+ for cluster in clusters:
+ cluster_dic = {'cluster': cluster['id']}
+ str += "\tCluster info:\n"
+ str += "\t ID: %s\n" % cluster['id']
+ str += "\t NAME: %s\n" % cluster['name']
+ str += "\t STATUS: %s\n" % cluster['status']
+ nodes = self.get_nodes(options=cluster_dic)
+ num_nodes = len(nodes)
+ for node in nodes:
+ if "opendaylight" in node['roles']:
+ sdn = "OpenDaylight"
+ elif "onos" in node['roles']:
+ sdn = "ONOS"
+ num_controllers = len(
+ self.get_controller_ips(options=cluster_dic))
+ num_computes = len(self.get_compute_ips(options=cluster_dic))
+ ha = False
+ if num_controllers > 1:
+ ha = True
+
+ str += "\t HA: %s\n" % ha
+ str += "\t NUM.NODES: %s\n" % num_nodes
+ str += "\t CONTROLLERS: %s\n" % num_controllers
+ str += "\t COMPUTES: %s\n" % num_computes
+ str += "\t SDN CONTR.: %s\n\n" % sdn
+ str += self.runcmd_fuel_nodes()
+ return str
+
+ def get_file_from_installer(self, remote_path, local_path, options=None):
+ self.logger.debug("Fetching %s from %s" %
+ (remote_path, self.installer_ip))
+ if self.connection.scp_get(local_path, remote_path) != 0:
+ self.logger.error("SCP failed to retrieve the file.")
+ return 1
+ self.logger.info("%s successfully copied from Fuel to %s" %
+ (remote_path, local_path))
+
+ def get_file_from_controller(self,
+ remote_path,
+ local_path,
+ ip=None,
+ options=None):
+ if ip is None:
+ controllers = self.get_controller_ips(options=options)
+ if len(controllers) == 0:
+ self.logger.info("No controllers found in the deployment.")
+ return 1
+ else:
+ target_ip = controllers[0]
+ else:
+ target_ip = ip
+
+ fuel_dir = '/root/scp/'
+ cmd = 'mkdir -p %s;rsync -Rav %s:%s %s' % (
+ fuel_dir, target_ip, remote_path, fuel_dir)
+ self.logger.info("Copying %s from %s to Fuel..." %
+ (remote_path, target_ip))
+ output, error = self.connection.run_remote_cmd(cmd)
+ self.logger.debug("Copying files from Fuel to %s..." % local_path)
+ self.get_file_from_installer(
+ fuel_dir + remote_path, local_path, options)
+ cmd = 'rm -r %s' % fuel_dir
+ output, error = self.connection.run_remote_cmd(cmd)
+ self.logger.info("%s successfully copied from %s to %s" %
+ (remote_path, target_ip, local_path))
diff --git a/utils/installer-adapter/InstallerHandler.py b/utils/installer-adapter/InstallerHandler.py
new file mode 100644
index 000000000..b81b806ca
--- /dev/null
+++ b/utils/installer-adapter/InstallerHandler.py
@@ -0,0 +1,78 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from FuelAdapter import FuelAdapter
+from ApexAdapter import ApexAdapter
+from CompassAdapter import CompassAdapter
+from JoidAdapter import JoidAdapter
+
+
+INSTALLERS = ["fuel", "apex", "compass", "joid"]
+
+
+class InstallerHandler:
+
+ def __init__(self,
+ installer,
+ installer_ip,
+ installer_user,
+ installer_pwd=None):
+ self.installer = installer.lower()
+ self.installer_ip = installer_ip
+ self.installer_user = installer_user
+ self.installer_pwd = installer_pwd
+
+ if self.installer == INSTALLERS[0]:
+ self.InstallerAdapter = FuelAdapter(self.installer_ip,
+ self.installer_user,
+ self.installer_pwd)
+ elif self.installer == INSTALLERS[1]:
+ self.InstallerAdapter = ApexAdapter(self.installer_ip)
+ elif self.installer == INSTALLERS[2]:
+ self.InstallerAdapter = CompassAdapter(self.installer_ip)
+ elif self.installer == INSTALLERS[3]:
+ self.InstallerAdapter = JoidAdapter(self.installer_ip)
+ else:
+ print("Installer %s is not valid. "
+ "Please use one of the followings: %s"
+ % (self.installer, INSTALLERS))
+ exit(1)
+
+ def get_deployment_info(self):
+ return self.InstallerAdapter.get_deployment_info()
+
+ def get_nodes(self, options=None):
+ return self.InstallerAdapter.get_nodes(options=options)
+
+ def get_controller_ips(self, options=None):
+ return self.InstallerAdapter.get_controller_ips(options=options)
+
+ def get_compute_ips(self, options=None):
+ return self.InstallerAdapter.get_compute_ips(options=options)
+
+ def get_file_from_installer(self,
+ remote_path,
+ local_path,
+ options=None):
+ return self.InstallerAdapter.get_file_from_installer(remote_path,
+ local_path,
+ options=options)
+
+ def get_file_from_controller(self,
+ remote_path,
+ local_path,
+ ip=None,
+ options=None):
+ return self.InstallerAdapter.get_file_from_controller(remote_path,
+ local_path,
+ ip=ip,
+ options=options)
+
+ def get_all(self):
+ pass
diff --git a/utils/installer-adapter/JoidAdapter.py b/utils/installer-adapter/JoidAdapter.py
new file mode 100644
index 000000000..e78ca0fae
--- /dev/null
+++ b/utils/installer-adapter/JoidAdapter.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2016 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+from SSHUtils import SSH_Connection
+
+
+class JoidAdapter:
+
+ def __init__(self, installer_ip):
+ self.installer_ip = installer_ip
+
+ def get_deployment_info(self):
+ pass
+
+ def get_nodes(self):
+ pass
+
+ def get_controller_ips(self):
+ pass
+
+ def get_compute_ips(self):
+ pass
+
+ def get_file_from_installer(self, origin, target, options=None):
+ pass
+
+ def get_file_from_controller(self, origin, target, ip=None, options=None):
+ pass
\ No newline at end of file
diff --git a/utils/installer-adapter/RelengLogger.py b/utils/installer-adapter/RelengLogger.py
new file mode 100644
index 000000000..b38e78095
--- /dev/null
+++ b/utils/installer-adapter/RelengLogger.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+#
+# jose.lausuch@ericsson.com
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Logging levels:
+# Level Numeric value
+# CRITICAL 50
+# ERROR 40
+# WARNING 30
+# INFO 20
+# DEBUG 10
+# NOTSET 0
+#
+# Usage:
+# import RelengLogger as rl
+# logger = rl.Logger("script_name").getLogger()
+# logger.info("message to be shown with - INFO - ")
+# logger.debug("message to be shown with - DEBUG -")
+
+import logging
+import os
+
+
+class Logger:
+
+ def __init__(self, logger_name, level="INFO"):
+
+ self.logger = logging.getLogger(logger_name)
+ self.logger.propagate = 0
+ self.logger.setLevel(logging.DEBUG)
+
+ ch = logging.StreamHandler()
+ formatter = logging.Formatter('%(asctime)s - %(name)s - '
+ '%(levelname)s - %(message)s')
+ ch.setFormatter(formatter)
+ if level.lower() == "debug":
+ ch.setLevel(logging.DEBUG)
+ else:
+ ch.setLevel(logging.INFO)
+ self.logger.addHandler(ch)
+
+ hdlr = logging.FileHandler('/tmp/releng.log')
+ hdlr.setFormatter(formatter)
+ hdlr.setLevel(logging.DEBUG)
+ self.logger.addHandler(hdlr)
+
+ def getLogger(self):
+ return self.logger
diff --git a/utils/installer-adapter/SSHUtils.py b/utils/installer-adapter/SSHUtils.py
new file mode 100644
index 000000000..9c92a3be1
--- /dev/null
+++ b/utils/installer-adapter/SSHUtils.py
@@ -0,0 +1,130 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+import paramiko
+from scp import SCPClient
+import time
+import RelengLogger as rl
+
+
+class SSH_Connection:
+
+ def __init__(self,
+ host,
+ user,
+ password,
+ use_system_keys=True,
+ private_key=None,
+ use_proxy=False,
+ proxy_host=None,
+ proxy_user=None,
+ proxy_password=None,
+ timeout=10):
+ self.host = host
+ self.user = user
+ self.password = password
+ self.use_system_keys = use_system_keys
+ self.private_key = private_key
+ self.use_proxy = use_proxy
+ self.proxy_host = proxy_host
+ self.proxy_user = proxy_user
+ self.proxy_password = proxy_password
+ self.timeout = timeout
+ paramiko.util.log_to_file("paramiko.log")
+ self.logger = rl.Logger("SSHUtils").getLogger()
+
+ def connect(self):
+ client = paramiko.SSHClient()
+ if self.use_system_keys:
+ client.load_system_host_keys()
+ elif self.private_key:
+ client.load_host_keys(self.private_key)
+ else:
+ client.load_host_keys('/dev/null')
+
+ client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+
+ t = self.timeout
+ proxy = None
+ if self.use_proxy:
+ proxy_command = 'ssh -o UserKnownHostsFile=/dev/null '
+ '-o StrictHostKeyChecking=no %s@%s -W %s:%s' % (self.proxy_user,
+ self.proxy_host,
+ self.host, 22)
+ proxy = paramiko.ProxyCommand(proxy_command)
+ self.logger.debug("Proxy command: %s" % proxy_command)
+ while t > 0:
+ try:
+ self.logger.debug(
+ "Trying to stablish ssh connection to %s..." % self.host)
+ client.connect(self.host,
+ username=self.user,
+ password=self.password,
+ look_for_keys=True,
+ sock=proxy,
+ pkey=self.private_key,
+ timeout=self.timeout)
+ self.logger.debug("Successfully connected to %s!" % self.host)
+ return client
+ except:
+ time.sleep(1)
+ t -= 1
+
+ if t == 0:
+ return None
+
+ def scp_put(self, local_path, remote_path):
+ client = self.connect()
+ if client:
+ scp = SCPClient(client.get_transport())
+ try:
+ scp.put(local_path, remote_path)
+ client.close()
+ return 0
+ except Exception, e:
+ self.logger.error(e)
+ client.close()
+ return 1
+ else:
+ self.logger.error("Cannot stablish ssh connection.")
+
+ def scp_get(self, local_path, remote_path):
+ client = self.connect()
+ if client:
+ scp = SCPClient(client.get_transport())
+ try:
+ scp.get(remote_path, local_path)
+ client.close()
+ return 0
+ except Exception, e:
+ self.logger.error(e)
+ client.close()
+ return 1
+ else:
+ self.logger.error("Cannot stablish ssh connection.")
+ return 1
+
+ def run_remote_cmd(self, command):
+ client = self.connect()
+ if client:
+ try:
+ stdin, stdout, stderr = client.exec_command(command)
+ out = ''
+ for line in stdout.readlines():
+ out += line
+ err = stderr.readlines()
+ client.close()
+ return out, err
+ except:
+ client.close()
+ return 1
+ else:
+ self.logger.error("Cannot stablish ssh connection.")
+ return 1
diff --git a/utils/installer-adapter/__init__.py b/utils/installer-adapter/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/utils/installer-adapter/__init__.py
diff --git a/utils/installer-adapter/example.py b/utils/installer-adapter/example.py
new file mode 100644
index 000000000..804d79c3d
--- /dev/null
+++ b/utils/installer-adapter/example.py
@@ -0,0 +1,22 @@
+# This is an example of usage of this Tool
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+
+from InstallerHandler import InstallerHandler
+
+fuel_handler = InstallerHandler(installer='fuel',
+ installer_ip='10.20.0.2',
+ installer_user='root',
+ installer_pwd='r00tme')
+print("Nodes in cluster 1:\n%s\n" %
+ fuel_handler.get_nodes(options={'cluster': '1'}))
+print("Nodes in cluster 2:\n%s\n" %
+ fuel_handler.get_nodes(options={'cluster': '2'}))
+print("Nodes:\n%s\n" % fuel_handler.get_nodes())
+print("Controller nodes:\n%s\n" % fuel_handler.get_controller_ips())
+print("Compute nodes:\n%s\n" % fuel_handler.get_compute_ips())
+print("\n%s\n" % fuel_handler.get_deployment_info())
+fuel_handler.get_file_from_installer('/root/deploy/dea.yaml', './dea.yaml')
+fuel_handler.get_file_from_controller(
+ '/etc/neutron/neutron.conf', './neutron.conf')
+fuel_handler.get_file_from_controller(
+ '/root/openrc', './openrc')
diff --git a/utils/test/reporting/yardstick/reporting-status.py b/utils/test/reporting/yardstick/reporting-status.py
index 60f1523bb..49809e9d8 100644
--- a/utils/test/reporting/yardstick/reporting-status.py
+++ b/utils/test/reporting/yardstick/reporting-status.py
@@ -8,10 +8,7 @@
#
import datetime
import jinja2
-import requests
-import sys
-import time
-import yaml
+import os
import reportingUtils as utils
import reportingConf as conf
@@ -20,6 +17,7 @@ from scenarios import config as cf
# Logger
logger = utils.getLogger("Yardstick-Status")
+reportingDate = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
logger.info("*******************************************")
logger.info("* Generating reporting scenario status *")
@@ -35,21 +33,23 @@ for version in conf.versions:
# get scenarios results data
scenario_results = utils.getScenarioStatus(installer, version)
if 'colorado' == version:
- stable_result = utils.getScenarioStatus(installer, 'stable/colorado')
- for k,v in stable_result.items():
- if not scenario_results.has_key(k):
+ stable_result = utils.getScenarioStatus(installer,
+ 'stable/colorado')
+ for k, v in stable_result.items():
+ if k not in scenario_results.keys():
scenario_results[k] = []
scenario_results[k] += stable_result[k]
scenario_result_criteria = {}
for s in scenario_results.keys():
- if cf.has_key(installer) and cf[installer].has_key(s):
+ if installer in cf.keys() and s in cf[installer].keys():
scenario_results.pop(s)
# From each scenarios get results list
for s, s_result in scenario_results.items():
logger.info("---------------------------------")
- logger.info("installer %s, version %s, scenario %s:" % (installer, version, s))
+ logger.info("installer %s, version %s, scenario %s:" % (installer,
+ version, s))
ten_criteria = len(s_result)
ten_score = 0
@@ -62,15 +62,38 @@ for version in conf.versions:
for v in four_result:
four_score += v
- s_status = str(utils.get_status(four_result, s_result))
+ s_status = str(utils.get_percent(four_result, s_result))
s_four_score = str(four_score) + '/' + str(four_criteria)
s_ten_score = str(ten_score) + '/' + str(ten_criteria)
- scenario_result_criteria[s] = sr.ScenarioResult(s_status, s_four_score, s_ten_score)
+ s_score_percent = utils.get_percent(four_result, s_result)
if '100' == s_status:
logger.info(">>>>> scenario OK, save the information")
else:
- logger.info(">>>> scenario not OK, last 4 iterations = %s, last 10 days = %s" % (s_four_score, s_ten_score))
+ logger.info(">>>> scenario not OK, last 4 iterations = %s, \
+ last 10 days = %s" % (s_four_score, s_ten_score))
+
+ # Save daily results in a file
+ path_validation_file = (conf.REPORTING_PATH +
+ "/release/" + version +
+ "/scenario_history.txt")
+
+ if not os.path.exists(path_validation_file):
+ with open(path_validation_file, 'w') as f:
+ info = 'date,scenario,installer,details,score\n'
+ f.write(info)
+
+ with open(path_validation_file, "a") as f:
+ info = (reportingDate + "," + s + "," + installer +
+ "," + s_ten_score + "," +
+ str(s_score_percent) + "\n")
+ f.write(info)
+
+ scenario_result_criteria[s] = sr.ScenarioResult(s_status,
+ s_four_score,
+ s_ten_score,
+ s_score_percent)
+
logger.info("--------------------------")
templateLoader = jinja2.FileSystemLoader(conf.REPORTING_PATH)
@@ -82,7 +105,8 @@ for version in conf.versions:
outputText = template.render(scenario_results=scenario_result_criteria,
installer=installer,
period=conf.PERIOD,
- version=version)
+ version=version,
+ date=reportingDate)
with open(conf.REPORTING_PATH + "/release/" + version +
"/index-status-" + installer + ".html", "wb") as fh:
diff --git a/utils/test/reporting/yardstick/reportingUtils.py b/utils/test/reporting/yardstick/reportingUtils.py
index 71eb9196c..ec9ed76dc 100644
--- a/utils/test/reporting/yardstick/reportingUtils.py
+++ b/utils/test/reporting/yardstick/reportingUtils.py
@@ -32,7 +32,7 @@ def getLogger(module):
def getScenarioStatus(installer, version):
url = (conf.URL_BASE + "?case=" + "scenario_status" +
"&installer=" + installer +
- "&version=" + version +"&period=" + str(conf.PERIOD))
+ "&version=" + version + "&period=" + str(conf.PERIOD))
request = Request(url)
try:
@@ -53,7 +53,7 @@ def getScenarioStatus(installer, version):
scenario_results[r['scenario']] = []
scenario_results[r['scenario']].append(r)
- for k,v in scenario_results.items():
+ for k, v in scenario_results.items():
# scenario_results[k] = v[:conf.LASTEST_TESTS]
s_list = []
for element in v:
@@ -66,20 +66,25 @@ def getScenarioStatus(installer, version):
# return scenario_results
return result_dict
+
def subfind(given_list, pattern_list):
+
for i in range(len(given_list)):
- if given_list[i] == pattern_list[0] and given_list[i:i + conf.LASTEST_TESTS] == pattern_list:
+ if given_list[i] == pattern_list[0] and \
+ given_list[i:i + conf.LASTEST_TESTS] == pattern_list:
return True
return False
-def get_percent(status):
-
+
+def _get_percent(status):
+
if status * 100 % 6:
return round(float(status) * 100 / 6, 1)
else:
return status * 100 / 6
-def get_status(four_list, ten_list):
+
+def get_percent(four_list, ten_list):
four_score = 0
ten_score = 0
@@ -97,13 +102,13 @@ def get_status(four_list, ten_list):
else:
status = four_score + 1
- return get_percent(status)
+ return _get_percent(status)
def _test():
status = getScenarioStatus("compass", "master")
print "status:++++++++++++++++++++++++"
- print json.dumps(status,indent=4)
+ print json.dumps(status, indent=4)
if __name__ == '__main__': # pragma: no cover
diff --git a/utils/test/reporting/yardstick/scenarioResult.py b/utils/test/reporting/yardstick/scenarioResult.py
index 61ffb2ce7..1f7eb2b24 100644
--- a/utils/test/reporting/yardstick/scenarioResult.py
+++ b/utils/test/reporting/yardstick/scenarioResult.py
@@ -9,10 +9,12 @@
class ScenarioResult(object):
- def __init__(self, status, four_days_score='', ten_days_score=''):
+ def __init__(self, status, four_days_score='', ten_days_score='',
+ score_percent=0.0):
self.status = status
self.four_days_score = four_days_score
self.ten_days_score = ten_days_score
+ self.score_percent = score_percent
def getStatus(self):
return self.status
@@ -22,3 +24,6 @@ class ScenarioResult(object):
def getFourDaysScore(self):
return self.four_days_score
+
+ def getScorePercent(self):
+ return self.score_percent
diff --git a/utils/test/reporting/yardstick/template/index-status-tmpl.html b/utils/test/reporting/yardstick/template/index-status-tmpl.html
index 602ce8a74..5a4dc347c 100644
--- a/utils/test/reporting/yardstick/template/index-status-tmpl.html
+++ b/utils/test/reporting/yardstick/template/index-status-tmpl.html
@@ -3,9 +3,56 @@
<meta charset="utf-8">
<!-- Bootstrap core CSS -->
<link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css" rel="stylesheet">
- <link href="default.css" rel="stylesheet">
+ <link href="../../../css/default.css" rel="stylesheet">
<script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js"></script>
<script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script>
+ <script type="text/javascript" src="http://d3js.org/d3.v2.min.js"></script>
+ <script type="text/javascript" src="../../../js/gauge.js"></script>
+ <script type="text/javascript" src="../../../js/trend.js"></script>
+ <script>
+ function onDocumentReady() {
+ // Gauge management
+ {% for scenario in scenario_results.keys() -%}
+ var gaugeScenario{{loop.index}} = gauge('#gaugeScenario{{loop.index}}');
+ {%- endfor %}
+ // assign success rate to the gauge
+ function updateReadings() {
+ {% for scenario in scenario_results.keys() -%}
+ gaugeScenario{{loop.index}}.update({{scenario_results[scenario].getScorePercent()}});
+ {%- endfor %}
+ }
+ updateReadings();
+ }
+
+ // trend line management
+ //d3.csv("./scenario_history.txt", function(data) {
+ d3.csv("./scenario_history.txt", function(data) {
+ // ***************************************
+ // Create the trend line
+ {% for scenario in scenario_results.keys() -%}
+ // for scenario {{scenario}}
+ // Filter results
+ var trend{{loop.index}} = data.filter(function(row) {
+ return row["scenario"]=="{{scenario}}" && row["installer"]=="{{installer}}";
+ })
+ // Parse the date
+ trend{{loop.index}}.forEach(function(d) {
+ d.date = parseDate(d.date);
+ d.score = +d.score
+ });
+ // Draw the trend line
+ var mytrend = trend("#trend_svg{{loop.index}}",trend{{loop.index}})
+ // ****************************************
+ {%- endfor %}
+ });
+ if ( !window.isLoaded ) {
+ window.addEventListener("load", function() {
+ onDocumentReady();
+ }, false);
+ } else {
+ onDocumentReady();
+ }
+ </script>
<script type="text/javascript">
$(document).ready(function (){
$(".btn-more").click(function() {
@@ -18,10 +65,10 @@
<body>
<div class="container">
<div class="masthead">
- <h3 class="text-muted">Yardstick status page ({{version}})</h3>
+ <h3 class="text-muted">Yardstick status page ({{version}}, {{date}})</h3>
<nav>
<ul class="nav nav-justified">
- <li class="active"><a href="index.html">Home</a></li>
+ <li class="active"><a href="http://testresults.opnfv.org/reporting/index.html">Home</a></li>
<li><a href="index-status-apex.html">Apex</a></li>
<li><a href="index-status-compass.html">Compass</a></li>
<li><a href="index-status-fuel.html">Fuel</a></li>
@@ -42,15 +89,15 @@
<tr>
<th width="40%">Scenario</th>
<th width="20%">Status</th>
- <th width="20%">Last 4 Iterations</th>
- <th width="20%">Last 10 Days</th>
+ <th width="20%">Trend</th>
+ <th width="10%">Last 4 Iterations</th>
+ <th width="10%">Last 10 Days</th>
</tr>
{% for scenario,result in scenario_results.iteritems() -%}
<tr class="tr-ok">
<td>{{scenario}}</td>
- <td>
- <img src="../../img/gauge_{{ scenario_results[scenario].getStatus() }}.png">
- </td>
+ <td><div id="gaugeScenario{{loop.index}}"></div></td>
+ <td><div id="trend_svg{{loop.index}}"></div></td>
<td>{{scenario_results[scenario].getFourDaysScore()}}</td>
<td>{{scenario_results[scenario].getTenDaysScore()}}</td>
</tr>