-rwxr-xr-x  jjb/apex/apex-deploy.sh  |   6
-rw-r--r--  jjb/apex/apex.yml  |  44
-rw-r--r--  jjb/apex/apex.yml.j2  |  44
-rw-r--r--  jjb/armband/armband-ci-jobs.yml  |  72
-rw-r--r--  jjb/bottlenecks/bottlenecks-run-suite.sh  |  17
-rw-r--r--  jjb/compass4nfv/compass-dovetail-jobs.yml  |   2
-rw-r--r--  jjb/doctor/doctor.yml  |   2
-rw-r--r--  jjb/dovetail/dovetail-ci-jobs.yml  |   4
-rwxr-xr-x  jjb/dovetail/dovetail-run.sh  |  21
-rw-r--r--  jjb/functest/functest-alpine.sh  |   1
-rw-r--r--  jjb/functest/functest-daily-jobs.yml  |   9
-rwxr-xr-x  jjb/functest/set-functest-env.sh  |   2
-rw-r--r--  jjb/global/slave-params.yml  |  12
-rw-r--r--  jjb/releng/opnfv-docker.yml  |   5
-rw-r--r--  jjb/securedlab/check-jinja2.yml  |   6
-rw-r--r--  jjb/xci/osa-periodic-jobs.yml  | 258
-rw-r--r--  jjb/yardstick/yardstick-daily-jobs.yml  |  14
-rw-r--r--  utils/push-test-logs.sh  |   2
-rw-r--r--  utils/test/reporting/docker/Dockerfile  |  17
-rwxr-xr-x  utils/test/reporting/docker/web_server.sh  |   2
-rw-r--r--  utils/test/reporting/reporting/functest/template/index-status-tmpl.html  |  29
-rw-r--r--  utils/test/reporting/setup.py  |   1
-rw-r--r--  utils/test/testapi/opnfv_testapi/resources/models.py  |   4
-rw-r--r--  utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py  | 237
-rw-r--r--  utils/test/testapi/opnfv_testapi/resources/scenario_models.py  |   6
-rw-r--r--  utils/test/testapi/opnfv_testapi/router/url_mappings.py  |   4
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/resources/test_scenario.py  | 255
27 files changed, 777 insertions(+), 299 deletions(-)
diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh
index ed02714fd..ce9544b28 100755
--- a/jjb/apex/apex-deploy.sh
+++ b/jjb/apex/apex-deploy.sh
@@ -10,6 +10,8 @@ echo "Starting the Apex deployment."
echo "--------------------------------------------------------"
echo
+sudo rm -rf /tmp/tmp*
+
if [ -z "$DEPLOY_SCENARIO" ]; then
echo "Deploy scenario not set!"
exit 1
@@ -56,7 +58,9 @@ else
BASE=$CONFIG
IMAGES=$RESOURCES
LIB="/var/opt/opnfv/lib"
-
+ sudo mkdir -p /var/log/apex
+ sudo chmod 777 /var/log/apex
+ cd /var/log/apex
fi
# Install Dependencies
diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index f0e0535ea..a3cd7b3b1 100644
--- a/jjb/apex/apex.yml
+++ b/jjb/apex/apex.yml
@@ -180,7 +180,7 @@
branch-pattern: '**/{branch}'
file-paths:
- compare-type: ANT
- pattern: 'tests/**'
+ pattern: 'apex/tests/**'
properties:
- logrotate-default
- throttle:
@@ -243,11 +243,18 @@
pattern: 'lib/**'
- compare-type: ANT
pattern: 'config/**'
+ - compare-type: ANT
+ pattern: 'apex/**'
properties:
- logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify.*'
- throttle:
- max-per-node: 3
+ max-per-node: 1
max-total: 10
option: 'project'
@@ -373,8 +380,13 @@
properties:
- logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify.*'
- throttle:
- max-per-node: 3
+ max-per-node: 1
max-total: 10
option: 'project'
@@ -588,7 +600,7 @@
wrappers:
- timeout:
- timeout: 120
+ timeout: 140
fail: true
parameters:
@@ -779,18 +791,18 @@
enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
abort-all-job: false
git-revision: false
- - multijob:
- name: StorPerf
- condition: ALWAYS
- projects:
- - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
+# - multijob:
+# name: StorPerf
+# condition: ALWAYS
+# projects:
+# - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
+# node-parameters: true
+# current-parameters: false
+# predefined-parameters:
+# DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+# kill-phase-on: NEVER
+# abort-all-job: false
+# git-revision: false
# Build status is always success due to the conditional plugin prefetching
# build status before multijob phases execute
# - conditional-step:
diff --git a/jjb/apex/apex.yml.j2 b/jjb/apex/apex.yml.j2
index 5a44dbc00..3f69ff622 100644
--- a/jjb/apex/apex.yml.j2
+++ b/jjb/apex/apex.yml.j2
@@ -92,7 +92,7 @@
branch-pattern: '**/{branch}'
file-paths:
- compare-type: ANT
- pattern: 'tests/**'
+ pattern: 'apex/tests/**'
properties:
- logrotate-default
- throttle:
@@ -155,11 +155,18 @@
pattern: 'lib/**'
- compare-type: ANT
pattern: 'config/**'
+ - compare-type: ANT
+ pattern: 'apex/**'
properties:
- logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify.*'
- throttle:
- max-per-node: 3
+ max-per-node: 1
max-total: 10
option: 'project'
@@ -285,8 +292,13 @@
properties:
- logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify.*'
- throttle:
- max-per-node: 3
+ max-per-node: 1
max-total: 10
option: 'project'
@@ -500,7 +512,7 @@
wrappers:
- timeout:
- timeout: 120
+ timeout: 140
fail: true
parameters:
@@ -691,18 +703,18 @@
enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
abort-all-job: false
git-revision: false
- - multijob:
- name: StorPerf
- condition: ALWAYS
- projects:
- - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
+# - multijob:
+# name: StorPerf
+# condition: ALWAYS
+# projects:
+# - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
+# node-parameters: true
+# current-parameters: false
+# predefined-parameters:
+# DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+# kill-phase-on: NEVER
+# abort-all-job: false
+# git-revision: false
# Build status is always success due to the conditional plugin prefetching
# build status before multijob phases execute
# - conditional-step:
diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml
index cdc14e4e0..f1bff072c 100644
--- a/jjb/armband/armband-ci-jobs.yml
+++ b/jjb/armband/armband-ci-jobs.yml
@@ -52,16 +52,16 @@
slave-label: arm-pod2
installer: fuel
<<: *euphrates
- - arm-pod3:
- slave-label: arm-pod3
+ - arm-pod5:
+ slave-label: arm-pod5
installer: fuel
<<: *euphrates
- arm-pod4:
slave-label: arm-pod4
installer: fuel
<<: *euphrates
- - arm-virtual1:
- slave-label: arm-virtual1
+ - arm-virtual2:
+ slave-label: arm-virtual2
installer: fuel
<<: *euphrates
#--------------------------------
@@ -71,16 +71,16 @@
slave-label: arm-pod2
installer: fuel
<<: *master
- - arm-pod3:
- slave-label: arm-pod3
+ - arm-pod5:
+ slave-label: arm-pod5
installer: fuel
<<: *master
- arm-pod4:
slave-label: arm-pod4
installer: fuel
<<: *master
- - arm-virtual1:
- slave-label: arm-virtual1
+ - arm-virtual2:
+ slave-label: arm-virtual2
installer: fuel
<<: *master
#--------------------------------
@@ -412,31 +412,31 @@
# Enea Armband Non CI Virtual Triggers running against euphrates branch
#--------------------------------------------------------------------
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-arm-virtual1-euphrates-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-arm-virtual2-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-arm-virtual1-euphrates-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-arm-virtual2-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-arm-virtual1-euphrates-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-arm-virtual2-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-arm-virtual1-euphrates-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-arm-virtual2-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-arm-virtual1-euphrates-trigger'
+ name: 'fuel-os-odl_l2-nofeature-noha-arm-virtual2-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-arm-virtual1-euphrates-trigger'
+ name: 'fuel-os-odl_l2-sfc-ha-arm-virtual2-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-arm-virtual1-euphrates-trigger'
+ name: 'fuel-os-odl_l2-sfc-noha-arm-virtual2-euphrates-trigger'
triggers:
- timed: ''
@@ -444,31 +444,31 @@
# Enea Armband Non CI Virtual Triggers running against master branch
#--------------------------------------------------------------------
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-arm-virtual1-master-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-arm-virtual2-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-arm-virtual1-master-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-arm-virtual2-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-arm-virtual1-master-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-arm-virtual2-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-arm-virtual1-master-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-arm-virtual2-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-arm-virtual1-master-trigger'
+ name: 'fuel-os-odl_l2-nofeature-noha-arm-virtual2-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-arm-virtual1-master-trigger'
+ name: 'fuel-os-odl_l2-sfc-ha-arm-virtual2-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-arm-virtual1-master-trigger'
+ name: 'fuel-os-odl_l2-sfc-noha-arm-virtual2-master-trigger'
triggers:
- timed: ''
@@ -538,62 +538,62 @@
# Enea Armband POD 3 Triggers running against master branch
#----------------------------------------------------------
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-arm-pod3-master-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-arm-pod5-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-arm-pod3-master-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-arm-pod5-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-arm-pod3-master-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-arm-pod5-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod3-master-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod5-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-arm-pod3-master-trigger'
+ name: 'fuel-os-odl_l2-nofeature-noha-arm-pod5-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-arm-pod3-master-trigger'
+ name: 'fuel-os-odl_l2-sfc-ha-arm-pod5-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-arm-pod3-master-trigger'
+ name: 'fuel-os-odl_l2-sfc-noha-arm-pod5-master-trigger'
triggers:
- timed: ''
#---------------------------------------------------------------
# Enea Armband POD 3 Triggers running against euphrates branch
#---------------------------------------------------------------
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-arm-pod3-euphrates-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-arm-pod5-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-arm-pod3-euphrates-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-arm-pod5-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-arm-pod3-euphrates-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-arm-pod5-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod3-euphrates-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod5-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-arm-pod3-euphrates-trigger'
+ name: 'fuel-os-odl_l2-nofeature-noha-arm-pod5-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-arm-pod3-euphrates-trigger'
+ name: 'fuel-os-odl_l2-sfc-ha-arm-pod5-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-arm-pod3-euphrates-trigger'
+ name: 'fuel-os-odl_l2-sfc-noha-arm-pod5-euphrates-trigger'
triggers:
- timed: ''
#--------------------------------------------------------------------------
diff --git a/jjb/bottlenecks/bottlenecks-run-suite.sh b/jjb/bottlenecks/bottlenecks-run-suite.sh
index 341aab590..a7570431d 100644
--- a/jjb/bottlenecks/bottlenecks-run-suite.sh
+++ b/jjb/bottlenecks/bottlenecks-run-suite.sh
@@ -16,6 +16,10 @@ RELENG_REPO=${WORKSPACE}/releng
[ -d ${RELENG_REPO} ] && rm -rf ${RELENG_REPO}
git clone https://gerrit.opnfv.org/gerrit/releng ${RELENG_REPO} >${redirect}
+YARDSTICK_REPO=${WORKSPACE}/yardstick
+[ -d ${YARDSTICK_REPO} ] && rm -rf ${YARDSTICK_REPO}
+git clone https://gerrit.opnfv.org/gerrit/yardstick ${YARDSTICK_REPO} >${redirect}
+
OPENRC=/tmp/admin_rc.sh
OS_CACERT=/tmp/os_cacert
@@ -85,11 +89,18 @@ if [[ $SUITE_NAME == *posca* ]]; then
echo "Don't support to generate pod.yaml on ${INSTALLER_TYPE} currently."
fi
- cmd="sudo python ${RELENG_REPO}/utils/create_pod_file.py -t ${INSTALLER_TYPE} \
+ if [[ ${INSTALLER_TYPE} != compass ]]; then
+ cmd="sudo python ${RELENG_REPO}/utils/create_pod_file.py -t ${INSTALLER_TYPE} \
-i ${INSTALLER_IP} ${options} -f ${BOTTLENECKS_CONFIG}/pod.yaml \
-s ${BOTTLENECKS_CONFIG}/id_rsa"
- echo ${cmd}
- ${cmd}
+ echo ${cmd}
+ ${cmd}
+ else
+ cmd="sudo cp ${YARDSTICK_REPO}/etc/yardstick/nodes/compass_sclab_virtual/pod.yaml \
+ ${BOTTLENECKS_CONFIG}"
+ echo ${cmd}
+ ${cmd}
+ fi
deactivate
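
The hunk above makes pod.yaml provisioning installer-aware: non-compass installers keep generating it with releng's create_pod_file.py, while compass copies a static file from the freshly cloned yardstick repo. Condensed into one helper purely for readability (provision_pod_yaml is illustrative; the script itself builds and echoes a cmd variable instead):

    #!/bin/bash
    # Sketch of the installer-aware pod.yaml provisioning added above.
    # INSTALLER_TYPE, INSTALLER_IP, options, RELENG_REPO, YARDSTICK_REPO and
    # BOTTLENECKS_CONFIG are the variables the job script already defines.
    provision_pod_yaml() {
        if [[ ${INSTALLER_TYPE} != compass ]]; then
            # Generate pod.yaml by querying the installer.
            sudo python ${RELENG_REPO}/utils/create_pod_file.py -t ${INSTALLER_TYPE} \
                -i ${INSTALLER_IP} ${options} -f ${BOTTLENECKS_CONFIG}/pod.yaml \
                -s ${BOTTLENECKS_CONFIG}/id_rsa
        else
            # Compass virtual PODs use the static file shipped with yardstick.
            sudo cp ${YARDSTICK_REPO}/etc/yardstick/nodes/compass_sclab_virtual/pod.yaml \
                ${BOTTLENECKS_CONFIG}
        fi
    }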
diff --git a/jjb/compass4nfv/compass-dovetail-jobs.yml b/jjb/compass4nfv/compass-dovetail-jobs.yml
index 67d1e4eee..101db8241 100644
--- a/jjb/compass4nfv/compass-dovetail-jobs.yml
+++ b/jjb/compass4nfv/compass-dovetail-jobs.yml
@@ -19,7 +19,7 @@
#------------------------------------
pod:
- baremetal:
- slave-label: compass-baremetal
+ slave-label: compass-baremetal-branch
os-version: 'xenial'
<<: *danube
#-----------------------------------
diff --git a/jjb/doctor/doctor.yml b/jjb/doctor/doctor.yml
index 23d12def4..d535d6109 100644
--- a/jjb/doctor/doctor.yml
+++ b/jjb/doctor/doctor.yml
@@ -39,7 +39,7 @@
pod:
- arm-pod2:
slave-label: '{pod}'
- - arm-pod3:
+ - arm-pod5:
slave-label: '{pod}'
jobs:
diff --git a/jjb/dovetail/dovetail-ci-jobs.yml b/jjb/dovetail/dovetail-ci-jobs.yml
index 42e1ad585..92b1db356 100644
--- a/jjb/dovetail/dovetail-ci-jobs.yml
+++ b/jjb/dovetail/dovetail-ci-jobs.yml
@@ -142,12 +142,12 @@
SUT: fuel
auto-trigger-name: 'daily-trigger-disabled'
<<: *master
- - arm-pod3:
+ - arm-pod5:
slave-label: '{pod}'
SUT: fuel
auto-trigger-name: 'daily-trigger-disabled'
<<: *master
- - arm-virtual1:
+ - arm-virtual2:
slave-label: '{pod}'
SUT: fuel
auto-trigger-name: 'daily-trigger-disabled'
diff --git a/jjb/dovetail/dovetail-run.sh b/jjb/dovetail/dovetail-run.sh
index 346a1ef08..7dd6a2ddc 100755
--- a/jjb/dovetail/dovetail-run.sh
+++ b/jjb/dovetail/dovetail-run.sh
@@ -122,13 +122,26 @@ if [ "$INSTALLER_TYPE" == "apex" ]; then
sudo scp $ssh_options stack@${INSTALLER_IP}:~/.ssh/id_rsa ${DOVETAIL_CONFIG}/id_rsa
fi
+image_path=${HOME}/opnfv/dovetail/images
+if [[ ! -d ${image_path} ]]; then
+ mkdir -p ${image_path}
+fi
# sdnvpn test case needs to download this image first before running
-echo "Download image ubuntu-16.04-server-cloudimg-amd64-disk1.img ..."
-wget -q -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_CONFIG}
+ubuntu_image=${image_path}/ubuntu-16.04-server-cloudimg-amd64-disk1.img
+if [[ ! -f ${ubuntu_image} ]]; then
+ echo "Download image ubuntu-16.04-server-cloudimg-amd64-disk1.img ..."
+ wget -q -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${image_path}
+fi
+sudo cp ${ubuntu_image} ${DOVETAIL_CONFIG}
# functest needs to download this image first before running
-echo "Download image cirros-0.3.5-x86_64-disk.img ..."
-wget -q -nc http://download.cirros-cloud.net/0.3.5/cirros-0.3.5-x86_64-disk.img -P ${DOVETAIL_CONFIG}
+cirros_image=${image_path}/cirros-0.3.5-x86_64-disk.img
+if [[ ! -f ${cirros_image} ]]; then
+ echo "Download image cirros-0.3.5-x86_64-disk.img ..."
+ wget -q -nc http://download.cirros-cloud.net/0.3.5/cirros-0.3.5-x86_64-disk.img -P ${image_path}
+fi
+sudo cp ${cirros_image} ${DOVETAIL_CONFIG}
+
opts="--privileged=true -id"
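
The dovetail-run.sh change above caches the Ubuntu and CirrOS images under ${HOME}/opnfv/dovetail/images, downloads them only when missing, and then copies them into ${DOVETAIL_CONFIG}. A minimal sketch of the same download-if-missing pattern; fetch_cached is a made-up helper name, while the URL and paths come from the hunk:

    #!/bin/bash
    # Download a file into a persistent cache unless it is already there,
    # then copy it to the directory the consumer expects.
    fetch_cached() {
        local url=$1 cache_dir=$2 dest_dir=$3
        local name
        name=$(basename "${url}")
        mkdir -p "${cache_dir}"
        if [[ ! -f ${cache_dir}/${name} ]]; then
            echo "Download image ${name} ..."
            wget -q -nc "${url}" -P "${cache_dir}"
        fi
        sudo cp "${cache_dir}/${name}" "${dest_dir}"
    }

    fetch_cached http://download.cirros-cloud.net/0.3.5/cirros-0.3.5-x86_64-disk.img \
                 ${HOME}/opnfv/dovetail/images ${DOVETAIL_CONFIG}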
diff --git a/jjb/functest/functest-alpine.sh b/jjb/functest/functest-alpine.sh
index da098862a..f0e08e171 100644
--- a/jjb/functest/functest-alpine.sh
+++ b/jjb/functest/functest-alpine.sh
@@ -66,6 +66,7 @@ fi
volumes="${images_vol} ${results_vol} ${sshkey_vol} ${rc_file_vol} ${cacert_file_vol}"
+set +e
tiers=(healthcheck smoke features vnf)
for tier in ${tiers[@]}; do
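
The set +e added above matters because the script runs the functest tiers in a loop: under errexit, the first tier whose container run exits non-zero would abort the whole job instead of letting the remaining tiers run. A minimal sketch of the intent, where run_tier stands in for the real per-tier docker invocation (hypothetical helper, not from the script):

    #!/bin/bash
    set -e                     # earlier setup steps should still fail fast
    # ... prepare volumes, credentials, images ...
    set +e                     # from here on, one failing tier must not abort the loop
    tiers=(healthcheck smoke features vnf)
    for tier in "${tiers[@]}"; do
        run_tier "${tier}"     # hypothetical helper wrapping the docker run
        if [[ $? -ne 0 ]]; then
            echo "tier ${tier} failed, continuing with the next one"
        fi
    done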
diff --git a/jjb/functest/functest-daily-jobs.yml b/jjb/functest/functest-daily-jobs.yml
index f14ca758f..23649fc08 100644
--- a/jjb/functest/functest-daily-jobs.yml
+++ b/jjb/functest/functest-daily-jobs.yml
@@ -154,7 +154,7 @@
slave-label: '{pod}'
installer: fuel
<<: *master
- - arm-pod3:
+ - arm-pod5:
slave-label: '{pod}'
installer: fuel
<<: *master
@@ -162,7 +162,7 @@
slave-label: '{pod}'
installer: fuel
<<: *master
- - arm-virtual1:
+ - arm-virtual2:
slave-label: '{pod}'
installer: fuel
<<: *master
@@ -190,7 +190,7 @@
slave-label: '{pod}'
installer: fuel
<<: *danube
- - arm-pod3:
+ - arm-pod5:
slave-label: '{pod}'
installer: fuel
<<: *danube
@@ -198,7 +198,7 @@
slave-label: '{pod}'
installer: fuel
<<: *danube
- - arm-virtual1:
+ - arm-virtual2:
slave-label: '{pod}'
installer: fuel
<<: *danube
@@ -418,6 +418,7 @@
- ./functest-env-presetup.sh
- ../../utils/fetch_os_creds.sh
- ./functest-alpine.sh
+ - ../../utils/push-test-logs.sh
- builder:
name: functest-daily
diff --git a/jjb/functest/set-functest-env.sh b/jjb/functest/set-functest-env.sh
index 7d9e737e7..e54c3bf13 100755
--- a/jjb/functest/set-functest-env.sh
+++ b/jjb/functest/set-functest-env.sh
@@ -33,7 +33,7 @@ if [ "$BRANCH" != 'stable/danube' ]; then
echo "Functest: Download images that will be used by test cases"
images_dir="${HOME}/opnfv/functest/images"
chmod +x ${WORKSPACE}/functest/ci/download_images.sh
- ${WORKSPACE}/functest/ci/download_images.sh ${images_dir}
+ ${WORKSPACE}/functest/ci/download_images.sh ${images_dir} > ${redirect} 2>&1
images_vol="-v ${images_dir}:/home/opnfv/functest/images"
echo "Functest: Images successfully downloaded"
fi
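
The download step now sends both stdout and stderr to ${redirect}, keeping the verbose image download out of the Jenkins console while still honoring whatever the script set redirect to earlier (typically a log file or /dev/null; that value is an assumption here). The pattern in isolation:

    #!/bin/bash
    redirect=${redirect:-/dev/null}     # assumption: defined earlier in the real script
    noisy_download() { echo "progress..."; echo "warning" >&2; }
    noisy_download > ${redirect} 2>&1   # stdout and stderr both go to ${redirect}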
diff --git a/jjb/global/slave-params.yml b/jjb/global/slave-params.yml
index f5de021a6..9234206a5 100644
--- a/jjb/global/slave-params.yml
+++ b/jjb/global/slave-params.yml
@@ -817,15 +817,15 @@
default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
description: 'Base URI to the configuration directory'
- parameter:
- name: 'arm-pod3-defaults'
+ name: 'arm-pod5-defaults'
parameters:
- node:
name: SLAVE_NAME
description: 'Slave name on Jenkins'
allowed-slaves:
- - arm-pod3
+ - arm-pod5
default-slaves:
- - arm-pod3
+ - arm-pod5
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -853,15 +853,15 @@
default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
description: 'Base URI to the configuration directory'
- parameter:
- name: 'arm-virtual1-defaults'
+ name: 'arm-virtual2-defaults'
parameters:
- node:
name: SLAVE_NAME
description: 'Slave name on Jenkins'
allowed-slaves:
- - arm-virtual1
+ - arm-virtual2
default-slaves:
- - arm-virtual1
+ - arm-virtual2
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
diff --git a/jjb/releng/opnfv-docker.yml b/jjb/releng/opnfv-docker.yml
index 9d27329ed..414eba255 100644
--- a/jjb/releng/opnfv-docker.yml
+++ b/jjb/releng/opnfv-docker.yml
@@ -60,6 +60,11 @@
dockerdir: 'docker/storperf-master'
<<: *master
<<: *other-receivers
+ - 'storperf-graphite':
+ project: 'storperf'
+ dockerdir: 'docker/storperf-graphite'
+ <<: *master
+ <<: *other-receivers
- 'storperf-httpfrontend':
project: 'storperf'
dockerdir: 'docker/storperf-httpfrontend'
diff --git a/jjb/securedlab/check-jinja2.yml b/jjb/securedlab/check-jinja2.yml
index 1e85536e7..430ced560 100644
--- a/jjb/securedlab/check-jinja2.yml
+++ b/jjb/securedlab/check-jinja2.yml
@@ -70,6 +70,12 @@
pattern: '**/*.jinja2'
- compare-type: ANT
pattern: '**/*.yaml'
+ skip-vote:
+ successful: true
+ failed: true
+ unstable: true
+ notbuilt: true
+
builders:
- check-jinja
diff --git a/jjb/xci/osa-periodic-jobs.yml b/jjb/xci/osa-periodic-jobs.yml
index 350ee766b..833575039 100644
--- a/jjb/xci/osa-periodic-jobs.yml
+++ b/jjb/xci/osa-periodic-jobs.yml
@@ -1,149 +1,225 @@
- project:
- project: 'releng-xci'
+ name: 'opnfv-osa-periodic'
- name: 'os-periodic'
-#--------------------------------
-# Branch Anchors
-#--------------------------------
-# the versions stated here default to branches which then later
-# on used for checking out the branches, pulling in head of the branch.
- master: &master
- stream: master
- openstack-osa-version: '{stream}'
- opnfv-releng-version: 'master'
- gs-pathname: ''
- ocata: &ocata
- stream: ocata
- openstack-osa-version: 'stable/{stream}'
- opnfv-releng-version: 'master'
- gs-pathname: '/{stream}'
+ project: 'releng-xci'
#--------------------------------
-# XCI PODs
+# branches
#--------------------------------
- pod:
- - virtual:
- <<: *master
- - virtual:
- <<: *ocata
+ stream:
+ - master:
+ branch: '{stream}'
#--------------------------------
-# Supported Distros
+# distros
#--------------------------------
distro:
- 'xenial':
disabled: false
- slave-label: xci-xenial-virtual
- dib-os-release: 'xenial'
- dib-os-element: 'ubuntu-minimal'
- dib-os-packages: 'vlan,vim,less,bridge-utils,sudo,language-pack-en,iputils-ping,rsyslog,curl,python,debootstrap,ifenslave,ifenslave-2.6,lsof,lvm2,tcpdump,nfs-kernel-server,chrony,iptables'
- extra-dib-elements: 'openssh-server'
- 'centos7':
disabled: true
- slave-label: xci-centos7-virtual
- dib-os-release: '7'
- dib-os-element: 'centos7'
- dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
- extra-dib-elements: 'openssh-server'
- 'suse':
disabled: true
- slave-label: xci-suse-virtual
- dib-os-release: '42.2'
- dib-os-element: 'opensuse-minimal'
- dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
- extra-dib-elements: 'openssh-server'
-
+#--------------------------------
+# type
+#--------------------------------
+ type:
+ - virtual
+#--------------------------------
+# phases
+#--------------------------------
+ phase:
+ - 'deploy'
+ - 'healthcheck'
#--------------------------------
# jobs
#--------------------------------
jobs:
- - 'osa-deploy-{pod}-{distro}-periodic-{stream}'
-
+ - 'osa-periodic-{distro}-{type}-{stream}'
+ - 'osa-periodic-{phase}-{type}-{stream}'
#--------------------------------
# job templates
#--------------------------------
- job-template:
- name: 'osa-deploy-{pod}-{distro}-periodic-{stream}'
+ name: 'osa-periodic-{distro}-{type}-{stream}'
+
+ project-type: multijob
disabled: '{obj:disabled}'
concurrent: false
properties:
+ - logrotate-default
- build-blocker:
use-build-blocker: true
blocking-jobs:
- - '^xci-os.*'
- - '^xci-deploy.*'
- - '^xci-functest.*'
- - '^bifrost-.*periodic.*'
- - '^osa-.*periodic.*'
+ - 'xci-verify-.*'
+ - 'bifrost-verify-.*'
+ - 'bifrost-periodic-.*'
+ - 'osa-verify-.*'
+ - 'osa-periodic-.*'
block-level: 'NODE'
+
+ wrappers:
+ - ssh-agent-wrapper
+ - build-timeout:
+ timeout: 240
+ - fix-workspace-permissions
+
+ scm:
+ - git-scm-osa
+
+ triggers:
+ - pollscm:
+ cron: "@midnight"
+ ignore-post-commit-hooks: True
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - label:
+ name: SLAVE_LABEL
+ default: 'xci-virtual-{distro}'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - multijob:
+ name: deploy
+ condition: SUCCESSFUL
+ projects:
+ - name: 'osa-periodic-deploy-{type}-{stream}'
+ current-parameters: true
+ predefined-parameters: |
+ DISTRO={distro}
+ DEPLOY_SCENARIO=os-nosdn-nofeature-noha
+ git-revision: true
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: healthcheck
+ condition: SUCCESSFUL
+ projects:
+ - name: 'osa-periodic-healthcheck-{type}-{stream}'
+ current-parameters: true
+ predefined-parameters: |
+ DISTRO={distro}
+ DEPLOY_SCENARIO=os-nosdn-nofeature-noha
+ FUNCTEST_SUITE_NAME=healthcheck
+ node-parameters: true
+ kill-phase-on: NEVER
+ abort-all-job: false
+
+- job-template:
+ name: 'osa-periodic-{phase}-{type}-{stream}'
+
+ disabled: false
+
+ concurrent: true
+
+ properties:
- logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'xci-verify-deploy-.*'
+ - 'xci-verify-healthcheck-.*'
+ - 'bifrost-verify-.*'
+ - 'bifrost-periodic-.*'
+ - 'osa-verify-.*'
+ - 'osa-periodic-.*'
+ block-level: 'NODE'
parameters:
- project-parameter:
project: '{project}'
- branch: '{opnfv-releng-version}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- - string:
- name: XCI_FLAVOR
- default: 'ha'
+ branch: '{branch}'
+ - label:
+ name: SLAVE_LABEL
+ default: 'xci-virtual-{distro}'
- string:
name: OPENSTACK_OSA_VERSION
- default: '{openstack-osa-version}'
- - string:
- name: OPNFV_RELENG_VERSION
- default: '{opnfv-releng-version}'
+ default: 'master'
- string:
name: DISTRO
- default: '{distro}'
+ default: 'xenial'
- string:
- name: DIB_OS_RELEASE
- default: '{dib-os-release}'
+ name: DEPLOY_SCENARIO
+ default: 'os-nosdn-nofeature-noha'
- string:
- name: DIB_OS_ELEMENT
- default: '{dib-os-element}'
+ name: XCI_FLAVOR
+ default: 'mini'
- string:
- name: DIB_OS_PACKAGES
- default: '{dib-os-packages}'
+ name: XCI_LOOP
+ default: 'periodic'
- string:
- name: EXTRA_DIB_ELEMENTS
- default: '{extra-dib-elements}'
+ name: OPNFV_RELENG_DEV_PATH
+ default: $WORKSPACE/releng-xci
- string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - label:
- name: SLAVE_LABEL
- default: '{slave-label}'
+ name: FUNCTEST_SUITE_NAME
+ default: 'healthcheck'
- string:
name: ANSIBLE_VERBOSITY
- default: ''
+ default: '-vvvv'
- string:
- name: XCI_LOOP
- default: 'periodic'
-
- wrappers:
- - fix-workspace-permissions
+ name: FORCE_MASTER
+ default: 'true'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
scm:
- - git-scm
+ - git-scm-osa
- # trigger is disabled until we know which jobs we will have
- # and adjust stuff accordingly
- triggers:
- - timed: '' # '@midnight'
+ wrappers:
+ - ssh-agent-wrapper
+ - build-timeout:
+ timeout: 240
+ - fix-workspace-permissions
builders:
- description-setter:
- description: "Built on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - 'osa-deploy-builder'
+ description: "Built on $NODE_NAME"
+ - 'osa-periodic-{phase}-macro'
-#---------------------------
+#--------------------------------
# builder macros
-#---------------------------
+#--------------------------------
- builder:
- name: osa-deploy-builder
+ name: 'osa-periodic-deploy-macro'
builders:
- - shell:
- !include-raw: ./xci-deploy.sh
+ - shell: |
+ #!/bin/bash
+
+ # here we will
+ # - clone releng-xci repo as the jobs are running against openstack gerrit
+ # and we need to clone releng-xci ourselves to $OPNFV_RELENG_DEV_PATH
+ # - run sources-branch-updater.sh from osa to update/pin the role versions
+ # at the time this job gets triggered against osa master, in case the
+ # deployment succeeds and we decide to bump the version used by xci
+ # - copy generated role versions into $OPNFV_RELENG_DEV_PATH/xci/file
+ # - start the deployment by executing xci-deploy.sh as usual
+ #
+ # we might need to pin the versions of openstack services as well.
+
+ echo "Hello World!"
+
+- builder:
+ name: 'osa-periodic-healthcheck-macro'
+ builders:
+ - shell: |
+ #!/bin/bash
+
+ echo "Hello World!"
+#--------------------------------
+# scm macro
+#--------------------------------
+- scm:
+ name: git-scm-osa
+ scm:
+ - git:
+ url: https://review.openstack.org/p/openstack/openstack-ansible.git
+ branches:
+ - master
+ timeout: 15
diff --git a/jjb/yardstick/yardstick-daily-jobs.yml b/jjb/yardstick/yardstick-daily-jobs.yml
index 007384b7a..39935abc2 100644
--- a/jjb/yardstick/yardstick-daily-jobs.yml
+++ b/jjb/yardstick/yardstick-daily-jobs.yml
@@ -176,22 +176,22 @@
installer: fuel
auto-trigger-name: 'daily-trigger-disabled'
<<: *danube
- - arm-pod3:
+ - arm-pod5:
slave-label: '{pod}'
installer: fuel
auto-trigger-name: 'daily-trigger-disabled'
<<: *master
- - arm-pod3:
+ - arm-pod5:
slave-label: '{pod}'
installer: fuel
auto-trigger-name: 'daily-trigger-disabled'
<<: *danube
- - arm-virtual1:
+ - arm-virtual2:
slave-label: '{pod}'
installer: fuel
auto-trigger-name: 'daily-trigger-disabled'
<<: *master
- - arm-virtual1:
+ - arm-virtual2:
slave-label: '{pod}'
installer: fuel
auto-trigger-name: 'daily-trigger-disabled'
@@ -381,7 +381,7 @@
default: '-i 104.197.68.199:8086'
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-arm-virtual1'
+ name: 'yardstick-params-arm-virtual2'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
@@ -464,7 +464,7 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-arm-pod3'
+ name: 'yardstick-params-arm-pod5'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
@@ -515,4 +515,4 @@
- trigger:
name: 'yardstick-daily-huawei-pod4-trigger'
triggers:
- - timed: '0 1 * * *'
+ - timed: ''
\ No newline at end of file
diff --git a/utils/push-test-logs.sh b/utils/push-test-logs.sh
index 79190ec2f..518d20ae5 100644
--- a/utils/push-test-logs.sh
+++ b/utils/push-test-logs.sh
@@ -27,7 +27,7 @@ node_list=(\
'ericsson-pod1' 'ericsson-pod2' \
'ericsson-virtual1' 'ericsson-virtual2' 'ericsson-virtual3' \
'ericsson-virtual4' 'ericsson-virtual5' 'ericsson-virtual12' \
-'arm-pod1' 'arm-pod3' \
+'arm-pod1' 'arm-pod5' \
'huawei-pod1' 'huawei-pod2' 'huawei-pod3' 'huawei-pod4' 'huawei-pod5' \
'huawei-pod6' 'huawei-pod7' 'huawei-pod12' \
'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4' \
diff --git a/utils/test/reporting/docker/Dockerfile b/utils/test/reporting/docker/Dockerfile
index f5168d1ae..f2357909d 100644
--- a/utils/test/reporting/docker/Dockerfile
+++ b/utils/test/reporting/docker/Dockerfile
@@ -27,19 +27,28 @@ ENV CONFIG_REPORTING_YAML ${working_dir}/reporting.yaml
WORKDIR ${HOME}
# Packaged dependencies
RUN apt-get update && apt-get install -y \
+build-essential \
ssh \
+curl \
+gnupg \
python-pip \
+python-dev \
+python-setuptools \
git-core \
-nodejs \
-npm \
supervisor \
--no-install-recommends
-RUN pip install --upgrade pip
+RUN pip install --upgrade pip && easy_install -U setuptools==30.0.0
-RUN git clone --depth 1 https://gerrit.opnfv.org/gerrit/releng ${HOME}/releng
+RUN git clone --depth 1 https://gerrit.opnfv.org/gerrit/releng /home/opnfv/releng
RUN pip install -r ${working_dir}/requirements.txt
+RUN sh -c 'curl -sL https://deb.nodesource.com/setup_8.x | bash -' \
+ && apt-get install -y nodejs \
+ && npm install -g bower \
+ && npm install -g grunt \
+ && npm install -g grunt-cli
+
WORKDIR ${working_dir}
RUN python setup.py install
RUN docker/reporting.sh
diff --git a/utils/test/reporting/docker/web_server.sh b/utils/test/reporting/docker/web_server.sh
index a34c11dd7..0dd8df73d 100755
--- a/utils/test/reporting/docker/web_server.sh
+++ b/utils/test/reporting/docker/web_server.sh
@@ -9,8 +9,6 @@ echo "daemon off;" >> /etc/nginx/nginx.conf
# supervisor config
cp /home/opnfv/releng/utils/test/reporting/docker/supervisor.conf /etc/supervisor/conf.d/
-ln -s /usr/bin/nodejs /usr/bin/node
-
# Manage Angular front end
cd pages && /bin/bash angular.sh
diff --git a/utils/test/reporting/reporting/functest/template/index-status-tmpl.html b/utils/test/reporting/reporting/functest/template/index-status-tmpl.html
index 74d410e96..50fc648aa 100644
--- a/utils/test/reporting/reporting/functest/template/index-status-tmpl.html
+++ b/utils/test/reporting/reporting/functest/template/index-status-tmpl.html
@@ -90,7 +90,7 @@ $(document).ready(function (){
<div class="panel-heading"><h4><b>List of last scenarios ({{version}}) run over the last {{period}} days </b></h4></div>
<table class="table">
<tr>
- <th width="40%">Scenario</th>
+ <th width="40%">HA Scenario</th>
<th width="20%">Status</th>
<th width="20%">Trend</th>
<th width="10%">Score</th>
@@ -98,14 +98,39 @@ $(document).ready(function (){
</tr>
{% for scenario,iteration in scenario_stats.iteritems() -%}
<tr class="tr-ok">
+ {% if '-ha' in scenario -%}
<td><a href={{scenario_results[scenario].getUrlLastRun()}}>{{scenario}}</a></td>
<td><div id="gaugeScenario{{loop.index}}"></div></td>
<td><div id="trend_svg{{loop.index}}"></div></td>
<td>{{scenario_results[scenario].getScore()}}</td>
<td>{{iteration}}</td>
+ {%- endif %}
+ </tr>
+ {%- endfor %}
+ <br>
+ </table>
+ <br>
+ <table class="table">
+ <tr>
+ <th width="40%">NOHA Scenario</th>
+ <th width="20%">Status</th>
+ <th width="20%">Trend</th>
+ <th width="10%">Score</th>
+ <th width="10%">Iteration</th>
+ </tr>
+ {% for scenario,iteration in scenario_stats.iteritems() -%}
+ <tr class="tr-ok">
+ {% if '-noha' in scenario -%}
+ <td><a href={{scenario_results[scenario].getUrlLastRun()}}>{{scenario}}</a></td>
+ <td><div id="gaugeScenario{{loop.index}}"></div></td>
+ <td><div id="trend_svg{{loop.index}}"></div></td>
+ <td>{{scenario_results[scenario].getScore()}}</td>
+ <td>{{iteration}}</td>
+ {%- endif %}
</tr>
{%- endfor %}
- </table>
+ </table>
+
</div>
diff --git a/utils/test/reporting/setup.py b/utils/test/reporting/setup.py
index a52d90555..17849f67b 100644
--- a/utils/test/reporting/setup.py
+++ b/utils/test/reporting/setup.py
@@ -8,7 +8,6 @@
# http://www.apache.org/licenses/LICENSE-2.0
# pylint: disable=missing-docstring
-
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
diff --git a/utils/test/testapi/opnfv_testapi/resources/models.py b/utils/test/testapi/opnfv_testapi/resources/models.py
index 6f04cc236..e70a6ed23 100644
--- a/utils/test/testapi/opnfv_testapi/resources/models.py
+++ b/utils/test/testapi/opnfv_testapi/resources/models.py
@@ -61,11 +61,11 @@ class ModelBase(object):
'{} has no attribute {}'.format(cls.__name__, k))
value = v
if isinstance(v, dict) and k in attr_parser:
- value = attr_parser[k].from_dict(v)
+ value = attr_parser[k].from_dict_with_raise(v)
elif isinstance(v, list) and k in attr_parser:
value = []
for item in v:
- value.append(attr_parser[k].from_dict(item))
+ value.append(attr_parser[k].from_dict_with_raise(item))
t.__setattr__(k, value)
diff --git a/utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py b/utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py
index d215d18b9..e9c19a7a4 100644
--- a/utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py
+++ b/utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py
@@ -114,8 +114,21 @@ class ScenarioGURHandler(GenericScenarioHandler):
self._get_one(query={'name': name})
pass
+ @swagger.operation(nickname="updateScenarioName")
def put(self, name):
- pass
+ """
+ @description: update scenario, only rename is supported currently
+ @param body: fields to be updated
+ @type body: L{ScenarioUpdateRequest}
+ @in body: body
+ @rtype: L{Scenario}
+ @return 200: update success
+ @raise 404: scenario not exist
+ @raise 403: nothing to update
+ """
+ query = {'name': name}
+ db_keys = ['name']
+ self._update(query=query, db_keys=db_keys)
@swagger.operation(nickname="deleteScenarioByName")
def delete(self, name):
@@ -147,6 +160,12 @@ class ScenarioUpdater(object):
('projects', 'put'): self._update_requests_update_projects,
('projects', 'delete'): self._update_requests_delete_projects,
('owner', 'put'): self._update_requests_change_owner,
+ ('versions', 'post'): self._update_requests_add_versions,
+ ('versions', 'put'): self._update_requests_update_versions,
+ ('versions', 'delete'): self._update_requests_delete_versions,
+ ('installers', 'post'): self._update_requests_add_installers,
+ ('installers', 'put'): self._update_requests_update_installers,
+ ('installers', 'delete'): self._update_requests_delete_installers,
}
updates[(item, action)](self.data)
@@ -210,42 +229,16 @@ class ScenarioUpdater(object):
@iter_installers
@iter_versions
def _update_requests_add_projects(self, version):
- exists = list()
- malformat = list()
- for n in self.body:
- try:
- f_n = models.ScenarioProject.from_dict_with_raise(n)
- if not any(o.project == f_n.project for o in version.projects):
- version.projects.append(f_n)
- else:
- exists.append(n['project'])
- except Exception as e:
- malformat.append(e.message)
- if malformat:
- raises.BadRequest(message.bad_format(malformat))
- elif exists:
- raises.Conflict(message.exist('projects', exists))
+ version.projects = self._update_with_body(models.ScenarioProject,
+ 'project',
+ version.projects)
@iter_installers
@iter_versions
def _update_requests_update_projects(self, version):
- exists = list()
- malformat = list()
- projects = list()
- for n in self.body:
- try:
- f_n = models.ScenarioProject.from_dict_with_raise(n)
- if not any(o.project == f_n.project for o in projects):
- projects.append(models.ScenarioProject.from_dict(n))
- else:
- exists.append(n['project'])
- except:
- malformat.append(n)
- if malformat:
- raises.BadRequest(message.bad_format(malformat))
- elif exists:
- raises.Forbidden(message.exist('projects', exists))
- version.projects = projects
+ version.projects = self._update_with_body(models.ScenarioProject,
+ 'project',
+ list())
@iter_installers
@iter_versions
@@ -257,12 +250,66 @@ class ScenarioUpdater(object):
def _update_requests_change_owner(self, version):
version.owner = self.body.get('owner')
+ @iter_installers
+ def _update_requests_add_versions(self, installer):
+ installer.versions = self._update_with_body(models.ScenarioVersion,
+ 'version',
+ installer.versions)
+
+ @iter_installers
+ def _update_requests_update_versions(self, installer):
+ installer.versions = self._update_with_body(models.ScenarioVersion,
+ 'version',
+ list())
+
+ @iter_installers
+ def _update_requests_delete_versions(self, installer):
+ installer.versions = self._remove_versions(installer.versions)
+
+ def _update_requests_add_installers(self, scenario):
+ scenario.installers = self._update_with_body(models.ScenarioInstaller,
+ 'installer',
+ scenario.installers)
+
+ def _update_requests_update_installers(self, scenario):
+ scenario.installers = self._update_with_body(models.ScenarioInstaller,
+ 'installer',
+ list())
+
+ def _update_requests_delete_installers(self, scenario):
+ scenario.installers = self._remove_installers(scenario.installers)
+
+ def _update_with_body(self, clazz, field, withs):
+ exists = list()
+ malformat = list()
+ for new in self.body:
+ try:
+ format_new = clazz.from_dict_with_raise(new)
+ new_name = getattr(format_new, field)
+ if not any(getattr(o, field) == new_name for o in withs):
+ withs.append(format_new)
+ else:
+ exists.append(new_name)
+ except Exception as error:
+ malformat.append(error.message)
+ if malformat:
+ raises.BadRequest(message.bad_format(malformat))
+ elif exists:
+ raises.Conflict(message.exist('{}s'.format(field), exists))
+ return withs
+
def _filter_installers(self, installers):
return self._filter('installer', installers)
+ def _remove_installers(self, installers):
+ return self._remove('installer', installers)
+
def _filter_versions(self, versions):
return self._filter('version', versions)
+ def _remove_versions(self, versions):
+ return self._remove('version', versions)
+
def _filter_projects(self, projects):
return self._filter('project', projects)
@@ -602,3 +649,127 @@ class ScenarioOwnerHandler(GenericScenarioUpdateHandler):
locators={'scenario': scenario,
'installer': None,
'version': None})
+
+
+class ScenarioVersionsHandler(GenericScenarioUpdateHandler):
+ @swagger.operation(nickname="addVersionsUnderScenario")
+ def post(self, scenario):
+ """
+ @description: add versions to scenario
+ @notes: add one or multiple versions
+ POST /api/v1/scenarios/<scenario_name>/versions? \
+ installer=<installer_name>
+ @param body: versions to be added
+ @type body: C{list} of L{ScenarioVersion}
+ @in body: body
+ @param installer: installer type
+ @type installer: L{string}
+ @in installer: query
+ @required installer: True
+ @return 200: versions are added.
+ @raise 400: bad schema
+ @raise 409: conflict, version already exists
+ @raise 404: scenario/installer not exist
+ """
+ self.do_update('versions',
+ 'post',
+ locators={'scenario': scenario,
+ 'installer': None})
+
+ @swagger.operation(nickname="updateVersionsUnderScenario")
+ def put(self, scenario):
+ """
+ @description: replace all versions
+ @notes: substitute all versions as a totality
+ PUT /api/v1/scenarios/<scenario_name>/versions? \
+ installer=<installer_name>
+ @param body: new versions
+ @type body: C{list} of L{ScenarioVersion}
+ @in body: body
+ @param installer: installer type
+ @type installer: L{string}
+ @in installer: query
+ @required installer: True
+ @return 200: replace versions success.
+ @raise 400: bad schema
+ @raise 404: scenario/installer not exist
+ """
+ self.do_update('versions',
+ 'put',
+ locators={'scenario': scenario,
+ 'installer': None})
+
+ @swagger.operation(nickname="deleteVersionsUnderScenario")
+ def delete(self, scenario):
+ """
+ @description: delete one or multiple versions
+ @notes: delete one or multiple versions
+ DELETE /api/v1/scenarios/<scenario_name>/versions? \
+ installer=<installer_name>
+ @param body: versions(names) to be deleted
+ @type body: C{list} of L{string}
+ @in body: body
+ @param installer: installer type
+ @type installer: L{string}
+ @in installer: query
+ @required installer: True
+ @return 200: delete versions success.
+ @raise 404: scenario/installer not exist
+ """
+ self.do_update('versions',
+ 'delete',
+ locators={'scenario': scenario,
+ 'installer': None})
+
+
+class ScenarioInstallersHandler(GenericScenarioUpdateHandler):
+ @swagger.operation(nickname="addInstallersUnderScenario")
+ def post(self, scenario):
+ """
+ @description: add installers to scenario
+ @notes: add one or multiple installers
+ POST /api/v1/scenarios/<scenario_name>/installers
+ @param body: installers to be added
+ @type body: C{list} of L{ScenarioInstaller}
+ @in body: body
+ @return 200: installers are added.
+ @raise 400: bad schema
+ @raise 409: conflict, installer already exists
+ @raise 404: scenario not exist
+ """
+ self.do_update('installers',
+ 'post',
+ locators={'scenario': scenario})
+
+ @swagger.operation(nickname="updateInstallersUnderScenario")
+ def put(self, scenario):
+ """
+ @description: replace all installers
+ @notes: substitute all installers as a totality
+ PUT /api/v1/scenarios/<scenario_name>/installers
+ @param body: new installers
+ @type body: C{list} of L{ScenarioInstaller}
+ @in body: body
+ @return 200: replace installers success.
+ @raise 400: bad schema
+ @raise 404: scenario/installer not exist
+ """
+ self.do_update('installers',
+ 'put',
+ locators={'scenario': scenario})
+
+ @swagger.operation(nickname="deleteInstallersUnderScenario")
+ def delete(self, scenario):
+ """
+ @description: delete one or multiple installers
+ @notes: delete one or multiple installers
+ DELETE /api/v1/scenarios/<scenario_name>/installers
+ @param body: installers(names) to be deleted
+ @type body: C{list} of L{string}
+ @in body: body
+ @return 200: delete installers success.
+ @raise 404: scenario/installer not exist
+ """
+ self.do_update('installers',
+ 'delete',
+ locators={'scenario': scenario})
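
The two new handlers expose /versions and /installers sub-resources under a scenario (wired up in url_mappings.py below), with the request bodies described in the docstrings above. A quick sketch of exercising them with curl; host, port and scenario name are assumptions for illustration:

    #!/bin/bash
    API=http://testapi-host:8000/api/v1      # assumed TestAPI endpoint
    SCENARIO=nosdn-nofeature-ha              # assumed existing scenario name

    # Add a version under one installer (body: list of ScenarioVersion)
    curl -X POST "${API}/scenarios/${SCENARIO}/versions?installer=apex" \
         -H 'Content-Type: application/json' \
         -d '[{"version": "euphrates"}]'

    # Replace all installers of the scenario (body: list of ScenarioInstaller)
    curl -X PUT "${API}/scenarios/${SCENARIO}/installers" \
         -H 'Content-Type: application/json' \
         -d '[{"installer": "apex"}]'

    # Delete versions by name (body: list of version names)
    curl -X DELETE "${API}/scenarios/${SCENARIO}/versions?installer=apex" \
         -H 'Content-Type: application/json' \
         -d '["master"]'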
diff --git a/utils/test/testapi/opnfv_testapi/resources/scenario_models.py b/utils/test/testapi/opnfv_testapi/resources/scenario_models.py
index c6da76b81..d950ed1d7 100644
--- a/utils/test/testapi/opnfv_testapi/resources/scenario_models.py
+++ b/utils/test/testapi/opnfv_testapi/resources/scenario_models.py
@@ -167,6 +167,12 @@ class ScenarioChangeOwnerRequest(models.ModelBase):
@swagger.model()
+class ScenarioUpdateRequest(models.ModelBase):
+ def __init__(self, name=None):
+ self.name = name
+
+
+@swagger.model()
class Scenario(models.ModelBase):
"""
@property installers:
diff --git a/utils/test/testapi/opnfv_testapi/router/url_mappings.py b/utils/test/testapi/opnfv_testapi/router/url_mappings.py
index 9c9556c6b..3e3ab87aa 100644
--- a/utils/test/testapi/opnfv_testapi/router/url_mappings.py
+++ b/utils/test/testapi/opnfv_testapi/router/url_mappings.py
@@ -64,6 +64,10 @@ mappings = [
scenario_handlers.ScenarioProjectsHandler),
(r"/api/v1/scenarios/([^/]+)/owner",
scenario_handlers.ScenarioOwnerHandler),
+ (r"/api/v1/scenarios/([^/]+)/versions",
+ scenario_handlers.ScenarioVersionsHandler),
+ (r"/api/v1/scenarios/([^/]+)/installers",
+ scenario_handlers.ScenarioInstallersHandler),
# static path
(r'/(.*\.(css|png|gif|js|html|json|map|woff2|woff|ttf))',
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_scenario.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_scenario.py
index 50a8c8d2d..1367fc669 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_scenario.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_scenario.py
@@ -2,7 +2,7 @@ import functools
import httplib
import json
import os
-from copy import deepcopy
+
from datetime import datetime
from opnfv_testapi.common import message
@@ -50,15 +50,15 @@ class TestScenarioBase(base.TestBase):
self.assertEqual(scenario, models.Scenario.from_dict(req))
@staticmethod
- def _set_query(*args):
+ def set_query(*args):
uri = ''
for arg in args:
uri += arg + '&'
return uri[0: -1]
- def _get_and_assert(self, name, req=None):
+ def get_and_assert(self, name):
code, body = self.get(name)
- self.assert_res(code, body, req)
+ self.assert_res(code, body, self.req_d)
class TestScenarioCreate(TestScenarioBase):
@@ -97,25 +97,25 @@ class TestScenarioGet(TestScenarioBase):
self.scenario_2 = self.create_return_name(self.req_2)
def test_getByName(self):
- self._get_and_assert(self.scenario_1, self.req_d)
+ self.get_and_assert(self.scenario_1)
def test_getAll(self):
self._query_and_assert(query=None, reqs=[self.req_d, self.req_2])
def test_queryName(self):
- query = self._set_query('name=nosdn-nofeature-ha')
+ query = self.set_query('name=nosdn-nofeature-ha')
self._query_and_assert(query, reqs=[self.req_d])
def test_queryInstaller(self):
- query = self._set_query('installer=apex')
+ query = self.set_query('installer=apex')
self._query_and_assert(query, reqs=[self.req_d])
def test_queryVersion(self):
- query = self._set_query('version=master')
+ query = self.set_query('version=master')
self._query_and_assert(query, reqs=[self.req_d])
def test_queryProject(self):
- query = self._set_query('project=functest')
+ query = self.set_query('project=functest')
self._query_and_assert(query, reqs=[self.req_d, self.req_2])
# close due to random fail, open again after solve it in another patch
@@ -170,14 +170,21 @@ class TestScenarioUpdate(TestScenarioBase):
def update_url_fixture(item):
def _update_url_fixture(xstep):
def wrapper(self, *args, **kwargs):
+ self.update_url = '{}/{}'.format(self.scenario_url, item)
locator = None
if item in ['projects', 'owner']:
locator = 'installer={}&version={}'.format(
self.installer,
self.version)
- self.update_url = '{}/{}?{}'.format(self.scenario_url,
- item,
- locator)
+ elif item in ['versions']:
+ locator = 'installer={}'.format(
+ self.installer)
+ elif item in ['rename']:
+ self.update_url = self.scenario_url
+
+ if locator:
+ self.update_url = '{}?{}'.format(self.update_url, locator)
+
xstep(self, *args, **kwargs)
return wrapper
return _update_url_fixture
@@ -186,139 +193,257 @@ class TestScenarioUpdate(TestScenarioBase):
def _update_partial(set_update):
@functools.wraps(set_update)
def wrapper(self):
- update, scenario = set_update(self, deepcopy(self.req_d))
- code, body = getattr(self, operate)(update, self.scenario)
- getattr(self, expected)(code, scenario)
+ update = set_update(self)
+ code, body = getattr(self, operate)(update)
+ getattr(self, expected)(code)
return wrapper
return _update_partial
@update_partial('_add', '_success')
- def test_addScore(self, scenario):
+ def test_addScore(self):
add = models.ScenarioScore(date=str(datetime.now()), score='11/12')
- projects = scenario['installers'][0]['versions'][0]['projects']
+ projects = self.req_d['installers'][0]['versions'][0]['projects']
functest = filter(lambda f: f['project'] == 'functest', projects)[0]
functest['scores'].append(add.format())
self.update_url = '{}/scores?{}'.format(self.scenario_url,
self.locate_project)
- return add, scenario
+ return add
@update_partial('_add', '_success')
- def test_addTrustIndicator(self, scenario):
+ def test_addTrustIndicator(self):
add = models.ScenarioTI(date=str(datetime.now()), status='gold')
- projects = scenario['installers'][0]['versions'][0]['projects']
+ projects = self.req_d['installers'][0]['versions'][0]['projects']
functest = filter(lambda f: f['project'] == 'functest', projects)[0]
functest['trust_indicators'].append(add.format())
self.update_url = '{}/trust_indicators?{}'.format(self.scenario_url,
self.locate_project)
- return add, scenario
+ return add
@update_partial('_add', '_success')
- def test_addCustoms(self, scenario):
- add = ['odl', 'parser', 'vping_ssh']
- projects = scenario['installers'][0]['versions'][0]['projects']
+ def test_addCustoms(self):
+ adds = ['odl', 'parser', 'vping_ssh']
+ projects = self.req_d['installers'][0]['versions'][0]['projects']
functest = filter(lambda f: f['project'] == 'functest', projects)[0]
- functest['customs'] = list(set(functest['customs'] + add))
+ functest['customs'] = list(set(functest['customs'] + adds))
self.update_url = '{}/customs?{}'.format(self.scenario_url,
self.locate_project)
- return add, scenario
+ return adds
@update_partial('_update', '_success')
- def test_updateCustoms(self, scenario):
- news = ['odl', 'parser', 'vping_ssh']
- projects = scenario['installers'][0]['versions'][0]['projects']
+ def test_updateCustoms(self):
+ updates = ['odl', 'parser', 'vping_ssh']
+ projects = self.req_d['installers'][0]['versions'][0]['projects']
functest = filter(lambda f: f['project'] == 'functest', projects)[0]
- functest['customs'] = news
+ functest['customs'] = updates
self.update_url = '{}/customs?{}'.format(self.scenario_url,
self.locate_project)
- return news, scenario
+ return updates
@update_partial('_delete', '_success')
- def test_deleteCustoms(self, scenario):
- obsoletes = ['vping_ssh']
- projects = scenario['installers'][0]['versions'][0]['projects']
+ def test_deleteCustoms(self):
+ deletes = ['vping_ssh']
+ projects = self.req_d['installers'][0]['versions'][0]['projects']
functest = filter(lambda f: f['project'] == 'functest', projects)[0]
functest['customs'] = ['healthcheck']
self.update_url = '{}/customs?{}'.format(self.scenario_url,
self.locate_project)
- return obsoletes, scenario
+ return deletes
@update_url_fixture('projects')
@update_partial('_add', '_success')
- def test_addProjects_succ(self, scenario):
+ def test_addProjects_succ(self):
add = models.ScenarioProject(project='qtip').format()
- scenario['installers'][0]['versions'][0]['projects'].append(add)
- return [add], scenario
+ self.req_d['installers'][0]['versions'][0]['projects'].append(add)
+ return [add]
@update_url_fixture('projects')
@update_partial('_add', '_conflict')
- def test_addProjects_already_exist(self, scenario):
+ def test_addProjects_already_exist(self):
add = models.ScenarioProject(project='functest').format()
- scenario['installers'][0]['versions'][0]['projects'].append(add)
- return [add], scenario
+ return [add]
@update_url_fixture('projects')
@update_partial('_add', '_bad_request')
- def test_addProjects_bad_schema(self, scenario):
+ def test_addProjects_bad_schema(self):
add = models.ScenarioProject(project='functest').format()
add['score'] = None
- scenario['installers'][0]['versions'][0]['projects'].append(add)
- return [add], scenario
+ return [add]
@update_url_fixture('projects')
@update_partial('_update', '_success')
- def test_updateProjects_succ(self, scenario):
+ def test_updateProjects_succ(self):
update = models.ScenarioProject(project='qtip').format()
- scenario['installers'][0]['versions'][0]['projects'] = [update]
- return [update], scenario
+ self.req_d['installers'][0]['versions'][0]['projects'] = [update]
+ return [update]
+
+ @update_url_fixture('projects')
+ @update_partial('_update', '_conflict')
+ def test_updateProjects_duplicated(self):
+ update = models.ScenarioProject(project='qtip').format()
+ return [update, update]
@update_url_fixture('projects')
@update_partial('_update', '_bad_request')
- def test_updateProjects_bad_schema(self, scenario):
+ def test_updateProjects_bad_schema(self):
update = models.ScenarioProject(project='functest').format()
update['score'] = None
- scenario['installers'][0]['versions'][0]['projects'] = [update]
- return [update], scenario
+ return [update]
@update_url_fixture('projects')
@update_partial('_delete', '_success')
- def test_deleteProjects(self, scenario):
+ def test_deleteProjects(self):
deletes = ['functest']
- projects = scenario['installers'][0]['versions'][0]['projects']
- scenario['installers'][0]['versions'][0]['projects'] = filter(
+ projects = self.req_d['installers'][0]['versions'][0]['projects']
+ self.req_d['installers'][0]['versions'][0]['projects'] = filter(
lambda f: f['project'] != 'functest',
projects)
- return deletes, scenario
+ return deletes
@update_url_fixture('owner')
@update_partial('_update', '_success')
- def test_changeOwner(self, scenario):
+ def test_changeOwner(self):
new_owner = 'new_owner'
update = models.ScenarioChangeOwnerRequest(new_owner).format()
- scenario['installers'][0]['versions'][0]['owner'] = new_owner
- return update, scenario
+ self.req_d['installers'][0]['versions'][0]['owner'] = new_owner
+ return update
+
+ @update_url_fixture('versions')
+ @update_partial('_add', '_success')
+ def test_addVersions_succ(self):
+ add = models.ScenarioVersion(version='Euphrates').format()
+ self.req_d['installers'][0]['versions'].append(add)
+ return [add]
+
+ @update_url_fixture('versions')
+ @update_partial('_add', '_conflict')
+ def test_addVersions_already_exist(self):
+ add = models.ScenarioVersion(version='master').format()
+ return [add]
- def _add(self, update_req, new_scenario):
+ @update_url_fixture('versions')
+ @update_partial('_add', '_bad_request')
+ def test_addVersions_bad_schema(self):
+ add = models.ScenarioVersion(version='euphrates').format()
+ add['notexist'] = None
+ return [add]
+
+ @update_url_fixture('versions')
+ @update_partial('_update', '_success')
+ def test_updateVersions_succ(self):
+ update = models.ScenarioVersion(version='euphrates').format()
+ self.req_d['installers'][0]['versions'] = [update]
+ return [update]
+
+ @update_url_fixture('versions')
+ @update_partial('_update', '_conflict')
+ def test_updateVersions_duplicated(self):
+ update = models.ScenarioVersion(version='euphrates').format()
+ return [update, update]
+
+ @update_url_fixture('versions')
+ @update_partial('_update', '_bad_request')
+ def test_updateVersions_bad_schema(self):
+ update = models.ScenarioVersion(version='euphrates').format()
+ update['not_owner'] = 'Iam'
+ return [update]
+
+ @update_url_fixture('versions')
+ @update_partial('_delete', '_success')
+ def test_deleteVersions(self):
+ deletes = ['master']
+ versions = self.req_d['installers'][0]['versions']
+ self.req_d['installers'][0]['versions'] = filter(
+ lambda f: f['version'] != 'master',
+ versions)
+ return deletes
+
+ @update_url_fixture('installers')
+ @update_partial('_add', '_success')
+ def test_addInstallers_succ(self):
+ add = models.ScenarioInstaller(installer='daisy').format()
+ self.req_d['installers'].append(add)
+ return [add]
+
+ @update_url_fixture('installers')
+ @update_partial('_add', '_conflict')
+ def test_addInstallers_already_exist(self):
+ add = models.ScenarioInstaller(installer='apex').format()
+ return [add]
+
+ @update_url_fixture('installers')
+ @update_partial('_add', '_bad_request')
+ def test_addInstallers_bad_schema(self):
+ add = models.ScenarioInstaller(installer='daisy').format()
+ add['not_exist'] = 'not_exist'
+ return [add]
+
+ @update_url_fixture('installers')
+ @update_partial('_update', '_success')
+ def test_updateInstallers_succ(self):
+ update = models.ScenarioInstaller(installer='daisy').format()
+ self.req_d['installers'] = [update]
+ return [update]
+
+ @update_url_fixture('installers')
+ @update_partial('_update', '_conflict')
+ def test_updateInstallers_duplicated(self):
+ update = models.ScenarioInstaller(installer='daisy').format()
+ return [update, update]
+
+ @update_url_fixture('installers')
+ @update_partial('_update', '_bad_request')
+ def test_updateInstallers_bad_schema(self):
+ update = models.ScenarioInstaller(installer='daisy').format()
+ update['not_exist'] = 'not_exist'
+ return [update]
+
+ @update_url_fixture('installers')
+ @update_partial('_delete', '_success')
+ def test_deleteInstallers(self):
+ deletes = ['apex']
+ installers = self.req_d['installers']
+ self.req_d['installers'] = filter(
+ lambda f: f['installer'] != 'apex',
+ installers)
+ return deletes
+
+ @update_url_fixture('rename')
+ @update_partial('_update', '_success')
+ def test_renameScenario(self):
+ new_name = 'new_scenario_name'
+ update = models.ScenarioUpdateRequest(name=new_name)
+ self.req_d['name'] = new_name
+ return update
+
+ @update_url_fixture('rename')
+ @update_partial('_update', '_forbidden')
+ def test_renameScenario_exist(self):
+ new_name = self.req_d['name']
+ update = models.ScenarioUpdateRequest(name=new_name)
+ return update
+
+ def _add(self, update_req):
return self.post_direct_url(self.update_url, update_req)
- def _update(self, update_req, new_scenario):
+ def _update(self, update_req):
return self.update_direct_url(self.update_url, update_req)
- def _delete(self, update_req, new_scenario):
+ def _delete(self, update_req):
return self.delete_direct_url(self.update_url, update_req)
- def _success(self, status, new_scenario):
+ def _success(self, status):
self.assertEqual(status, httplib.OK)
- self._get_and_assert(new_scenario.get('name'), new_scenario)
+ self.get_and_assert(self.req_d['name'])
- def _forbidden(self, status, new_scenario):
+ def _forbidden(self, status):
self.assertEqual(status, httplib.FORBIDDEN)
- def _bad_request(self, status, new_scenario):
+ def _bad_request(self, status):
self.assertEqual(status, httplib.BAD_REQUEST)
- def _conflict(self, status, new_scenario):
+ def _conflict(self, status):
self.assertEqual(status, httplib.CONFLICT)