-rwxr-xr-x  jjb/3rd_party_ci/functest-netvirt.sh  7
-rw-r--r--  jjb/3rd_party_ci/odl-netvirt.yml  12
-rw-r--r--  jjb/doctor/doctor.yml  107
-rw-r--r--  jjb/fuel/fuel-daily-jobs.yml  204
-rwxr-xr-x  jjb/fuel/fuel-plugin-build.sh  10
-rwxr-xr-x  jjb/fuel/fuel-plugin-test.sh  10
-rw-r--r--  jjb/fuel/fuel-plugin-verify-jobs.yml  236
-rw-r--r--  jjb/fuel/fuel-project-jobs.yml  4
-rw-r--r--  jjb/fuel/fuel-verify-jobs.yml  4
-rw-r--r--  jjb/global/releng-macros.yml  24
-rw-r--r--  jjb/global/slave-params.yml  14
-rwxr-xr-x  jjb/infra/bifrost-upload-logs.sh  29
-rw-r--r--  jjb/infra/bifrost-verify-jobs.yml  11
-rwxr-xr-x  jjb/infra/bifrost-verify.sh  53
-rw-r--r--  jjb/releng/testapi-automate.yml  66
-rw-r--r--  jjb/releng/testapi-docker-update.sh  18
-rw-r--r--  jjb/releng/testapi-run-tests.sh  17
-rw-r--r--  jjb/storperf/storperf.yml  5
-rwxr-xr-x  utils/fetch_os_creds.sh  11
-rw-r--r--  utils/push-test-logs.sh  1
-rw-r--r--  utils/test/declaration/addtestcase.php  40
-rw-r--r--  utils/test/declaration/index.php  221
-rw-r--r--  utils/test/declaration/testcases.php  36
-rw-r--r--  utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py  45
-rw-r--r--  utils/test/testapi/opnfv_testapi/resources/scenario_models.py  34
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py  68
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/scenario-c1.json (renamed from utils/test/testapi/opnfv_testapi/tests/unit/scenario-create.json)  2
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/scenario-c2.json  73
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/test_base.py  8
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/test_scenario.py  86
30 files changed, 631 insertions(+), 825 deletions(-)
diff --git a/jjb/3rd_party_ci/functest-netvirt.sh b/jjb/3rd_party_ci/functest-netvirt.sh
deleted file mode 100755
index adffaf42d..000000000
--- a/jjb/3rd_party_ci/functest-netvirt.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-set -e
-
-# wipe the WORKSPACE
-/bin/rm -rf $WORKSPACE/*
-
-echo "Hello World"
diff --git a/jjb/3rd_party_ci/odl-netvirt.yml b/jjb/3rd_party_ci/odl-netvirt.yml
index ca1936156..054761ea7 100644
--- a/jjb/3rd_party_ci/odl-netvirt.yml
+++ b/jjb/3rd_party_ci/odl-netvirt.yml
@@ -20,8 +20,6 @@
slave-label: 'ericsson-virtual5'
- 'install-netvirt':
slave-label: 'odl-netvirt-virtual'
- - 'functest':
- slave-label: 'odl-netvirt-virtual'
- 'postprocess':
slave-label: 'odl-netvirt-virtual'
#####################################
@@ -119,9 +117,8 @@
name: functest
condition: SUCCESSFUL
projects:
- - name: 'functest-netvirt-virtual-suite-{stream}'
+ - name: 'functest-netvirt-virtual-daily-{stream}'
predefined-parameters: |
- FUNCTEST_SUITE_NAME=vping_userdata,bgpvpn
RC_FILE_PATH=/home/jenkins/cloner-info/overcloudrc
DEPLOY_SCENARIO=os-odl_l2-bgpvpn-noha
node-parameters: true
@@ -162,7 +159,7 @@
use-build-blocker: true
blocking-jobs:
- 'odl-netvirt-verify-virtual-install-.*'
- - 'odl-netvirt-verify-virtual-functest-.*'
+ - 'functest-netvirt-virtual-daily-.*'
- 'odl-netvirt-verify-virtual-postprocess-.*'
block-level: 'NODE'
@@ -200,11 +197,6 @@
- shell:
!include-raw: ./install-netvirt.sh
- builder:
- name: 'netvirt-verify-functest-builder'
- builders:
- - shell:
- !include-raw: ./functest-netvirt.sh
-- builder:
name: 'netvirt-verify-postprocess-builder'
builders:
- shell:
diff --git a/jjb/doctor/doctor.yml b/jjb/doctor/doctor.yml
index ab9ef8f6c..62f89e686 100644
--- a/jjb/doctor/doctor.yml
+++ b/jjb/doctor/doctor.yml
@@ -24,10 +24,17 @@
- 'sample'
- 'congress'
+ task:
+ - verify:
+ profiler: 'none'
+ auto-trigger-name: 'doctor-verify'
+ - profiling:
+ profiler: 'poc'
+ auto-trigger-name: 'experimental'
+
jobs:
- 'doctor-verify-{stream}'
- - 'doctor-verify-{installer}-{inspector}-{stream}'
- - 'doctor-profiling-{stream}'
+ - 'doctor-{task}-{installer}-{inspector}-{stream}':
- job-template:
name: 'doctor-verify-{stream}'
@@ -71,7 +78,7 @@
- shell: "[ -e tests/run.sh ] && bash -n ./tests/run.sh"
- job-template:
- name: 'doctor-verify-{installer}-{inspector}-{stream}'
+ name: 'doctor-{task}-{installer}-{inspector}-{stream}'
node: '{slave-label}'
@@ -107,7 +114,7 @@
default: '{project}'
- string:
name: TESTCASE_OPTIONS
- default: '-e INSPECTOR_TYPE={inspector} -v $WORKSPACE:/home/opnfv/repos/doctor'
+ default: '-e INSPECTOR_TYPE={inspector} -e PROFILER_TYPE={profiler} -v $WORKSPACE:/home/opnfv/repos/doctor'
description: 'Addtional parameters specific to test case(s)'
# functest-parameter
- string:
@@ -131,32 +138,9 @@
- git-scm-gerrit
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'tests/**'
- skip-vote:
- successful: true
- failed: true
- unstable: true
- notbuilt: true
+ - '{auto-trigger-name}':
+ project: '{project}'
+ branch: '{branch}'
builders:
- 'functest-suite-builder'
@@ -172,65 +156,36 @@
- archive:
artifacts: 'tests/*.log'
-- job-template:
- name: 'doctor-profiling-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - 'doctor-defaults':
- project: '{project}'
- branch: '{branch}'
- - string:
- name: PROFILER
- default: poc
- description: "Profiler to be used"
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - 'experimental':
- project: '{project}'
- branch: '{branch}'
-
-#####################################
-# parameter macros
-#####################################
-# TODO(yujunz) replace common parameter in doctor-verify-{stream} with macro
-- parameter:
- name: 'doctor-defaults'
- parameters:
- - project-parameter:
- project: '{project}'
- - gerrit-parameter:
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
#####################################
# trigger macros
#####################################
-# TODO(yujunz) move to opnfv commom
- trigger:
- name: 'experimental'
+ name: 'doctor-verify'
triggers:
- gerrit:
server-name: 'gerrit.opnfv.org'
trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
- comment-added-contains-event:
- comment-contains-value: 'check-experimental'
+ comment-contains-value: 'reverify'
projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: 'ANT'
- pattern: 'tests/**'
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: ANT
+ pattern: 'tests/**'
skip-vote:
successful: true
failed: true
unstable: true
notbuilt: true
- silent-start: true
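For context, Jenkins Job Builder expands the new `task` axis together with the existing `installer`, `inspector`, and `stream` axes, so `doctor-{task}-{installer}-{inspector}-{stream}` becomes one concrete job per combination. A rough sketch of that expansion (illustration only, not JJB's implementation; the task and inspector values come from this file, the installer and stream values are assumptions):

```python
from itertools import product

tasks = ['verify', 'profiling']        # from the new 'task' axis above
inspectors = ['sample', 'congress']    # from the 'inspector' axis above
installers = ['apex', 'fuel']          # assumption: illustrative installer axis
streams = ['master']                   # assumption: master stream only

template = 'doctor-{task}-{installer}-{inspector}-{stream}'

for task, installer, inspector, stream in product(tasks, installers, inspectors, streams):
    # Each combination yields one Jenkins job name.
    print(template.format(task=task, installer=installer,
                          inspector=inspector, stream=stream))
```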
diff --git a/jjb/fuel/fuel-daily-jobs.yml b/jjb/fuel/fuel-daily-jobs.yml
index 87488c886..cd22c1ac2 100644
--- a/jjb/fuel/fuel-daily-jobs.yml
+++ b/jjb/fuel/fuel-daily-jobs.yml
@@ -15,10 +15,10 @@
branch: '{stream}'
disabled: false
gs-pathname: ''
- colorado: &colorado
- stream: colorado
+ danube: &danube
+ stream: danube
branch: 'stable/{stream}'
- disabled: false
+ disabled: true
gs-pathname: '/{stream}'
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
@@ -34,10 +34,10 @@
<<: *master
- baremetal:
slave-label: fuel-baremetal
- <<: *colorado
+ <<: *danube
- virtual:
slave-label: fuel-virtual
- <<: *colorado
+ <<: *danube
#--------------------------------
# None-CI PODs
#--------------------------------
@@ -52,10 +52,10 @@
<<: *master
- zte-pod1:
slave-label: zte-pod1
- <<: *colorado
+ <<: *danube
- zte-pod3:
slave-label: zte-pod3
- <<: *colorado
+ <<: *danube
#--------------------------------
# scenarios
#--------------------------------
@@ -340,81 +340,81 @@
triggers:
- timed: ''
#-----------------------------------------------
-# Triggers for job running on fuel-baremetal against colorado branch
+# Triggers for job running on fuel-baremetal against danube branch
#-----------------------------------------------
# HA Scenarios
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-danube-trigger'
triggers:
- timed: '0 20 * * *'
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-danube-trigger'
triggers:
- timed: '0 23 * * *'
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-danube-trigger'
triggers:
- timed: '0 2 * * *'
- trigger:
- name: 'fuel-os-onos-sfc-ha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-onos-sfc-ha-baremetal-daily-danube-trigger'
triggers:
- timed: '0 5 * * *'
- trigger:
- name: 'fuel-os-onos-nofeature-ha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-onos-nofeature-ha-baremetal-daily-danube-trigger'
triggers:
- timed: '0 8 * * *'
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-danube-trigger'
triggers:
- timed: '0 11 * * *'
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-danube-trigger'
triggers:
- timed: '0 14 * * *'
- trigger:
- name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-danube-trigger'
triggers:
- timed: '0 17 * * *'
- trigger:
- name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-danube-trigger'
triggers:
- timed: '0 20 * * *'
# NOHA Scenarios
- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-nofeature-noha-baremetal-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l3-nofeature-noha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-odl_l3-nofeature-noha-baremetal-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-sfc-noha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-onos-sfc-noha-baremetal-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-nofeature-noha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-onos-nofeature-noha-baremetal-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-sfc-noha-baremetal-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-noha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-noha-baremetal-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-ovs-noha-baremetal-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-ovs-noha-baremetal-daily-danube-trigger'
triggers:
- timed: ''
#-----------------------------------------------
@@ -494,79 +494,79 @@
triggers:
- timed: '5 9 * * *'
#-----------------------------------------------
-# Triggers for job running on fuel-virtual against colorado branch
+# Triggers for job running on fuel-virtual against danube branch
#-----------------------------------------------
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-virtual-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-virtual-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-virtual-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-sfc-ha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-onos-sfc-ha-virtual-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-nofeature-ha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-onos-nofeature-ha-virtual-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-virtual-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-sfc-ha-virtual-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-ha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-kvm-ha-virtual-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-ovs-ha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-ovs-ha-virtual-daily-danube-trigger'
triggers:
- timed: ''
# NOHA Scenarios
- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-danube-trigger'
triggers:
- timed: '0 13 * * *'
- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-nofeature-noha-virtual-daily-danube-trigger'
triggers:
- timed: '30 15 * * *'
- trigger:
- name: 'fuel-os-odl_l3-nofeature-noha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-odl_l3-nofeature-noha-virtual-daily-danube-trigger'
triggers:
- timed: '0 18 * * *'
- trigger:
- name: 'fuel-os-onos-sfc-noha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-onos-sfc-noha-virtual-daily-danube-trigger'
triggers:
- timed: '30 20 * * *'
- trigger:
- name: 'fuel-os-onos-nofeature-noha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-onos-nofeature-noha-virtual-daily-danube-trigger'
triggers:
- timed: '0 23 * * *'
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-sfc-noha-virtual-daily-danube-trigger'
triggers:
- timed: '30 1 * * *'
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-noha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-noha-virtual-daily-danube-trigger'
triggers:
- timed: '0 4 * * *'
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-kvm-noha-virtual-daily-danube-trigger'
triggers:
- timed: '30 6 * * *'
- trigger:
- name: 'fuel-os-nosdn-ovs-noha-virtual-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-ovs-noha-virtual-daily-danube-trigger'
triggers:
- timed: '0 9 * * *'
#-----------------------------------------------
@@ -799,231 +799,231 @@
triggers:
- timed: ''
#-----------------------------------------------
-# ZTE POD1 Triggers running against colorado branch
+# ZTE POD1 Triggers running against danube branch
#-----------------------------------------------
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: '0 2 * * *'
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-sfc-ha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-onos-sfc-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-sfc-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
# NOHA Scenarios
- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-nofeature-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l3-nofeature-noha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-odl_l3-nofeature-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-sfc-noha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-onos-sfc-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-nofeature-noha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-onos-nofeature-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-sfc-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-kvm-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-ovs-noha-zte-pod1-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-ovs-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
#-----------------------------------------------
-# ZTE POD2 Triggers running against colorado branch
+# ZTE POD2 Triggers running against danube branch
#-----------------------------------------------
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-sfc-ha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-onos-sfc-ha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-nofeature-ha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-onos-nofeature-ha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-sfc-ha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-ha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-kvm-ha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-ovs-ha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-ovs-ha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
# NOHA Scenarios
- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-nofeature-noha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-nofeature-noha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l3-nofeature-noha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-odl_l3-nofeature-noha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-sfc-noha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-onos-sfc-noha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-nofeature-noha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-onos-nofeature-noha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-sfc-noha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-kvm-noha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-ovs-noha-zte-pod2-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-ovs-noha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
#-----------------------------------------------
-# ZTE POD3 Triggers running against colorado branch
+# ZTE POD3 Triggers running against danube branch
#-----------------------------------------------
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-nofeature-ha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-odl_l3-nofeature-ha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-sfc-ha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-onos-sfc-ha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-nofeature-ha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-onos-nofeature-ha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-sfc-ha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-ha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-kvm-ha-zte-pod3-daily-danube-trigger'
triggers:
- timed: '0 18 * * *'
- trigger:
- name: 'fuel-os-nosdn-ovs-ha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-ovs-ha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
# NOHA Scenarios
- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-nofeature-noha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-nofeature-noha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l3-nofeature-noha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-odl_l3-nofeature-noha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-sfc-noha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-onos-sfc-noha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-nofeature-noha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-onos-nofeature-noha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-sfc-noha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-kvm-noha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-ovs-noha-zte-pod3-daily-colorado-trigger'
+ name: 'fuel-os-nosdn-ovs-noha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
diff --git a/jjb/fuel/fuel-plugin-build.sh b/jjb/fuel/fuel-plugin-build.sh
deleted file mode 100755
index f7f613dc0..000000000
--- a/jjb/fuel/fuel-plugin-build.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
- JOB_TYPE=${BASH_REMATCH[0]}
-else
- echo "Unable to determine job type!"
- exit 1
-fi
-
-echo "Not activated!"
diff --git a/jjb/fuel/fuel-plugin-test.sh b/jjb/fuel/fuel-plugin-test.sh
deleted file mode 100755
index f7f613dc0..000000000
--- a/jjb/fuel/fuel-plugin-test.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
- JOB_TYPE=${BASH_REMATCH[0]}
-else
- echo "Unable to determine job type!"
- exit 1
-fi
-
-echo "Not activated!"
diff --git a/jjb/fuel/fuel-plugin-verify-jobs.yml b/jjb/fuel/fuel-plugin-verify-jobs.yml
deleted file mode 100644
index bf847edfe..000000000
--- a/jjb/fuel/fuel-plugin-verify-jobs.yml
+++ /dev/null
@@ -1,236 +0,0 @@
-- project:
- name: 'fuel-plugin-verify-jobs'
-
- project: 'fuel-plugin'
-
- installer: 'fuel'
-#####################################
-# branch definitions
-#####################################
- stream:
- - master:
- upstream-branch: '{stream}'
- opnfv-branch: 'experimental'
- gs-pathname: ''
- disabled: false
-#####################################
-# patch verification phases
-#####################################
- phase:
- - 'build':
- slave-label: 'opnfv-build-ubuntu'
- - 'test':
- slave-label: 'opnfv-build-ubuntu'
-#####################################
-# jobs
-#####################################
- jobs:
- - 'fuel-verify-plugin-{stream}'
- - 'fuel-verify-plugin-{phase}-{stream}'
-#####################################
-# job templates
-#####################################
-- job-template:
- name: 'fuel-verify-plugin-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - throttle:
- enabled: true
- max-total: 4
- option: 'project'
-
- parameters:
- - project-parameter:
- project: '{project}'
- - gerrit-parameter:
- branch: '{upstream-branch}'
- description: 'OpenStack branch to use'
- - string:
- name: OPNFV_BRANCH
- default: '{opnfv-branch}'
- description: 'OPNFV branch to use'
- - 'opnfv-build-defaults'
- - 'fuel-verify-plugin-defaults':
- gs-pathname: '{gs-pathname}'
-
- scm:
- - git:
- url: 'https://git.openstack.org/$GERRIT_PROJECT'
- refspec: '$GERRIT_REFSPEC'
- branches:
- - 'origin/$GERRIT_BRANCH'
- skip-tag: true
- choosing-strategy: 'gerrit'
- timeout: 10
- wipe-workspace: true
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- triggers:
- - gerrit:
- server-name: 'review.openstack.org'
- silent-start: false
- skip-vote:
- successful: true
- failed: true
- unstable: true
- notbuilt: true
- escape-quotes: true
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'PLAIN'
- project-pattern: 'openstack/fuel-plugin-bgpvpn'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{upstream-branch}'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'README.md|.gitignore|.gitreview'
- - project-compare-type: 'PLAIN'
- project-pattern: 'openstack/fuel-plugin-onos'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{upstream-branch}'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'README.md|.gitignore|.gitreview'
- readable-message: true
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'fuel-verify-plugin-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: test
- condition: SUCCESSFUL
- projects:
- - name: 'fuel-verify-plugin-test-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
-
-- job-template:
- name: 'fuel-verify-plugin-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - throttle:
- enabled: true
- max-total: 6
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-verify-plugin-test-.*'
- block-level: 'NODE'
-
- parameters:
- - project-parameter:
- project: '{project}'
- - gerrit-parameter:
- branch: '{upstream-branch}'
- description: 'OpenStack branch to use'
- - string:
- name: OPNFV_BRANCH
- default: '{opnfv-branch}'
- description: 'OPNFV branch to use'
- - '{slave-label}-defaults'
- - '{installer}-defaults'
- - 'fuel-verify-plugin-defaults':
- gs-pathname: '{gs-pathname}'
-
- scm:
- - git:
- url: 'https://git.openstack.org/$GERRIT_PROJECT'
- refspec: '$GERRIT_REFSPEC'
- branches:
- - 'origin/$GERRIT_BRANCH'
- skip-tag: true
- choosing-strategy: 'gerrit'
- timeout: 10
- wipe-workspace: true
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - 'fuel-verify-plugin-{phase}-macro'
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'fuel-verify-plugin-build-macro'
- builders:
- - shell:
- !include-raw: ./fuel-plugin-build.sh
-
-- builder:
- name: 'fuel-verify-plugin-test-macro'
- builders:
- - shell:
- !include-raw: ./fuel-plugin-test.sh
-#####################################
-# parameter macros
-#####################################
-- parameter:
- name: 'fuel-verify-plugin-defaults'
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/cache/$INSTALLER_TYPE
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/fuel/fuel-project-jobs.yml b/jjb/fuel/fuel-project-jobs.yml
index 8fdf8deae..731b9bd0d 100644
--- a/jjb/fuel/fuel-project-jobs.yml
+++ b/jjb/fuel/fuel-project-jobs.yml
@@ -13,10 +13,10 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
- - colorado:
+ - danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: false
+ disabled: true
jobs:
- 'fuel-build-daily-{stream}'
diff --git a/jjb/fuel/fuel-verify-jobs.yml b/jjb/fuel/fuel-verify-jobs.yml
index 4a76d9da7..c5f913e49 100644
--- a/jjb/fuel/fuel-verify-jobs.yml
+++ b/jjb/fuel/fuel-verify-jobs.yml
@@ -12,10 +12,10 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
- - colorado:
+ - danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: false
+ disabled: true
#####################################
# patch verification phases
#####################################
diff --git a/jjb/global/releng-macros.yml b/jjb/global/releng-macros.yml
index 10835f6a3..404c3dd69 100644
--- a/jjb/global/releng-macros.yml
+++ b/jjb/global/releng-macros.yml
@@ -96,6 +96,30 @@
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+- trigger:
+ name: 'experimental'
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - comment-added-contains-event:
+ comment-contains-value: 'check-experimental'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: 'ANT'
+ pattern: 'tests/**'
+ skip-vote:
+ successful: true
+ failed: true
+ unstable: true
+ notbuilt: true
+ silent-start: true
+
- wrapper:
name: ssh-agent-wrapper
wrappers:
diff --git a/jjb/global/slave-params.yml b/jjb/global/slave-params.yml
index 39a1b1b9c..9d08487a3 100644
--- a/jjb/global/slave-params.yml
+++ b/jjb/global/slave-params.yml
@@ -364,6 +364,20 @@
default: /root/.ssh/id_rsa
description: 'SSH key to use for Apex'
- parameter:
+ name: 'intel-pod9-defaults'
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - intel-pod9
+ default-slaves:
+ - intel-pod9
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+- parameter:
name: 'intel-pod3-defaults'
parameters:
- node:
diff --git a/jjb/infra/bifrost-upload-logs.sh b/jjb/infra/bifrost-upload-logs.sh
deleted file mode 100755
index a03365942..000000000
--- a/jjb/infra/bifrost-upload-logs.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 SUSE.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-set -eu
-set -o pipefail
-
-BIFROST_CONSOLE_LOG="${BUILD_URL}/consoleText"
-BIFROST_GS_URL=${BIFROST_LOG_URL/http:/gs:}
-
-echo "Uploading build logs to ${BIFROST_LOG_URL}"
-
-echo "Uploading console output"
-curl -L ${BIFROST_CONSOLE_LOG} | gsutil cp - ${BIFROST_GS_URL}/console.txt
-
-[[ ! -d ${WORKSPACE}/logs ]] && exit 0
-
-pushd ${WORKSPACE}/logs/ &> /dev/null
-for x in *.log; do
- echo "Compressing and uploading $x"
- tar -czf - $x | gsutil cp - ${BIFROST_GS_URL}/$x.tar.gz
-done
-popd ${WORKSPACE}/logs &> /dev/null
diff --git a/jjb/infra/bifrost-verify-jobs.yml b/jjb/infra/bifrost-verify-jobs.yml
index cbe21e256..8184b6343 100644
--- a/jjb/infra/bifrost-verify-jobs.yml
+++ b/jjb/infra/bifrost-verify-jobs.yml
@@ -142,7 +142,6 @@
builders:
- bifrost-set-name
- bifrost-build
- - bifrost-artifacts-upload
publishers:
- email:
@@ -163,7 +162,7 @@
exclude-no-code-change: 'false'
- comment-added-contains-event:
comment-contains-value: 'recheck'
- custom-url: '* $JOB_NAME $BIFROST_LOG_URL'
+ custom-url: '* $JOB_NAME $BIFROST_LOG_URL/index.html'
silent-start: true
projects:
- project-compare-type: 'PLAIN'
@@ -192,7 +191,7 @@
comment-contains-value: 'recheck'
- comment-added-contains-event:
comment-contains-value: 'reverify'
- custom-url: '* $JOB_NAME $BIFROST_LOG_URL'
+ custom-url: '* $JOB_NAME $BIFROST_LOG_URL/index.html'
projects:
- project-compare-type: 'ANT'
project-pattern: 'releng'
@@ -220,9 +219,3 @@
builders:
- shell:
!include-raw: ./bifrost-verify.sh
-
-- builder:
- name: bifrost-artifacts-upload
- builders:
- - shell:
- !include-raw: ./bifrost-upload-logs.sh
diff --git a/jjb/infra/bifrost-verify.sh b/jjb/infra/bifrost-verify.sh
index 9fbb1d0d9..201d3f2d6 100755
--- a/jjb/infra/bifrost-verify.sh
+++ b/jjb/infra/bifrost-verify.sh
@@ -11,7 +11,51 @@ set -o errexit
set -o nounset
set -o pipefail
-trap fix_ownership EXIT
+trap cleanup_and_upload EXIT
+
+function upload_logs() {
+ BIFROST_CONSOLE_LOG="${BUILD_URL}/consoleText"
+ BIFROST_GS_URL=${BIFROST_LOG_URL/http:/gs:}
+
+ echo "Uploading build logs to ${BIFROST_LOG_URL}"
+
+ echo "Uploading console output"
+ curl -s -L ${BIFROST_CONSOLE_LOG} > build_log.txt
+ gsutil -q cp -Z build_log.txt ${BIFROST_GS_URL}/build_log.txt
+ rm build_log.txt
+
+ [[ ! -d ${WORKSPACE}/logs ]] && exit 0
+
+ pushd ${WORKSPACE}/logs/ &> /dev/null
+ for x in *.log; do
+ echo "Compressing and uploading $x"
+ gsutil -q cp -Z ${x} ${BIFROST_GS_URL}/${x}
+ done
+
+ echo "Generating the landing page"
+ cat > index.html <<EOF
+<html>
+<h1>Build results for <a href=https://$GERRIT_NAME/#/c/$GERRIT_CHANGE_NUMBER/$GERRIT_PATCHSET_NUMBER>$GERRIT_NAME/$GERRIT_CHANGE_NUMBER/$GERRIT_PATCHSET_NUMBER</a></h1>
+<h2>Job: $JOB_NAME</h2>
+<ul>
+<li><a href=${BIFROST_LOG_URL}/build_log.txt>build_log.txt</a></li>
+EOF
+
+ for x in *.log; do
+ echo "<li><a href=${BIFROST_LOG_URL}/${x}>${x}</a></li>" >> index.html
+ done
+
+ cat >> index.html << EOF
+</ul>
+</html>
+EOF
+
+ gsutil -q cp index.html ${BIFROST_GS_URL}/index.html
+
+ rm index.html
+
+ popd &> /dev/null
+}
function fix_ownership() {
if [ -z "${JOB_URL+x}" ]; then
@@ -25,6 +69,13 @@ function fix_ownership() {
fi
}
+function cleanup_and_upload() {
+ original_exit=$?
+ fix_ownership
+ upload_logs
+ exit $original_exit
+}
+
# check distro to see if we support it
if [[ ! "$DISTRO" =~ (trusty|centos7|suse) ]]; then
echo "Distro $DISTRO is not supported!"
diff --git a/jjb/releng/testapi-automate.yml b/jjb/releng/testapi-automate.yml
index 0566c3fd6..c03835279 100644
--- a/jjb/releng/testapi-automate.yml
+++ b/jjb/releng/testapi-automate.yml
@@ -6,9 +6,50 @@
gs-pathname: ''
jobs:
- 'testapi-automate-{stream}'
+ - 'testapi-verify-{stream}'
+
project: 'releng'
- job-template:
+ name: 'testapi-verify-{stream}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - gerrit-parameter:
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
+
+ scm:
+ - git-scm
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: 'ANT'
+ pattern: 'utils/**'
+
+ builders:
+ - run-unit-tests
+
+- job-template:
name: 'testapi-automate-{stream}'
parameters:
@@ -16,6 +57,10 @@
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
+ - string:
+ name: DOCKER_TAG
+ default: "latest"
+ description: "Tag name for testapi docker image"
- 'opnfv-build-defaults'
scm:
@@ -39,6 +84,7 @@
pattern: 'utils/**'
builders:
+ - docker-update
- start-testapi-server
- testapi-doc-build
- upload-doc-artifact
@@ -49,10 +95,16 @@
################################
- builder:
- name: testapi-doc-build
+ name: run-unit-tests
builders:
- shell: |
- bash ./utils/test/testapi/htmlize/doc-build.sh
+ bash ./jjb/releng/testapi-run-tests.sh
+
+- builder:
+ name: docker-update
+ builders:
+ - shell: |
+ bash ./jjb/releng/testapi-docker-update.sh
- builder:
name: start-testapi-server
@@ -61,13 +113,19 @@
bash ./utils/test/testapi/htmlize/prepare.sh
- builder:
- name: clean-testapi-server
+ name: testapi-doc-build
builders:
- shell: |
- bash ./utils/test/testapi/htmlize/finish.sh
+ bash ./utils/test/testapi/htmlize/doc-build.sh
- builder:
name: upload-doc-artifact
builders:
- shell: |
bash ./utils/test/testapi/htmlize/push-doc-artifact.sh
+
+- builder:
+ name: clean-testapi-server
+ builders:
+ - shell: |
+ bash ./utils/test/testapi/htmlize/finish.sh
diff --git a/jjb/releng/testapi-docker-update.sh b/jjb/releng/testapi-docker-update.sh
new file mode 100644
index 000000000..84f5c3217
--- /dev/null
+++ b/jjb/releng/testapi-docker-update.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+set -o errexit
+set -o nounset
+
+cd $WORKSPACE/utils/test/testapi/docker/
+
+# Remove previous containers
+docker ps -a | grep "opnfv/testapi" | awk '{ print $1 }' | xargs -r docker rm -f
+
+# Remove previous images
+docker images | grep "opnfv/testapi" | awk '{ print $3 }' | xargs -r docker rmi -f
+
+# Start build
+docker build --no-cache -t opnfv/testapi:$DOCKER_TAG .
+
+# Push Image
+docker push opnfv/testapi:$DOCKER_TAG
diff --git a/jjb/releng/testapi-run-tests.sh b/jjb/releng/testapi-run-tests.sh
new file mode 100644
index 000000000..47691ed04
--- /dev/null
+++ b/jjb/releng/testapi-run-tests.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+set -o errexit
+
+echo "Running unit tests..."
+
+# Creating virtual environment
+virtualenv $WORKSPACE/testapi_test_venv
+source $WORKSPACE/testapi_test_venv/bin/activate
+
+cd $WORKSPACE/utils/test/testapi/
+
+# Install requirements
+pip install -r requirements.txt
+pip install -r test-requirements.txt
+
+# Run unit tests
+bash run_test.sh
diff --git a/jjb/storperf/storperf.yml b/jjb/storperf/storperf.yml
index 902fff999..8f42f8c06 100644
--- a/jjb/storperf/storperf.yml
+++ b/jjb/storperf/storperf.yml
@@ -144,15 +144,14 @@
# Required Variables:
# stream: branch with - in place of / (eg. stable)
# branch: branch (eg. stable)
- node: storperf-pod
-
- disabled: false
+ disabled: '{obj:disabled}'
parameters:
- project-parameter:
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
+ - 'intel-pod9-defaults'
scm:
- git-scm
diff --git a/utils/fetch_os_creds.sh b/utils/fetch_os_creds.sh
index 3667dbe6c..856f69a27 100755
--- a/utils/fetch_os_creds.sh
+++ b/utils/fetch_os_creds.sh
@@ -38,6 +38,16 @@ verify_connectivity() {
error "Can not talk to $ip."
}
+
+swap_to_public() {
+ if [ "$1" != "" ]; then
+ info "Exchanging keystone public IP in rc file to $public_ip"
+ sed -i "/OS_AUTH_URL/c\export OS_AUTH_URL=\'$public_ip'" $dest_path
+ sed -i 's/internalURL/publicURL/g' $dest_path
+ fi
+}
+
+
: ${DEPLOY_TYPE:=''}
#Get options
@@ -151,6 +161,7 @@ elif [ "$installer_type" == "compass" ]; then
| grep identity | awk '{print $14}')
fi
info "public_ip: $public_ip"
+ swap_to_public $public_ip
elif [ "$installer_type" == "joid" ]; then
diff --git a/utils/push-test-logs.sh b/utils/push-test-logs.sh
index 733ffbcb5..61a6c327d 100644
--- a/utils/push-test-logs.sh
+++ b/utils/push-test-logs.sh
@@ -28,6 +28,7 @@ node_list=(\
'ericsson-virtual4' 'ericsson-virtual5' \
'arm-pod1' 'arm-pod3' \
'huawei-pod1' 'huawei-pod2' 'huawei-pod3' 'huawei-pod4' 'huawei-pod5' \
+'huawei-pod6' 'huawei-pod7' \
'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4')
diff --git a/utils/test/declaration/addtestcase.php b/utils/test/declaration/addtestcase.php
deleted file mode 100644
index 0e5bed689..000000000
--- a/utils/test/declaration/addtestcase.php
+++ /dev/null
@@ -1,40 +0,0 @@
-<?php
-function sendPostData($url, $post){
- $ch = curl_init($url);
- $headers= array('Accept: application/json','Content-Type: application/json');
- curl_setopt($ch, CURLOPT_CUSTOMREQUEST, "POST");
- curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
- curl_setopt($ch, CURLOPT_HTTPHEADER, $headers);
- curl_setopt($ch, CURLOPT_POSTFIELDS,$post);
- curl_setopt($ch, CURLOPT_FOLLOWLOCATION, 1);
- $result = curl_exec($ch);
- curl_close($ch);
- return $result;
-}
-
-if(isset($_REQUEST['url'])){
- $url=$_REQUEST['url'];
-}
-if(isset($_REQUEST['name'])){
- $name=$_REQUEST['name'];
-}
-if(isset($_REQUEST['desc'])){
- $desc=$_REQUEST['desc'];
-}
-if(isset($_REQUEST['project'])){
-
- $url_send=$_REQUEST['project'];
- $url_send="http://testresults.opnfv.org:80/test/api/v1/projects/".$url_send."/cases";
- $str_data=array('url'=>$url,'name'=>$name,'description'=>$desc);
- $str_data=json_encode($str_data);
- $res=sendPostData($url_send, $str_data);
- echo '<div class="alert alert-success"> <strong>Success!</strong> Added New test Case </div>';
-
-}else{
-
- echo '<div class="alert alert-danger"> <strong>Error!</strong> Failed to Add New test Case </div>';
-
-}
-
-?>
-
diff --git a/utils/test/declaration/index.php b/utils/test/declaration/index.php
deleted file mode 100644
index b2c5d0370..000000000
--- a/utils/test/declaration/index.php
+++ /dev/null
@@ -1,221 +0,0 @@
-<!DOCTYPE html>
-<html lang="en">
-<head>
- <title>OPNFV DashBoard</title>
- <meta charset="utf-8">
- <meta name="viewport" content="width=device-width, initial-scale=1">
- <link rel="stylesheet" href="http://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css">
- <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.12.2/jquery.min.js"></script>
- <script src="http://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/js/bootstrap.min.js"></script>
-<script>
-$(function() {
-
- $('form#new_testcase').on('submit', function(){
- var selected = $('select#sel_pro2').find("option:selected").val();
- var uri = $('input#uri').val();
- var name = $('input#name').val();
- var desc = $('textarea#desc').val();
- var new_url="http://testresults.opnfv.org:80/test/api/v1/projects/"+selected+"/cases";
- $.post("addtestcase.php", {"project":selected,"url":uri,"name":name,"description":desc}, function(result){
- $("div#result").html(result);
- });
- });
-
-});
-
-$(function() {
-
- $('select#sel1').on('change', function(){
- var selected = $(this).find("option:selected").val();
- var new_url="http://testresults.opnfv.org:80/test/api/v1/projects/"+selected+"/cases";
- //$.post('testcases.php', {project: selected});
- console.log(selected);
- $.post("testcases.php", {project: selected}, function(result){
- $("div#4a").html(result);
- });
-
- });
-
-});
-</script>
-<style>
-body {
- padding : 10px ;
-}
-
-#exTab1 .tab-content {
- color : black;
- padding : 5px 15px;
-}
-
-#exTab2 h3 {
- color : white;
- background-color: #428bca;
- padding : 5px 15px;
-}
-
-/* remove border radius for the tab */
-
-#exTab1 .nav-pills > li > a {
- border-radius: 0;
-}
-
-/* change border radius for the tab , apply corners on top*/
-
-#exTab3 .nav-pills > li > a {
- border-radius: 4px 4px 0 0 ;
-}
-
-#exTab3 .tab-content {
- color : white;
- background-color: #428bca;
- padding : 5px 15px;
-}
-
-</style>
-</head>
-<body>
-
-<div class="container">
- <h1>OPNFV DASHBOARD: </h1></div>
-<div id="exTab1" class="container">
- <ul class="nav nav-pills">
- <li class="active">
- <a href="#1a" data-toggle="tab">PODS</a>
- </li>
- <li><a href="#2a" data-toggle="tab">PROJECTS</a>
- </li>
- <li><a href="#3a" data-toggle="tab">TESTCASES</a>
- </li>
- <li><a href="#5a" data-toggle="tab">ADD TESTCASE</a>
- </li>
- <li><a href="http://testresults.opnfv.org/kibana_dashboards/" >RESULTS</a>
- </li>
- </ul>
- <div class="tab-content clearfix">
- <div class="tab-pane active" id="1a">
- <table class="table table-striped">
- <thead>
- <tr>
- <th>#</th>
- <th>Pod Name</th>
- <th>Creation Date</th>
- <th>Role</th>
- <th>Mode</th>
- </tr>
- </thead>
- <?php
- $url = "http://testresults.opnfv.org:80/test/api/v1/pods";
- $response = file_get_contents($url);
- $data = json_decode($response);
- $pods = $data->pods;
- $i=1;
- foreach ( $pods as $pod ){
-
- $column_str="";
- $column_str="<tr><td>".$i."</td>";
- $column_str=$column_str."<td>".$pod->name."</td>";
- $column_str= $column_str."<td>".$pod->creation_date."</td>";
- $column_str= $column_str."<td>".$pod->role."</td>";
- $column_str= $column_str."<td>".$pod->mode."</td>";
- $column_str= $column_str."</tr>";
- echo $column_str;
- $i=$i+1;
- }
- ?>
- </table>
- </div>
- <div class="tab-pane" id="2a">
- <table class="table table-striped">
- <thead>
- <tr>
- <th>#</th>
- <th>Project</th>
- <th>Creation Date</th>
- </tr>
- </thead>
- <?php
- $url = "http://testresults.opnfv.org:80/test/api/v1/projects";
- $response = file_get_contents($url);
- $data = json_decode($response);
- $projects=$data->projects;
- $i=0;
- foreach ( $projects as $project ){
-
- $column_str="";
- $column_str="<tr><td>".$i."</td>";
- $column_str=$column_str."<td>".$project->name."</td>";
- $column_str= $column_str."<td>".$project->creation_date."</td>";
- $column_str= $column_str."</tr>";
- echo $column_str;
- $i=$i+1;
- }
-?>
- </table>
- </div>
- <div class="tab-pane" id="3a">
-<div class="form-group">
- <label for="sel1">Select list:</label>
- <select class="form-control" id="sel1">
-<?php
- $url = "http://testresults.opnfv.org:80/test/api/v1/projects";
- $response = file_get_contents($url);
- $data = json_decode($response);
- $projects=$data->projects;
- $i=0;
- $firstvalue=$projects[0]->name;
- foreach ( $projects as $project ){
- $column_str="";
- $column_str="<option>".$project->name."</option>";
- echo $column_str;
- }
-
-?>
-</select>
-</div>
- <div class="tab-pane" id="4a">
- <?php
- require "testcases.php";
- ?>
- </div>
- </div>
- <div class="tab-pane" id="5a">
- <form role="form" id="new_testcase">
-<div class="form-group">
- <label for="sel1">Select list:</label>
- <select class="form-control" id="sel_pro2">
-<?php
- $url = "http://testresults.opnfv.org:80/test/api/v1/projects";
- $response = file_get_contents($url);
- $data = json_decode($response);
- $projects=$data->projects;
- $i=0;
- $firstvalue=$projects[0]->name;
- foreach ( $projects as $project ){
- $column_str="";
- $column_str="<option>".$project->name."</option>";
- echo $column_str;
- }
-?>
-</select>
-</div>
-<div class="form-group"> <!-- Name field -->
- <label class="control-label " for="name">TestCase URI</label>
- <input class="form-control" id="uri" name="uri" type="text"/>
- </div>
-<div class="form-group"> <!-- Name field -->
- <label class="control-label " for="name">TestCase Name</label>
- <input class="form-control" id="name" name="name" type="text"/>
- </div>
-<div class="form-group"> <!-- Name field -->
- <label class="control-label " for="name">Description</label>
- <textarea class="form-control" rows="5" id="desc"></textarea>
- </div>
- <button type="submit" class="btn btn-default">Submit</button>
-</form>
- </div>
-<div class="container" id="result"></div>
- </div>
-</div>
-</body>
-</html>
diff --git a/utils/test/declaration/testcases.php b/utils/test/declaration/testcases.php
deleted file mode 100644
index 20645807e..000000000
--- a/utils/test/declaration/testcases.php
+++ /dev/null
@@ -1,36 +0,0 @@
-<?php
- if(isset($_REQUEST['project'])){
- $selected=$_REQUEST['project'];
- }
- else{
- $url = "http://testresults.opnfv.org:80/test/api/v1/projects";
- $response = file_get_contents($url);
- $data = json_decode($response);
- $projects=$data->projects;
- $selected=$projects[0]->name;
- }
- $new_url="http://testresults.opnfv.org:80/test/api/v1/projects/".$selected."/cases";
- $response = file_get_contents($new_url);
- $data = json_decode($response);
- $testcases=$data->testcases;
- $i=0;
- $column_str="";
- $column_str=$column_str."<table class=\"table table-striped\"><tr>";
- $column_str=$column_str."<th>#</th><th>Test Case Name</th>";
- $column_str=$column_str."<th>Creation Date</th>";
- $column_str=$column_str."<th>Description</th></tr>";
- foreach ( $testcases as $testcase ){
- $i=$i+1;
- $column_str=$column_str."<tr>";
- $column_str=$column_str."<td>".$i."</td>";
- $column_str=$column_str."<td>".$testcase->name."</td>";
- $column_str=$column_str."<td>".$testcase->creation_date."</td>";
- $column_str=$column_str."<td>".$testcase->description."</td>";
- $column_str=$column_str."</tr>";
-
- }
- $column_str=$column_str."</table>";
- echo $column_str;
-
-?>
-
diff --git a/utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py b/utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py
index 7bf3d5d53..28fe28e68 100644
--- a/utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py
+++ b/utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py
@@ -19,11 +19,49 @@ class ScenariosCLHandler(GenericScenarioHandler):
"""
@description: Retrieve scenario(s).
@notes: Retrieve scenario(s)
- @return 200: all scenarios consist with query,
+ Available filters for this request are :
+ - name : scenario name
+
+ GET /scenarios?name=scenario_1
+ @param name: scenario name
+ @type name: L{string}
+ @in name: query
+ @required name: False
+ @param installer: installer type
+ @type installer: L{string}
+ @in installer: query
+ @required installer: False
+ @param version: version
+ @type version: L{string}
+ @in version: query
+ @required version: False
+ @param project: project name
+ @type project: L{string}
+ @in project: query
+ @required project: False
+ @return 200: all scenarios satisfy queries,
empty list if no scenario is found
@rtype: L{Scenarios}
"""
- self._list()
+
+ def _set_query():
+ query = dict()
+ elem_query = dict()
+ for k in self.request.query_arguments.keys():
+ v = self.get_query_argument(k)
+ if k == 'installer':
+ elem_query["installer"] = v
+ elif k == 'version':
+ elem_query["versions.version"] = v
+ elif k == 'project':
+ elem_query["versions.projects.project"] = v
+ else:
+ query[k] = v
+ if elem_query:
+ query['installers'] = {'$elemMatch': elem_query}
+ return query
+
+ self._list(_set_query())
@swagger.operation(nickname="Create a new scenario")
def post(self):
@@ -58,6 +96,7 @@ class ScenarioGURHandler(GenericScenarioHandler):
@return 200: scenario exist
@raise 404: scenario not exist
"""
+ self._get_one({'name': name})
pass
@swagger.operation(nickname="Update the scenario by name")
@@ -65,7 +104,7 @@ class ScenarioGURHandler(GenericScenarioHandler):
"""
@description: update a single scenario by name
@param body: fields to be updated
- @type body: L{string}
+ @type body: L{ScenarioCreateRequest}
@in body: body
@rtype: L{Scenario}
@return 200: update success
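The inner `_set_query` helper turns the request's query arguments into a MongoDB filter: `name` stays a top-level equality match, while `installer`, `version`, and `project` are folded into a single `$elemMatch` on the `installers` array, so all three constraints must be satisfied by the same installer entry. The same mapping as a standalone sketch (logic mirrored from the handler for readability):

```python
def build_scenario_query(query_args):
    """Map GET /scenarios query args to a MongoDB filter, as _set_query does."""
    query, elem_query = {}, {}
    for key, value in query_args.items():
        if key == 'installer':
            elem_query['installer'] = value
        elif key == 'version':
            elem_query['versions.version'] = value
        elif key == 'project':
            elem_query['versions.projects.project'] = value
        else:  # e.g. 'name'
            query[key] = value
    if elem_query:
        query['installers'] = {'$elemMatch': elem_query}
    return query

# GET /scenarios?name=nosdn-nofeature-ha&installer=fuel&version=colorado
print(build_scenario_query({'name': 'nosdn-nofeature-ha',
                            'installer': 'fuel',
                            'version': 'colorado'}))
# -> {'name': 'nosdn-nofeature-ha',
#     'installers': {'$elemMatch': {'installer': 'fuel',
#                                   'versions.version': 'colorado'}}}
```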
diff --git a/utils/test/testapi/opnfv_testapi/resources/scenario_models.py b/utils/test/testapi/opnfv_testapi/resources/scenario_models.py
index b4bb3634b..f89a12428 100644
--- a/utils/test/testapi/opnfv_testapi/resources/scenario_models.py
+++ b/utils/test/testapi/opnfv_testapi/resources/scenario_models.py
@@ -11,7 +11,7 @@ class ScenarioTI(models.ModelBase):
@swagger.model()
class ScenarioScore(models.ModelBase):
- def __init__(self, date=None, score=''):
+ def __init__(self, date=None, score='0'):
self.date = date
self.score = score
@@ -27,14 +27,19 @@ class ScenarioProject(models.ModelBase):
@ptype trust_indicators: C{list} of L{ScenarioTI}
"""
def __init__(self,
- name='',
+ project='',
customs=None,
scores=None,
trust_indicators=None):
- self.name = name
+ self.project = project
self.customs = customs
self.scores = scores
- self.trust_indicator = trust_indicators
+ self.trust_indicators = trust_indicators
+
+ @staticmethod
+ def attr_parser():
+ return {'scores': ScenarioScore,
+ 'trust_indicators': ScenarioTI}
@swagger.model()
@@ -43,10 +48,14 @@ class ScenarioVersion(models.ModelBase):
@property projects:
@ptype projects: C{list} of L{ScenarioProject}
"""
- def __init__(self, version, projects=None):
+ def __init__(self, version=None, projects=None):
self.version = version
self.projects = projects
+ @staticmethod
+ def attr_parser():
+ return {'projects': ScenarioProject}
+
@swagger.model()
class ScenarioInstaller(models.ModelBase):
@@ -54,11 +63,14 @@ class ScenarioInstaller(models.ModelBase):
@property versions:
@ptype versions: C{list} of L{ScenarioVersion}
"""
- def __init__(self, installer=None, owner=None, versions=None):
+ def __init__(self, installer=None, versions=None):
self.installer = installer
- self.owner = owner
self.versions = versions if versions else list()
+ @staticmethod
+ def attr_parser():
+ return {'versions': ScenarioVersion}
+
@swagger.model()
class ScenarioCreateRequest(models.ModelBase):
@@ -70,6 +82,10 @@ class ScenarioCreateRequest(models.ModelBase):
self.name = name
self.installers = installers if installers else list()
+ @staticmethod
+ def attr_parser():
+ return {'installers': ScenarioInstaller}
+
@swagger.model()
class Scenario(models.ModelBase):
@@ -83,6 +99,10 @@ class Scenario(models.ModelBase):
self.creation_date = create_date
self.installers = installers if installers else list()
+ @staticmethod
+ def attr_parser():
+ return {'installers': ScenarioInstaller}
+
@swagger.model()
class Scenarios(models.ModelBase):
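The `attr_parser` hooks added to each model tell the deserializer which nested fields should be rebuilt as model instances instead of plain dicts. How the base class consumes that map is not shown in this change, so the `from_dict` below is only a minimal sketch under that assumption, not the actual `models.ModelBase` code:

```python
class SketchModel(object):
    """Stand-in for models.ModelBase; the real testapi base class differs."""

    @staticmethod
    def attr_parser():
        return {}

    @classmethod
    def from_dict(cls, data):
        obj = cls()
        parsers = cls.attr_parser()
        for key, value in data.items():
            if key in parsers and isinstance(value, list):
                value = [parsers[key].from_dict(item) for item in value]
            setattr(obj, key, value)
        return obj


class ScenarioVersion(SketchModel):
    pass                                   # leaf level for this sketch


class ScenarioInstaller(SketchModel):
    @staticmethod
    def attr_parser():
        return {'versions': ScenarioVersion}


doc = {'installer': 'fuel', 'versions': [{'version': 'colorado', 'projects': []}]}
installer = ScenarioInstaller.from_dict(doc)
print(type(installer.versions[0]).__name__)   # ScenarioVersion
```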
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py b/utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py
index d86d8eadf..3c4fd01a3 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py
@@ -55,7 +55,8 @@ class MemCursor(object):
class MemDb(object):
- def __init__(self):
+ def __init__(self, name):
+ self.name = name
self.contents = []
pass
@@ -109,8 +110,59 @@ class MemDb(object):
return True
return False
- @staticmethod
- def _in(content, *args):
+ def _in(self, content, *args):
+ if self.name == 'scenarios':
+ return self._in_scenarios(content, *args)
+ else:
+ return self._in_others(content, *args)
+
+ def _in_scenarios_installer(self, installer, content):
+ hit = False
+ for s_installer in content['installers']:
+ if installer == s_installer['installer']:
+ hit = True
+
+ return hit
+
+ def _in_scenarios_version(self, version, content):
+ hit = False
+ for s_installer in content['installers']:
+ for s_version in s_installer['versions']:
+ if version == s_version['version']:
+ hit = True
+ return hit
+
+ def _in_scenarios_project(self, project, content):
+ hit = False
+ for s_installer in content['installers']:
+ for s_version in s_installer['versions']:
+ for s_project in s_version['projects']:
+ if project == s_project['project']:
+ hit = True
+
+ return hit
+
+ def _in_scenarios(self, content, *args):
+ for arg in args:
+ for k, v in arg.iteritems():
+ if k == 'installers':
+ for inner in v.values():
+ for i_k, i_v in inner.iteritems():
+ if i_k == 'installer':
+ return self._in_scenarios_installer(i_v,
+ content)
+ elif i_k == 'versions.version':
+ return self._in_scenarios_version(i_v,
+ content)
+ elif i_k == 'versions.projects.project':
+ return self._in_scenarios_project(i_v,
+ content)
+ elif content.get(k, None) != v:
+ return False
+
+ return True
+
+ def _in_others(self, content, *args):
for arg in args:
for k, v in arg.iteritems():
if k == 'start_date':
@@ -185,8 +237,8 @@ def __getattr__(name):
return globals()[name]
-pods = MemDb()
-projects = MemDb()
-testcases = MemDb()
-results = MemDb()
-scenarios = MemDb()
+pods = MemDb('pods')
+projects = MemDb('projects')
+testcases = MemDb('testcases')
+results = MemDb('results')
+scenarios = MemDb('scenarios')
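
The per-collection dispatch above expects the scenario handler to query the 'scenarios' collection with nested filters, presumably of the form {'installers': {'$elemMatch': {'versions.projects.project': 'functest'}}} (the $elemMatch wrapper and the literal values are assumptions here). A stand-alone sketch of the traversal the project matcher performs, against a document shaped like scenario-c2.json:

# Document shape mirrors the test fixture; values are illustrative only.
doc = {
    'installers': [
        {'installer': 'fuel',
         'versions': [
             {'version': 'colorado',
              'projects': [{'project': 'functest'}, {'project': 'yardstick'}]}
         ]}
    ]
}

def in_scenarios_project(project, content):
    # Same walk as MemDb._in_scenarios_project: installers -> versions -> projects.
    hit = False
    for s_installer in content['installers']:
        for s_version in s_installer['versions']:
            for s_project in s_version['projects']:
                if project == s_project['project']:
                    hit = True
    return hit

assert in_scenarios_project('functest', doc)
assert not in_scenarios_project('doctor', doc)
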
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/scenario-create.json b/utils/test/testapi/opnfv_testapi/tests/unit/scenario-c1.json
index eba8b6c0a..187802215 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/scenario-create.json
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/scenario-c1.json
@@ -35,4 +35,4 @@
]
}
]
-}
\ No newline at end of file
+}
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/scenario-c2.json b/utils/test/testapi/opnfv_testapi/tests/unit/scenario-c2.json
new file mode 100644
index 000000000..b6a3b83ab
--- /dev/null
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/scenario-c2.json
@@ -0,0 +1,73 @@
+{
+ "name": "odl_2-nofeature-ha",
+ "installers":
+ [
+ {
+ "installer": "fuel",
+ "versions":
+ [
+ {
+ "owner": "Lucky",
+ "version": "colorado",
+ "projects":
+ [
+ {
+ "project": "functest",
+ "customs": [ "healthcheck", "vping_ssh"],
+ "scores": [],
+ "trust_indicators": [
+ {
+ "date": "2017-01-18 22:46:44",
+ "status": "silver"
+ }
+
+ ]
+ },
+ {
+ "project": "yardstick",
+ "customs": ["suite-a"],
+ "scores": [
+ {
+ "date": "2017-01-08 22:46:44",
+ "score": "0"
+ }
+ ],
+ "trust_indicators": [
+ {
+ "date": "2017-01-18 22:46:44",
+ "status": "gold"
+ }
+ ]
+ }
+ ]
+ },
+ {
+ "owner": "Luke",
+ "version": "colorado",
+ "projects":
+ [
+ {
+ "project": "functest",
+ "customs": [ "healthcheck", "vping_ssh"],
+ "scores":
+ [
+ {
+ "date": "2017-01-09 22:46:44",
+ "score": "11/14"
+ }
+
+ ],
+ "trust_indicators": []
+ },
+ {
+ "project": "yardstick",
+ "customs": [],
+ "scores": [],
+ "trust_indicators": []
+ }
+ ]
+ }
+ ]
+ }
+ ]
+}
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_base.py b/utils/test/testapi/opnfv_testapi/tests/unit/test_base.py
index 9343ab2fb..fc780e44c 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/test_base.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/test_base.py
@@ -47,11 +47,11 @@ class TestBase(AsyncHTTPTestCase):
return self.create_help(self.basePath, req, *args)
def create_help(self, uri, req, *args):
- if req and not isinstance(req, str):
- req = json.dumps(req.format())
+ if req and not isinstance(req, str) and hasattr(req, 'format'):
+ req = req.format()
res = self.fetch(self._update_uri(uri, *args),
method='POST',
- body=req if req else json.dumps(None),
+ body=json.dumps(req),
headers=self.headers)
return self._get_return(res, self.create_res)
@@ -97,7 +97,7 @@ class TestBase(AsyncHTTPTestCase):
return uri.count('%s')
def _get_query_uri(self, query):
- return self.basePath + '?' + query
+ return self.basePath + '?' + query if query else self.basePath
def _get_uri(self, *args):
return self._update_uri(self.basePath, *args)
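
Net effect of the create_help() change above: a request may now be either a model instance exposing format() or a plain dict loaded from a JSON fixture, and both are json.dumps()'d exactly once before POSTing. A hedged illustration (the dict values below are invented for demonstration):

from opnfv_testapi.resources.scenario_models import ScenarioCreateRequest

req_model = ScenarioCreateRequest(name='nosdn-nofeature-ha')  # serialized via req.format()
req_dict = {'name': 'some-scenario', 'installers': []}        # already a dict, dumped as-is
# Both shapes are valid arguments to TestBase.create()/create_help().
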
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_scenario.py b/utils/test/testapi/opnfv_testapi/tests/unit/test_scenario.py
index 8e827813c..ff5979524 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/test_scenario.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/test_scenario.py
@@ -4,15 +4,20 @@ import os
from opnfv_testapi.common.constants import HTTP_BAD_REQUEST
from opnfv_testapi.common.constants import HTTP_FORBIDDEN
from opnfv_testapi.common.constants import HTTP_OK
+from opnfv_testapi.resources.scenario_models import Scenario
from opnfv_testapi.resources.scenario_models import ScenarioCreateRequest
+from opnfv_testapi.resources.scenario_models import Scenarios
from test_testcase import TestBase
class TestScenarioBase(TestBase):
def setUp(self):
super(TestScenarioBase, self).setUp()
+ self.get_res = Scenario
+ self.list_res = Scenarios
self.basePath = '/api/v1/scenarios'
- self.load_request('scenario-create.json')
+ self.req_d = self._load_request('scenario-c1.json')
+ self.req_2 = self._load_request('scenario-c2.json')
def tearDown(self):
pass
@@ -20,10 +25,26 @@ class TestScenarioBase(TestBase):
def assert_body(self, project, req=None):
pass
- def load_request(self, f_req):
- with open(os.path.join(os.path.dirname(__file__), f_req), 'r') as f:
- self.req_d = json.dumps(json.load(f))
+ @staticmethod
+ def _load_request(f_req):
+ abs_file = os.path.join(os.path.dirname(__file__), f_req)
+ with open(abs_file, 'r') as f:
+ loader = json.load(f)
f.close()
+ return loader
+
+ def create_return_name(self, req):
+ _, res = self.create(req)
+ return res.href.split('/')[-1]
+
+ def assert_res(self, code, scenario, req=None):
+ self.assertEqual(code, HTTP_OK)
+ if req is None:
+ req = self.req_d
+ scenario_dict = scenario.format_http()
+ self.assertIsNotNone(scenario_dict['_id'])
+ self.assertIsNotNone(scenario_dict['creation_date'])
+ self.assertDictContainsSubset(req, scenario_dict)
class TestScenarioCreate(TestScenarioBase):
@@ -53,3 +74,60 @@ class TestScenarioCreate(TestScenarioBase):
(code, body) = self.create_d()
self.assertEqual(code, HTTP_FORBIDDEN)
self.assertIn('already exists', body)
+
+
+class TestScenarioGet(TestScenarioBase):
+ def setUp(self):
+ super(TestScenarioGet, self).setUp()
+ self.scenario_1 = self.create_return_name(self.req_d)
+ self.scenario_2 = self.create_return_name(self.req_2)
+
+ def test_getByName(self):
+ code, body = self.get(self.scenario_1)
+ self.assert_res(code, body, req=self.req_d)
+
+ def test_getAll(self):
+ self._query_and_assert(query=None, reqs=[self.req_d, self.req_2])
+
+ def test_queryName(self):
+ query = self._set_query('name=nosdn-nofeature-ha')
+ self._query_and_assert(query, reqs=[self.req_d])
+
+ def test_queryInstaller(self):
+ query = self._set_query('installer=apex')
+ self._query_and_assert(query, reqs=[self.req_d])
+
+ def test_queryVersion(self):
+ query = self._set_query('version=master')
+ self._query_and_assert(query, reqs=[self.req_d])
+
+ def test_queryProject(self):
+ query = self._set_query('project=functest')
+ self._query_and_assert(query, reqs=[self.req_d, self.req_2])
+
+ def test_queryCombination(self):
+ query = self._set_query('name=nosdn-nofeature-ha',
+ 'installer=apex',
+ 'version=master',
+ 'project=functest')
+
+ self._query_and_assert(query, reqs=[self.req_d])
+
+ @staticmethod
+ def _set_query(*args):
+ uri = ''
+ for arg in args:
+ uri += arg + '&'
+ return uri[0: -1]
+
+ def _query_and_assert(self, query, found=True, reqs=None):
+ code, body = self.query(query)
+ if not found:
+ self.assertEqual(code, HTTP_OK)
+ self.assertEqual(0, len(body.scenarios))
+ else:
+ self.assertEqual(len(reqs), len(body.scenarios))
+ for req in reqs:
+ for scenario in body.scenarios:
+ if req['name'] == scenario.name:
+ self.assert_res(code, scenario, req)
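
For clarity, the query strings these new helpers build (reproducing _set_query with the same values the tests above use):

def set_query(*args):
    # Same join as TestScenarioGet._set_query: '&'-separated, trailing '&' stripped.
    uri = ''
    for arg in args:
        uri += arg + '&'
    return uri[0:-1]

print(set_query('name=nosdn-nofeature-ha', 'installer=apex',
                'version=master', 'project=functest'))
# name=nosdn-nofeature-ha&installer=apex&version=master&project=functest
# TestBase._get_query_uri() then issues GET /api/v1/scenarios?<that string>
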