summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rwxr-xr-xjjb/apex/apex-build.sh6
-rw-r--r--jjb/apex/apex.yml416
-rw-r--r--jjb/armband/armband-ci-jobs.yml116
-rwxr-xr-xjjb/armband/armband-download-artifact.sh2
-rw-r--r--jjb/armband/armband-project-jobs.yml15
-rwxr-xr-xjjb/armband/build.sh48
-rw-r--r--jjb/availability/availability.yml6
-rw-r--r--jjb/bottlenecks/bottlenecks-ci-jobs.yml28
-rw-r--r--jjb/bottlenecks/bottlenecks-project-jobs.yml11
-rw-r--r--jjb/compass4nfv/compass-build.sh2
-rw-r--r--jjb/compass4nfv/compass-ci-jobs.yml81
-rw-r--r--jjb/compass4nfv/compass-deploy.sh18
-rw-r--r--jjb/compass4nfv/compass-makeppa.sh10
-rw-r--r--jjb/compass4nfv/compass-project-jobs.yml2
-rw-r--r--jjb/conductor/conductor.yml6
-rw-r--r--jjb/copper/copper.yml6
-rw-r--r--jjb/doctor/doctor.yml6
-rw-r--r--jjb/domino/domino.yml7
-rw-r--r--jjb/dpacc/dpacc.yml6
-rw-r--r--jjb/fastpathmetrics/fastpathmetrics.yml6
-rwxr-xr-xjjb/fuel/fuel-build.sh2
-rw-r--r--jjb/fuel/fuel-ci-jobs.yml530
-rwxr-xr-xjjb/fuel/fuel-download-artifact.sh5
-rw-r--r--jjb/fuel/fuel-project-jobs.yml6
-rw-r--r--jjb/fuel/fuel-verify-jobs.yml21
-rw-r--r--jjb/functest/functest-ci-jobs.yml41
-rw-r--r--jjb/functest/functest-project-jobs.yml6
-rw-r--r--jjb/ipv6/ipv6.yml6
-rw-r--r--jjb/joid/joid-ci-jobs.yml188
-rw-r--r--jjb/joid/joid-deploy.sh128
-rw-r--r--jjb/joid/joid-verify-jobs.yml6
-rwxr-xr-xjjb/kvmfornfv/kvmfornfv-download-artifact.sh3
-rwxr-xr-xjjb/kvmfornfv/kvmfornfv-test.sh9
-rwxr-xr-xjjb/kvmfornfv/kvmfornfv-upload-artifact.sh10
-rw-r--r--jjb/kvmfornfv/kvmfornfv.yml6
-rw-r--r--jjb/multisite/multisite.yml13
-rw-r--r--jjb/netready/netready.yml6
-rw-r--r--jjb/octopus/octopus.yml6
-rw-r--r--jjb/onosfw/onosfw.yml18
-rw-r--r--jjb/opnfv/installer-params.yml12
-rw-r--r--jjb/opnfv/opnfv-docker.yml12
-rw-r--r--jjb/opnfv/opnfv-docs.yml8
-rw-r--r--jjb/opnfv/opnfv-lint.yml8
-rw-r--r--jjb/opnfv/slave-params.yml123
-rw-r--r--jjb/opnfvdocs/opnfvdocs.yml8
-rw-r--r--jjb/ovsnfv/ovsnfv.yml8
-rw-r--r--jjb/parser/parser.yml6
-rw-r--r--jjb/pharos/pharos.yml6
-rw-r--r--jjb/prediction/prediction.yml6
-rw-r--r--jjb/promise/promise.yml6
-rw-r--r--jjb/qtip/qtip-ci-jobs.yml30
-rw-r--r--jjb/qtip/qtip-project-jobs.yml6
-rw-r--r--jjb/releng-macros.yaml2
-rw-r--r--jjb/storperf/storperf.yml6
-rw-r--r--jjb/vnf_forwarding_graph/vnf_forwarding_graph.yml6
-rw-r--r--jjb/vswitchperf/vswitchperf.yml10
-rw-r--r--jjb/yardstick/yardstick-ci-jobs.yml67
-rwxr-xr-xjjb/yardstick/yardstick-daily.sh10
-rw-r--r--jjb/yardstick/yardstick-project-jobs.yml8
-rw-r--r--prototypes/bifrost/README.md48
-rw-r--r--prototypes/bifrost/playbooks/roles/bifrost-prepare-for-test-dynamic/defaults/main.yml4
-rw-r--r--prototypes/bifrost/playbooks/test-bifrost-infracloud.yaml66
-rwxr-xr-xprototypes/bifrost/scripts/destroy_env.sh38
-rwxr-xr-xprototypes/bifrost/scripts/test-bifrost-deployment.sh121
-rw-r--r--prototypes/puppet-infracloud/.gitkeep0
-rw-r--r--prototypes/puppet-infracloud/README.md52
-rw-r--r--prototypes/puppet-infracloud/creds/clouds.yaml12
-rw-r--r--prototypes/puppet-infracloud/hiera/common.yaml77
-rwxr-xr-xprototypes/puppet-infracloud/install_modules.sh121
-rw-r--r--prototypes/puppet-infracloud/manifests/site.pp63
-rw-r--r--prototypes/puppet-infracloud/modules.env81
-rw-r--r--prototypes/puppet-infracloud/modules/opnfv/manifests/compute.pp23
-rw-r--r--prototypes/puppet-infracloud/modules/opnfv/manifests/controller.pp85
-rw-r--r--prototypes/puppet-infracloud/modules/opnfv/manifests/server.pp222
-rwxr-xr-xutils/fetch_os_creds.sh1
-rwxr-xr-xutils/jenkins-jnlp-connect.sh9
-rwxr-xr-x[-rw-r--r--]utils/test/reporting/functest/reporting-status.py9
-rwxr-xr-x[-rw-r--r--]utils/test/reporting/functest/reporting-tempest.py204
-rwxr-xr-x[-rw-r--r--]utils/test/reporting/functest/reporting-vims.py158
-rw-r--r--utils/test/reporting/functest/reportingConf.py9
-rw-r--r--utils/test/reporting/functest/template/index-status-tmpl.html4
-rw-r--r--utils/test/reporting/functest/template/index-tempest-tmpl.html2
-rw-r--r--utils/test/reporting/functest/template/index-vims-tmpl.html2
-rw-r--r--utils/test/reporting/functest/testCase.py16
-rw-r--r--utils/test/result_collection_api/update/README.md27
-rwxr-xr-xutils/test/result_collection_api/update/playbook-update.sh90
-rwxr-xr-xutils/test/result_collection_api/update/templates/rm_images.sh8
-rw-r--r--utils/test/result_collection_api/update/test.yml12
-rw-r--r--utils/test/result_collection_api/update/update.yml11
89 files changed, 2748 insertions, 983 deletions
diff --git a/jjb/apex/apex-build.sh b/jjb/apex/apex-build.sh
index f6b2e3214..e3e3f6194 100755
--- a/jjb/apex/apex-build.sh
+++ b/jjb/apex/apex-build.sh
@@ -23,7 +23,7 @@ fi
# start the build
cd $WORKSPACE/ci
./build.sh $BUILD_ARGS
-RPM_VERSION=$(grep Version: $BUILD_DIRECTORY/opnfv-apex.spec | awk '{ print $2 }')-$(echo $OPNFV_ARTIFACT_VERSION | tr -d '_-')
+RPM_VERSION=$(grep Version: $BUILD_DIRECTORY/rpm_specs/opnfv-apex.spec | awk '{ print $2 }')-$(echo $OPNFV_ARTIFACT_VERSION | tr -d '_-')
# list the contents of BUILD_OUTPUT directory
echo "Build Directory is ${BUILD_DIRECTORY}"
echo "Build Directory Contents:"
@@ -44,10 +44,10 @@ if ! echo $BUILD_TAG | grep "apex-verify" 1> /dev/null; then
echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
- echo "OPNFV_ARTIFACT_MD5SUM=$(md5sum $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
+ echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
echo "OPNFV_SRPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.src.rpm"
echo "OPNFV_RPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.noarch.rpm"
- echo "OPNFV_RPM_MD5SUM=$(md5sum $BUILD_DIRECTORY/noarch/opnfv-apex-$RPM_VERSION.noarch.rpm | cut -d' ' -f1)"
+ echo "OPNFV_RPM_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/noarch/opnfv-apex-$RPM_VERSION.noarch.rpm | cut -d' ' -f1)"
echo "OPNFV_BUILD_URL=$BUILD_URL"
) > $WORKSPACE/opnfv.properties
fi
diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index da9089cd6..8a5a82f13 100644
--- a/jjb/apex/apex.yml
+++ b/jjb/apex/apex.yml
@@ -1,15 +1,14 @@
- project:
name: apex
jobs:
- - 'apex-verify-{stream1}'
- - 'apex-verify-{stream2}'
+ - 'apex-verify-{stream}'
+ - 'apex-verify-unit-tests-{stream}'
- 'apex-runner-{platform}-{scenario}-{stream}'
- - 'apex-runner-cperf-{stream1}'
+ - 'apex-runner-cperf-{stream}'
- 'apex-build-{stream}'
- 'apex-deploy-virtual-{scenario}-{stream}'
- 'apex-deploy-baremetal-{scenario}-{stream}'
- - 'apex-daily-{stream1}'
- - 'apex-daily-{stream2}'
+ - 'apex-daily-{stream}'
# stream: branch with - in place of / (eg. stable-arno)
# branch: branch (eg. stable/arno)
@@ -17,37 +16,37 @@
- master:
branch: 'master'
gs-pathname: ''
- block-stream: 'brahmaputra'
- slave: 'intel-pod7'
+ block-stream: 'colorado'
+ slave: 'lf-pod1'
verify-slave: 'apex-verify-master'
daily-slave: 'apex-daily-master'
- - brahmaputra:
- branch: 'stable/brahmaputra'
- gs-pathname: '/brahmaputra'
+ - colorado:
+ branch: 'stable/colorado'
+ gs-pathname: '/colorado'
block-stream: 'master'
slave: 'lf-pod1'
- verify-slave: 'apex-verify-brahmaputra'
- daily-slave: 'apex-daily-brahmaputra'
- disabled: true
+ verify-slave: 'apex-verify-colorado'
+ daily-slave: 'apex-daily-colorado'
+ disabled: false
stream1:
- master:
branch: 'master'
gs-pathname: ''
- block-stream: 'brahmaputra'
+ block-stream: 'colorado'
slave: 'lf-pod1'
verify-slave: 'apex-verify-master'
daily-slave: 'apex-daily-master'
stream2:
- - brahmaputra:
- branch: 'stable/brahmaputra'
- gs-pathname: '/brahmaputra'
+ - colorado:
+ branch: 'stable/colorado'
+ gs-pathname: '/colorado'
block-stream: 'master'
slave: 'lf-pod1'
- verify-slave: 'apex-verify-brahmaputra'
- daily-slave: 'apex-daily-brahmaputra'
- disabled: true
+ verify-slave: 'apex-verify-colorado'
+ daily-slave: 'apex-daily-colorado'
+ disabled: false
project: 'apex'
@@ -55,6 +54,7 @@
- 'os-nosdn-nofeature-noha'
- 'os-nosdn-nofeature-ha'
- 'os-nosdn-nofeature-ha-ipv6'
+ - 'os-nosdn-ovs-noha'
- 'os-nosdn-fdio-noha'
- 'os-odl_l2-nofeature-ha'
- 'os-odl_l2-bgpvpn-ha'
@@ -69,11 +69,13 @@
- 'baremetal'
- 'virtual'
-# Brahmaputra Verify
+# Unit Test
- job-template:
- name: 'apex-verify-{stream2}'
+ name: 'apex-verify-unit-tests-{stream}'
- node: '{slave}'
+ node: '{verify-slave}'
+
+ concurrent: true
parameters:
- apex-parameter:
@@ -113,60 +115,19 @@
branch-pattern: '**/{branch}'
file-paths:
- compare-type: ANT
- pattern: 'ci/**'
- - compare-type: ANT
- pattern: 'build/**'
- - compare-type: ANT
- pattern: 'lib/**'
- - compare-type: ANT
- pattern: 'config/**'
-
+ pattern: 'tests/**'
properties:
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'apex-daily.*{stream2}'
- - 'apex-deploy.*{stream2}'
- - 'apex-build.*{stream2}'
- - 'apex-runner.*{stream2}'
- - 'apex-verify-{stream2}'
+ - throttle:
+ max-per-node: 1
+ max-total: 10
+ option: 'project'
builders:
- 'apex-unit-test'
- - 'apex-build'
- - trigger-builds:
- - project: 'apex-deploy-virtual-os-odl_l2-nofeature-ha-{stream2}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-verify-{stream2}/build_output
- OPNFV_CLEAN=yes
- git-revision: false
- block: true
- - trigger-builds:
- - project: 'apex-deploy-virtual-os-onos-nofeature-ha-{stream2}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-verify-{stream2}/build_output
- OPNFV_CLEAN=yes
- git-revision: false
- block: true
- - trigger-builds:
- - project: 'apex-deploy-virtual-os-odl_l3-nofeature-ha-{stream2}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-verify-{stream2}/build_output
- OPNFV_CLEAN=yes
- git-revision: false
- block: true
- - trigger-builds:
- - project: 'apex-deploy-virtual-os-odl_l2-sfc-noha-{stream2}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-verify-{stream2}/build_output
- OPNFV_CLEAN=yes
- git-revision: false
- block: true
- - 'apex-workspace-cleanup'
-# Master Verify
+# Verify
- job-template:
- name: 'apex-verify-{stream1}'
+ name: 'apex-verify-{stream}'
node: '{verify-slave}'
@@ -223,11 +184,11 @@
use-build-blocker: true
block-level: 'NODE'
blocking-jobs:
- - 'apex-daily.*{stream1}'
- - 'apex-deploy.*{stream1}'
- - 'apex-build.*{stream1}'
- - 'apex-runner.*{stream1}'
- - 'apex-verify-{stream1}'
+ - 'apex-daily.*'
+ - 'apex-deploy.*'
+ - 'apex-build.*'
+ - 'apex-runner.*'
+ - 'apex-verify.*'
- throttle:
max-per-node: 1
max-total: 10
@@ -237,30 +198,30 @@
- 'apex-unit-test'
- 'apex-build'
- trigger-builds:
- - project: 'apex-deploy-virtual-os-nosdn-nofeature-ha-{stream1}'
+ - project: 'apex-deploy-virtual-os-nosdn-nofeature-ha-{stream}'
predefined-parameters: |
- BUILD_DIRECTORY=apex-verify-{stream1}
+ BUILD_DIRECTORY=apex-verify-{stream}
OPNFV_CLEAN=yes
git-revision: false
block: true
same-node: true
- trigger-builds:
- - project: 'functest-apex-{verify-slave}-suite-{stream1}'
+ - project: 'functest-apex-{verify-slave}-suite-{stream}'
predefined-parameters: |
DEPLOY_SCENARIO=os-nosdn-nofeature-ha
FUNCTEST_SUITE_NAME=healthcheck
block: true
same-node: true
- trigger-builds:
- - project: 'apex-deploy-virtual-os-odl_l2-nofeature-ha-{stream1}'
+ - project: 'apex-deploy-virtual-os-odl_l2-nofeature-ha-{stream}'
predefined-parameters: |
- BUILD_DIRECTORY=apex-verify-{stream1}
+ BUILD_DIRECTORY=apex-verify-{stream}
OPNFV_CLEAN=yes
git-revision: false
block: true
same-node: true
- trigger-builds:
- - project: 'functest-apex-{verify-slave}-suite-{stream1}'
+ - project: 'functest-apex-{verify-slave}-suite-{stream}'
predefined-parameters: |
DEPLOY_SCENARIO=os-odl_l2-nofeature-ha
FUNCTEST_SUITE_NAME=healthcheck
@@ -299,8 +260,8 @@
- build-blocker:
use-build-blocker: true
blocking-jobs:
- - 'apex-daily.*{stream}'
- - 'apex-verify.*{stream}'
+ - 'apex-daily.*'
+ - 'apex-verify.*'
builders:
- trigger-builds:
@@ -329,7 +290,7 @@
unstable-threshold: 'FAILURE'
- job-template:
- name: 'apex-runner-cperf-{stream1}'
+ name: 'apex-runner-cperf-{stream}'
# runner cperf job
@@ -360,7 +321,7 @@
use-build-blocker: false
block-level: 'NODE'
blocking-jobs:
- - 'apex-deploy.*{stream}'
+ - 'apex-deploy.*'
- throttle:
max-per-node: 1
max-total: 10
@@ -368,13 +329,13 @@
builders:
- trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl_l2-nofeature-ha-{stream1}'
+ - project: 'apex-deploy-baremetal-os-odl_l2-nofeature-ha-{stream}'
predefined-parameters:
OPNFV_CLEAN=yes
git-revision: false
block: true
- trigger-builds:
- - project: 'cperf-apex-intel-pod2-daily-{stream1}'
+ - project: 'cperf-apex-intel-pod2-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-odl_l2-nofeature-ha
block: true
@@ -417,7 +378,7 @@
use-build-blocker: true
block-level: 'NODE'
blocking-jobs:
- - 'apex-deploy.*{stream}'
+ - 'apex-deploy.*'
- throttle:
max-per-node: 1
max-total: 10
@@ -474,7 +435,7 @@
use-build-blocker: true
block-level: 'NODE'
blocking-jobs:
- - 'apex-deploy.*{stream}'
+ - 'apex-deploy.*'
- throttle:
max-per-node: 1
max-total: 10
@@ -517,159 +478,18 @@
use-build-blocker: true
block-level: 'NODE'
blocking-jobs:
- - 'apex-verify.*{stream}'
- - 'apex-deploy.*{stream}'
- - 'apex-build.*{stream}'
+ - 'apex-verify.*'
+ - 'apex-deploy.*'
+ - 'apex-build.*'
builders:
- 'apex-deploy'
- 'apex-workspace-cleanup'
-# Brahmaputra Daily
+# Daily
- job-template:
- name: 'apex-daily-{stream2}'
-
- # Job template for daily build
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: '{slave}'
-
- disabled: true
-
- scm:
- - git-scm:
- credentials-id: '{ssh-credentials}'
- refspec: ''
- branch: '{branch}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
-
- properties:
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'apex-verify.*{stream2}'
- - 'apex-deploy.*{stream2}'
- - 'apex-build.*{stream2}'
- - 'apex-runner.*{stream2}'
-
- triggers:
- - 'apex-{stream2}'
-
- builders:
- - trigger-builds:
- - project: 'apex-build-{stream2}'
- git-revision: true
- current-parameters: true
- block: true
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl_l2-nofeature-ha-{stream2}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream2}/build_output
- OPNFV_CLEAN=yes
- git-revision: true
- block: true
- - trigger-builds:
- - project: 'functest-apex-{slave}-daily-{stream2}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l2-nofeature-ha
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream2}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l2-nofeature-ha
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-onos-nofeature-ha-{stream2}'
- predefined-parameters:
- BUILD_DIRECTORY=apex-build-{stream2}/build_output
- git-revision: true
- block: true
- - trigger-builds:
- - project: 'functest-apex-{slave}-daily-{stream2}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-onos-nofeature-ha
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream2}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-onos-nofeature-ha
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl_l3-nofeature-ha-{stream2}'
- predefined-parameters:
- BUILD_DIRECTORY=apex-build-{stream2}/build_output
- git-revision: true
- block: true
- - trigger-builds:
- - project: 'functest-apex-{slave}-daily-{stream2}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream2}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl_l2-bgpvpn-ha-{stream2}'
- predefined-parameters:
- BUILD_DIRECTORY=apex-build-{stream2}/build_output
- git-revision: true
- block: true
- - trigger-builds:
- - project: 'functest-apex-{slave}-daily-{stream2}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l2-bgpvpn-ha
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream2}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-odl_l2-bgpvpn-ha
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
-
-# Master Daily
-- job-template:
- name: 'apex-daily-{stream1}'
+ name: 'apex-daily-{stream}'
# Job template for daily build
#
@@ -697,25 +517,25 @@
use-build-blocker: true
block-level: 'NODE'
blocking-jobs:
- - 'apex-verify.*{stream1}'
- - 'apex-deploy.*{stream1}'
- - 'apex-build.*{stream1}'
- - 'apex-runner.*{stream1}'
+ - 'apex-verify.*'
+ - 'apex-deploy.*'
+ - 'apex-build.*'
+ - 'apex-runner.*'
triggers:
- - 'apex-{stream1}'
+ - 'apex-{stream}'
builders:
- trigger-builds:
- - project: 'apex-build-{stream1}'
+ - project: 'apex-build-{stream}'
git-revision: true
current-parameters: true
same-node: true
block: true
- trigger-builds:
- - project: 'apex-deploy-baremetal-os-nosdn-nofeature-ha-{stream1}'
+ - project: 'apex-deploy-baremetal-os-nosdn-nofeature-ha-{stream}'
predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream1}/build
+ BUILD_DIRECTORY=apex-build-{stream}/build
OPNFV_CLEAN=yes
git-revision: true
same-node: true
@@ -723,7 +543,7 @@
build-step-failure-threshold: 'never'
block: true
- trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream1}'
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-nosdn-nofeature-ha
block: true
@@ -733,7 +553,7 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream1}'
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-nosdn-nofeature-ha
block: true
@@ -743,9 +563,9 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl_l2-nofeature-ha-{stream1}'
+ - project: 'apex-deploy-baremetal-os-odl_l2-nofeature-ha-{stream}'
predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream1}/build
+ BUILD_DIRECTORY=apex-build-{stream}/build
OPNFV_CLEAN=yes
git-revision: true
same-node: true
@@ -753,7 +573,7 @@
build-step-failure-threshold: 'never'
block: true
- trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream1}'
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-odl_l2-nofeature-ha
block: true
@@ -763,7 +583,7 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream1}'
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-odl_l2-nofeature-ha
block: true
@@ -773,9 +593,9 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl_l3-nofeature-ha-{stream1}'
+ - project: 'apex-deploy-baremetal-os-odl_l3-nofeature-ha-{stream}'
predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream1}/build
+ BUILD_DIRECTORY=apex-build-{stream}/build
OPNFV_CLEAN=yes
git-revision: true
same-node: true
@@ -783,7 +603,7 @@
build-step-failure-threshold: 'never'
block: true
- trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream1}'
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
block: true
@@ -793,7 +613,7 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream1}'
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
block: true
@@ -803,9 +623,9 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'apex-deploy-baremetal-os-onos-nofeature-ha-{stream1}'
+ - project: 'apex-deploy-baremetal-os-onos-nofeature-ha-{stream}'
predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream1}/build
+ BUILD_DIRECTORY=apex-build-{stream}/build
OPNFV_CLEAN=yes
git-revision: true
same-node: true
@@ -813,7 +633,7 @@
build-step-failure-threshold: 'never'
block: true
- trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream1}'
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-onos-nofeature-ha
block: true
@@ -823,7 +643,7 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream1}'
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-onos-nofeature-ha
block: true
@@ -833,9 +653,9 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl_l2-bgpvpn-ha-{stream1}'
+ - project: 'apex-deploy-baremetal-os-odl_l2-bgpvpn-ha-{stream}'
predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream1}/build
+ BUILD_DIRECTORY=apex-build-{stream}/build
OPNFV_CLEAN=yes
git-revision: true
same-node: true
@@ -843,7 +663,7 @@
build-step-failure-threshold: 'never'
block: true
- trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream1}'
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-odl_l2-bgpvpn-ha
block: true
@@ -853,7 +673,7 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream1}'
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-odl_l2-bgpvpn-ha
block: true
@@ -863,9 +683,9 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'apex-deploy-baremetal-os-onos-sfc-ha-{stream1}'
+ - project: 'apex-deploy-baremetal-os-onos-sfc-ha-{stream}'
predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream1}/build
+ BUILD_DIRECTORY=apex-build-{stream}/build
OPNFV_CLEAN=yes
git-revision: true
same-node: true
@@ -873,7 +693,7 @@
build-step-failure-threshold: 'never'
block: true
- trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream1}'
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-onos-sfc-ha
block: true
@@ -883,7 +703,7 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream1}'
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-onos-sfc-ha
block: true
@@ -893,9 +713,9 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl_l2-sfc-noha-{stream1}'
+ - project: 'apex-deploy-baremetal-os-odl_l2-sfc-noha-{stream}'
predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream1}/build
+ BUILD_DIRECTORY=apex-build-{stream}/build
OPNFV_CLEAN=yes
git-revision: true
same-node: true
@@ -903,7 +723,7 @@
build-step-failure-threshold: 'never'
block: true
- trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream1}'
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-odl_l2-sfc-noha
block: true
@@ -913,7 +733,7 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream1}'
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-odl_l2-sfc-noha
block: true
@@ -923,9 +743,9 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'apex-deploy-baremetal-os-odl_l2-fdio-noha-{stream1}'
+ - project: 'apex-deploy-baremetal-os-odl_l2-fdio-noha-{stream}'
predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream1}/build
+ BUILD_DIRECTORY=apex-build-{stream}/build
OPNFV_CLEAN=yes
git-revision: true
same-node: true
@@ -933,7 +753,7 @@
build-step-failure-threshold: 'never'
block: true
- trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream1}'
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-odl_l2-fdio-noha
block: true
@@ -943,7 +763,7 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream1}'
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-odl_l2-fdio-noha
block: true
@@ -953,9 +773,9 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'apex-deploy-baremetal-os-nosdn-fdio-noha-{stream1}'
+ - project: 'apex-deploy-baremetal-os-nosdn-fdio-noha-{stream}'
predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream1}/build
+ BUILD_DIRECTORY=apex-build-{stream}/build
OPNFV_CLEAN=yes
git-revision: true
same-node: true
@@ -963,7 +783,7 @@
build-step-failure-threshold: 'never'
block: true
- trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream1}'
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-nosdn-fdio-noha
block: true
@@ -973,7 +793,7 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream1}'
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
predefined-parameters:
DEPLOY_SCENARIO=os-nosdn-fdio-noha
block: true
@@ -983,9 +803,9 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'apex-deploy-baremetal-os-nosdn-fdio-noha-{stream1}'
+ - project: 'apex-deploy-virtual-os-nosdn-nofeature-ha-ipv6-{stream}'
predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream1}/build
+ BUILD_DIRECTORY=apex-build-{stream}/build
OPNFV_CLEAN=yes
git-revision: true
same-node: true
@@ -993,19 +813,19 @@
build-step-failure-threshold: 'never'
block: true
- trigger-builds:
- - project: 'functest-apex-{daily-slave}-daily-{stream1}'
- predefined-parameters:
- DEPLOY_SCENARIO=os-nosdn-fdio-noha
- block: true
+ - project: 'apex-deploy-baremetal-os-nosdn-ovs-noha-{stream}'
+ predefined-parameters: |
+ BUILD_DIRECTORY=apex-build-{stream}/build
+ OPNFV_CLEAN=yes
+ git-revision: true
same-node: true
block-thresholds:
build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
+ block: true
- trigger-builds:
- - project: 'yardstick-apex-{slave}-daily-{stream1}'
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
predefined-parameters:
- DEPLOY_SCENARIO=os-nosdn-fdio-noha
+ DEPLOY_SCENARIO=os-nosdn-ovs-noha
block: true
same-node: true
block-thresholds:
@@ -1013,15 +833,15 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'apex-deploy-virtual-os-nosdn-nofeature-ha-ipv6-{stream1}'
- predefined-parameters: |
- BUILD_DIRECTORY=apex-build-{stream1}/build
- OPNFV_CLEAN=yes
- git-revision: true
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-nosdn-ovs-noha
+ block: true
same-node: true
block-thresholds:
build-step-failure-threshold: 'never'
- block: true
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
- job-template:
name: 'apex-gs-clean-{stream}'
@@ -1074,6 +894,10 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: "Used for overriding the GIT URL coming from Global Jenkins configuration in case if the stuff is done on none-LF HW."
- string:
+ name: GS_PATHNAME
+ default: '{gs-pathname}'
+ description: "Version directory where opnfv artifacts are stored in gs repository"
+ - string:
name: GS_URL
default: artifacts.opnfv.org/$PROJECT{gs-pathname}
description: "URL to Google Storage."
@@ -1124,9 +948,9 @@
- trigger:
name: 'apex-master'
triggers:
- - timed: '0 3 * * *'
+ - timed: '0 0 20 8 *'
- trigger:
- name: 'apex-brahmaputra'
+ name: 'apex-colorado'
triggers:
- timed: '0 3 * * *'
- trigger:
diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml
index 9d7c198d0..6ea73e12a 100644
--- a/jjb/armband/armband-ci-jobs.yml
+++ b/jjb/armband/armband-ci-jobs.yml
@@ -2,7 +2,6 @@
- project:
name: 'armband-ci'
project: 'armband'
- installer: 'fuel'
#--------------------------------
# BRANCH ANCHORS
@@ -11,8 +10,8 @@
stream: master
branch: '{stream}'
gs-pathname: ''
- brahmaputra: &brahmaputra
- stream: brahmaputra
+ colorado: &colorado
+ stream: colorado
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
#--------------------------------
@@ -22,16 +21,20 @@
#--------------------------------
pod:
- arm-pod1:
- <<: *brahmaputra
+ installer: fuel
+ <<: *colorado
- arm-pod2:
- <<: *brahmaputra
+ installer: fuel
+ <<: *colorado
#--------------------------------
# master
#--------------------------------
pod:
- arm-pod1:
+ installer: fuel
<<: *master
- arm-pod2:
+ installer: fuel
<<: *master
#--------------------------------
# scenarios
@@ -39,27 +42,27 @@
scenario:
# HA scenarios
- 'os-nosdn-nofeature-ha':
- auto-trigger-name: 'daily-trigger-disabled'
+ auto-trigger-name: 'armband-{installer}-{scenario}-{pod}-{stream}-trigger'
- 'os-odl_l2-nofeature-ha':
- auto-trigger-name: 'armband-{scenario}-{pod}-{stream}-trigger'
+ auto-trigger-name: 'armband-{installer}-{scenario}-{pod}-{stream}-trigger'
- 'os-odl_l3-nofeature-ha':
- auto-trigger-name: 'daily-trigger-disabled'
+ auto-trigger-name: 'armband-{installer}-{scenario}-{pod}-{stream}-trigger'
- 'os-odl_l2-bgpvpn-ha':
- auto-trigger-name: 'daily-trigger-disabled'
+ auto-trigger-name: 'armband-{installer}-{scenario}-{pod}-{stream}-trigger'
# NOHA scenarios
- 'os-odl_l2-nofeature-noha':
- auto-trigger-name: 'daily-trigger-disabled'
+ auto-trigger-name: 'armband-{installer}-{scenario}-{pod}-{stream}-trigger'
jobs:
- - 'armband-{scenario}-{pod}-daily-{stream}'
- - 'armband-deploy-{pod}-daily-{stream}'
+ - 'armband-{installer}-{scenario}-{pod}-daily-{stream}'
+ - 'armband-{installer}-deploy-{pod}-daily-{stream}'
########################
# job templates
########################
- job-template:
- name: 'armband-{scenario}-{pod}-daily-{stream}'
+ name: 'armband-{installer}-{scenario}-{pod}-daily-{stream}'
concurrent: false
@@ -72,7 +75,7 @@
- build-blocker:
use-build-blocker: true
blocking-jobs:
- - 'armband-os-.*?-{pod}-daily-{stream}'
+ - 'armband-{installer}-os-.*?-{pod}-daily-.*'
block-level: 'NODE'
wrappers:
@@ -96,7 +99,7 @@
builders:
- trigger-builds:
- - project: 'armband-deploy-{pod}-daily-{stream}'
+ - project: 'armband-{installer}-deploy-{pod}-daily-{stream}'
current-parameters: false
predefined-parameters:
DEPLOY_SCENARIO={scenario}
@@ -115,7 +118,7 @@
unstable-threshold: 'FAILURE'
- job-template:
- name: 'armband-deploy-{pod}-daily-{stream}'
+ name: 'armband-{installer}-deploy-{pod}-daily-{stream}'
concurrent: false
@@ -128,8 +131,8 @@
- build-blocker:
use-build-blocker: true
blocking-jobs:
- - 'armband-deploy-{pod}-daily-{stream}'
- - 'armband-deploy-generic-daily-.*'
+ - 'armband-{installer}-deploy-{pod}-daily-{stream}'
+ - 'armband-{installer}-deploy-generic-daily-.*'
block-level: 'NODE'
parameters:
@@ -162,7 +165,7 @@
publishers:
- email:
- recipients: josep.puigdemont@enea.com armband@enea.com
+ recipients: armband@enea.com
########################
# parameter macros
@@ -190,16 +193,47 @@
#----------------------------------------------------------
# Enea Armband POD 1 Triggers running against master branch
#----------------------------------------------------------
-# No triggers for master for now
- trigger:
- name: 'armband-os-odl_l2-nofeature-ha-arm-pod1-master-trigger'
+ name: 'armband-fuel-os-odl_l2-nofeature-ha-arm-pod1-master-trigger'
triggers:
- - timed: ''
+ - timed: '0 3 * * 1,4'
+- trigger:
+ name: 'armband-fuel-os-nosdn-nofeature-ha-arm-pod1-master-trigger'
+ triggers:
+ - timed: '0 15 * * 1,4'
+- trigger:
+ name: 'armband-fuel-os-odl_l3-nofeature-ha-arm-pod1-master-trigger'
+ triggers:
+ - timed: '0 3 * * 2,5'
+- trigger:
+ name: 'armband-fuel-os-odl_l2-bgpvpn-ha-arm-pod1-master-trigger'
+ triggers:
+ - timed: '0 15 * * 2,5'
+- trigger:
+ name: 'armband-fuel-os-odl_l2-nofeature-noha-arm-pod1-master-trigger'
+ triggers:
+ - timed: '0 3 * * 3,6'
#---------------------------------------------------------------
# Enea Armband POD 1 Triggers running against brahmaputra branch
#---------------------------------------------------------------
- trigger:
- name: 'armband-os-odl_l2-nofeature-ha-arm-pod1-brahmaputra-trigger'
+ name: 'armband-fuel-os-odl_l2-nofeature-ha-arm-pod1-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'armband-fuel-os-nosdn-nofeature-ha-arm-pod1-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'armband-fuel-os-odl_l3-nofeature-ha-arm-pod1-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'armband-fuel-os-odl_l2-bgpvpn-ha-arm-pod1-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'armband-fuel-os-odl_l2-nofeature-noha-arm-pod1-colorado-trigger'
triggers:
- timed: ''
#----------------------------------------------------------
@@ -207,13 +241,45 @@
#----------------------------------------------------------
# No triggers for master for now
- trigger:
- name: 'armband-os-odl_l2-nofeature-ha-arm-pod2-master-trigger'
+ name: 'armband-fuel-os-odl_l2-nofeature-ha-arm-pod2-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'armband-fuel-os-nosdn-nofeature-ha-arm-pod2-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'armband-fuel-os-odl_l3-nofeature-ha-arm-pod2-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'armband-fuel-os-odl_l2-bgpvpn-ha-arm-pod2-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'armband-fuel-os-odl_l2-nofeature-noha-arm-pod2-master-trigger'
triggers:
- timed: ''
#---------------------------------------------------------------
# Enea Armband POD 2 Triggers running against brahmaputra branch
#---------------------------------------------------------------
- trigger:
- name: 'armband-os-odl_l2-nofeature-ha-arm-pod2-brahmaputra-trigger'
+ name: 'armband-fuel-os-odl_l2-nofeature-ha-arm-pod2-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'armband-fuel-os-nosdn-nofeature-ha-arm-pod2-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'armband-fuel-os-odl_l3-nofeature-ha-arm-pod2-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'armband-fuel-os-odl_l2-bgpvpn-ha-arm-pod2-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'armband-fuel-os-odl_l2-nofeature-noha-arm-pod2-colorado-trigger'
triggers:
- timed: ''
diff --git a/jjb/armband/armband-download-artifact.sh b/jjb/armband/armband-download-artifact.sh
index 7d01c09cf..ed7897b8e 100755
--- a/jjb/armband/armband-download-artifact.sh
+++ b/jjb/armband/armband-download-artifact.sh
@@ -10,6 +10,8 @@
set -o errexit
set -o pipefail
+echo "Host info: $(hostname) $(hostname -I)"
+
# Configurable environment variables:
# ISOSTORE (/iso_mount/opnfv_ci)
diff --git a/jjb/armband/armband-project-jobs.yml b/jjb/armband/armband-project-jobs.yml
index 764a5d45b..10f8d6531 100644
--- a/jjb/armband/armband-project-jobs.yml
+++ b/jjb/armband/armband-project-jobs.yml
@@ -7,15 +7,17 @@
project: '{name}'
+ installer: 'fuel'
+
jobs:
- 'armband-verify-{stream}'
- - 'armband-build-daily-{stream}'
+ - 'armband-{installer}-build-daily-{stream}'
stream:
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
@@ -62,7 +64,7 @@
echo "Nothing to verify!"
- job-template:
- name: 'armband-build-daily-{stream}'
+ name: 'armband-{installer}-build-daily-{stream}'
concurrent: false
@@ -77,6 +79,7 @@
- project-parameter:
project: '{project}'
- 'opnfv-build-arm-defaults'
+ - '{installer}-defaults'
- armband-project-parameter:
gs-pathname: '{gs-pathname}'
@@ -103,7 +106,7 @@
publishers:
- email:
- recipients: josep.puigdemont@enea.com armband@enea.com
+ recipients: armband@enea.com
########################
# parameter macros
@@ -116,6 +119,10 @@
default: $WORKSPACE/build_output
description: "Directory where the build artifact will be located upon the completion of the build."
- string:
+ name: CACHE_DIRECTORY
+ default: $HOME/opnfv/cache/$INSTALLER_TYPE
+ description: "Directory where the cache to be used during the build is located."
+ - string:
name: GS_URL
default: artifacts.opnfv.org/$PROJECT{gs-pathname}
description: "URL to Google Storage."
diff --git a/jjb/armband/build.sh b/jjb/armband/build.sh
index 81917f6de..300306f77 100755
--- a/jjb/armband/build.sh
+++ b/jjb/armband/build.sh
@@ -1,6 +1,8 @@
#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
##############################################################################
# Copyright (c) 2016 Ericsson AB and others.
+# Copyright (c) 2016 Enea AB.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
@@ -10,16 +12,58 @@ set -o errexit
set -o nounset
set -o pipefail
+echo "Host info: $(hostname) $(hostname -I)"
+
cd $WORKSPACE
+# Armband requires initializing git submodules (e.g. for Fuel's clean_cache.sh)
+make submodules-init
+
+# remove the expired items from cache
+test -f $WORKSPACE/ci/clean_cache.sh && $WORKSPACE/ci/clean_cache.sh $CACHE_DIRECTORY
+
+LATEST_ISO_PROPERTIES=$WORKSPACE/latest.iso.properties
+if [[ "$JOB_NAME" =~ "daily" ]]; then
+ # check to see if we already have an artifact on artifacts.opnfv.org
+ # for this commit during daily builds
+ echo "Checking to see if we already built and stored Armband Fuel ISO for this commit"
+
+ curl -s -o $LATEST_ISO_PROPERTIES http://$GS_URL/latest.properties 2>/dev/null
+fi
+
+# get metadata of latest ISO
+if grep -q OPNFV_GIT_SHA1 $LATEST_ISO_PROPERTIES 2>/dev/null; then
+ LATEST_ISO_SHA1=$(grep OPNFV_GIT_SHA1 $LATEST_ISO_PROPERTIES | cut -d'=' -f2)
+ LATEST_ISO_URL=$(grep OPNFV_ARTIFACT_URL $LATEST_ISO_PROPERTIES | cut -d'=' -f2)
+else
+ LATEST_ISO_SHA1=none
+fi
+
# get current SHA1
CURRENT_SHA1=$(git rev-parse HEAD)
+# set FORCE_BUILD to false for non-daily builds
+FORCE_BUILD=${FORCE_BUILD:-false}
+
+if [[ "$CURRENT_SHA1" == "$LATEST_ISO_SHA1" && "$FORCE_BUILD" == "false" ]]; then
+ echo "***************************************************"
+ echo " An ISO has already been built for this commit"
+ echo " $LATEST_ISO_URL"
+ echo "***************************************************"
+else
+ echo "This commit has not been built yet or forced build! Proceeding with the build."
+ /bin/rm -f $LATEST_ISO_PROPERTIES
+ echo
+fi
+
# log info to console
-echo "Starting the build of Armband. This could take some time..."
+echo "Starting the build of Armband $INSTALLER_TYPE. This could take some time..."
echo "-----------------------------------------------------------"
echo
+# create the cache directory if it doesn't exist
+mkdir -p $CACHE_DIRECTORY
+
# set OPNFV_ARTIFACT_VERSION
if [[ "$JOB_NAME" =~ "merge" ]]; then
echo "Building Fuel ISO for a merged change"
@@ -39,7 +83,7 @@ NOCACHE_ARG=${NOCACHE_ARG:-}
# start the build
cd $WORKSPACE/ci
-./build.sh $BUILD_DIRECTORY
+./build.sh -v $OPNFV_ARTIFACT_VERSION $NOCACHE_ARG -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY
# list the build artifacts
ls -al $BUILD_DIRECTORY
diff --git a/jjb/availability/availability.yml b/jjb/availability/availability.yml
index 0d887e760..c42efff11 100644
--- a/jjb/availability/availability.yml
+++ b/jjb/availability/availability.yml
@@ -14,13 +14,17 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: 'false'
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: 'false'
- job-template:
name: 'availability-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/bottlenecks/bottlenecks-ci-jobs.yml b/jjb/bottlenecks/bottlenecks-ci-jobs.yml
index 686b7d45c..4bc56ab1b 100644
--- a/jjb/bottlenecks/bottlenecks-ci-jobs.yml
+++ b/jjb/bottlenecks/bottlenecks-ci-jobs.yml
@@ -18,8 +18,8 @@
gs-packagepath: '/{suite}'
#docker tag used for version control
docker-tag: 'latest'
- brahmaputra: &brahmaputra
- stream: brahmaputra
+ colorado: &colorado
+ stream: colorado
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
gs-packagepath: '/{stream}/{suite}'
@@ -48,26 +48,26 @@
slave-label: compass-baremetal
installer: compass
auto-trigger-name: 'daily-trigger-disabled'
- <<: *brahmaputra
+ <<: *colorado
- virtual:
slave-label: compass-virtual
installer: compass
auto-trigger-name: 'daily-trigger-disabled'
- <<: *brahmaputra
+ <<: *colorado
#--------------------------------
# None-CI PODs
#--------------------------------
- - orange-pod2:
- slave-label: '{pod}'
- installer: joid
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *brahmaputra
- - orange-pod2:
- slave-label: '{pod}'
- installer: joid
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
+ # - orange-pod2:
+ # slave-label: '{pod}'
+ # installer: joid
+ # auto-trigger-name: 'daily-trigger-disabled'
+ # <<: *colorado
+ # - orange-pod2:
+ # slave-label: '{pod}'
+ # installer: joid
+ # auto-trigger-name: 'daily-trigger-disabled'
+ # <<: *master
- huawei-pod2:
slave-label: '{pod}'
installer: compass
diff --git a/jjb/bottlenecks/bottlenecks-project-jobs.yml b/jjb/bottlenecks/bottlenecks-project-jobs.yml
index 28b49bc8b..fffc22def 100644
--- a/jjb/bottlenecks/bottlenecks-project-jobs.yml
+++ b/jjb/bottlenecks/bottlenecks-project-jobs.yml
@@ -19,10 +19,12 @@
gs-pathname: ''
#This is used for different test suite dependent packages storage
gs-packagepath: '/{suite}'
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
gs-packagepath: '/{stream}/{suite}'
+ disabled: false
suite:
- 'rubbos'
@@ -35,6 +37,8 @@
- job-template:
name: 'bottlenecks-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
@@ -74,6 +78,8 @@
- job-template:
name: 'bottlenecks-merge-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
@@ -107,6 +113,9 @@
- job-template:
name: 'bottlenecks-{suite}-upload-artifacts-{stream}'
+
+ disabled: '{obj:disabled}'
+
concurrent: true
properties:
diff --git a/jjb/compass4nfv/compass-build.sh b/jjb/compass4nfv/compass-build.sh
index d08c39c51..093debba7 100644
--- a/jjb/compass4nfv/compass-build.sh
+++ b/jjb/compass4nfv/compass-build.sh
@@ -35,7 +35,7 @@ ls -al $BUILD_DIRECTORY
echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
- echo "OPNFV_ARTIFACT_MD5SUM=$(md5sum $BUILD_DIRECTORY/compass.iso | cut -d' ' -f1)"
+ echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/compass.iso | cut -d' ' -f1)"
echo "OPNFV_BUILD_URL=$BUILD_URL"
) > $BUILD_DIRECTORY/opnfv.properties
echo
diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml
index bb24fdf51..426e59777 100644
--- a/jjb/compass4nfv/compass-ci-jobs.yml
+++ b/jjb/compass4nfv/compass-ci-jobs.yml
@@ -10,8 +10,8 @@
stream: master
branch: '{stream}'
gs-pathname: ''
- brahmaputra: &brahmaputra
- stream: brahmaputra
+ colorado: &colorado
+ stream: colorado
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
#--------------------------------
@@ -31,11 +31,11 @@
- baremetal:
slave-label: compass-baremetal
os-version: 'trusty'
- <<: *brahmaputra
+ <<: *colorado
- virtual:
slave-label: compass-virtual
os-version: 'trusty'
- <<: *brahmaputra
+ <<: *colorado
#--------------------------------
# master
#--------------------------------
@@ -48,27 +48,35 @@
- 'os-nosdn-nofeature-ha':
disabled: false
auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ openstack-os-version: ''
- 'os-odl_l2-nofeature-ha':
disabled: false
auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ openstack-os-version: ''
- 'os-odl_l3-nofeature-ha':
disabled: false
auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ openstack-os-version: ''
- 'os-onos-nofeature-ha':
disabled: false
auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ openstack-os-version: ''
- 'os-ocl-nofeature-ha':
disabled: false
auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ openstack-os-version: ''
- 'os-onos-sfc-ha':
disabled: false
auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ openstack-os-version: ''
- 'os-odl_l2-moon-ha':
disabled: false
auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ openstack-os-version: 'xenial'
- 'os-nosdn-kvm-ha':
disabled: false
auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ openstack-os-version: ''
jobs:
- 'compass-{scenario}-{pod}-daily-{stream}'
@@ -122,6 +130,7 @@
predefined-parameters: |
DEPLOY_SCENARIO={scenario}
COMPASS_OS_VERSION={os-version}
+ COMPASS_OS_VERSION_OPTION={openstack-os-version}
same-node: true
block: true
- trigger-builds:
@@ -238,7 +247,7 @@
- trigger:
name: 'compass-os-nosdn-nofeature-ha-huawei-pod2-master-trigger'
triggers:
- - timed: '0 3 * * *'
+ - timed: '0 19 * * *'
- trigger:
name: 'compass-os-odl_l2-nofeature-ha-huawei-pod2-master-trigger'
triggers:
@@ -246,11 +255,11 @@
- trigger:
name: 'compass-os-odl_l3-nofeature-ha-huawei-pod2-master-trigger'
triggers:
- - timed: '0 19 * * *'
+ - timed: '0 15 * * *'
- trigger:
name: 'compass-os-onos-nofeature-ha-huawei-pod2-master-trigger'
triggers:
- - timed: '0 15 * * *'
+ - timed: ''
- trigger:
name: 'compass-os-ocl-nofeature-ha-huawei-pod2-master-trigger'
triggers:
@@ -258,7 +267,7 @@
- trigger:
name: 'compass-os-onos-sfc-ha-huawei-pod2-master-trigger'
triggers:
- - timed: '0 7 * * *'
+ - timed: ''
- trigger:
name: 'compass-os-odl_l2-moon-ha-huawei-pod2-master-trigger'
triggers:
@@ -271,27 +280,27 @@
- trigger:
name: 'compass-os-nosdn-nofeature-ha-baremetal-master-trigger'
triggers:
- - timed: '0 3 * * *'
+ - timed: '0 2 * * *'
- trigger:
name: 'compass-os-odl_l2-nofeature-ha-baremetal-master-trigger'
triggers:
- - timed: '0 23 * * *'
+ - timed: '0 22 * * *'
- trigger:
name: 'compass-os-odl_l3-nofeature-ha-baremetal-master-trigger'
triggers:
- - timed: '0 19 * * *'
+ - timed: '0 18 * * *'
- trigger:
name: 'compass-os-onos-nofeature-ha-baremetal-master-trigger'
triggers:
- - timed: '0 15 * * *'
+ - timed: '0 14 * * *'
- trigger:
name: 'compass-os-ocl-nofeature-ha-baremetal-master-trigger'
triggers:
- - timed: '0 11 * * *'
+ - timed: '0 10 * * *'
- trigger:
name: 'compass-os-onos-sfc-ha-baremetal-master-trigger'
triggers:
- - timed: '0 7 * * *'
+ - timed: '0 6 * * *'
- trigger:
name: 'compass-os-odl_l2-moon-ha-baremetal-master-trigger'
triggers:
@@ -302,46 +311,46 @@
- timed: ''
- trigger:
- name: 'compass-os-nosdn-nofeature-ha-baremetal-brahmaputra-trigger'
+ name: 'compass-os-nosdn-nofeature-ha-baremetal-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'compass-os-odl_l2-nofeature-ha-baremetal-brahmaputra-trigger'
+ name: 'compass-os-odl_l2-nofeature-ha-baremetal-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'compass-os-odl_l3-nofeature-ha-baremetal-brahmaputra-trigger'
+ name: 'compass-os-odl_l3-nofeature-ha-baremetal-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'compass-os-onos-nofeature-ha-baremetal-brahmaputra-trigger'
+ name: 'compass-os-onos-nofeature-ha-baremetal-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'compass-os-ocl-nofeature-ha-baremetal-brahmaputra-trigger'
+ name: 'compass-os-ocl-nofeature-ha-baremetal-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'compass-os-onos-sfc-ha-baremetal-brahmaputra-trigger'
+ name: 'compass-os-onos-sfc-ha-baremetal-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'compass-os-odl_l2-moon-ha-baremetal-brahmaputra-trigger'
+ name: 'compass-os-odl_l2-moon-ha-baremetal-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'compass-os-nosdn-kvm-ha-baremetal-brahmaputra-trigger'
+ name: 'compass-os-nosdn-kvm-ha-baremetal-colorado-trigger'
triggers:
- timed: ''
- trigger:
name: 'compass-os-nosdn-nofeature-ha-virtual-master-trigger'
triggers:
- - timed: '0 3 * * *'
+ - timed: '0 21 * * *'
- trigger:
name: 'compass-os-odl_l2-nofeature-ha-virtual-master-trigger'
triggers:
- - timed: '0 23 * * *'
+ - timed: '0 20 * * *'
- trigger:
name: 'compass-os-odl_l3-nofeature-ha-virtual-master-trigger'
triggers:
@@ -349,53 +358,53 @@
- trigger:
name: 'compass-os-onos-nofeature-ha-virtual-master-trigger'
triggers:
- - timed: '0 15 * * *'
+ - timed: '0 18 * * *'
- trigger:
name: 'compass-os-ocl-nofeature-ha-virtual-master-trigger'
triggers:
- - timed: '0 11 * * *'
+ - timed: '0 16 * * *'
- trigger:
name: 'compass-os-onos-sfc-ha-virtual-master-trigger'
triggers:
- - timed: '0 7 * * *'
+ - timed: '0 15 * * *'
- trigger:
name: 'compass-os-odl_l2-moon-ha-virtual-master-trigger'
triggers:
- - timed: ''
+ - timed: '0 14 * * *'
- trigger:
name: 'compass-os-nosdn-kvm-ha-virtual-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'compass-os-nosdn-nofeature-ha-virtual-brahmaputra-trigger'
+ name: 'compass-os-nosdn-nofeature-ha-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'compass-os-odl_l2-nofeature-ha-virtual-brahmaputra-trigger'
+ name: 'compass-os-odl_l2-nofeature-ha-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'compass-os-odl_l3-nofeature-ha-virtual-brahmaputra-trigger'
+ name: 'compass-os-odl_l3-nofeature-ha-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'compass-os-onos-nofeature-ha-virtual-brahmaputra-trigger'
+ name: 'compass-os-onos-nofeature-ha-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'compass-os-ocl-nofeature-ha-virtual-brahmaputra-trigger'
+ name: 'compass-os-ocl-nofeature-ha-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'compass-os-onos-sfc-ha-virtual-brahmaputra-trigger'
+ name: 'compass-os-onos-sfc-ha-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'compass-os-odl_l2-moon-ha-virtual-brahmaputra-trigger'
+ name: 'compass-os-odl_l2-moon-ha-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'compass-os-nosdn-kvm-ha-virtual-brahmaputra-trigger'
+ name: 'compass-os-nosdn-kvm-ha-virtual-colorado-trigger'
triggers:
- timed: ''
diff --git a/jjb/compass4nfv/compass-deploy.sh b/jjb/compass4nfv/compass-deploy.sh
index d71316459..65e44b670 100644
--- a/jjb/compass4nfv/compass-deploy.sh
+++ b/jjb/compass4nfv/compass-deploy.sh
@@ -25,8 +25,18 @@ echo 1 > /proc/sys/vm/drop_caches
export CONFDIR=$WORKSPACE/deploy/conf
export ISO_URL=file://$BUILD_DIRECTORY/compass.iso
+cd $WORKSPACE
+
+export OS_VERSION=${COMPASS_OS_VERSION}
+export OPENSTACK_VERSION=${COMPASS_OPENSTACK_VERSION}
+if [[ "${COMPASS_OS_VERSION_OPTION}" = "xenial" ]] && [[ "${OPENSTACK_VERSION}" = "mitaka" ]]; then
+ export OPENSTACK_VERSION=${OPENSTACK_VERSION}_${COMPASS_OS_VERSION_OPTION}
+ export OS_VERSION=${COMPASS_OS_VERSION_OPTION}
+fi
+
if [[ "${DEPLOY_SCENARIO}" =~ "-ocl" ]]; then
export NETWORK_CONF_FILE=network_ocl.yml
+ export OPENSTACK_VERSION=liberty
elif [[ "${DEPLOY_SCENARIO}" =~ "-onos" ]]; then
export NETWORK_CONF_FILE=network_onos.yml
else
@@ -42,14 +52,6 @@ else
export DHA_CONF=$CONFDIR/hardware_environment/$NODE_NAME/${DEPLOY_SCENARIO}.yml
fi
-cd $WORKSPACE
-
-export OS_VERSION=${COMPASS_OS_VERSION}
-export OPENSTACK_VERSION=${COMPASS_OPENSTACK_VERSION}
-if [[ "${COMPASS_OS_VERSION_OPTION}" = "xenial" ]] && [[ "${OPENSTACK_VERSION}" = "mitaka" ]]; then
- export OPENSTACK_VERSION=${OPENSTACK_VERSION}_${COMPASS_OS_VERSION_OPTION}
- export OS_VERSION=${COMPASS_OS_VERSION_OPTION}
-fi
./deploy.sh --dha ${DHA_CONF} --network ${NETWORK_CONF}
if [ $? -ne 0 ]; then
echo "depolyment failed!"
diff --git a/jjb/compass4nfv/compass-makeppa.sh b/jjb/compass4nfv/compass-makeppa.sh
index 83cc059c0..fc5db2389 100644
--- a/jjb/compass4nfv/compass-makeppa.sh
+++ b/jjb/compass4nfv/compass-makeppa.sh
@@ -6,12 +6,12 @@ set -o pipefail
# make ppa
cd $WORKSPACE/
./build/make_repo.sh
-# calc MD5 of ppa
+# calc SHA512 of ppa
cd $PPA_CACHE
for i in $(find *.gz *.iso *.img -type f)
do
- md5=$(md5sum $i | cut -d ' ' -f1)
- echo $md5 > $i.md5
+ sha512sum=$(sha512sum $i | cut -d ' ' -f1)
+ echo $sha512sum > $i.sha512
curl -T $i $PPA_REPO
- curl -T $i.md5 $PPA_REPO
-done
\ No newline at end of file
+ curl -T $i.sha512 $PPA_REPO
+done
diff --git a/jjb/compass4nfv/compass-project-jobs.yml b/jjb/compass4nfv/compass-project-jobs.yml
index e92c7653b..bede7de46 100644
--- a/jjb/compass4nfv/compass-project-jobs.yml
+++ b/jjb/compass4nfv/compass-project-jobs.yml
@@ -12,7 +12,7 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
diff --git a/jjb/conductor/conductor.yml b/jjb/conductor/conductor.yml
index 247f4f268..a5f556ad8 100644
--- a/jjb/conductor/conductor.yml
+++ b/jjb/conductor/conductor.yml
@@ -14,13 +14,17 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'conductor-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/copper/copper.yml b/jjb/copper/copper.yml
index 8aad3f26b..b5045785c 100644
--- a/jjb/copper/copper.yml
+++ b/jjb/copper/copper.yml
@@ -14,13 +14,17 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'copper-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/doctor/doctor.yml b/jjb/doctor/doctor.yml
index cf9f6430b..3b407efc9 100644
--- a/jjb/doctor/doctor.yml
+++ b/jjb/doctor/doctor.yml
@@ -10,13 +10,17 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'doctor-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/domino/domino.yml b/jjb/domino/domino.yml
index 29e171b80..7cee98448 100644
--- a/jjb/domino/domino.yml
+++ b/jjb/domino/domino.yml
@@ -10,10 +10,17 @@
- master:
branch: '{stream}'
gs-pathname: ''
+ disabled: false
+ - colorado:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'domino-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/dpacc/dpacc.yml b/jjb/dpacc/dpacc.yml
index 9d788ba0b..c660af57d 100644
--- a/jjb/dpacc/dpacc.yml
+++ b/jjb/dpacc/dpacc.yml
@@ -14,13 +14,17 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'dpacc-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/fastpathmetrics/fastpathmetrics.yml b/jjb/fastpathmetrics/fastpathmetrics.yml
index 40549e3c5..504e07f25 100644
--- a/jjb/fastpathmetrics/fastpathmetrics.yml
+++ b/jjb/fastpathmetrics/fastpathmetrics.yml
@@ -17,10 +17,10 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
- - brahmaputra:
+ - colorado:
branch: '{stream}'
- gs-pathname: ''
- disabled: true
+ gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'fastpathmetrics-verify-{stream}'
diff --git a/jjb/fuel/fuel-build.sh b/jjb/fuel/fuel-build.sh
index 662a1f46e..7e36a0c53 100755
--- a/jjb/fuel/fuel-build.sh
+++ b/jjb/fuel/fuel-build.sh
@@ -87,7 +87,7 @@ ls -al $BUILD_DIRECTORY
echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
- echo "OPNFV_ARTIFACT_MD5SUM=$(md5sum $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
+ echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
echo "OPNFV_BUILD_URL=$BUILD_URL"
) > $WORKSPACE/opnfv.properties
diff --git a/jjb/fuel/fuel-ci-jobs.yml b/jjb/fuel/fuel-ci-jobs.yml
index b9e201d46..0d31c99b4 100644
--- a/jjb/fuel/fuel-ci-jobs.yml
+++ b/jjb/fuel/fuel-ci-jobs.yml
@@ -13,7 +13,13 @@
master: &master
stream: master
branch: '{stream}'
+ disabled: false
gs-pathname: ''
+ colorado: &colorado
+ stream: colorado
+ branch: 'stable/{stream}'
+ disabled: false
+ gs-pathname: '/{stream}'
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
#--------------------------------
@@ -26,12 +32,21 @@
- virtual:
slave-label: fuel-virtual
<<: *master
+ - baremetal:
+ slave-label: fuel-baremetal
+ <<: *colorado
+ - virtual:
+ slave-label: fuel-virtual
+ <<: *colorado
#--------------------------------
# None-CI PODs
#--------------------------------
- zte-pod1:
slave-label: zte-pod1
<<: *master
+ - zte-pod2:
+ slave-label: zte-pod2
+ <<: *master
- zte-pod3:
slave-label: zte-pod3
<<: *master
@@ -90,6 +105,8 @@
- job-template:
name: 'fuel-{scenario}-{pod}-daily-{stream}'
+ disabled: '{obj:disabled}'
+
concurrent: false
properties:
@@ -101,7 +118,7 @@
- build-blocker:
use-build-blocker: true
blocking-jobs:
- - 'fuel-os-.*?-{pod}-daily-{stream}'
+ - 'fuel-os-.*?-{pod}-daily-.*'
block-level: 'NODE'
wrappers:
@@ -134,7 +151,7 @@
same-node: true
block: true
- trigger-builds:
- - project: 'yardstick-fuel-{pod}-daily-{stream}'
+ - project: 'functest-fuel-{pod}-daily-{stream}'
current-parameters: false
predefined-parameters:
DEPLOY_SCENARIO={scenario}
@@ -145,7 +162,7 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
- trigger-builds:
- - project: 'functest-fuel-{pod}-daily-{stream}'
+ - project: 'yardstick-fuel-{pod}-daily-{stream}'
current-parameters: false
predefined-parameters:
DEPLOY_SCENARIO={scenario}
@@ -163,6 +180,8 @@
- job-template:
name: 'fuel-deploy-{pod}-daily-{stream}'
+ disabled: '{obj:disabled}'
+
concurrent: true
properties:
@@ -174,7 +193,7 @@
- build-blocker:
use-build-blocker: true
blocking-jobs:
- - 'fuel-deploy-{pod}-daily-{stream}'
+ - 'fuel-deploy-{pod}-daily-.*'
- 'fuel-deploy-generic-daily-.*'
block-level: 'NODE'
@@ -199,6 +218,9 @@
wrappers:
- build-name:
name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+ - timeout:
+ timeout: 150
+ abort: true
builders:
- description-setter:
@@ -233,49 +255,46 @@
########################
# trigger macros
########################
-# os-nosdn-nofeature-ha trigger
-# CI PODs
#-----------------------------------------------
# Triggers for job running on fuel-baremetal against master branch
#-----------------------------------------------
-
# HA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '0 20 * * *'
+ - timed: '5 20 * * *'
- trigger:
name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '0 23 * * *'
+ - timed: '5 23 * * *'
- trigger:
name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '0 2 * * *'
+ - timed: '5 2 * * *'
- trigger:
name: 'fuel-os-onos-sfc-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '0 5 * * *'
+ - timed: '5 5 * * *'
- trigger:
name: 'fuel-os-onos-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '0 8 * * *'
+ - timed: '5 8 * * *'
- trigger:
name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '0 11 * * *'
+ - timed: '5 11 * * *'
- trigger:
name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '0 14 * * *'
+ - timed: '5 14 * * *'
- trigger:
name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '0 17 * * *'
+ - timed: '5 17 * * *'
- trigger:
name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '0 20 * * *'
+ - timed: '5 20 * * *'
# NOHA Scenarios
- trigger:
@@ -315,6 +334,84 @@
triggers:
- timed: ''
#-----------------------------------------------
+# Triggers for job running on fuel-baremetal against colorado branch
+#-----------------------------------------------
+# HA Scenarios
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: '0 20 * * *'
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: '0 23 * * *'
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: '0 2 * * *'
+- trigger:
+ name: 'fuel-os-onos-sfc-ha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: '0 5 * * *'
+- trigger:
+ name: 'fuel-os-onos-nofeature-ha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: '0 8 * * *'
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: '0 11 * * *'
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: '0 14 * * *'
+- trigger:
+ name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: '0 17 * * *'
+- trigger:
+ name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: '0 20 * * *'
+
+# NOHA Scenarios
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-noha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-noha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-sfc-noha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-nofeature-noha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-noha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-noha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-ovs-noha-baremetal-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+#-----------------------------------------------
# Triggers for job running on fuel-virtual against master branch
#-----------------------------------------------
- trigger:
@@ -353,45 +450,119 @@
name: 'fuel-os-nosdn-ovs-ha-virtual-daily-master-trigger'
triggers:
- timed: ''
-
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-master-trigger'
triggers:
- - timed: '0 13 * * *'
+ - timed: '5 13 * * *'
- trigger:
name: 'fuel-os-odl_l2-nofeature-noha-virtual-daily-master-trigger'
triggers:
- - timed: '30 15 * * *'
+ - timed: '35 15 * * *'
- trigger:
name: 'fuel-os-odl_l3-nofeature-noha-virtual-daily-master-trigger'
triggers:
- - timed: '0 18 * * *'
+ - timed: '5 18 * * *'
- trigger:
name: 'fuel-os-onos-sfc-noha-virtual-daily-master-trigger'
triggers:
- - timed: '30 20 * * *'
+ - timed: '35 20 * * *'
- trigger:
name: 'fuel-os-onos-nofeature-noha-virtual-daily-master-trigger'
triggers:
- - timed: '0 23 * * *'
+ - timed: '5 23 * * *'
- trigger:
name: 'fuel-os-odl_l2-sfc-noha-virtual-daily-master-trigger'
triggers:
- - timed: '30 1 * * *'
+ - timed: '35 1 * * *'
- trigger:
name: 'fuel-os-odl_l2-bgpvpn-noha-virtual-daily-master-trigger'
triggers:
- - timed: '0 4 * * *'
+ - timed: '5 4 * * *'
- trigger:
name: 'fuel-os-nosdn-kvm-noha-virtual-daily-master-trigger'
triggers:
- - timed: '30 6 * * *'
+ - timed: '35 6 * * *'
- trigger:
name: 'fuel-os-nosdn-ovs-noha-virtual-daily-master-trigger'
triggers:
+ - timed: '5 9 * * *'
+#-----------------------------------------------
+# Triggers for job running on fuel-virtual against colorado branch
+#-----------------------------------------------
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-ha-virtual-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-ha-virtual-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-ha-virtual-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-sfc-ha-virtual-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-nofeature-ha-virtual-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-ha-virtual-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-ha-virtual-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm-ha-virtual-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-ovs-ha-virtual-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+# NOHA Scenarios
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-colorado-trigger'
+ triggers:
+ - timed: '0 13 * * *'
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-noha-virtual-daily-colorado-trigger'
+ triggers:
+ - timed: '30 15 * * *'
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-noha-virtual-daily-colorado-trigger'
+ triggers:
+ - timed: '0 18 * * *'
+- trigger:
+ name: 'fuel-os-onos-sfc-noha-virtual-daily-colorado-trigger'
+ triggers:
+ - timed: '30 20 * * *'
+- trigger:
+ name: 'fuel-os-onos-nofeature-noha-virtual-daily-colorado-trigger'
+ triggers:
+ - timed: '0 23 * * *'
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-noha-virtual-daily-colorado-trigger'
+ triggers:
+ - timed: '30 1 * * *'
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-noha-virtual-daily-colorado-trigger'
+ triggers:
+ - timed: '0 4 * * *'
+- trigger:
+ name: 'fuel-os-nosdn-kvm-noha-virtual-daily-colorado-trigger'
+ triggers:
+ - timed: '30 6 * * *'
+- trigger:
+ name: 'fuel-os-nosdn-ovs-noha-virtual-daily-colorado-trigger'
+ triggers:
- timed: '0 9 * * *'
-
#-----------------------------------------------
# ZTE POD1 Triggers running against master branch
#-----------------------------------------------
@@ -470,6 +641,82 @@
- timed: ''
#-----------------------------------------------
+# ZTE POD2 Triggers running against master branch
+#-----------------------------------------------
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-ha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-ha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: '0 18 * * *'
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-ha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-sfc-ha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-nofeature-ha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-ha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm-ha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-ovs-ha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+# NOHA Scenarios
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-noha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-noha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-noha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-sfc-noha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-nofeature-noha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-noha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm-noha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-ovs-noha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+#-----------------------------------------------
# ZTE POD3 Triggers running against master branch
#-----------------------------------------------
- trigger:
@@ -544,4 +791,233 @@
- trigger:
name: 'fuel-os-nosdn-ovs-noha-zte-pod3-daily-master-trigger'
triggers:
- - timed: '' \ No newline at end of file
+ - timed: ''
+#-----------------------------------------------
+# ZTE POD1 Triggers running against colorado branch
+#-----------------------------------------------
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-sfc-ha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-ha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+# NOHA Scenarios
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-noha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-noha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-sfc-noha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-nofeature-noha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-noha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm-noha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-ovs-noha-zte-pod1-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+
+#-----------------------------------------------
+# ZTE POD2 Triggers running against colorado branch
+#-----------------------------------------------
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-ha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-ha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-ha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-sfc-ha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-nofeature-ha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-ha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm-ha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-ovs-ha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+# NOHA Scenarios
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-noha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-noha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-noha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-sfc-noha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-nofeature-noha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-noha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm-noha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-ovs-noha-zte-pod2-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+#-----------------------------------------------
+# ZTE POD3 Triggers running against colorado branch
+#-----------------------------------------------
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-ha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-ha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-ha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-sfc-ha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-nofeature-ha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-ha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm-ha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-ovs-ha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+# NOHA Scenarios
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-noha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-noha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-noha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-sfc-noha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-nofeature-noha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-noha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm-noha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-ovs-noha-zte-pod3-daily-colorado-trigger'
+ triggers:
+ - timed: ''
diff --git a/jjb/fuel/fuel-download-artifact.sh b/jjb/fuel/fuel-download-artifact.sh
index d78ddc5ba..5685444f8 100755
--- a/jjb/fuel/fuel-download-artifact.sh
+++ b/jjb/fuel/fuel-download-artifact.sh
@@ -10,6 +10,9 @@
set -o errexit
set -o pipefail
+# use proxy url to replace the nomral URL, for googleusercontent.com will be blocked randomly
+[[ "$NODE_NAME" =~ (zte) ]] && GS_URL=$GS_BASE_PROXY
+
if [[ "$JOB_NAME" =~ "merge" ]]; then
echo "Downloading http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties"
# get the properties file for the Fuel ISO built for a merged change
@@ -47,6 +50,8 @@ if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
fi
fi
+[[ "$NODE_NAME" =~ (zte) ]] && OPNFV_ARTIFACT_URL=${GS_BASE_PROXY%%/*}/$OPNFV_ARTIFACT_URL
+
# log info to console
echo "Downloading the $INSTALLER_TYPE artifact using URL http://$OPNFV_ARTIFACT_URL"
echo "This could take some time..."
diff --git a/jjb/fuel/fuel-project-jobs.yml b/jjb/fuel/fuel-project-jobs.yml
index c160fb893..cf893832b 100644
--- a/jjb/fuel/fuel-project-jobs.yml
+++ b/jjb/fuel/fuel-project-jobs.yml
@@ -13,7 +13,7 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
- - brahmaputra:
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
disabled: false
@@ -30,6 +30,8 @@
- job-template:
name: 'fuel-build-daily-{stream}'
+ disabled: '{obj:disabled}'
+
concurrent: false
properties:
@@ -223,7 +225,7 @@
concurrent: true
- disabled: false
+ disabled: '{obj:disabled}'
properties:
- throttle:
diff --git a/jjb/fuel/fuel-verify-jobs.yml b/jjb/fuel/fuel-verify-jobs.yml
index 2b6239422..f4bdbdd45 100644
--- a/jjb/fuel/fuel-verify-jobs.yml
+++ b/jjb/fuel/fuel-verify-jobs.yml
@@ -15,19 +15,19 @@
- colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
#####################################
# patch verification phases
#####################################
phase:
- 'basic':
- slave-label: 'opnfv-build-ubuntu'
+ slave-label: 'opnfv-build'
- 'build':
slave-label: 'opnfv-build-ubuntu'
- 'deploy-virtual':
- slave-label: 'fuel-virtual'
+ slave-label: 'opnfv-build'
- 'smoke-test':
- slave-label: 'fuel-virtual'
+ slave-label: 'opnfv-build'
#####################################
# jobs
#####################################
@@ -50,14 +50,7 @@
- throttle:
enabled: true
max-total: 4
- max-per-node: 1
option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-verify-master'
- - 'fuel-verify-colorado'
- block-level: 'NODE'
scm:
- gerrit-trigger-scm:
@@ -108,7 +101,7 @@
project: '{project}'
- gerrit-parameter:
branch: '{branch}'
- - 'fuel-virtual-defaults'
+ - 'opnfv-build-defaults'
- 'fuel-verify-defaults':
gs-pathname: '{gs-pathname}'
@@ -154,7 +147,7 @@
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
+ node-parameters: false
kill-phase-on: FAILURE
abort-all-job: true
- multijob:
@@ -168,7 +161,7 @@
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
+ node-parameters: false
kill-phase-on: FAILURE
abort-all-job: true
diff --git a/jjb/functest/functest-ci-jobs.yml b/jjb/functest/functest-ci-jobs.yml
index 2917e5b39..8e3de3d75 100644
--- a/jjb/functest/functest-ci-jobs.yml
+++ b/jjb/functest/functest-ci-jobs.yml
@@ -14,8 +14,8 @@
branch: '{stream}'
gs-pathname: ''
docker-tag: 'latest'
- brahmaputra: &brahmaputra
- stream: brahmaputra
+ colorado: &colorado
+ stream: colorado
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
docker-tag: 'stable'
@@ -40,11 +40,11 @@
- baremetal:
slave-label: fuel-baremetal
installer: fuel
- <<: *brahmaputra
+ <<: *colorado
- virtual:
slave-label: fuel-virtual
installer: fuel
- <<: *brahmaputra
+ <<: *colorado
# joid CI PODs
- baremetal:
slave-label: joid-baremetal
@@ -57,11 +57,11 @@
- baremetal:
slave-label: joid-baremetal
installer: joid
- <<: *brahmaputra
+ <<: *colorado
- virtual:
slave-label: joid-virtual
installer: joid
- <<: *brahmaputra
+ <<: *colorado
# compass CI PODs
- baremetal:
slave-label: compass-baremetal
@@ -74,11 +74,11 @@
- baremetal:
slave-label: compass-baremetal
installer: compass
- <<: *brahmaputra
+ <<: *colorado
- virtual:
slave-label: compass-virtual
installer: compass
- <<: *brahmaputra
+ <<: *colorado
# apex CI PODs
- apex-verify-master:
slave-label: '{pod}'
@@ -88,13 +88,21 @@
slave-label: '{pod}'
installer: apex
<<: *master
+ - apex-verify-colorado:
+ slave-label: '{pod}'
+ installer: apex
+ <<: *colorado
+ - apex-daily-colorado:
+ slave-label: '{pod}'
+ installer: apex
+ <<: *colorado
#--------------------------------
# None-CI PODs
#--------------------------------
- - orange-pod2:
+ - orange-pod1:
slave-label: '{pod}'
installer: joid
- <<: *brahmaputra
+ <<: *master
- orange-pod5:
slave-label: '{pod}'
installer: fuel
@@ -119,10 +127,18 @@
slave-label: '{pod}'
installer: fuel
<<: *master
+ - zte-pod2:
+ slave-label: '{pod}'
+ installer: fuel
+ <<: *master
+ - zte-pod3:
+ slave-label: '{pod}'
+ installer: fuel
+ <<: *master
- arm-pod1:
slave-label: '{pod}'
installer: fuel
- <<: *brahmaputra
+ <<: *colorado
#--------------------------------
testsuite:
@@ -160,8 +176,8 @@
parameters:
- project-parameter:
project: '{project}'
- - '{slave-label}-defaults'
- '{installer}-defaults'
+ - '{slave-label}-defaults'
- 'functest-{testsuite}-parameter'
- string:
name: DEPLOY_SCENARIO
@@ -222,6 +238,7 @@
- 'rally_full'
- 'vims'
- 'multisite'
+ - 'parser'
- parameter:
name: functest-parameter
parameters:
diff --git a/jjb/functest/functest-project-jobs.yml b/jjb/functest/functest-project-jobs.yml
index 7f86281fc..a9845459f 100644
--- a/jjb/functest/functest-project-jobs.yml
+++ b/jjb/functest/functest-project-jobs.yml
@@ -14,13 +14,17 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'functest-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/ipv6/ipv6.yml b/jjb/ipv6/ipv6.yml
index 5b9691257..d9dafdf69 100644
--- a/jjb/ipv6/ipv6.yml
+++ b/jjb/ipv6/ipv6.yml
@@ -14,13 +14,17 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'ipv6-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/joid/joid-ci-jobs.yml b/jjb/joid/joid-ci-jobs.yml
index ae03eab4a..6d0370983 100644
--- a/jjb/joid/joid-ci-jobs.yml
+++ b/jjb/joid/joid-ci-jobs.yml
@@ -15,10 +15,12 @@
master: &master
stream: master
branch: '{stream}'
+ disabled: false
gs-pathname: ''
- brahmaputra: &brahmaputra
- stream: brahmaputra
+ colorado: &colorado
+ stream: colorado
branch: 'stable/{stream}'
+ disabled: false
gs-pathname: '/{stream}'
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
@@ -34,31 +36,26 @@
<<: *master
- baremetal:
slave-label: joid-baremetal
- <<: *brahmaputra
+ <<: *colorado
- virtual:
slave-label: joid-virtual
- <<: *brahmaputra
+ <<: *colorado
#--------------------------------
# None-CI PODs
#--------------------------------
- - orange-pod2:
- slave-label: orange-pod2
- <<: *brahmaputra
- - orange-pod2:
- slave-label: orange-pod2
- <<: *master
- - juniper-pod1:
- slave-label: juniper-pod1
+ - orange-pod1:
+ slave-label: orange-pod1
<<: *master
#--------------------------------
-# new scenario descriptions
+# scenarios
+#--------------------------------
scenario:
- 'os-nosdn-nofeature-noha':
- auto-trigger-name: 'daily-trigger-disabled'
+ auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- 'os-nosdn-nofeature-ha':
auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- 'os-nosdn-lxd-ha':
- auto-trigger-name: 'daily-trigger-disabled'
+ auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- 'os-nosdn-lxd-noha':
auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- 'os-odl_l2-nofeature-ha':
@@ -86,6 +83,8 @@
- job-template:
name: 'joid-{scenario}-{pod}-daily-{stream}'
+ disabled: '{obj:disabled}'
+
concurrent: false
properties:
@@ -97,7 +96,7 @@
- build-blocker:
use-build-blocker: true
blocking-jobs:
- - 'joid-os-.*?-{pod}-daily-{stream}'
+ - 'joid-os-.*?-{pod}-daily-.*'
block-level: 'NODE'
wrappers:
@@ -153,6 +152,8 @@
- job-template:
name: 'joid-deploy-{pod}-daily-{stream}'
+ disabled: '{obj:disabled}'
+
concurrent: true
properties:
@@ -164,7 +165,7 @@
- build-blocker:
use-build-blocker: true
blocking-jobs:
- - 'joid-deploy-{pod}-daily-{stream}'
+ - 'joid-deploy-{pod}-daily-.*'
block-level: 'NODE'
wrappers:
@@ -217,212 +218,181 @@
- trigger:
name: 'joid-os-nosdn-nofeature-ha-baremetal-master-trigger'
triggers:
- - timed: '0 2 * * *'
+ - timed: '5 2 * * *'
- trigger:
name: 'joid-os-nosdn-nofeature-ha-virtual-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'joid-os-nosdn-nofeature-ha-orange-pod2-master-trigger'
+ name: 'joid-os-nosdn-nofeature-ha-orange-pod1-master-trigger'
triggers:
- timed: ''
+# os-nosdn-nofeature-ha trigger - branch: colorado
- trigger:
- name: 'joid-os-nosdn-nofeature-ha-juniper-pod1-master-trigger'
+ name: 'joid-os-nosdn-nofeature-ha-baremetal-colorado-trigger'
triggers:
- - timed: ''
-
-# os-nosdn-nofeature-ha trigger - branch: stable/brahmaputra
-- trigger:
- name: 'joid-os-nosdn-nofeature-ha-baremetal-brahmaputra-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-nosdn-nofeature-ha-virtual-brahmaputra-trigger'
- triggers:
- - timed: ''
+ - timed: '0 2 * * *'
- trigger:
- name: 'joid-os-nosdn-nofeature-ha-orange-pod2-brahmaputra-trigger'
+ name: 'joid-os-nosdn-nofeature-ha-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'joid-os-nosdn-nofeature-ha-juniper-pod1-brahmaputra-trigger'
+ name: 'joid-os-nosdn-nofeature-ha-orange-pod1-colorado-trigger'
triggers:
- timed: ''
-
# os-odl_l2-nofeature-ha trigger - branch: master
- trigger:
name: 'joid-os-odl_l2-nofeature-ha-baremetal-master-trigger'
triggers:
- - timed: '0 10 * * *'
+ - timed: '5 7 * * *'
- trigger:
name: 'joid-os-odl_l2-nofeature-ha-virtual-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'joid-os-odl_l2-nofeature-ha-orange-pod2-master-trigger'
+ name: 'joid-os-odl_l2-nofeature-ha-orange-pod1-master-trigger'
triggers:
- timed: ''
+# os-odl_l2-nofeature-ha trigger - branch: colorado
- trigger:
- name: 'joid-os-odl_l2-nofeature-ha-juniper-pod1-master-trigger'
+ name: 'joid-os-odl_l2-nofeature-ha-baremetal-colorado-trigger'
triggers:
- - timed: ''
-
-# os-odl_l2-nofeature-ha trigger - branch: stable/brahmaputra
+ - timed: '0 7 * * *'
- trigger:
- name: 'joid-os-odl_l2-nofeature-ha-baremetal-brahmaputra-trigger'
+ name: 'joid-os-odl_l2-nofeature-ha-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'joid-os-odl_l2-nofeature-ha-virtual-brahmaputra-trigger'
+ name: 'joid-os-odl_l2-nofeature-ha-orange-pod1-colorado-trigger'
triggers:
- timed: ''
-- trigger:
- name: 'joid-os-odl_l2-nofeature-ha-orange-pod2-brahmaputra-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-odl_l2-nofeature-ha-juniper-pod1-brahmaputra-trigger'
- triggers:
- - timed: ''
-
# os-onos-nofeature-ha trigger - branch: master
- trigger:
name: 'joid-os-onos-nofeature-ha-baremetal-master-trigger'
triggers:
- - timed: '0 18 * * *'
+ - timed: '5 12 * * *'
- trigger:
name: 'joid-os-onos-nofeature-ha-virtual-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'joid-os-onos-nofeature-ha-orange-pod2-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-onos-nofeature-ha-juniper-pod1-master-trigger'
+ name: 'joid-os-onos-nofeature-ha-orange-pod1-master-trigger'
triggers:
- timed: ''
-
-# os-onos-sfc-ha trigger - branch: master
-- trigger:
- name: 'joid-os-onos-sfc-ha-baremetal-master-trigger'
- triggers:
- - timed: '0 22 * * *'
+# os-onos-nofeature-ha trigger - branch: colorado
- trigger:
- name: 'joid-os-onos-sfc-ha-virtual-master-trigger'
+ name: 'joid-os-onos-nofeature-ha-baremetal-colorado-trigger'
triggers:
- - timed: ''
+ - timed: '0 12 * * *'
- trigger:
- name: 'joid-os-onos-sfc-ha-orange-pod2-master-trigger'
+ name: 'joid-os-onos-nofeature-ha-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'joid-os-onos-sfc-ha-juniper-pod1-master-trigger'
+ name: 'joid-os-onos-nofeature-ha-orange-pod1-colorado-trigger'
triggers:
- timed: ''
-
-# os-onos-nofeature-ha trigger - branch: stable/brahmaputra
+# os-onos-sfc-ha trigger - branch: master
- trigger:
- name: 'joid-os-onos-nofeature-ha-baremetal-brahmaputra-trigger'
+ name: 'joid-os-onos-sfc-ha-baremetal-master-trigger'
triggers:
- - timed: ''
+ - timed: '5 17 * * *'
- trigger:
- name: 'joid-os-onos-nofeature-ha-virtual-brahmaputra-trigger'
+ name: 'joid-os-onos-sfc-ha-virtual-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'joid-os-onos-nofeature-ha-orange-pod2-brahmaputra-trigger'
+ name: 'joid-os-onos-sfc-ha-orange-pod1-master-trigger'
triggers:
- timed: ''
+# os-onos-sfc-ha trigger - branch: colorado
- trigger:
- name: 'joid-os-onos-nofeature-ha-juniper-pod1-brahmaputra-trigger'
+ name: 'joid-os-onos-sfc-ha-baremetal-colorado-trigger'
triggers:
- - timed: ''
+ - timed: '0 17 * * *'
- trigger:
- name: 'joid-os-onos-sfc-ha-baremetal-brahmaputra-trigger'
+ name: 'joid-os-onos-sfc-ha-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'joid-os-onos-sfc-ha-virtual-brahmaputra-trigger'
+ name: 'joid-os-onos-sfc-ha-orange-pod1-colorado-trigger'
triggers:
- timed: ''
+# os-nosdn-lxd-noha trigger - branch: master
- trigger:
- name: 'joid-os-onos-sfc-ha-orange-pod2-brahmaputra-trigger'
+ name: 'joid-os-nosdn-lxd-noha-baremetal-master-trigger'
triggers:
- - timed: ''
+ - timed: '5 22 * * *'
- trigger:
- name: 'joid-os-onos-sfc-ha-juniper-pod1-brahmaputra-trigger'
+ name: 'joid-os-nosdn-lxd-noha-virtual-master-trigger'
triggers:
- timed: ''
-
-# os-nosdn-lxd-noha trigger - branch: master
- trigger:
- name: 'joid-os-nosdn-lxd-noha-baremetal-master-trigger'
+ name: 'joid-os-nosdn-lxd-noha-orange-pod1-master-trigger'
triggers:
- timed: ''
+# os-nosdn-lxd-noha trigger - branch: colorado
- trigger:
- name: 'joid-os-nosdn-lxd-noha-virtual-master-trigger'
+ name: 'joid-os-nosdn-lxd-noha-baremetal-colorado-trigger'
triggers:
- - timed: ''
+ - timed: '0 22 * * *'
- trigger:
- name: 'joid-os-nosdn-lxd-noha-orange-pod2-master-trigger'
+ name: 'joid-os-nosdn-lxd-noha-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'joid-os-nosdn-lxd-noha-juniper-pod1-master-trigger'
+ name: 'joid-os-nosdn-lxd-noha-orange-pod1-colorado-trigger'
triggers:
- timed: ''
-
-# os-nosdn-lxd-noha trigger - branch: stable/brahmaputra
+# os-nosdn-lxd-ha trigger - branch: master
- trigger:
- name: 'joid-os-nosdn-lxd-noha-baremetal-brahmaputra-trigger'
+ name: 'joid-os-nosdn-lxd-ha-baremetal-master-trigger'
triggers:
- - timed: ''
+ - timed: '5 10 * * *'
- trigger:
- name: 'joid-os-nosdn-lxd-noha-virtual-brahmaputra-trigger'
+ name: 'joid-os-nosdn-lxd-ha-virtual-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'joid-os-nosdn-lxd-noha-orange-pod2-brahmaputra-trigger'
+ name: 'joid-os-nosdn-lxd-ha-orange-pod1-master-trigger'
triggers:
- timed: ''
+# os-nosdn-lxd-ha trigger - branch: colorado
- trigger:
- name: 'joid-os-nosdn-lxd-noha-juniper-pod1-brahmaputra-trigger'
+ name: 'joid-os-nosdn-lxd-ha-baremetal-colorado-trigger'
triggers:
- - timed: ''
-
-# os-nosdn-lxd-ha trigger - branch: master
+ - timed: '0 10 * * *'
- trigger:
- name: 'joid-os-nosdn-lxd-ha-baremetal-master-trigger'
+ name: 'joid-os-nosdn-lxd-ha-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'joid-os-nosdn-lxd-ha-virtual-master-trigger'
+ name: 'joid-os-nosdn-lxd-ha-orange-pod1-colorado-trigger'
triggers:
- timed: ''
+# os-nosdn-nofeature-noha trigger - branch: master
- trigger:
- name: 'joid-os-nosdn-lxd-ha-orange-pod2-master-trigger'
+ name: 'joid-os-nosdn-nofeature-noha-baremetal-master-trigger'
triggers:
- - timed: ''
+ - timed: '5 4 * * *'
- trigger:
- name: 'joid-os-nosdn-lxd-ha-juniper-pod1-master-trigger'
+ name: 'joid-os-nosdn-nofeature-noha-virtual-master-trigger'
triggers:
- timed: ''
-
-# os-nosdn-lxd-ha trigger - branch: stable/brahmaputra
- trigger:
- name: 'joid-os-nosdn-lxd-ha-baremetal-brahmaputra-trigger'
+ name: 'joid-os-nosdn-nofeature-noha-orange-pod1-master-trigger'
triggers:
- timed: ''
+# os-nosdn-nofeature-noha trigger - branch: colorado
- trigger:
- name: 'joid-os-nosdn-lxd-ha-virtual-brahmaputra-trigger'
+ name: 'joid-os-nosdn-nofeature-noha-baremetal-colorado-trigger'
triggers:
- - timed: ''
+ - timed: '0 4 * * *'
- trigger:
- name: 'joid-os-nosdn-lxd-ha-orange-pod2-brahmaputra-trigger'
+ name: 'joid-os-nosdn-nofeature-noha-virtual-colorado-trigger'
triggers:
- timed: ''
- trigger:
- name: 'joid-os-nosdn-lxd-ha-juniper-pod1-brahmaputra-trigger'
+ name: 'joid-os-nosdn-nofeature-noha-orange-pod1-colorado-trigger'
triggers:
- timed: ''
diff --git a/jjb/joid/joid-deploy.sh b/jjb/joid/joid-deploy.sh
index da3676e28..05c2de1fc 100644
--- a/jjb/joid/joid-deploy.sh
+++ b/jjb/joid/joid-deploy.sh
@@ -10,10 +10,8 @@
set +e
set -o nounset
-PWD_FILENAME="passwords.sh"
-
-##
##
+## Functions
##
function exit_on_error {
RES=$1
@@ -24,50 +22,23 @@ function exit_on_error {
fi
}
-
##
## Create LAB_CONFIG folder if not exists
##
-mkdir -p $LAB_CONFIG
-
-##
-## Override default passwords with local settings if needed
-##
-if [ -e "$LAB_CONFIG/$PWD_FILENAME" ]; then
- echo "------ Load local passwords ------"
- source $LAB_CONFIG/$PWD_FILENAME
-else
- export MAAS_USER=ubuntu
- export MAAS_PASSWORD=ubuntu
- export OS_ADMIN_PASSWORD=openstack
-fi
+mkdir -p $LAB_CONFIG
##
## Set Joid pod config name
##
- # This part will be removed when pod names will be synced between jenkins and joid config
- case $NODE_NAME in
- *virtual*)
- POD=default ;;
- *)
- POD=$NODE_NAME ;;
- esac
- export POD_NAME=${POD/-}
-##
-## Parse Network config
-##
-
-EXTERNAL_NETWORK=${EXTERNAL_NETWORK:-}
-# split EXTERNAL_NETWORK=name;type;first ip;last ip; gateway;network
-IFS=';' read -r -a EXTNET <<< "$EXTERNAL_NETWORK"
-EXTNET_NAME=${EXTNET[0]}
-EXTNET_TYPE=${EXTNET[1]}
-EXTNET_FIP=${EXTNET[2]}
-EXTNET_LIP=${EXTNET[3]}
-EXTNET_GW=${EXTNET[4]}
-EXTNET_NET=${EXTNET[5]}
+case $NODE_NAME in
+ *virtual*)
+ POD=default ;;
+ *)
+ POD=$NODE_NAME ;;
+esac
+export POD_NAME=${POD/-}
##
## Redeploy MAAS or recover the previous config
@@ -77,11 +48,11 @@ cd $WORKSPACE/ci
if [ -e "$LAB_CONFIG/environments.yaml" ] && [ "$MAAS_REINSTALL" == "false" ]; then
echo "------ Recover Juju environment to use MAAS ------"
cp $LAB_CONFIG/environments.yaml .
+ cp $LAB_CONFIG/deployment.yaml .
+ if [ -e $LAB_CONFIG/deployconfig.yaml ]; then
+ cp $LAB_CONFIG/deployconfig.yaml .
+ fi
else
- MAASCONFIG=$WORKSPACE/ci/maas/${POD/-*}/${POD/*-}/deployment.yaml
- echo "------ Set MAAS password ------"
- sed -i -- "s/user: ubuntu/user: $MAAS_USER/" $MAASCONFIG
- sed -i -- "s/password: ubuntu/password: $MAAS_PASSWORD/" $MAASCONFIG
echo "------ Redeploy MAAS ------"
./00-maasdeploy.sh $POD_NAME
exit_on_error $? "MAAS Deploy FAILED"
@@ -117,24 +88,12 @@ fi
if [ "$HA_MODE" == 'noha' ]; then
HA_MODE='nonha'
fi
-SRCBUNDLE="${WORKSPACE}/ci/${SDN_CONTROLLER}/juju-deployer/"
-SRCBUNDLE="${SRCBUNDLE}/ovs-${SDN_CONTROLLER}-${HA_MODE}.yaml"
-
-
-# Modify Bundle
-echo "------ Set openstack password ------"
-sed -i -- "s/admin-password: openstack/admin-password: $OS_ADMIN_PASSWORD/" $SRCBUNDLE
-if [ -n "$EXTNET_NAME" ]; then
- echo "------ Set openstack default network ------"
- sed -i -- "s/neutron-external-network: ext_net/neutron-external-network: $EXTNET_NAME/" $SRCBUNDLE
+# Add extra to features
+if [ "$EXTRA" != "" ];then
+ NFV_FEATURES="${NFV_FEATURES}_${EXTRA}"
fi
-echo "------ Set ceph disks ------"
-#Find the first line of osd-devices to change the one for ceph, then the other for ceph-osd
-sed -i -- "s@osd-devices: /srv@osd-devices: $CEPH_DISKS@" $SRCBUNDLE
-sed -i -r -- "s/^(\s+osd-reformat: )'no'/\1'$CEPH_REFORMAT'/" $SRCBUNDLE
-
# temporary sfc feature is availble only on onos and trusty
if [ "$NFV_FEATURES" == 'sfc' ] && [ "$SDN_CONTROLLER" == 'onos' ];then
UBUNTU_DISTRO=trusty
@@ -156,17 +115,6 @@ exit_on_error $? "Main deploy FAILED"
JOID_ADMIN_OPENRC=$LAB_CONFIG/admin-openrc
echo "------ Create OpenRC file [$JOID_ADMIN_OPENRC] ------"
-# get Keystone ip
-case "$HA_MODE" in
- "ha")
- KEYSTONE=$(cat bundles.yaml |shyaml get-value openstack-phase1.services.keystone.options.vip)
- ;;
- *)
- KEYSTONE=$(juju status keystone |grep public-address|sed -- 's/.*\: //')
- ;;
-esac
-
-
# get controller IP
case "$SDN_CONTROLLER" in
"odl")
@@ -181,22 +129,12 @@ case "$SDN_CONTROLLER" in
esac
SDN_PASSWORD='admin'
-# export the openrc file
-cat << EOF > $JOID_ADMIN_OPENRC
-export OS_USERNAME=admin
-export OS_PASSWORD=$OS_ADMIN_PASSWORD
-export OS_TENANT_NAME=admin
-export OS_AUTH_URL=http://$KEYSTONE:35357/v2.0
-export OS_REGION_NAME=RegionOne
-export OS_ENDPOINT_TYPE='adminURL'
-export CINDER_ENDPOINT_TYPE='adminURL'
-export GLANCE_ENDPOINT_TYPE='adminURL'
-export KEYSTONE_ENDPOINT_TYPE='adminURL'
-export NEUTRON_ENDPOINT_TYPE='adminURL'
-export NOVA_ENDPOINT_TYPE='adminURL'
+# export the openrc file by getting the one generated by joid and add SDN
+# controller for Functest
+cp ./cloud/admin-openrc $JOID_ADMIN_OPENRC
+cat << EOF >> $JOID_ADMIN_OPENRC
export SDN_CONTROLLER=$SDN_CONTROLLER_IP
export SDN_PASSWORD=$SDN_PASSWORD
-export OS_INTERFACE=admin
EOF
##
@@ -205,28 +143,10 @@ EOF
echo "------ Backup Juju environment ------"
cp environments.yaml $LAB_CONFIG/
-
-##
-## Basic test to return a realistic result to jenkins
-##
-
-echo "------ Do basic test ------"
-source $JOID_ADMIN_OPENRC
-curl -i -sw '%{http_code}' -H "Content-Type: application/json" -d "
-{ \"auth\": {
- \"identity\": {
- \"methods\": [\"password\"],
- \"password\": {
- \"user\": {
- \"name\": \"admin\",
- \"domain\": { \"id\": \"default\" },
- \"password\": \"$OS_ADMIN_PASSWORD\"
- }
- }
- }
- }
-}" http://$KEYSTONE:5000/v3/auth/tokens |grep "HTTP/1.1 20" 2>&1 >/dev/null;
-exit_on_error $? "Deploy FAILED to auth to openstack"
+cp deployment.yaml $LAB_CONFIG/
+if [ -e deployconfig.yaml ]; then
+ cp deployconfig.yaml $LAB_CONFIG
+fi
##
## Exit success
diff --git a/jjb/joid/joid-verify-jobs.yml b/jjb/joid/joid-verify-jobs.yml
index aa5fc672d..9d362d800 100644
--- a/jjb/joid/joid-verify-jobs.yml
+++ b/jjb/joid/joid-verify-jobs.yml
@@ -15,7 +15,7 @@
- colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
#####################################
# patch verification phases
#####################################
@@ -38,10 +38,10 @@
- job-template:
name: 'joid-verify-{stream}'
- project-type: multijob
-
disabled: '{obj:disabled}'
+ project-type: multijob
+
concurrent: true
properties:
diff --git a/jjb/kvmfornfv/kvmfornfv-download-artifact.sh b/jjb/kvmfornfv/kvmfornfv-download-artifact.sh
index 89b7e3164..1f99f177b 100755
--- a/jjb/kvmfornfv/kvmfornfv-download-artifact.sh
+++ b/jjb/kvmfornfv/kvmfornfv-download-artifact.sh
@@ -1,4 +1,7 @@
#!/bin/bash
+set -o errexit
+set -o nounset
+set -o pipefail
if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
JOB_TYPE=${BASH_REMATCH[0]}
diff --git a/jjb/kvmfornfv/kvmfornfv-test.sh b/jjb/kvmfornfv/kvmfornfv-test.sh
index 7a47f9a6e..868de13bd 100755
--- a/jjb/kvmfornfv/kvmfornfv-test.sh
+++ b/jjb/kvmfornfv/kvmfornfv-test.sh
@@ -1,3 +1,10 @@
#!/bin/bash
-
+##########################################################
+##This script includes executing cyclictest scripts.
+##########################################################
+#The latest build packages are stored in build_output
ls -al $WORKSPACE/build_output
+
+#start the test
+cd $WORKSPACE
+./ci/test_kvmfornfv.sh
diff --git a/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh b/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
index 190ab4b4d..327ea97e8 100755
--- a/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
+++ b/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
@@ -1,4 +1,6 @@
#!/bin/bash
+set -o errexit
+set -o nounset
if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
JOB_TYPE=${BASH_REMATCH[0]}
@@ -9,11 +11,13 @@ fi
case "$JOB_TYPE" in
verify)
- echo "Uploading artifacts for the change $GERRIT_CHANGE_NUMBER. This could take some time..."
GS_UPLOAD_LOCATION="gs://artifacts.opnfv.org/$PROJECT/review/$GERRIT_CHANGE_NUMBER"
+ echo "Removing outdated artifacts produced for the previous patch for the change $GERRIT_CHANGE_NUMBER"
+ gsutil ls $GS_UPLOAD_LOCATION > /dev/null 2>&1 && gsutil rm -r $GS_UPLOAD_LOCATION
+ echo "Uploading artifacts for the change $GERRIT_CHANGE_NUMBER. This could take some time..."
;;
daily)
- echo "Uploding daily artifacts This could take some time..."
+ echo "Uploading daily artifacts. This could take some time..."
OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
GS_UPLOAD_LOCATION="gs://$GS_URL/$OPNFV_ARTIFACT_VERSION"
;;
@@ -22,7 +26,7 @@ case "$JOB_TYPE" in
exit 1
esac
-gsutil cp -r $WORKSPACE/build_output $GS_UPLOAD_LOCATION > $WORKSPACE/gsutil.log 2>&1
+gsutil cp -r $WORKSPACE/build_output/* $GS_UPLOAD_LOCATION > $WORKSPACE/gsutil.log 2>&1
gsutil -m setmeta -r \
-h "Cache-Control:private, max-age=0, no-transform" \
$GS_UPLOAD_LOCATION > /dev/null 2>&1
diff --git a/jjb/kvmfornfv/kvmfornfv.yml b/jjb/kvmfornfv/kvmfornfv.yml
index 5fcb27042..4bb0a15b4 100644
--- a/jjb/kvmfornfv/kvmfornfv.yml
+++ b/jjb/kvmfornfv/kvmfornfv.yml
@@ -8,10 +8,10 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
- - brahmaputra:
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
#####################################
# patch verification phases
#####################################
@@ -139,6 +139,8 @@
- job-template:
name: 'kvmfornfv-merge-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/multisite/multisite.yml b/jjb/multisite/multisite.yml
index ab99eefad..21b973093 100644
--- a/jjb/multisite/multisite.yml
+++ b/jjb/multisite/multisite.yml
@@ -17,12 +17,20 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
+ timed: '@midnight'
+ - colorado:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: false
+ timed: ''
- job-template:
name: 'multisite-verify-{stream}'
disabled: '{obj:disabled}'
+ concurrent: true
+
parameters:
- project-parameter:
project: '{project}'
@@ -82,6 +90,9 @@
name: KINGBIRD_LOG_FILE
default: $WORKSPACE/kingbird.log
- 'intel-virtual6-defaults'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-nosdn-multisite-ha'
scm:
- gerrit-trigger-scm:
@@ -90,7 +101,7 @@
choosing-strategy: 'default'
triggers:
- - timed: '@midnight'
+ - timed: '{timed}'
builders:
- trigger-builds:
diff --git a/jjb/netready/netready.yml b/jjb/netready/netready.yml
index bc8f66691..3e2f95a76 100644
--- a/jjb/netready/netready.yml
+++ b/jjb/netready/netready.yml
@@ -10,13 +10,17 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'netready-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/octopus/octopus.yml b/jjb/octopus/octopus.yml
index 1cb71cc69..c4e34ca7d 100644
--- a/jjb/octopus/octopus.yml
+++ b/jjb/octopus/octopus.yml
@@ -13,13 +13,17 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'octopus-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/onosfw/onosfw.yml b/jjb/onosfw/onosfw.yml
index 0335b71b2..0c90c577a 100644
--- a/jjb/onosfw/onosfw.yml
+++ b/jjb/onosfw/onosfw.yml
@@ -10,11 +10,13 @@
# only master branch is enabled at the moment to keep no of jobs sane
stream:
- master:
- branch: 'master'
+ branch: '{stream}'
gs-pathname: ''
-# - brahmaputra:
-# branch: 'stable/brahmaputra'
-# gs-pathname: '/brahmaputra'
+ disabled: false
+ - colorado:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: false
project: 'onosfw'
@@ -24,6 +26,8 @@
- job-template:
name: 'onosfw-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
@@ -65,6 +69,8 @@
- job-template:
name: 'onosfw-daily-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
@@ -88,6 +94,8 @@
- job-template:
name: 'onosfw-build-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
@@ -143,7 +151,7 @@
echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
- echo "OPNFV_ARTIFACT_MD5SUM=$(md5sum $BUILD_DIRECTORY/onosfw.iso | cut -d' ' -f1)"
+ echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/onosfw.iso | cut -d' ' -f1)"
echo "OPNFV_BUILD_URL=$BUILD_URL"
) > $BUILD_DIRECTORY/opnfv.properties
echo
diff --git a/jjb/opnfv/installer-params.yml b/jjb/opnfv/installer-params.yml
index 833a1d449..fbcaa4ba2 100644
--- a/jjb/opnfv/installer-params.yml
+++ b/jjb/opnfv/installer-params.yml
@@ -71,21 +71,13 @@
description: 'OpenStack release (liberty|mitaka)'
- string:
name: EXTERNAL_NETWORK
- default: ext-net;flat;192.168.0.2;192.168.0.253;192.168.0.1;192.168.0.0/24
- description: "External network to create (name;type;first ip;last ip; gateway;network)"
- - string:
- name: CEPH_DISKS
- default: '/srv'
- description: "Disks to use by ceph by default (space separated list)"
+ default: ext-net4
+ description: "External network used for Floating ips."
- string:
name: LAB_CONFIG
default: "$HOME/joid_config"
description: "Local lab config and Openstack openrc location"
- string:
- name: CEPH_REFORMAT
- default: 'false'
- description: "Format or not disk before using ceph [true/false] (must be done the first time)"
- - string:
name: MAAS_REINSTALL
default: 'false'
description: "Reinstall MAAS and Bootstrap before deploy [true/false]"
diff --git a/jjb/opnfv/opnfv-docker.yml b/jjb/opnfv/opnfv-docker.yml
index 6b4861c01..4250eef64 100644
--- a/jjb/opnfv/opnfv-docker.yml
+++ b/jjb/opnfv/opnfv-docker.yml
@@ -20,8 +20,10 @@
stream:
- master:
branch: '{stream}'
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
+ disabled: false
########################
# job templates
@@ -29,7 +31,7 @@
- job-template:
name: '{project}-docker-build-push-{stream}'
- disabled: false
+ disabled: '{obj:disabled}'
parameters:
- project-parameter:
@@ -41,7 +43,7 @@
description: "To enable/disable pushing the image to Dockerhub."
- string:
name: BASE_VERSION
- default: "brahmaputra.3"
+ default: "colorado.0"
description: "Base version to be used."
- string:
name: DOCKER_REPO_NAME
@@ -72,7 +74,7 @@
- job-template:
name: 'yardstick-docker-build-push-{stream}'
- disabled: false
+ disabled: '{obj:disabled}'
parameters:
- project-parameter:
@@ -84,7 +86,7 @@
description: "To enable/disable pushing the image to Dockerhub."
- string:
name: BASE_VERSION
- default: "brahmaputra.3"
+ default: "colorado.0"
description: "Base version to be used."
- string:
name: DOCKER_REPO_NAME
diff --git a/jjb/opnfv/opnfv-docs.yml b/jjb/opnfv/opnfv-docs.yml
index 2b80b84f0..743657334 100644
--- a/jjb/opnfv/opnfv-docs.yml
+++ b/jjb/opnfv/opnfv-docs.yml
@@ -15,9 +15,11 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
########################
# job templates
@@ -26,6 +28,8 @@
- job-template:
name: 'opnfv-docs-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: $GERRIT_PROJECT
@@ -68,6 +72,8 @@
- job-template:
name: 'opnfv-docs-merge-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: $GERRIT_PROJECT
diff --git a/jjb/opnfv/opnfv-lint.yml b/jjb/opnfv/opnfv-lint.yml
index 9611a380c..f90f95dc2 100644
--- a/jjb/opnfv/opnfv-lint.yml
+++ b/jjb/opnfv/opnfv-lint.yml
@@ -14,9 +14,11 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
########################
# job templates
@@ -25,6 +27,8 @@
- job-template:
name: 'opnfv-lint-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: $GERRIT_PROJECT
@@ -51,7 +55,7 @@
comment-contains-value: 'reverify'
projects:
- project-compare-type: 'REG_EXP'
- project-pattern: 'functest|sdnvpn|qtip'
+ project-pattern: 'functest|sdnvpn|qtip|daisy'
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
diff --git a/jjb/opnfv/slave-params.yml b/jjb/opnfv/slave-params.yml
index 4d73ad807..73c7c61b5 100644
--- a/jjb/opnfv/slave-params.yml
+++ b/jjb/opnfv/slave-params.yml
@@ -17,6 +17,34 @@
name: SSH_KEY
default: /root/.ssh/id_rsa
description: 'SSH key to use for Apex'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - lf-pod1
+ default-slaves:
+ - lf-pod1
+- parameter:
+ name: 'apex-daily-colorado-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'apex-daily-colorado'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - lf-pod1
+ default-slaves:
+ - lf-pod1
- parameter:
name: 'apex-verify-master-defaults'
parameters:
@@ -31,6 +59,42 @@
name: SSH_KEY
default: /root/.ssh/id_rsa
description: 'SSH key to use for Apex'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - intel-virtual3
+ - intel-virtual4
+ - intel-virtual5
+ default-slaves:
+ - intel-virtual3
+ - intel-virtual4
+ - intel-virtual5
+- parameter:
+ name: 'apex-verify-colorado-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'apex-verify-colorado'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - intel-virtual3
+ - intel-virtual4
+ - intel-virtual5
+ default-slaves:
+ - intel-virtual3
+ - intel-virtual4
+ - intel-virtual5
- parameter:
name: 'lf-pod1-defaults'
parameters:
@@ -97,13 +161,9 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- string:
- name: CEPH_DISKS
- default: /srv
- description: "Disks to use by ceph (comma separated list)"
- - string:
name: EXTERNAL_NETWORK
- default: ext-net;flat;10.5.15.5;10.5.15.250;10.5.15.254;10.5.15.0/24
- description: "External network to create for pod5 (name;type;first ip;last ip; gateway;network)"
+ default: ext-net
+ description: "External network floating ips"
#####################################################
# Parameters for CI virtual PODs
#####################################################
@@ -198,6 +258,21 @@
default: $WORKSPACE/build_output
description: "Directory where the build artifact will be located upon the completion of the build."
- parameter:
+ name: 'opnfv-build-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'opnfv-build'
+ description: 'Slave label on Jenkins'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
+- parameter:
name: 'huawei-build-defaults'
parameters:
- node:
@@ -336,6 +411,14 @@
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: INSTALLER_IP
+ default: '10.20.6.2'
+ description: 'IP of the installer'
+ - string:
+ name: BRIDGE
+ default: 'br6'
+ description: 'pxe bridge for booting of Fuel master'
- parameter:
name: 'zte-pod2-defaults'
parameters:
@@ -352,8 +435,12 @@
description: 'Git URL to use on this Jenkins Slave'
- string:
name: INSTALLER_IP
- default: '10.20.1.2'
+ default: '10.20.7.2'
description: 'IP of the installer'
+ - string:
+ name: BRIDGE
+ default: 'br7'
+ description: 'pxe bridge for booting of Fuel master'
- parameter:
name: 'zte-pod3-defaults'
parameters:
@@ -391,6 +478,20 @@
default: /srv
description: "Disks to use by ceph (comma separated list)"
- parameter:
+ name: 'orange-pod1-defaults'
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - orange-pod1
+ default-slaves:
+ - orange-pod1
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+- parameter:
name: 'orange-pod2-defaults'
parameters:
- node:
@@ -404,14 +505,6 @@
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: CEPH_DISKS
- default: /dev/sdb /dev/sdc
- description: "Disks to use by ceph by default (space separated list)"
- - string:
- name: EXTERNAL_NETWORK
- default: ext-net;flat;161.105.231.2;161.105.231.62;161.105.231.1;161.105.231.0/26
- description: "External network to create (name;type;first ip;last ip; gateway;network)"
- parameter:
name: 'orange-pod5-defaults'
parameters:
diff --git a/jjb/opnfvdocs/opnfvdocs.yml b/jjb/opnfvdocs/opnfvdocs.yml
index f3e776c83..2bf87c2f4 100644
--- a/jjb/opnfvdocs/opnfvdocs.yml
+++ b/jjb/opnfvdocs/opnfvdocs.yml
@@ -16,9 +16,11 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
########################
# job templates
@@ -27,6 +29,8 @@
- job-template:
name: 'opnfvdocs-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: $GERRIT_PROJECT
@@ -68,6 +72,8 @@
- job-template:
name: 'opnfvdocs-merge-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: $GERRIT_PROJECT
diff --git a/jjb/ovsnfv/ovsnfv.yml b/jjb/ovsnfv/ovsnfv.yml
index ec5761b74..c6f3e4a51 100644
--- a/jjb/ovsnfv/ovsnfv.yml
+++ b/jjb/ovsnfv/ovsnfv.yml
@@ -13,14 +13,16 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
- - brahmaputra:
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: true
+ disabled: false
- job-template:
name: 'ovsnfv-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
@@ -68,6 +70,8 @@
- job-template:
name: 'ovsnfv-merge-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/parser/parser.yml b/jjb/parser/parser.yml
index 8c7283802..7f73a1380 100644
--- a/jjb/parser/parser.yml
+++ b/jjb/parser/parser.yml
@@ -14,13 +14,17 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'parser-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/pharos/pharos.yml b/jjb/pharos/pharos.yml
index 98d71286c..f7ea622e1 100644
--- a/jjb/pharos/pharos.yml
+++ b/jjb/pharos/pharos.yml
@@ -14,13 +14,17 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'pharos-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/prediction/prediction.yml b/jjb/prediction/prediction.yml
index b6471bf9f..a95cd98da 100644
--- a/jjb/prediction/prediction.yml
+++ b/jjb/prediction/prediction.yml
@@ -14,13 +14,17 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'prediction-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/promise/promise.yml b/jjb/promise/promise.yml
index 97a8b3eb3..1a4d628fd 100644
--- a/jjb/promise/promise.yml
+++ b/jjb/promise/promise.yml
@@ -14,13 +14,17 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'promise-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/qtip/qtip-ci-jobs.yml b/jjb/qtip/qtip-ci-jobs.yml
index ef455f8df..d454b0f07 100644
--- a/jjb/qtip/qtip-ci-jobs.yml
+++ b/jjb/qtip/qtip-ci-jobs.yml
@@ -13,38 +13,31 @@
stream: master
branch: '{stream}'
gs-pathname: ''
- brahmaputra: &brahmaputra
- stream: brahmaputra
- branch: 'stable/{stream}'
- gs-pathname: '{stream}'
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
#--------------------------------
-# brahmaputra
+# master
#--------------------------------
pod:
- dell-pod1:
installer: compass
- auto-trigger-name: 'qtip-daily-dell-pod1-trigger'
- <<: *brahmaputra
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
- orange-pod2:
installer: joid
auto-trigger-name: 'daily-trigger-disabled'
- <<: *brahmaputra
-#--------------------------------
-# master
-#--------------------------------
+ <<: *master
- juniper-pod1:
installer: joid
<<: *master
auto-trigger-name: 'daily-trigger-disabled'
- zte-pod1:
installer: fuel
- auto-trigger-name: 'qtip-daily-zte-pod1-trigger'
+ auto-trigger-name: 'daily-trigger-disabled'
<<: *master
- zte-pod2:
installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
+ auto-trigger-name: 'qtip-daily-zte-pod2-trigger'
<<: *master
#--------------------------------
@@ -172,10 +165,10 @@
#trigger macros
#################
-- trigger:
- name: 'qtip-daily-dell-pod1-trigger'
- triggers:
- - timed: '0 3 * * *'
+#- trigger:
+# name: 'qtip-daily-dell-pod1-trigger'
+# triggers:
+# - timed: '0 3 * * *'
#- trigger:
# name: 'qtip-daily-juniper-pod1-trigger'
@@ -188,6 +181,7 @@
# - timed : ' 0 0 * * *'
- trigger:
- name: 'qtip-daily-zte-pod1-trigger'
+ name: 'qtip-daily-zte-pod2-trigger'
triggers:
- timed: '0 5 * * *'
+
diff --git a/jjb/qtip/qtip-project-jobs.yml b/jjb/qtip/qtip-project-jobs.yml
index 75f75116a..722a9beb3 100644
--- a/jjb/qtip/qtip-project-jobs.yml
+++ b/jjb/qtip/qtip-project-jobs.yml
@@ -11,13 +11,17 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'qtip-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/releng-macros.yaml b/jjb/releng-macros.yaml
index c14ea6c47..8328aec03 100644
--- a/jjb/releng-macros.yaml
+++ b/jjb/releng-macros.yaml
@@ -13,7 +13,7 @@
description: "URL to Google Storage."
- string:
name: GS_BASE_PROXY
- default: build.opnfv.org/artifacts/$PROJECT
+ default: build.opnfv.org/artifacts.opnfv.org/$PROJECT
description: "URL to Google Storage proxy"
- parameter:
diff --git a/jjb/storperf/storperf.yml b/jjb/storperf/storperf.yml
index fb70df751..026b643d3 100644
--- a/jjb/storperf/storperf.yml
+++ b/jjb/storperf/storperf.yml
@@ -12,13 +12,17 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'storperf-verify-{stream}'
+ disabled: '{obj:disabled}'
+
node: opnfv-build-ubuntu
parameters:
diff --git a/jjb/vnf_forwarding_graph/vnf_forwarding_graph.yml b/jjb/vnf_forwarding_graph/vnf_forwarding_graph.yml
index 0123fcdf9..c988c0627 100644
--- a/jjb/vnf_forwarding_graph/vnf_forwarding_graph.yml
+++ b/jjb/vnf_forwarding_graph/vnf_forwarding_graph.yml
@@ -10,13 +10,17 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'vnf_forwarding_graph-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/jjb/vswitchperf/vswitchperf.yml b/jjb/vswitchperf/vswitchperf.yml
index bf2fe8faa..363423de9 100644
--- a/jjb/vswitchperf/vswitchperf.yml
+++ b/jjb/vswitchperf/vswitchperf.yml
@@ -13,14 +13,18 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'vswitchperf-daily-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
@@ -50,6 +54,8 @@
- job-template:
name: 'vswitchperf-verify-{stream}'
+ disabled: '{obj:disabled}'
+
project-type: freestyle
concurrent: true
@@ -108,6 +114,8 @@
- job-template:
name: 'vswitchperf-merge-{stream}'
+ disabled: '{obj:disabled}'
+
project-type: freestyle
concurrent: true
diff --git a/jjb/yardstick/yardstick-ci-jobs.yml b/jjb/yardstick/yardstick-ci-jobs.yml
index 041eabd03..d9fb43555 100644
--- a/jjb/yardstick/yardstick-ci-jobs.yml
+++ b/jjb/yardstick/yardstick-ci-jobs.yml
@@ -1,5 +1,5 @@
###################################
-# job configuration for functest
+# job configuration for yardstick
###################################
- project:
name: yardstick
@@ -14,11 +14,11 @@
branch: '{stream}'
gs-pathname: ''
docker-tag: 'latest'
- brahmaputra: &brahmaputra
- stream: brahmaputra
+ colorado: &colorado
+ stream: colorado
branch: 'stable/{stream}'
gs-pathname: '{stream}'
- docker-tag: 'brahmaputra.1.0'
+ docker-tag: 'stable'
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
#--------------------------------
@@ -43,12 +43,12 @@
slave-label: fuel-baremetal
installer: fuel
auto-trigger-name: 'daily-trigger-disabled'
- <<: *brahmaputra
+ <<: *colorado
- virtual:
slave-label: fuel-virtual
installer: fuel
auto-trigger-name: 'daily-trigger-disabled'
- <<: *brahmaputra
+ <<: *colorado
# joid CI PODs
- baremetal:
slave-label: joid-baremetal
@@ -64,12 +64,12 @@
slave-label: joid-baremetal
installer: joid
auto-trigger-name: 'daily-trigger-disabled'
- <<: *brahmaputra
+ <<: *colorado
- virtual:
slave-label: joid-virtual
installer: joid
auto-trigger-name: 'daily-trigger-disabled'
- <<: *brahmaputra
+ <<: *colorado
# compass CI PODs
- baremetal:
@@ -86,12 +86,12 @@
slave-label: compass-baremetal
installer: compass
auto-trigger-name: 'daily-trigger-disabled'
- <<: *brahmaputra
+ <<: *colorado
- virtual:
slave-label: compass-virtual
installer: compass
auto-trigger-name: 'daily-trigger-disabled'
- <<: *brahmaputra
+ <<: *colorado
#--------------------------------
# Installers not using labels
# CI PODs
@@ -107,20 +107,35 @@
slave-label: '{pod}'
installer: apex
auto-trigger-name: 'daily-trigger-disabled'
- <<: *brahmaputra
+ <<: *colorado
#--------------------------------
# None-CI PODs
#--------------------------------
- - orange-pod2:
+ - orange-pod1:
slave-label: '{pod}'
installer: joid
auto-trigger-name: 'daily-trigger-disabled'
- <<: *brahmaputra
+ <<: *master
- zte-pod1:
slave-label: '{pod}'
installer: fuel
auto-trigger-name: 'daily-trigger-disabled'
<<: *master
+ - zte-pod2:
+ slave-label: '{pod}'
+ installer: fuel
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
+ - zte-pod3:
+ slave-label: '{pod}'
+ installer: fuel
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
+ - arm-pod1:
+ slave-label: '{pod}'
+ installer: fuel
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
- orange-pod2:
slave-label: '{pod}'
installer: joid
@@ -177,8 +192,8 @@
parameters:
- project-parameter:
project: '{project}'
- - '{slave-label}-defaults'
- '{installer}-defaults'
+ - '{slave-label}-defaults'
- 'yardstick-params-{slave-label}'
- string:
name: DEPLOY_SCENARIO
@@ -318,6 +333,14 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
+ name: 'yardstick-params-zte-pod2'
+ parameters:
+ - string:
+ name: YARDSTICK_DB_BACKEND
+ default: '-i 104.197.68.199:8086'
+ description: 'Arguments to use in order to choose the backend DB'
+
+- parameter:
name: 'yardstick-params-zte-pod3'
parameters:
- string:
@@ -326,6 +349,14 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
+ name: 'yardstick-params-orange-pod1'
+ parameters:
+ - string:
+ name: YARDSTICK_DB_BACKEND
+ default: '-i 104.197.68.199:8086'
+ description: 'Arguments to use in order to choose the backend DB'
+
+- parameter:
name: 'yardstick-params-orange-pod2'
parameters:
- string:
@@ -334,6 +365,14 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
+ name: 'yardstick-params-arm-pod1'
+ parameters:
+ - string:
+ name: YARDSTICK_DB_BACKEND
+ default: '-i 104.197.68.199:8086'
+ description: 'Arguments to use in order to choose the backend DB'
+
+- parameter:
name: 'yardstick-params-virtual'
parameters:
- string:
diff --git a/jjb/yardstick/yardstick-daily.sh b/jjb/yardstick/yardstick-daily.sh
index e8df9be58..b3705415f 100755
--- a/jjb/yardstick/yardstick-daily.sh
+++ b/jjb/yardstick/yardstick-daily.sh
@@ -23,16 +23,22 @@ elif [[ ${INSTALLER_TYPE} == 'joid' ]]; then
# replace the default one by the customized one provided by jenkins config
fi
+# Set iptables rule to allow forwarding return traffic for container
+if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FORWARD | awk 'NR==3' | grep RETURN 2> ${redirect}; then
+ sudo iptables -I FORWARD -j RETURN
+fi
+
opts="--privileged=true --rm"
envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
-e NODE_NAME=${NODE_NAME} -e EXTERNAL_NETWORK=${EXTERNAL_NETWORK} \
-e YARDSTICK_BRANCH=${GIT_BRANCH##origin/} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO}"
-# Pull the latest image
+# Pull the image with correct tag
+echo "Yardstick: Pulling image opnfv/yardstick:${DOCKER_TAG}"
docker pull opnfv/yardstick:$DOCKER_TAG >$redirect
# Run docker
-cmd="sudo docker run ${opts} ${envs} ${labconfig} ${sshkey} opnfv/yardstick \
+cmd="sudo docker run ${opts} ${envs} ${labconfig} ${sshkey} opnfv/yardstick:${DOCKER_TAG} \
exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
echo "Yardstick: Running docker cmd: ${cmd}"
${cmd}
diff --git a/jjb/yardstick/yardstick-project-jobs.yml b/jjb/yardstick/yardstick-project-jobs.yml
index 64031b75a..db07e9d83 100644
--- a/jjb/yardstick/yardstick-project-jobs.yml
+++ b/jjb/yardstick/yardstick-project-jobs.yml
@@ -15,9 +15,11 @@
- master:
branch: '{stream}'
gs-pathname: ''
- - brahmaputra:
+ disabled: false
+ - colorado:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ disabled: false
################################
# job templates
@@ -26,6 +28,8 @@
- job-template:
name: 'yardstick-verify-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
@@ -63,6 +67,8 @@
- job-template:
name: 'yardstick-merge-{stream}'
+ disabled: '{obj:disabled}'
+
parameters:
- project-parameter:
project: '{project}'
diff --git a/prototypes/bifrost/README.md b/prototypes/bifrost/README.md
new file mode 100644
index 000000000..fffd1de3d
--- /dev/null
+++ b/prototypes/bifrost/README.md
@@ -0,0 +1,48 @@
+=====================
+How to deploy bifrost
+=====================
+The scripts and playbooks defined in this repo need to be combined with proper `Bifrost <http://git.openstack.org/cgit/openstack/bifrost>`_ code.
+
+Please follow these steps:
+
+1. Clone bifrost::
+
+ git clone https://git.openstack.org/openstack/bifrost /opt/bifrost
+
+2. Clone releng::
+
+ git clone https://gerrit.opnfv.org/gerrit/releng /opt/releng
+
+3. Clone infracloud::
+
+ git clone https://git.openstack.org/openstack-infra/puppet-infracloud /opt/puppet-infracloud
+
+4. Combine releng scripts and playbooks with bifrost::
+
+ cp -R /opt/releng/prototypes/bifrost/* /opt/bifrost/
+
+5. Run destroy script if you need to cleanup previous environment::
+
+ cd /opt/bifrost
+ ./scripts/destroy_env.sh
+
+6. Run deployment script to spin up 3 vms with bifrost: jumphost, controller and compute::
+
+ cd /opt/bifrost
+ ./scripts/test-bifrost-deployment.sh
+
+It is likely that the script will show some errors due to timeout. Please ignore the errors, and wait until the vms are completely bootstrapped. To verify it you can check with ironic::
+
+ cd /opt/bifrost
+ source env-vars
+ ironic node-list
+
+And wait until all the vms are in **active** Provisioning State.
+
+7. Check the IPs assigned to each of the VMs. You can check them by looking at the inventory::
+
+ cat /tmp/baremetal.csv
+
+8. You can log into the VMs with the devuser login/pass::
+
+ ssh devuser@192.168.122.2
diff --git a/prototypes/bifrost/playbooks/roles/bifrost-prepare-for-test-dynamic/defaults/main.yml b/prototypes/bifrost/playbooks/roles/bifrost-prepare-for-test-dynamic/defaults/main.yml
new file mode 100644
index 000000000..69eb787e7
--- /dev/null
+++ b/prototypes/bifrost/playbooks/roles/bifrost-prepare-for-test-dynamic/defaults/main.yml
@@ -0,0 +1,4 @@
+---
+node_ssh_pause: 10
+wait_timeout: 1900
+multinode_testing: false
diff --git a/prototypes/bifrost/playbooks/test-bifrost-infracloud.yaml b/prototypes/bifrost/playbooks/test-bifrost-infracloud.yaml
new file mode 100644
index 000000000..ba548b305
--- /dev/null
+++ b/prototypes/bifrost/playbooks/test-bifrost-infracloud.yaml
@@ -0,0 +1,66 @@
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 RedHat and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+- hosts: localhost
+ connection: local
+ name: "Setting pre-test conditions"
+ become: yes
+ ignore_errors: yes
+ tasks:
+ - name: Remove pre-existing leases file
+ file: path=/var/lib/misc/dnsmasq.leases state=absent
+- hosts: localhost
+ connection: local
+ name: "Executes install, enrollment, and testing in one playbook"
+ become: no
+ gather_facts: yes
+ pre_tasks:
+ - name: "Override the ipv4_gateway setting"
+ set_fact:
+ ipv4_gateway: "192.168.122.1"
+ roles:
+ - { role: bifrost-prep-for-install, when: skip_install is not defined }
+ environment:
+ http_proxy: "{{ lookup('env','http_proxy') }}"
+ https_proxy: "{{ lookup('env','https_proxy') }}"
+- hosts: localhost
+ connection: local
+ name: "Executes install, enrollment, and testing in one playbook"
+ become: yes
+ gather_facts: yes
+ roles:
+ - role: bifrost-ironic-install
+ cleaning: false
+ testing: true
+ # NOTE(TheJulia): While the next step creates a ramdisk, some elements
+ # do not support ramdisk-image-create as they invoke steps to cleanup
+ # the ramdisk which causes ramdisk-image-create to believe it failed.
+ - { role: bifrost-create-dib-image, dib_imagename: "{{ http_boot_folder }}/ipa", build_ramdisk: false, dib_os_element: "{{ ipa_dib_os_element|default('debian') }}", dib_os_release: "jessie", dib_elements: "ironic-agent {{ ipa_extra_dib_elements | default('') }}", when: create_ipa_image | bool == true }
+ - { role: bifrost-create-dib-image, dib_imagetype: "qcow2", dib_imagename: "{{deploy_image}}", dib_os_element: "ubuntu-minimal", dib_os_release: "trusty", dib_elements: "vm serial-console simple-init devuser infra-cloud-bridge puppet growroot {{ extra_dib_elements|default('') }}", dib_packages: "openssh-server,vlan,vim,less,bridge-utils,language-pack-en,iputils-ping,rsyslog,curl", when: create_image_via_dib | bool == true and transform_boot_image | bool == false }
+ environment:
+ http_proxy: "{{ lookup('env','http_proxy') }}"
+ https_proxy: "{{ lookup('env','https_proxy') }}"
+- hosts: baremetal
+ name: "Enroll node with Ironic"
+ become: no
+ connection: local
+ roles:
+ - role: ironic-enroll-dynamic
+ - { role: ironic-inspect-node, when: inspect_nodes | default('false') | bool == true }
+- hosts: baremetal
+ vars:
+ multinode_testing: "{{ inventory_dhcp | bool == true }}"
+ name: "Create configuration drive files and deploy machines."
+ become: no
+ connection: local
+ roles:
+ - role: bifrost-configdrives-dynamic
+ - role: bifrost-deploy-nodes-dynamic
+ - role: bifrost-prepare-for-test-dynamic
+ serial: 1
diff --git a/prototypes/bifrost/scripts/destroy_env.sh b/prototypes/bifrost/scripts/destroy_env.sh
new file mode 100755
index 000000000..aef4ccba3
--- /dev/null
+++ b/prototypes/bifrost/scripts/destroy_env.sh
@@ -0,0 +1,38 @@
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 RedHat and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+#!/bin/bash
+virsh destroy jumphost.opnfvlocal || true
+virsh destroy controller00.opnfvlocal || true
+virsh destroy compute00.opnfvlocal || true
+virsh undefine jumphost.opnfvlocal || true
+virsh undefine controller00.opnfvlocal || true
+virsh undefine compute00.opnfvlocal || true
+
+service ironic-conductor stop
+
+echo "removing from database"
+mysql -u root ironic --execute "truncate table ports;"
+mysql -u root ironic --execute "delete from node_tags;"
+mysql -u root ironic --execute "delete from nodes;"
+mysql -u root ironic --execute "delete from conductors;"
+echo "removing leases"
+> /var/lib/dnsmasq/dnsmasq.leases
+echo "removing logs"
+rm -rf /var/log/libvirt/baremetal_logs/*.log
+
+# clean up images
+rm -rf /httpboot/*
+rm -rf /tftpboot/*
+rm -rf /var/lib/libvirt/images/*.qcow2
+
+echo "restarting services"
+service libvirtd restart
+service ironic-api restart
+service ironic-conductor start
+service ironic-inspector restart
diff --git a/prototypes/bifrost/scripts/test-bifrost-deployment.sh b/prototypes/bifrost/scripts/test-bifrost-deployment.sh
new file mode 100755
index 000000000..5df58f8eb
--- /dev/null
+++ b/prototypes/bifrost/scripts/test-bifrost-deployment.sh
@@ -0,0 +1,121 @@
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 Ericsson AB and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+#!/bin/bash
+
+set -eux
+set -o pipefail
+export PYTHONUNBUFFERED=1
+SCRIPT_HOME="$(cd "$(dirname "$0")" && pwd)"
+BIFROST_HOME=$SCRIPT_HOME/..
+ANSIBLE_INSTALL_ROOT=${ANSIBLE_INSTALL_ROOT:-/opt/stack}
+ENABLE_VENV="false"
+USE_DHCP="false"
+USE_VENV="false"
+BUILD_IMAGE=true
+
+# Set defaults for ansible command-line options to drive the different
+# tests.
+
+# NOTE(TheJulia/cinerama): The variables defined on the command line
+# for the default and DHCP tests are to drive the use of Cirros as the
+# deployed operating system, and as such sets the test user to cirros,
+# and writes a debian style interfaces file out to the configuration
+# drive as cirros does not support the network_info.json format file
+# placed in the configuration drive. The "build image" test does not
+# use cirros.
+
+TEST_VM_NUM_NODES=3
+export TEST_VM_NODE_NAMES="jumphost.opnfvlocal controller00.opnfvlocal compute00.opnfvlocal"
+export VM_DOMAIN_TYPE="kvm"
+export VM_CPU=4
+export VM_DISK=100
+TEST_PLAYBOOK="test-bifrost-infracloud.yaml"
+USE_INSPECTOR=true
+USE_CIRROS=false
+TESTING_USER=root
+VM_MEMORY_SIZE="8192"
+DOWNLOAD_IPA=true
+CREATE_IPA_IMAGE=false
+INSPECT_NODES=true
+INVENTORY_DHCP=false
+INVENTORY_DHCP_STATIC_IP=false
+WRITE_INTERFACES_FILE=true
+
+# Set BIFROST_INVENTORY_SOURCE
+export BIFROST_INVENTORY_SOURCE=/tmp/baremetal.csv
+
+# DIB custom elements path
+export ELEMENTS_PATH=/usr/share/diskimage-builder/elements:/opt/puppet-infracloud/files/elements
+
+# settings for console access
+export DIB_DEV_USER_PWDLESS_SUDO=yes
+export DIB_DEV_USER_PASSWORD=devuser
+
+# Source Ansible
+# NOTE(TheJulia): Ansible stable-1.9 source method tosses an error deep
+# under the hood which -x will detect, so for this step, we need to suspend
+# and then re-enable the feature.
+set +x +o nounset
+$SCRIPT_HOME/env-setup.sh
+source ${ANSIBLE_INSTALL_ROOT}/ansible/hacking/env-setup
+ANSIBLE=$(which ansible-playbook)
+set -x -o nounset
+
+# Change working directory
+cd $BIFROST_HOME/playbooks
+
+# Syntax check of dynamic inventory test path
+${ANSIBLE} -vvvv \
+ -i inventory/localhost \
+ test-bifrost-create-vm.yaml \
+ --syntax-check \
+ --list-tasks
+${ANSIBLE} -vvvv \
+ -i inventory/localhost \
+ ${TEST_PLAYBOOK} \
+ --syntax-check \
+ --list-tasks \
+ -e testing_user=${TESTING_USER}
+
+# Create the test VMS
+${ANSIBLE} -vvvv \
+ -i inventory/localhost \
+ test-bifrost-create-vm.yaml \
+ -e test_vm_num_nodes=${TEST_VM_NUM_NODES} \
+ -e test_vm_memory_size=${VM_MEMORY_SIZE} \
+ -e enable_venv=${ENABLE_VENV} \
+ -e test_vm_domain_type=${VM_DOMAIN_TYPE}
+
+# Execute the installation and VM startup test.
+${ANSIBLE} -vvvv \
+ -i inventory/bifrost_inventory.py \
+ ${TEST_PLAYBOOK} \
+ -e use_cirros=${USE_CIRROS} \
+ -e testing_user=${TESTING_USER} \
+ -e test_vm_num_nodes=${TEST_VM_NUM_NODES} \
+ -e inventory_dhcp=${INVENTORY_DHCP} \
+ -e inventory_dhcp_static_ip=${INVENTORY_DHCP_STATIC_IP} \
+ -e enable_venv=${ENABLE_VENV} \
+ -e enable_inspector=${USE_INSPECTOR} \
+ -e inspect_nodes=${INSPECT_NODES} \
+ -e download_ipa=${DOWNLOAD_IPA} \
+ -e create_ipa_image=${CREATE_IPA_IMAGE} \
+ -e write_interfaces_file=${WRITE_INTERFACES_FILE} \
+ -e ipv4_gateway=192.168.122.1
+EXITCODE=$?
+
+if [ $EXITCODE != 0 ]; then
+ echo "****************************"
+ echo "Test failed. See logs folder"
+ echo "****************************"
+fi
+
+$SCRIPT_HOME/collect-test-info.sh
+
+exit $EXITCODE
diff --git a/prototypes/puppet-infracloud/.gitkeep b/prototypes/puppet-infracloud/.gitkeep
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/prototypes/puppet-infracloud/.gitkeep
diff --git a/prototypes/puppet-infracloud/README.md b/prototypes/puppet-infracloud/README.md
new file mode 100644
index 000000000..f3bd67279
--- /dev/null
+++ b/prototypes/puppet-infracloud/README.md
@@ -0,0 +1,52 @@
+===============================
+How to deploy puppet-infracloud
+===============================
+The manifest and modules defined in this repo will deploy an OpenStack cloud based on `Infra Cloud <http://docs.openstack.org/infra/system-config/infra-cloud.html>`_ project.
+
+Once all the hardware is provisioned, enter in controller and compute nodes and follow these steps:
+
+1. Clone releng::
+
+ git clone https://gerrit.opnfv.org/gerrit/releng /opt/releng
+
+2. Copy hiera to the right place::
+
+ cp /opt/releng/prototypes/puppet-infracloud/hiera/common.yaml /var/lib/hiera/
+
+3. Install modules::
+
+ cd /opt/releng/prototypes/puppet-infracloud
+ ./install_modules.sh
+
+4. Apply the infracloud manifest::
+
+ cd /opt/releng/prototypes/puppet-infracloud
+   puppet apply manifests/site.pp --modulepath=/etc/puppet/modules:/opt/releng/prototypes/puppet-infracloud/modules
+
+5. Once you finish this operation on controller and compute nodes, you will have a functional OpenStack cloud.
+
+In the jumphost, follow these steps:
+
+1. Clone releng::
+
+ git clone https://gerrit.opnfv.org/gerrit/releng /opt/releng
+
+2. Create the OpenStack clouds config directory::
+
+ mkdir -p /root/.config/openstack
+
+3. Copy credentials file::
+
+ cp /opt/releng/prototypes/puppet-infracloud/creds/clouds.yaml /root/.config/openstack/
+
+4. Install the openstack client::
+
+ pip install python-openstackclient
+
+5. Export the desired cloud::
+
+ export OS_CLOUD=opnfv
+
+6. Start using it::
+
+ openstack server list
diff --git a/prototypes/puppet-infracloud/creds/clouds.yaml b/prototypes/puppet-infracloud/creds/clouds.yaml
new file mode 100644
index 000000000..eb44db66c
--- /dev/null
+++ b/prototypes/puppet-infracloud/creds/clouds.yaml
@@ -0,0 +1,12 @@
+clouds:
+ opnfv:
+ verify: False
+ auth:
+ auth_url: https://controller00.opnfvlocal:5000
+ project_name: opnfv
+ username: opnfv
+ password: pass
+ identity_api_version: '3'
+ region_name: RegionOne
+ user_domain_name: opnfv
+ project_domain_name: opnfv
diff --git a/prototypes/puppet-infracloud/hiera/common.yaml b/prototypes/puppet-infracloud/hiera/common.yaml
new file mode 100644
index 000000000..6c28f1972
--- /dev/null
+++ b/prototypes/puppet-infracloud/hiera/common.yaml
@@ -0,0 +1,77 @@
+keystone_rabbit_password: pass
+neutron_rabbit_password: pass
+nova_rabbit_password: pass
+root_mysql_password: pass
+keystone_mysql_password: pass
+glance_mysql_password: pass
+neutron_mysql_password: pass
+nova_mysql_password: pass
+keystone_admin_password: pass
+glance_admin_password: pass
+neutron_admin_password: pass
+nova_admin_password: pass
+keystone_admin_token: token
+ssl_key_file_contents: |
+ -----BEGIN PRIVATE KEY-----
+ MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC0YX6wsA/Jhe3q
+ ByoiLsyagO5rOCIyzDsMTV0YMWVIa/QybvS1vI+pK9FIoYPbqWFGHXmQF0DJYulb
+ GnB6A0GlT3YXuaKPucaaANr5hTjuEBF6LuQeq+OIO5u7+l56HGWbbVeB7+vnIxK9
+ 43G545aBZSGlUnVfFg+v+IQtmRr36iEa5UDd4sahDXcp2Dm3zGgkFhFKie6AJ4UU
+ TzrH2SL6Nhl7i+AenuoUEDdgDWfGnCXozLngfmhKDi6lHDmh5zJhFS7cKz14wLgF
+ 37fsWxxxEX8a6gtGYEEHqXV3x3AXO+U98pr15/xQM9O2O3mrqc/zkmcCRUwCjEeD
+ jEHey3UJAgMBAAECggEAGqapBEwPGRRbsY87b2+AtXdFQrw5eU3pj4jCr3dk4o1o
+ uCbiqxNgGnup4VRT2hmtkKF8O4jj/p1JozdF1RE0GsuhxCGeXiPxrwFfWSyQ28Ou
+ AWJ6O/njlVZRTTXRzbLyZEOEgWNEdJMfCsVXIUL6EsYxcW68fr8QtExAo0gSzvwe
+ IVyhopBy4A1jr5jWqjjlgJhoTHQCkp1e9pHiaW5WWHtk2DFdy6huw5PoDRppG42P
+ soMzqHy9AIWXrYaTGNjyybdJvbaiF0X5Bkr6k8ZxMlRuEb3Vpyrj7SsBrUifRJM3
+ +yheSq3drdQHlw5VrukoIgXGYB4zAQq3LndLoL5YTQKBgQDlzz/hB1IuGOKBXRHy
+ p0j+Lyoxt5EiOW2mdEkbTUYyYnD9EDbJ0wdQ5ijtWLw0J3AwhASkH8ZyljOVHKlY
+ Sq2Oo/uroIH4M8cVIBOJQ2/ak98ItLZ1OMMnDxlZva52jBfYwOEkg6OXeLOLmay6
+ ADfxQ56RFqreVHi9J0/jvpn9UwKBgQDI8CZrM4udJTP7gslxeDcRZw6W34CBBFds
+ 49d10Tfd05sysOludzWAfGFj27wqIacFcIyYQmnSga9lBhowv+RwdSjcb2QCCjOb
+ b2GdH+qSFU8BTOcd5FscCBV3U8Y1f/iYp0EQ1/GiG2AYcQC67kjWOO4/JZEXsmtq
+ LisFlWTcswKBgQCC/bs/nViuhei2LELKuafVmzTF2giUJX/m3Wm+cjGNDqew18kj
+ CXKmHks93tKIN+KvBNFQa/xF3G/Skt/EP+zl3XravUbYH0tfM0VvfE0JnjgHUlqe
+ PpiebvDYQlJrqDb/ihHLKm3ZLSfKbvIRo4Y/s3dy5CTJTgT0bLAQ9Nf5mQKBgGqb
+ Dqb9d+rtnACqSNnMn9q5xIHDHlhUx1VcJCm70Fn+NG7WcWJMGLSMSNdD8zafGA/I
+ wK7fPWmTqEx+ylJm3HnVjtI0vuheJTcoBq/oCPlsGLhl5pBzYOskVs8yQQyNUoUa
+ 52haSTZqM7eD7JFAbqBJIA2cjrf1zwtMZ0LVGegFAoGBAIFSkI+y4tDEEaSsxrMM
+ OBYEZDkffVar6/mDJukvyn0Q584K3I4eXIDoEEfMGgSN2Tza6QamuNFxOPCH+AAv
+ UKvckK4yuYkc7mQIgjCE8N8UF4kgsXjPek61TZT1QVI1aYFb78ZAZ0miudqWkx4t
+ YSNDj7llArylrPGHBLQ38X4/
+ -----END PRIVATE KEY-----
+ssl_cert_file_contents: |
+ -----BEGIN CERTIFICATE-----
+ MIIDcTCCAlmgAwIBAgIJAJsHSxF0u/oaMA0GCSqGSIb3DQEBCwUAME8xCzAJBgNV
+ BAYTAlVTMQ4wDAYDVQQHDAVXb3JsZDEOMAwGA1UECgwFT1BORlYxIDAeBgNVBAMM
+ F2NvbnRyb2xsZXIwMC5vcG5mdmxvY2FsMB4XDTE2MDgxNzE2MzQwOFoXDTE3MDgx
+ NzE2MzQwOFowTzELMAkGA1UEBhMCVVMxDjAMBgNVBAcMBVdvcmxkMQ4wDAYDVQQK
+ DAVPUE5GVjEgMB4GA1UEAwwXY29udHJvbGxlcjAwLm9wbmZ2bG9jYWwwggEiMA0G
+ CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC0YX6wsA/Jhe3qByoiLsyagO5rOCIy
+ zDsMTV0YMWVIa/QybvS1vI+pK9FIoYPbqWFGHXmQF0DJYulbGnB6A0GlT3YXuaKP
+ ucaaANr5hTjuEBF6LuQeq+OIO5u7+l56HGWbbVeB7+vnIxK943G545aBZSGlUnVf
+ Fg+v+IQtmRr36iEa5UDd4sahDXcp2Dm3zGgkFhFKie6AJ4UUTzrH2SL6Nhl7i+Ae
+ nuoUEDdgDWfGnCXozLngfmhKDi6lHDmh5zJhFS7cKz14wLgF37fsWxxxEX8a6gtG
+ YEEHqXV3x3AXO+U98pr15/xQM9O2O3mrqc/zkmcCRUwCjEeDjEHey3UJAgMBAAGj
+ UDBOMB0GA1UdDgQWBBQyFVbU5s2ihD0hX3W7GyHiHZGG1TAfBgNVHSMEGDAWgBQy
+ FVbU5s2ihD0hX3W7GyHiHZGG1TAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBCwUA
+ A4IBAQB+xf7I9RVWzRNjMbWBDE6pBvOWnSksv7Jgr4cREvyOxBDaIoO3uQRDDu6r
+ RCgGs1CuwEaFX1SS/OVrKRFiy9kCU/LBZEFwaHRaL2Kj57Z2yNInPIiKB4h9jen2
+ 75fYrpq42XUDSI0NpsqAJpmcQqXOOo8V08FlH0/6h8mWdsfQfbyaf+g73+aRZds8
+ Q4ttmBrqY4Pi5CJW46w7LRCA5o92Di3GI9dAh9MVZ3023cTTjDkW04QbluphuTFj
+ O07Npz162/fHTXut+piV78t+1HlfYWY5TOSQMIVwenftA/Bn8+TQAgnLR+nGo/wu
+ oEaxLtj3Jr07+yIjL88ewT+c3fpq
+ -----END CERTIFICATE-----
+infracloud_mysql_password: pass
+opnfv_password: pass
+
+rabbitmq::package_gpg_key: 'https://www.rabbitmq.com/rabbitmq-release-signing-key.asc'
+rabbitmq::repo::apt::key: '0A9AF2115F4687BD29803A206B73A36E6026DFCA'
+
+hosts:
+ jumphost.opnfvlocal:
+ ip: 192.168.122.2
+ controller00.opnfvlocal:
+ ip: 192.168.122.3
+ compute00.opnfvlocal:
+ ip: 192.168.122.4
diff --git a/prototypes/puppet-infracloud/install_modules.sh b/prototypes/puppet-infracloud/install_modules.sh
new file mode 100755
index 000000000..5d5acd9c1
--- /dev/null
+++ b/prototypes/puppet-infracloud/install_modules.sh
@@ -0,0 +1,121 @@
+#!/bin/bash
+# Copyright 2014 OpenStack Foundation.
+# Copyright 2014 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+MODULE_PATH=`puppet config print modulepath | cut -d ':' -f 1`
+SCRIPT_NAME=$(basename $0)
+SCRIPT_DIR=$(readlink -f "$(dirname $0)")
+JUST_CLONED=0
+
+function remove_module {
+ local SHORT_MODULE_NAME=$1
+ if [ -n "$SHORT_MODULE_NAME" ]; then
+ rm -Rf "$MODULE_PATH/$SHORT_MODULE_NAME"
+ else
+ echo "ERROR: remove_module requires a SHORT_MODULE_NAME."
+ fi
+}
+
+function git_clone {
+ local MOD=$1
+ local DEST=$2
+
+ JUST_CLONED=1
+ for attempt in $(seq 0 3); do
+ clone_error=0
+ git clone $MOD $DEST && break || true
+ rm -rf $DEST
+ clone_error=1
+ done
+ return $clone_error
+}
+
+# Array of modules to be installed key:value is module:version.
+declare -A MODULES
+
+# Array of modules to be installed from source and without dependency resolution.
+# key:value is source location, revision to checkout
+declare -A SOURCE_MODULES
+
+# Array of modules to be installed from source and without dependency resolution from openstack git
+# key:value is source location, revision to checkout
+declare -A INTEGRATION_MODULES
+
+# load modules.env to populate MODULES[*] and SOURCE_MODULES[*]
+# for processing.
+MODULE_ENV_FILE=${MODULE_FILE:-modules.env}
+MODULE_ENV_PATH=${MODULE_ENV_PATH:-${SCRIPT_DIR}}
+if [ -f "${MODULE_ENV_PATH}/${MODULE_ENV_FILE}" ] ; then
+ . "${MODULE_ENV_PATH}/${MODULE_ENV_FILE}"
+fi
+
+if [ -z "${!MODULES[*]}" ] && [ -z "${!SOURCE_MODULES[*]}" ] ; then
+ echo ""
+ echo "WARNING: nothing to do, unable to find MODULES or SOURCE_MODULES"
+ echo " export options, try setting MODULE_ENV_PATH or MODULE_ENV_FILE"
+ echo " export to the proper location of modules.env file."
+ echo ""
+ exit 0
+fi
+
+MODULE_LIST=`puppet module list --color=false`
+
+# Install modules from source
+for MOD in ${!SOURCE_MODULES[*]} ; do
+ JUST_CLONED=0
+ # get the name of the module directory
+ if [ `echo $MOD | awk -F. '{print $NF}'` = 'git' ]; then
+ echo "Remote repos of the form repo.git are not supported: ${MOD}"
+ exit 1
+ fi
+
+ MODULE_NAME=`echo $MOD | awk -F- '{print $NF}'`
+
+ # set up git base command to use the correct path
+ GIT_CMD_BASE="git --git-dir=${MODULE_PATH}/${MODULE_NAME}/.git --work-tree ${MODULE_PATH}/${MODULE_NAME}"
+ # treat any occurrence of the module as a match
+ if ! echo $MODULE_LIST | grep "${MODULE_NAME}" >/dev/null 2>&1; then
+ # clone modules that are not installed
+ git_clone $MOD "${MODULE_PATH}/${MODULE_NAME}"
+ else
+ if [ ! -d ${MODULE_PATH}/${MODULE_NAME}/.git ]; then
+ echo "Found directory ${MODULE_PATH}/${MODULE_NAME} that is not a git repo, deleting it and reinstalling from source"
+ remove_module $MODULE_NAME
+ git_clone $MOD "${MODULE_PATH}/${MODULE_NAME}"
+ elif [ `${GIT_CMD_BASE} remote show origin | grep 'Fetch URL' | awk -F'URL: ' '{print $2}'` != $MOD ]; then
+ echo "Found remote in ${MODULE_PATH}/${MODULE_NAME} that does not match desired remote ${MOD}, deleting dir and re-cloning"
+ remove_module $MODULE_NAME
+ git_clone $MOD "${MODULE_PATH}/${MODULE_NAME}"
+ fi
+ fi
+
+ # fetch the latest refs from the repo
+ if [[ $JUST_CLONED -eq 0 ]] ; then
+ # If we just cloned the repo, we do not need to remote update
+ for attempt in $(seq 0 3); do
+ clone_error=0
+ $GIT_CMD_BASE remote update && break || true
+ clone_error=1
+ done
+ if [[ $clone_error -ne 0 ]] ; then
+ exit $clone_error
+ fi
+ fi
+ # make sure the correct revision is installed, I have to use rev-list b/c rev-parse does not work with tags
+ if [ `${GIT_CMD_BASE} rev-list HEAD --max-count=1` != `${GIT_CMD_BASE} rev-list ${SOURCE_MODULES[$MOD]} --max-count=1` ]; then
+ # checkout correct revision
+ $GIT_CMD_BASE checkout ${SOURCE_MODULES[$MOD]}
+ fi
+done
diff --git a/prototypes/puppet-infracloud/manifests/site.pp b/prototypes/puppet-infracloud/manifests/site.pp
new file mode 100644
index 000000000..e524918c6
--- /dev/null
+++ b/prototypes/puppet-infracloud/manifests/site.pp
@@ -0,0 +1,63 @@
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 RedHat and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+node 'controller00.opnfvlocal' {
+ $group = 'infracloud'
+ class { 'opnfv::server':
+ iptables_public_tcp_ports => [80,5000,5671,8774,9292,9696,35357], # logs,keystone,rabbit,nova,glance,neutron,keystone
+ sysadmins => hiera('sysadmins', []),
+ enable_unbound => false,
+ purge_apt_sources => false,
+ }
+ class { 'opnfv::controller':
+ keystone_rabbit_password => hiera('keystone_rabbit_password'),
+ neutron_rabbit_password => hiera('neutron_rabbit_password'),
+ nova_rabbit_password => hiera('nova_rabbit_password'),
+ root_mysql_password => hiera('infracloud_mysql_password'),
+ keystone_mysql_password => hiera('keystone_mysql_password'),
+ glance_mysql_password => hiera('glance_mysql_password'),
+ neutron_mysql_password => hiera('neutron_mysql_password'),
+ nova_mysql_password => hiera('nova_mysql_password'),
+ keystone_admin_password => hiera('keystone_admin_password'),
+ glance_admin_password => hiera('glance_admin_password'),
+ neutron_admin_password => hiera('neutron_admin_password'),
+ nova_admin_password => hiera('nova_admin_password'),
+ keystone_admin_token => hiera('keystone_admin_token'),
+ ssl_key_file_contents => hiera('ssl_key_file_contents'),
+ ssl_cert_file_contents => hiera('ssl_cert_file_contents'),
+ br_name => 'br-eth0',
+ controller_public_address => $::fqdn,
+ neutron_subnet_cidr => '192.168.122.0/24',
+ neutron_subnet_gateway => '192.168.122.1',
+ neutron_subnet_allocation_pools => [
+ 'start=192.168.122.50,end=192.168.122.254',
+ ],
+ opnfv_password => hiera('opnfv_password'),
+ }
+}
+
+node 'compute00.opnfvlocal' {
+ $group = 'infracloud'
+ class { 'opnfv::server':
+ sysadmins => hiera('sysadmins', []),
+ enable_unbound => false,
+ purge_apt_sources => false,
+ }
+
+ class { 'opnfv::compute':
+ nova_rabbit_password => hiera('nova_rabbit_password'),
+ neutron_rabbit_password => hiera('neutron_rabbit_password'),
+ neutron_admin_password => hiera('neutron_admin_password'),
+ ssl_cert_file_contents => hiera('ssl_cert_file_contents'),
+ ssl_key_file_contents => hiera('ssl_key_file_contents'),
+ br_name => 'br-eth0',
+ controller_public_address => 'controller00.opnfvlocal',
+ virt_type => 'qemu',
+ }
+}
+
diff --git a/prototypes/puppet-infracloud/modules.env b/prototypes/puppet-infracloud/modules.env
new file mode 100644
index 000000000..2df81ecc4
--- /dev/null
+++ b/prototypes/puppet-infracloud/modules.env
@@ -0,0 +1,81 @@
+# Copyright 2014 OpenStack Foundation.
+# Copyright 2016 RedHat.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# load additional modules from modules.env
+# modules.env should exist in the same folder as install_modules.sh
+#
+# - use export MODULE_FILE to specify an alternate config
+# when calling install_modules.sh.
+# This allows for testing environments that are configured with alternate
+# module configuration.
+
+# Source modules should use tags, explicit refs or remote branches because
+# we do not update local branches in this script.
+# Keep sorted
+
+OPENSTACK_GIT_ROOT=https://git.openstack.org
+
+# InfraCloud modules
+SOURCE_MODULES["$OPENSTACK_GIT_ROOT/openstack/puppet-cinder"]="origin/stable/mitaka"
+SOURCE_MODULES["$OPENSTACK_GIT_ROOT/openstack/puppet-glance"]="origin/stable/mitaka"
+SOURCE_MODULES["$OPENSTACK_GIT_ROOT/openstack/puppet-ironic"]="origin/stable/mitaka"
+SOURCE_MODULES["$OPENSTACK_GIT_ROOT/openstack/puppet-keystone"]="origin/stable/mitaka"
+SOURCE_MODULES["$OPENSTACK_GIT_ROOT/openstack/puppet-neutron"]="origin/stable/mitaka"
+SOURCE_MODULES["$OPENSTACK_GIT_ROOT/openstack/puppet-nova"]="origin/stable/mitaka"
+SOURCE_MODULES["$OPENSTACK_GIT_ROOT/openstack/puppet-openstack_extras"]="origin/stable/mitaka"
+SOURCE_MODULES["$OPENSTACK_GIT_ROOT/openstack/puppet-openstacklib"]="origin/stable/mitaka"
+
+SOURCE_MODULES["https://github.com/duritong/puppet-sysctl"]="v0.0.11"
+SOURCE_MODULES["https://github.com/nanliu/puppet-staging"]="1.0.0"
+SOURCE_MODULES["https://github.com/jfryman/puppet-selinux"]="v0.2.5"
+SOURCE_MODULES["https://github.com/maestrodev/puppet-wget"]="v1.6.0"
+SOURCE_MODULES["https://github.com/puppetlabs/puppetlabs-apache"]="1.8.1"
+SOURCE_MODULES["https://github.com/puppetlabs/puppetlabs-apt"]="2.1.0"
+SOURCE_MODULES["https://github.com/puppetlabs/puppetlabs-concat"]="1.2.5"
+SOURCE_MODULES["https://github.com/puppetlabs/puppetlabs-firewall"]="1.1.3"
+SOURCE_MODULES["https://github.com/puppetlabs/puppetlabs-haproxy"]="1.5.0"
+SOURCE_MODULES["https://github.com/puppetlabs/puppetlabs-inifile"]="1.1.3"
+SOURCE_MODULES["https://github.com/puppetlabs/puppetlabs-mysql"]="3.6.2"
+SOURCE_MODULES["https://github.com/puppetlabs/puppetlabs-ntp"]="3.2.1"
+SOURCE_MODULES["https://github.com/puppetlabs/puppetlabs-rabbitmq"]="5.2.3"
+SOURCE_MODULES["https://github.com/puppetlabs/puppetlabs-stdlib"]="4.10.0"
+SOURCE_MODULES["https://github.com/rafaelfelix/puppet-pear"]="1.0.3"
+SOURCE_MODULES["https://github.com/saz/puppet-memcached"]="v2.6.0"
+SOURCE_MODULES["https://github.com/saz/puppet-timezone"]="v3.3.0"
+SOURCE_MODULES["https://github.com/stankevich/puppet-python"]="1.9.4"
+SOURCE_MODULES["https://github.com/vamsee/puppet-solr"]="0.0.8"
+SOURCE_MODULES["https://github.com/voxpupuli/puppet-alternatives"]="0.3.0"
+SOURCE_MODULES["https://github.com/voxpupuli/puppet-archive"]="v0.5.1"
+SOURCE_MODULES["https://github.com/voxpupuli/puppet-git_resource"]="0.3.0"
+SOURCE_MODULES["https://github.com/voxpupuli/puppet-nodejs"]="1.2.0"
+SOURCE_MODULES["https://github.com/voxpupuli/puppet-puppetboard"]="2.4.0"
+
+
+INTEGRATION_MODULES["$OPENSTACK_GIT_ROOT/openstack-infra/puppet-httpd"]="origin/master"
+INTEGRATION_MODULES["$OPENSTACK_GIT_ROOT/openstack-infra/puppet-infracloud"]="origin/master"
+INTEGRATION_MODULES["$OPENSTACK_GIT_ROOT/openstack-infra/puppet-iptables"]="origin/master"
+INTEGRATION_MODULES["$OPENSTACK_GIT_ROOT/openstack-infra/puppet-pip"]="origin/master"
+INTEGRATION_MODULES["$OPENSTACK_GIT_ROOT/openstack-infra/puppet-snmpd"]="origin/master"
+INTEGRATION_MODULES["$OPENSTACK_GIT_ROOT/openstack-infra/puppet-ssh"]="origin/master"
+INTEGRATION_MODULES["$OPENSTACK_GIT_ROOT/openstack-infra/puppet-ssl_cert_check"]="origin/master"
+INTEGRATION_MODULES["$OPENSTACK_GIT_ROOT/openstack-infra/puppet-sudoers"]="origin/master"
+INTEGRATION_MODULES["$OPENSTACK_GIT_ROOT/openstack-infra/puppet-ulimit"]="origin/master"
+INTEGRATION_MODULES["$OPENSTACK_GIT_ROOT/openstack-infra/puppet-unattended_upgrades"]="origin/master"
+INTEGRATION_MODULES["$OPENSTACK_GIT_ROOT/openstack-infra/puppet-unbound"]="origin/master"
+INTEGRATION_MODULES["$OPENSTACK_GIT_ROOT/openstack-infra/puppet-user"]="origin/master"
+
+for MOD in ${!INTEGRATION_MODULES[*]}; do
+ SOURCE_MODULES[$MOD]=${INTEGRATION_MODULES[$MOD]}
+done
diff --git a/prototypes/puppet-infracloud/modules/opnfv/manifests/compute.pp b/prototypes/puppet-infracloud/modules/opnfv/manifests/compute.pp
new file mode 100644
index 000000000..ca548a5d5
--- /dev/null
+++ b/prototypes/puppet-infracloud/modules/opnfv/manifests/compute.pp
@@ -0,0 +1,23 @@
+class opnfv::compute (
+ $nova_rabbit_password,
+ $neutron_rabbit_password,
+ $neutron_admin_password,
+ $ssl_cert_file_contents,
+ $ssl_key_file_contents,
+ $br_name,
+ $controller_public_address,
+ $virt_type = 'kvm',
+) {
+ class { '::infracloud::compute':
+ nova_rabbit_password => $nova_rabbit_password,
+ neutron_rabbit_password => $neutron_rabbit_password,
+ neutron_admin_password => $neutron_admin_password,
+ ssl_cert_file_contents => $ssl_cert_file_contents,
+ ssl_key_file_contents => $ssl_key_file_contents,
+ br_name => $br_name,
+ controller_public_address => $controller_public_address,
+ virt_type => $virt_type,
+ }
+
+}
+
diff --git a/prototypes/puppet-infracloud/modules/opnfv/manifests/controller.pp b/prototypes/puppet-infracloud/modules/opnfv/manifests/controller.pp
new file mode 100644
index 000000000..7522692c1
--- /dev/null
+++ b/prototypes/puppet-infracloud/modules/opnfv/manifests/controller.pp
@@ -0,0 +1,85 @@
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 RedHat and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+class opnfv::controller (
+ $keystone_rabbit_password,
+ $neutron_rabbit_password,
+ $nova_rabbit_password,
+ $root_mysql_password,
+ $keystone_mysql_password,
+ $glance_mysql_password,
+ $neutron_mysql_password,
+ $nova_mysql_password,
+ $glance_admin_password,
+ $keystone_admin_password,
+ $neutron_admin_password,
+ $nova_admin_password,
+ $keystone_admin_token,
+ $ssl_key_file_contents,
+ $ssl_cert_file_contents,
+ $br_name,
+ $controller_public_address = $::fqdn,
+ $neutron_subnet_cidr,
+ $neutron_subnet_gateway,
+ $neutron_subnet_allocation_pools,
+ $opnfv_password,
+ $opnfv_email = 'opnfvuser@gmail.com',
+) {
+ class { '::infracloud::controller':
+ keystone_rabbit_password => $keystone_rabbit_password,
+ neutron_rabbit_password => $neutron_rabbit_password,
+ nova_rabbit_password => $nova_rabbit_password,
+ root_mysql_password => $root_mysql_password,
+ keystone_mysql_password => $keystone_mysql_password,
+ glance_mysql_password => $glance_mysql_password,
+ neutron_mysql_password => $neutron_mysql_password,
+ nova_mysql_password => $nova_mysql_password,
+ keystone_admin_password => $keystone_admin_password,
+ glance_admin_password => $glance_admin_password,
+ neutron_admin_password => $neutron_admin_password,
+ nova_admin_password => $nova_admin_password,
+ keystone_admin_token => $keystone_admin_token,
+ ssl_key_file_contents => $ssl_key_file_contents,
+ ssl_cert_file_contents => $ssl_cert_file_contents,
+ br_name => $br_name,
+ controller_public_address => $controller_public_address,
+ neutron_subnet_cidr => $neutron_subnet_cidr,
+ neutron_subnet_gateway => $neutron_subnet_gateway,
+ neutron_subnet_allocation_pools => $neutron_subnet_allocation_pools,
+ }
+
+ # create keystone creds
+ keystone_domain { 'opnfv':
+ ensure => present,
+ enabled => true,
+ }
+
+ keystone_tenant { 'opnfv':
+ ensure => present,
+ enabled => true,
+ description => 'OPNFV cloud',
+ domain => 'opnfv',
+ require => Keystone_domain['opnfv'],
+ }
+
+ keystone_user { 'opnfv':
+ ensure => present,
+ enabled => true,
+ domain => 'opnfv',
+ email => $opnfv_email,
+ password => $opnfv_password,
+ require => Keystone_tenant['opnfv'],
+ }
+
+ keystone_role { 'user': ensure => present }
+
+ keystone_user_role { 'opnfv::opnfv@opnfv::opnfv':
+ roles => [ 'user', 'admin', ],
+ }
+}
+
diff --git a/prototypes/puppet-infracloud/modules/opnfv/manifests/server.pp b/prototypes/puppet-infracloud/modules/opnfv/manifests/server.pp
new file mode 100644
index 000000000..5bbcd7506
--- /dev/null
+++ b/prototypes/puppet-infracloud/modules/opnfv/manifests/server.pp
@@ -0,0 +1,222 @@
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 RedHat and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+class opnfv::server (
+ $iptables_public_tcp_ports = [],
+ $iptables_public_udp_ports = [],
+ $iptables_rules4 = [],
+ $iptables_rules6 = [],
+ $sysadmins = [],
+ $enable_unbound = true,
+ $purge_apt_sources = true,
+) {
+ ###########################################################
+ # Classes for all hosts
+
+ include snmpd
+ include sudoers
+
+ class { 'iptables':
+ public_tcp_ports => $iptables_public_tcp_ports,
+ public_udp_ports => $all_udp,
+ rules4 => $iptables_rules4,
+ rules6 => $iptables_rules6,
+ }
+
+ class { 'timezone':
+ timezone => 'Etc/UTC',
+ }
+
+ if ($enable_unbound) {
+ class { 'unbound':
+ install_resolv_conf => $install_resolv_conf
+ }
+ }
+
+ if ($::in_chroot) {
+ notify { 'rsyslog in chroot':
+ message => 'rsyslog not refreshed, running in chroot',
+ }
+ $rsyslog_notify = []
+ } else {
+ service { 'rsyslog':
+ ensure => running,
+ enable => true,
+ hasrestart => true,
+ require => Package['rsyslog'],
+ }
+ $rsyslog_notify = [ Service['rsyslog'] ]
+ }
+
+ ###########################################################
+ # System tweaks
+
+ # Increase syslog message size in order to capture
+ # python tracebacks with syslog.
+ file { '/etc/rsyslog.d/99-maxsize.conf':
+ ensure => present,
+ # Note MaxMessageSize is not a puppet variable.
+ content => '$MaxMessageSize 6k',
+ owner => 'root',
+ group => 'root',
+ mode => '0644',
+ notify => $rsyslog_notify,
+ require => Package['rsyslog'],
+ }
+
+ # We don't like byobu
+ file { '/etc/profile.d/Z98-byobu.sh':
+ ensure => absent,
+ }
+
+ if $::osfamily == 'Debian' {
+
+ # Ubuntu installs their whoopsie package by default, but it eats through
+ # memory and we don't need it on servers
+ package { 'whoopsie':
+ ensure => absent,
+ }
+
+ package { 'popularity-contest':
+ ensure => absent,
+ }
+ }
+
+ ###########################################################
+ # Package resources for all operating systems
+
+ package { 'at':
+ ensure => present,
+ }
+
+ package { 'lvm2':
+ ensure => present,
+ }
+
+ package { 'strace':
+ ensure => present,
+ }
+
+ package { 'tcpdump':
+ ensure => present,
+ }
+
+ package { 'rsyslog':
+ ensure => present,
+ }
+
+ package { 'git':
+ ensure => present,
+ }
+
+ package { 'rsync':
+ ensure => present,
+ }
+
+ case $::osfamily {
+ 'RedHat': {
+ $packages = ['parted', 'puppet', 'wget', 'iputils']
+ $user_packages = ['emacs-nox', 'vim-enhanced']
+ $update_pkg_list_cmd = ''
+ }
+ 'Debian': {
+ $packages = ['parted', 'puppet', 'wget', 'iputils-ping']
+ case $::operatingsystemrelease {
+ /^(12|14)\.(04|10)$/: {
+ $user_packages = ['emacs23-nox', 'vim-nox', 'iftop',
+ 'sysstat', 'iotop']
+ }
+ default: {
+ $user_packages = ['emacs-nox', 'vim-nox']
+ }
+ }
+ $update_pkg_list_cmd = 'apt-get update >/dev/null 2>&1;'
+ }
+ default: {
+ fail("Unsupported osfamily: ${::osfamily} The 'openstack_project' module only supports osfamily Debian or RedHat (slaves only).")
+ }
+ }
+ package { $packages:
+ ensure => present
+ }
+
+ ###########################################################
+ # Package resources for specific operating systems
+
+ case $::osfamily {
+ 'Debian': {
+ # Purge and augment existing /etc/apt/sources.list if requested, and make
+ # sure apt-get update is run before any packages are installed
+ class { '::apt':
+ purge => { 'sources.list' => $purge_apt_sources }
+ }
+
+ # Make sure dig is installed
+ package { 'dnsutils':
+ ensure => present,
+ }
+ }
+ 'RedHat': {
+ # Make sure dig is installed
+ package { 'bind-utils':
+ ensure => present,
+ }
+ }
+ }
+
+ ###########################################################
+ # Manage ntp
+
+ include '::ntp'
+
+ if ($::osfamily == "RedHat") {
+ # Utils in ntp-perl are included in Debian's ntp package; we
+ # add it here for consistency. See also
+ # https://tickets.puppetlabs.com/browse/MODULES-3660
+ package { 'ntp-perl':
+ ensure => present
+ }
+ # NOTE(pabelanger): We need to ensure ntpdate service starts on boot for
+ # centos-7. Currently, ntpd explicitly require ntpdate to be running before
+ # the sync process can happen in ntpd. As a result, if ntpdate is not
+ # running, ntpd will start but fail to sync because of DNS is not properly
+ # setup.
+ package { 'ntpdate':
+ ensure => present,
+ }
+ service { 'ntpdate':
+ enable => true,
+ require => Package['ntpdate'],
+ }
+ }
+
+ ###########################################################
+ # Manage python/pip
+
+ $desired_virtualenv = '13.1.0'
+ class { '::pip':
+ optional_settings => {
+ 'extra-index-url' => '',
+ },
+ manage_pip_conf => true,
+ }
+
+ if (( versioncmp($::virtualenv_version, $desired_virtualenv) < 0 )) {
+ $virtualenv_ensure = $desired_virtualenv
+ } else {
+ $virtualenv_ensure = present
+ }
+ package { 'virtualenv':
+ ensure => $virtualenv_ensure,
+ provider => openstack_pip,
+ require => Class['pip'],
+ }
+
+ # add hosts entries
+ create_resources('host', hiera_hash('hosts'))
+}
diff --git a/utils/fetch_os_creds.sh b/utils/fetch_os_creds.sh
index e11df599b..47fbc91dc 100755
--- a/utils/fetch_os_creds.sh
+++ b/utils/fetch_os_creds.sh
@@ -142,7 +142,6 @@ elif [ "$installer_type" == "compass" ]; then
sshpass -p root ssh 2>/dev/null $ssh_options root@${installer_ip} \
"scp $ssh_options ${controller_ip}:/opt/admin-openrc.sh ." &> /dev/null
sshpass -p root scp 2>/dev/null $ssh_options root@${installer_ip}:~/admin-openrc.sh $dest_path &> /dev/null
- echo 'export OS_REGION_NAME=regionOne' >> $dest_path
info "This file contains the mgmt keystone API, we need the public one for our rc file"
public_ip=$(sshpass -p root ssh $ssh_options root@${installer_ip} \
diff --git a/utils/jenkins-jnlp-connect.sh b/utils/jenkins-jnlp-connect.sh
index d268a28de..4b710cab2 100755
--- a/utils/jenkins-jnlp-connect.sh
+++ b/utils/jenkins-jnlp-connect.sh
@@ -38,6 +38,11 @@ main () {
exit 1
fi
+ if [[ $(pwd) != "$jenkinshome" ]]; then
+ echo "This script needs to be run from the jenkins users home dir"
+ exit 1
+ fi
+
if [[ -z $slave_name || -z $slave_secret ]]; then
echo "slave name or secret not defined, please edit this file to define it"
exit 1
@@ -49,8 +54,8 @@ main () {
fi
if [[ $(whoami) != "root" ]]; then
- if grep "^Defaults requiretty" /etc/sudoers
- then echo "please comment out Defaults requiretty from /etc/sudoers"
+ if sudo -l | grep "requiretty"; then
+ echo "please comment out Defaults requiretty from /etc/sudoers"
exit 1
fi
fi
diff --git a/utils/test/reporting/functest/reporting-status.py b/utils/test/reporting/functest/reporting-status.py
index 622c375cc..7c943d8b3 100644..100755
--- a/utils/test/reporting/functest/reporting-status.py
+++ b/utils/test/reporting/functest/reporting-status.py
@@ -99,8 +99,9 @@ for version in conf.versions:
for test_case in testValid:
test_case.checkRunnable(installer, s,
test_case.getConstraints())
- logger.debug("testcase %s is %s" %
+ logger.debug("testcase %s (%s) is %s" %
(test_case.getDisplayName(),
+ test_case.getName(),
test_case.isRunnable))
time.sleep(1)
if test_case.isRunnable:
@@ -131,8 +132,10 @@ for version in conf.versions:
for test_case in otherTestCases:
test_case.checkRunnable(installer, s,
test_case.getConstraints())
- logger.info("testcase %s is %s" %
- (test_case.getName(), test_case.isRunnable))
+ logger.debug("testcase %s (%s) is %s" %
+ (test_case.getDisplayName(),
+ test_case.getName(),
+ test_case.isRunnable))
time.sleep(1)
if test_case.isRunnable:
dbName = test_case.getDbName()
diff --git a/utils/test/reporting/functest/reporting-tempest.py b/utils/test/reporting/functest/reporting-tempest.py
index e3f4e3306..0dc1dd343 100644..100755
--- a/utils/test/reporting/functest/reporting-tempest.py
+++ b/utils/test/reporting/functest/reporting-tempest.py
@@ -24,104 +24,108 @@ logger.info("nb tests executed > %s s " % criteria_nb_test)
logger.info("test duration < %s s " % criteria_duration)
logger.info("success rate > %s " % criteria_success_rate)
-for installer in installers:
- # we consider the Tempest results of the last PERIOD days
- url = conf.URL_BASE + "?case=tempest_smoke_serial"
- request = Request(url + '&period=' + str(PERIOD) +
- '&installer=' + installer + '&version=master')
- logger.info("Search tempest_smoke_serial results for installer %s"
- % installer)
- try:
- response = urlopen(request)
- k = response.read()
- results = json.loads(k)
- except URLError, e:
- logger.error("Error code: %s" % e)
-
- test_results = results['results']
-
- scenario_results = {}
- criteria = {}
- errors = {}
-
- for r in test_results:
- # Retrieve all the scenarios per installer
- # In Brahmaputra use version
- # Since Colorado use scenario
- if not r['scenario'] in scenario_results.keys():
- scenario_results[r['scenario']] = []
- scenario_results[r['scenario']].append(r)
-
- for s, s_result in scenario_results.items():
- scenario_results[s] = s_result[0:5]
- # For each scenario, we build a result object to deal with
- # results, criteria and error handling
- for result in scenario_results[s]:
- result["start_date"] = result["start_date"].split(".")[0]
-
- # retrieve results
- # ****************
- nb_tests_run = result['details']['tests']
- nb_tests_failed = result['details']['failures']
- if nb_tests_run != 0:
- success_rate = 100*(int(nb_tests_run) -
- int(nb_tests_failed)) / int(nb_tests_run)
- else:
- success_rate = 0
-
- result['details']["tests"] = nb_tests_run
- result['details']["Success rate"] = str(success_rate) + "%"
-
- # Criteria management
- # *******************
- crit_tests = False
- crit_rate = False
- crit_time = False
-
- # Expect that at least 165 tests are run
- if nb_tests_run >= criteria_nb_test:
- crit_tests = True
-
- # Expect that at least 90% of success
- if success_rate >= criteria_success_rate:
- crit_rate = True
-
- # Expect that the suite duration is inferior to 30m
- if result['details']['duration'] < criteria_duration:
- crit_time = True
-
- result['criteria'] = {'tests': crit_tests,
- 'Success rate': crit_rate,
- 'duration': crit_time}
- try:
- logger.debug("Scenario %s, Installer %s"
- % (s_result[1]['scenario'], installer))
- logger.debug("Nb Test run: %s" % nb_tests_run)
- logger.debug("Test duration: %s"
- % result['details']['duration'])
- logger.debug("Success rate: %s" % success_rate)
- except:
- logger.error("Data format error")
-
- # Error management
- # ****************
- try:
- errors = result['details']['errors']
- result['errors'] = errors.replace('{0}', '')
- except:
- logger.error("Error field not present (Brahamputra runs?)")
-
- templateLoader = jinja2.FileSystemLoader(conf.REPORTING_PATH)
- templateEnv = jinja2.Environment(loader=templateLoader)
-
- TEMPLATE_FILE = "/template/index-tempest-tmpl.html"
- template = templateEnv.get_template(TEMPLATE_FILE)
-
- outputText = template.render(scenario_results=scenario_results,
- items=items,
- installer=installer)
-
- with open(conf.REPORTING_PATH + "/release/master/index-tempest-" +
- installer + ".html", "wb") as fh:
- fh.write(outputText)
+# For all the versions
+for version in conf.versions:
+ for installer in conf.installers:
+ # we consider the Tempest results of the last PERIOD days
+ url = conf.URL_BASE + "?case=tempest_smoke_serial"
+ request = Request(url + '&period=' + str(PERIOD) +
+ '&installer=' + installer +
+ '&version=' + version)
+ logger.info("Search tempest_smoke_serial results for installer %s"
+ " for version %s"
+ % (installer, version))
+ try:
+ response = urlopen(request)
+ k = response.read()
+ results = json.loads(k)
+ except URLError, e:
+ logger.error("Error code: %s" % e)
+
+ test_results = results['results']
+
+ scenario_results = {}
+ criteria = {}
+ errors = {}
+
+ for r in test_results:
+ # Retrieve all the scenarios per installer
+ # In Brahmaputra use version
+ # Since Colorado use scenario
+ if not r['scenario'] in scenario_results.keys():
+ scenario_results[r['scenario']] = []
+ scenario_results[r['scenario']].append(r)
+
+ for s, s_result in scenario_results.items():
+ scenario_results[s] = s_result[0:5]
+ # For each scenario, we build a result object to deal with
+ # results, criteria and error handling
+ for result in scenario_results[s]:
+ result["start_date"] = result["start_date"].split(".")[0]
+
+ # retrieve results
+ # ****************
+ nb_tests_run = result['details']['tests']
+ nb_tests_failed = result['details']['failures']
+ if nb_tests_run != 0:
+ success_rate = 100*(int(nb_tests_run) -
+ int(nb_tests_failed)) / int(nb_tests_run)
+ else:
+ success_rate = 0
+
+ result['details']["tests"] = nb_tests_run
+ result['details']["Success rate"] = str(success_rate) + "%"
+
+ # Criteria management
+ # *******************
+ crit_tests = False
+ crit_rate = False
+ crit_time = False
+
+ # Expect that at least 165 tests are run
+ if nb_tests_run >= criteria_nb_test:
+ crit_tests = True
+
+ # Expect that at least 90% of success
+ if success_rate >= criteria_success_rate:
+ crit_rate = True
+
+ # Expect that the suite duration is inferior to 30m
+ if result['details']['duration'] < criteria_duration:
+ crit_time = True
+
+ result['criteria'] = {'tests': crit_tests,
+ 'Success rate': crit_rate,
+ 'duration': crit_time}
+ try:
+ logger.debug("Scenario %s, Installer %s"
+ % (s_result[1]['scenario'], installer))
+ logger.debug("Nb Test run: %s" % nb_tests_run)
+ logger.debug("Test duration: %s"
+ % result['details']['duration'])
+ logger.debug("Success rate: %s" % success_rate)
+ except:
+ logger.error("Data format error")
+
+ # Error management
+ # ****************
+ try:
+ errors = result['details']['errors']
+ result['errors'] = errors.replace('{0}', '')
+ except:
+ logger.error("Error field not present (Brahamputra runs?)")
+
+ templateLoader = jinja2.FileSystemLoader(conf.REPORTING_PATH)
+ templateEnv = jinja2.Environment(loader=templateLoader)
+
+ TEMPLATE_FILE = "/template/index-tempest-tmpl.html"
+ template = templateEnv.get_template(TEMPLATE_FILE)
+
+ outputText = template.render(scenario_results=scenario_results,
+ items=items,
+ installer=installer)
+
+ with open(conf.REPORTING_PATH + "/release/" + version +
+ "/index-tempest-" + installer + ".html", "wb") as fh:
+ fh.write(outputText)
logger.info("Tempest automatic reporting succesfully generated.")
diff --git a/utils/test/reporting/functest/reporting-vims.py b/utils/test/reporting/functest/reporting-vims.py
index d0436ed14..a83d92f0a 100644..100755
--- a/utils/test/reporting/functest/reporting-vims.py
+++ b/utils/test/reporting/functest/reporting-vims.py
@@ -33,81 +33,87 @@ logger.info("****************************************")
installers = conf.installers
step_order = ["initialisation", "orchestrator", "vIMS", "sig_test"]
logger.info("Start processing....")
-for installer in installers:
- logger.info("Search vIMS results for installer %s" % installer)
- request = Request(conf.URL_BASE + '?case=vims&installer=' + installer)
-
- try:
- response = urlopen(request)
- k = response.read()
- results = json.loads(k)
- except URLError, e:
- logger.error("Error code: %s" % e)
-
- test_results = results['results']
-
- logger.debug("Results found: %s" % test_results)
-
- scenario_results = {}
- for r in test_results:
- if not r['scenario'] in scenario_results.keys():
- scenario_results[r['scenario']] = []
- scenario_results[r['scenario']].append(r)
-
- for s, s_result in scenario_results.items():
- scenario_results[s] = s_result[0:5]
- logger.debug("Search for success criteria")
- for result in scenario_results[s]:
- result["start_date"] = result["start_date"].split(".")[0]
- sig_test = result['details']['sig_test']['result']
- if not sig_test == "" and isinstance(sig_test, list):
- format_result = sig_test_format(sig_test)
- if format_result['failures'] > format_result['passed']:
- result['details']['sig_test']['duration'] = 0
- result['details']['sig_test']['result'] = format_result
- nb_step_ok = 0
- nb_step = len(result['details'])
-
- for step_name, step_result in result['details'].items():
- if step_result['duration'] != 0:
- nb_step_ok += 1
- m, s = divmod(step_result['duration'], 60)
- m_display = ""
- if int(m) != 0:
- m_display += str(int(m)) + "m "
- step_result['duration_display'] = m_display + str(int(s)) + "s"
-
- result['pr_step_ok'] = 0
- if nb_step != 0:
- result['pr_step_ok'] = (float(nb_step_ok)/nb_step)*100
- try:
- logger.debug("Scenario %s, Installer %s"
- % (s_result[1]['scenario'], installer))
- logger.debug("Orchestrator deployment: %s s"
- % result['details']['orchestrator']['duration'])
- logger.debug("vIMS deployment: %s s"
- % result['details']['vIMS']['duration'])
- logger.debug("Signaling testing: %s s"
- % result['details']['sig_test']['duration'])
- logger.debug("Signaling testing results: %s"
- % format_result)
- except:
- logger.error("Data badly formatted")
- logger.debug("------------------------------------------------")
-
- templateLoader = jinja2.FileSystemLoader(conf.REPORTING_PATH)
- templateEnv = jinja2.Environment(loader=templateLoader)
-
- TEMPLATE_FILE = "/template/index-vims-tmpl.html"
- template = templateEnv.get_template(TEMPLATE_FILE)
-
- outputText = template.render(scenario_results=scenario_results,
- step_order=step_order,
- installer=installer)
-
- with open(conf.REPORTING_PATH +
- "/release/master/index-vims-" +
- installer + ".html", "wb") as fh:
- fh.write(outputText)
+
+# For all the versions
+for version in conf.versions:
+ for installer in installers:
+ logger.info("Search vIMS results for installer: %s, version: %s"
+ % (installer, version))
+ request = Request(conf.URL_BASE + '?case=vims&installer=' +
+ installer + '&version=' + version)
+
+ try:
+ response = urlopen(request)
+ k = response.read()
+ results = json.loads(k)
+ except URLError, e:
+ logger.error("Error code: %s" % e)
+
+ test_results = results['results']
+
+ logger.debug("Results found: %s" % test_results)
+
+ scenario_results = {}
+ for r in test_results:
+ if not r['scenario'] in scenario_results.keys():
+ scenario_results[r['scenario']] = []
+ scenario_results[r['scenario']].append(r)
+
+ for s, s_result in scenario_results.items():
+ scenario_results[s] = s_result[0:5]
+ logger.debug("Search for success criteria")
+ for result in scenario_results[s]:
+ result["start_date"] = result["start_date"].split(".")[0]
+ sig_test = result['details']['sig_test']['result']
+ if not sig_test == "" and isinstance(sig_test, list):
+ format_result = sig_test_format(sig_test)
+ if format_result['failures'] > format_result['passed']:
+ result['details']['sig_test']['duration'] = 0
+ result['details']['sig_test']['result'] = format_result
+ nb_step_ok = 0
+ nb_step = len(result['details'])
+
+ for step_name, step_result in result['details'].items():
+ if step_result['duration'] != 0:
+ nb_step_ok += 1
+ m, s = divmod(step_result['duration'], 60)
+ m_display = ""
+ if int(m) != 0:
+ m_display += str(int(m)) + "m "
+
+ step_result['duration_display'] = m_display + str(int(s)) + "s"
+
+ result['pr_step_ok'] = 0
+ if nb_step != 0:
+ result['pr_step_ok'] = (float(nb_step_ok)/nb_step)*100
+ try:
+ logger.debug("Scenario %s, Installer %s"
+ % (s_result[1]['scenario'], installer))
+ logger.debug("Orchestrator deployment: %s s"
+ % result['details']['orchestrator']['duration'])
+ logger.debug("vIMS deployment: %s s"
+ % result['details']['vIMS']['duration'])
+ logger.debug("Signaling testing: %s s"
+ % result['details']['sig_test']['duration'])
+ logger.debug("Signaling testing results: %s"
+ % format_result)
+ except:
+ logger.error("Data badly formatted")
+ logger.debug("----------------------------------------")
+
+ templateLoader = jinja2.FileSystemLoader(conf.REPORTING_PATH)
+ templateEnv = jinja2.Environment(loader=templateLoader)
+
+ TEMPLATE_FILE = "/template/index-vims-tmpl.html"
+ template = templateEnv.get_template(TEMPLATE_FILE)
+
+ outputText = template.render(scenario_results=scenario_results,
+ step_order=step_order,
+ installer=installer)
+
+ with open(conf.REPORTING_PATH +
+ "/release/" + version + "/index-vims-" +
+ installer + ".html", "wb") as fh:
+ fh.write(outputText)
logger.info("vIMS report succesfully generated")
diff --git a/utils/test/reporting/functest/reportingConf.py b/utils/test/reporting/functest/reportingConf.py
index a58eeecc9..9230cb286 100644
--- a/utils/test/reporting/functest/reportingConf.py
+++ b/utils/test/reporting/functest/reportingConf.py
@@ -10,14 +10,13 @@
#
# ****************************************************
installers = ["apex", "compass", "fuel", "joid"]
-# installers = ["apex"]
# list of test cases declared in testcases.yaml but that must not be
# taken into account for the scoring
-blacklist = ["odl", "ovno", "security_scan", "copper", "moon"]
+blacklist = ["ovno", "security_scan", 'odl-sfc']
# versions = ["brahmaputra", "master"]
-versions = ["master"]
-PERIOD = 10
-MAX_SCENARIO_CRITERIA = 18
+versions = ["master", "colorado"]
+PERIOD = 50
+MAX_SCENARIO_CRITERIA = 50
# get the last 5 test results to determinate the success criteria
NB_TESTS = 5
# REPORTING_PATH = "/usr/share/nginx/html/reporting/functest"
diff --git a/utils/test/reporting/functest/template/index-status-tmpl.html b/utils/test/reporting/functest/template/index-status-tmpl.html
index 0c3fa9426..da2213bc0 100644
--- a/utils/test/reporting/functest/template/index-status-tmpl.html
+++ b/utils/test/reporting/functest/template/index-status-tmpl.html
@@ -21,7 +21,7 @@
<h3 class="text-muted">Functest status page ({{version}})</h3>
<nav>
<ul class="nav nav-justified">
- <li class="active"><a href="index.html">Home</a></li>
+ <li class="active"><a href="http://testresults.opnfv.org/reporting/index.html">Home</a></li>
<li><a href="index-status-apex.html">Apex</a></li>
<li><a href="index-status-compass.html">Compass</a></li>
<li><a href="index-status-fuel.html">Fuel</a></li>
@@ -101,7 +101,7 @@
</div>
</div>
{%- endfor %}
- *: not used for scenario validation
+ see <a href="https://wiki.opnfv.org/pages/viewpage.action?pageId=6828617">Functest scoring wiki page</a> for details on scenario scoring
</div>
<div class="col-md-1"></div>
</div>
diff --git a/utils/test/reporting/functest/template/index-tempest-tmpl.html b/utils/test/reporting/functest/template/index-tempest-tmpl.html
index c56214346..42d7ed339 100644
--- a/utils/test/reporting/functest/template/index-tempest-tmpl.html
+++ b/utils/test/reporting/functest/template/index-tempest-tmpl.html
@@ -21,7 +21,7 @@
<h3 class="text-muted">Tempest status page</h3>
<nav>
<ul class="nav nav-justified">
- <li class="active"><a href="index.html">Home</a></li>
+ <li class="active"><a href="http://testresults.opnfv.org/reporting/index.html">Home</a></li>
<li><a href="index-tempest-apex.html">Apex</a></li>
<li><a href="index-tempest-compass.html">Compass</a></li>
<li><a href="index-tempest-fuel.html">Fuel</a></li>
diff --git a/utils/test/reporting/functest/template/index-vims-tmpl.html b/utils/test/reporting/functest/template/index-vims-tmpl.html
index 25499dc46..3836be91f 100644
--- a/utils/test/reporting/functest/template/index-vims-tmpl.html
+++ b/utils/test/reporting/functest/template/index-vims-tmpl.html
@@ -21,7 +21,7 @@
<h3 class="text-muted">vIMS status page</h3>
<nav>
<ul class="nav nav-justified">
- <li class="active"><a href="index.html">Home</a></li>
+ <li class="active"><a href="http://testresults.opnfv.org/reporting/index.html">Home</a></li>
<li><a href="index-vims-fuel.html">Fuel</a></li>
<li><a href="index-vims-compass.html">Compass</a></li>
<li><a href="index-vims-joid.html">JOID</a></li>
diff --git a/utils/test/reporting/functest/testCase.py b/utils/test/reporting/functest/testCase.py
index e19853a09..a906f0da8 100644
--- a/utils/test/reporting/functest/testCase.py
+++ b/utils/test/reporting/functest/testCase.py
@@ -35,7 +35,12 @@ class TestCase(object):
'promise': 'Promise',
'moon': 'moon',
'copper': 'copper',
- 'security_scan': 'security'
+ 'security_scan': 'security',
+ 'multisite': 'multisite',
+ 'domino': 'domino',
+ 'odl-sfc': 'SFC',
+ 'onos_sfc': 'SFC',
+ 'parser':'parser'
}
try:
self.displayName = display_name_matrix[self.name]
@@ -122,8 +127,13 @@ class TestCase(object):
'doctor': 'doctor-notification',
'promise': 'promise',
'moon': 'moon',
- 'copper': 'copper',
- 'security_scan': 'security'
+ 'copper': 'copper-notification',
+ 'security_scan': 'security',
+ 'multisite': 'multisite',
+ 'domino': 'domino-multinode',
+ 'odl-sfc': 'odl-sfc',
+ 'onos_sfc': 'onos_sfc',
+ 'parser':'parser-basics'
}
try:
return test_match_matrix[self.name]
diff --git a/utils/test/result_collection_api/update/README.md b/utils/test/result_collection_api/update/README.md
index d3aef7efe..cb0e67b33 100644
--- a/utils/test/result_collection_api/update/README.md
+++ b/utils/test/result_collection_api/update/README.md
@@ -79,26 +79,21 @@ install ansible, please refer:
```
http://docs.ansible.com/ansible/intro_installation.html
```
-run update.yml
+
+playbook-update.sh
+
arguments:
-: host: remote server, must provide
-user: user used to access to remote server, default to root
-port: exposed port used to access to testapi, default to 8000
-image: testapi's docker image, default to opnfv/testapi:latest
-update_path: templates directory in remote server, default to /tmp/testapi
-mongodb_url: url of mongodb, default to 172.17.0.1, docker0 ip
-swagger_url: swagger access url, default to http://host:port
+: -h|--help show this help text
+-r|--remote remote server
+-u|--user ssh username used to access to remote server
+-i|--identity ssh PublicKey file used to access to remote server
+-e|--execute execute update, if not set just check the ansible connectivity
usage:
```
-ansible-playbook update.yml --extra-vars "
-host=10.63.243.17
-user=zte
-port=8000
-image=opnfv/testapi
-update_path=/tmp/testapi
-mongodb_url=mongodb://172.17.0.1:27017
-swagger_url=http://10.63.243.17:8000"```
+ssh-agent ./playbook-update.sh -r testresults.opnfv.org -u serena -i ~/.ssh/id_rsa -e
+```
+
> **Note:**
> - If documents need to be changed, please modify file
diff --git a/utils/test/result_collection_api/update/playbook-update.sh b/utils/test/result_collection_api/update/playbook-update.sh
new file mode 100755
index 000000000..86d30e4b2
--- /dev/null
+++ b/utils/test/result_collection_api/update/playbook-update.sh
@@ -0,0 +1,90 @@
+#!/bin/bash
+
+#
+# Author: Serena Feng (feng.xiaoewi@zte.com.cn)
+# Update testapi on remote server using ansible playbook automatically
+#
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+set -e
+
+usage="Script to trigger update automatically.
+
+usage:
+ bash $(basename "$0") [-h|--help] [-h <host>] [-u username] [-i identityfile] [-e|--execute]
+
+where:
+ -h|--help show this help text
+ -r|--remote remote server
+ -u|--user ssh username used to access to remote server
+ -i|--identity ssh PublicKey file used to access to remote server
+ -e|--execute execute update, if not set just check the ansible connectivity"
+
+remote=testresults.opnfv.org
+user=root
+identity=~/.ssh/id_rsa
+hosts=./hosts
+execute=false
+
+# Parse parameters
+while [[ $# > 0 ]]
+ do
+ key="$1"
+ case $key in
+ -h|--help)
+ echo "$usage"
+ exit 0
+ shift
+ ;;
+ -r|--remote)
+ remote="$2"
+ shift
+ ;;
+ -u|--user)
+ user="$2"
+ shift
+ ;;
+ -i|--identity)
+ identity="$2"
+ shift
+ ;;
+ -e|--execute)
+ execute=true
+ ;;
+ *)
+ echo "unknown option"
+ exit 1
+ ;;
+ esac
+ shift # past argument or value
+done
+
+echo $remote > $hosts
+
+echo "add authentication"
+ssh-add $identity
+
+echo "test ansible connectivity"
+ansible -i ./hosts $remote -m ping -u $user
+
+echo "test playbook connectivity"
+ansible-playbook -i $hosts test.yml -e "host=$remote user=$user"
+
+if [ $execute == true ]; then
+ echo "do update"
+ ansible-playbook -i $hosts update.yml -e "host=$remote \
+ user=$user \
+ port=8082 \
+ image=opnfv/testapi \
+ update_path=/home/$user/testapi \
+ mongodb_url=mongodb://172.17.0.1:27017 \
+ swagger_url=http://testresults.opnfv.org/test"
+fi
+
+rm -fr $hosts
+ssh-agent -k
diff --git a/utils/test/result_collection_api/update/templates/rm_images.sh b/utils/test/result_collection_api/update/templates/rm_images.sh
new file mode 100755
index 000000000..6722573b4
--- /dev/null
+++ b/utils/test/result_collection_api/update/templates/rm_images.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+number=`docker images | awk 'NR != 1' | grep testapi | wc -l`
+if [ $number -gt 0 ]; then
+ images=`docker images -a | awk 'NR != 1' | grep testapi | awk '{print $1}'`
+ echo "begin to rm images $images"
+ docker images | awk 'NR != 1' | grep testapi | awk '{print $3}' | xargs docker rmi -f &>/dev/null
+fi
diff --git a/utils/test/result_collection_api/update/test.yml b/utils/test/result_collection_api/update/test.yml
new file mode 100644
index 000000000..a8868720d
--- /dev/null
+++ b/utils/test/result_collection_api/update/test.yml
@@ -0,0 +1,12 @@
+---
+- hosts: "{{ host }}"
+ remote_user: "{{ user }}"
+ become: yes
+ become_method: sudo
+ vars:
+ user: "root"
+ tasks:
+ - name: test connectivity
+ command: "echo hello {{ host }}"
+ register: result
+ - debug: msg="{{ result }}"
diff --git a/utils/test/result_collection_api/update/update.yml b/utils/test/result_collection_api/update/update.yml
index 08839564a..e6663d905 100644
--- a/utils/test/result_collection_api/update/update.yml
+++ b/utils/test/result_collection_api/update/update.yml
@@ -8,6 +8,7 @@
port: "8000"
update_path: "/tmp/testapi"
image: "opnfv/testapi"
+ mode: "pull"
mongodb_url: "mongodb://172.17.0.1:27017"
swagger_url: "http://{{ host }}:{{ port }}"
tasks:
@@ -19,6 +20,11 @@
copy:
src: templates/
dest: "{{ update_path }}"
+ - name: transfer Dockerfile
+ copy:
+ src: ../docker/Dockerfile
+ dest: "{{ update_path }}"
+ when: mode == "build"
- name: backup mongodb database
command: "python {{ update_path }}/backup_mongodb.py -u {{ mongodb_url }} -o {{ update_path }}"
- name: stop and remove old versions
@@ -26,10 +32,13 @@
register: rm_result
- debug: msg="{{ rm_result.stderr }}"
- name: delete old docker images
- command: docker rmi "{{ image }}"
+ command: bash "{{ update_path }}/rm_images.sh"
ignore_errors: true
- name: update mongodb
command: "python {{ update_path }}/update_mongodb.py -u {{ mongodb_url }}"
+ - name: docker build image
+ command: "docker build -t {{ image }} {{ update_path }}"
+ when: mode == "build"
- name: docker start testapi server
command: docker run -dti -p "{{ port }}:8000"
-e "mongodb_url={{ mongodb_url }}"