Diffstat (limited to 'jjb')
-rw-r--r--  jjb/3rd_party_ci/odl-netvirt.yml | 354
-rwxr-xr-x  jjb/apex/apex-build.sh | 6
-rwxr-xr-x  jjb/apex/apex-deploy.sh | 40
-rwxr-xr-x  jjb/apex/apex-download-artifact.sh | 21
-rw-r--r--  jjb/apex/apex-project-jobs.yml | 127
-rwxr-xr-x  jjb/apex/apex-unit-test.sh | 22
-rwxr-xr-x  jjb/apex/apex-upload-artifact.sh | 10
-rwxr-xr-x  jjb/apex/apex-workspace-cleanup.sh | 7
-rw-r--r--  jjb/apex/apex.yml | 754
-rw-r--r--  jjb/apex/apex.yml.j2 | 426
-rw-r--r--  jjb/apex/scenarios.yaml.hidden | 17
-rw-r--r--  jjb/apex/update-build-result.groovy | 5
-rw-r--r--  jjb/armband/armband-ci-jobs.yml | 453
-rwxr-xr-x  jjb/armband/armband-deploy.sh | 136
-rwxr-xr-x  jjb/armband/armband-download-artifact.sh | 73
-rw-r--r--  jjb/armband/armband-project-jobs.yml | 94
-rw-r--r--  jjb/armband/armband-verify-jobs.yml | 76
-rwxr-xr-x  jjb/armband/armband-workspace-cleanup.sh | 15
-rwxr-xr-x  jjb/armband/build.sh | 105
-rwxr-xr-x  jjb/armband/upload-artifacts.sh | 93
-rw-r--r--  jjb/auto/auto.yml | 56
-rw-r--r--  jjb/barometer/barometer.yml | 185
-rw-r--r--  jjb/bottlenecks/bottlenecks-ci-jobs.yml | 248
-rw-r--r--  jjb/bottlenecks/bottlenecks-cleanup.sh | 1
-rw-r--r--  jjb/bottlenecks/bottlenecks-project-jobs.yml | 260
-rw-r--r--  jjb/bottlenecks/bottlenecks-run-suite.sh | 178
-rw-r--r--  jjb/calipso/calipso.yml | 61
-rw-r--r--  jjb/ci_gate_security/anteater-report-to-gerrit.sh | 8
-rw-r--r--  jjb/ci_gate_security/anteater-security-audit-weekly.sh | 2
-rw-r--r--  jjb/ci_gate_security/anteater-security-audit.sh | 4
-rw-r--r--  jjb/ci_gate_security/opnfv-ci-gate-security.yml | 138
-rw-r--r--  jjb/compass4nfv/compass-ci-jobs.yml | 340
-rw-r--r--  jjb/compass4nfv/compass-deploy.sh | 34
-rw-r--r--  jjb/compass4nfv/compass-dovetail-jobs.yml | 2
-rw-r--r--  jjb/compass4nfv/compass-project-jobs.yml | 7
-rw-r--r--  jjb/compass4nfv/compass-verify-jobs.yml | 12
-rw-r--r--  jjb/compass4nfv/compass-workspace-cleanup.sh | 7
-rw-r--r--  jjb/container4nfv/container4nfv-project.yml (renamed from jjb/openretriever/openretriever-project.yml) | 6
-rw-r--r--  jjb/cperf/cperf-ci-jobs.yml | 264
-rw-r--r--  jjb/daisy4nfv/daisy-daily-jobs.yml | 27
-rwxr-xr-x  jjb/daisy4nfv/daisy-deploy.sh | 2
-rw-r--r--  jjb/daisy4nfv/daisy-project-jobs.yml | 13
-rw-r--r--  jjb/daisy4nfv/daisy4nfv-merge-jobs.yml | 7
-rw-r--r--  jjb/daisy4nfv/daisy4nfv-verify-jobs.yml | 8
-rwxr-xr-x  jjb/daisy4nfv/daisy4nfv-workspace-cleanup.sh | 16
-rw-r--r--  jjb/doctor/doctor.yml | 32
-rw-r--r--  jjb/dovetail/dovetail-ci-jobs.yml | 56
-rwxr-xr-x  jjb/dovetail/dovetail-cleanup.sh | 24
-rwxr-xr-x  jjb/dovetail/dovetail-run.sh | 66
-rw-r--r--  jjb/dovetail/dovetail-weekly-jobs.yml | 1
-rw-r--r--  jjb/dpacc/dpacc.yml | 77
-rwxr-xr-x  jjb/fuel/fuel-build.sh | 109
-rw-r--r--  jjb/fuel/fuel-daily-jobs.yml | 623
-rwxr-xr-x  jjb/fuel/fuel-deploy.sh | 123
-rwxr-xr-x  jjb/fuel/fuel-download-artifact.sh | 43
-rw-r--r--  jjb/fuel/fuel-project-jobs.yml | 220
-rwxr-xr-x  jjb/fuel/fuel-upload-artifact.sh | 118
-rw-r--r--  jjb/fuel/fuel-verify-jobs.yml | 80
-rw-r--r--  jjb/fuel/fuel-weekly-jobs.yml | 45
-rwxr-xr-x  jjb/fuel/fuel-workspace-cleanup.sh | 15
-rwxr-xr-x  jjb/functest/functest-alpine.sh | 84
-rw-r--r--  jjb/functest/functest-daily-jobs.yml | 114
-rwxr-xr-x  jjb/functest/functest-loop.sh | 6
-rw-r--r--  jjb/functest/functest-project-jobs.yml | 185
-rwxr-xr-x  jjb/functest/functest-suite.sh | 6
-rwxr-xr-x  jjb/functest/set-functest-env.sh | 41
-rw-r--r--  jjb/global/installer-params.yml | 234
-rw-r--r--  jjb/global/releng-defaults.yml | 12
-rw-r--r--  jjb/global/releng-macros.yml | 705
-rw-r--r--  jjb/global/slave-params.yml | 1655
-rw-r--r--  jjb/joid/joid-daily-jobs.yml | 589
-rw-r--r--  jjb/joid/joid-deploy.sh | 3
-rw-r--r--  jjb/joid/joid-verify-jobs.yml | 293
-rw-r--r--  jjb/kvmfornfv/kvmfornfv.yml | 20
-rwxr-xr-x  jjb/multisite/fuel-deploy-for-multisite.sh | 124
-rw-r--r--  jjb/multisite/multisite-daily-jobs.yml | 305
-rw-r--r--  jjb/nfvbench/nfvbench.yml | 93
-rw-r--r--  jjb/opera/opera-daily-jobs.yml | 128
-rw-r--r--  jjb/opera/opera-project-jobs.yml | 43
-rw-r--r--  jjb/opera/opera-verify-jobs.yml | 198
-rw-r--r--  jjb/opnfvdocs/docs-rtd.yaml | 118
-rw-r--r--  jjb/opnfvdocs/opnfvdocs.yml | 167
-rw-r--r--  jjb/opnfvdocs/project.cfg | 2
-rw-r--r--  jjb/orchestra/orchestra-daily-jobs.yml | 99
-rw-r--r--  jjb/orchestra/orchestra-project-jobs.yml | 49
-rw-r--r--  jjb/ovn4nfv/ovn4nfv-daily-jobs.yml | 87
-rw-r--r--  jjb/ovn4nfv/ovn4nfv-project-jobs.yml | 52
-rw-r--r--  jjb/ovsnfv/ovsnfv.yml | 1
-rw-r--r--  jjb/qtip/helpers/cleanup-deploy.sh | 21
-rw-r--r--  jjb/qtip/helpers/validate-deploy.sh | 40
-rw-r--r--  jjb/qtip/helpers/validate-setup.sh | 24
-rw-r--r--  jjb/qtip/qtip-experimental-jobs.yml | 44
-rw-r--r--  jjb/qtip/qtip-validate-jobs.yml | 77
-rw-r--r--  jjb/qtip/qtip-verify-jobs.yml | 118
-rw-r--r--  jjb/releng/automate.yml | 35
-rw-r--r--  jjb/releng/docker-deploy.sh | 157
-rwxr-xr-x  jjb/releng/generate-job-list.sh | 70
-rw-r--r--  jjb/releng/opnfv-docker-arm.yml | 14
-rw-r--r--  jjb/releng/opnfv-docker.sh | 2
-rw-r--r--  jjb/releng/opnfv-docker.yml | 62
-rw-r--r--  jjb/releng/opnfv-lint.yml | 4
-rw-r--r--  jjb/releng/opnfv-repo-archiver.sh | 66
-rw-r--r--  jjb/releng/opnfv-utils.yml | 50
-rw-r--r--  jjb/releng/releng-ci-jobs.yml | 23
-rw-r--r--  jjb/securedlab/check-jinja2.yml | 6
-rw-r--r--  jjb/sfc/sfc-project-jobs.yml (renamed from jjb/multisite/multisite-verify-jobs.yml) | 29
-rw-r--r--  jjb/snaps/snaps-verify-jobs.yml (renamed from jjb/snaps/snaps.yml) | 21
-rw-r--r--  jjb/storperf/storperf-daily-jobs.yml | 175
-rw-r--r--  jjb/storperf/storperf-verify-jobs.yml | 190
-rw-r--r--  jjb/storperf/storperf.yml | 136
-rw-r--r--  jjb/test-requirements.txt | 1
-rw-r--r--  jjb/xci/bifrost-cleanup-job.yml | 9
-rw-r--r--  jjb/xci/bifrost-periodic-jobs.yml | 4
-rwxr-xr-x  jjb/xci/bifrost-provision.sh | 10
-rw-r--r--  jjb/xci/bifrost-verify-jobs.yml | 15
-rwxr-xr-x  jjb/xci/bifrost-verify.sh | 20
-rw-r--r--  jjb/xci/osa-periodic-jobs.yml | 260
-rw-r--r--  jjb/xci/xci-daily-jobs.yml | 40
-rwxr-xr-x  jjb/xci/xci-deploy.sh | 4
-rwxr-xr-x  jjb/xci/xci-functest.sh | 3
-rw-r--r--  jjb/xci/xci-verify-jobs.yml | 236
-rw-r--r--  jjb/yardstick/yardstick-daily-jobs.yml | 132
-rwxr-xr-x  jjb/yardstick/yardstick-daily.sh | 37
-rwxr-xr-x  jjb/yardstick/yardstick-get-k8s-conf.sh | 8
124 files changed, 7272 insertions, 6636 deletions
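
Much of the churn below is mechanical: the JJB YAML under jjb/ gains '---' document-start markers, sequence items are re-indented under their keys, and over-long commented lines are wrapped in 'yamllint disable rule:line-length' guards. A minimal way to check such a tree locally is sketched here, assuming yamllint and jenkins-job-builder are installed; the output path and flags are illustrative, not the project's own CI invocation:

# Install the lint/render tooling locally (sketch)
pip install --user yamllint jenkins-job-builder

# Style check that the re-indentation is aimed at
yamllint jjb/

# Expand the job templates to Jenkins XML without touching a live Jenkins
jenkins-jobs test -r jjb/ -o /tmp/jjb-xml
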
diff --git a/jjb/3rd_party_ci/odl-netvirt.yml b/jjb/3rd_party_ci/odl-netvirt.yml
index a937acbed..01017f307 100644
--- a/jjb/3rd_party_ci/odl-netvirt.yml
+++ b/jjb/3rd_party_ci/odl-netvirt.yml
@@ -1,37 +1,39 @@
+---
- project:
name: 'netvirt'
project: 'netvirt'
installer: 'netvirt'
-#####################################
-# branch definitions
-#####################################
+ #####################################
+ # branch definitions
+ #####################################
stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - carbon:
- branch: 'stable/carbon'
- gs-pathname: ''
- disabled: false
-#####################################
-# patch verification phases
-#####################################
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+ - carbon:
+ branch: 'stable/carbon'
+ gs-pathname: ''
+ disabled: false
+ #####################################
+ # patch verification phases
+ #####################################
phase:
- - 'create-apex-vms':
- slave-label: 'odl-netvirt-virtual-intel'
- - 'install-netvirt':
- slave-label: 'odl-netvirt-virtual-intel'
- - 'postprocess':
- slave-label: 'odl-netvirt-virtual-intel'
-#####################################
-# jobs
-#####################################
+ - 'create-apex-vms':
+ slave-label: 'odl-netvirt-virtual-intel'
+ - 'install-netvirt':
+ slave-label: 'odl-netvirt-virtual-intel'
+ - 'postprocess':
+ slave-label: 'odl-netvirt-virtual-intel'
+ #####################################
+ # jobs
+ #####################################
jobs:
- - 'odl-netvirt-verify-virtual-{stream}'
- - 'odl-netvirt-verify-virtual-{phase}-{stream}'
+ - 'odl-netvirt-verify-virtual-{stream}'
+ - 'odl-netvirt-verify-virtual-{phase}-{stream}'
+
#####################################
# job templates
#####################################
@@ -45,116 +47,118 @@
concurrent: true
properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 5
- max-per-node: 1
- option: 'project'
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 5
+ max-per-node: 1
+ option: 'project'
scm:
- - git:
- url: https://gerrit.opnfv.org/gerrit/apex
- branches:
- - 'origin/master'
- timeout: 15
- wipe-workspace: true
+ - git:
+ url: https://gerrit.opnfv.org/gerrit/apex
+ branches:
+ - 'origin/master'
+ timeout: 15
+ wipe-workspace: true
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: NETVIRT_ARTIFACT
- default: distribution-karaf.tar.gz
- - 'odl-netvirt-virtual-intel-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: NETVIRT_ARTIFACT
+ default: distribution-karaf.tar.gz
+ - 'odl-netvirt-virtual-intel-defaults'
triggers:
- - gerrit:
- server-name: 'git.opendaylight.org'
- trigger-on:
- # - comment-added-contains-event:
- # comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : SUCCESS'
- # - comment-added-contains-event:
- # comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : UNSTABLE'
- - comment-added-contains-event:
- comment-contains-value: 'opnfv-test'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- readable-message: true
+ - gerrit:
+ server-name: 'git.opendaylight.org'
+ trigger-on:
+ # yamllint disable rule:line-length
+ # - comment-added-contains-event:
+ # comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : SUCCESS'
+ # - comment-added-contains-event:
+ # comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : UNSTABLE'
+ # yamllint enable rule:line-length
+ - comment-added-contains-event:
+ comment-contains-value: 'opnfv-test'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ readable-message: true
builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: create-apex-vms
- condition: SUCCESSFUL
- projects:
- - name: 'odl-netvirt-verify-virtual-create-apex-vms-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
- GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER
- GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
- NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
- APEX_ENV_NUMBER=$APEX_ENV_NUMBER
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: install-netvirt
- condition: SUCCESSFUL
- projects:
- - name: 'odl-netvirt-verify-virtual-install-netvirt-{stream}'
- current-parameters: false
- predefined-parameters: |
- ODL_BRANCH={branch}
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
- GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER
- GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
- NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: functest
- condition: SUCCESSFUL
- projects:
- - name: 'functest-netvirt-virtual-suite-master'
- predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
- FUNCTEST_SUITE_NAME=odl_netvirt
- RC_FILE_PATH=$HOME/cloner-info/overcloudrc
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: false
- - multijob:
- name: postprocess
- condition: ALWAYS
- projects:
- - name: 'odl-netvirt-verify-virtual-postprocess-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
- GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER
- GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
- NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: false
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - multijob:
+ name: create-apex-vms
+ condition: SUCCESSFUL
+ projects:
+ - name: 'odl-netvirt-verify-virtual-create-apex-vms-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ BRANCH=$BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
+ GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER
+ GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
+ NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
+ APEX_ENV_NUMBER=$APEX_ENV_NUMBER
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: install-netvirt
+ condition: SUCCESSFUL
+ projects:
+ - name: 'odl-netvirt-verify-virtual-install-netvirt-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ ODL_BRANCH={branch}
+ BRANCH=$BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
+ GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER
+ GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
+ NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: functest
+ condition: SUCCESSFUL
+ projects:
+ - name: 'functest-netvirt-virtual-suite-master'
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
+ FUNCTEST_SUITE_NAME=odl_netvirt
+ RC_FILE_PATH=$HOME/cloner-info/overcloudrc
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: false
+ - multijob:
+ name: postprocess
+ condition: ALWAYS
+ projects:
+ - name: 'odl-netvirt-verify-virtual-postprocess-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ BRANCH=$BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
+ GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER
+ GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
+ NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: false
- job-template:
name: 'odl-netvirt-verify-virtual-{phase}-{stream}'
@@ -164,71 +168,71 @@
concurrent: true
properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 5
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'odl-netvirt-verify-virtual-create-apex-vms-.*'
- - 'odl-netvirt-verify-virtual-install-netvirt-.*'
- - 'functest-netvirt-virtual-suite-.*'
- - 'odl-netvirt-verify-virtual-postprocess-.*'
- block-level: 'NODE'
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 5
+ max-per-node: 1
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'odl-netvirt-verify-virtual-create-apex-vms-.*'
+ - 'odl-netvirt-verify-virtual-install-netvirt-.*'
+ - 'functest-netvirt-virtual-suite-.*'
+ - 'odl-netvirt-verify-virtual-postprocess-.*'
+ block-level: 'NODE'
wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
+ - ssh-agent-wrapper
+ - timeout:
+ timeout: 360
+ fail: true
scm:
- - git:
- url: https://gerrit.opnfv.org/gerrit/apex
- branches:
- - 'origin/master'
- timeout: 15
- wipe-workspace: true
+ - git:
+ url: https://gerrit.opnfv.org/gerrit/apex
+ branches:
+ - 'origin/master'
+ timeout: 15
+ wipe-workspace: true
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-defaults'
- - '{installer}-defaults'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl_l2-bgpvpn-noha'
- description: 'Scenario to deploy and test'
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/apex
- description: "URL to Google Storage with snapshot artifacts."
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - '{slave-label}-defaults'
+ - '{installer}-defaults'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-odl_l2-bgpvpn-noha'
+ description: 'Scenario to deploy and test'
+ - string:
+ name: GS_URL
+ default: artifacts.opnfv.org/apex
+ description: "URL to Google Storage with snapshot artifacts."
builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-verify-{phase}-builder'
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - '{project}-verify-{phase}-builder'
#####################################
# builder macros
#####################################
- builder:
name: 'netvirt-verify-create-apex-vms-builder'
builders:
- - shell:
- !include-raw: ../apex/apex-snapshot-deploy.sh
+ - shell:
+ !include-raw: ../apex/apex-snapshot-deploy.sh
- builder:
name: 'netvirt-verify-install-netvirt-builder'
builders:
- - shell:
- !include-raw: ./download-netvirt-artifact.sh
- - shell:
- !include-raw: ./install-netvirt.sh
+ - shell:
+ !include-raw: ./download-netvirt-artifact.sh
+ - shell:
+ !include-raw: ./install-netvirt.sh
- builder:
name: 'netvirt-verify-postprocess-builder'
builders:
- - shell:
- !include-raw: ./postprocess-netvirt.sh
+ - shell:
+ !include-raw: ./postprocess-netvirt.sh
diff --git a/jjb/apex/apex-build.sh b/jjb/apex/apex-build.sh
index 58d9f1a40..ad94ba3d7 100755
--- a/jjb/apex/apex-build.sh
+++ b/jjb/apex/apex-build.sh
@@ -28,8 +28,10 @@ fi
BUILD_DIRECTORY=${WORKSPACE}/build
# start the build
-cd $WORKSPACE/ci
-./build.sh $BUILD_ARGS
+pushd ${BUILD_DIRECTORY}
+make clean
+popd
+python3 apex/build.py $BUILD_ARGS
RPM_VERSION=$(grep Version: $WORKSPACE/build/rpm_specs/opnfv-apex.spec | awk '{ print $2 }')-$(echo $OPNFV_ARTIFACT_VERSION | tr -d '_-')
# list the contents of BUILD_OUTPUT directory
echo "Build Directory is ${BUILD_DIRECTORY}/../.build"
diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh
index 3a2ca606b..a47e3a5db 100755
--- a/jjb/apex/apex-deploy.sh
+++ b/jjb/apex/apex-deploy.sh
@@ -3,7 +3,6 @@ set -o errexit
set -o nounset
set -o pipefail
-APEX_PKGS="common undercloud onos"
IPV6_FLAG=False
# log info to console
@@ -11,6 +10,8 @@ echo "Starting the Apex deployment."
echo "--------------------------------------------------------"
echo
+sudo rm -rf /tmp/tmp*
+
if [ -z "$DEPLOY_SCENARIO" ]; then
echo "Deploy scenario not set!"
exit 1
@@ -36,7 +37,7 @@ if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
# Settings for deploying from git workspace
DEPLOY_SETTINGS_DIR="${WORKSPACE}/config/deploy"
NETWORK_SETTINGS_DIR="${WORKSPACE}/config/network"
- DEPLOY_CMD="${WORKSPACE}/ci/deploy.sh"
+ DEPLOY_CMD="opnfv-deploy --image-dir ${WORKSPACE}/.build"
CLEAN_CMD="${WORKSPACE}/ci/clean.sh"
RESOURCES="${WORKSPACE}/.build/"
CONFIG="${WORKSPACE}/build"
@@ -47,6 +48,11 @@ if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
# Ensure artifacts were downloaded and extracted correctly
# TODO(trozet) add verification here
+ # Install dev build
+ mkdir -p ~/tmp
+ mv -f .build ~/tmp/
+ sudo pip3 install --upgrade --force-reinstall .
+ mv -f ~/tmp/.build .
else
DEPLOY_SETTINGS_DIR="/etc/opnfv-apex/"
NETWORK_SETTINGS_DIR="/etc/opnfv-apex/"
@@ -57,12 +63,17 @@ else
BASE=$CONFIG
IMAGES=$RESOURCES
LIB="/var/opt/opnfv/lib"
-
+ sudo mkdir -p /var/log/apex
+ sudo chmod 777 /var/log/apex
+ cd /var/log/apex
fi
# Install Dependencies
# Make sure python34 dependencies are installed
-for dep_pkg in epel-release python34 python34-PyYAML python34-setuptools; do
+dependencies="epel-release python34 python34-devel libvirt-devel python34-pip \
+ansible python34-PyYAML python34-jinja2 python34-setuptools python-tox ansible"
+
+for dep_pkg in $dependencies; do
if ! rpm -q ${dep_pkg} > /dev/null; then
if ! sudo yum install -y ${dep_pkg}; then
echo "Failed to install ${dep_pkg}"
@@ -71,31 +82,12 @@ for dep_pkg in epel-release python34 python34-PyYAML python34-setuptools; do
fi
done
-# Make sure jinja2 is installed
-for python_pkg in jinja2; do
- if ! python3.4 -c "import $python_pkg"; then
- echo "$python_pkg package not found for python3.4, attempting to install..."
- if ! sudo easy_install-3.4 $python_pkg; then
- echo -e "Failed to install $python_pkg package for python3.4"
- exit 1
- fi
- fi
-done
-
if [[ "$JOB_NAME" =~ "virtual" ]]; then
# Make sure ipxe-roms-qemu package is updated to latest.
# This package is needed for multi virtio nic PXE boot in virtual environment.
sudo yum update -y ipxe-roms-qemu
- if [ -z ${PYTHONPATH:-} ]; then
- export PYTHONPATH=${WORKSPACE}/lib/python
- else
- export PYTHONPATH=$PYTHONPATH:${WORKSPACE}/lib/python
- fi
fi
-# set env vars to deploy cmd
-DEPLOY_CMD="BASE=${BASE} IMAGES=${IMAGES} LIB=${LIB} ${DEPLOY_CMD}"
-
if [ "$OPNFV_CLEAN" == 'yes' ]; then
if sudo test -e '/root/inventory/pod_settings.yaml'; then
clean_opts='-i /root/inventory/pod_settings.yaml'
@@ -103,7 +95,7 @@ if [ "$OPNFV_CLEAN" == 'yes' ]; then
clean_opts=''
fi
- sudo BASE=${BASE} LIB=${LIB} ${CLEAN_CMD} ${clean_opts}
+ sudo ${CLEAN_CMD} ${clean_opts}
fi
if echo ${DEPLOY_SCENARIO} | grep ipv6; then
diff --git a/jjb/apex/apex-download-artifact.sh b/jjb/apex/apex-download-artifact.sh
index 52c3c67ec..a11fb65b8 100755
--- a/jjb/apex/apex-download-artifact.sh
+++ b/jjb/apex/apex-download-artifact.sh
@@ -3,8 +3,6 @@ set -o errexit
set -o nounset
set -o pipefail
-APEX_PKGS="common undercloud onos"
-
# log info to console
echo "Downloading the Apex artifact. This could take some time..."
echo "--------------------------------------------------------"
@@ -23,7 +21,7 @@ if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
tar -xvf apex-${OPNFV_ARTIFACT_VERSION}.tar.gz
popd > /dev/null
else
- echo "Will download RPMs..."
+ echo "Will use RPMs..."
# Must be RPMs/ISO
echo "Downloading latest properties file"
@@ -35,14 +33,13 @@ else
source $BUILD_DIRECTORY/opnfv.properties
RPM_INSTALL_PATH=$(echo "http://"$OPNFV_RPM_URL | sed 's/\/'"$(basename $OPNFV_RPM_URL)"'//')
- RPM_LIST=${RPM_INSTALL_PATH}/$(basename $OPNFV_RPM_URL)
+ RPM_LIST=$(basename $OPNFV_RPM_URL)
# find version of RPM
VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-([0-9]{8}|[a-z]+-[0-9]\.[0-9]+)')
# build RPM List which already includes base Apex RPM
- for pkg in ${APEX_PKGS}; do
- RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}.noarch.rpm"
- done
+ RPM_LIST+=" opnfv-apex-undercloud-${VERSION_EXTENSION}.noarch.rpm"
+ RPM_LIST+=" python34-opnfv-apex-${VERSION_EXTENSION}.noarch.rpm"
# remove old / install new RPMs
if rpm -q opnfv-apex > /dev/null; then
@@ -51,10 +48,20 @@ else
sudo yum remove -y ${INSTALLED_RPMS}
fi
fi
+ # Create an rpms dir on slave
+ mkdir -p ~/apex_rpms
+ pushd ~/apex_rpms
+ # Remove older rpms which do not match this version
+ find . ! -name "*${VERSION_EXTENSION}.noarch.rpm" -type f -exec rm -f {} +
+ # Download RPM only if changed on server
+ for rpm in $RPM_LIST; do
+ wget -N ${RPM_INSTALL_PATH}/${rpm}
+ done
if ! sudo yum install -y $RPM_LIST; then
echo "Unable to install new RPMs: $RPM_LIST"
exit 1
fi
+ popd
fi
# TODO: Uncomment these lines to verify SHA512SUMs once the sums are
diff --git a/jjb/apex/apex-project-jobs.yml b/jjb/apex/apex-project-jobs.yml
new file mode 100644
index 000000000..973ad913a
--- /dev/null
+++ b/jjb/apex/apex-project-jobs.yml
@@ -0,0 +1,127 @@
+---
+- project:
+ name: 'apex-project-jobs'
+ project: 'apex'
+
+ stream:
+ - master: &master
+ branch: 'master'
+ gs-pathname: ''
+ concurrent-builds: 3
+ disabled: false
+
+ - danube: &danube
+ branch: 'stable/danube'
+ gs-pathname: '/danube'
+ concurrent-builds: 1
+ disabled: true
+
+ jobs:
+ - 'apex-build-{stream}'
+ - 'apex-verify-iso-{stream}'
+
+# Build phase
+- job-template:
+ name: 'apex-build-{stream}'
+
+ # Job template for builds
+ #
+ # Required Variables:
+ # stream: branch with - in place of / (eg. stable)
+ # branch: branch (eg. stable)
+ node: 'apex-build-{stream}'
+
+ disabled: false
+
+ concurrent: true
+
+ parameters:
+ - '{project}-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}'
+
+ scm:
+ - git-scm-gerrit
+
+ wrappers:
+ - timeout:
+ timeout: 150
+ fail: true
+
+ properties:
+ - logrotate-default
+ - throttle:
+ max-per-node: '{concurrent-builds}'
+ max-total: 10
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify-iso-{stream}'
+
+ builders:
+ - 'apex-build'
+ - inject:
+ properties-content: ARTIFACT_TYPE=rpm
+ - 'apex-upload-artifact'
+
+# ISO verify job
+- job-template:
+ name: 'apex-verify-iso-{stream}'
+
+ # Job template for builds
+ #
+ # Required Variables:
+ # stream: branch with - in place of / (eg. stable)
+ # branch: branch (eg. stable)
+ node: 'apex-virtual-{stream}'
+
+ disabled: false
+
+ concurrent: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: "Used for overriding the GIT URL coming from parameters macro."
+
+ scm:
+ - git-scm
+
+ properties:
+ - logrotate-default
+ - throttle:
+ max-per-node: 1
+ max-total: 10
+ option: 'project'
+
+ builders:
+ - 'apex-iso-verify'
+ - inject:
+ properties-content: ARTIFACT_TYPE=iso
+ - 'apex-upload-artifact'
+
+########################
+# builder macros
+########################
+- builder:
+ name: 'apex-build'
+ builders:
+ - shell:
+ !include-raw: ./apex-build.sh
+
+- builder:
+ name: 'apex-iso-verify'
+ builders:
+ - shell:
+ !include-raw: ./apex-iso-verify.sh
diff --git a/jjb/apex/apex-unit-test.sh b/jjb/apex/apex-unit-test.sh
index 12cb862b0..3112c9d36 100755
--- a/jjb/apex/apex-unit-test.sh
+++ b/jjb/apex/apex-unit-test.sh
@@ -8,10 +8,24 @@ echo "--------------------------------------------------------------------------
echo
-pushd ci/ > /dev/null
-sudo BASE="${WORKSPACE}/build" LIB="${WORKSPACE}/lib" ./clean.sh
-./test.sh
-popd
+pushd build/ > /dev/null
+for pkg in yamllint rpmlint iproute epel-release python34-devel python34-nose python34-PyYAML python-pep8 python34-mock python34-pip; do
+ if ! rpm -q ${pkg} > /dev/null; then
+ if ! sudo yum install -y ${pkg}; then
+ echo "Failed to install ${pkg} package..."
+ exit 1
+ fi
+ fi
+done
+
+# Make sure coverage is installed
+if ! python3 -c "import coverage" &> /dev/null; then sudo pip3 install coverage; fi
+
+make rpmlint
+make python-pep8-check
+make yamllint
+make python-tests
+popd > /dev/null
echo "--------------------------------------------------------"
echo "Unit Tests Done!"
diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh
index f53451d41..4037d25ad 100755
--- a/jjb/apex/apex-upload-artifact.sh
+++ b/jjb/apex/apex-upload-artifact.sh
@@ -126,15 +126,13 @@ elif [ "$ARTIFACT_TYPE" == 'rpm' ]; then
RPM_INSTALL_PATH=$BUILD_DIRECTORY/noarch
RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//')
- for pkg in common undercloud onos; do
- RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
- done
+ RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}"
+ RPM_LIST+=" ${RPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}"
SRPM_INSTALL_PATH=$BUILD_DIRECTORY
SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL)
VERSION_EXTENSION=$(echo $(basename $OPNFV_SRPM_URL) | sed 's/opnfv-apex-//')
- for pkg in common undercloud onos; do
- SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}"
- done
+ SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}"
+ SRPM_LIST+=" ${SRPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}"
if [[ -n "$SIGN_ARTIFACT" && "$SIGN_ARTIFACT" == "true" ]]; then
signrpm
diff --git a/jjb/apex/apex-workspace-cleanup.sh b/jjb/apex/apex-workspace-cleanup.sh
deleted file mode 100755
index d2f71a562..000000000
--- a/jjb/apex/apex-workspace-cleanup.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# delete everything that is in $WORKSPACE
-sudo /bin/rm -rf $WORKSPACE
diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index a395cf2e5..c71bee1d0 100644
--- a/jjb/apex/apex.yml
+++ b/jjb/apex/apex.yml
@@ -6,37 +6,133 @@
- 'apex-verify-gate-{stream}'
- 'apex-verify-unit-tests-{stream}'
- 'apex-runner-cperf-{stream}'
- - 'apex-build-{stream}'
- 'apex-deploy-{platform}-{stream}'
- 'apex-daily-master'
- 'apex-daily-danube'
- 'apex-csit-promote-daily-{stream}'
- 'apex-fdio-promote-daily-{stream}'
- - 'apex-verify-iso-{stream}'
- - 'apex-run-deploy-test-baremetal-{stream}'
+ - 'apex-{scenario}-baremetal-{scenario_stream}'
+ - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
- 'apex-upload-snapshot'
- 'apex-create-snapshot'
+ - 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
# stream: branch with - in place of / (eg. stable-arno)
# branch: branch (eg. stable/arno)
stream:
- - master:
+ - master: &master
branch: 'master'
gs-pathname: ''
build-slave: 'apex-build-master'
virtual-slave: 'apex-virtual-master'
baremetal-slave: 'apex-baremetal-master'
verify-scenario: 'os-odl-nofeature-ha'
- concurrent-builds: 3
+ scenario_stream: 'master'
- - danube:
+ - danube: &danube
branch: 'stable/danube'
gs-pathname: '/danube'
build-slave: 'apex-build-danube'
virtual-slave: 'apex-virtual-danube'
baremetal-slave: 'apex-baremetal-danube'
verify-scenario: 'os-odl_l3-nofeature-ha'
- concurrent-builds: 1
- disabled: false
+ scenario_stream: 'danube'
+ disabled: true
+
+ scenario:
+ - 'os-nosdn-nofeature-noha':
+ <<: *danube
+ - 'os-nosdn-nofeature-ha':
+ <<: *danube
+ - 'os-nosdn-nofeature-ha-ipv6':
+ <<: *danube
+ - 'os-nosdn-ovs-noha':
+ <<: *danube
+ - 'os-nosdn-ovs-ha':
+ <<: *danube
+ - 'os-nosdn-fdio-noha':
+ <<: *danube
+ - 'os-nosdn-fdio-ha':
+ <<: *danube
+ - 'os-nosdn-kvm-ha':
+ <<: *danube
+ - 'os-nosdn-kvm-noha':
+ <<: *danube
+ - 'os-odl_l2-fdio-noha':
+ <<: *danube
+ - 'os-odl_l2-fdio-ha':
+ <<: *danube
+ - 'os-odl_netvirt-fdio-noha':
+ <<: *danube
+ - 'os-odl_l2-sfc-noha':
+ <<: *danube
+ - 'os-odl_l3-nofeature-noha':
+ <<: *danube
+ - 'os-odl_l3-nofeature-ha':
+ <<: *danube
+ - 'os-odl_l3-ovs-noha':
+ <<: *danube
+ - 'os-odl_l3-ovs-ha':
+ <<: *danube
+ - 'os-odl-bgpvpn-ha':
+ <<: *danube
+ - 'os-odl-gluon-noha':
+ <<: *danube
+ - 'os-odl_l3-fdio-noha':
+ <<: *danube
+ - 'os-odl_l3-fdio-ha':
+ <<: *danube
+ - 'os-odl_l3-fdio_dvr-noha':
+ <<: *danube
+ - 'os-odl_l3-fdio_dvr-ha':
+ <<: *danube
+ - 'os-odl_l3-csit-noha':
+ <<: *danube
+ - 'os-onos-nofeature-ha':
+ <<: *danube
+ - 'os-ovn-nofeature-noha':
+ <<: *danube
+ - 'os-nosdn-nofeature-noha':
+ <<: *master
+ - 'os-nosdn-nofeature-ha':
+ <<: *master
+ - 'os-odl-nofeature-ha':
+ <<: *master
+ - 'os-odl-nofeature-noha':
+ <<: *master
+ - 'os-odl-bgpvpn-ha':
+ <<: *master
+ - 'os-ovn-nofeature-noha':
+ <<: *master
+ - 'os-nosdn-fdio-noha':
+ <<: *master
+ - 'os-nosdn-fdio-ha':
+ <<: *master
+ - 'os-odl-fdio-noha':
+ <<: *master
+ - 'os-odl-fdio-ha':
+ <<: *master
+ - 'os-nosdn-bar-ha':
+ <<: *master
+ - 'os-nosdn-bar-noha':
+ <<: *master
+ - 'os-nosdn-nofeature-ha-ipv6':
+ <<: *master
+ - 'os-nosdn-ovs_dpdk-noha':
+ <<: *master
+ - 'os-nosdn-ovs_dpdk-ha':
+ <<: *master
+ - 'os-nosdn-kvm_ovs_dpdk-noha':
+ <<: *master
+ - 'os-nosdn-kvm_ovs_dpdk-ha':
+ <<: *master
+ - 'os-odl-sfc-noha':
+ <<: *master
+ - 'os-odl-sfc-ha':
+ <<: *master
+ - 'os-odl-fdio-dvr-noha':
+ <<: *master
+ - 'os-odl-fdio-dvr-ha':
+ <<: *master
platform:
- 'baremetal'
@@ -79,9 +175,37 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'false'
file-paths:
- compare-type: ANT
- pattern: 'tests/**'
+ pattern: 'apex/tests/**'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: '*'
+ - compare-type: ANT
+ pattern: 'apex/*'
+ - compare-type: ANT
+ pattern: 'build/**'
+ - compare-type: ANT
+ pattern: 'lib/**'
+ - compare-type: ANT
+ pattern: 'config/**'
+ - compare-type: ANT
+ pattern: 'apex/build/**'
+ - compare-type: ANT
+ pattern: 'apex/common/**'
+ - compare-type: ANT
+ pattern: 'apex/inventory/**'
+ - compare-type: ANT
+ pattern: 'apex/network/**'
+ - compare-type: ANT
+ pattern: 'apex/overcloud/**'
+ - compare-type: ANT
+ pattern: 'apex/settings/**'
+ - compare-type: ANT
+ pattern: 'apex/undercloud/**'
+ - compare-type: ANT
+ pattern: 'apex/virtual/**'
properties:
- logrotate-default
- throttle:
@@ -135,20 +259,49 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
file-paths:
- compare-type: ANT
- pattern: 'ci/**'
+ pattern: '*'
+ - compare-type: ANT
+ pattern: 'apex/*'
- compare-type: ANT
pattern: 'build/**'
- compare-type: ANT
pattern: 'lib/**'
- compare-type: ANT
pattern: 'config/**'
+ - compare-type: ANT
+ pattern: 'apex/build/**'
+ - compare-type: ANT
+ pattern: 'apex/common/**'
+ - compare-type: ANT
+ pattern: 'apex/inventory/**'
+ - compare-type: ANT
+ pattern: 'apex/network/**'
+ - compare-type: ANT
+ pattern: 'apex/overcloud/**'
+ - compare-type: ANT
+ pattern: 'apex/settings/**'
+ - compare-type: ANT
+ pattern: 'apex/undercloud/**'
+ - compare-type: ANT
+ pattern: 'apex/virtual/**'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'apex/tests/**'
+ - compare-type: ANT
+ pattern: 'docs/**'
properties:
- logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify.*'
- throttle:
- max-per-node: 3
+ max-per-node: 1
max-total: 10
option: 'project'
@@ -271,11 +424,18 @@
pattern: 'lib/**'
- compare-type: ANT
pattern: 'config/**'
+ - compare-type: ANT
+ pattern: 'apex/**'
properties:
- logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify.*'
- throttle:
- max-per-node: 3
+ max-per-node: 1
max-total: 10
option: 'project'
@@ -299,6 +459,10 @@
kill-phase-on: FAILURE
abort-all-job: true
git-revision: true
+ - shell: |
+ echo DEPLOY_SCENARIO=$(echo $GERRIT_EVENT_COMMENT_TEXT | grep start-gate-scenario | grep -Eo 'os-.*') > detected_scenario
+ - inject:
+ properties-file: detected_scenario
- multijob:
name: functest-smoke
condition: SUCCESSFUL
@@ -306,7 +470,7 @@
- name: 'functest-apex-virtual-suite-{stream}'
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO={verify-scenario}
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
FUNCTEST_SUITE_NAME=healthcheck
GERRIT_BRANCH=$GERRIT_BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
@@ -383,121 +547,26 @@
abort-all-job: false
git-revision: false
-# Build phase
-- job-template:
- name: 'apex-build-{stream}'
-
- # Job template for builds
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: '{build-slave}'
-
- disabled: false
-
- concurrent: true
-
- parameters:
- - '{project}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - timeout:
- timeout: 150
- fail: true
-
- properties:
- - logrotate-default
- - throttle:
- max-per-node: {concurrent-builds}
- max-total: 10
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify-iso-{stream}'
-
- builders:
- - 'apex-build'
- - inject:
- properties-content: ARTIFACT_TYPE=rpm
- - 'apex-upload-artifact'
-
-# ISO verify job
-- job-template:
- name: 'apex-verify-iso-{stream}'
-
- # Job template for builds
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: '{virtual-slave}'
-
- disabled: false
-
- concurrent: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from parameters macro."
-
- scm:
- - git-scm
-
- properties:
- - logrotate-default
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - 'apex-iso-verify'
- - inject:
- properties-content: ARTIFACT_TYPE=iso
- - 'apex-upload-artifact'
-
# Deploy job
- job-template:
name: 'apex-deploy-{platform}-{stream}'
- # Job template for virtual deployment
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
node: 'apex-{platform}-{stream}'
concurrent: true
disabled: false
-
+ quiet-period: 30
scm:
- git-scm-gerrit
wrappers:
- timeout:
- timeout: 120
+ timeout: 140
fail: true
parameters:
+ - '{project}-{platform}-{stream}-defaults'
- project-parameter:
project: '{project}'
branch: '{branch}'
@@ -521,6 +590,8 @@
- 'apex-deploy.*'
- 'functest.*'
- 'yardstick.*'
+ - 'dovetail.*'
+ - 'storperf.*'
- throttle:
max-per-node: 1
max-total: 10
@@ -531,18 +602,13 @@
description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- 'apex-download-artifact'
- 'apex-deploy'
- - 'apex-workspace-cleanup'
+ - 'clean-workspace'
# Baremetal Deploy and Test
- job-template:
- name: 'apex-run-deploy-test-baremetal-{stream}'
+ name: 'apex-{scenario}-baremetal-{scenario_stream}'
- # Job template for daily build
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
project-type: 'multijob'
disabled: false
@@ -552,7 +618,7 @@
parameters:
- '{project}-defaults'
- - '{project}-baremetal-{stream}-defaults'
+ - '{project}-baremetal-{scenario_stream}-defaults'
- project-parameter:
project: '{project}'
branch: '{branch}'
@@ -560,7 +626,7 @@
gs-pathname: '{gs-pathname}'
- string:
name: DEPLOY_SCENARIO
- default: '{verify-scenario}'
+ default: '{scenario}'
description: "Scenario to deploy with."
properties:
- logrotate-default
@@ -572,14 +638,19 @@
- 'apex-runner.*'
- 'apex-.*-promote.*'
- 'apex-run.*'
+ - 'apex-.+-baremetal-.+'
+ - throttle:
+ max-per-node: 1
+ max-total: 10
+ option: 'project'
builders:
- description-setter:
description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- multijob:
name: 'Baremetal Deploy'
- condition: ALWAYS
+ condition: SUCCESSFUL
projects:
- - name: 'apex-deploy-baremetal-{stream}'
+ - name: 'apex-deploy-baremetal-{scenario_stream}'
node-parameters: true
current-parameters: true
predefined-parameters: |
@@ -591,10 +662,65 @@
abort-all-job: true
git-revision: false
- multijob:
+ name: 'OPNFV Test Suite'
+ projects:
+ - name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
+ node-parameters: true
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ publishers:
+ - groovy-postbuild:
+ script:
+ !include-raw-escape: ./update-build-result.groovy
+
+# Baremetal test job
+- job-template:
+ name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
+
+ project-type: 'multijob'
+
+ disabled: false
+
+ parameters:
+ - '{project}-defaults'
+ - '{project}-baremetal-{scenario_stream}-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: '{scenario}'
+ description: "Scenario to deploy with."
+ properties:
+ - logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify.*'
+ - 'apex-runner.*'
+ - 'apex-.*-promote.*'
+ - 'apex-run.*'
+ - 'apex-testsuite-.+-baremetal-.+'
+ - throttle:
+ max-per-node: 1
+ max-total: 10
+ option: 'project'
+ builders:
+ - description-setter:
+ description: "Testing on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
+ - multijob:
name: Functest
condition: ALWAYS
projects:
- - name: 'functest-apex-baremetal-daily-{stream}'
+ - name: 'functest-apex-baremetal-daily-{scenario_stream}'
node-parameters: true
current-parameters: false
predefined-parameters:
@@ -606,7 +732,7 @@
name: Yardstick
condition: ALWAYS
projects:
- - name: 'yardstick-apex-baremetal-daily-{stream}'
+ - name: 'yardstick-apex-baremetal-daily-{scenario_stream}'
node-parameters: true
current-parameters: false
predefined-parameters:
@@ -614,6 +740,41 @@
kill-phase-on: NEVER
abort-all-job: false
git-revision: false
+ - multijob:
+ name: Dovetail
+ condition: ALWAYS
+ projects:
+ - name: 'dovetail-apex-baremetal-proposed_tests-{scenario_stream}'
+ node-parameters: true
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ kill-phase-on: NEVER
+ enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
+ abort-all-job: false
+ git-revision: false
+ - multijob:
+ name: StorPerf
+ condition: ALWAYS
+ projects:
+ - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
+ node-parameters: true
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-nosdn-nofeature-ha/"
+ kill-phase-on: NEVER
+ abort-all-job: false
+ git-revision: false
+# Build status is always success due conditional plugin prefetching
+# build status before multijob phases execute
+# - conditional-step:
+# condition-kind: current-status
+# condition-worst: SUCCESS
+# condtion-best: SUCCESS
+# on-evaluation-failure: mark-unstable
+# steps:
+# - shell: 'echo "Tests Passed"'
# danube Daily
@@ -686,261 +847,235 @@
condition: SUCCESSFUL
projects:
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-nosdn-nofeature-noha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-nofeature-noha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-nosdn-nofeature-ha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-nofeature-ha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-nosdn-nofeature-ha-ipv6-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-nofeature-ha-ipv6
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-nosdn-ovs-noha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-ovs-noha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-nosdn-ovs-ha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-ovs-ha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-nosdn-fdio-noha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-fdio-noha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-nosdn-fdio-ha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-fdio-ha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-nosdn-kvm-ha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-kvm-ha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-nosdn-kvm-noha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-kvm-noha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-odl_l2-fdio-noha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_l2-fdio-noha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-odl_l2-fdio-ha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_l2-fdio-ha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-odl_netvirt-fdio-noha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_netvirt-fdio-noha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-odl_l2-sfc-noha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_l2-sfc-noha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-odl_l3-nofeature-noha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_l3-nofeature-noha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-odl_l3-nofeature-ha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-odl_l3-ovs-noha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_l3-ovs-noha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-odl_l3-ovs-ha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_l3-ovs-ha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-odl-bgpvpn-ha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl-bgpvpn-ha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-odl-gluon-noha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl-gluon-noha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-odl_l3-fdio-noha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_l3-fdio-noha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-odl_l3-fdio-ha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_l3-fdio-ha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-odl_l3-fdio_dvr-noha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_l3-fdio_dvr-noha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-odl_l3-fdio_dvr-ha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_l3-fdio_dvr-ha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-odl_l3-csit-noha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_l3-csit-noha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-onos-nofeature-ha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-onos-nofeature-ha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-danube'
+ - name: 'apex-os-ovn-nofeature-noha-baremetal-danube'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-ovn-nofeature-noha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
@@ -1017,41 +1152,190 @@
condition: SUCCESSFUL
projects:
- - name: 'apex-run-deploy-test-baremetal-master'
+ - name: 'apex-os-nosdn-nofeature-noha-baremetal-master'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-nofeature-noha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-master'
+ - name: 'apex-os-nosdn-nofeature-ha-baremetal-master'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-nofeature-ha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-master'
+ - name: 'apex-os-odl-nofeature-ha-baremetal-master'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl-nofeature-ha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
git-revision: false
- - name: 'apex-run-deploy-test-baremetal-master'
+ - name: 'apex-os-odl-nofeature-noha-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-os-odl-bgpvpn-ha-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-os-ovn-nofeature-noha-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-os-nosdn-fdio-noha-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-os-nosdn-fdio-ha-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-os-odl-fdio-noha-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-os-odl-fdio-ha-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-os-nosdn-bar-ha-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-os-nosdn-bar-noha-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-os-nosdn-nofeature-ha-ipv6-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-os-nosdn-ovs_dpdk-noha-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-os-nosdn-ovs_dpdk-ha-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-os-nosdn-kvm_ovs_dpdk-noha-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-os-nosdn-kvm_ovs_dpdk-ha-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-os-odl-sfc-noha-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-os-odl-sfc-ha-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-os-odl-fdio-dvr-noha-baremetal-master'
+ node-parameters: false
+ current-parameters: false
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ - name: 'apex-os-odl-fdio-dvr-ha-baremetal-master'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl-nofeature-noha
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
@@ -1294,6 +1578,85 @@
abort-all-job: true
git-revision: false
+# Flex job
+- job-template:
+ name: 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
+
+ project-type: 'multijob'
+
+ disabled: false
+
+ node: 'flex-pod2'
+
+ scm:
+ - git-scm
+ triggers:
+ - 'apex-{stream}'
+ parameters:
+ - '{project}-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-nosdn-nofeature-ha'
+ description: "Scenario to deploy with."
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
+ properties:
+ - logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify.*'
+ - 'apex-runner.*'
+ - 'apex-.*-promote.*'
+ - 'apex-run.*'
+ - 'apex-.+-baremetal-.+'
+ - throttle:
+ max-per-node: 1
+ max-total: 10
+ option: 'project'
+ builders:
+ - description-setter:
+ description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
+ - multijob:
+ name: 'Baremetal Deploy'
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-deploy-baremetal-{stream}'
+ node-parameters: true
+ current-parameters: true
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+ - multijob:
+ name: Yardstick
+ condition: ALWAYS
+ projects:
+ - name: 'yardstick-apex-baremetal-daily-{stream}'
+ node-parameters: true
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ kill-phase-on: NEVER
+ abort-all-job: false
+ git-revision: false
+
########################
# parameter macros
########################
@@ -1343,25 +1706,6 @@
!include-raw: ./apex-unit-test.sh
- builder:
- name: 'apex-build'
- builders:
- - shell:
- !include-raw: ./apex-build.sh
-
-- builder:
- name: 'apex-workspace-cleanup'
- builders:
- - shell:
- !include-raw: ./apex-workspace-cleanup.sh
-
-- builder:
- name: 'apex-iso-verify'
- builders:
- - shell:
- !include-raw: ./apex-iso-verify.sh
-
-
-- builder:
name: 'apex-upload-artifact'
builders:
- shell:
@@ -1385,8 +1729,8 @@
- trigger:
name: 'apex-master'
triggers:
- - timed: '0 3 1 1 7'
+ - timed: '0 12 * * *'
- trigger:
name: 'apex-danube'
triggers:
- - timed: '0 12 * * *'
+ - timed: '0 3 1 1 7'
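
The apex.yml changes above are mirrored in the Jinja2 template that follows (apex.yml.j2), which expands a per-stream 'scenarios' mapping (kept alongside it in jjb/apex/scenarios.yaml.hidden) into the scenario list using YAML anchors and merge keys. A hedged sketch of that rendering step follows, assuming python3 with jinja2 and PyYAML; the exact generation command releng uses is not shown in this diff, and the file names simply mirror the repo layout.

# Hypothetical rendering step for illustration only
python3 - <<'EOF'
import yaml
from jinja2 import Template

# Per-stream scenario lists, e.g. {'master': ['os-nosdn-nofeature-ha', ...], 'danube': [...]}
with open('jjb/apex/scenarios.yaml.hidden') as f:
    scenarios = yaml.safe_load(f)

with open('jjb/apex/apex.yml.j2') as f:
    template = Template(f.read())

# The template loops "for stream in scenarios" and reads scenarios[stream],
# so a plain dict of lists is enough here.
with open('jjb/apex/apex.yml', 'w') as f:
    f.write(template.render(scenarios=scenarios))
EOF
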
diff --git a/jjb/apex/apex.yml.j2 b/jjb/apex/apex.yml.j2
index 752cf281d..356c718d5 100644
--- a/jjb/apex/apex.yml.j2
+++ b/jjb/apex/apex.yml.j2
@@ -6,37 +6,45 @@
- 'apex-verify-gate-{stream}'
- 'apex-verify-unit-tests-{stream}'
- 'apex-runner-cperf-{stream}'
- - 'apex-build-{stream}'
- 'apex-deploy-{platform}-{stream}'
- 'apex-daily-master'
- 'apex-daily-danube'
- 'apex-csit-promote-daily-{stream}'
- 'apex-fdio-promote-daily-{stream}'
- - 'apex-verify-iso-{stream}'
- - 'apex-run-deploy-test-baremetal-{stream}'
+ - 'apex-{scenario}-baremetal-{scenario_stream}'
+ - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
- 'apex-upload-snapshot'
- 'apex-create-snapshot'
+ - 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
# stream: branch with - in place of / (eg. stable-arno)
# branch: branch (eg. stable/arno)
stream:
- - master:
+ - master: &master
branch: 'master'
gs-pathname: ''
build-slave: 'apex-build-master'
virtual-slave: 'apex-virtual-master'
baremetal-slave: 'apex-baremetal-master'
verify-scenario: 'os-odl-nofeature-ha'
- concurrent-builds: 3
+ scenario_stream: 'master'
- - danube:
+ - danube: &danube
branch: 'stable/danube'
gs-pathname: '/danube'
build-slave: 'apex-build-danube'
virtual-slave: 'apex-virtual-danube'
baremetal-slave: 'apex-baremetal-danube'
verify-scenario: 'os-odl_l3-nofeature-ha'
- concurrent-builds: 1
- disabled: false
+ scenario_stream: 'danube'
+ disabled: true
+
+ scenario:
+ {%- for stream in scenarios %}
+ {%- for scenario in scenarios[stream] %}
+ - '{{scenario}}':
+ <<: *{{stream}}
+ {%- endfor %}
+ {%- endfor %}
platform:
- 'baremetal'
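The scenario axis above is generated rather than listed by hand: the Jinja2 loop walks scenarios.yaml.hidden (changed later in this diff) and emits one entry per scenario, each pulling in its stream defaults through the &master/&danube anchors. A rough sketch of the rendered YAML for the first two master scenarios (bash heredoc used only to print the expected output; indentation approximate):

cat <<'EOF'
scenario:
  - 'os-nosdn-nofeature-ha':
      <<: *master
  - 'os-odl-nofeature-ha':
      <<: *master
EOF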
@@ -79,9 +87,37 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'false'
file-paths:
- compare-type: ANT
- pattern: 'tests/**'
+ pattern: 'apex/tests/**'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: '*'
+ - compare-type: ANT
+ pattern: 'apex/*'
+ - compare-type: ANT
+ pattern: 'build/**'
+ - compare-type: ANT
+ pattern: 'lib/**'
+ - compare-type: ANT
+ pattern: 'config/**'
+ - compare-type: ANT
+ pattern: 'apex/build/**'
+ - compare-type: ANT
+ pattern: 'apex/common/**'
+ - compare-type: ANT
+ pattern: 'apex/inventory/**'
+ - compare-type: ANT
+ pattern: 'apex/network/**'
+ - compare-type: ANT
+ pattern: 'apex/overcloud/**'
+ - compare-type: ANT
+ pattern: 'apex/settings/**'
+ - compare-type: ANT
+ pattern: 'apex/undercloud/**'
+ - compare-type: ANT
+ pattern: 'apex/virtual/**'
properties:
- logrotate-default
- throttle:
@@ -135,20 +171,49 @@
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
file-paths:
- compare-type: ANT
- pattern: 'ci/**'
+ pattern: '*'
+ - compare-type: ANT
+ pattern: 'apex/*'
- compare-type: ANT
pattern: 'build/**'
- compare-type: ANT
pattern: 'lib/**'
- compare-type: ANT
pattern: 'config/**'
+ - compare-type: ANT
+ pattern: 'apex/build/**'
+ - compare-type: ANT
+ pattern: 'apex/common/**'
+ - compare-type: ANT
+ pattern: 'apex/inventory/**'
+ - compare-type: ANT
+ pattern: 'apex/network/**'
+ - compare-type: ANT
+ pattern: 'apex/overcloud/**'
+ - compare-type: ANT
+ pattern: 'apex/settings/**'
+ - compare-type: ANT
+ pattern: 'apex/undercloud/**'
+ - compare-type: ANT
+ pattern: 'apex/virtual/**'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'apex/tests/**'
+ - compare-type: ANT
+ pattern: 'docs/**'
properties:
- logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify.*'
- throttle:
- max-per-node: 3
+ max-per-node: 1
max-total: 10
option: 'project'
@@ -271,11 +336,18 @@
pattern: 'lib/**'
- compare-type: ANT
pattern: 'config/**'
+ - compare-type: ANT
+ pattern: 'apex/**'
properties:
- logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify.*'
- throttle:
- max-per-node: 3
+ max-per-node: 1
max-total: 10
option: 'project'
@@ -299,6 +371,10 @@
kill-phase-on: FAILURE
abort-all-job: true
git-revision: true
+ - shell: |
+ echo DEPLOY_SCENARIO=$(echo $GERRIT_EVENT_COMMENT_TEXT | grep start-gate-scenario | grep -Eo 'os-.*') > detected_scenario
+ - inject:
+ properties-file: detected_scenario
- multijob:
name: functest-smoke
condition: SUCCESSFUL
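The new shell/inject pair above pulls DEPLOY_SCENARIO out of the Gerrit comment that triggered the gate and hands it to the functest-smoke phase via the injected properties file. A minimal stand-alone check of that extraction, using a hypothetical comment value (the real text arrives in $GERRIT_EVENT_COMMENT_TEXT):

#!/bin/bash
# hypothetical trigger comment; only the 'os-...' tail is kept
GERRIT_EVENT_COMMENT_TEXT='start-gate-scenario: os-odl-fdio-ha'
echo DEPLOY_SCENARIO=$(echo "$GERRIT_EVENT_COMMENT_TEXT" \
  | grep start-gate-scenario | grep -Eo 'os-.*') > detected_scenario
cat detected_scenario    # prints: DEPLOY_SCENARIO=os-odl-fdio-ha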
@@ -306,7 +382,7 @@
- name: 'functest-apex-virtual-suite-{stream}'
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO={verify-scenario}
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
FUNCTEST_SUITE_NAME=healthcheck
GERRIT_BRANCH=$GERRIT_BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
@@ -383,121 +459,26 @@
abort-all-job: false
git-revision: false
-# Build phase
-- job-template:
- name: 'apex-build-{stream}'
-
- # Job template for builds
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: '{build-slave}'
-
- disabled: false
-
- concurrent: true
-
- parameters:
- - '{project}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - timeout:
- timeout: 150
- fail: true
-
- properties:
- - logrotate-default
- - throttle:
- max-per-node: {concurrent-builds}
- max-total: 10
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify-iso-{stream}'
-
- builders:
- - 'apex-build'
- - inject:
- properties-content: ARTIFACT_TYPE=rpm
- - 'apex-upload-artifact'
-
-# ISO verify job
-- job-template:
- name: 'apex-verify-iso-{stream}'
-
- # Job template for builds
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: '{virtual-slave}'
-
- disabled: false
-
- concurrent: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from parameters macro."
-
- scm:
- - git-scm
-
- properties:
- - logrotate-default
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - 'apex-iso-verify'
- - inject:
- properties-content: ARTIFACT_TYPE=iso
- - 'apex-upload-artifact'
-
# Deploy job
- job-template:
name: 'apex-deploy-{platform}-{stream}'
- # Job template for virtual deployment
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
node: 'apex-{platform}-{stream}'
concurrent: true
disabled: false
-
+ quiet-period: 30
scm:
- git-scm-gerrit
wrappers:
- timeout:
- timeout: 120
+ timeout: 140
fail: true
parameters:
+ - '{project}-{platform}-{stream}-defaults'
- project-parameter:
project: '{project}'
branch: '{branch}'
@@ -521,6 +502,8 @@
- 'apex-deploy.*'
- 'functest.*'
- 'yardstick.*'
+ - 'dovetail.*'
+ - 'storperf.*'
- throttle:
max-per-node: 1
max-total: 10
@@ -531,18 +514,13 @@
description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- 'apex-download-artifact'
- 'apex-deploy'
- - 'apex-workspace-cleanup'
+ - 'clean-workspace'
# Baremetal Deploy and Test
- job-template:
- name: 'apex-run-deploy-test-baremetal-{stream}'
+ name: 'apex-{scenario}-baremetal-{scenario_stream}'
- # Job template for daily build
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
project-type: 'multijob'
disabled: false
@@ -552,7 +530,7 @@
parameters:
- '{project}-defaults'
- - '{project}-baremetal-{stream}-defaults'
+ - '{project}-baremetal-{scenario_stream}-defaults'
- project-parameter:
project: '{project}'
branch: '{branch}'
@@ -560,7 +538,7 @@
gs-pathname: '{gs-pathname}'
- string:
name: DEPLOY_SCENARIO
- default: '{verify-scenario}'
+ default: '{scenario}'
description: "Scenario to deploy with."
properties:
- logrotate-default
@@ -572,14 +550,19 @@
- 'apex-runner.*'
- 'apex-.*-promote.*'
- 'apex-run.*'
+ - 'apex-.+-baremetal-.+'
+ - throttle:
+ max-per-node: 1
+ max-total: 10
+ option: 'project'
builders:
- description-setter:
description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- multijob:
name: 'Baremetal Deploy'
- condition: ALWAYS
+ condition: SUCCESSFUL
projects:
- - name: 'apex-deploy-baremetal-{stream}'
+ - name: 'apex-deploy-baremetal-{scenario_stream}'
node-parameters: true
current-parameters: true
predefined-parameters: |
@@ -591,10 +574,65 @@
abort-all-job: true
git-revision: false
- multijob:
+ name: 'OPNFV Test Suite'
+ projects:
+ - name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
+ node-parameters: true
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ kill-phase-on: NEVER
+ abort-all-job: true
+ git-revision: false
+
+ publishers:
+ - groovy-postbuild:
+ script:
+ !include-raw-escape: ./update-build-result.groovy
+
+# Baremetal test job
+- job-template:
+ name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
+
+ project-type: 'multijob'
+
+ disabled: false
+
+ parameters:
+ - '{project}-defaults'
+ - '{project}-baremetal-{scenario_stream}-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: '{scenario}'
+ description: "Scenario to deploy with."
+ properties:
+ - logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify.*'
+ - 'apex-runner.*'
+ - 'apex-.*-promote.*'
+ - 'apex-run.*'
+ - 'apex-testsuite-.+-baremetal-.+'
+ - throttle:
+ max-per-node: 1
+ max-total: 10
+ option: 'project'
+ builders:
+ - description-setter:
+ description: "Testing on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
+ - multijob:
name: Functest
condition: ALWAYS
projects:
- - name: 'functest-apex-baremetal-daily-{stream}'
+ - name: 'functest-apex-baremetal-daily-{scenario_stream}'
node-parameters: true
current-parameters: false
predefined-parameters:
@@ -606,14 +644,49 @@
name: Yardstick
condition: ALWAYS
projects:
- - name: 'yardstick-apex-baremetal-daily-{stream}'
+ - name: 'yardstick-apex-baremetal-daily-{scenario_stream}'
+ node-parameters: true
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ kill-phase-on: NEVER
+ abort-all-job: false
+ git-revision: false
+ - multijob:
+ name: Dovetail
+ condition: ALWAYS
+ projects:
+ - name: 'dovetail-apex-baremetal-proposed_tests-{scenario_stream}'
+ node-parameters: true
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ kill-phase-on: NEVER
+ enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
+ abort-all-job: false
+ git-revision: false
+ - multijob:
+ name: StorPerf
+ condition: ALWAYS
+ projects:
+ - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
node-parameters: true
current-parameters: false
predefined-parameters:
DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-nosdn-nofeature-ha/"
kill-phase-on: NEVER
abort-all-job: false
git-revision: false
+# Build status is always success due to the conditional plugin prefetching
+# the build status before multijob phases execute
+# - conditional-step:
+# condition-kind: current-status
+# condition-worst: SUCCESS
+# condition-best: SUCCESS
+# on-evaluation-failure: mark-unstable
+# steps:
+# - shell: 'echo "Tests Passed"'
{% for stream in scenarios %}
# {{ stream }} Daily
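The Dovetail and StorPerf phases above are gated by enable-condition Groovy expressions matched against $DEPLOY_SCENARIO. A rough shell analogue of the Dovetail gate (anchors added so bash's partial match behaves like Groovy's full-string ==~; the scenario value is only an example):

DEPLOY_SCENARIO=os-nosdn-nofeature-ha
if [[ "$DEPLOY_SCENARIO" =~ ^os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha$ ]]; then
  echo "Dovetail phase enabled for $DEPLOY_SCENARIO"
else
  echo "Dovetail phase skipped for $DEPLOY_SCENARIO"
fi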
@@ -686,11 +759,10 @@
condition: SUCCESSFUL
projects:
{% for scenario in scenarios[stream] %}
- - name: 'apex-run-deploy-test-baremetal-{{ stream }}'
+ - name: 'apex-{{ scenario }}-baremetal-{{ stream }}'
node-parameters: false
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO={{scenario}}
OPNFV_CLEAN=yes
kill-phase-on: NEVER
abort-all-job: true
@@ -933,6 +1005,85 @@
abort-all-job: true
git-revision: false
+# Flex job
+- job-template:
+ name: 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
+
+ project-type: 'multijob'
+
+ disabled: false
+
+ node: 'flex-pod2'
+
+ scm:
+ - git-scm
+ triggers:
+ - 'apex-{stream}'
+ parameters:
+ - '{project}-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-nosdn-nofeature-ha'
+ description: "Scenario to deploy with."
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
+ properties:
+ - logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify.*'
+ - 'apex-runner.*'
+ - 'apex-.*-promote.*'
+ - 'apex-run.*'
+ - 'apex-.+-baremetal-.+'
+ - throttle:
+ max-per-node: 1
+ max-total: 10
+ option: 'project'
+ builders:
+ - description-setter:
+ description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
+ - multijob:
+ name: 'Baremetal Deploy'
+ condition: SUCCESSFUL
+ projects:
+ - name: 'apex-deploy-baremetal-{stream}'
+ node-parameters: true
+ current-parameters: true
+ predefined-parameters: |
+ OPNFV_CLEAN=yes
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ git-revision: false
+ - multijob:
+ name: Yardstick
+ condition: ALWAYS
+ projects:
+ - name: 'yardstick-apex-baremetal-daily-{stream}'
+ node-parameters: true
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ kill-phase-on: NEVER
+ abort-all-job: false
+ git-revision: false
+
########################
# parameter macros
########################
@@ -982,25 +1133,6 @@
!include-raw: ./apex-unit-test.sh
- builder:
- name: 'apex-build'
- builders:
- - shell:
- !include-raw: ./apex-build.sh
-
-- builder:
- name: 'apex-workspace-cleanup'
- builders:
- - shell:
- !include-raw: ./apex-workspace-cleanup.sh
-
-- builder:
- name: 'apex-iso-verify'
- builders:
- - shell:
- !include-raw: ./apex-iso-verify.sh
-
-
-- builder:
name: 'apex-upload-artifact'
builders:
- shell:
@@ -1024,9 +1156,9 @@
- trigger:
name: 'apex-master'
triggers:
- - timed: '0 3 1 1 7'
+ - timed: '0 12 * * *'
- trigger:
name: 'apex-danube'
triggers:
- - timed: '0 12 * * *'
+ - timed: '0 3 1 1 7'
diff --git a/jjb/apex/scenarios.yaml.hidden b/jjb/apex/scenarios.yaml.hidden
index dc9107a11..def4e796a 100644
--- a/jjb/apex/scenarios.yaml.hidden
+++ b/jjb/apex/scenarios.yaml.hidden
@@ -3,6 +3,23 @@ master:
- 'os-nosdn-nofeature-ha'
- 'os-odl-nofeature-ha'
- 'os-odl-nofeature-noha'
+ - 'os-odl-bgpvpn-ha'
+ - 'os-ovn-nofeature-noha'
+ - 'os-nosdn-fdio-noha'
+ - 'os-nosdn-fdio-ha'
+ - 'os-odl-fdio-noha'
+ - 'os-odl-fdio-ha'
+ - 'os-nosdn-bar-ha'
+ - 'os-nosdn-bar-noha'
+ - 'os-nosdn-nofeature-ha-ipv6'
+ - 'os-nosdn-ovs_dpdk-noha'
+ - 'os-nosdn-ovs_dpdk-ha'
+ - 'os-nosdn-kvm_ovs_dpdk-noha'
+ - 'os-nosdn-kvm_ovs_dpdk-ha'
+ - 'os-odl-sfc-noha'
+ - 'os-odl-sfc-ha'
+ - 'os-odl-fdio-dvr-noha'
+ - 'os-odl-fdio-dvr-ha'
danube:
- 'os-nosdn-nofeature-noha'
- 'os-nosdn-nofeature-ha'
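These lists are what the Jinja2 loops in apex.yml.j2 iterate over, so every scenario added above fans out into its own deploy-and-test multijob pair. A quick sketch of the job names produced for two of the new master entries (names follow the templates shown earlier in this diff):

for scenario in os-odl-bgpvpn-ha os-odl-fdio-dvr-ha; do
  echo "apex-${scenario}-baremetal-master"
  echo "apex-testsuite-${scenario}-baremetal-master"
done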
diff --git a/jjb/apex/update-build-result.groovy b/jjb/apex/update-build-result.groovy
new file mode 100644
index 000000000..9edca6b6b
--- /dev/null
+++ b/jjb/apex/update-build-result.groovy
@@ -0,0 +1,5 @@
+import hudson.model.*
+if (manager.logContains('^.*apex-deploy-baremetal.*SUCCESS$')
+ && manager.build.@result == hudson.model.Result.FAILURE) {
+ manager.build.@result = hudson.model.Result.UNSTABLE
+}
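The new post-build script appears to cover the case where the baremetal deploy succeeded but a later test phase dragged the multijob result down to FAILURE: if the console log shows a successful apex-deploy-baremetal run, the overall result is softened to UNSTABLE. A hedged local check of the pattern (the sample log line is invented; the exact Jenkins wording may differ):

echo 'Finished Build : apex-deploy-baremetal-master #42 : SUCCESS' \
  | grep -Eq '^.*apex-deploy-baremetal.*SUCCESS$' \
  && echo 'match: FAILURE would be downgraded to UNSTABLE'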
diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml
index 55d8ff975..faa5971e1 100644
--- a/jjb/armband/armband-ci-jobs.yml
+++ b/jjb/armband/armband-ci-jobs.yml
@@ -11,27 +11,27 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
- danube: &danube
- stream: danube
+ euphrates: &euphrates
+ stream: euphrates
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: false
+ disabled: true
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
#--------------------------------
# CI POD's
#--------------------------------
-# danube
+# euphrates
#--------------------------------
pod:
- armband-baremetal:
slave-label: armband-baremetal
installer: fuel
- <<: *danube
+ <<: *euphrates
- armband-virtual:
slave-label: armband-virtual
installer: fuel
- <<: *danube
+ <<: *euphrates
#--------------------------------
# master
#--------------------------------
@@ -44,65 +44,17 @@
installer: fuel
<<: *master
#--------------------------------
-# NONE-CI POD's
-#--------------------------------
-# danube
-#--------------------------------
- - arm-pod2:
- slave-label: arm-pod2
- installer: fuel
- <<: *danube
- - arm-pod3:
- slave-label: arm-pod3
- installer: fuel
- <<: *danube
- - arm-pod4:
- slave-label: arm-pod4
- installer: fuel
- <<: *danube
- - arm-virtual1:
- slave-label: arm-virtual1
- installer: fuel
- <<: *danube
-#--------------------------------
-# master
-#--------------------------------
- - arm-pod2:
- slave-label: arm-pod2
- installer: fuel
- <<: *master
- - arm-pod3:
- slave-label: arm-pod3
- installer: fuel
- <<: *master
- - arm-pod4:
- slave-label: arm-pod4
- installer: fuel
- <<: *master
- - arm-virtual1:
- slave-label: arm-virtual1
- installer: fuel
- <<: *master
-#--------------------------------
# scenarios
#--------------------------------
scenario:
# HA scenarios
- 'os-nosdn-nofeature-ha':
auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
- - 'os-odl_l2-nofeature-ha':
- auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
- - 'os-odl_l3-nofeature-ha':
- auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
- - 'os-odl_l2-bgpvpn-ha':
- auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
- - 'os-odl_l2-sfc-ha':
+ - 'os-odl-nofeature-ha':
auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
# NOHA scenarios
- - 'os-odl_l2-nofeature-noha':
- auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
- - 'os-odl_l2-sfc-noha':
+ - 'os-nosdn-nofeature-noha':
auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
jobs:
@@ -130,6 +82,7 @@
use-build-blocker: true
blocking-jobs:
- '{installer}-os-.*?-{pod}-daily-.*'
+ - 'armband-verify-.*'
block-level: 'NODE'
wrappers:
@@ -143,14 +96,13 @@
- project-parameter:
project: '{project}'
branch: '{branch}'
- - '{installer}-defaults'
+ - '{installer}-defaults':
+ gs-pathname: '{gs-pathname}'
- '{slave-label}-defaults':
installer: '{installer}'
- string:
name: DEPLOY_SCENARIO
default: '{scenario}'
- - armband-ci-parameter:
- gs-pathname: '{gs-pathname}'
builders:
- trigger-builds:
@@ -187,10 +139,10 @@
# 3.only proposed_tests testsuite here(refstack, ha, ipv6, bgpvpn)
# 4.not used for release criteria or compliance,
# only to debug the dovetail tool bugs with arm pods
- # 5.only run against scenario os-(nosdn|odl_l2)-(nofeature-bgpvpn)-ha
+ # 5.only run against scenario os-(nosdn|odl)-(nofeature|bgpvpn)-ha
- conditional-step:
condition-kind: regex-match
- regex: os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha
+ regex: os-(nosdn|odl)-(nofeature|bgpvpn)-ha
label: '{scenario}'
steps:
- trigger-builds:
@@ -228,14 +180,13 @@
- project-parameter:
project: '{project}'
branch: '{branch}'
- - '{installer}-defaults'
+ - '{installer}-defaults':
+ gs-pathname: '{gs-pathname}'
- '{slave-label}-defaults':
installer: '{installer}'
- string:
name: DEPLOY_SCENARIO
- default: 'os-odl_l2-nofeature-ha'
- - armband-ci-parameter:
- gs-pathname: '{gs-pathname}'
+ default: 'os-odl-nofeature-ha'
scm:
- git-scm
@@ -246,32 +197,12 @@
builders:
- shell:
- !include-raw-escape: ./armband-download-artifact.sh
- - shell:
- !include-raw-escape: ./armband-deploy.sh
+ !include-raw-escape: ../fuel/fuel-deploy.sh
publishers:
- email:
recipients: armband@enea.com
-
-########################
-# parameter macros
-########################
-- parameter:
- name: armband-ci-parameter
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/cache/$INSTALLER_TYPE
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
+ - email-jenkins-admins-on-failure
########################
# trigger macros
@@ -281,375 +212,59 @@
# Enea Armband CI Baremetal Triggers running against master branch
#-----------------------------------------------------------------
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-master-trigger'
- triggers:
- - timed: ''
-- trigger:
name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-master-trigger'
triggers:
- - timed: ''
+ - timed: '0 1 * * *'
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-master-trigger'
+ name: 'fuel-os-nosdn-nofeature-noha-armband-baremetal-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-master-trigger'
+ name: 'fuel-os-odl-nofeature-ha-armband-baremetal-master-trigger'
triggers:
- - timed: ''
+ - timed: '0 16 * * *'
+#----------------------------------------------------------------------
+# Enea Armband CI Baremetal Triggers running against euphrates branch
+#----------------------------------------------------------------------
- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-master-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-master-trigger'
+ name: 'fuel-os-nosdn-nofeature-noha-armband-baremetal-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-master-trigger'
+ name: 'fuel-os-odl-nofeature-ha-armband-baremetal-euphrates-trigger'
triggers:
- timed: ''
-
-#----------------------------------------------------------------------
-# Enea Armband CI Baremetal Triggers running against danube branch
-#----------------------------------------------------------------------
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-danube-trigger'
- triggers:
- - timed: '0 0,16 * * 2,4'
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-danube-trigger'
- triggers:
- - timed: '0 0 * * 1,5,7'
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-danube-trigger'
- triggers:
- - timed: '0 16 * * 1,5,7'
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-danube-trigger'
- triggers:
- - timed: '0 8 * * 2,4,6'
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-danube-trigger'
- triggers:
- - timed: '0 8 * * 1,3,5,7'
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-armband-baremetal-danube-trigger'
- triggers:
- - timed: '0 0 * * 3,6'
-- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-armband-baremetal-danube-trigger'
- triggers:
- - timed: '0 16 * * 3,6'
#---------------------------------------------------------------
# Enea Armband CI Virtual Triggers running against master branch
#---------------------------------------------------------------
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-armband-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
name: 'fuel-os-nosdn-nofeature-ha-armband-virtual-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-armband-virtual-master-trigger'
+ name: 'fuel-os-nosdn-nofeature-noha-armband-virtual-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-armband-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-armband-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-armband-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-armband-virtual-master-trigger'
+ name: 'fuel-os-odl-nofeature-ha-armband-virtual-master-trigger'
triggers:
- timed: ''
#--------------------------------------------------------------------
-# Enea Armband CI Virtual Triggers running against danube branch
+# Enea Armband CI Virtual Triggers running against euphrates branch
#--------------------------------------------------------------------
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-armband-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-armband-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-armband-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-armband-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-armband-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-armband-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-armband-virtual-danube-trigger'
- triggers:
- - timed: ''
-
-#--------------------------------------------------------------------
-# Enea Armband Non CI Virtual Triggers running against danube branch
-#--------------------------------------------------------------------
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-arm-virtual1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-arm-virtual1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-arm-virtual1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-arm-virtual1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-arm-virtual1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-arm-virtual1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-arm-virtual1-danube-trigger'
- triggers:
- - timed: ''
-
-#--------------------------------------------------------------------
-# Enea Armband Non CI Virtual Triggers running against master branch
-#--------------------------------------------------------------------
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-arm-virtual1-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-arm-virtual1-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-arm-virtual1-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-arm-virtual1-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-arm-virtual1-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-arm-virtual1-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-arm-virtual1-master-trigger'
- triggers:
- - timed: ''
-
-#----------------------------------------------------------
-# Enea Armband POD 2 Triggers running against master branch
-#----------------------------------------------------------
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-arm-pod2-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-arm-pod2-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-arm-pod2-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod2-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-arm-pod2-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-arm-pod2-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-arm-pod2-master-trigger'
- triggers:
- - timed: ''
-#---------------------------------------------------------------
-# Enea Armband POD 2 Triggers running against danube branch
-#---------------------------------------------------------------
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-arm-pod2-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-arm-pod2-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-arm-pod2-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod2-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-arm-pod2-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-arm-pod2-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-arm-pod2-danube-trigger'
- triggers:
- - timed: ''
-#----------------------------------------------------------
-# Enea Armband POD 3 Triggers running against master branch
-#----------------------------------------------------------
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-arm-pod3-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-arm-pod3-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-arm-pod3-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod3-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-arm-pod3-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-arm-pod3-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-arm-pod3-master-trigger'
- triggers:
- - timed: ''
-#---------------------------------------------------------------
-# Enea Armband POD 3 Triggers running against danube branch
-#---------------------------------------------------------------
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-arm-pod3-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-arm-pod3-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-arm-pod3-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod3-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-arm-pod3-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-arm-pod3-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-arm-pod3-danube-trigger'
- triggers:
- - timed: ''
-#--------------------------------------------------------------------------
-# Enea Armband POD 3 Triggers running against master branch (aarch64 slave)
-#--------------------------------------------------------------------------
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-arm-pod4-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-arm-pod4-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-arm-pod4-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod4-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-arm-pod4-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-arm-pod4-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-arm-pod4-master-trigger'
- triggers:
- - timed: ''
-#--------------------------------------------------------------------------
-# Enea Armband POD 3 Triggers running against danube branch (aarch64 slave)
-#--------------------------------------------------------------------------
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-arm-pod4-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-arm-pod4-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-arm-pod4-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-arm-pod4-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-arm-pod4-danube-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-armband-virtual-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-arm-pod4-danube-trigger'
+ name: 'fuel-os-nosdn-nofeature-noha-armband-virtual-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-arm-pod4-danube-trigger'
+ name: 'fuel-os-odl-nofeature-ha-armband-virtual-euphrates-trigger'
triggers:
- timed: ''
diff --git a/jjb/armband/armband-deploy.sh b/jjb/armband/armband-deploy.sh
deleted file mode 100755
index e445e0850..000000000
--- a/jjb/armband/armband-deploy.sh
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# (c) 2016 Enea Software AB
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o nounset
-set -o pipefail
-
-export TERM="vt220"
-
-# source the file so we get OPNFV vars
-source latest.properties
-
-# echo the info about artifact that is used during the deployment
-echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
-
-if [[ "$JOB_NAME" =~ "merge" ]]; then
- # set simplest scenario for virtual deploys to run for merges
- DEPLOY_SCENARIO="os-nosdn-nofeature-ha"
-else
- # for none-merge deployments
- # checkout the commit that was used for building the downloaded artifact
- # to make sure the ISO and deployment mechanism uses same versions
- echo "Checking out $OPNFV_GIT_SHA1"
- git checkout $OPNFV_GIT_SHA1 --quiet
-fi
-
-# set deployment parameters
-export TMPDIR=${WORKSPACE}/tmpdir
-
-# arm-pod4 is an aarch64 jenkins slave for the same POD as the
-# x86 jenkins slave arm-pod3; therefore we use the same pod name
-# to deploy the pod from both jenkins slaves
-if [[ "${NODE_NAME}" == "arm-pod4" ]]; then
- NODE_NAME="arm-pod3"
-fi
-
-LAB_NAME=${NODE_NAME/-*}
-POD_NAME=${NODE_NAME/*-}
-
-# we currently support enea
-if [[ ! $LAB_NAME =~ (arm|enea) ]]; then
- echo "Unsupported/unidentified lab $LAB_NAME. Cannot continue!"
- exit 1
-fi
-
-echo "Using configuration for $LAB_NAME"
-
-# create TMPDIR if it doesn't exist
-mkdir -p $TMPDIR
-
-cd $WORKSPACE
-if [[ $LAB_CONFIG_URL =~ ^(git|ssh):// ]]; then
- echo "Cloning securedlab repo $BRANCH"
- git clone --quiet --branch $BRANCH $LAB_CONFIG_URL lab-config
- LAB_CONFIG_URL=file://${WORKSPACE}/lab-config
-
- # Source local_env if present, which contains POD-specific config
- local_env="${WORKSPACE}/lab-config/labs/$LAB_NAME/$POD_NAME/fuel/config/local_env"
- if [ -e $local_env ]; then
- echo "-- Sourcing local environment file"
- source $local_env
- fi
-fi
-
-if [[ "$NODE_NAME" =~ "virtual" ]]; then
- POD_NAME="virtual_kvm"
-fi
-
-# releng wants us to use nothing else but opnfv.iso for now. We comply.
-ISO_FILE=$WORKSPACE/opnfv.iso
-
-# log file name
-FUEL_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}.log.tar.gz"
-
-# Deploy Cache (to enable just create the deploy-cache subdir)
-# NOTE: Only available when ISO files are cached using ISOSTORE mechanism
-DEPLOY_CACHE=${ISOSTORE:-/iso_mount/opnfv_ci}/${BRANCH##*/}/deploy-cache
-if [[ -d "${DEPLOY_CACHE}" ]]; then
- echo "Deploy cache dir present."
- echo "--------------------------------------------------------"
- echo "Fuel@OPNFV deploy cache: ${DEPLOY_CACHE}"
- DEPLOY_CACHE="-C ${DEPLOY_CACHE}"
-else
- DEPLOY_CACHE=""
-fi
-
-# construct the command
-DEPLOY_COMMAND="$WORKSPACE/ci/deploy.sh -b ${LAB_CONFIG_URL} \
- -l $LAB_NAME -p $POD_NAME -s $DEPLOY_SCENARIO -i file://${ISO_FILE} \
- -H -B ${DEFAULT_BRIDGE:-pxebr} -S $TMPDIR -L $WORKSPACE/$FUEL_LOG_FILENAME \
- ${DEPLOY_CACHE}"
-
-# log info to console
-echo "Deployment parameters"
-echo "--------------------------------------------------------"
-echo "Scenario: $DEPLOY_SCENARIO"
-echo "Lab: $LAB_NAME"
-echo "POD: $POD_NAME"
-echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
-echo
-echo "Starting the deployment using $INSTALLER_TYPE. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-# start the deployment
-echo "Issuing command"
-echo "$DEPLOY_COMMAND"
-echo
-
-$DEPLOY_COMMAND
-exit_code=$?
-
-echo
-echo "--------------------------------------------------------"
-echo "Deployment is done!"
-
-# upload logs for baremetal deployments
-# work with virtual deployments is still going on so we skip that for the timebeing
-if [[ "$JOB_NAME" =~ "baremetal-daily" ]]; then
- echo "Uploading deployment logs"
- gsutil cp $WORKSPACE/$FUEL_LOG_FILENAME gs://$GS_URL/logs/$FUEL_LOG_FILENAME > /dev/null 2>&1
- echo "Logs are available as http://$GS_URL/logs/$FUEL_LOG_FILENAME"
-fi
-
-if [[ $exit_code -ne 0 ]]; then
- echo "Deployment failed!"
- exit $exit_code
-else
- echo "Deployment is successful!"
-fi
diff --git a/jjb/armband/armband-download-artifact.sh b/jjb/armband/armband-download-artifact.sh
deleted file mode 100755
index e2dd097b6..000000000
--- a/jjb/armband/armband-download-artifact.sh
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o pipefail
-
-echo "Host info: $(hostname) $(hostname -I)"
-
-# Configurable environment variables:
-# ISOSTORE (/iso_mount/opnfv_ci)
-
-if [[ "$JOB_NAME" =~ "merge" ]]; then
- echo "Downloading http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties"
- # get the properties file for the Armband Fuel ISO built for a merged change
- curl -f -s -o $WORKSPACE/latest.properties http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties
-else
- # get the latest.properties file in order to get info regarding latest artifact
- echo "Downloading http://$GS_URL/latest.properties"
- curl -f -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
-fi
-
-# source the file so we get artifact metadata, it will exit if it doesn't exist
-source latest.properties
-
-# echo the info about artifact that is used during the deployment
-OPNFV_ARTIFACT=${OPNFV_ARTIFACT_URL/*\/}
-echo "Using $OPNFV_ARTIFACT for deployment"
-
-# Releng doesn't want us to use anything but opnfv.iso for now. We comply.
-ISO_FILE=${WORKSPACE}/opnfv.iso
-
-# using ISOs for verify & merge jobs from local storage will be enabled later
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
- # check if we already have the ISO to avoid redownload
- ISOSTORE=${ISOSTORE:-/iso_mount/opnfv_ci}/${BRANCH##*/}
- if [[ -f "$ISOSTORE/$OPNFV_ARTIFACT" ]]; then
- echo "ISO exists locally. Skipping the download and using the file from ISO store"
- ln -s $ISOSTORE/$OPNFV_ARTIFACT ${ISO_FILE}
- echo "--------------------------------------------------------"
- echo
- ls -al ${ISO_FILE}
- echo
- echo "--------------------------------------------------------"
- echo "Done!"
- exit 0
- fi
-fi
-
-# Use gsutils if available
-if $(which gsutil &>/dev/null); then
- DOWNLOAD_URL="gs://$OPNFV_ARTIFACT_URL"
- CMD="gsutil cp ${DOWNLOAD_URL} ${ISO_FILE}"
-else
- # download image
- # -f returns error if the file was not found or on server error
- DOWNLOAD_URL="http://$OPNFV_ARTIFACT_URL"
- CMD="curl -f -s -o ${ISO_FILE} ${DOWNLOAD_URL}"
-fi
-
-# log info to console
-echo "Downloading the $INSTALLER_TYPE artifact using URL $DOWNLOAD_URL"
-echo "This could take some time..."
-echo "--------------------------------------------------------"
-echo "$CMD"
-$CMD
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/armband/armband-project-jobs.yml b/jjb/armband/armband-project-jobs.yml
deleted file mode 100644
index f6840a008..000000000
--- a/jjb/armband/armband-project-jobs.yml
+++ /dev/null
@@ -1,94 +0,0 @@
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: armband
-
- project: '{name}'
-
- installer: 'fuel'
-
- jobs:
- - 'armband-{installer}-build-daily-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - danube:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-- job-template:
- name: 'armband-{installer}-build-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-enea-defaults'
- - '{installer}-defaults'
- - armband-project-parameter:
- gs-pathname: '{gs-pathname}'
-
- scm:
- - git-scm
-
- triggers:
- - pollscm:
- cron: '0 H/4 * * *'
-
- wrappers:
- - timeout:
- timeout: 360
- fail: true
-
- builders:
- - shell:
- !include-raw-escape: ./build.sh
- - shell:
- !include-raw-escape: ./upload-artifacts.sh
-
- publishers:
- - email:
- recipients: armband@enea.com
-
-########################
-# parameter macros
-########################
-- parameter:
- name: armband-project-parameter
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/cache/$INSTALLER_TYPE
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
- - choice:
- name: FORCE_BUILD
- choices:
- - 'false'
- - 'true'
- description: 'Force build even if there are no changes in the armband repo. Default false'
diff --git a/jjb/armband/armband-verify-jobs.yml b/jjb/armband/armband-verify-jobs.yml
index 567456d9b..c43dc7f82 100644
--- a/jjb/armband/armband-verify-jobs.yml
+++ b/jjb/armband/armband-verify-jobs.yml
@@ -12,22 +12,20 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
- - danube:
+ - euphrates:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: false
+ disabled: true
#####################################
# patch verification phases
#####################################
phase:
- 'basic':
- slave-label: 'opnfv-build-enea'
- - 'build':
- slave-label: 'opnfv-build-enea'
+ slave-label: 'armband-virtual'
- 'deploy-virtual':
- slave-label: 'opnfv-build-enea'
+ slave-label: 'armband-virtual'
- 'smoke-test':
- slave-label: 'opnfv-build-enea'
+ slave-label: 'armband-virtual'
#####################################
# jobs
#####################################
@@ -52,6 +50,11 @@
enabled: true
max-total: 4
option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'fuel-os-.*?-virtual-daily-.*'
+ block-level: 'NODE'
scm:
- git-scm-gerrit
@@ -96,8 +99,9 @@
- project-parameter:
project: '{project}'
branch: '{branch}'
- - 'opnfv-build-enea-defaults'
- - 'armband-verify-defaults':
+ - 'armband-virtual-defaults':
+ installer: '{installer}'
+ - '{installer}-defaults':
gs-pathname: '{gs-pathname}'
builders:
@@ -118,20 +122,6 @@
kill-phase-on: FAILURE
abort-all-job: true
- multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'armband-verify-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
name: deploy-virtual
condition: SUCCESSFUL
projects:
@@ -171,7 +161,8 @@
- logrotate-default
- throttle:
enabled: true
- max-total: 6
+ max-total: 2
+ max-per-node: 1
option: 'project'
- build-blocker:
use-build-blocker: true
@@ -193,8 +184,9 @@
project: '{project}'
branch: '{branch}'
- '{slave-label}-defaults'
- - '{installer}-defaults'
- - 'armband-verify-defaults':
+ - 'armband-virtual-defaults':
+ installer: '{installer}'
+ - '{installer}-defaults':
gs-pathname: '{gs-pathname}'
builders:
@@ -213,20 +205,10 @@
echo "Not activated!"
- builder:
- name: 'armband-verify-build-macro'
- builders:
- - shell:
- !include-raw: ./build.sh
- - shell:
- !include-raw: ./armband-workspace-cleanup.sh
-
-- builder:
name: 'armband-verify-deploy-virtual-macro'
builders:
- - shell: |
- #!/bin/bash
-
- echo "Not activated!"
+ - shell:
+ !include-raw: ../fuel/fuel-deploy.sh
- builder:
name: 'armband-verify-smoke-test-macro'
@@ -235,21 +217,3 @@
#!/bin/bash
echo "Not activated!"
-#####################################
-# parameter macros
-#####################################
-- parameter:
- name: 'armband-verify-defaults'
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/cache/$INSTALLER_TYPE
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/armband/armband-workspace-cleanup.sh b/jjb/armband/armband-workspace-cleanup.sh
deleted file mode 100755
index d8948c7a0..000000000
--- a/jjb/armband/armband-workspace-cleanup.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# delete the $WORKSPACE to open some space
-/bin/rm -rf $WORKSPACE
diff --git a/jjb/armband/build.sh b/jjb/armband/build.sh
deleted file mode 100755
index a71cf1112..000000000
--- a/jjb/armband/build.sh
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# Copyright (c) 2016 Enea AB.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-export TERM="vt220"
-
-echo "Host info: $(hostname) $(hostname -I)"
-
-cd $WORKSPACE
-
-# Armband requires initializing git submodules (e.g. for Fuel's clean_cache.sh)
-make submodules-init
-
-# remove the expired items from cache
-test -f $WORKSPACE/ci/clean_cache.sh && $WORKSPACE/ci/clean_cache.sh $CACHE_DIRECTORY
-
-LATEST_ISO_PROPERTIES=$WORKSPACE/latest.iso.properties
-if [[ "$JOB_NAME" =~ "daily" ]]; then
- # check to see if we already have an artifact on artifacts.opnfv.org
- # for this commit during daily builds
- echo "Checking to see if we already built and stored Armband Fuel ISO for this commit"
-
- curl -s -o $LATEST_ISO_PROPERTIES http://$GS_URL/latest.properties 2>/dev/null
-fi
-
-# get metadata of latest ISO
-if grep -q OPNFV_GIT_SHA1 $LATEST_ISO_PROPERTIES 2>/dev/null; then
- LATEST_ISO_SHA1=$(grep OPNFV_GIT_SHA1 $LATEST_ISO_PROPERTIES | cut -d'=' -f2)
- LATEST_ISO_URL=$(grep OPNFV_ARTIFACT_URL $LATEST_ISO_PROPERTIES | cut -d'=' -f2)
-else
- LATEST_ISO_SHA1=none
-fi
-
-# get current SHA1
-CURRENT_SHA1=$(git rev-parse HEAD)
-
-# set FORCE_BUILD to false for non-daily builds
-FORCE_BUILD=${FORCE_BUILD:-false}
-
-if [[ "$CURRENT_SHA1" == "$LATEST_ISO_SHA1" && "$FORCE_BUILD" == "false" ]]; then
- echo "***************************************************"
- echo " An ISO has already been built for this commit"
- echo " $LATEST_ISO_URL"
- echo "***************************************************"
-else
- echo "This commit has not been built yet or forced build! Proceeding with the build."
- /bin/rm -f $LATEST_ISO_PROPERTIES
- echo
-fi
-
-# log info to console
-echo "Starting the build of Armband $INSTALLER_TYPE. This could take some time..."
-echo "-----------------------------------------------------------"
-echo
-
-# create the cache directory if it doesn't exist
-mkdir -p $CACHE_DIRECTORY
-
-# set OPNFV_ARTIFACT_VERSION
-if [[ "$JOB_NAME" =~ "merge" ]]; then
- echo "Building Fuel ISO for a merged change"
- export OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
- echo "Not supported"
- exit 1
-else
- export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-fi
-
-NOCACHE_PATTERN="verify: no-cache"
-if [[ "$JOB_NAME" =~ "verify" && "$GERRIT_CHANGE_COMMIT_MESSAGE" =~ "$NOCACHE_PATTERN" ]]; then
- echo "The cache will not be used for this build!"
- NOCACHE_ARG="-f P"
-fi
-NOCACHE_ARG=${NOCACHE_ARG:-}
-
-# start the build
-cd $WORKSPACE/ci
-./build.sh -v $OPNFV_ARTIFACT_VERSION $NOCACHE_ARG -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY
-
-# list the build artifacts
-ls -al $BUILD_DIRECTORY
-
-# save information regarding artifact into file
-(
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
- echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
-) > $WORKSPACE/opnfv.properties
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/armband/upload-artifacts.sh b/jjb/armband/upload-artifacts.sh
deleted file mode 100755
index 97987e2c5..000000000
--- a/jjb/armband/upload-artifacts.sh
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o pipefail
-
-# configurable environment variables:
-# ISOSTORE (/iso_mount/opnfv_ci)
-
-# check if we built something
-if [ -f $WORKSPACE/.noupload ]; then
- echo "Nothing new to upload. Exiting."
- /bin/rm -f $WORKSPACE/.noupload
- exit 0
-fi
-
-# source the opnfv.properties to get ARTIFACT_VERSION
-source $WORKSPACE/opnfv.properties
-
-
-# storing ISOs for verify & merge jobs will be done once we get the disk array
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
- # store ISO locally on NFS first
- ISOSTORE=${ISOSTORE:-/iso_mount/opnfv_ci}
- if [[ -d "$ISOSTORE" ]]; then
- ISOSTORE=${ISOSTORE}/${BRANCH##*/}
- mkdir -p $ISOSTORE
-
- # remove all but most recent 3 ISOs first to keep iso_mount clean & tidy
- cd $ISOSTORE
- ls -tp | grep -v '/' | tail -n +4 | xargs -d '\n' /bin/rm -f --
-
- # store ISO
- echo "Storing latest ISO in local storage"
- touch .storing
- /bin/cp -f $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso \
- $ISOSTORE/opnfv-$OPNFV_ARTIFACT_VERSION.iso
- rm .storing
- fi
-fi
-
-# log info to console
-echo "Uploading armband artifacts. This could take some time..."
-echo
-
-echo "Started at $(date)"
-cd $WORKSPACE
-# upload artifact and additional files to google storage
-gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1
-gsutil cp $WORKSPACE/opnfv.properties \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
- gsutil cp $WORKSPACE/opnfv.properties \
- gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
-elif [[ "$JOB_NAME" =~ "merge" ]]; then
- echo "Uploaded Armband Fuel ISO for a merged change"
-fi
-echo "Ended at $(date)"
-
-gsutil -m setmeta \
- -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/latest.properties \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1
-
-gsutil -m setmeta \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1
-
-# disabled errexit due to gsutil setmeta complaints
-# BadRequestException: 400 Invalid argument
-# check if we uploaded the file successfully to see if things are fine
-gsutil ls gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1
-if [[ $? -ne 0 ]]; then
- echo "Problem while uploading artifact!"
- echo "Check log $WORKSPACE/gsutil.iso.log on the machine where this build is done."
- exit 1
-fi
-
-echo "Done!"
-echo
-echo "--------------------------------------------------------"
-echo
-echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
-echo
-echo "--------------------------------------------------------"
-echo
diff --git a/jjb/auto/auto.yml b/jjb/auto/auto.yml
new file mode 100644
index 000000000..fefa37626
--- /dev/null
+++ b/jjb/auto/auto.yml
@@ -0,0 +1,56 @@
+---
+- project:
+ name: auto
+
+ project: '{name}'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+
+ jobs:
+ - 'auto-verify-{stream}'
+
+- job-template:
+ name: 'auto-verify-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-defaults'
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**|.gitignore'
+
+ builders:
+ - shell: |
+ echo "Nothing to verify!"
diff --git a/jjb/barometer/barometer.yml b/jjb/barometer/barometer.yml
index 2d3e972f8..8a5bea02e 100644
--- a/jjb/barometer/barometer.yml
+++ b/jjb/barometer/barometer.yml
@@ -1,3 +1,4 @@
+---
###################################################
# All the jobs except verify have been removed!
# They will only be enabled on request by projects!
@@ -8,19 +9,19 @@
project: '{name}'
jobs:
- - 'barometer-verify-{stream}'
- - 'barometer-merge-{stream}'
- - 'barometer-daily-{stream}'
+ - 'barometer-verify-{stream}'
+ - 'barometer-merge-{stream}'
+ - 'barometer-daily-{stream}'
stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - danube:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+ - danube:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'barometer-verify-{stream}'
@@ -28,44 +29,44 @@
disabled: '{obj:disabled}'
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- - git-scm-gerrit
+ - git-scm-gerrit
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**|.gitignore'
builders:
- - shell: |
- pwd
- cd src
- make clobber
- make
+ - shell: |
+ pwd
+ cd src
+ make clobber
+ make
- job-template:
name: 'barometer-merge-{stream}'
@@ -77,46 +78,46 @@
concurrent: true
properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 3
- max-per-node: 2
- option: 'project'
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 3
+ max-per-node: 2
+ option: 'project'
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- - git-scm
+ - git-scm
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - change-merged-event
+ - comment-added-contains-event:
+ comment-contains-value: 'remerge'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**'
builders:
- - shell: |
- pwd
- cd src
- make clobber
- make
+ - shell: |
+ pwd
+ cd src
+ make clobber
+ make
- job-template:
name: 'barometer-daily-{stream}'
@@ -128,27 +129,27 @@
concurrent: false
properties:
- - logrotate-default
+ - logrotate-default
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - barometer-project-parameter:
- gs-pathname: '{gs-pathname}'
- - 'opnfv-build-centos-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - barometer-project-parameter:
+ gs-pathname: '{gs-pathname}'
+ - 'opnfv-build-centos-defaults'
scm:
- - git-scm
+ - git-scm
triggers:
- - timed: '@midnight'
+ - timed: '@midnight'
builders:
- - shell:
- !include-raw-escape: ./barometer-build.sh
- - shell:
- !include-raw-escape: ./barometer-upload-artifact.sh
+ - shell:
+ !include-raw-escape: ./barometer-build.sh
+ - shell:
+ !include-raw-escape: ./barometer-upload-artifact.sh
########################
# parameter macros
@@ -156,7 +157,7 @@
- parameter:
name: barometer-project-parameter
parameters:
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "URL to Google Storage."
+ - string:
+ name: GS_URL
+ default: '$GS_BASE{gs-pathname}'
+ description: "URL to Google Storage."
diff --git a/jjb/bottlenecks/bottlenecks-ci-jobs.yml b/jjb/bottlenecks/bottlenecks-ci-jobs.yml
index c56ca19e9..a8654d0cf 100644
--- a/jjb/bottlenecks/bottlenecks-ci-jobs.yml
+++ b/jjb/bottlenecks/bottlenecks-ci-jobs.yml
@@ -1,3 +1,4 @@
+---
####################################
# job configuration for bottlenecks
####################################
@@ -6,77 +7,75 @@
project: 'bottlenecks'
-#--------------------------------
-# BRANCH ANCHORS
-#--------------------------------
+ # -------------------------------
+ # BRANCH ANCHORS
+ # -------------------------------
master: &master
- stream: master
- branch: '{stream}'
- #This is used for common project file storage
- gs-pathname: ''
- #This is used for different test suite dependent packages storage
- gs-packagepath: '/{suite}'
- #docker tag used for version control
- docker-tag: 'latest'
+ stream: master
+ branch: '{stream}'
+ # This is used for common project file storage
+ gs-pathname: ''
+ # This is used for different test suite dependent packages storage
+ gs-packagepath: '/{suite}'
+ # docker tag used for version control
+ docker-tag: 'latest'
danube: &danube
- stream: danube
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- gs-packagepath: '/{stream}/{suite}'
- docker-tag: 'stable'
-#--------------------------------
-# POD, INSTALLER, AND BRANCH MAPPING
-#--------------------------------
-# Installers using labels
-# CI PODs
-# This section should only contain the installers
-# that have been switched using labels for slaves
-#--------------------------------
+ stream: danube
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ gs-packagepath: '/{stream}/{suite}'
+ docker-tag: 'stable'
+ # -------------------------------
+ # POD, INSTALLER, AND BRANCH MAPPING
+ # -------------------------------
+ # Installers using labels
+ # CI PODs
+ # This section should only contain the installers
+ # that have been switched using labels for slaves
+ # -------------------------------
pod:
-#compass CI PODs
- - baremetal:
- slave-label: compass-baremetal
- installer: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - virtual:
- slave-label: compass-virtual
- installer: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - baremetal:
- slave-label: compass-baremetal
- installer: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *danube
- - virtual:
- slave-label: compass-virtual
- installer: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *danube
-
-#--------------------------------
-# None-CI PODs
-#--------------------------------
- # - orange-pod2:
- # slave-label: '{pod}'
- # installer: joid
- # auto-trigger-name: 'daily-trigger-disabled'
- # <<: *danube
- # - orange-pod2:
- # slave-label: '{pod}'
- # installer: joid
- # auto-trigger-name: 'daily-trigger-disabled'
- # <<: *master
-#--------------------------------------------
+ # compass CI PODs
+ - baremetal:
+ slave-label: compass-baremetal-master
+ installer: compass
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
+ - virtual:
+ slave-label: compass-virtual-master
+ installer: compass
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
+ - baremetal:
+ slave-label: compass-baremetal-branch
+ installer: compass
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *danube
+ - virtual:
+ slave-label: compass-virtual-branch
+ installer: compass
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *danube
+
+ # -------------------------------
+ # Non-CI PODs
+ # -------------------------------
+ # - orange-pod2:
+ # slave-label: '{pod}'
+ # installer: joid
+ # auto-trigger-name: 'daily-trigger-disabled'
+ # <<: *danube
+ # - orange-pod2:
+ # slave-label: '{pod}'
+ # installer: joid
+ # auto-trigger-name: 'daily-trigger-disabled'
+ # <<: *master
+ # -------------------------------------------
suite:
- - 'rubbos'
- - 'vstf'
- - 'posca_stress_traffic'
- - 'posca_stress_ping'
+ - 'posca_stress_traffic'
+ - 'posca_stress_ping'
jobs:
- - 'bottlenecks-{installer}-{suite}-{pod}-daily-{stream}'
+ - 'bottlenecks-{installer}-{suite}-{pod}-daily-{stream}'
################################
# job templates
@@ -85,52 +84,53 @@
name: 'bottlenecks-{installer}-{suite}-{pod}-daily-{stream}'
wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: 180
- abort: true
+ - build-name:
+ name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+ - timeout:
+ timeout: 180
+ abort: true
triggers:
- - '{auto-trigger-name}'
+ - '{auto-trigger-name}'
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-defaults'
- - '{installer}-defaults'
- - 'bottlenecks-params-{slave-label}'
- - string:
- name: REPO_DIR
- default: "/home/opnfv/bottlenecks"
- description: "Directory where the repository is cloned"
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl_l2-nofeature-ha'
- - string:
- name: GERRIT_REFSPEC_DEBUG
- default: 'true'
- description: "Gerrit refspec for debug."
- - string:
- name: SUITE_NAME
- default: '{suite}'
- description: "test suite name."
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: "docker image tag used for version control"
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - '{slave-label}-defaults'
+ - '{installer}-defaults'
+ - 'bottlenecks-params-{slave-label}'
+ - string:
+ name: REPO_DIR
+ default: "/home/opnfv/bottlenecks"
+ description: "Directory where the repository is cloned"
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-odl_l2-nofeature-ha'
+ - string:
+ name: GERRIT_REFSPEC_DEBUG
+ default: 'true'
+ description: "Gerrit refspec for debug."
+ - string:
+ name: SUITE_NAME
+ default: '{suite}'
+ description: "test suite name."
+ - string:
+ name: DOCKER_TAG
+ default: '{docker-tag}'
+ description: "docker image tag used for version control"
scm:
- - git-scm
+ - git-scm
builders:
- - 'bottlenecks-env-cleanup'
- - 'bottlenecks-run-suite'
+ - 'bottlenecks-env-cleanup'
+ - 'bottlenecks-run-suite'
publishers:
- - email:
- recipients: hongbo.tianhongbo@huawei.com matthew.lijun@huawei.com liangqi1@huawei.com sunshine.wang@huawei.com
+ - email:
+ recipients: gabriel.yuyang@huawei.com, liyin11@huawei.com
+ - email-jenkins-admins-on-failure
########################
# builder macros
@@ -138,39 +138,47 @@
- builder:
name: bottlenecks-env-cleanup
builders:
- - shell:
- !include-raw: ./bottlenecks-cleanup.sh
+ - shell:
+ !include-raw: ./bottlenecks-cleanup.sh
- builder:
name: bottlenecks-run-suite
builders:
- - shell:
- !include-raw: ./bottlenecks-run-suite.sh
+ - shell:
+ !include-raw: ./bottlenecks-run-suite.sh
####################
# parameter macros
####################
- parameter:
- name: 'bottlenecks-params-compass-baremetal'
+ name: 'bottlenecks-params-compass-baremetal-master'
parameters:
- - string:
- name: BOTTLENECKS_DB_TARGET
- default: '104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
+ - string:
+ name: BOTTLENECKS_DB_TARGET
+ default: 'http://testresults.opnfv.org/test/api/v1/results'
+ description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'bottlenecks-params-compass-virtual'
+ name: 'bottlenecks-params-compass-virtual-master'
parameters:
- - string:
- name: BOTTLENECKS_DB_TARGET
- default: ''
- description: 'Arguments to use in order to choose the backend DB'
+ - string:
+ name: BOTTLENECKS_DB_TARGET
+ default: 'http://testresults.opnfv.org/test/api/v1/results'
+ description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'bottlenecks-params-orange-pod2'
+ name: 'bottlenecks-params-compass-baremetal-branch'
parameters:
- - string:
- name: BOTTLENECKS_DB_TARGET
- default: '104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
+ - string:
+ name: BOTTLENECKS_DB_TARGET
+ default: 'http://testresults.opnfv.org/test/api/v1/results'
+ description: 'Arguments to use in order to choose the backend DB'
+
+- parameter:
+ name: 'bottlenecks-params-compass-virtual-branch'
+ parameters:
+ - string:
+ name: BOTTLENECKS_DB_TARGET
+ default: 'http://testresults.opnfv.org/test/api/v1/results'
+ description: 'Arguments to use in order to choose the backend DB'
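For reference, the job and parameter names above are wired together through the '{slave-label}' substitution; a worked example for the first pod/suite combination (everything follows from the templates above, nothing here is new configuration):

    # pod: baremetal, slave-label: compass-baremetal-master, suite: posca_stress_traffic, stream: master
    #   job name        -> bottlenecks-compass-posca_stress_traffic-baremetal-daily-master
    #   parameter macro -> bottlenecks-params-compass-baremetal-master
    #   BOTTLENECKS_DB_TARGET defaults to http://testresults.opnfv.org/test/api/v1/results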
diff --git a/jjb/bottlenecks/bottlenecks-cleanup.sh b/jjb/bottlenecks/bottlenecks-cleanup.sh
index 04e620c7f..d0e2088c7 100644
--- a/jjb/bottlenecks/bottlenecks-cleanup.sh
+++ b/jjb/bottlenecks/bottlenecks-cleanup.sh
@@ -10,6 +10,7 @@
#clean up correlated dockers and their images
bash $WORKSPACE/docker/docker_cleanup.sh -d bottlenecks --debug
+bash $WORKSPACE/docker/docker_cleanup.sh -d Bottlenecks --debug
bash $WORKSPACE/docker/docker_cleanup.sh -d yardstick --debug
bash $WORKSPACE/docker/docker_cleanup.sh -d kibana --debug
bash $WORKSPACE/docker/docker_cleanup.sh -d elasticsearch --debug
diff --git a/jjb/bottlenecks/bottlenecks-project-jobs.yml b/jjb/bottlenecks/bottlenecks-project-jobs.yml
index 5dced2aad..c7c9b4208 100644
--- a/jjb/bottlenecks/bottlenecks-project-jobs.yml
+++ b/jjb/bottlenecks/bottlenecks-project-jobs.yml
@@ -1,3 +1,4 @@
+---
###################################################
# Non-ci jobs for Bottlenecks project
# They will only be enabled on request by projects!
@@ -8,29 +9,29 @@
project: 'bottlenecks'
jobs:
- - 'bottlenecks-verify-{stream}'
- - 'bottlenecks-merge-{stream}'
- - 'bottlenecks-{suite}-upload-artifacts-{stream}'
+ - 'bottlenecks-verify-{stream}'
+ - 'bottlenecks-merge-{stream}'
+ - 'bottlenecks-{suite}-upload-artifacts-{stream}'
stream:
- - master:
- branch: '{stream}'
- #This is used for common project file storage
- gs-pathname: ''
- #This is used for different test suite dependent packages storage
- gs-packagepath: '/{suite}'
- disabled: false
- - danube:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- gs-packagepath: '/{stream}/{suite}'
- disabled: false
+ - master:
+ branch: '{stream}'
+ # This is used for common project file storage
+ gs-pathname: ''
+ # This is used for different test suite dependent packages storage
+ gs-packagepath: '/{suite}'
+ disabled: false
+ - danube:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ gs-packagepath: '/{stream}/{suite}'
+ disabled: false
suite:
- - 'rubbos'
- - 'vstf'
- - 'posca_stress_traffic'
- - 'posca_stress_ping'
+ - 'rubbos'
+ - 'vstf'
+ - 'posca_stress_traffic'
+ - 'posca_stress_ping'
################################
# job templates
@@ -42,36 +43,37 @@
disabled: '{obj:disabled}'
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- - git-scm-gerrit
+ - git-scm-gerrit
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+
builders:
- #- bottlenecks-hello
- - bottlenecks-unit-tests
+ # - bottlenecks-hello
+ - bottlenecks-unit-tests
- job-template:
name: 'bottlenecks-merge-{stream}'
@@ -79,31 +81,31 @@
disabled: '{obj:disabled}'
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- - git-scm
+ - git-scm
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - change-merged-event
+ - comment-added-contains-event:
+ comment-contains-value: 'remerge'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
builders:
- - bottlenecks-hello
- #- bottlenecks-unit-tests
+ - bottlenecks-hello
+ # - bottlenecks-unit-tests
- job-template:
name: 'bottlenecks-{suite}-upload-artifacts-{stream}'
@@ -114,27 +116,27 @@
concurrent: true
properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 1
+ max-per-node: 1
+ option: 'project'
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - bottlenecks-parameter:
- gs-packagepath: '{gs-packagepath}'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
+ - bottlenecks-parameter:
+ gs-packagepath: '{gs-packagepath}'
scm:
- - git-scm
+ - git-scm
builders:
- - 'bottlenecks-builder-upload-artifact'
- - 'bottlenecks-workspace-cleanup'
+ - 'bottlenecks-builder-upload-artifact'
+ - 'bottlenecks-workspace-cleanup'
####################
# parameter macros
@@ -142,82 +144,82 @@
- parameter:
name: bottlenecks-parameter
parameters:
- - string:
- name: CACHE_DIR
- default: $WORKSPACE/cache{gs-packagepath}
- description: "the cache to store packages downloaded from public IP"
- - string:
- name: SUITE_URL
- default: gs://artifacts.opnfv.org/bottlenecks{gs-packagepath}
- description: "LF artifacts url for storage of bottlenecks packages"
- - string:
- name: PACKAGE_URL
- default: http://205.177.226.237:9999/bottlenecks{gs-packagepath}/
- description: "the url where we store the packages used for bottlenecks rubbos"
+ - string:
+ name: CACHE_DIR
+ default: $WORKSPACE/cache{gs-packagepath}
+ description: "the cache to store packages downloaded from public IP"
+ - string:
+ name: SUITE_URL
+ default: gs://artifacts.opnfv.org/bottlenecks{gs-packagepath}
+ description: "LF artifacts url for storage of bottlenecks packages"
+ - string:
+ name: PACKAGE_URL
+ default: http://205.177.226.237:9999/bottlenecks{gs-packagepath}/
+ description: "the url where we store the packages used for bottlenecks rubbos"
####################################
-#builders for bottlenecks project
+# builders for bottlenecks project
####################################
- builder:
name: bottlenecks-builder-upload-artifact
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
+ - shell: |
+ #!/bin/bash
+ set -o errexit
- echo "Bottlenecks: upload to artifacts from the public IP"
+ echo "Bottlenecks: upload to artifacts from the public IP"
- [[ -d $CACHE_DIR ]] || mkdir -p $CACHE_DIR
+ [[ -d $CACHE_DIR ]] || mkdir -p $CACHE_DIR
- for file in $(curl -s $PACKAGE_URL |
- grep href |
- sed 's/.*href="//' |
- sed 's/".*//' |
- grep '^[a-zA-Z].*'); do
- curl --connect-timeout 10 -o $CACHE_DIR/$file $PACKAGE_URL$file -v
- echo "bottlenecks: copy file $CACHE_DIR/$file to $SUITE_URL"
- gsutil cp $CACHE_DIR/$file $SUITE_URL
- done
+ for file in $(curl -s $PACKAGE_URL |
+ grep href |
+ sed 's/.*href="//' |
+ sed 's/".*//' |
+ grep '^[a-zA-Z].*'); do
+ curl --connect-timeout 10 -o $CACHE_DIR/$file $PACKAGE_URL$file -v
+ echo "bottlenecks: copy file $CACHE_DIR/$file to $SUITE_URL"
+ gsutil cp $CACHE_DIR/$file $SUITE_URL
+ done
- builder:
name: bottlenecks-workspace-cleanup
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
+ - shell: |
+ #!/bin/bash
+ set -o errexit
- echo "Bottlenecks: cleanup cache used for storage downloaded packages"
+ echo "Bottlenecks: cleanup cache used for storage downloaded packages"
- /bin/rm -rf $CACHE_DIR
+ /bin/rm -rf $CACHE_DIR
- builder:
name: bottlenecks-unit-tests
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o pipefail
- echo "Running unit tests..."
- cd $WORKSPACE
- virtualenv $WORKSPACE/bottlenecks_venv
- source $WORKSPACE/bottlenecks_venv/bin/activate
+ echo "Running unit tests..."
+ cd $WORKSPACE
+ virtualenv $WORKSPACE/bottlenecks_venv
+ source $WORKSPACE/bottlenecks_venv/bin/activate
- # install python packages
- easy_install -U setuptools
- easy_install -U pip
- pip install -r $WORKSPACE/requirements/verify.txt
+ # install python packages
+ easy_install -U setuptools
+ easy_install -U pip
+ pip install -r $WORKSPACE/requirements/verify.txt
- # unit tests
- /bin/bash $WORKSPACE/verify.sh
+ # unit tests
+ /bin/bash $WORKSPACE/verify.sh
- deactivate
+ deactivate
- builder:
name: bottlenecks-hello
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
+ - shell: |
+ #!/bin/bash
+ set -o errexit
- echo -e "Wellcome to Bottlenecks! \nMerge event is planning to support more functions! "
+ echo -e "Wellcome to Bottlenecks! \nMerge event is planning to support more functions! "
diff --git a/jjb/bottlenecks/bottlenecks-run-suite.sh b/jjb/bottlenecks/bottlenecks-run-suite.sh
index e6f8d1ba5..6d4d2d8d1 100644
--- a/jjb/bottlenecks/bottlenecks-run-suite.sh
+++ b/jjb/bottlenecks/bottlenecks-run-suite.sh
@@ -1,66 +1,148 @@
#!/bin/bash
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
#set -e
[[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
BOTTLENECKS_IMAGE=opnfv/bottlenecks
REPORT="True"
-if [[ $SUITE_NAME == rubbos || $SUITE_NAME == vstf ]]; then
- echo "Bottlenecks: to pull image $BOTTLENECKS_IMAGE:${DOCKER_TAG}"
- docker pull $BOTTLENECKS_IMAGE:$DOCKER_TAG >${redirect}
+RELENG_REPO=${WORKSPACE}/releng
+[ -d ${RELENG_REPO} ] && rm -rf ${RELENG_REPO}
+git clone https://gerrit.opnfv.org/gerrit/releng ${RELENG_REPO} >${redirect}
- echo "Bottlenecks: docker start running"
- opts="--privileged=true -id"
- envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
- -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NET=${EXTERNAL_NETWORK} \
- -e BOTTLENECKS_BRANCH=${BOTTLENECKS_BRANCH} -e GERRIT_REFSPEC_DEBUG=${GERRIT_REFSPEC_DEBUG} \
- -e BOTTLENECKS_DB_TARGET=${BOTTLENECKS_DB_TARGET} -e PACKAGE_URL=${PACKAGE_URL}"
- cmd="sudo docker run ${opts} ${envs} $BOTTLENECKS_IMAGE:${DOCKER_TAG} /bin/bash"
- echo "Bottlenecks: docker cmd running ${cmd}"
- ${cmd} >${redirect}
-
- echo "Bottlenecks: obtain docker id"
- container_id=$(docker ps | grep "$BOTTLENECKS_IMAGE:${DOCKER_TAG}" | awk '{print $1}' | head -1)
- if [ -z ${container_id} ]; then
- echo "Cannot find $BOTTLENECKS_IMAGE container ID ${container_id}. Please check if it exists."
- docker ps -a
+YARDSTICK_REPO=${WORKSPACE}/yardstick
+[ -d ${YARDSTICK_REPO} ] && rm -rf ${YARDSTICK_REPO}
+git clone https://gerrit.opnfv.org/gerrit/yardstick ${YARDSTICK_REPO} >${redirect}
+
+OPENRC=/tmp/admin_rc.sh
+OS_CACERT=/tmp/os_cacert
+
+BOTTLENECKS_CONFIG=/tmp
+
+if [[ $SUITE_NAME == *posca* ]]; then
+ POSCA_SCRIPT=/home/opnfv/bottlenecks/testsuites/posca
+
+ # Preparing OpenStack RC and Cacert files
+ echo "BOTTLENECKS INFO: fetching os credentials from $INSTALLER_TYPE"
+ if [[ $INSTALLER_TYPE == 'compass' ]]; then
+ if [[ ${BRANCH} == 'master' ]]; then
+ ${RELENG_REPO}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} -o ${OS_CACERT} >${redirect}
+ if [[ -f ${OS_CACERT} ]]; then
+ echo "BOTTLENECKS INFO: successfully fetching os_cacert for openstack: ${OS_CACERT}"
+ else
+ echo "BOTTLENECKS ERROR: couldn't find os_cacert file: ${OS_CACERT}, please check if the it's been properly provided."
+ exit 1
+ fi
+ else
+ ${RELENG_REPO}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} >${redirect}
+ fi
+ fi
+
+ if [[ -f ${OPENRC} ]]; then
+ echo "BOTTLENECKS INFO: openstack credentials path is ${OPENRC}"
+ if [[ $INSTALLER_TYPE == 'compass' && ${BRANCH} == 'master' ]]; then
+ echo "BOTTLENECKS INFO: writing ${OS_CACERT} to ${OPENRC}"
+ echo "export OS_CACERT=${OS_CACERT}" >> ${OPENRC}
+ fi
+ cat ${OPENRC}
+ else
+ echo "BOTTLENECKS ERROR: couldn't find openstack rc file: ${OPENRC}, please check if the it's been properly provided."
exit 1
fi
- echo "Bottlenecks: to prepare openstack environment"
- prepare_env="${REPO_DIR}/ci/prepare_env.sh"
- echo "Bottlenecks: docker cmd running: ${prepare_env}"
- sudo docker exec ${container_id} ${prepare_env}
-
- echo "Bottlenecks: to run testsuite ${SUITE_NAME}"
- run_testsuite="${REPO_DIR}/run_tests.sh -s ${SUITE_NAME}"
- echo "Bottlenecks: docker cmd running: ${run_testsuite}"
- sudo docker exec ${container_id} ${run_testsuite}
-else
- echo "Bottlenecks: installing POSCA docker-compose"
- if [ -d usr/local/bin/docker-compose ]; then
- rm -rf usr/local/bin/docker-compose
+ # Finding and creating POD description files from different deployments
+ ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
+
+ if [ "$INSTALLER_TYPE" == "fuel" ]; then
+ echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
+ sshpass -p r00tme sudo scp $ssh_options root@${INSTALLER_IP}:~/.ssh/id_rsa ${BOTTLENECKS_CONFIG}/id_rsa
+ fi
+
+ if [ "$INSTALLER_TYPE" == "apex" ]; then
+ echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
+ sudo scp $ssh_options stack@${INSTALLER_IP}:~/.ssh/id_rsa ${BOTTLENECKS_CONFIG}/id_rsa
fi
- curl -L https://github.com/docker/compose/releases/download/1.11.0/docker-compose-`uname -s`-`uname -m` > /usr/local/bin/docker-compose
- chmod +x /usr/local/bin/docker-compose
- echo "Bottlenecks: composing up dockers"
- cd $WORKSPACE
- docker-compose -f $WORKSPACE/docker/bottleneck-compose/docker-compose.yml up -d
+ set +e
- echo "Bottlenecks: running traffic stress/factor testing in posca testsuite "
- POSCA_SCRIPT=/home/opnfv/bottlenecks/testsuites/posca
+ sudo pip install virtualenv
+
+ cd ${RELENG_REPO}/modules
+ sudo virtualenv venv
+ source venv/bin/activate
+ sudo pip install -e ./ >/dev/null
+ sudo pip install netaddr
+
+ if [[ ${INSTALLER_TYPE} == compass ]]; then
+ options="-u root -p root"
+ elif [[ ${INSTALLER_TYPE} == fuel ]]; then
+ options="-u root -p r00tme"
+ elif [[ ${INSTALLER_TYPE} == apex ]]; then
+ options="-u stack -k /root/.ssh/id_rsa"
+ else
+ echo "Don't support to generate pod.yaml on ${INSTALLER_TYPE} currently."
+ fi
+
+ if [[ ${INSTALLER_TYPE} != compass ]]; then
+ cmd="sudo python ${RELENG_REPO}/utils/create_pod_file.py -t ${INSTALLER_TYPE} \
+ -i ${INSTALLER_IP} ${options} -f ${BOTTLENECKS_CONFIG}/pod.yaml \
+ -s ${BOTTLENECKS_CONFIG}/id_rsa"
+ echo ${cmd}
+ ${cmd}
+ else
+ cmd="sudo cp ${YARDSTICK_REPO}/etc/yardstick/nodes/compass_sclab_virtual/pod.yaml \
+ ${BOTTLENECKS_CONFIG}"
+ echo ${cmd}
+ ${cmd}
+ fi
+
+ deactivate
+
+ set -e
+
+ cd ${WORKSPACE}
+
+ if [ -f ${BOTTLENECKS_CONFIG}/pod.yaml ]; then
+ echo "FILE: ${BOTTLENECKS_CONFIG}/pod.yaml:"
+ cat ${BOTTLENECKS_CONFIG}/pod.yaml
+ else
+ echo "ERROR: cannot find file ${BOTTLENECKS_CONFIG}/pod.yaml. Please check if it is existing."
+ sudo ls -al ${BOTTLENECKS_CONFIG}
+ fi
+
+ # Pulling Bottlenecks docker and passing environment variables
+ echo "INFO: pulling Bottlenecks docker ${DOCKER_TAG}"
+ docker pull opnfv/bottlenecks:${DOCKER_TAG} >$redirect
+
+ opts="--privileged=true -id"
+ envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
+ -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NET=${EXTERNAL_NETWORK} \
+ -e BRANCH=${BRANCH} -e GERRIT_REFSPEC_DEBUG=${GERRIT_REFSPEC_DEBUG} \
+ -e BOTTLENECKS_DB_TARGET=${BOTTLENECKS_DB_TARGET} -e PACKAGE_URL=${PACKAGE_URL} \
+ -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} -e BUILD_TAG=${BUILD_TAG}"
+ docker_volume="-v /var/run/docker.sock:/var/run/docker.sock -v /tmp:/tmp"
+
+ cmd="docker run ${opts} ${envs} --name bottlenecks-load-master ${docker_volume} opnfv/bottlenecks:${DOCKER_TAG} /bin/bash"
+ echo "BOTTLENECKS INFO: running docker run commond: ${cmd}"
+ ${cmd} >$redirect
+ sleep 5
+
+ # Running test cases through Bottlenecks docker
if [[ $SUITE_NAME == posca_stress_traffic ]]; then
TEST_CASE=posca_factor_system_bandwidth
- echo "Bottlenecks: pulling tutum/influxdb for yardstick"
- docker pull tutum/influxdb:0.13
- sleep 5
- docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE $REPORT
+ testcase_cmd="docker exec bottlenecks-load-master python ${POSCA_SCRIPT}/../run_testsuite.py testcase $TEST_CASE $REPORT"
+ echo "BOTTLENECKS INFO: running test case ${TEST_CASE} with report indicator: ${testcase_cmd}"
+ ${testcase_cmd} >$redirect
elif [[ $SUITE_NAME == posca_stress_ping ]]; then
TEST_CASE=posca_factor_ping
- sleep 5
- docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE $REPORT
+ testcase_cmd="docker exec bottlenecks-load-master python ${POSCA_SCRIPT}/../run_testsuite.py testcase $TEST_CASE $REPORT"
+ echo "BOTTLENECKS INFO: running test case ${TEST_CASE} with report indicator: ${testcase_cmd}"
+ ${testcase_cmd} >$redirect
fi
-
- echo "Bottlenecks: cleaning up docker-compose images and dockers"
- docker-compose -f $WORKSPACE/docker/bottleneck-compose/docker-compose.yml down --rmi all
-fi \ No newline at end of file
+fi
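To make the container handling above concrete, with SUITE_NAME=posca_stress_ping and DOCKER_TAG=latest the script boils down to roughly the following (a simplified sketch; the installer and scenario values are illustrative and the real run passes the full -e list shown above):

    docker pull opnfv/bottlenecks:latest
    docker run --privileged=true -id \
        -e INSTALLER_TYPE=compass -e DEPLOY_SCENARIO=os-nosdn-nofeature-ha \
        --name bottlenecks-load-master \
        -v /var/run/docker.sock:/var/run/docker.sock -v /tmp:/tmp \
        opnfv/bottlenecks:latest /bin/bash
    docker exec bottlenecks-load-master \
        python /home/opnfv/bottlenecks/testsuites/posca/../run_testsuite.py testcase posca_factor_ping True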
diff --git a/jjb/calipso/calipso.yml b/jjb/calipso/calipso.yml
new file mode 100644
index 000000000..c5ba8eb1e
--- /dev/null
+++ b/jjb/calipso/calipso.yml
@@ -0,0 +1,61 @@
+- project:
+ name: calipso
+
+ project: '{name}'
+
+ jobs:
+ - 'calipso-verify-{stream}'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ disabled: false
+
+- job-template:
+ name: 'calipso-verify-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-defaults'
+
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+
+ builders:
+ - verify-unit-tests
+
+- builder:
+ name: verify-unit-tests
+ builders:
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o nounset
+ set -o pipefail
+ cd $WORKSPACE
+ PYTHONPATH=$PWD/app app/test/verify.sh
diff --git a/jjb/ci_gate_security/anteater-report-to-gerrit.sh b/jjb/ci_gate_security/anteater-report-to-gerrit.sh
index fc3018fb4..a749d1db4 100644
--- a/jjb/ci_gate_security/anteater-report-to-gerrit.sh
+++ b/jjb/ci_gate_security/anteater-report-to-gerrit.sh
@@ -12,14 +12,14 @@ if [[ -e securityaudit.log ]] ; then
if grep ERROR securityaudit.log; then
EXITSTATUS=1
fi
-
- cat securityaudit.log | awk -F"ERROR - " '{print $2}' > shortlog
-
+
+ awk -F"ERROR - " '{print $2}' securityaudit.log | sed -e "s/\"/\\\\\"/g;s/\'/\\\\/g"> shortlog
+
ssh -p 29418 gerrit.opnfv.org \
"gerrit review -p $GERRIT_PROJECT \
-m \"$(cat shortlog)\" \
$GERRIT_PATCHSET_REVISION \
--notify NONE"
-
+
exit $EXITSTATUS
fi
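The awk/sed pipeline above drops the "ERROR - " prefix and escapes quotes so the messages can be embedded safely in the gerrit review -m argument; a quick illustration on a hypothetical log line:

    echo 'ERROR - Matched "password" in settings.py' > securityaudit.log
    awk -F"ERROR - " '{print $2}' securityaudit.log | sed -e "s/\"/\\\\\"/g;s/\'/\\\\/g"
    # prints: Matched \"password\" in settings.py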
diff --git a/jjb/ci_gate_security/anteater-security-audit-weekly.sh b/jjb/ci_gate_security/anteater-security-audit-weekly.sh
index 436a173bc..11909636a 100644
--- a/jjb/ci_gate_security/anteater-security-audit-weekly.sh
+++ b/jjb/ci_gate_security/anteater-security-audit-weekly.sh
@@ -15,7 +15,7 @@ source $WORKSPACE/opnfv-projects.sh
for project in "${PROJECT_LIST[@]}"
do
- cmd="anteater --project testproj --path /home/opnfv/anteater/allrepos/$project"
+ cmd="/home/opnfv/venv/bin/anteater --project testproj --path /home/opnfv/anteater/allrepos/$project"
echo "Executing command inside container"
echo "$cmd"
echo "--------------------------------------------------------"
diff --git a/jjb/ci_gate_security/anteater-security-audit.sh b/jjb/ci_gate_security/anteater-security-audit.sh
index 9bd3cc34f..35f9354e0 100644
--- a/jjb/ci_gate_security/anteater-security-audit.sh
+++ b/jjb/ci_gate_security/anteater-security-audit.sh
@@ -1,5 +1,7 @@
#!/bin/bash
cd $WORKSPACE
+REPORTDIR='.reports'
+mkdir -p $REPORTDIR
echo "Generating patchset file to list changed files"
git diff HEAD^1 --name-only | sed "s#^#/home/opnfv/anteater/$PROJECT/#" > $WORKSPACE/patchset
echo "Changed files are"
@@ -7,7 +9,7 @@ echo "--------------------------------------------------------"
cat $WORKSPACE/patchset
echo "--------------------------------------------------------"
-vols="-v $WORKSPACE:/home/opnfv/anteater/$PROJECT"
+vols="-v $WORKSPACE:/home/opnfv/anteater/$PROJECT -v $WORKSPACE/$REPORTDIR:/home/opnfv/anteater/$REPORTDIR"
envs="-e PROJECT=$PROJECT"
echo "Pulling releng-anteater docker image"
diff --git a/jjb/ci_gate_security/opnfv-ci-gate-security.yml b/jjb/ci_gate_security/opnfv-ci-gate-security.yml
index e2f6ceb7b..33179537c 100644
--- a/jjb/ci_gate_security/opnfv-ci-gate-security.yml
+++ b/jjb/ci_gate_security/opnfv-ci-gate-security.yml
@@ -1,3 +1,4 @@
+---
# SPDX-license-identifier: Apache-2.0
########################
# Job configuration for opnfv-anteater (security audit)
@@ -9,14 +10,14 @@
project: anteaterfw
jobs:
- - 'opnfv-security-audit-verify-{stream}'
- - 'opnfv-security-audit-weekly-{stream}'
+ - 'opnfv-security-audit-verify-{stream}'
+ - 'opnfv-security-audit-weekly-{stream}'
stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
########################
# job templates
@@ -27,19 +28,19 @@
disabled: '{obj:disabled}'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'ericsson-build3'
- description: 'Slave label on Jenkins'
- - project-parameter:
- project: releng
- branch: '{branch}'
+ - label:
+ name: SLAVE_LABEL
+ default: 'ericsson-build3'
+ description: 'Slave label on Jenkins'
+ - project-parameter:
+ project: releng
+ branch: '{branch}'
triggers:
- - timed: '@weekly'
+ - timed: '@weekly'
builders:
- - anteater-security-audit-weekly
+ - anteater-security-audit-weekly
- job-template:
name: 'opnfv-security-audit-verify-{stream}'
@@ -47,72 +48,81 @@
disabled: '{obj:disabled}'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'ericsson-build3'
- description: 'Slave label on Jenkins'
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from Global Jenkins configuration in case if the stuff is done on none-LF HW."
+ - label:
+ name: SLAVE_LABEL
+ default: 'ericsson-build3'
+ description: 'Slave label on Jenkins'
+ - project-parameter:
+ project: $GERRIT_PROJECT
+ branch: '{branch}'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ # yamllint disable rule:line-length
+ description: "Used for overriding the GIT URL coming from Global Jenkins configuration in case if the stuff is done on none-LF HW."
+ # yamllint enable rule:line-length
scm:
- - git-scm-gerrit
+ - git-scm-gerrit
+ # yamllint disable rule:line-length
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: 'apex|armband|bamboo|barometer|bottlenecks|calipso|compass4nfv|conductor|cooper|functest|octopus|pharos|releng|sandbox'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '**'
- skip-vote:
- successful: true
- failed: true
- unstable: true
- notbuilt: true
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'REG_EXP'
+ project-pattern: 'apex|armband|bamboo|barometer|bottlenecks|calipso|compass4nfv|conductor|cooper|cperf|daisy|doctor|dovetail|dpacc|enfv|escalator|fds|fuel|functest|octopus|pharos|releng|sandbox|yardstick'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: ANT
+ pattern: '**'
+ skip-vote:
+ successful: true
+ failed: true
+ unstable: true
+ notbuilt: true
+ # yamllint enable rule:line-length
builders:
- - anteater-security-audit
- - report-security-audit-result-to-gerrit
+ - anteater-security-audit
+ - report-security-audit-result-to-gerrit
+ publishers:
+ - archive-artifacts:
+ artifacts: ".reports/*"
+
########################
# builder macros
########################
- builder:
name: anteater-security-audit
builders:
- - shell:
- !include-raw: ./anteater-security-audit.sh
+ - shell:
+ !include-raw: ./anteater-security-audit.sh
- builder:
name: report-security-audit-result-to-gerrit
builders:
- - shell:
- !include-raw: ./anteater-report-to-gerrit.sh
+ - shell:
+ !include-raw: ./anteater-report-to-gerrit.sh
+# yamllint disable rule:indentation
- builder:
name: anteater-security-audit-weekly
builders:
- - shell:
- !include-raw:
- - ./anteater-clone-all-repos.sh
- - ./anteater-security-audit-weekly.sh
-
+ - shell:
+ !include-raw:
+ - ./anteater-clone-all-repos.sh
+ - ./anteater-security-audit-weekly.sh
+# yamllint enable rule:indentation
diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml
index 24724912a..8b4a74bd1 100644
--- a/jjb/compass4nfv/compass-ci-jobs.yml
+++ b/jjb/compass4nfv/compass-ci-jobs.yml
@@ -10,12 +10,14 @@
stream: master
branch: '{stream}'
gs-pathname: ''
+ ppa-pathname: '/{stream}'
disabled: false
openstack-version: ocata
danube: &danube
stream: danube
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ ppa-pathname: '/{stream}'
disabled: false
openstack-version: newton
#--------------------------------
@@ -76,6 +78,33 @@
- 'os-nosdn-openo-ha':
disabled: false
auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ - 'os-odl-sfc-ha':
+ disabled: false
+ auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ - 'os-nosdn-dpdk-ha':
+ disabled: false
+ auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ - 'k8-nosdn-nofeature-ha':
+ disabled: false
+ auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ - 'os-nosdn-nofeature-noha':
+ disabled: false
+ auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ - 'os-odl_l3-nofeature-noha':
+ disabled: false
+ auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ - 'os-odl_l2-moon-noha':
+ disabled: false
+ auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ - 'os-nosdn-kvm-noha':
+ disabled: false
+ auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ - 'os-odl-sfc-noha':
+ disabled: false
+ auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
+ - 'os-nosdn-dpdk-noha':
+ disabled: false
+ auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
jobs:
@@ -118,6 +147,7 @@
- compass-ci-parameter:
installer: '{installer}'
gs-pathname: '{gs-pathname}'
+ ppa-pathname: '{ppa-pathname}'
- string:
name: DEPLOY_SCENARIO
default: '{scenario}'
@@ -182,11 +212,13 @@
WED: true
FRI: true
SUN: true
+ use-build-time: true
steps:
- trigger-builds:
- - project: 'dovetail-compass-{pod}-proposed_tests-master'
+ - project: 'dovetail-compass-{pod}-proposed_tests-{stream}'
current-parameters: false
- predefined-parameters:
+ predefined-parameters: |
+ DOCKER_TAG=latest
DEPLOY_SCENARIO={scenario}
block: true
same-node: true
@@ -206,12 +238,13 @@
- condition-kind: day-of-week
day-selector: select-days
days:
- TUE: true
- THU: true
+ TUES: true
+ THURS: true
SAT: true
+ use-build-time: true
steps:
- trigger-builds:
- - project: 'dovetail-compass-{pod}-proposed_tests-danube'
+ - project: 'dovetail-compass-{pod}-proposed_tests-{stream}'
current-parameters: false
predefined-parameters:
DEPLOY_SCENARIO={scenario}
@@ -225,14 +258,23 @@
condition-kind: and
condition-operands:
- condition-kind: regex-match
- regex: os-(nosdn|odl_l2)-(nofeature|bgpvpn)-ha
+ regex: os-(nosdn|odl_l2|onos|odl_l3)-nofeature-ha
label: '{scenario}'
- condition-kind: regex-match
regex: master
label: '{stream}'
steps:
- trigger-builds:
- - project: 'dovetail-compass-{pod}-proposed_tests-master'
+ - project: 'dovetail-compass-{pod}-proposed_tests-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
- job-template:
name: 'compass-deploy-{pod}-daily-{stream}'
@@ -269,6 +311,7 @@
- compass-ci-parameter:
installer: '{installer}'
gs-pathname: '{gs-pathname}'
+ ppa-pathname: '{ppa-pathname}'
- '{slave-label}-defaults'
- '{installer}-defaults'
@@ -278,10 +321,25 @@
builders:
- description-setter:
description: "POD: $NODE_NAME"
- - shell:
- !include-raw-escape: ./compass-download-artifact.sh
- - shell:
- !include-raw-escape: ./compass-deploy.sh
+ - conditional-step:
+ condition-kind: regex-match
+ regex: master
+ label: '{stream}'
+ steps:
+ - shell:
+ !include-raw-escape: ./compass-build.sh
+ - shell:
+ !include-raw-escape: ./compass-deploy.sh
+ - conditional-step:
+ condition-kind: regex-match
+ regex: danube
+ label: '{stream}'
+ steps:
+ - shell:
+ !include-raw-escape: ./compass-download-artifact.sh
+ - shell:
+ !include-raw-escape: ./compass-deploy.sh
+
########################
# parameter macros
@@ -297,10 +355,24 @@
name: GS_URL
default: '$GS_BASE{gs-pathname}'
description: "URL to Google Storage."
+ - string:
+ name: CACHE_DIRECTORY
+ default: "$HOME/opnfv/cache/$PROJECT{gs-pathname}"
+ description: "Directory where the cache to be used during the build is located."
+ - string:
+ name: PPA_REPO
+ default: "http://artifacts.opnfv.org/compass4nfv/package{ppa-pathname}"
+ - string:
+ name: PPA_CACHE
+ default: "$WORKSPACE/work/repo/"
########################
# trigger macros
########################
+
+#---------------------------
+# ha-baremetal-centos-master
+#---------------------------
- trigger:
name: 'compass-os-nosdn-nofeature-ha-baremetal-centos-master-trigger'
triggers:
@@ -312,7 +384,7 @@
- trigger:
name: 'compass-os-odl_l2-nofeature-ha-baremetal-centos-master-trigger'
triggers:
- - timed: '0 23 * * *'
+ - timed: ''
- trigger:
name: 'compass-os-odl_l3-nofeature-ha-baremetal-centos-master-trigger'
triggers:
@@ -320,15 +392,15 @@
- trigger:
name: 'compass-os-onos-nofeature-ha-baremetal-centos-master-trigger'
triggers:
- - timed: '0 7 * * *'
+ - timed: ''
- trigger:
name: 'compass-os-ocl-nofeature-ha-baremetal-centos-master-trigger'
triggers:
- - timed: '0 11 * * *'
+ - timed: ''
- trigger:
name: 'compass-os-onos-sfc-ha-baremetal-centos-master-trigger'
triggers:
- - timed: '0 3 * * *'
+ - timed: ''
- trigger:
name: 'compass-os-odl_l2-moon-ha-baremetal-centos-master-trigger'
triggers:
@@ -337,19 +409,62 @@
name: 'compass-os-nosdn-kvm-ha-baremetal-centos-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'compass-os-nosdn-dpdk-ha-baremetal-centos-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-odl-sfc-ha-baremetal-centos-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-k8-nosdn-nofeature-ha-baremetal-centos-master-trigger'
+ triggers:
+ - timed: ''
+
+#-----------------------------
+# noha-baremetal-centos-master
+#-----------------------------
+- trigger:
+ name: 'compass-os-nosdn-nofeature-noha-baremetal-centos-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-odl_l3-nofeature-noha-baremetal-centos-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-odl_l2-moon-noha-baremetal-centos-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-nosdn-kvm-noha-baremetal-centos-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-odl-sfc-noha-baremetal-centos-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-nosdn-dpdk-noha-baremetal-centos-master-trigger'
+ triggers:
+ - timed: ''
+#--------------------
+# ha-baremetal-master
+#--------------------
- trigger:
name: 'compass-os-nosdn-nofeature-ha-baremetal-master-trigger'
triggers:
- - timed: '0 2 * * *'
+ - timed: '0 20 * * *'
- trigger:
name: 'compass-os-nosdn-openo-ha-baremetal-master-trigger'
triggers:
- - timed: '0 3 * * *'
+ - timed: ''
- trigger:
name: 'compass-os-odl_l2-nofeature-ha-baremetal-master-trigger'
triggers:
- - timed: '0 22 * * *'
+ - timed: ''
- trigger:
name: 'compass-os-odl_l3-nofeature-ha-baremetal-master-trigger'
triggers:
@@ -357,24 +472,67 @@
- trigger:
name: 'compass-os-onos-nofeature-ha-baremetal-master-trigger'
triggers:
- - timed: '0 14 * * *'
+ - timed: ''
- trigger:
name: 'compass-os-ocl-nofeature-ha-baremetal-master-trigger'
triggers:
- - timed: '0 10 * * *'
+ - timed: ''
- trigger:
name: 'compass-os-onos-sfc-ha-baremetal-master-trigger'
triggers:
- - timed: '0 6 * * *'
+ - timed: ''
- trigger:
name: 'compass-os-odl_l2-moon-ha-baremetal-master-trigger'
triggers:
- - timed: ''
+ - timed: '0 12 * * *'
- trigger:
name: 'compass-os-nosdn-kvm-ha-baremetal-master-trigger'
triggers:
+ - timed: '0 14 * * *'
+- trigger:
+ name: 'compass-os-nosdn-dpdk-ha-baremetal-master-trigger'
+ triggers:
+ - timed: '0 16 * * *'
+- trigger:
+ name: 'compass-k8-nosdn-nofeature-ha-baremetal-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-odl-sfc-ha-baremetal-master-trigger'
+ triggers:
+ - timed: '0 10 * * *'
+
+#----------------------
+# noha-baremetal-master
+#----------------------
+- trigger:
+ name: 'compass-os-nosdn-kvm-noha-baremetal-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-nosdn-nofeature-noha-baremetal-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-odl_l3-nofeature-noha-baremetal-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-odl_l2-moon-noha-baremetal-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-odl-sfc-noha-baremetal-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-nosdn-dpdk-noha-baremetal-master-trigger'
+ triggers:
- timed: ''
+#--------------------
+# ha-baremetal-danube
+#--------------------
- trigger:
name: 'compass-os-nosdn-nofeature-ha-baremetal-danube-trigger'
triggers:
@@ -411,7 +569,50 @@
name: 'compass-os-nosdn-kvm-ha-baremetal-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'compass-os-nosdn-dpdk-ha-baremetal-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-k8-nosdn-nofeature-ha-baremetal-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-odl-sfc-ha-baremetal-danube-trigger'
+ triggers:
+ - timed: ''
+#----------------------
+# noha-baremetal-danube
+#----------------------
+- trigger:
+ name: 'compass-os-nosdn-kvm-noha-baremetal-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-nosdn-nofeature-noha-baremetal-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-odl_l3-nofeature-noha-baremetal-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-odl_l2-moon-noha-baremetal-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-odl-sfc-noha-baremetal-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-nosdn-dpdk-noha-baremetal-danube-trigger'
+ triggers:
+ - timed: ''
+
+#------------------
+# ha-virtual-master
+#------------------
- trigger:
name: 'compass-os-nosdn-nofeature-ha-virtual-master-trigger'
triggers:
@@ -419,11 +620,11 @@
- trigger:
name: 'compass-os-nosdn-openo-ha-virtual-master-trigger'
triggers:
- - timed: '0 22 * * *'
+ - timed: ''
- trigger:
name: 'compass-os-odl_l2-nofeature-ha-virtual-master-trigger'
triggers:
- - timed: '0 20 * * *'
+ - timed: ''
- trigger:
name: 'compass-os-odl_l3-nofeature-ha-virtual-master-trigger'
triggers:
@@ -431,24 +632,67 @@
- trigger:
name: 'compass-os-onos-nofeature-ha-virtual-master-trigger'
triggers:
- - timed: '0 18 * * *'
+ - timed: ''
- trigger:
name: 'compass-os-ocl-nofeature-ha-virtual-master-trigger'
triggers:
- - timed: '0 16 * * *'
+ - timed: ''
- trigger:
name: 'compass-os-onos-sfc-ha-virtual-master-trigger'
triggers:
- - timed: '0 15 * * *'
+ - timed: ''
- trigger:
name: 'compass-os-odl_l2-moon-ha-virtual-master-trigger'
triggers:
- - timed: '0 14 * * *'
+ - timed: '0 12 * * *'
- trigger:
name: 'compass-os-nosdn-kvm-ha-virtual-master-trigger'
triggers:
+ - timed: '0 13 * * *'
+- trigger:
+ name: 'compass-os-nosdn-dpdk-ha-virtual-master-trigger'
+ triggers:
+ - timed: '0 17 * * *'
+- trigger:
+ name: 'compass-k8-nosdn-nofeature-ha-virtual-master-trigger'
+ triggers:
- timed: ''
+- trigger:
+ name: 'compass-os-odl-sfc-ha-virtual-master-trigger'
+ triggers:
+ - timed: '0 16 * * *'
+
+#--------------------
+# noha-virtual-master
+#--------------------
+- trigger:
+ name: 'compass-os-nosdn-kvm-noha-virtual-master-trigger'
+ triggers:
+ - timed: '0 13 * * *'
+- trigger:
+ name: 'compass-os-nosdn-nofeature-noha-virtual-master-trigger'
+ triggers:
+ - timed: '0 14 * * *'
+- trigger:
+ name: 'compass-os-odl_l3-nofeature-noha-virtual-master-trigger'
+ triggers:
+ - timed: '0 15 * * *'
+- trigger:
+ name: 'compass-os-odl_l2-moon-noha-virtual-master-trigger'
+ triggers:
+ - timed: '0 18 * * *'
+- trigger:
+ name: 'compass-os-odl-sfc-noha-virtual-master-trigger'
+ triggers:
+ - timed: '0 20 * * *'
+- trigger:
+ name: 'compass-os-nosdn-dpdk-noha-virtual-master-trigger'
+ triggers:
+ - timed: '0 11 * * *'
+#------------------
+# ha-virtual-danube
+#------------------
- trigger:
name: 'compass-os-nosdn-nofeature-ha-virtual-danube-trigger'
triggers:
@@ -485,3 +729,43 @@
name: 'compass-os-nosdn-kvm-ha-virtual-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'compass-os-nosdn-dpdk-ha-virtual-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-odl-sfc-ha-virtual-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-k8-nosdn-nofeature-ha-virtual-danube-trigger'
+ triggers:
+ - timed: ''
+
+#--------------------
+# noha-virtual-danube
+#--------------------
+- trigger:
+ name: 'compass-os-nosdn-kvm-noha-virtual-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-nosdn-nofeature-noha-virtual-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-odl_l3-nofeature-noha-virtual-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-odl_l2-moon-noha-virtual-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-odl-sfc-noha-virtual-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'compass-os-nosdn-dpdk-noha-virtual-danube-trigger'
+ triggers:
+ - timed: ''
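For reference, the timed entries above (and throughout this file) use standard Jenkins cron notation — minute, hour, day-of-month, month, day-of-week — and an empty string simply disables the timer; for example:

    # '0 20 * * *'  -> once per day at 20:00 (Jenkins server time)
    # '0 12 * * *'  -> once per day at 12:00
    # ''            -> no timer; the job only runs when triggered some other way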
diff --git a/jjb/compass4nfv/compass-deploy.sh b/jjb/compass4nfv/compass-deploy.sh
index 2668ccdf8..9d4ae5175 100644
--- a/jjb/compass4nfv/compass-deploy.sh
+++ b/jjb/compass4nfv/compass-deploy.sh
@@ -6,24 +6,23 @@ echo "Starting the deployment on baremetal environment using $INSTALLER_TYPE. Th
echo "--------------------------------------------------------"
echo
-# source the properties file so we get OPNFV vars
-source $BUILD_DIRECTORY/latest.properties
-
-# echo the info about artifact that is used during the deployment
-echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
-
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
- # for none-merge deployments
- # checkout the commit that was used for building the downloaded artifact
- # to make sure the ISO and deployment mechanism uses same versions
- echo "Checking out $OPNFV_GIT_SHA1"
- git checkout $OPNFV_GIT_SHA1 --quiet
-fi
-
echo 1 > /proc/sys/vm/drop_caches
export CONFDIR=$WORKSPACE/deploy/conf
if [[ "$BRANCH" = 'stable/danube' ]]; then
+ # source the properties file so we get OPNFV vars
+ source $BUILD_DIRECTORY/latest.properties
+ # echo the info about artifact that is used during the deployment
+ echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
+
+ if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
+ # for none-merge deployments
+ # checkout the commit that was used for building the downloaded artifact
+ # to make sure the ISO and deployment mechanism uses same versions
+ echo "Checking out $OPNFV_GIT_SHA1"
+ git checkout $OPNFV_GIT_SHA1 --quiet
+ fi
+
export ISO_URL=file://$BUILD_DIRECTORY/compass.iso
else
export ISO_URL=file://$BUILD_DIRECTORY/compass.tar.gz
@@ -40,6 +39,8 @@ elif [[ "${DEPLOY_SCENARIO}" =~ "-onos" ]]; then
export NETWORK_CONF_FILE=network_onos.yml
elif [[ "${DEPLOY_SCENARIO}" =~ "-openo" ]]; then
export NETWORK_CONF_FILE=network_openo.yml
+elif [[ "${DEPLOY_SCENARIO}" =~ "-dpdk" ]]; then
+ export NETWORK_CONF_FILE=network_dpdk.yml
else
export NETWORK_CONF_FILE=network.yml
fi
@@ -51,6 +52,11 @@ fi
if [[ "$NODE_NAME" =~ "-virtual" ]]; then
export NETWORK_CONF=$CONFDIR/vm_environment/$NODE_NAME/${NETWORK_CONF_FILE}
export DHA_CONF=$CONFDIR/vm_environment/${DEPLOY_SCENARIO}.yml
+ if [[ "${DEPLOY_SCENARIO}" =~ "-moon-noha" ]]; then
+ export VIRT_NUMBER=3
+ elif [[ "${DEPLOY_SCENARIO}" =~ "-noha" ]]; then
+ export VIRT_NUMBER=2
+ fi
else
export INSTALL_NIC=eth1
export NETWORK_CONF=$CONFDIR/hardware_environment/$NODE_NAME/${NETWORK_CONF_FILE}
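As a worked example of the branching above (the scenario value is illustrative; the mappings follow directly from the script), a master-branch deploy on a *-virtual node with DEPLOY_SCENARIO=os-nosdn-dpdk-noha resolves to:

    # ISO_URL=file://$BUILD_DIRECTORY/compass.tar.gz   (non-danube branch, no latest.properties needed)
    # NETWORK_CONF_FILE=network_dpdk.yml               (scenario matches "-dpdk")
    # VIRT_NUMBER=2                                    (matches "-noha", not "-moon-noha")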
diff --git a/jjb/compass4nfv/compass-dovetail-jobs.yml b/jjb/compass4nfv/compass-dovetail-jobs.yml
index 67d1e4eee..101db8241 100644
--- a/jjb/compass4nfv/compass-dovetail-jobs.yml
+++ b/jjb/compass4nfv/compass-dovetail-jobs.yml
@@ -19,7 +19,7 @@
#------------------------------------
pod:
- baremetal:
- slave-label: compass-baremetal
+ slave-label: compass-baremetal-branch
os-version: 'xenial'
<<: *danube
#-----------------------------------
diff --git a/jjb/compass4nfv/compass-project-jobs.yml b/jjb/compass4nfv/compass-project-jobs.yml
index 59482459e..e612ef65f 100644
--- a/jjb/compass4nfv/compass-project-jobs.yml
+++ b/jjb/compass4nfv/compass-project-jobs.yml
@@ -21,14 +21,14 @@
disabled: false
jobs:
- - 'compass-build-iso-{stream}'
+ - '{installer}-build-daily-{stream}'
- 'compass-build-ppa-{stream}'
########################
# job templates
########################
- job-template:
- name: 'compass-build-iso-{stream}'
+ name: '{installer}-build-daily-{stream}'
disabled: '{obj:disabled}'
@@ -64,8 +64,7 @@
!include-raw-escape: ./compass-build.sh
- shell:
!include-raw-escape: ./compass-upload-artifact.sh
- - shell:
- !include-raw-escape: ./compass-workspace-cleanup.sh
+ - 'clean-workspace'
- job-template:
name: 'compass-build-ppa-{stream}'
diff --git a/jjb/compass4nfv/compass-verify-jobs.yml b/jjb/compass4nfv/compass-verify-jobs.yml
index e43f976b5..ee91e02da 100644
--- a/jjb/compass4nfv/compass-verify-jobs.yml
+++ b/jjb/compass4nfv/compass-verify-jobs.yml
@@ -29,7 +29,7 @@
os-version: 'xenial'
openstack-os-version: ''
- 'centos7':
- disabled: false
+ disabled: true
os-version: 'centos7'
openstack-os-version: ''
#####################################
@@ -157,16 +157,18 @@
condition: SUCCESSFUL
projects:
- name: 'functest-compass-virtual-suite-{stream}'
- current-parameters: true
- predefined-parameters:
+ current-parameters: false
+ predefined-parameters: |
FUNCTEST_SUITE_NAME=healthcheck
+ DEPLOY_SCENARIO=os-nosdn-nofeature-ha
node-parameters: true
kill-phase-on: NEVER
abort-all-job: true
- name: 'functest-compass-virtual-suite-{stream}'
- current-parameters: true
- predefined-parameters:
+ current-parameters: false
+ predefined-parameters: |
FUNCTEST_SUITE_NAME=vping_ssh
+ DEPLOY_SCENARIO=os-nosdn-nofeature-ha
node-parameters: true
kill-phase-on: NEVER
abort-all-job: true
diff --git a/jjb/compass4nfv/compass-workspace-cleanup.sh b/jjb/compass4nfv/compass-workspace-cleanup.sh
deleted file mode 100644
index 98201af9b..000000000
--- a/jjb/compass4nfv/compass-workspace-cleanup.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# delete everything that is in $WORKSPACE
-/bin/rm -rf $WORKSPACE \ No newline at end of file
diff --git a/jjb/openretriever/openretriever-project.yml b/jjb/container4nfv/container4nfv-project.yml
index 3bcfab6d3..3b29b36f8 100644
--- a/jjb/openretriever/openretriever-project.yml
+++ b/jjb/container4nfv/container4nfv-project.yml
@@ -3,12 +3,12 @@
# They will only be enabled on request by projects!
###################################################
- project:
- name: openretriever
+ name: container4nfv
project: '{name}'
jobs:
- - 'openretriever-verify-{stream}'
+ - 'container4nfv-verify-{stream}'
stream:
- master:
@@ -21,7 +21,7 @@
disabled: false
- job-template:
- name: 'openretriever-verify-{stream}'
+ name: 'container4nfv-verify-{stream}'
disabled: '{obj:disabled}'
diff --git a/jjb/cperf/cperf-ci-jobs.yml b/jjb/cperf/cperf-ci-jobs.yml
index dc209d644..fdd3509d1 100644
--- a/jjb/cperf/cperf-ci-jobs.yml
+++ b/jjb/cperf/cperf-ci-jobs.yml
@@ -1,3 +1,4 @@
+---
###################################
# job configuration for cperf
###################################
@@ -5,40 +6,39 @@
name: cperf-ci-jobs
project: cperf
-#--------------------------------
-# BRANCH ANCHORS
-#--------------------------------
+ # -------------------------------
+ # BRANCH ANCHORS
+ # -------------------------------
master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- docker-tag: 'latest'
+ stream: master
+ branch: '{stream}'
+ gs-pathname: ''
+ docker-tag: 'latest'
danube: &danube
- stream: danube
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- docker-tag: 'stable'
-
-#--------------------------------
-# POD, INSTALLER, AND BRANCH MAPPING
-#--------------------------------
+ stream: danube
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ docker-tag: 'stable'
+
+ # -------------------------------
+ # POD, INSTALLER, AND BRANCH MAPPING
+ # -------------------------------
pod:
-#--------------------------------
-# master
-#--------------------------------
- - intel-pod2:
- installer: apex
- <<: *master
- - intel-pod2:
- installer: apex
- <<: *danube
-#--------------------------------
+ # -------------------------------
+ # master
+ # -------------------------------
+ - intel-pod2:
+ installer: apex
+ <<: *master
+ - intel-pod2:
+ installer: apex
+ <<: *danube
testsuite:
- - 'daily'
+ - 'daily'
jobs:
- - 'cperf-{installer}-{pod}-{testsuite}-{stream}'
+ - 'cperf-{installer}-{pod}-{testsuite}-{stream}'
################################
# job template
@@ -49,35 +49,35 @@
concurrent: true
properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-per-node: 1
+ option: 'project'
wrappers:
- - build-name:
- name: '$BUILD_NUMBER Suite: $CPERF_SUITE_NAME Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: 400
- abort: true
+ - build-name:
+ name: '$BUILD_NUMBER Suite: $CPERF_SUITE_NAME Scenario: $DEPLOY_SCENARIO'
+ - timeout:
+ timeout: 400
+ abort: true
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{pod}-defaults'
- - '{installer}-defaults'
- - cperf-parameter:
- testsuite: '{testsuite}'
- gs-pathname: '{gs-pathname}'
- docker-tag: '{docker-tag}'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - '{pod}-defaults'
+ - '{installer}-defaults'
+ - cperf-parameter:
+ testsuite: '{testsuite}'
+ gs-pathname: '{gs-pathname}'
+ docker-tag: '{docker-tag}'
scm:
- - git-scm
+ - git-scm
builders:
- - 'cperf-{testsuite}-builder'
+ - 'cperf-{testsuite}-builder'
########################
# parameter macros
@@ -85,22 +85,22 @@
- parameter:
name: cperf-parameter
parameters:
- - string:
- name: CPERF_SUITE_NAME
- default: '{testsuite}'
- description: "Suite name to run"
- - string:
- name: GS_PATHNAME
- default: '{gs-pathname}'
- description: "Version directory where the opnfv documents will be stored in gs repository"
- - string:
- name: CI_DEBUG
- default: 'false'
- description: "Show debug output information"
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: 'Tag to pull docker image'
+ - string:
+ name: CPERF_SUITE_NAME
+ default: '{testsuite}'
+ description: "Suite name to run"
+ - string:
+ name: GS_PATHNAME
+ default: '{gs-pathname}'
+ description: "Version directory where the opnfv documents will be stored in gs repository"
+ - string:
+ name: CI_DEBUG
+ default: 'false'
+ description: "Show debug output information"
+ - string:
+ name: DOCKER_TAG
+ default: '{docker-tag}'
+ description: 'Tag to pull docker image'
########################
# trigger macros
@@ -112,79 +112,79 @@
- builder:
name: cperf-daily-builder
builders:
- - 'cperf-cleanup'
- - 'cperf-robot-cbench'
+ - 'cperf-cleanup'
+ - 'cperf-robot-cbench'
- builder:
name: cperf-robot-cbench
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o nounset
- set -o pipefail
- undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
- grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
- INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
-
- sudo scp -o StrictHostKeyChecking=no root@$INSTALLER_IP:/home/stack/overcloudrc /tmp/overcloudrc
- sudo chmod 755 /tmp/overcloudrc
- source /tmp/overcloudrc
-
- # robot suites need the ssh key to log in to controller nodes, so throwing it
- # in tmp, and mounting /tmp as $HOME as far as robot is concerned
- sudo rm -rf /tmp/.ssh
- sudo mkdir /tmp/.ssh
- sudo chmod 0700 /tmp/.ssh
- sudo scp -o StrictHostKeyChecking=no root@$INSTALLER_IP:/home/stack/.ssh/id_rsa /tmp/.ssh/
- sudo chown -R jenkins-ci:jenkins-ci /tmp/.ssh
- # done with sudo. jenkins-ci is the user from this point
- chmod 0600 /tmp/.ssh/id_rsa
-
- # cbench requires the openflow drop test feature to be installed.
- sshpass -p karaf ssh -o StrictHostKeyChecking=no \
- -o UserKnownHostsFile=/dev/null \
- -o LogLevel=error \
- -p 8101 karaf@$SDN_CONTROLLER_IP \
- feature:install odl-openflowplugin-flow-services-ui odl-openflowplugin-drop-test
-
- docker pull opnfv/cperf:$DOCKER_TAG
-
- robot_cmd="pybot -e exclude -L TRACE -d /tmp \
- -v ODL_SYSTEM_1_IP:${SDN_CONTROLLER_IP} \
- -v ODL_SYSTEM_IP:${SDN_CONTROLLER_IP} \
- -v BUNDLEFOLDER:/opt/opendaylight \
- -v RESTCONFPORT:8081 \
- -v USER_HOME:/tmp \
- -v USER:heat-admin \
- -v ODL_SYSTEM_USER:heat-admin \
- -v TOOLS_SYSTEM_IP:localhost \
- -v of_port:6653"
- robot_suite="/home/opnfv/repos/odl_test/csit/suites/openflowplugin/Performance/010_Cbench.robot"
-
- docker run -i -v /tmp:/tmp opnfv/cperf:$DOCKER_TAG ${robot_cmd} ${robot_suite}
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o nounset
+ set -o pipefail
+ undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
+ grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
+ INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
+
+ sudo scp -o StrictHostKeyChecking=no root@$INSTALLER_IP:/home/stack/overcloudrc /tmp/overcloudrc
+ sudo chmod 755 /tmp/overcloudrc
+ source /tmp/overcloudrc
+
+ # robot suites need the ssh key to log in to controller nodes, so throwing it
+ # in tmp, and mounting /tmp as $HOME as far as robot is concerned
+ sudo rm -rf /tmp/.ssh
+ sudo mkdir /tmp/.ssh
+ sudo chmod 0700 /tmp/.ssh
+ sudo scp -o StrictHostKeyChecking=no root@$INSTALLER_IP:/home/stack/.ssh/id_rsa /tmp/.ssh/
+ sudo chown -R jenkins-ci:jenkins-ci /tmp/.ssh
+ # done with sudo. jenkins-ci is the user from this point
+ chmod 0600 /tmp/.ssh/id_rsa
+
+ # cbench requires the openflow drop test feature to be installed.
+ sshpass -p karaf ssh -o StrictHostKeyChecking=no \
+ -o UserKnownHostsFile=/dev/null \
+ -o LogLevel=error \
+ -p 8101 karaf@$SDN_CONTROLLER_IP \
+ feature:install odl-openflowplugin-flow-services-ui odl-openflowplugin-drop-test
+
+ docker pull opnfv/cperf:$DOCKER_TAG
+
+ robot_cmd="pybot -e exclude -L TRACE -d /tmp \
+ -v ODL_SYSTEM_1_IP:${SDN_CONTROLLER_IP} \
+ -v ODL_SYSTEM_IP:${SDN_CONTROLLER_IP} \
+ -v BUNDLEFOLDER:/opt/opendaylight \
+ -v RESTCONFPORT:8081 \
+ -v USER_HOME:/tmp \
+ -v USER:heat-admin \
+ -v ODL_SYSTEM_USER:heat-admin \
+ -v TOOLS_SYSTEM_IP:localhost \
+ -v of_port:6653"
+ robot_suite="/home/opnfv/repos/odl_test/csit/suites/openflowplugin/Performance/010_Cbench.robot"
+
+ docker run -i -v /tmp:/tmp opnfv/cperf:$DOCKER_TAG ${robot_cmd} ${robot_suite}
- builder:
name: cperf-cleanup
builders:
- - shell: |
- #!/bin/bash
- [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
- echo "Cleaning up docker containers/images..."
- # Remove previous running containers if exist
- if [[ ! -z $(docker ps -a | grep opnfv/cperf) ]]; then
- echo "Removing existing opnfv/cperf containers..."
- docker ps -a | grep opnfv/cperf | awk '{print $1}' | xargs docker rm -f >${redirect}
- fi
-
- # Remove existing images if exist
- if [[ ! -z $(docker images | grep opnfv/cperf) ]]; then
- echo "Docker images to remove:"
- docker images | head -1 && docker images | grep opnfv/cperf >${redirect}
- image_tags=($(docker images | grep opnfv/cperf | awk '{print $2}'))
- for tag in "${image_tags[@]}"; do
- echo "Removing docker image opnfv/cperf:$tag..."
- docker rmi opnfv/cperf:$tag >/dev/null
- done
- fi
+ - shell: |
+ #!/bin/bash
+ [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
+
+ echo "Cleaning up docker containers/images..."
+ # Remove previous running containers if exist
+ if [[ ! -z $(docker ps -a | grep opnfv/cperf) ]]; then
+ echo "Removing existing opnfv/cperf containers..."
+ docker ps -a | grep opnfv/cperf | awk '{print $1}' | xargs docker rm -f >${redirect}
+ fi
+
+ # Remove existing images if exist
+ if [[ ! -z $(docker images | grep opnfv/cperf) ]]; then
+ echo "Docker images to remove:"
+ docker images | head -1 && docker images | grep opnfv/cperf >${redirect}
+ image_tags=($(docker images | grep opnfv/cperf | awk '{print $2}'))
+ for tag in "${image_tags[@]}"; do
+ echo "Removing docker image opnfv/cperf:$tag..."
+ docker rmi opnfv/cperf:$tag >/dev/null
+ done
+ fi
diff --git a/jjb/daisy4nfv/daisy-daily-jobs.yml b/jjb/daisy4nfv/daisy-daily-jobs.yml
index aac76baa4..9a680e7b8 100644
--- a/jjb/daisy4nfv/daisy-daily-jobs.yml
+++ b/jjb/daisy4nfv/daisy-daily-jobs.yml
@@ -45,6 +45,9 @@
# NOHA scenarios
- 'os-nosdn-nofeature-noha':
auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
+ # ODL_L3 scenarios
+ - 'os-odl-nofeature-ha':
+ auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
jobs:
- '{project}-{scenario}-{pod}-daily-{stream}'
@@ -174,26 +177,36 @@
#-----------------------------------------------
# Triggers for job running on daisy-baremetal against master branch
#-----------------------------------------------
-# HA Scenarios
+# Basic HA Scenarios
- trigger:
name: 'daisy-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- - timed: ''
-# NOHA Scenarios
+ - timed: '0 16 * * *'
+# Basic NOHA Scenarios
- trigger:
name: 'daisy-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
triggers:
- timed: ''
+# ODL Scenarios
+- trigger:
+ name: 'daisy-os-odl-nofeature-ha-baremetal-daily-master-trigger'
+ triggers:
+ - timed: '0 12 * * *'
#-----------------------------------------------
# Triggers for job running on daisy-virtual against master branch
#-----------------------------------------------
+# Basic HA Scenarios
- trigger:
name: 'daisy-os-nosdn-nofeature-ha-virtual-daily-master-trigger'
triggers:
- - timed: ''
-# NOHA Scenarios
+ - timed: '0 16 * * *'
+# Basic NOHA Scenarios
- trigger:
name: 'daisy-os-nosdn-nofeature-noha-virtual-daily-master-trigger'
triggers:
- - timed: 'H 8,22 * * *'
-
+ - timed: ''
+# ODL Scenarios
+- trigger:
+ name: 'daisy-os-odl-nofeature-ha-virtual-daily-master-trigger'
+ triggers:
+ - timed: '0 12 * * *'
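
For the 'timed' values used throughout these triggers: an empty string leaves the scenario without a timer (the job can still be launched manually or by upstream jobs), a plain cron expression such as '0 16 * * *' starts it once a day at a fixed minute, and an 'H' field (as in the removed 'H 8,22 * * *') lets Jenkins hash the job name to spread start times. A purely illustrative enabled/disabled pair:

    - trigger:
        name: 'example-os-odl-nofeature-ha-baremetal-daily-master-trigger'
        triggers:
          - timed: '0 12 * * *'   # run once a day at 12:00
    - trigger:
        name: 'example-os-odl-nofeature-ha-virtual-daily-danube-trigger'
        triggers:
          - timed: ''             # no timer; scenario effectively disabled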
diff --git a/jjb/daisy4nfv/daisy-deploy.sh b/jjb/daisy4nfv/daisy-deploy.sh
index 785f3a571..0bcac4b02 100755
--- a/jjb/daisy4nfv/daisy-deploy.sh
+++ b/jjb/daisy4nfv/daisy-deploy.sh
@@ -28,7 +28,7 @@ git clone ssh://jenkins-zte@gerrit.opnfv.org:29418/securedlab --quiet \
# daisy ci/deploy/deploy.sh use $BASE_DIR/labs dir
cp -r securedlab/labs .
-DEPLOY_COMMAND="sudo ./ci/deploy/deploy.sh -b $BASE_DIR \
+DEPLOY_COMMAND="sudo -E ./ci/deploy/deploy.sh -b $BASE_DIR \
-l $LAB_NAME -p $POD_NAME -B $BRIDGE -s $DEPLOY_SCENARIO"
# log info to console
diff --git a/jjb/daisy4nfv/daisy-project-jobs.yml b/jjb/daisy4nfv/daisy-project-jobs.yml
index 0a9d43d25..bcb89105e 100644
--- a/jjb/daisy4nfv/daisy-project-jobs.yml
+++ b/jjb/daisy4nfv/daisy-project-jobs.yml
@@ -20,10 +20,6 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
- - danube:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
phase:
- 'build':
@@ -58,13 +54,15 @@
use-build-blocker: true
blocking-jobs:
- '{installer}-daily-.*'
+ - 'daisy4nfv-merge-build-.*'
+ - 'daisy4nfv-verify-build-.*'
block-level: 'NODE'
scm:
- git-scm
triggers:
- - timed: '0 H/8 * * *'
+ - timed: '0 8 * * *'
parameters:
- project-parameter:
@@ -129,6 +127,7 @@
publishers:
- '{installer}-recipients'
+ - email-jenkins-admins-on-failure
- job-template:
name: '{installer}-{phase}-daily-{stream}'
@@ -186,8 +185,7 @@
!include-raw: ./daisy4nfv-build.sh
- shell:
!include-raw: ./daisy4nfv-upload-artifact.sh
- - shell:
- !include-raw: ./daisy4nfv-workspace-cleanup.sh
+ - 'clean-workspace'
- builder:
name: 'daisy-deploy-daily-macro'
@@ -213,6 +211,7 @@
publishers:
- email:
recipients: hu.zhijiang@zte.com.cn lu.yao135@zte.com.cn zhou.ya@zte.com.cn yangyang1@zte.com.cn julienjut@gmail.com
+ - email-jenkins-admins-on-failure
- parameter:
name: 'daisy-project-parameter'
diff --git a/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml b/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml
index 561ffbe24..97d830f9f 100644
--- a/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml
+++ b/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml
@@ -154,6 +154,7 @@
use-build-blocker: true
blocking-jobs:
- '{alias}-merge-{phase}-.*'
+ - '{installer}-daily-.*'
block-level: 'NODE'
scm:
@@ -191,8 +192,7 @@
!include-raw: ./daisy4nfv-build.sh
- shell:
!include-raw: ./daisy4nfv-upload-artifact.sh
- - shell:
- !include-raw: ./daisy4nfv-workspace-cleanup.sh
+ - 'clean-workspace'
- builder:
name: 'daisy-merge-deploy-virtual-macro'
@@ -201,8 +201,7 @@
!include-raw: ./daisy4nfv-download-artifact.sh
- shell:
!include-raw: ./daisy-deploy.sh
- - shell:
- !include-raw: ./daisy4nfv-workspace-cleanup.sh
+ - 'clean-workspace'
#####################################
# parameter macros
diff --git a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
index dff0ff0a4..1828ce459 100644
--- a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
+++ b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
@@ -50,7 +50,7 @@
- build-blocker:
use-build-blocker: true
blocking-jobs:
- - '{installer}-merge-build-.*'
+ - '{alias}-merge-build-.*'
block-level: 'NODE'
scm:
@@ -147,8 +147,9 @@
- build-blocker:
use-build-blocker: true
blocking-jobs:
- - '{installer}-merge-build-.*'
+ - '{alias}-merge-build-.*'
- '{alias}-verify-build-.*'
+ - '{installer}-daily-.*'
block-level: 'NODE'
scm:
@@ -182,8 +183,7 @@
!include-raw: ./daisy4nfv-basic.sh
- shell:
!include-raw: ./daisy4nfv-build.sh
- - shell:
- !include-raw: ./daisy4nfv-workspace-cleanup.sh
+ - 'clean-workspace'
- builder:
name: daisy-verify-unit-macro
diff --git a/jjb/daisy4nfv/daisy4nfv-workspace-cleanup.sh b/jjb/daisy4nfv/daisy4nfv-workspace-cleanup.sh
deleted file mode 100755
index 26f7e9a01..000000000
--- a/jjb/daisy4nfv/daisy4nfv-workspace-cleanup.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2016 ZTE Coreporation and others.
-# hu.zhijiang@zte.com.cn
-# sun.jing22@zte.com.cn
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# delete the $WORKSPACE to open some space
-/bin/rm -rf $WORKSPACE
diff --git a/jjb/doctor/doctor.yml b/jjb/doctor/doctor.yml
index 5a438e44d..b007e1432 100644
--- a/jjb/doctor/doctor.yml
+++ b/jjb/doctor/doctor.yml
@@ -17,11 +17,9 @@
installer:
- apex:
- slave-label: 'ool-virtual1'
- pod: 'ool-virtual1'
+ slave-label: 'doctor-apex-verify'
- fuel:
- slave-label: 'ool-virtual2'
- pod: 'ool-virtual2'
+ slave-label: 'doctor-fuel-verify'
#- joid:
# slave-label: 'ool-virtual3'
# pod: 'ool-virtual3'
@@ -32,22 +30,16 @@
task:
- verify:
- profiler: 'none'
auto-trigger-name: 'doctor-verify'
is-python: false
- - profiling:
- profiler: 'poc'
- auto-trigger-name: 'experimental'
- is-python: false
- python-verify:
- profiler: 'none'
auto-trigger-name: 'doctor-verify'
is-python: true
pod:
- - arm-pod2:
+ - armband-baremetal:
slave-label: '{pod}'
- - arm-pod3:
+ - armband-virtual:
slave-label: '{pod}'
jobs:
@@ -116,6 +108,7 @@
- '{auto-trigger-name}':
project: '{project}'
branch: '{branch}'
+ files: 'tests/**'
builders:
- shell: "[ -e tests/run.sh ] && bash -n ./tests/run.sh"
@@ -154,7 +147,7 @@
default: 'doctor-notification'
- string:
name: TESTCASE_OPTIONS
- default: '-e INSPECTOR_TYPE={inspector} -e PROFILER_TYPE={profiler} -e PYTHON_ENABLE={is-python} -v $WORKSPACE:/home/opnfv/repos/doctor'
+ default: '-e INSPECTOR_TYPE={inspector} -e PYTHON_ENABLE={is-python} -v $WORKSPACE:/home/opnfv/repos/doctor'
description: 'Additional parameters specific to test case(s)'
# functest-parameter
- string:
@@ -181,6 +174,7 @@
- '{auto-trigger-name}':
project: '{project}'
branch: '{branch}'
+ files: 'tests/**'
builders:
- 'clean-workspace-log'
@@ -191,9 +185,14 @@
# so this symbolic link should not be in 'tests/'. Otherwise,
# we'll have the same log twice in jenkins console log.
ln -sfn $HOME/opnfv/functest/results/{stream} functest_results
+ # NOTE: Get functest script in $WORKSPACE. This functest script is
+ # needed to perform VM image download in set-functest-env.sh
+ # from E release cycle.
+ mkdir -p functest/ci
+ wget https://git.opnfv.org/functest/plain/functest/ci/download_images.sh -O functest/ci/download_images.sh
- 'functest-suite-builder'
- shell: |
- functest_log="$HOME/opnfv/functest/results/{stream}/{project}.log"
+ functest_log="$HOME/opnfv/functest/results/{stream}/$FUNCTEST_SUITE_NAME.log"
# NOTE: checking the test result, as the previous job could return
# 0 regardless the result of doctor test scenario.
grep -e ' OK$' $functest_log || exit 1
@@ -202,7 +201,8 @@
- archive:
artifacts: 'tests/*.log'
- archive:
- artifacts: 'functest_results/{project}.log'
+ artifacts: 'functest_results/$FUNCTEST_SUITE_NAME.log'
+ - email-jenkins-admins-on-failure
#####################################
@@ -231,7 +231,7 @@
branch-pattern: '**/{branch}'
file-paths:
- compare-type: ANT
- pattern: 'tests/**'
+ pattern: '{files}'
skip-vote:
successful: true
failed: true
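
The new 'files' argument turns the previously hard-coded 'tests/**' filter into a parameter of the trigger macro, so the same macro can watch different sub-trees per job. Reconstructed from the hunks above (the surrounding gerrit settings are assumed to follow the usual releng layout, so treat this as a sketch rather than the exact macro), the macro and one instantiation look roughly like:

    - trigger:
        name: 'doctor-verify'
        triggers:
          - gerrit:
              server-name: 'gerrit.opnfv.org'
              trigger-on:
                - patchset-created-event:
                    exclude-drafts: 'false'
                    exclude-trivial-rebase: 'false'
                    exclude-no-code-change: 'false'
              projects:
                - project-compare-type: 'ANT'
                  project-pattern: '{project}'
                  branches:
                    - branch-compare-type: 'ANT'
                      branch-pattern: '**/{branch}'
                  file-paths:
                    - compare-type: ANT
                      pattern: '{files}'
              skip-vote:
                successful: true
                failed: true

    # instantiated from the job template
    triggers:
      - 'doctor-verify':
          project: '{project}'
          branch: '{branch}'
          files: 'tests/**'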
diff --git a/jjb/dovetail/dovetail-ci-jobs.yml b/jjb/dovetail/dovetail-ci-jobs.yml
index 6bcaea26f..938fdf915 100644
--- a/jjb/dovetail/dovetail-ci-jobs.yml
+++ b/jjb/dovetail/dovetail-ci-jobs.yml
@@ -25,7 +25,7 @@
branch: 'stable/{stream}'
dovetail-branch: master
gs-pathname: '/{stream}'
- docker-tag: 'cvp.0.1.0'
+ docker-tag: 'cvp.0.6.0'
#-----------------------------------
# POD, PLATFORM, AND BRANCH MAPPING
@@ -88,13 +88,23 @@
# that have not been switched using labels for slaves
#--------------------------------
#apex PODs
- - lf-pod1:
- slave-label: '{pod}'
+ - virtual:
+ slave-label: apex-virtual-master
SUT: apex
auto-trigger-name: 'daily-trigger-disabled'
<<: *master
- - lf-pod1:
- slave-label: '{pod}'
+ - baremetal:
+ slave-label: apex-baremetal-master
+ SUT: apex
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
+ - virtual:
+ slave-label: apex-virtual-danube
+ SUT: apex
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *danube
+ - baremetal:
+ slave-label: apex-baremetal-danube
SUT: apex
auto-trigger-name: 'daily-trigger-disabled'
<<: *danube
@@ -127,21 +137,6 @@
SUT: compass
auto-trigger-name: 'daily-trigger-disabled'
<<: *master
- - arm-pod2:
- slave-label: '{pod}'
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - arm-pod3:
- slave-label: '{pod}'
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - arm-virtual1:
- slave-label: '{pod}'
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- zte-pod1:
slave-label: zte-pod1
SUT: fuel
@@ -167,9 +162,13 @@
SUT: fuel
auto-trigger-name: 'daily-trigger-disabled'
<<: *danube
+ - huawei-pod4:
+ slave-label: huawei-pod4
+ SUT: apex
+ auto-trigger-name: 'apex-huawei-pod4-{testsuite}-danube-trigger'
+ <<: *danube
#--------------------------------
testsuite:
- - 'debug'
- 'compliance_set'
- 'proposed_tests'
@@ -197,7 +196,7 @@
- build-name:
name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
- timeout:
- timeout: 180
+ timeout: 300
abort: true
- fix-workspace-permissions
@@ -248,6 +247,7 @@
artifacts: 'results/**/*'
allow-empty: true
fingerprint: true
+ - email-jenkins-admins-on-failure
#--------------------------
# builder macros
@@ -263,3 +263,15 @@
builders:
- shell:
!include-raw: ./dovetail-cleanup.sh
+
+#--------------------------
+# trigger macros
+#--------------------------
+- trigger:
+ name: 'apex-huawei-pod4-proposed_tests-danube-trigger'
+ triggers:
+ - timed: '0 1 * * *'
+- trigger:
+ name: 'apex-huawei-pod4-compliance_set-danube-trigger'
+ triggers:
+ - timed: ''
diff --git a/jjb/dovetail/dovetail-cleanup.sh b/jjb/dovetail/dovetail-cleanup.sh
index 3ae0cbcc9..2d66fe022 100755
--- a/jjb/dovetail/dovetail-cleanup.sh
+++ b/jjb/dovetail/dovetail-cleanup.sh
@@ -12,16 +12,16 @@
# clean up dependent project docker images, which has no containers and image tag None
clean_images=(opnfv/functest opnfv/yardstick opnfv/testapi mongo)
for clean_image in "${clean_images[@]}"; do
- echo "Removing image $image_id, which has no containers and image tag is None"
dangling_images=($(docker images -f "dangling=true" | grep ${clean_image} | awk '{print $3}'))
if [[ -n ${dangling_images} ]]; then
for image_id in "${dangling_images[@]}"; do
+ echo "Removing image $image_id, which has no containers and image tag is None"
docker rmi $image_id >${redirect}
done
fi
done
-echo "Remove containers with image opnfv/dovetail:<None>..."
+echo "Remove dovetail images with tag None and containers with these images ..."
dangling_images=($(docker images -f "dangling=true" | grep opnfv/dovetail | awk '{print $3}'))
if [[ -n ${dangling_images} ]]; then
for image_id in "${dangling_images[@]}"; do
@@ -37,13 +37,13 @@ if [[ ! -z $(docker ps -a | grep opnfv/dovetail) ]]; then
docker ps -a | grep opnfv/dovetail | awk '{print $1}' | xargs docker rm -f >${redirect}
fi
-echo "Remove dovetail existing images if exist..."
-if [[ ! -z $(docker images | grep opnfv/dovetail) ]]; then
- echo "Docker images to remove:"
- docker images | head -1 && docker images | grep opnfv/dovetail >${redirect}
- image_tags=($(docker images | grep opnfv/dovetail | awk '{print $2}'))
- for tag in "${image_tags[@]}"; do
- echo "Removing docker image opnfv/dovetail:$tag..."
- docker rmi opnfv/dovetail:$tag >${redirect}
- done
-fi
+#echo "Remove dovetail existing images if exist..."
+#if [[ ! -z $(docker images | grep opnfv/dovetail) ]]; then
+# echo "Docker images to remove:"
+# docker images | head -1 && docker images | grep opnfv/dovetail >${redirect}
+# image_tags=($(docker images | grep opnfv/dovetail | awk '{print $2}'))
+# for tag in "${image_tags[@]}"; do
+# echo "Removing docker image opnfv/dovetail:$tag..."
+# docker rmi opnfv/dovetail:$tag >${redirect}
+# done
+#fi
diff --git a/jjb/dovetail/dovetail-run.sh b/jjb/dovetail/dovetail-run.sh
index 85bc54d90..9c4e205ae 100755
--- a/jjb/dovetail/dovetail-run.sh
+++ b/jjb/dovetail/dovetail-run.sh
@@ -69,6 +69,8 @@ else
exit 1
fi
+set +e
+
sudo pip install virtualenv
cd ${releng_repo}/modules
@@ -81,6 +83,8 @@ if [[ ${INSTALLER_TYPE} == compass ]]; then
options="-u root -p root"
elif [[ ${INSTALLER_TYPE} == fuel ]]; then
options="-u root -p r00tme"
+elif [[ ${INSTALLER_TYPE} == apex ]]; then
+ options="-u stack -k /root/.ssh/id_rsa"
else
echo "Don't support to generate pod.yaml on ${INSTALLER_TYPE} currently."
echo "HA test cases may not run properly."
@@ -93,6 +97,8 @@ ${cmd}
deactivate
+set -e
+
cd ${WORKSPACE}
if [ -f ${DOVETAIL_CONFIG}/pod.yaml ]; then
@@ -111,9 +117,31 @@ if [ "$INSTALLER_TYPE" == "fuel" ]; then
sshpass -p r00tme sudo scp $ssh_options root@${INSTALLER_IP}:~/.ssh/id_rsa ${DOVETAIL_CONFIG}/id_rsa
fi
+if [ "$INSTALLER_TYPE" == "apex" ]; then
+ echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
+ sudo scp $ssh_options stack@${INSTALLER_IP}:~/.ssh/id_rsa ${DOVETAIL_CONFIG}/id_rsa
+fi
+
+image_path=${HOME}/opnfv/dovetail/images
+if [[ ! -d ${image_path} ]]; then
+ mkdir -p ${image_path}
+fi
# sdnvpn test case needs to download this image first before running
-echo "Download image ubuntu-16.04-server-cloudimg-amd64-disk1.img ..."
-wget -q -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${DOVETAIL_CONFIG}
+ubuntu_image=${image_path}/ubuntu-16.04-server-cloudimg-amd64-disk1.img
+if [[ ! -f ${ubuntu_image} ]]; then
+ echo "Download image ubuntu-16.04-server-cloudimg-amd64-disk1.img ..."
+ wget -q -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${image_path}
+fi
+sudo cp ${ubuntu_image} ${DOVETAIL_CONFIG}
+
+# functest needs to download this image first before running
+cirros_image=${image_path}/cirros-0.3.5-x86_64-disk.img
+if [[ ! -f ${cirros_image} ]]; then
+ echo "Download image cirros-0.3.5-x86_64-disk.img ..."
+ wget -q -nc http://download.cirros-cloud.net/0.3.5/cirros-0.3.5-x86_64-disk.img -P ${image_path}
+fi
+sudo cp ${cirros_image} ${DOVETAIL_CONFIG}
+
opts="--privileged=true -id"
@@ -145,6 +173,37 @@ if [ $(docker ps | grep "opnfv/dovetail:${DOCKER_TAG}" | wc -l) == 0 ]; then
exit 1
fi
+# Modify tempest_conf.yaml file
+tempest_conf_file=${DOVETAIL_CONFIG}/tempest_conf.yaml
+if [[ ${INSTALLER_TYPE} == 'compass' || ${INSTALLER_TYPE} == 'apex' ]]; then
+ volume_device='vdb'
+else
+ volume_device='vdc'
+fi
+
+cat << EOF >$tempest_conf_file
+
+compute:
+ min_compute_nodes: 2
+ volume_device_name: ${volume_device}
+ min_microversion: 2.2
+ max_microversion: latest
+
+compute-feature-enabled:
+ live_migration: True
+ block_migration_for_live_migration: True
+ block_migrate_cinder_iscsi: True
+ attach_encrypted_volume: True
+
+EOF
+
+echo "${tempest_conf_file}..."
+cat ${tempest_conf_file}
+
+cp_tempest_cmd="docker cp ${DOVETAIL_CONFIG}/tempest_conf.yaml $container_id:/home/opnfv/dovetail/dovetail/userconfig"
+echo "exec command: ${cp_tempest_cmd}"
+$cp_tempest_cmd
+
list_cmd="dovetail list ${TESTSUITE}"
run_cmd="dovetail run --testsuite ${TESTSUITE} -d"
echo "Container exec command: ${list_cmd}"
@@ -159,5 +218,8 @@ sudo cp -r ${DOVETAIL_HOME}/results ./
# PRIMARY_GROUP=$(id -gn $CURRENT_USER)
# sudo chown -R ${CURRENT_USER}:${PRIMARY_GROUP} ${WORKSPACE}/results
+#remove useless workspace from yardstick to save disk space
+sudo rm -rf ./results/workspace
+
echo "Dovetail: done!"
diff --git a/jjb/dovetail/dovetail-weekly-jobs.yml b/jjb/dovetail/dovetail-weekly-jobs.yml
index 700657d68..11904cbcd 100644
--- a/jjb/dovetail/dovetail-weekly-jobs.yml
+++ b/jjb/dovetail/dovetail-weekly-jobs.yml
@@ -120,6 +120,7 @@
artifacts: 'results/**/*'
allow-empty: true
fingerprint: true
+ - email-jenkins-admins-on-failure
########################
# builder macros
diff --git a/jjb/dpacc/dpacc.yml b/jjb/dpacc/dpacc.yml
index 63eb044ad..39f3d563c 100644
--- a/jjb/dpacc/dpacc.yml
+++ b/jjb/dpacc/dpacc.yml
@@ -1,3 +1,4 @@
+---
###################################################
# All the jobs except verify have been removed!
# They will only be enabled on request by projects!
@@ -8,17 +9,17 @@
project: '{name}'
jobs:
- - 'dpacc-verify-{stream}'
+ - 'dpacc-verify-{stream}'
stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - danube:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+ - danube:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: false
- job-template:
name: 'dpacc-verify-{stream}'
@@ -26,38 +27,38 @@
disabled: '{obj:disabled}'
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- - git-scm-gerrit
+ - git-scm-gerrit
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**|.gitignore'
builders:
- - shell: |
- echo "Nothing to verify!"
+ - shell: |
+ echo "Nothing to verify!"
diff --git a/jjb/fuel/fuel-build.sh b/jjb/fuel/fuel-build.sh
deleted file mode 100755
index 2c0d12a80..000000000
--- a/jjb/fuel/fuel-build.sh
+++ /dev/null
@@ -1,109 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-# disable Fuel iso build for master branch
-if [[ "$BRANCH" == 'master' ]]; then
- touch $WORKSPACE/.noupload
- echo "--------------------------------------------------------"
- echo "Done!"
- exit 0
-fi
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-export TERM="vt220"
-
-cd $WORKSPACE
-
-# remove the expired items from cache
-test -f $WORKSPACE/ci/clean_cache.sh && $WORKSPACE/ci/clean_cache.sh $CACHE_DIRECTORY
-
-LATEST_ISO_PROPERTIES=$WORKSPACE/latest.iso.properties
-if [[ "$JOB_NAME" =~ "daily" ]]; then
- # check to see if we already have an artifact on artifacts.opnfv.org
- # for this commit during daily builds
- echo "Checking to see if we already built and stored Fuel ISO for this commit"
-
- curl -s -o $LATEST_ISO_PROPERTIES http://$GS_URL/latest.properties 2>/dev/null
-fi
-
-# get metadata of latest ISO
-if grep -q OPNFV_GIT_SHA1 $LATEST_ISO_PROPERTIES 2>/dev/null; then
- LATEST_ISO_SHA1=$(grep OPNFV_GIT_SHA1 $LATEST_ISO_PROPERTIES | cut -d'=' -f2)
- LATEST_ISO_URL=$(grep OPNFV_ARTIFACT_URL $LATEST_ISO_PROPERTIES | cut -d'=' -f2)
-else
- LATEST_ISO_SHA1=none
-fi
-
-# get current SHA1
-CURRENT_SHA1=$(git rev-parse HEAD)
-
-# set FORCE_BUILD to false for non-daily builds
-FORCE_BUILD=${FORCE_BUILD:-false}
-
-if [[ "$CURRENT_SHA1" == "$LATEST_ISO_SHA1" && "$FORCE_BUILD" == "false" ]]; then
- echo "***************************************************"
- echo " An ISO has already been built for this commit"
- echo " $LATEST_ISO_URL"
- echo "***************************************************"
-# echo "Nothing new to build. Exiting."
-# touch $WORKSPACE/.noupload
-# exit 0
-else
- echo "This commit has not been built yet or forced build! Proceeding with the build."
- /bin/rm -f $LATEST_ISO_PROPERTIES
- echo
-fi
-
-# log info to console
-echo "Starting the build of $INSTALLER_TYPE. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-# create the cache directory if it doesn't exist
-mkdir -p $CACHE_DIRECTORY
-
-# set OPNFV_ARTIFACT_VERSION
-if [[ "$JOB_NAME" =~ "merge" ]]; then
- echo "Building Fuel ISO for a merged change"
- export OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
-else
- export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-fi
-
-NOCACHE_PATTERN="verify: no-cache"
-if [[ "$JOB_NAME" =~ "verify" && "$GERRIT_CHANGE_COMMIT_MESSAGE" =~ "$NOCACHE_PATTERN" ]]; then
- echo "The cache will not be used for this build!"
- NOCACHE_ARG="-f P"
-fi
-NOCACHE_ARG=${NOCACHE_ARG:-}
-
-# start the build
-cd $WORKSPACE/ci
-./build.sh -v $OPNFV_ARTIFACT_VERSION $NOCACHE_ARG -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY
-
-# list the build artifacts
-ls -al $BUILD_DIRECTORY
-
-# save information regarding artifact into file
-(
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
- echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
-) > $WORKSPACE/opnfv.properties
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/fuel/fuel-daily-jobs.yml b/jjb/fuel/fuel-daily-jobs.yml
index 68677089d..c30cfed72 100644
--- a/jjb/fuel/fuel-daily-jobs.yml
+++ b/jjb/fuel/fuel-daily-jobs.yml
@@ -15,6 +15,11 @@
branch: '{stream}'
disabled: false
gs-pathname: ''
+ euphrates: &euphrates
+ stream: euphrates
+ branch: 'stable/{stream}'
+ disabled: true
+ gs-pathname: '/{stream}'
danube: &danube
stream: danube
branch: 'stable/{stream}'
@@ -34,27 +39,27 @@
<<: *master
- baremetal:
slave-label: fuel-baremetal
- <<: *danube
+ <<: *euphrates
- virtual:
slave-label: fuel-virtual
- <<: *danube
+ <<: *euphrates
#--------------------------------
# None-CI PODs
#--------------------------------
- zte-pod1:
slave-label: zte-pod1
<<: *master
- - zte-pod2:
- slave-label: zte-pod2
- <<: *master
- zte-pod3:
slave-label: zte-pod3
<<: *master
- zte-pod1:
slave-label: zte-pod1
- <<: *danube
+ <<: *euphrates
- zte-pod3:
slave-label: zte-pod3
+ <<: *euphrates
+ - zte-pod1:
+ slave-label: zte-pod1
<<: *danube
#--------------------------------
# scenarios
@@ -63,18 +68,12 @@
# HA scenarios
- 'os-nosdn-nofeature-ha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-odl_l2-nofeature-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-odl_l3-nofeature-ha':
+ - 'os-odl-nofeature-ha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-onos-sfc-ha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-onos-nofeature-ha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-odl_l2-sfc-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-odl_l2-bgpvpn-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-nosdn-kvm-ha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-nosdn-ovs-ha':
@@ -88,18 +87,12 @@
# NOHA scenarios
- 'os-nosdn-nofeature-noha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-odl_l2-nofeature-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-odl_l3-nofeature-noha':
+ - 'os-odl-nofeature-noha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-onos-sfc-noha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-onos-nofeature-noha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-odl_l2-sfc-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-odl_l2-bgpvpn-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-nosdn-kvm-noha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-nosdn-ovs-noha':
@@ -135,6 +128,7 @@
blocking-jobs:
- 'fuel-os-.*?-{pod}-daily-.*'
- 'fuel-os-.*?-{pod}-weekly-.*'
+ - 'fuel-verify-.*'
block-level: 'NODE'
wrappers:
@@ -148,14 +142,13 @@
- project-parameter:
project: '{project}'
branch: '{branch}'
- - '{installer}-defaults'
+ - '{installer}-defaults':
+ gs-pathname: '{gs-pathname}'
- '{slave-label}-defaults':
installer: '{installer}'
- string:
name: DEPLOY_SCENARIO
default: '{scenario}'
- - fuel-ci-parameter:
- gs-pathname: '{gs-pathname}'
builders:
- description-setter:
@@ -193,11 +186,12 @@
# 2.here the stream means the SUT stream, dovetail stream is defined in its own job
# 3.only debug testsuite here(refstack, ha, ipv6, bgpvpn)
# 4.not used for release criteria or compliance,
- # only to debug the dovetail tool bugs with bgpvpn
- # 5,only run against scenario os-odl_l2-bgpvpn-ha(regex used here, can extend to more scenarios future)
+ # only to debug the dovetail tool bugs with bgpvpn and nosdn-nofeature
+ # 5.only run against scenario os-odl-bgpvpn-ha(regex used here, can extend to more scenarios future)
+ # 6.ZTE pod1, os-nosdn-nofeature-ha and os-odl-bgpvpn-ha, run against danube
- conditional-step:
condition-kind: regex-match
- regex: os-odl_l2-bgpvpn-ha
+ regex: os-(nosdn-nofeature|odl-bgpvpn)-ha
label: '{scenario}'
steps:
- trigger-builds:
@@ -215,6 +209,7 @@
publishers:
- email:
recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
+ - email-jenkins-admins-on-failure
- job-template:
name: 'fuel-deploy-{pod}-daily-{stream}'
@@ -243,14 +238,13 @@
- project-parameter:
project: '{project}'
branch: '{branch}'
- - '{installer}-defaults'
+ - '{installer}-defaults':
+ gs-pathname: '{gs-pathname}'
- '{slave-label}-defaults':
installer: '{installer}'
- string:
name: DEPLOY_SCENARIO
- default: 'os-odl_l2-nofeature-ha'
- - fuel-ci-parameter:
- gs-pathname: '{gs-pathname}'
+ default: 'os-odl-nofeature-ha'
- string:
name: DEPLOY_TIMEOUT
default: '150'
@@ -274,30 +268,9 @@
publishers:
- email:
recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
+ - email-jenkins-admins-on-failure
########################
-# parameter macros
-########################
-- parameter:
- name: fuel-ci-parameter
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/cache/$INSTALLER_TYPE
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
- - string:
- name: SSH_KEY
- default: "/tmp/mcp.rsa"
- description: "Path to private SSH key to access environment nodes. For MCP deployments only."
-########################
# trigger macros
########################
#-----------------------------------------------
@@ -307,19 +280,15 @@
- trigger:
name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '' # '5 20 * * *'
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: '' # '5 23 * * *'
+ - timed: '5 20 * * *'
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-master-trigger'
+ name: 'fuel-os-odl-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '' # '5 2 * * *'
+ - timed: '5 2 * * *'
- trigger:
name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '' # '5 5 * * *'
+ - timed: '5 5 * * *'
- trigger:
name: 'fuel-os-onos-sfc-ha-baremetal-daily-master-trigger'
triggers:
@@ -329,14 +298,6 @@
triggers:
- timed: '' # '5 8 * * *'
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: '' # '5 11 * * *'
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: '' # '5 14 * * *'
-- trigger:
name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-master-trigger'
triggers:
- timed: '' # '5 17 * * *'
@@ -354,11 +315,7 @@
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-noha-baremetal-daily-master-trigger'
+ name: 'fuel-os-odl-nofeature-noha-baremetal-daily-master-trigger'
triggers:
- timed: ''
- trigger:
@@ -370,14 +327,6 @@
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-master-trigger'
triggers:
- timed: ''
@@ -394,96 +343,72 @@
triggers:
- timed: ''
#-----------------------------------------------
-# Triggers for job running on fuel-baremetal against danube branch
+# Triggers for job running on fuel-baremetal against euphrates branch
#-----------------------------------------------
# HA Scenarios
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-danube-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-euphrates-trigger'
triggers:
- - timed: '0 20 * * *'
+ - timed: '' # '0 20 * * *'
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-danube-trigger'
+ name: 'fuel-os-odl-nofeature-ha-baremetal-daily-euphrates-trigger'
triggers:
- - timed: '0 23 * * *'
+ - timed: '' # '0 2 * * *'
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-danube-trigger'
- triggers:
- - timed: '0 2 * * *'
-- trigger:
- name: 'fuel-os-onos-sfc-ha-baremetal-daily-danube-trigger'
+ name: 'fuel-os-onos-sfc-ha-baremetal-daily-euphrates-trigger'
triggers:
- timed: '' # '0 5 * * *'
- trigger:
- name: 'fuel-os-onos-nofeature-ha-baremetal-daily-danube-trigger'
+ name: 'fuel-os-onos-nofeature-ha-baremetal-daily-euphrates-trigger'
triggers:
- timed: '' # '0 8 * * *'
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-danube-trigger'
- triggers:
- - timed: '0 11 * * *'
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-euphrates-trigger'
triggers:
- - timed: '0 14 * * *'
+ - timed: '' # '0 17 * * *'
- trigger:
- name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-danube-trigger'
+ name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-euphrates-trigger'
triggers:
- - timed: '0 17 * * *'
+ - timed: '' # '0 20 * * *'
- trigger:
- name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-baremetal-daily-euphrates-trigger'
triggers:
- - timed: '0 20 * * *'
+ - timed: '' # '0 12 * * *'
- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-baremetal-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-baremetal-daily-euphrates-trigger'
triggers:
- - timed: '0 12 * * *'
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-baremetal-daily-danube-trigger'
- triggers:
- - timed: '0 8 * * *'
+ - timed: '' # '0 8 * * *'
# NOHA Scenarios
- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-baremetal-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-noha-baremetal-daily-danube-trigger'
+ name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-sfc-noha-baremetal-daily-danube-trigger'
+ name: 'fuel-os-odl-nofeature-noha-baremetal-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-nofeature-noha-baremetal-daily-danube-trigger'
+ name: 'fuel-os-onos-sfc-noha-baremetal-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-baremetal-daily-danube-trigger'
+ name: 'fuel-os-onos-nofeature-noha-baremetal-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-noha-baremetal-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-danube-trigger'
+ name: 'fuel-os-nosdn-ovs-noha-baremetal-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-ovs-noha-baremetal-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-baremetal-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-baremetal-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-baremetal-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-baremetal-daily-euphrates-trigger'
triggers:
- timed: ''
#-----------------------------------------------
@@ -494,11 +419,7 @@
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-virtual-daily-master-trigger'
+ name: 'fuel-os-odl-nofeature-ha-virtual-daily-master-trigger'
triggers:
- timed: ''
- trigger:
@@ -510,14 +431,6 @@
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
name: 'fuel-os-nosdn-kvm-ha-virtual-daily-master-trigger'
triggers:
- timed: ''
@@ -539,13 +452,9 @@
triggers:
- timed: '5 13 * * *'
- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '' # '35 15 * * *'
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-noha-virtual-daily-master-trigger'
+ name: 'fuel-os-odl-nofeature-noha-virtual-daily-master-trigger'
triggers:
- - timed: '' # '5 18 * * *'
+ - timed: '5 18 * * *'
- trigger:
name: 'fuel-os-onos-sfc-noha-virtual-daily-master-trigger'
triggers:
@@ -555,21 +464,13 @@
triggers:
- timed: '' # '5 23 * * *'
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '' # '35 1 * * *'
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '' # '5 4 * * *'
-- trigger:
name: 'fuel-os-nosdn-kvm-noha-virtual-daily-master-trigger'
triggers:
- timed: '' # '35 6 * * *'
- trigger:
name: 'fuel-os-nosdn-ovs-noha-virtual-daily-master-trigger'
triggers:
- - timed: '' # '5 9 * * *'
+ - timed: '5 9 * * *'
- trigger:
name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-master-trigger'
triggers:
@@ -579,97 +480,73 @@
triggers:
- timed: '' # '30 20 * * *'
#-----------------------------------------------
-# Triggers for job running on fuel-virtual against danube branch
+# Triggers for job running on fuel-virtual against euphrates branch
#-----------------------------------------------
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-virtual-daily-danube-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-virtual-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-virtual-daily-danube-trigger'
+ name: 'fuel-os-odl-nofeature-ha-virtual-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-virtual-daily-danube-trigger'
+ name: 'fuel-os-onos-sfc-ha-virtual-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-sfc-ha-virtual-daily-danube-trigger'
+ name: 'fuel-os-onos-nofeature-ha-virtual-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-nofeature-ha-virtual-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm-ha-virtual-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-virtual-daily-danube-trigger'
+ name: 'fuel-os-nosdn-ovs-ha-virtual-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-virtual-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-virtual-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-ha-virtual-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-virtual-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-virtual-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-virtual-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-virtual-daily-euphrates-trigger'
triggers:
- timed: ''
# NOHA Scenarios
- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-danube-trigger'
+ name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-euphrates-trigger'
triggers:
- - timed: '0 13 * * *'
+ - timed: '' # '0 13 * * *'
- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-virtual-daily-danube-trigger'
+ name: 'fuel-os-odl-nofeature-noha-virtual-daily-euphrates-trigger'
triggers:
- - timed: '30 15 * * *'
+ - timed: '' # '0 18 * * *'
- trigger:
- name: 'fuel-os-odl_l3-nofeature-noha-virtual-daily-danube-trigger'
- triggers:
- - timed: '0 18 * * *'
-- trigger:
- name: 'fuel-os-onos-sfc-noha-virtual-daily-danube-trigger'
+ name: 'fuel-os-onos-sfc-noha-virtual-daily-euphrates-trigger'
triggers:
- timed: '' # '30 20 * * *'
- trigger:
- name: 'fuel-os-onos-nofeature-noha-virtual-daily-danube-trigger'
+ name: 'fuel-os-onos-nofeature-noha-virtual-daily-euphrates-trigger'
triggers:
- timed: '' # '0 23 * * *'
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-virtual-daily-danube-trigger'
- triggers:
- - timed: '30 1 * * *'
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-noha-virtual-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm-noha-virtual-daily-euphrates-trigger'
triggers:
- - timed: '0 4 * * *'
+ - timed: '' # '30 6 * * *'
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-virtual-daily-danube-trigger'
+ name: 'fuel-os-nosdn-ovs-noha-virtual-daily-euphrates-trigger'
triggers:
- - timed: '30 6 * * *'
+ - timed: '' # '0 9 * * *'
- trigger:
- name: 'fuel-os-nosdn-ovs-noha-virtual-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-euphrates-trigger'
triggers:
- - timed: '0 9 * * *'
+ - timed: '' # '0 16 * * *'
- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-virtual-daily-euphrates-trigger'
triggers:
- - timed: '0 16 * * *'
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-virtual-daily-danube-trigger'
- triggers:
- - timed: '0 20 * * *'
+ - timed: '' # '0 20 * * *'
#-----------------------------------------------
# ZTE POD1 Triggers running against master branch
#-----------------------------------------------
@@ -678,11 +555,7 @@
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: '0 10 * * *'
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-daily-master-trigger'
+ name: 'fuel-os-odl-nofeature-ha-zte-pod1-daily-master-trigger'
triggers:
- timed: ''
- trigger:
@@ -694,21 +567,13 @@
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-master-trigger'
triggers:
- timed: ''
- trigger:
name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-master-trigger'
triggers:
- - timed: ''
+ - timed: '0 18 * * *'
- trigger:
name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-master-trigger'
triggers:
@@ -723,11 +588,7 @@
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-noha-zte-pod1-daily-master-trigger'
+ name: 'fuel-os-odl-nofeature-noha-zte-pod1-daily-master-trigger'
triggers:
- timed: ''
- trigger:
@@ -739,14 +600,6 @@
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
name: 'fuel-os-nosdn-kvm-noha-zte-pod1-daily-master-trigger'
triggers:
- timed: ''
@@ -762,112 +615,15 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-master-trigger'
triggers:
- timed: ''
-
-#-----------------------------------------------
-# ZTE POD2 Triggers running against master branch
-#-----------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-sfc-ha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-nofeature-ha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm-ha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-# NOHA Scenarios
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-noha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-sfc-noha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-nofeature-noha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm-noha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-noha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod2-daily-master-trigger'
- triggers:
- - timed: ''
#-----------------------------------------------
# ZTE POD3 Triggers running against master branch
#-----------------------------------------------
- trigger:
name: 'fuel-os-nosdn-nofeature-ha-zte-pod3-daily-master-trigger'
triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-zte-pod3-daily-master-trigger'
- triggers:
- - timed: ''
+ - timed: '0 10 * * *'
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-zte-pod3-daily-master-trigger'
+ name: 'fuel-os-odl-nofeature-ha-zte-pod3-daily-master-trigger'
triggers:
- timed: ''
- trigger:
@@ -879,17 +635,9 @@
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod3-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-zte-pod3-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
name: 'fuel-os-nosdn-kvm-ha-zte-pod3-daily-master-trigger'
triggers:
- - timed: '0 10 * * *'
+ - timed: ''
- trigger:
name: 'fuel-os-nosdn-ovs-ha-zte-pod3-daily-master-trigger'
triggers:
@@ -908,11 +656,7 @@
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-zte-pod3-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-noha-zte-pod3-daily-master-trigger'
+ name: 'fuel-os-odl-nofeature-noha-zte-pod3-daily-master-trigger'
triggers:
- timed: ''
- trigger:
@@ -924,14 +668,6 @@
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-zte-pod3-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod3-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
name: 'fuel-os-nosdn-kvm-noha-zte-pod3-daily-master-trigger'
triggers:
- timed: ''
@@ -948,279 +684,206 @@
triggers:
- timed: ''
#-----------------------------------------------
-# ZTE POD1 Triggers running against danube branch
+# ZTE POD1 Triggers running against euphrates branch
#-----------------------------------------------
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-daily-danube-trigger'
- triggers:
- - timed: '0 2 * * *'
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-daily-danube-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-sfc-ha-zte-pod1-daily-danube-trigger'
+ name: 'fuel-os-odl-nofeature-ha-zte-pod1-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-danube-trigger'
+ name: 'fuel-os-onos-sfc-ha-zte-pod1-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-daily-danube-trigger'
+ name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-zte-pod1-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-danube-trigger'
+ name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod1-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod1-daily-euphrates-trigger'
triggers:
- timed: ''
# NOHA Scenarios
- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-zte-pod1-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-noha-zte-pod1-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-sfc-noha-zte-pod1-daily-danube-trigger'
+ name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-nofeature-noha-zte-pod1-daily-danube-trigger'
+ name: 'fuel-os-odl-nofeature-noha-zte-pod1-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-zte-pod1-daily-danube-trigger'
+ name: 'fuel-os-onos-sfc-noha-zte-pod1-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod1-daily-danube-trigger'
+ name: 'fuel-os-onos-nofeature-noha-zte-pod1-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-zte-pod1-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm-noha-zte-pod1-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-ovs-noha-zte-pod1-daily-danube-trigger'
+ name: 'fuel-os-nosdn-ovs-noha-zte-pod1-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod1-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod1-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-euphrates-trigger'
triggers:
- timed: ''
-
#-----------------------------------------------
-# ZTE POD2 Triggers running against danube branch
+# ZTE POD3 Triggers running against euphrates branch
#-----------------------------------------------
- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-zte-pod2-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-zte-pod2-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-zte-pod2-daily-danube-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-zte-pod3-daily-euphrates-trigger'
triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-sfc-ha-zte-pod2-daily-danube-trigger'
- triggers:
- - timed: ''
+ - timed: '' # '0 18 * * *'
- trigger:
- name: 'fuel-os-onos-nofeature-ha-zte-pod2-daily-danube-trigger'
+ name: 'fuel-os-odl-nofeature-ha-zte-pod3-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod2-daily-danube-trigger'
+ name: 'fuel-os-onos-sfc-ha-zte-pod3-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-zte-pod2-daily-danube-trigger'
+ name: 'fuel-os-onos-nofeature-ha-zte-pod3-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-ha-zte-pod2-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm-ha-zte-pod3-daily-euphrates-trigger'
triggers:
- - timed: ''
+ - timed: '' # '0 2 * * *'
- trigger:
- name: 'fuel-os-nosdn-ovs-ha-zte-pod2-daily-danube-trigger'
+ name: 'fuel-os-nosdn-ovs-ha-zte-pod3-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod2-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod3-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod2-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod3-daily-euphrates-trigger'
triggers:
- timed: ''
# NOHA Scenarios
- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-zte-pod2-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-zte-pod2-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-noha-zte-pod2-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-sfc-noha-zte-pod2-daily-danube-trigger'
+ name: 'fuel-os-nosdn-nofeature-noha-zte-pod3-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-nofeature-noha-zte-pod2-daily-danube-trigger'
+ name: 'fuel-os-odl-nofeature-noha-zte-pod3-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-zte-pod2-daily-danube-trigger'
+ name: 'fuel-os-onos-sfc-noha-zte-pod3-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod2-daily-danube-trigger'
+ name: 'fuel-os-onos-nofeature-noha-zte-pod3-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-zte-pod2-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm-noha-zte-pod3-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-ovs-noha-zte-pod2-daily-danube-trigger'
+ name: 'fuel-os-nosdn-ovs-noha-zte-pod3-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod2-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod3-daily-euphrates-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod2-daily-danube-trigger'
- triggers:
- - timed: ''
-#-----------------------------------------------
-# ZTE POD3 Triggers running against danube branch
-#-----------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-zte-pod3-daily-danube-trigger'
- triggers:
- - timed: '0 18 * * *'
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-ha-zte-pod3-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod3-daily-euphrates-trigger'
triggers:
- timed: ''
+#------------------------------------------------
+# ZTE POD1 Triggers running against danube branch
+#------------------------------------------------
- trigger:
- name: 'fuel-os-odl_l3-nofeature-ha-zte-pod3-daily-danube-trigger'
+ name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-daily-danube-trigger'
triggers:
- - timed: ''
+ - timed: '0 2 * * 6'
- trigger:
- name: 'fuel-os-onos-sfc-ha-zte-pod3-daily-danube-trigger'
+ name: 'fuel-os-odl-nofeature-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-nofeature-ha-zte-pod3-daily-danube-trigger'
+ name: 'fuel-os-onos-sfc-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod3-daily-danube-trigger'
+ name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-ha-zte-pod3-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-ha-zte-pod3-daily-danube-trigger'
- triggers:
- - timed: '0 2 * * *'
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-zte-pod3-daily-danube-trigger'
+ name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod3-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod3-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
# NOHA Scenarios
- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-zte-pod3-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l2-nofeature-noha-zte-pod3-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl_l3-nofeature-noha-zte-pod3-daily-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-sfc-noha-zte-pod3-daily-danube-trigger'
+ name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-onos-nofeature-noha-zte-pod3-daily-danube-trigger'
+ name: 'fuel-os-odl-nofeature-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-sfc-noha-zte-pod3-daily-danube-trigger'
+ name: 'fuel-os-onos-sfc-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-odl_l2-bgpvpn-noha-zte-pod3-daily-danube-trigger'
+ name: 'fuel-os-onos-nofeature-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm-noha-zte-pod3-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-ovs-noha-zte-pod3-daily-danube-trigger'
+ name: 'fuel-os-nosdn-ovs-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod3-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod3-daily-danube-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
diff --git a/jjb/fuel/fuel-deploy.sh b/jjb/fuel/fuel-deploy.sh
index 2fb5c71e4..6525c7ccb 100755
--- a/jjb/fuel/fuel-deploy.sh
+++ b/jjb/fuel/fuel-deploy.sh
@@ -1,7 +1,7 @@
#!/bin/bash
# SPDX-license-identifier: Apache-2.0
##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
+# Copyright (c) 2017 Ericsson AB, Mirantis Inc., Enea Software AB and others.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
@@ -12,7 +12,7 @@ set -o pipefail
export TERM="vt220"
-if [[ "$BRANCH" != 'master' ]]; then
+if [[ "$BRANCH" =~ 'danube' ]]; then
# source the file so we get OPNFV vars
source latest.properties
@@ -20,75 +20,99 @@ if [[ "$BRANCH" != 'master' ]]; then
echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
fi
-if [[ "$JOB_NAME" =~ "merge" ]]; then
- # set simplest scenario for virtual deploys to run for merges
+# shellcheck disable=SC2153
+if [[ "${JOB_NAME}" =~ 'verify' ]]; then
+ # set simplest scenario for virtual deploys to run for verify
DEPLOY_SCENARIO="os-nosdn-nofeature-ha"
-elif [[ "$BRANCH" != 'master' ]]; then
- # for none-merge deployments
+elif [[ "${BRANCH}" =~ 'danube' ]]; then
+ # for Danube deployments (no artifact for current master or newer branches)
# checkout the commit that was used for building the downloaded artifact
# to make sure the ISO and deployment mechanism uses same versions
- echo "Checking out $OPNFV_GIT_SHA1"
- git checkout $OPNFV_GIT_SHA1 --quiet
+ echo "Checking out ${OPNFV_GIT_SHA1}"
+ git checkout "${OPNFV_GIT_SHA1}" --quiet
fi
# set deployment parameters
-export TMPDIR=$HOME/tmpdir
+export TMPDIR=${HOME}/tmpdir
BRIDGE=${BRIDGE:-pxebr}
+# shellcheck disable=SC2153
LAB_NAME=${NODE_NAME/-*}
+# shellcheck disable=SC2153
POD_NAME=${NODE_NAME/*-}
-
-if [[ "$NODE_NAME" =~ "virtual" ]]; then
- POD_NAME="virtual_kvm"
-fi
-
-# we currently support ericsson, intel, lf and zte labs
-if [[ ! "$LAB_NAME" =~ (ericsson|intel|lf|zte) ]]; then
- echo "Unsupported/unidentified lab $LAB_NAME. Cannot continue!"
- exit 1
+# Armband might override LAB_CONFIG_URL; all others use the default
+LAB_CONFIG_URL=${LAB_CONFIG_URL:-'ssh://jenkins-ericsson@gerrit.opnfv.org:29418/securedlab'}
+
+# Fuel requires the deploy script to be run with sudo; Armband does not
+SUDO='sudo -E'
+if [ "${PROJECT}" = 'fuel' ]; then
+ # Fuel does not use any POD-specific configuration for virtual deploys
+ if [[ "${NODE_NAME}" =~ "virtual" ]]; then
+ POD_NAME="virtual_kvm"
+ fi
+ # Fuel currently supports ericsson, intel, lf and zte labs
+ if [[ ! "${LAB_NAME}" =~ (ericsson|intel|lf|zte) ]]; then
+ echo "Unsupported/unidentified lab ${LAB_NAME}. Cannot continue!"
+ exit 1
+ fi
else
- echo "Using configuration for $LAB_NAME"
+ SUDO=
+ # Armband currently supports arm and enea labs
+ if [[ ! "${LAB_NAME}" =~ (arm|enea) ]]; then
+ echo "Unsupported/unidentified lab ${LAB_NAME}. Cannot continue!"
+ exit 1
+ fi
fi
-# create TMPDIR if it doesn't exist
-export TMPDIR=$HOME/tmpdir
-mkdir -p $TMPDIR
+echo "Using configuration for ${LAB_NAME}"
+
+# create TMPDIR if it doesn't exist, change permissions
+mkdir -p "${TMPDIR}"
+chmod a+x "${HOME}" "${TMPDIR}"
-# change permissions down to TMPDIR
-chmod a+x $HOME
-chmod a+x $TMPDIR
+cd "${WORKSPACE}" || exit 1
+if [[ "${LAB_CONFIG_URL}" =~ ^(git|ssh):// ]]; then
+ echo "Cloning securedlab repo ${BRANCH}"
+ git clone --quiet --branch "${BRANCH}" "${LAB_CONFIG_URL}" lab-config
+ LAB_CONFIG_URL=file://${WORKSPACE}/lab-config
-# clone the securedlab repo
-cd $WORKSPACE
-echo "Cloning securedlab repo $BRANCH"
-git clone ssh://jenkins-ericsson@gerrit.opnfv.org:29418/securedlab --quiet \
- --branch $BRANCH
+ # Source local_env if present, which contains POD-specific config
+ local_env="${WORKSPACE}/lab-config/labs/${LAB_NAME}/${POD_NAME}/fuel/config/local_env"
+ if [ -e "${local_env}" ]; then
+ echo "-- Sourcing local environment file"
+ source "${local_env}"
+ fi
+fi
+
+# releng wants us to use nothing else but opnfv.iso for now. We comply.
+ISO_FILE=file://${WORKSPACE}/opnfv.iso
# log file name
FUEL_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}.log.tar.gz"
# construct the command
-DEPLOY_COMMAND="sudo $WORKSPACE/ci/deploy.sh -b file://$WORKSPACE/securedlab \
- -l $LAB_NAME -p $POD_NAME -s $DEPLOY_SCENARIO -i file://$WORKSPACE/opnfv.iso \
- -H -B $BRIDGE -S $TMPDIR -L $WORKSPACE/$FUEL_LOG_FILENAME"
+DEPLOY_COMMAND="${SUDO} ${WORKSPACE}/ci/deploy.sh -b ${LAB_CONFIG_URL} \
+ -l ${LAB_NAME} -p ${POD_NAME} -s ${DEPLOY_SCENARIO} -i ${ISO_FILE} \
+ -B ${DEFAULT_BRIDGE:-${BRIDGE}} -S ${TMPDIR} \
+ -L ${WORKSPACE}/${FUEL_LOG_FILENAME}"
# log info to console
echo "Deployment parameters"
echo "--------------------------------------------------------"
-echo "Scenario: $DEPLOY_SCENARIO"
-echo "Lab: $LAB_NAME"
-echo "POD: $POD_NAME"
-[[ "$BRANCH" != 'master' ]] && echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
+echo "Scenario: ${DEPLOY_SCENARIO}"
+echo "Lab: ${LAB_NAME}"
+echo "POD: ${POD_NAME}"
+[[ "${BRANCH}" != 'master' ]] && echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
echo
-echo "Starting the deployment using $INSTALLER_TYPE. This could take some time..."
+echo "Starting the deployment using ${INSTALLER_TYPE}. This could take some time..."
echo "--------------------------------------------------------"
echo
# start the deployment
echo "Issuing command"
-echo "$DEPLOY_COMMAND"
+echo "${DEPLOY_COMMAND}"
echo
-$DEPLOY_COMMAND
+${DEPLOY_COMMAND}
exit_code=$?
echo
@@ -96,17 +120,18 @@ echo "--------------------------------------------------------"
echo "Deployment is done!"
# upload logs for baremetal deployments
-# work with virtual deployments is still going on so we skip that for the timebeing
-if [[ "$JOB_NAME" =~ (baremetal-daily|baremetal-weekly) ]]; then
+# work with virtual deployments is still going on, so skip that for now
+if [[ "${JOB_NAME}" =~ (baremetal-daily|baremetal-weekly) ]]; then
echo "Uploading deployment logs"
- gsutil cp $WORKSPACE/$FUEL_LOG_FILENAME gs://$GS_URL/logs/$FUEL_LOG_FILENAME > /dev/null 2>&1
- echo "Logs are available as http://$GS_URL/logs/$FUEL_LOG_FILENAME"
+ gsutil cp "${WORKSPACE}/${FUEL_LOG_FILENAME}" \
+ "gs://${GS_URL}/logs/${FUEL_LOG_FILENAME}" > /dev/null 2>&1
+ echo "Logs are available at http://${GS_URL}/logs/${FUEL_LOG_FILENAME}"
fi
-if [[ $exit_code -ne 0 ]]; then
+if [[ "${exit_code}" -ne 0 ]]; then
echo "Deployment failed!"
- exit $exit_code
-else
- echo "Deployment is successful!"
- exit 0
+ exit "${exit_code}"
fi
+
+echo "Deployment is successful!"
+exit 0
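The reworked fuel-deploy.sh above derives the lab and POD names from the Jenkins node name via bash parameter expansion. A minimal standalone sketch of that step, using a hypothetical NODE_NAME that is not taken from this change, is:

#!/bin/bash
# Sketch only: the NODE_NAME value is hypothetical.
NODE_NAME=${NODE_NAME:-zte-pod1}
LAB_NAME=${NODE_NAME/-*}    # drop the first '-' and everything after it -> "zte"
POD_NAME=${NODE_NAME/*-}    # drop everything up to the last '-' -> "pod1"
[[ "${NODE_NAME}" =~ "virtual" ]] && POD_NAME="virtual_kvm"   # Fuel virtual deploys
echo "lab=${LAB_NAME} pod=${POD_NAME}"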
diff --git a/jjb/fuel/fuel-download-artifact.sh b/jjb/fuel/fuel-download-artifact.sh
index c3b8253de..02ca10305 100755
--- a/jjb/fuel/fuel-download-artifact.sh
+++ b/jjb/fuel/fuel-download-artifact.sh
@@ -11,20 +11,14 @@ set -o errexit
set -o pipefail
# disable Fuel ISO download for master branch
-[[ "$BRANCH" == 'master' ]] && exit 0
+[[ ! "$BRANCH" =~ (danube) ]] && exit 0
-# use proxy url to replace the nomral URL, for googleusercontent.com will be blocked randomly
+# use proxy url to replace the normal URL, or googleusercontent.com will be blocked randomly
[[ "$NODE_NAME" =~ (zte) ]] && GS_URL=${GS_BASE_PROXY%%/*}/$GS_URL
-if [[ "$JOB_NAME" =~ "merge" ]]; then
- echo "Downloading http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties"
- # get the properties file for the Fuel ISO built for a merged change
- curl -L -s -o $WORKSPACE/latest.properties http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties
-else
- # get the latest.properties file in order to get info regarding latest artifact
- echo "Downloading http://$GS_URL/latest.properties"
- curl -L -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
-fi
+# get the latest.properties file in order to get info regarding latest artifact
+echo "Downloading http://$GS_URL/latest.properties"
+curl -L -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
# check if we got the file
[[ -f $WORKSPACE/latest.properties ]] || exit 1
@@ -36,21 +30,18 @@ source $WORKSPACE/latest.properties
OPNFV_ARTIFACT=${OPNFV_ARTIFACT_URL/*\/}
echo "Using $OPNFV_ARTIFACT for deployment"
-# using ISOs for verify & merge jobs from local storage will be enabled later
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
- # check if we already have the ISO to avoid redownload
- ISOSTORE="/iso_mount/opnfv_ci/${BRANCH##*/}"
- if [[ -f "$ISOSTORE/$OPNFV_ARTIFACT" ]]; then
- echo "ISO exists locally. Skipping the download and using the file from ISO store"
- ln -s $ISOSTORE/$OPNFV_ARTIFACT $WORKSPACE/opnfv.iso
- echo "--------------------------------------------------------"
- echo
- ls -al $WORKSPACE/opnfv.iso
- echo
- echo "--------------------------------------------------------"
- echo "Done!"
- exit 0
- fi
+# check if we already have the ISO to avoid redownload
+ISOSTORE="/iso_mount/opnfv_ci/${BRANCH##*/}"
+if [[ -f "$ISOSTORE/$OPNFV_ARTIFACT" ]]; then
+ echo "ISO exists locally. Skipping the download and using the file from ISO store"
+ ln -s $ISOSTORE/$OPNFV_ARTIFACT $WORKSPACE/opnfv.iso
+ echo "--------------------------------------------------------"
+ echo
+ ls -al $WORKSPACE/opnfv.iso
+ echo
+ echo "--------------------------------------------------------"
+ echo "Done!"
+ exit 0
fi
[[ "$NODE_NAME" =~ (zte) ]] && OPNFV_ARTIFACT_URL=${GS_BASE_PROXY%%/*}/$OPNFV_ARTIFACT_URL
diff --git a/jjb/fuel/fuel-project-jobs.yml b/jjb/fuel/fuel-project-jobs.yml
index 1f0ddd363..cfcbf3695 100644
--- a/jjb/fuel/fuel-project-jobs.yml
+++ b/jjb/fuel/fuel-project-jobs.yml
@@ -13,206 +13,18 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
- - danube:
+ - euphrates:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: false
+ disabled: true
jobs:
- - 'fuel-build-daily-{stream}'
- - 'fuel-merge-build-{stream}'
- - 'fuel-merge-deploy-virtual-{stream}'
- 'fuel-deploy-generic-daily-{stream}'
########################
# job templates
########################
- job-template:
- name: 'fuel-build-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - '{installer}-defaults'
- - choice:
- name: FORCE_BUILD
- choices:
- - 'false'
- - 'true'
- description: "Force build even if there is no changes in fuel repo. Default false"
- - fuel-project-parameter:
- gs-pathname: '{gs-pathname}'
-
- scm:
- - git-scm
-
- triggers:
- - timed: '0 H/4 * * *'
-
- wrappers:
- - timeout:
- timeout: 360
- fail: true
-
- builders:
- - shell:
- !include-raw-escape: ./fuel-build.sh
- - shell:
- !include-raw-escape: ./fuel-upload-artifact.sh
- - shell:
- !include-raw-escape: ./fuel-workspace-cleanup.sh
-
- publishers:
- - email:
- recipients: fzhadaev@mirantis.com
-
-- job-template:
- name: 'fuel-merge-build-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - '{installer}-defaults'
- - fuel-project-parameter:
- gs-pathname: '{gs-pathname}'
-
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'ci/**'
- - compare-type: ANT
- pattern: 'build/**'
- - compare-type: ANT
- pattern: 'deploy/**'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
-
- builders:
- - shell:
- !include-raw-escape: ./fuel-build.sh
- - shell:
- !include-raw-escape: ./fuel-upload-artifact.sh
- - shell:
- !include-raw-escape: ./fuel-workspace-cleanup.sh
-
-- job-template:
- name: 'fuel-merge-deploy-virtual-{stream}'
-
- disabled: true
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 2
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-os-.*?-virtual-daily-.*'
- - 'fuel-merge-deploy-virtual-.*'
- block-level: 'NODE'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'fuel-virtual-defaults':
- installer: '{installer}'
- - '{installer}-defaults'
- - fuel-project-parameter:
- gs-pathname: '{gs-pathname}'
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'ci/**'
- - compare-type: ANT
- pattern: 'build/**'
- - compare-type: ANT
- pattern: 'deploy/**'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
- dependency-jobs: 'fuel-merge-build-{stream}'
-
- builders:
- - shell:
- !include-raw-escape: ./fuel-download-artifact.sh
- - shell:
- !include-raw-escape: ./fuel-deploy.sh
- - shell:
- !include-raw-escape: ./fuel-workspace-cleanup.sh
-
- publishers:
- - email:
- recipients: fzhadaev@mirantis.com
-
-- job-template:
name: 'fuel-deploy-generic-daily-{stream}'
concurrent: true
@@ -236,14 +48,15 @@
- project-parameter:
project: '{project}'
branch: '{branch}'
- - '{installer}-defaults'
+ - '{installer}-defaults':
+ gs-pathname: '{gs-pathname}'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- string:
name: DEPLOY_SCENARIO
- default: 'os-odl_l2-nofeature-ha'
+ default: 'os-odl-nofeature-ha'
- node:
name: SLAVE_NAME
description: 'Slave name on Jenkins'
@@ -253,8 +66,6 @@
- ericsson-pod1
default-slaves:
- ericsson-pod2
- - fuel-project-parameter:
- gs-pathname: '{gs-pathname}'
scm:
- git-scm
@@ -265,25 +76,4 @@
builders:
- shell:
- !include-raw-escape: ./fuel-download-artifact.sh
- - shell:
!include-raw-escape: ./fuel-deploy.sh
-
-########################
-# parameter macros
-########################
-- parameter:
- name: fuel-project-parameter
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/cache/$INSTALLER_TYPE
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/fuel/fuel-upload-artifact.sh b/jjb/fuel/fuel-upload-artifact.sh
deleted file mode 100755
index d1ac3509b..000000000
--- a/jjb/fuel/fuel-upload-artifact.sh
+++ /dev/null
@@ -1,118 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o pipefail
-
-# check if we built something
-if [ -f $WORKSPACE/.noupload ]; then
- echo "Nothing new to upload. Exiting."
- /bin/rm -f $WORKSPACE/.noupload
- exit 0
-fi
-
-# source the opnfv.properties to get ARTIFACT_VERSION
-source $WORKSPACE/opnfv.properties
-
-nfsstore () {
-# storing ISOs for verify & merge jobs will be done once we get the disk array
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
- # store ISO locally on NFS first
- ISOSTORE="/iso_mount/opnfv_ci/${BRANCH##*/}"
- if [[ -d "$ISOSTORE" ]]; then
- # remove all but most recent 5 ISOs first to keep iso_mount clean & tidy
- cd $ISOSTORE
- ls -tp | grep -v '/' | tail -n +6 | xargs -d '\n' /bin/rm -f --
-
- # store ISO
- echo "Storing $INSTALLER_TYPE artifact on NFS..."
- /bin/cp -f $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso \
- $ISOSTORE/opnfv-$OPNFV_ARTIFACT_VERSION.iso
- fi
-fi
-}
-
-importkey () {
-# clone releng repository
-echo "Cloning releng repository..."
-[ -d releng ] && rm -rf releng
-git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
-#this is where we import the siging key
-if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
- source $WORKSPACE/releng/utils/gpg_import_key.sh
-fi
-}
-
-signiso () {
-gpg2 -vvv --batch --yes --no-tty \
- --default-key opnfv-helpdesk@rt.linuxfoundation.org \
- --passphrase besteffort \
- --detach-sig $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso
-
-gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig
-echo "ISO signature Upload Complete!"
-}
-
-uploadiso () {
-# log info to console
-echo "Uploading $INSTALLER_TYPE artifact. This could take some time..."
-echo
-
-cd $WORKSPACE
-# upload artifact and additional files to google storage
-gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1
-gsutil cp $WORKSPACE/opnfv.properties \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
- gsutil cp $WORKSPACE/opnfv.properties \
- gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
-elif [[ "$JOB_NAME" =~ "merge" ]]; then
- echo "Uploaded Fuel ISO for a merged change"
-fi
-
-gsutil -m setmeta \
- -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/latest.properties \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1
-
-gsutil -m setmeta \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1
-
-# disabled errexit due to gsutil setmeta complaints
-# BadRequestException: 400 Invalid argument
-# check if we uploaded the file successfully to see if things are fine
-gsutil ls gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > /dev/null 2>&1
-if [[ $? -ne 0 ]]; then
- echo "Problem while uploading artifact!"
- echo "Check log $WORKSPACE/gsutil.iso.log on the machine where this build is done."
- exit 1
-fi
-
-echo "Done!"
-echo
-echo "--------------------------------------------------------"
-echo
-echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
-echo
-echo "--------------------------------------------------------"
-echo
-}
-
-nfsstore
-
-if [[ "$JOB_NAME" =~ merge ]]; then
- uploadiso
-elif [[ "$JOB_NAME" =~ build ]]; then
- importkey
- signiso
- uploadiso
-fi
-
diff --git a/jjb/fuel/fuel-verify-jobs.yml b/jjb/fuel/fuel-verify-jobs.yml
index 549f7dafa..45197fc4e 100644
--- a/jjb/fuel/fuel-verify-jobs.yml
+++ b/jjb/fuel/fuel-verify-jobs.yml
@@ -12,22 +12,20 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
- - danube:
+ - euphrates:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: false
+ disabled: true
#####################################
# patch verification phases
#####################################
phase:
- 'basic':
- slave-label: 'opnfv-build-ubuntu'
- - 'build':
- slave-label: 'opnfv-build-ubuntu'
+ slave-label: 'fuel-virtual'
- 'deploy-virtual':
- slave-label: 'opnfv-build-ubuntu'
+ slave-label: 'fuel-virtual'
- 'smoke-test':
- slave-label: 'opnfv-build-ubuntu'
+ slave-label: 'fuel-virtual'
#####################################
# jobs
#####################################
@@ -52,6 +50,11 @@
enabled: true
max-total: 4
option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'fuel-os-.*?-virtual-daily-.*'
+ block-level: 'NODE'
scm:
- git-scm-gerrit
@@ -85,9 +88,7 @@
- compare-type: ANT
pattern: 'ci/**'
- compare-type: ANT
- pattern: 'build/**'
- - compare-type: ANT
- pattern: 'deploy/**'
+ pattern: 'mcp/**'
disable-strict-forbidden-file-verification: 'true'
forbidden-file-paths:
- compare-type: ANT
@@ -98,8 +99,9 @@
- project-parameter:
project: '{project}'
branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - 'fuel-verify-defaults':
+ - 'fuel-virtual-defaults':
+ installer: '{installer}'
+ - '{installer}-defaults':
gs-pathname: '{gs-pathname}'
builders:
@@ -120,20 +122,6 @@
kill-phase-on: FAILURE
abort-all-job: true
- multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'fuel-verify-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
name: deploy-virtual
condition: SUCCESSFUL
projects:
@@ -173,7 +161,8 @@
- logrotate-default
- throttle:
enabled: true
- max-total: 6
+ max-total: 2
+ max-per-node: 1
option: 'project'
- build-blocker:
use-build-blocker: true
@@ -195,8 +184,9 @@
project: '{project}'
branch: '{branch}'
- '{slave-label}-defaults'
- - '{installer}-defaults'
- - 'fuel-verify-defaults':
+ - 'fuel-virtual-defaults':
+ installer: '{installer}'
+ - '{installer}-defaults':
gs-pathname: '{gs-pathname}'
builders:
@@ -215,20 +205,10 @@
echo "Not activated!"
- builder:
- name: 'fuel-verify-build-macro'
- builders:
- - shell:
- !include-raw: ./fuel-build.sh
- - shell:
- !include-raw: ./fuel-workspace-cleanup.sh
-
-- builder:
name: 'fuel-verify-deploy-virtual-macro'
builders:
- - shell: |
- #!/bin/bash
-
- echo "Not activated!"
+ - shell:
+ !include-raw: ./fuel-deploy.sh
- builder:
name: 'fuel-verify-smoke-test-macro'
@@ -237,21 +217,3 @@
#!/bin/bash
echo "Not activated!"
-#####################################
-# parameter macros
-#####################################
-- parameter:
- name: 'fuel-verify-defaults'
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/cache/$INSTALLER_TYPE
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/fuel/fuel-weekly-jobs.yml b/jjb/fuel/fuel-weekly-jobs.yml
index bd42ed85c..e1563ea38 100644
--- a/jjb/fuel/fuel-weekly-jobs.yml
+++ b/jjb/fuel/fuel-weekly-jobs.yml
@@ -15,10 +15,10 @@
branch: '{stream}'
disabled: false
gs-pathname: ''
- danube: &danube
- stream: danube
+ euphrates: &euphrates
+ stream: euphrates
branch: 'stable/{stream}'
- disabled: false
+ disabled: true
gs-pathname: '/{stream}'
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
@@ -34,10 +34,10 @@
<<: *master
- baremetal:
slave-label: fuel-baremetal
- <<: *danube
+ <<: *euphrates
- virtual:
slave-label: fuel-virtual
- <<: *danube
+ <<: *euphrates
#--------------------------------
# scenarios
#--------------------------------
@@ -72,6 +72,7 @@
blocking-jobs:
- 'fuel-os-.*?-{pod}-daily-.*'
- 'fuel-os-.*?-{pod}-weekly-.*'
+ - 'fuel-verify-.*'
block-level: 'NODE'
wrappers:
@@ -85,14 +86,13 @@
- project-parameter:
project: '{project}'
branch: '{branch}'
- - '{installer}-defaults'
+ - '{installer}-defaults':
+ gs-pathname: '{gs-pathname}'
- '{slave-label}-defaults':
installer: '{installer}'
- string:
name: DEPLOY_SCENARIO
default: '{scenario}'
- - fuel-weekly-parameter:
- gs-pathname: '{gs-pathname}'
builders:
- description-setter:
@@ -119,6 +119,7 @@
publishers:
- email:
recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
+ - email-jenkins-admins-on-failure
- job-template:
name: 'fuel-deploy-{pod}-weekly-{stream}'
@@ -147,14 +148,13 @@
- project-parameter:
project: '{project}'
branch: '{branch}'
- - '{installer}-defaults'
+ - '{installer}-defaults':
+ gs-pathname: '{gs-pathname}'
- '{slave-label}-defaults':
installer: '{installer}'
- string:
name: DEPLOY_SCENARIO
- default: 'os-odl_l2-nofeature-ha'
- - fuel-weekly-parameter:
- gs-pathname: '{gs-pathname}'
+ default: 'os-odl-nofeature-ha'
- string:
name: DEPLOY_TIMEOUT
default: '150'
@@ -171,33 +171,14 @@
- description-setter:
description: "Built on $NODE_NAME"
- shell:
- !include-raw-escape: ./fuel-download-artifact.sh
- - shell:
!include-raw-escape: ./fuel-deploy.sh
publishers:
- email:
recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
+ - email-jenkins-admins-on-failure
########################
-# parameter macros
-########################
-- parameter:
- name: fuel-weekly-parameter
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/cache/$INSTALLER_TYPE
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
-########################
# trigger macros
########################
#-----------------------------------------------
diff --git a/jjb/fuel/fuel-workspace-cleanup.sh b/jjb/fuel/fuel-workspace-cleanup.sh
deleted file mode 100755
index d8948c7a0..000000000
--- a/jjb/fuel/fuel-workspace-cleanup.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# delete the $WORKSPACE to open some space
-/bin/rm -rf $WORKSPACE
diff --git a/jjb/functest/functest-alpine.sh b/jjb/functest/functest-alpine.sh
new file mode 100755
index 000000000..57398faf6
--- /dev/null
+++ b/jjb/functest/functest-alpine.sh
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+set -e
+set +u
+set +o pipefail
+
+[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
+FUNCTEST_DIR=/home/opnfv/functest
+
+# Prepare OpenStack credentials volume
+if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
+ rc_file=$LAB_CONFIG/admin-openrc
+elif [[ ${INSTALLER_TYPE} == 'compass' && ${BRANCH} == 'master' ]]; then
+ cacert_file_vol="-v ${HOME}/os_cacert:${FUNCTEST_DIR}/conf/os_cacert"
+ echo "export OS_CACERT=${FUNCTEST_DIR}/conf/os_cacert" >> ${HOME}/opnfv-openrc.sh
+ rc_file=${HOME}/opnfv-openrc.sh
+else
+ rc_file=${HOME}/opnfv-openrc.sh
+fi
+rc_file_vol="-v ${rc_file}:${FUNCTEST_DIR}/conf/openstack.creds"
+
+
+# Set iptables rule to allow forwarding return traffic for container
+if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FORWARD | awk 'NR==3' | grep RETURN 2> ${redirect}; then
+ sudo iptables -I FORWARD -j RETURN
+fi
+
+DEPLOY_TYPE=baremetal
+[[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt
+HOST_ARCH=$(uname -m)
+
+echo "Functest: Start Docker and prepare environment"
+
+echo "Functest: Download images that will be used by test cases"
+images_dir="${HOME}/opnfv/functest/images"
+download_script=${WORKSPACE}/functest/ci/download_images.sh
+if [[ ! -f ${download_script} ]]; then
+ # to support Danube as well
+ wget https://git.opnfv.org/functest/plain/functest/ci/download_images.sh -O ${download_script} 2> ${redirect}
+fi
+chmod +x ${download_script}
+${download_script} ${images_dir} ${DEPLOY_SCENARIO} ${HOST_ARCH} 2> ${redirect}
+
+images_vol="-v ${images_dir}:${FUNCTEST_DIR}/images"
+
+dir_result="${HOME}/opnfv/functest/results/${BRANCH##*/}"
+mkdir -p ${dir_result}
+sudo rm -rf ${dir_result}/*
+results_vol="-v ${dir_result}:${FUNCTEST_DIR}/results"
+custom_params=
+test -f ${HOME}/opnfv/functest/custom/params_${DOCKER_TAG} && custom_params=$(cat ${HOME}/opnfv/functest/custom/params_${DOCKER_TAG})
+
+envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
+ -e NODE_NAME=${NODE_NAME} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
+ -e BUILD_TAG=${BUILD_TAG} -e DEPLOY_TYPE=${DEPLOY_TYPE}"
+
+if [[ ${INSTALLER_TYPE} == 'compass' && ${DEPLOY_SCENARIO} == *'os-nosdn-openo-ha'* ]]; then
+ ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
+ openo_msb_port=${openo_msb_port:-80}
+ openo_msb_endpoint="$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${installer_ip} \
+ 'mysql -ucompass -pcompass -Dcompass -e "select package_config from cluster;" \
+ | sed s/,/\\n/g | grep openo_ip | cut -d \" -f 4'):$openo_msb_port"
+
+ envs=${env}" -e OPENO_MSB_ENDPOINT=${openo_msb_endpoint}"
+fi
+
+volumes="${images_vol} ${results_vol} ${sshkey_vol} ${rc_file_vol} ${cacert_file_vol}"
+
+set +e
+
+if [ ${FUNCTEST_SUITE_NAME} == 'healthcheck' ]; then
+ tiers=(healthcheck)
+else
+ tiers=(healthcheck smoke features vnf)
+fi
+
+for tier in ${tiers[@]}; do
+ FUNCTEST_IMAGE=opnfv/functest-${tier}
+ echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
+ docker pull ${FUNCTEST_IMAGE} > /dev/null
+ cmd="docker run ${envs} ${volumes} ${FUNCTEST_IMAGE}"
+ echo "Running Functest tier '${tier}'. CMD: ${cmd}"
+ ${cmd}
+done
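The new functest-alpine.sh reads all of its configuration from Jenkins-provided environment variables. A hedged example of what a manual invocation might look like follows; every value is a placeholder, while the variable names are the ones the script reads above:

#!/bin/bash
# Illustrative only: every value below is a placeholder.
export WORKSPACE=$PWD
export INSTALLER_TYPE=fuel INSTALLER_IP=10.20.0.2
export DEPLOY_SCENARIO=os-nosdn-nofeature-ha
export BUILD_TAG=jenkins-functest-fuel-virtual-suite-master-1   # "virtual" => DEPLOY_TYPE=virt
export NODE_NAME=zte-pod1 CI_DEBUG=false DOCKER_TAG=latest
export FUNCTEST_SUITE_NAME=healthcheck   # any other value runs healthcheck, smoke, features, vnf
bash jjb/functest/functest-alpine.sh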
diff --git a/jjb/functest/functest-daily-jobs.yml b/jjb/functest/functest-daily-jobs.yml
index 8de092d29..b1d7e748d 100644
--- a/jjb/functest/functest-daily-jobs.yml
+++ b/jjb/functest/functest-daily-jobs.yml
@@ -150,22 +150,6 @@
slave-label: '{pod}'
installer: apex
<<: *master
- - arm-pod2:
- slave-label: '{pod}'
- installer: fuel
- <<: *master
- - arm-pod3:
- slave-label: '{pod}'
- installer: fuel
- <<: *master
- - arm-pod4:
- slave-label: '{pod}'
- installer: fuel
- <<: *master
- - arm-virtual1:
- slave-label: '{pod}'
- installer: fuel
- <<: *master
- zte-pod1:
slave-label: '{pod}'
installer: fuel
@@ -186,29 +170,23 @@
slave-label: '{pod}'
installer: fuel
<<: *danube
- - arm-pod2:
- slave-label: '{pod}'
- installer: fuel
- <<: *danube
- - arm-pod3:
- slave-label: '{pod}'
- installer: fuel
- <<: *danube
- - arm-pod4:
- slave-label: '{pod}'
- installer: fuel
- <<: *danube
- - arm-virtual1:
- slave-label: '{pod}'
- installer: fuel
- <<: *danube
# PODs for verify jobs triggered by each patch upload
- - ool-virtual1:
- slave-label: '{pod}'
- installer: apex
- <<: *master
+# - ool-virtual1:
+# slave-label: '{pod}'
+# installer: apex
+# <<: *master
#--------------------------------
+ alpine-pod:
+ - ericsson-virtual-pod1bl01:
+ slave-label: '{alpine-pod}'
+ installer: fuel
+ <<: *master
+ - huawei-virtual5:
+ slave-label: '{alpine-pod}'
+ installer: compass
+ <<: *master
+
testsuite:
- 'suite':
job-timeout: 60
@@ -217,6 +195,7 @@
jobs:
- 'functest-{installer}-{pod}-{testsuite}-{stream}'
+ - 'functest-alpine-{installer}-{alpine-pod}-{testsuite}-{stream}'
################################
# job template
@@ -269,6 +248,46 @@
description: "Built on $NODE_NAME"
- 'functest-{testsuite}-builder'
+- job-template:
+ name: 'functest-alpine-{installer}-{alpine-pod}-{testsuite}-{stream}'
+
+ concurrent: true
+
+ properties:
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-per-node: 1
+ option: 'project'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER Suite: $FUNCTEST_SUITE_NAME Scenario: $DEPLOY_SCENARIO'
+ - timeout:
+ timeout: '{job-timeout}'
+ abort: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - '{installer}-defaults'
+ - '{slave-label}-defaults'
+ - 'functest-{testsuite}-parameter'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-nosdn-nofeature-noha'
+ - functest-parameter:
+ gs-pathname: '{gs-pathname}'
+
+ scm:
+ - git-scm
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - 'functest-alpine-daily-builder'
+
########################
# parameter macros
########################
@@ -345,10 +364,9 @@
name: functest-daily-builder
builders:
- 'functest-cleanup'
- - 'set-functest-env'
+ - 'set-functest-env-alpine'
- 'functest-daily'
- 'functest-store-results'
- - 'functest-exit'
- builder:
name: functest-suite-builder
@@ -360,10 +378,20 @@
- 'functest-exit'
- builder:
+ name: functest-alpine-daily-builder
+ builders:
+ - shell:
+ !include-raw:
+ - ./functest-env-presetup.sh
+ - ../../utils/fetch_os_creds.sh
+ - ./functest-alpine.sh
+ - ../../utils/push-test-logs.sh
+
+- builder:
name: functest-daily
builders:
- shell:
- !include-raw: ./functest-loop.sh
+ !include-raw: ./functest-alpine.sh
- builder:
@@ -382,6 +410,14 @@
- ./set-functest-env.sh
- builder:
+ name: set-functest-env-alpine
+ builders:
+ - shell:
+ !include-raw:
+ - ./functest-env-presetup.sh
+ - ../../utils/fetch_os_creds.sh
+
+- builder:
name: functest-store-results
builders:
- shell:
diff --git a/jjb/functest/functest-loop.sh b/jjb/functest/functest-loop.sh
index a590d9f95..00a5f135b 100755
--- a/jjb/functest/functest-loop.sh
+++ b/jjb/functest/functest-loop.sh
@@ -2,7 +2,11 @@
set +e
[[ "$PUSH_RESULTS_TO_DB" == "true" ]] && flags+="-r"
-cmd="run_tests -t all ${flags}"
+if [ "$BRANCH" == 'master' ]; then
+ cmd="run_tests -t all ${flags}"
+else
+ cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/run_tests.py -t all ${flags}"
+fi
container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1)
docker exec $container_id $cmd
diff --git a/jjb/functest/functest-project-jobs.yml b/jjb/functest/functest-project-jobs.yml
index 7036f20c0..c25e4ab4a 100644
--- a/jjb/functest/functest-project-jobs.yml
+++ b/jjb/functest/functest-project-jobs.yml
@@ -9,6 +9,8 @@
jobs:
- 'functest-verify-{stream}'
+ - 'functest-verify-{phase}-{stream}'
+ - 'functest-docs-upload-{stream}'
stream:
- master:
@@ -18,13 +20,23 @@
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: false
+ disabled: true
+
+ phase:
+ - 'unit-tests-and-docs':
+ slave-label: 'opnfv-build-ubuntu'
+ - 'build-x86_64':
+ slave-label: 'opnfv-build-ubuntu'
+ - 'build-aarch64':
+ slave-label: 'opnfv-build-ubuntu-arm'
- job-template:
name: 'functest-verify-{stream}'
disabled: '{obj:disabled}'
+ project-type: 'multijob'
+
parameters:
- project-parameter:
project: '{project}'
@@ -35,6 +47,109 @@
- git-scm-gerrit
triggers:
+ - 'functest-verify-triggers-macro':
+ project: '{project}'
+ branch: '{branch}'
+
+ builders:
+ - shell: |
+ #!/bin/bash
+ # nothing to do here; the actual work
+ # is done in the phase jobs
+ echo "Triggering phase jobs!"
+ - multijob:
+ name: 'functest-build-and-unittest'
+ execution-type: PARALLEL
+ projects:
+ - name: 'functest-verify-unit-tests-and-docs-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ git-revision: true
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: false
+ - name: 'functest-verify-build-x86_64-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ ARCH=x86_64
+ git-revision: true
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: false
+ - name: 'functest-verify-build-aarch64-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ ARCH=aarch64
+ git-revision: true
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: false
+
+- job-template:
+ name: 'functest-verify-{phase}-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ wrappers:
+ - ssh-agent-wrapper
+ - build-timeout:
+ timeout: 30
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - '{slave-label}-defaults'
+
+ scm:
+ - git-scm-gerrit
+
+ builders:
+ - 'functest-verify-{phase}-builders-macro'
+
+ publishers:
+ - 'functest-verify-{phase}-publishers-macro'
+
+- job-template:
+ name: 'functest-docs-upload-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
+
+ scm:
+ - git-scm
+
+ triggers:
+ - 'functest-docs-upload-triggers-macro':
+ project: '{project}'
+ branch: '{branch}'
+
+ builders:
+ - functest-upload-doc-artifact
+
+################################
+# job triggers
+################################
+- trigger:
+ name: 'functest-verify-triggers-macro'
+ triggers:
- gerrit:
server-name: 'gerrit.opnfv.org'
trigger-on:
@@ -57,10 +172,55 @@
forbidden-file-paths:
- compare-type: ANT
pattern: 'docs/**|.gitignore'
+- trigger:
+ name: 'functest-docs-upload-triggers-macro'
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - change-merged-event
+ - comment-added-contains-event:
+ comment-contains-value: 'remerge'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**|.gitignore'
+################################
+# job builders
+################################
+- builder:
+ name: 'functest-verify-unit-tests-and-docs-builders-macro'
builders:
- - functest-unit-tests-and-docs-build
-
+ - shell: |
+ cd $WORKSPACE && tox
+- builder:
+ name: 'functest-verify-build-x86_64-builders-macro'
+ builders:
+ - shell: |
+ echo "Not activated!"
+- builder:
+ name: 'functest-verify-build-aarch64-builders-macro'
+ builders:
+ - shell: |
+ echo "Not activated!"
+- builder:
+ name: 'functest-upload-doc-artifact'
+ builders:
+ - shell: |
+ cd $WORKSPACE && tox -edocs
+ wget -O - https://git.opnfv.org/releng/plain/utils/upload-artifact.sh | bash -s "api/_build" "docs"
+################################
+# job publishers
+################################
+- publisher:
+ name: 'functest-verify-unit-tests-and-docs-publishers-macro'
publishers:
- junit:
results: nosetests.xml
@@ -79,13 +239,12 @@
healthy: 50
unhealthy: 40
failing: 30
-
-################################
-# job builders
-################################
-
-- builder:
- name: functest-unit-tests-and-docs-build
- builders:
- - shell: |
- cd $WORKSPACE && tox
+ - email-jenkins-admins-on-failure
+- publisher:
+ name: 'functest-verify-build-x86_64-publishers-macro'
+ publishers:
+ - email-jenkins-admins-on-failure
+- publisher:
+ name: 'functest-verify-build-aarch64-publishers-macro'
+ publishers:
+ - email-jenkins-admins-on-failure
diff --git a/jjb/functest/functest-suite.sh b/jjb/functest/functest-suite.sh
index 469a57726..9b7f1356c 100755
--- a/jjb/functest/functest-suite.sh
+++ b/jjb/functest/functest-suite.sh
@@ -10,7 +10,11 @@ global_ret_val=0
tests=($(echo $FUNCTEST_SUITE_NAME | tr "," "\n"))
for test in ${tests[@]}; do
- cmd="run_tests -t $test"
+ if [ "$BRANCH" == 'master' ]; then
+ cmd="run_tests -t $test"
+ else
+ cmd="python /home/opnfv/repos/functest/functest/ci/run_tests.py -t $test"
+ fi
docker exec $container_id $cmd
let global_ret_val+=$?
done
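functest-suite.sh runs a comma-separated list of test cases one by one. A standalone sketch of the split-and-loop pattern used above, with example suite names, is:

#!/bin/bash
# Sketch only: the suite names are examples.
FUNCTEST_SUITE_NAME="healthcheck,vping_ssh"
tests=($(echo "$FUNCTEST_SUITE_NAME" | tr "," "\n"))
for test in "${tests[@]}"; do
    echo "would run: run_tests -t ${test}"
done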
diff --git a/jjb/functest/set-functest-env.sh b/jjb/functest/set-functest-env.sh
index 01cab5e0f..bb79dacbe 100755
--- a/jjb/functest/set-functest-env.sh
+++ b/jjb/functest/set-functest-env.sh
@@ -6,11 +6,20 @@ set +o pipefail
[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
+DEPLOY_TYPE=baremetal
+[[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt
+HOST_ARCH=$(uname -m)
+
# Prepare OpenStack credentials volume
+rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/home/opnfv/functest/conf/openstack.creds"
+
if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
rc_file_vol="-v $LAB_CONFIG/admin-openrc:/home/opnfv/functest/conf/openstack.creds"
-else
- rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/home/opnfv/functest/conf/openstack.creds"
+elif [[ ${INSTALLER_TYPE} == 'compass' && ${BRANCH} == 'master' ]]; then
+ cacert_file_vol="-v ${HOME}/os_cacert:/home/opnfv/functest/conf/os_cacert"
+ echo "export OS_CACERT=/home/opnfv/functest/conf/os_cacert" >> ${HOME}/opnfv-openrc.sh
+elif [[ ${INSTALLER_TYPE} == 'fuel' && ${DEPLOY_TYPE} == 'baremetal' ]]; then
+ cacert_file_vol="-v ${HOME}/os_cacert:/etc/ssl/certs/mcp_os_cacert"
fi
@@ -19,17 +28,15 @@ if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FOR
sudo iptables -I FORWARD -j RETURN
fi
-DEPLOY_TYPE=baremetal
-[[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt
-
echo "Functest: Start Docker and prepare environment"
if [ "$BRANCH" != 'stable/danube' ]; then
echo "Functest: Download images that will be used by test cases"
images_dir="${HOME}/opnfv/functest/images"
chmod +x ${WORKSPACE}/functest/ci/download_images.sh
- ${WORKSPACE}/functest/ci/download_images.sh ${images_dir} 2> ${redirect}
+ ${WORKSPACE}/functest/ci/download_images.sh ${images_dir} > ${redirect}
images_vol="-v ${images_dir}:/home/opnfv/functest/images"
+ echo "Functest: Images successfully downloaded"
fi
dir_result="${HOME}/opnfv/functest/results/${BRANCH##*/}"
@@ -38,13 +45,21 @@ sudo rm -rf ${dir_result}/*
results_vol="-v ${dir_result}:/home/opnfv/functest/results"
custom_params=
test -f ${HOME}/opnfv/functest/custom/params_${DOCKER_TAG} && custom_params=$(cat ${HOME}/opnfv/functest/custom/params_${DOCKER_TAG})
+echo "Functest: custom parameters successfully retrieved: ${custom_params}"
envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
-e NODE_NAME=${NODE_NAME} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
-e BUILD_TAG=${BUILD_TAG} -e CI_DEBUG=${CI_DEBUG} -e DEPLOY_TYPE=${DEPLOY_TYPE}"
+ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
+
+if [ "${INSTALLER_TYPE}" == 'fuel' ] && [ "$BRANCH" != 'stable/danube' ]; then
+ COMPUTE_ARCH=$(ssh -l ubuntu ${INSTALLER_IP} -i ${SSH_KEY} ${ssh_options} \
+ "sudo salt 'cmp*' grains.get cpuarch --out yaml | awk '{print \$2; exit}'")
+ envs="${envs} -e POD_ARCH=${COMPUTE_ARCH}"
+fi
+
if [[ ${INSTALLER_TYPE} == 'compass' && ${DEPLOY_SCENARIO} == *'os-nosdn-openo-ha'* ]]; then
- ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
openo_msb_port=${openo_msb_port:-80}
openo_msb_endpoint="$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${installer_ip} \
'mysql -ucompass -pcompass -Dcompass -e "select package_config from cluster;" \
@@ -54,12 +69,13 @@ if [[ ${INSTALLER_TYPE} == 'compass' && ${DEPLOY_SCENARIO} == *'os-nosdn-openo-h
fi
if [ "$BRANCH" != 'stable/danube' ]; then
- volumes="${images_vol} ${results_vol} ${sshkey_vol} ${stackrc_vol} ${rc_file_vol}"
+ volumes="${images_vol} ${results_vol} ${sshkey_vol} ${stackrc_vol} ${rc_file_vol} ${cacert_file_vol}"
else
volumes="${results_vol} ${sshkey_vol} ${stackrc_vol} ${rc_file_vol}"
fi
-HOST_ARCH=$(uname -m)
+echo "Functest: volumes defined"
+
FUNCTEST_IMAGE="opnfv/functest"
if [ "$HOST_ARCH" = "aarch64" ]; then
FUNCTEST_IMAGE="${FUNCTEST_IMAGE}_${HOST_ARCH}"
@@ -90,7 +106,12 @@ if [ $(docker ps | grep "${FUNCTEST_IMAGE}:${DOCKER_TAG}" | wc -l) == 0 ]; then
exit 1
fi
-cmd="prepare_env start"
+if [ "$BRANCH" == 'master' ]; then
+ cmd="prepare_env start"
+else
+ cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/prepare_env.py start"
+fi
+
echo "Executing command inside the docker: ${cmd}"
docker exec ${container_id} ${cmd}
diff --git a/jjb/global/installer-params.yml b/jjb/global/installer-params.yml
index ee154af03..8c01a9f1f 100644
--- a/jjb/global/installer-params.yml
+++ b/jjb/global/installer-params.yml
@@ -1,142 +1,144 @@
+---
- parameter:
name: 'apex-defaults'
parameters:
- - string:
- name: INSTALLER_IP
- default: '192.168.X.X'
- description: 'IP of the installer'
- - string:
- name: INSTALLER_TYPE
- default: apex
- description: 'Installer used for deploying OPNFV on this POD'
- - string:
- name: EXTERNAL_NETWORK
- default: 'external'
- description: 'external network for test'
+ - string:
+ name: INSTALLER_IP
+ default: '192.168.X.X'
+ description: 'IP of the installer'
+ - string:
+ name: INSTALLER_TYPE
+ default: apex
+ description: 'Installer used for deploying OPNFV on this POD'
+ - string:
+ name: EXTERNAL_NETWORK
+ default: 'external'
+ description: 'external network for test'
- parameter:
name: 'compass-defaults'
parameters:
- - string:
- name: INSTALLER_IP
- default: '192.168.200.2'
- description: 'IP of the installer'
- - string:
- name: INSTALLER_TYPE
- default: compass
- description: 'Installer used for deploying OPNFV on this POD'
- - string:
- name: EXTERNAL_NETWORK
- default: 'ext-net'
- description: 'external network for test'
+ - string:
+ name: INSTALLER_IP
+ default: '192.168.200.2'
+ description: 'IP of the installer'
+ - string:
+ name: INSTALLER_TYPE
+ default: compass
+ description: 'Installer used for deploying OPNFV on this POD'
+ - string:
+ name: EXTERNAL_NETWORK
+ default: 'ext-net'
+ description: 'external network for test'
- parameter:
name: 'fuel-defaults'
parameters:
- - string:
- name: INSTALLER_IP
- default: '10.20.0.2'
- description: 'IP of the installer'
- - string:
- name: SALT_MASTER_IP
- default: '192.168.10.100'
- description: 'IP of the salt master (for mcp deployments)'
- - string:
- name: SSH_KEY
- default: '/tmp/mcp.rsa'
- description: 'Path to private SSH key to access environment nodes'
- - string:
- name: INSTALLER_TYPE
- default: fuel
- description: 'Installer used for deploying OPNFV on this POD'
- - string:
- name: EXTERNAL_NETWORK
- default: 'admin_floating_net'
- description: 'external network for test'
- - string:
- name: BRIDGE
- default: 'pxebr'
- description: 'pxe bridge for booting of Fuel master'
+ - string:
+ name: INSTALLER_IP
+ default: '10.20.0.2'
+ description: 'IP of the installer'
+ - string:
+ name: SSH_KEY
+ default: "/var/lib/opnfv/mcp.rsa"
+ description: 'Path to private SSH key to access environment nodes'
+ - string:
+ name: INSTALLER_TYPE
+ default: fuel
+ description: 'Installer used for deploying OPNFV on this POD'
+ - string:
+ name: EXTERNAL_NETWORK
+ default: 'admin_floating_net'
+ description: 'external network for test'
+ - string:
+ name: BRIDGE
+ default: 'pxebr'
+ description: 'Bridge(s) to be used by salt master'
+ - string:
+ name: GS_URL
+ default: '$GS_BASE{gs-pathname}'
+ description: "URL to Google Storage."
- parameter:
name: 'joid-defaults'
parameters:
- - string:
- name: INSTALLER_IP
- default: '192.168.122.5'
- description: 'IP of the installer'
- - string:
- name: INSTALLER_TYPE
- default: joid
- description: 'Installer used for deploying OPNFV on this POD'
- - string:
- name: MODEL
- default: 'os'
- description: 'Model to deploy (os|k8)'
- - string:
- name: OS_RELEASE
- default: 'newton'
- description: 'OpenStack release (mitaka|newton)'
- - string:
- name: EXTERNAL_NETWORK
- default: ext-net
- description: "External network used for Floating ips."
- - string:
- name: LAB_CONFIG
- default: "$HOME/joid_config"
- description: "Local lab config and Openstack openrc location"
- - string:
- name: MAAS_REINSTALL
- default: 'false'
- description: "Reinstall MAAS and Bootstrap before deploy [true/false]"
- - string:
- name: UBUNTU_DISTRO
- default: 'xenial'
- description: "Ubuntu distribution to use for Openstack (xenial)"
- - string:
- name: CPU_ARCHITECTURE
- default: 'amd64'
- description: "CPU Architecture to use for Ubuntu distro "
+ - string:
+ name: INSTALLER_IP
+ default: '192.168.122.5'
+ description: 'IP of the installer'
+ - string:
+ name: INSTALLER_TYPE
+ default: joid
+ description: 'Installer used for deploying OPNFV on this POD'
+ - string:
+ name: MODEL
+ default: 'os'
+ description: 'Model to deploy (os|k8)'
+ - string:
+ name: OS_RELEASE
+ default: 'ocata'
+ description: 'OpenStack release (mitaka|newton|ocata)'
+ - string:
+ name: EXTERNAL_NETWORK
+ default: ext-net
+ description: "External network used for Floating ips."
+ - string:
+ name: LAB_CONFIG
+ default: "$HOME/joid_config"
+ description: "Local lab config and Openstack openrc location"
+ - string:
+ name: MAAS_REINSTALL
+ default: 'false'
+ description: "Reinstall MAAS and Bootstrap before deploy [true/false]"
+ - string:
+ name: UBUNTU_DISTRO
+ default: 'xenial'
+ description: "Ubuntu distribution to use for Openstack (xenial)"
+ - string:
+ name: CPU_ARCHITECTURE
+ default: 'amd64'
+ description: "CPU Architecture to use for Ubuntu distro "
- parameter:
name: 'daisy-defaults'
parameters:
- - string:
- name: INSTALLER_IP
- default: '10.20.7.3'
- description: 'IP of the installer'
- - string:
- name: INSTALLER_TYPE
- default: daisy
- description: 'Installer used for deploying OPNFV on this POD'
- - string:
- name: BRIDGE
- default: 'br7'
- description: 'pxe bridge for booting of Fuel master'
+ - string:
+ name: INSTALLER_IP
+ default: '10.20.7.3'
+ description: 'IP of the installer'
+ - string:
+ name: INSTALLER_TYPE
+ default: daisy
+ description: 'Installer used for deploying OPNFV on this POD'
+ - string:
+ name: BRIDGE
+ default: 'br7'
+ description: 'pxe bridge for booting of Daisy master'
- parameter:
name: 'infra-defaults'
parameters:
- - string:
- name: INSTALLER_IP
- default: '192.168.122.2'
- description: 'IP of the installer'
- - string:
- name: INSTALLER_TYPE
- default: infra
- description: 'Installer used for deploying OPNFV on this POD'
+ - string:
+ name: INSTALLER_IP
+ default: '192.168.122.2'
+ description: 'IP of the installer'
+ - string:
+ name: INSTALLER_TYPE
+ default: infra
+ description: 'Installer used for deploying OPNFV on this POD'
+
- parameter:
name: 'netvirt-defaults'
parameters:
- - string:
- name: INSTALLER_IP
- default: '192.168.X.X'
- description: 'IP of the installer'
- - string:
- name: INSTALLER_TYPE
- default: apex
- description: 'Installer used for deploying OPNFV on this POD'
- - string:
- name: EXTERNAL_NETWORK
- default: 'external'
- description: 'external network for test'
+ - string:
+ name: INSTALLER_IP
+ default: '192.168.X.X'
+ description: 'IP of the installer'
+ - string:
+ name: INSTALLER_TYPE
+ default: apex
+ description: 'Installer used for deploying OPNFV on this POD'
+ - string:
+ name: EXTERNAL_NETWORK
+ default: 'external'
+ description: 'external network for test'
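(Illustrative note, not part of the patch.) A minimal sketch of how a job template would consume one of the parameter macros above — here the reworked 'fuel-defaults', whose SSH_KEY now defaults to /var/lib/opnfv/mcp.rsa and which gains a GS_URL parameter. The template and project names are hypothetical placeholders:

# Hypothetical job template -- names are placeholders, only the macro usage is real.
- job-template:
    name: 'example-fuel-daily-{stream}'
    parameters:
      - project-parameter:
          project: '{project}'
          branch: '{branch}'
      - 'fuel-defaults'    # injects INSTALLER_IP, SSH_KEY, EXTERNAL_NETWORK, BRIDGE, GS_URL
    builders:
      - shell: |
          #!/bin/bash
          # The macro's parameters arrive as ordinary job environment variables.
          echo "Using SSH key $SSH_KEY, salt master bridge $BRIDGE"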
diff --git a/jjb/global/releng-defaults.yml b/jjb/global/releng-defaults.yml
index 283888603..2e94767e8 100644
--- a/jjb/global/releng-defaults.yml
+++ b/jjb/global/releng-defaults.yml
@@ -1,14 +1,22 @@
+---
# jjb defaults
- defaults:
name: global
wrappers:
- - ssh-agent-wrapper
+ - ssh-agent-wrapper
project-type: freestyle
node: master
properties:
- - logrotate-default
+ - logrotate-default
+
+ publishers:
+ # Any project that has a publisher will not have this macro
+ # included due to the nature of JJB defaults. Projects will have
+ # to explicitly add this macro to their list of publishers in
+ # order for emails to be sent.
+ - email-jenkins-admins-on-failure
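(Illustrative note, not part of the patch.) As the comment above says, JJB defaults are replaced wholesale as soon as a job declares its own publishers, so such jobs have to re-list the email macro themselves. A hypothetical example, reusing the archive-artifacts macro defined in releng-macros.yml:

# Hypothetical job template -- the name and artifact pattern are placeholders.
- job-template:
    name: 'example-project-merge-{stream}'
    publishers:
      - archive-artifacts:
          artifacts: 'build_output/**'        # declaring any publisher drops the global default...
      - email-jenkins-admins-on-failure       # ...so the email macro is added back explicitly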
diff --git a/jjb/global/releng-macros.yml b/jjb/global/releng-macros.yml
index e4dfa8d80..59415f5ca 100644
--- a/jjb/global/releng-macros.yml
+++ b/jjb/global/releng-macros.yml
@@ -1,3 +1,4 @@
+---
# Releng macros
#
# NOTE: make sure macros are listed in execution ordered.
@@ -14,250 +15,258 @@
- parameter:
name: project-parameter
parameters:
- - string:
- name: PROJECT
- default: '{project}'
- description: "JJB configured PROJECT parameter to identify an opnfv Gerrit project"
- - string:
- name: GS_BASE
- default: artifacts.opnfv.org/$PROJECT
- description: "URL to Google Storage."
- - string:
- name: GS_BASE_PROXY
- default: build.opnfv.org/artifacts.opnfv.org/$PROJECT
- description: "URL to Google Storage proxy"
- - string:
- name: BRANCH
- default: '{branch}'
- description: "JJB configured BRANCH parameter (e.g. master, stable/danube)"
- - string:
- name: GERRIT_BRANCH
- default: '{branch}'
- description: "JJB configured GERRIT_BRANCH parameter (deprecated)"
+ - string:
+ name: PROJECT
+ default: '{project}'
+ description: "JJB configured PROJECT parameter to identify an opnfv Gerrit project"
+ - string:
+ name: GS_BASE
+ default: artifacts.opnfv.org/$PROJECT
+ description: "URL to Google Storage."
+ - string:
+ name: GS_BASE_PROXY
+ default: build.opnfv.org/artifacts.opnfv.org/$PROJECT
+ description: "URL to Google Storage proxy"
+ - string:
+ name: BRANCH
+ default: '{branch}'
+ description: "JJB configured BRANCH parameter (e.g. master, stable/danube)"
+ - string:
+ name: GERRIT_BRANCH
+ default: '{branch}'
+ description: "JJB configured GERRIT_BRANCH parameter (deprecated)"
- property:
name: logrotate-default
properties:
- - build-discarder:
- days-to-keep: 60
- num-to-keep: 200
- artifact-days-to-keep: 60
- artifact-num-to-keep: 200
+ - build-discarder:
+ days-to-keep: 60
+ num-to-keep: 200
+ artifact-days-to-keep: 60
+ artifact-num-to-keep: 200
- scm:
name: git-scm
scm:
- - git: &git-scm-defaults
- credentials-id: '$SSH_CREDENTIAL_ID'
- url: '$GIT_BASE'
- branches:
- - 'origin/$BRANCH'
- timeout: 15
+ - git: &git-scm-defaults
+ credentials-id: '$SSH_CREDENTIAL_ID'
+ url: '$GIT_BASE'
+ branches:
+ - 'origin/$BRANCH'
+ timeout: 15
- scm:
name: git-scm-gerrit
scm:
- - git:
- choosing-strategy: 'gerrit'
- refspec: '$GERRIT_REFSPEC'
- <<: *git-scm-defaults
+ - git:
+ choosing-strategy: 'gerrit'
+ refspec: '$GERRIT_REFSPEC'
+ <<: *git-scm-defaults
- scm:
name: git-scm-with-submodules
scm:
- - git:
- credentials-id: '$SSH_CREDENTIAL_ID'
- url: '$GIT_BASE'
- refspec: ''
- branches:
- - 'refs/heads/{branch}'
- skip-tag: true
- wipe-workspace: true
- submodule:
- recursive: true
- timeout: 20
+ - git:
+ credentials-id: '$SSH_CREDENTIAL_ID'
+ url: '$GIT_BASE'
+ refspec: ''
+ branches:
+ - 'refs/heads/{branch}'
+ skip-tag: true
+ wipe-workspace: true
+ submodule:
+ recursive: true
+ timeout: 20
- trigger:
name: 'daily-trigger-disabled'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: 'weekly-trigger-disabled'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: gerrit-trigger-patchset-created
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: 'ANT'
- pattern: '{files}'
- skip-vote:
- successful: false
- failed: false
- unstable: false
- notbuilt: false
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: 'ANT'
+ pattern: '{files}'
+ skip-vote:
+ successful: false
+ failed: false
+ unstable: false
+ notbuilt: false
- trigger:
name: gerrit-trigger-change-merged
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: 'ANT'
- pattern: '{files}'
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - change-merged-event
+ - comment-added-contains-event:
+ comment-contains-value: 'remerge'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: 'ANT'
+ pattern: '{files}'
- trigger:
name: 'experimental'
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - comment-added-contains-event:
- comment-contains-value: 'check-experimental'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: 'ANT'
- pattern: 'tests/**'
- skip-vote:
- successful: true
- failed: true
- unstable: true
- notbuilt: true
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - comment-added-contains-event:
+ comment-contains-value: 'check-experimental'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: 'ANT'
+ pattern: '{files}'
+ skip-vote:
+ successful: true
+ failed: true
+ unstable: true
+ notbuilt: true
- wrapper:
name: ssh-agent-wrapper
wrappers:
- - ssh-agent-credentials:
- users:
- - 'd42411ac011ad6f3dd2e1fa34eaa5d87f910eb2e'
+ - ssh-agent-credentials:
+ users:
+ - 'd42411ac011ad6f3dd2e1fa34eaa5d87f910eb2e'
+
+- wrapper:
+ name: build-timeout
+ wrappers:
+ - timeout:
+ timeout: '{timeout}'
+ timeout-var: 'BUILD_TIMEOUT'
+ fail: true
- wrapper:
name: fix-workspace-permissions
wrappers:
- - pre-scm-buildstep:
+ - pre-scm-buildstep:
- shell: |
- #!/bin/bash
- sudo chown -R $USER:$USER $WORKSPACE || exit 1
+ #!/bin/bash
+ sudo chown -R $USER:$USER $WORKSPACE || exit 1
- builder:
name: build-html-and-pdf-docs-output
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o xtrace
- export PATH=$PATH:/usr/local/bin/
- git clone ssh://gerrit.opnfv.org:29418/opnfvdocs docs_build/_opnfvdocs
- GERRIT_COMMENT=gerrit_comment.txt ./docs_build/_opnfvdocs/scripts/docs-build.sh
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o xtrace
+ export PATH=$PATH:/usr/local/bin/
+ git clone ssh://gerrit.opnfv.org:29418/opnfvdocs docs_build/_opnfvdocs
+ GERRIT_COMMENT=gerrit_comment.txt ./docs_build/_opnfvdocs/scripts/docs-build.sh
- builder:
name: upload-under-review-docs-to-opnfv-artifacts
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
- set -o xtrace
- export PATH=$PATH:/usr/local/bin/
-
- [[ $GERRIT_CHANGE_NUMBER =~ .+ ]]
- [[ -d docs_output ]] || exit 0
-
- echo
- echo "###########################"
- echo "UPLOADING DOCS UNDER REVIEW"
- echo "###########################"
- echo
-
- gs_base="artifacts.opnfv.org/$PROJECT/review"
- gs_path="$gs_base/$GERRIT_CHANGE_NUMBER"
- local_path="upload/$GERRIT_CHANGE_NUMBER"
-
- mkdir -p upload
- mv docs_output "$local_path"
- gsutil -m cp -r "$local_path" "gs://$gs_base"
-
- gsutil -m setmeta \
- -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- "gs://$gs_path"/**.html > /dev/null 2>&1
-
- echo "Document link(s):" >> gerrit_comment.txt
- find "$local_path" | grep -e 'index.html$' -e 'pdf$' | \
- sed -e "s|^$local_path| http://$gs_path|" >> gerrit_comment.txt
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o pipefail
+ set -o xtrace
+ export PATH=$PATH:/usr/local/bin/
+
+ [[ $GERRIT_CHANGE_NUMBER =~ .+ ]]
+ [[ -d docs_output ]] || exit 0
+
+ echo
+ echo "###########################"
+ echo "UPLOADING DOCS UNDER REVIEW"
+ echo "###########################"
+ echo
+
+ gs_base="artifacts.opnfv.org/$PROJECT/review"
+ gs_path="$gs_base/$GERRIT_CHANGE_NUMBER"
+ local_path="upload/$GERRIT_CHANGE_NUMBER"
+
+ mkdir -p upload
+ mv docs_output "$local_path"
+ gsutil -m cp -r "$local_path" "gs://$gs_base"
+
+ gsutil -m setmeta \
+ -h "Content-Type:text/html" \
+ -h "Cache-Control:private, max-age=0, no-transform" \
+ "gs://$gs_path"/**.html > /dev/null 2>&1
+
+ echo "Document link(s):" >> gerrit_comment.txt
+ find "$local_path" | grep -e 'index.html$' -e 'pdf$' | \
+ sed -e "s|^$local_path| http://$gs_path|" >> gerrit_comment.txt
- builder:
name: upload-generated-docs-to-opnfv-artifacts
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
- set -o xtrace
- export PATH=$PATH:/usr/local/bin/
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o pipefail
+ set -o xtrace
+ export PATH=$PATH:/usr/local/bin/
- [[ -d docs_output ]] || exit 0
+ [[ -d docs_output ]] || exit 0
- echo
- echo "########################"
- echo "UPLOADING GENERATED DOCS"
- echo "########################"
- echo
+ echo
+ echo "########################"
+ echo "UPLOADING GENERATED DOCS"
+ echo "########################"
+ echo
- echo "gs_path="$GS_URL/docs""
- echo "local_path="upload/docs""
+ echo "gs_path="$GS_URL/docs""
+ echo "local_path="upload/docs""
- gs_path="$GS_URL/docs"
- local_path="upload/docs"
+ gs_path="$GS_URL/docs"
+ local_path="upload/docs"
- mkdir -p upload
- mv docs_output "$local_path"
- ls "$local_path"
+ mkdir -p upload
+ mv docs_output "$local_path"
+ ls "$local_path"
- echo "gsutil -m cp -r "$local_path"/* "gs://$gs_path""
- gsutil -m cp -r "$local_path"/* "gs://$gs_path"
+ echo "gsutil -m cp -r "$local_path"/* "gs://$gs_path""
+ gsutil -m cp -r "$local_path"/* "gs://$gs_path"
- gsutil -m setmeta \
- -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- "gs://$gs_path"/**.html > /dev/null 2>&1
+ gsutil -m setmeta \
+ -h "Content-Type:text/html" \
+ -h "Cache-Control:private, max-age=0, no-transform" \
+ "gs://$gs_path"/**.html > /dev/null 2>&1
- echo "Document link(s):" >> gerrit_comment.txt
- find "$local_path" | grep -e 'index.html$' -e 'pdf$' | \
- sed -e "s|^$local_path| http://$gs_path|" >> gerrit_comment.txt
+ echo "Document link(s):" >> gerrit_comment.txt
+ find "$local_path" | grep -e 'index.html$' -e 'pdf$' | \
+ sed -e "s|^$local_path| http://$gs_path|" >> gerrit_comment.txt
# To take advantage of this macro, have your build write
# out the file 'gerrit_comment.txt' with information to post
@@ -265,213 +274,223 @@
- builder:
name: report-build-result-to-gerrit
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
- set -o xtrace
- export PATH=$PATH:/usr/local/bin/
- if [[ -e gerrit_comment.txt ]] ; then
- echo
- echo "posting review comment to gerrit..."
- echo
- cat gerrit_comment.txt
- echo
- ssh -p 29418 gerrit.opnfv.org \
- "gerrit review -p $GERRIT_PROJECT \
- -m '$(cat gerrit_comment.txt)' \
- $GERRIT_PATCHSET_REVISION \
- --notify NONE"
- fi
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o pipefail
+ set -o xtrace
+ export PATH=$PATH:/usr/local/bin/
+ if [[ -e gerrit_comment.txt ]] ; then
+ echo
+ echo "posting review comment to gerrit..."
+ echo
+ cat gerrit_comment.txt
+ echo
+ ssh -p 29418 gerrit.opnfv.org \
+ "gerrit review -p $GERRIT_PROJECT \
+ -m '$(cat gerrit_comment.txt)' \
+ $GERRIT_PATCHSET_REVISION \
+ --notify NONE"
+ fi
- builder:
name: remove-old-docs-from-opnfv-artifacts
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
- set -o xtrace
- export PATH=$PATH:/usr/local/bin/
-
- [[ $GERRIT_CHANGE_NUMBER =~ .+ ]]
-
- gs_path="artifacts.opnfv.org/$PROJECT/review/$GERRIT_CHANGE_NUMBER"
-
- if gsutil ls "gs://$gs_path" > /dev/null 2>&1 ; then
- echo
- echo "Deleting Out-of-dated Documents..."
- gsutil -m rm -r "gs://$gs_path"
- fi
- gs_path="artifacts.opnfv.org/review/$GERRIT_CHANGE_NUMBER"
-
- if gsutil ls "gs://$gs_path" > /dev/null 2>&1 ; then
- echo
- echo "Deleting Out-of-dated Documents..."
- gsutil -m rm -r "gs://$gs_path"
- fi
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o pipefail
+ set -o xtrace
+ export PATH=$PATH:/usr/local/bin/
+
+ [[ $GERRIT_CHANGE_NUMBER =~ .+ ]]
+
+ gs_path="artifacts.opnfv.org/$PROJECT/review/$GERRIT_CHANGE_NUMBER"
+
+ if gsutil ls "gs://$gs_path" > /dev/null 2>&1 ; then
+ echo
+ echo "Deleting Out-of-dated Documents..."
+ gsutil -m rm -r "gs://$gs_path"
+ fi
+ gs_path="artifacts.opnfv.org/review/$GERRIT_CHANGE_NUMBER"
+
+ if gsutil ls "gs://$gs_path" > /dev/null 2>&1 ; then
+ echo
+ echo "Deleting Out-of-dated Documents..."
+ gsutil -m rm -r "gs://$gs_path"
+ fi
- builder:
name: build-and-upload-artifacts-json-api
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
- export PATH=$PATH:/usr/local/bin/
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o pipefail
+ export PATH=$PATH:/usr/local/bin/
- virtualenv -p python2.7 $WORKSPACE/releng_artifacts
- source $WORKSPACE/releng_artifacts/bin/activate
+ virtualenv -p python2.7 $WORKSPACE/releng_artifacts
+ source $WORKSPACE/releng_artifacts/bin/activate
- # install python packages
- pip install google-api-python-client
+ # install python packages
+ pip install google-api-python-client
- # generate and upload index file
- echo "Generating Artifacts API ..."
- python $WORKSPACE/utils/opnfv-artifacts.py > index.json
- gsutil cp index.json gs://artifacts.opnfv.org/index.json
+ # generate and upload index file
+ echo "Generating Artifacts API ..."
+ python $WORKSPACE/utils/opnfv-artifacts.py > index.json
+ gsutil cp index.json gs://artifacts.opnfv.org/index.json
- deactivate
+ deactivate
- builder:
name: lint-python-code
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
- set -o xtrace
- export PATH=$PATH:/usr/local/bin/
-
- virtualenv -p python2.7 $WORKSPACE/releng_flake8
- source $WORKSPACE/releng_flake8/bin/activate
-
- # install python packages
- pip install "flake8==2.6.2"
-
- # generate and upload lint log
- echo "Running flake8 code on $PROJECT ..."
-
- # Get number of flake8 violations. If none, this will be an
- # empty string: ""
- FLAKE_COUNT="$(find . \
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o pipefail
+ set -o xtrace
+ export PATH=$PATH:/usr/local/bin/
+
+ virtualenv -p python2.7 $WORKSPACE/releng_flake8
+ source $WORKSPACE/releng_flake8/bin/activate
+
+ # install python packages
+ pip install "flake8==2.6.2"
+
+ # generate and upload lint log
+ echo "Running flake8 code on $PROJECT ..."
+
+ # Get number of flake8 violations. If none, this will be an
+ # empty string: ""
+ FLAKE_COUNT="$(find . \
+ -path './releng_flake8' -prune -o \
+ -path './.tox' -prune -o \
+ -type f -name "*.py" -print | \
+ xargs flake8 --exit-zero -qq --count 2>&1)"
+
+ # Ensure we start with a clean environment
+ rm -f lint.log
+
+ if [ ! -z $FLAKE_COUNT ]; then
+ echo "Flake8 Violations: $FLAKE_COUNT" > lint.log
+ find . \
-path './releng_flake8' -prune -o \
-path './.tox' -prune -o \
-type f -name "*.py" -print | \
- xargs flake8 --exit-zero -qq --count 2>&1)"
-
- # Ensure we start with a clean environment
- rm -f lint.log
-
- if [ ! -z $FLAKE_COUNT ]; then
- echo "Flake8 Violations: $FLAKE_COUNT" > lint.log
- find . \
- -path './releng_flake8' -prune -o \
- -path './.tox' -prune -o \
- -type f -name "*.py" -print | \
- xargs flake8 --exit-zero --first >> violation.log
- SHOWN=$(wc -l violation.log | cut -d' ' -f1)
- echo -e "First $SHOWN shown\n---" >> lint.log
- cat violation.log >> lint.log
- sed -r -i '4,$s/^/ /g' lint.log
- rm violation.log
- fi
-
- deactivate
+ xargs flake8 --exit-zero --first >> violation.log
+ SHOWN=$(wc -l violation.log | cut -d' ' -f1)
+ echo -e "First $SHOWN shown\n---" >> lint.log
+ cat violation.log >> lint.log
+ sed -r -i '4,$s/^/ /g' lint.log
+ rm violation.log
+ fi
+
+ deactivate
- builder:
name: report-lint-result-to-gerrit
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
- set -o xtrace
- export PATH=$PATH:/usr/local/bin/
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o pipefail
+ set -o xtrace
+ export PATH=$PATH:/usr/local/bin/
- # If no violations were found, no lint log will exist.
- if [[ -e lint.log ]] ; then
- echo -e "\nposting linting report to gerrit...\n"
+ # If no violations were found, no lint log will exist.
+ if [[ -e lint.log ]] ; then
+ echo -e "\nposting linting report to gerrit...\n"
- cat lint.log
- echo
+ cat lint.log
+ echo
- ssh -p 29418 gerrit.opnfv.org \
- "gerrit review -p $GERRIT_PROJECT \
- -m \"$(cat lint.log)\" \
- $GERRIT_PATCHSET_REVISION \
- --notify NONE"
+ ssh -p 29418 gerrit.opnfv.org \
+ "gerrit review -p $GERRIT_PROJECT \
+ -m \"$(cat lint.log)\" \
+ $GERRIT_PATCHSET_REVISION \
+ --notify NONE"
- exit 1
- fi
+ exit 1
+ fi
- builder:
name: upload-review-docs
builders:
- - build-html-and-pdf-docs-output
- - upload-under-review-docs-to-opnfv-artifacts
- - report-build-result-to-gerrit
+ - build-html-and-pdf-docs-output
+ - upload-under-review-docs-to-opnfv-artifacts
+ - report-build-result-to-gerrit
- builder:
name: upload-merged-docs
builders:
- - build-html-and-pdf-docs-output
- - upload-generated-docs-to-opnfv-artifacts
- - report-build-result-to-gerrit
- - remove-old-docs-from-opnfv-artifacts
+ - build-html-and-pdf-docs-output
+ - upload-generated-docs-to-opnfv-artifacts
+ - report-build-result-to-gerrit
+ - remove-old-docs-from-opnfv-artifacts
- builder:
name: check-bash-syntax
builders:
- - shell: "find . -name '*.sh' | xargs bash -n"
+ - shell: "find . -name '*.sh' | xargs bash -n"
- builder:
name: lint-yaml-code
builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
- set -o xtrace
- export PATH=$PATH:/usr/local/bin/
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o pipefail
+ set -o xtrace
+ export PATH=$PATH:/usr/local/bin/
+
+ # install python packages
+ pip install "yamllint==1.6.0"
+
+ # generate and upload lint log
+ echo "Running yaml code on $PROJECT ..."
+
+ # Ensure we start with a clean environment
+ rm -f yaml-violation.log lint.log
+
+ # Get number of yaml violations. If none, this will be an
+ # empty string: ""
+ find . \
+ -type f -name "*.yml" -print \
+ -o -name "*.yaml" -print | \
+ xargs yamllint > yaml-violation.log || true
+
+ if [ -s "yaml-violation.log" ]; then
+ SHOWN=$(cat yaml-violation.log| grep -v "^$" |wc -l)
+ echo -e "First $SHOWN shown\n---" > lint.log
+ cat yaml-violation.log >> lint.log
+ sed -r -i '4,$s/^/ /g' lint.log
+ fi
- # install python packages
- pip install "yamllint==1.6.0"
-
- # generate and upload lint log
- echo "Running yaml code on $PROJECT ..."
-
- # Ensure we start with a clean environment
- rm -f yaml-violation.log lint.log
-
- # Get number of yaml violations. If none, this will be an
- # empty string: ""
- find . \
- -type f -name "*.yml" -print \
- -o -name "*.yaml" -print | \
- xargs yamllint > yaml-violation.log || true
-
- if [ -s "yaml-violation.log" ]; then
- SHOWN=$(cat yaml-violation.log| grep -v "^$" |wc -l)
- echo -e "First $SHOWN shown\n---" > lint.log
- cat yaml-violation.log >> lint.log
- sed -r -i '4,$s/^/ /g' lint.log
- fi
+- builder:
+ name: clean-workspace
+ builders:
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o nounset
+ set -o pipefail
+ sudo /bin/rm -rf "$WORKSPACE"
- builder:
name: clean-workspace-log
builders:
- - shell: |
- find $WORKSPACE -type f -name '*.log' | xargs rm -f
+ - shell: |
+ find $WORKSPACE -type f -name '*.log' | xargs rm -f
- publisher:
name: archive-artifacts
publishers:
- - archive:
- artifacts: '{artifacts}'
- allow-empty: true
- fingerprint: true
- latest-only: true
+ - archive:
+ artifacts: '{artifacts}'
+ allow-empty: true
+ fingerprint: true
+ latest-only: true
- publisher:
name: publish-coverage
@@ -492,3 +511,17 @@
unhealthy: 40
failing: 30
+# The majority of the email-ext plugin options are set to the default
+# for this macro so they can be managed through Jenkins' global
+# settings.
+- publisher:
+ name: email-jenkins-admins-on-failure
+ publishers:
+ - email-ext:
+ content-type: text
+ attach-build-log: true
+ compress-log: true
+ always: false
+ failure: true
+ send-to:
+ - recipients
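(Illustrative note, not part of the patch.) The new build-timeout wrapper and the gerrit_comment.txt convention documented above combine roughly like this in a consuming job; the template name, timeout value and message are hypothetical:

# Hypothetical job template -- only the macro names come from releng-macros.yml.
- job-template:
    name: 'example-docs-verify-{stream}'
    wrappers:
      - ssh-agent-wrapper
      - build-timeout:
          timeout: 60                          # fills the macro's '{timeout}' parameter
    builders:
      - shell: |
          #!/bin/bash
          # Write whatever should be posted back to Gerrit into gerrit_comment.txt ...
          echo "Documentation build finished" > gerrit_comment.txt
      - report-build-result-to-gerrit          # ...and this macro posts it as a review comment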
diff --git a/jjb/global/slave-params.yml b/jjb/global/slave-params.yml
index 57442225c..59c9461be 100644
--- a/jjb/global/slave-params.yml
+++ b/jjb/global/slave-params.yml
@@ -1,3 +1,4 @@
+---
#####################################################
# Parameters for slaves using old labels
# This will be cleaned up once the new job structure and
@@ -6,1001 +7,1039 @@
- parameter:
name: 'apex-baremetal-master-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-baremetal-master'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - lf-pod1
- default-slaves:
- - lf-pod1
+ - label:
+ name: SLAVE_LABEL
+ default: 'apex-baremetal-master'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - lf-pod1
+ default-slaves:
+ - lf-pod1
+
- parameter:
name: 'apex-baremetal-danube-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-baremetal-danube'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - lf-pod1
- default-slaves:
- - lf-pod1
+ - label:
+ name: SLAVE_LABEL
+ default: 'apex-baremetal-danube'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - lf-pod1
+ default-slaves:
+ - lf-pod1
+
- parameter:
name: 'apex-virtual-master-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-virtual-master'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - lf-virtual2
- - lf-virtual3
- default-slaves:
- - lf-virtual2
- - lf-virtual3
+ - label:
+ name: SLAVE_LABEL
+ default: 'apex-virtual-master'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - lf-virtual2
+ - lf-virtual3
+ default-slaves:
+ - lf-virtual2
+ - lf-virtual3
- parameter:
name: 'apex-virtual-danube-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-virtual-danube'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - lf-pod3
- default-slaves:
- - lf-pod3
+ - label:
+ name: SLAVE_LABEL
+ default: 'apex-virtual-danube'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - lf-pod3
+ default-slaves:
+ - lf-pod3
+
- parameter:
name: 'lf-pod1-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - lf-pod1
- default-slaves:
- - lf-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - lf-pod1
+ default-slaves:
+ - lf-pod1
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
+
- parameter:
name: 'lf-pod3-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - lf-pod3
- default-slaves:
- - lf-pod3
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - lf-pod3
+ default-slaves:
+ - lf-pod3
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
+
#####################################################
# Parameters for CI baremetal PODs
#####################################################
- parameter:
name: 'apex-baremetal-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-baremetal'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
+ - label:
+ name: SLAVE_LABEL
+ default: 'apex-baremetal'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
- parameter:
name: 'compass-baremetal-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'compass-baremetal'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - label:
+ name: SLAVE_LABEL
+ default: 'compass-baremetal'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'compass-baremetal-master-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'compass-baremetal-master'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - label:
+ name: SLAVE_LABEL
+ default: 'compass-baremetal-master'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'compass-baremetal-branch-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'compass-baremetal-branch'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - label:
+ name: SLAVE_LABEL
+ default: 'compass-baremetal-branch'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'fuel-baremetal-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'fuel-baremetal'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - label:
+ name: SLAVE_LABEL
+ default: 'fuel-baremetal'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'armband-baremetal-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'armband-baremetal'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: LAB_CONFIG_URL
- default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
- description: 'Base URI to the configuration directory'
+ - label:
+ name: SLAVE_LABEL
+ default: 'armband-baremetal'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: LAB_CONFIG_URL
+ default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
+ description: 'Base URI to the configuration directory'
+
- parameter:
name: 'joid-baremetal-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'joid-baremetal'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: EXTERNAL_NETWORK
- default: ext-net
- description: "External network floating ips"
+ - label:
+ name: SLAVE_LABEL
+ default: 'joid-baremetal'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: EXTERNAL_NETWORK
+ default: ext-net
+ description: "External network floating ips"
+
- parameter:
name: 'daisy-baremetal-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-pod2
- default-slaves:
- - zte-pod2
- - label:
- name: SLAVE_LABEL
- default: 'daisy-baremetal'
- - string:
- name: INSTALLER_IP
- default: '10.20.11.2'
- description: 'IP of the installer'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - zte-pod2
+ default-slaves:
+ - zte-pod2
+ - label:
+ name: SLAVE_LABEL
+ default: 'daisy-baremetal'
+ - string:
+ name: INSTALLER_IP
+ default: '10.20.7.3'
+ description: 'IP of the installer'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
#####################################################
# Parameters for CI virtual PODs
#####################################################
- parameter:
name: 'apex-virtual-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-virtual'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
+ - label:
+ name: SLAVE_LABEL
+ default: 'apex-virtual'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
+
- parameter:
name: 'compass-virtual-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'compass-virtual'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - label:
+ name: SLAVE_LABEL
+ default: 'compass-virtual'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'compass-virtual-master-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'compass-virtual-master'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - label:
+ name: SLAVE_LABEL
+ default: 'compass-virtual-master'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'compass-virtual-branch-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'compass-virtual-branch'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - label:
+ name: SLAVE_LABEL
+ default: 'compass-virtual-branch'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'fuel-virtual-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'fuel-virtual'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - label:
+ name: SLAVE_LABEL
+ default: 'fuel-virtual'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'armband-virtual-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'armband-virtual'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: LAB_CONFIG_URL
- default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
- description: 'Base URI to the configuration directory'
+ - label:
+ name: SLAVE_LABEL
+ default: 'armband-virtual'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: LAB_CONFIG_URL
+ default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
+ description: 'Base URI to the configuration directory'
+
- parameter:
name: 'joid-virtual-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'joid-virtual'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - label:
+ name: SLAVE_LABEL
+ default: 'joid-virtual'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'daisy-virtual-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-virtual1
- - zte-virtual2
- default-slaves:
- - zte-virtual1
- - label:
- name: SLAVE_LABEL
- default: 'daisy-virtual'
- - string:
- name: INSTALLER_IP
- default: '10.20.11.2'
- description: 'IP of the installer'
- - string:
- name: BRIDGE
- default: 'daisy1'
- description: 'pxe bridge for booting of Fuel master'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - zte-virtual1
+ - zte-virtual2
+ default-slaves:
+ - zte-virtual1
+ - label:
+ name: SLAVE_LABEL
+ default: 'daisy-virtual'
+ - string:
+ name: INSTALLER_IP
+ default: '10.20.11.2'
+ description: 'IP of the installer'
+ - string:
+ name: BRIDGE
+ default: 'daisy1'
+ description: 'pxe bridge for booting of Daisy master'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
#####################################################
# Parameters for build slaves
#####################################################
- parameter:
- name: 'opnfv-build-enea-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build-enea'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
-- parameter:
name: 'opnfv-build-centos-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build-centos'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
+ - label:
+ name: SLAVE_LABEL
+ default: 'opnfv-build-centos'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
+
- parameter:
name: 'opnfv-build-ubuntu-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build-ubuntu'
- description: 'Slave label on Jenkins'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
+ - label:
+ name: SLAVE_LABEL
+ default: 'opnfv-build-ubuntu'
+ description: 'Slave label on Jenkins'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
+
- parameter:
name: 'opnfv-build-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build'
- description: 'Slave label on Jenkins'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
+ - label:
+ name: SLAVE_LABEL
+ default: 'opnfv-build'
+ description: 'Slave label on Jenkins'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
+
- parameter:
name: 'huawei-build-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-build
- default-slaves:
- - huawei-build
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - huawei-build
+ default-slaves:
+ - huawei-build
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'opnfv-build-ubuntu-arm-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build-ubuntu-arm'
- description: 'Slave label on Jenkins'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
+ - label:
+ name: SLAVE_LABEL
+ default: 'opnfv-build-ubuntu-arm'
+ description: 'Slave label on Jenkins'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
+
#####################################################
# Parameters for none-CI PODs
#####################################################
- parameter:
name: 'ericsson-pod1-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - ericsson-pod1
- default-slaves:
- - ericsson-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - ericsson-pod1
+ default-slaves:
+ - ericsson-pod1
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'cengn-pod1-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - cengn-pod1
- default-slaves:
- - cengn-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - cengn-pod1
+ default-slaves:
+ - cengn-pod1
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'intel-pod1-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod1
- default-slaves:
- - intel-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - intel-pod1
+ default-slaves:
+ - intel-pod1
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'intel-pod2-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod2
- default-slaves:
- - intel-pod2
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - intel-pod2
+ default-slaves:
+ - intel-pod2
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
+
- parameter:
name: 'intel-pod9-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod9
- default-slaves:
- - intel-pod9
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - intel-pod9
+ default-slaves:
+ - intel-pod9
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'intel-pod10-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod10
- default-slaves:
- - intel-pod10
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - intel-pod10
+ default-slaves:
+ - intel-pod10
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'intel-pod12-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod12
- default-slaves:
- - intel-pod12
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - intel-pod12
+ default-slaves:
+ - intel-pod12
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'huawei-pod3-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-pod3
- default-slaves:
- - huawei-pod3
- - label:
- name: SLAVE_LABEL
- default: 'huawei-test'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - huawei-pod3
+ default-slaves:
+ - huawei-pod3
+ - label:
+ name: SLAVE_LABEL
+ default: 'huawei-test'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'huawei-pod4-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-pod4
- default-slaves:
- - huawei-pod4
- - label:
- name: SLAVE_LABEL
- default: 'huawei-test'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - huawei-pod4
+ default-slaves:
+ - huawei-pod4
+ - label:
+ name: SLAVE_LABEL
+ default: 'huawei-test'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'intel-pod8-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod8
- default-slaves:
- - intel-pod8
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - intel-pod8
+ default-slaves:
+ - intel-pod8
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+
+- parameter:
+ name: 'huawei-virtual5-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'huawei-virtual5'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+
- parameter:
name: 'huawei-virtual7-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-virtual7
- default-slaves:
- - huawei-virtual7
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - huawei-virtual7
+ default-slaves:
+ - huawei-virtual7
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+
- parameter:
name: 'huawei-pod7-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-pod7
- default-slaves:
- - huawei-pod7
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - huawei-pod7
+ default-slaves:
+ - huawei-pod7
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+
- parameter:
name: 'zte-pod1-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-pod1
- default-slaves:
- - zte-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: INSTALLER_IP
- default: '10.20.6.2'
- description: 'IP of the installer'
- - string:
- name: BRIDGE
- default: 'br6'
- description: 'pxe bridge for booting of Fuel master'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - zte-pod1
+ default-slaves:
+ - zte-pod1
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: INSTALLER_IP
+ default: '10.20.6.2'
+ description: 'IP of the installer'
+ - string:
+ name: BRIDGE
+ default: 'br6'
+ description: 'pxe bridge for booting of Fuel master'
+
- parameter:
name: 'zte-pod2-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-pod2
- default-slaves:
- - zte-pod2
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: INSTALLER_IP
- default: '10.20.7.3'
- description: 'IP of the installer'
- - string:
- name: BRIDGE
- default: 'br7'
- description: 'pxe bridge for booting of Fuel master'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - zte-pod2
+ default-slaves:
+ - zte-pod2
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: INSTALLER_IP
+ default: '10.20.7.3'
+ description: 'IP of the installer'
+ - string:
+ name: BRIDGE
+ default: 'br7'
+ description: 'pxe bridge for booting of Fuel master'
+
- parameter:
name: 'zte-pod3-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-pod3
- default-slaves:
- - zte-pod3
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BRIDGE
- default: 'br0'
- description: 'pxe bridge for booting of Fuel master'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - zte-pod3
+ default-slaves:
+ - zte-pod3
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: BRIDGE
+ default: 'br0'
+ description: 'pxe bridge for booting of Fuel master'
+
+- parameter:
+ name: zte-pod4-defaults
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - zte-pod4
+ default-slaves:
+ - zte-pod4
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'juniper-pod1-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - juniper-pod1
- default-slaves:
- - juniper-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: CEPH_DISKS
- default: /srv
- description: "Disks to use by ceph (comma separated list)"
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - juniper-pod1
+ default-slaves:
+ - juniper-pod1
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: CEPH_DISKS
+ default: /srv
+ description: "Disks to use by ceph (comma separated list)"
+
- parameter:
name: 'orange-pod1-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - orange-pod1
- default-slaves:
- - orange-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - orange-pod1
+ default-slaves:
+ - orange-pod1
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'orange-pod2-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - orange-pod2
- default-slaves:
- - orange-pod2
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - orange-pod2
+ default-slaves:
+ - orange-pod2
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'orange-pod5-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - orange-pod5
- default-slaves:
- - orange-pod5
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - orange-pod5
+ default-slaves:
+ - orange-pod5
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'dell-pod1-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - dell-pod1
- default-slaves:
- - dell-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - dell-pod1
+ default-slaves:
+ - dell-pod1
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'dell-pod2-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - dell-pod2
- default-slaves:
- - dell-pod2
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - dell-pod2
+ default-slaves:
+ - dell-pod2
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'nokia-pod1-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - nokia-pod1
- default-slaves:
- - nokia-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-- parameter:
- name: 'arm-pod2-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - arm-pod2
- default-slaves:
- - arm-pod2
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: LAB_CONFIG_URL
- default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
- description: 'Base URI to the configuration directory'
-- parameter:
- name: 'arm-pod3-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - arm-pod3
- default-slaves:
- - arm-pod3
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: LAB_CONFIG_URL
- default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
- description: 'Base URI to the configuration directory'
-- parameter:
- name: 'arm-pod4-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - arm-pod4
- default-slaves:
- - arm-pod4
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: LAB_CONFIG_URL
- default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
- description: 'Base URI to the configuration directory'
-- parameter:
- name: 'arm-virtual1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - arm-virtual1
- default-slaves:
- - arm-virtual1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: LAB_CONFIG_URL
- default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
- description: 'Base URI to the configuration directory'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - nokia-pod1
+ default-slaves:
+ - nokia-pod1
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to use for Apex'
+
- parameter:
name: 'intel-virtual6-defaults'
parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-virtual6
- default-slaves:
- - intel-virtual6
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-- parameter:
- name: 'ool-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - ool-virtual1
- - ool-virtual2
- - ool-virtual3
- default-slaves:
- - '{default-slave}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to be used'
-- parameter:
- name: 'ool-virtual1-defaults'
- parameters:
- - 'ool-defaults':
- default-slave: 'ool-virtual1'
-- parameter:
- name: 'ool-virtual2-defaults'
- parameters:
- - 'ool-defaults':
- default-slave: 'ool-virtual2'
-- parameter:
- name: 'ool-virtual3-defaults'
- parameters:
- - 'ool-defaults':
- default-slave: 'ool-virtual3'
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - intel-virtual6
+ default-slaves:
+ - intel-virtual6
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
+- parameter:
+ name: 'intel-virtual10-defaults'
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - intel-virtual10
+ default-slaves:
+ - intel-virtual10
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
+- parameter:
+ name: 'doctor-defaults'
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - '{default-slave}'
+ default-slaves:
+ - '{default-slave}'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: SSH_KEY
+ default: /root/.ssh/id_rsa
+ description: 'SSH key to be used'
+
+- parameter:
+ name: 'doctor-apex-verify-defaults'
+ parameters:
+ - 'doctor-defaults':
+ default-slave: 'doctor-apex-verify'
+
+- parameter:
+ name: 'doctor-fuel-verify-defaults'
+ parameters:
+ - 'doctor-defaults':
+ default-slave: 'doctor-fuel-verify'
+
+- parameter:
+ name: 'doctor-joid-verify-defaults'
+ parameters:
+ - 'doctor-defaults':
+ default-slave: 'doctor-joid-verify'
+
- parameter:
name: 'multisite-virtual-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'multisite-virtual'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - label:
+ name: SLAVE_LABEL
+ default: 'multisite-virtual'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'ericsson-virtual5-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'ericsson-virtual5'
- - string:
- name: GIT_BASE
- default: https://git.opendaylight.org/gerrit/p/$PROJECT.git
- description: 'Git URL to use on this Jenkins Slave'
+ - label:
+ name: SLAVE_LABEL
+ default: 'ericsson-virtual5'
+ - string:
+ name: GIT_BASE
+ default: https://git.opendaylight.org/gerrit/p/$PROJECT.git
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'ericsson-virtual12-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'ericsson-virtual12'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - label:
+ name: SLAVE_LABEL
+ default: 'ericsson-virtual12'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'ericsson-virtual13-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'ericsson-virtual13'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - label:
+ name: SLAVE_LABEL
+ default: 'ericsson-virtual13'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
+- parameter:
+ name: 'ericsson-virtual-pod1bl01-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'ericsson-virtual-pod1bl01'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'odl-netvirt-virtual-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'odl-netvirt-virtual'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - label:
+ name: SLAVE_LABEL
+ default: 'odl-netvirt-virtual'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
- parameter:
name: 'odl-netvirt-virtual-intel-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'odl-netvirt-virtual-intel'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
+ - label:
+ name: SLAVE_LABEL
+ default: 'odl-netvirt-virtual-intel'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
+- parameter:
+ name: 'flex-pod1-defaults'
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - flex-pod1
+ default-slaves:
+ - flex-pod1
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
#####################################################
# These slaves are just dummy slaves for sandbox jobs
#####################################################
- parameter:
name: 'sandbox-baremetal-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'sandbox-baremetal'
- description: 'Slave label on Jenkins'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
+ - label:
+ name: SLAVE_LABEL
+ default: 'sandbox-baremetal'
+ description: 'Slave label on Jenkins'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
+
- parameter:
name: 'sandbox-virtual-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'sandbox-virtual'
- description: 'Slave label on Jenkins'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
+ - label:
+ name: SLAVE_LABEL
+ default: 'sandbox-virtual'
+ description: 'Slave label on Jenkins'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
+
- parameter:
name: 'dummy-pod1-defaults'
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'dummy-pod1'
- description: 'Slave label on Jenkins'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
+ - label:
+ name: SLAVE_LABEL
+ default: 'dummy-pod1'
+ description: 'Slave label on Jenkins'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
diff --git a/jjb/joid/joid-daily-jobs.yml b/jjb/joid/joid-daily-jobs.yml
index 13ea9b308..ac04962e0 100644
--- a/jjb/joid/joid-daily-jobs.yml
+++ b/jjb/joid/joid-daily-jobs.yml
@@ -1,3 +1,4 @@
+---
########################
# Job configuration for joid
########################
@@ -9,80 +10,73 @@
installer: '{name}'
-#--------------------------------
-# BRANCH ANCHORS
-#--------------------------------
+ # -------------------------------
+ # BRANCH ANCHORS
+ # -------------------------------
master: &master
- stream: master
- branch: '{stream}'
- disabled: false
- gs-pathname: ''
- danube: &danube
- stream: danube
- branch: 'stable/{stream}'
- disabled: false
- gs-pathname: '/{stream}'
-#--------------------------------
-# POD, INSTALLER, AND BRANCH MAPPING
-#--------------------------------
-# CI PODs
-#--------------------------------
+ stream: master
+ branch: '{stream}'
+ disabled: false
+ gs-pathname: ''
+ # -------------------------------
+ # POD, INSTALLER, AND BRANCH MAPPING
+ # -------------------------------
+ # CI PODs
+ # -------------------------------
pod:
- - baremetal:
- slave-label: joid-baremetal
- <<: *master
- - virtual:
- slave-label: joid-virtual
- <<: *master
- - baremetal:
- slave-label: joid-baremetal
- <<: *danube
- - virtual:
- slave-label: joid-virtual
- <<: *danube
-#--------------------------------
-# None-CI PODs
-#--------------------------------
- - orange-pod1:
- slave-label: orange-pod1
- <<: *master
- - cengn-pod1:
- slave-label: cengn-pod1
- <<: *master
-#--------------------------------
-# scenarios
-#--------------------------------
+ - baremetal:
+ slave-label: joid-baremetal
+ <<: *master
+ - virtual:
+ slave-label: joid-virtual
+ <<: *master
+ # -------------------------------
+ # Non-CI PODs
+ # -------------------------------
+ - orange-pod1:
+ slave-label: orange-pod1
+ <<: *master
+ - cengn-pod1:
+ slave-label: cengn-pod1
+ <<: *master
+ # -------------------------------
+ # scenarios
+ # -------------------------------
scenario:
- - 'os-nosdn-nofeature-noha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-nofeature-ha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-lxd-ha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-lxd-noha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- - 'os-odl_l2-nofeature-ha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- - 'os-onos-nofeature-ha':
- auto-trigger-name: 'daily-trigger-disabled'
- - 'os-odl_l2-nofeature-noha':
- auto-trigger-name: 'daily-trigger-disabled'
- - 'os-onos-nofeature-noha':
- auto-trigger-name: 'daily-trigger-disabled'
- - 'os-onos-sfc-ha':
- auto-trigger-name: 'daily-trigger-disabled'
- - 'os-ocl-nofeature-ha':
- auto-trigger-name: 'daily-trigger-disabled'
- - 'os-ocl-nofeature-noha':
- auto-trigger-name: 'daily-trigger-disabled'
- - 'k8-nosdn-nofeature-noha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- - 'k8-nosdn-lb-noha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+ - 'os-nosdn-nofeature-noha':
+ auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+ - 'os-nosdn-nofeature-ha':
+ auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+ - 'os-nosdn-lxd-ha':
+ auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+ - 'os-nosdn-lxd-noha':
+ auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+ - 'os-odl_l2-nofeature-ha':
+ auto-trigger-name: 'daily-trigger-disabled'
+ - 'os-onos-nofeature-ha':
+ auto-trigger-name: 'daily-trigger-disabled'
+ - 'os-odl_l2-nofeature-noha':
+ auto-trigger-name: 'daily-trigger-disabled'
+ - 'os-onos-nofeature-noha':
+ auto-trigger-name: 'daily-trigger-disabled'
+ - 'os-onos-sfc-ha':
+ auto-trigger-name: 'daily-trigger-disabled'
+ - 'os-ocl-nofeature-ha':
+ auto-trigger-name: 'daily-trigger-disabled'
+ - 'os-ocl-nofeature-noha':
+ auto-trigger-name: 'daily-trigger-disabled'
+ - 'k8-nosdn-nofeature-noha':
+ auto-trigger-name: 'daily-trigger-disabled'
+ - 'k8-nosdn-lb-noha':
+ auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+ - 'k8-ovn-lb-noha':
+ auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+ - 'os-nosdn-openbaton-ha':
+ auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
jobs:
- - 'joid-{scenario}-{pod}-daily-{stream}'
- - 'joid-deploy-{pod}-daily-{stream}'
+ - 'joid-{scenario}-{pod}-daily-{stream}'
+ - 'joid-deploy-{pod}-daily-{stream}'
########################
# job templates
@@ -95,85 +89,86 @@
concurrent: false
properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'joid-os-.*?-{pod}-daily-.*'
- block-level: 'NODE'
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 4
+ max-per-node: 1
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'joid-os-.*?-{pod}-daily-.*'
+ block-level: 'NODE'
wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+ - build-name:
+ name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
triggers:
- - '{auto-trigger-name}'
+ - '{auto-trigger-name}'
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - '{installer}-defaults'
+ - '{slave-label}-defaults':
+ installer: '{installer}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: '{scenario}'
builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - trigger-builds:
- - project: 'joid-deploy-{pod}-daily-{stream}'
- current-parameters: true
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- same-node: true
- block: true
- - trigger-builds:
- - project: 'functest-joid-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-joid-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- # 1.dovetail only master by now, not sync with A/B/C branches
- # 2.here the stream means the SUT stream, dovetail stream is defined in its own job
- # 3.only debug testsuite here(includes basic testcase,
- # i.e. one tempest smoke ipv6, two vping from functest)
- # 4.not used for release criteria or compliance,
- # only to debug the dovetail tool bugs with joid
- #- trigger-builds:
- # - project: 'dovetail-joid-{pod}-proposed_tests-{stream}'
- # current-parameters: false
- # predefined-parameters:
- # DEPLOY_SCENARIO={scenario}
- # block: true
- # same-node: true
- # block-thresholds:
- # build-step-failure-threshold: 'never'
- # failure-threshold: 'never'
- # unstable-threshold: 'FAILURE'
+ - description-setter:
+ description: "POD: $NODE_NAME"
+ - trigger-builds:
+ - project: 'joid-deploy-{pod}-daily-{stream}'
+ current-parameters: true
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ same-node: true
+ block: true
+ - trigger-builds:
+ - project: 'functest-joid-{pod}-daily-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'yardstick-joid-{pod}-daily-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+# 1. Dovetail tracks only master for now, not synced with A/B/C branches.
+# 2. Here the stream means the SUT stream; the dovetail stream is defined
+#    in its own job.
+# 3. Only the debug testsuite runs here (it includes basic test cases,
+#    i.e. one tempest smoke ipv6 case and two vping cases from functest).
+# 4. Not used for release criteria or compliance; only used to debug
+#    dovetail tool bugs with joid.
+# - trigger-builds:
+# - project: 'dovetail-joid-{pod}-proposed_tests-{stream}'
+# current-parameters: false
+# predefined-parameters:
+# DEPLOY_SCENARIO={scenario}
+# block: true
+# same-node: true
+# block-thresholds:
+# build-step-failure-threshold: 'never'
+# failure-threshold: 'never'
+# unstable-threshold: 'FAILURE'
- job-template:
name: 'joid-deploy-{pod}-daily-{stream}'
@@ -183,58 +178,58 @@
concurrent: true
properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'joid-deploy-{pod}-daily-.*'
- block-level: 'NODE'
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 4
+ max-per-node: 1
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'joid-deploy-{pod}-daily-.*'
+ block-level: 'NODE'
wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: 180
- fail: true
+ - build-name:
+ name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+ - timeout:
+ timeout: 180
+ fail: true
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl_l2-nofeature-ha'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - '{installer}-defaults'
+ - '{slave-label}-defaults':
+ installer: '{installer}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-odl_l2-nofeature-ha'
scm:
- - git-scm
+ - git-scm
builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - 'builder-macro'
+ - description-setter:
+ description: "POD: $NODE_NAME"
+ - 'builder-macro'
########################
# builder macros
########################
- builder:
name: 'builder-macro'
builders:
- - shell: |
- #!/bin/bash
- echo "Running $INSTALLER_TYPE with controller $SDN_CONTROLLER"
- echo
- echo "------ First Executing clean.sh ------"
- cd $WORKSPACE/ci
- ./clean.sh
- - shell:
- !include-raw: ./joid-deploy.sh
+ - shell: |
+ #!/bin/bash
+ echo "Running $INSTALLER_TYPE with controller $SDN_CONTROLLER"
+ echo
+ echo "------ First Executing clean.sh ------"
+ cd $WORKSPACE/ci
+ ./clean.sh
+ - shell:
+ !include-raw: ./joid-deploy.sh
########################
# trigger macros
@@ -243,305 +238,187 @@
- trigger:
name: 'joid-os-nosdn-nofeature-ha-baremetal-master-trigger'
triggers:
- - timed: '5 2 * * *'
+ - timed: '5 2 * * *'
- trigger:
name: 'joid-os-nosdn-nofeature-ha-virtual-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: 'joid-os-nosdn-nofeature-ha-orange-pod1-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: 'joid-os-nosdn-nofeature-ha-cengn-pod1-master-trigger'
triggers:
- - timed: ''
-# os-nosdn-nofeature-ha trigger - branch: danube
-- trigger:
- name: 'joid-os-nosdn-nofeature-ha-baremetal-danube-trigger'
- triggers:
- - timed: '0 2 * * *'
-- trigger:
- name: 'joid-os-nosdn-nofeature-ha-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-nosdn-nofeature-ha-orange-pod1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-nosdn-nofeature-ha-cengn-pod1-danube-trigger'
- triggers:
- - timed: ''
+ - timed: ''
# os-odl_l2-nofeature-ha trigger - branch: master
- trigger:
name: 'joid-os-odl_l2-nofeature-ha-baremetal-master-trigger'
triggers:
- - timed: '5 7 * * *'
+ - timed: '5 7 * * *'
- trigger:
name: 'joid-os-odl_l2-nofeature-ha-virtual-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: 'joid-os-odl_l2-nofeature-ha-orange-pod1-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: 'joid-os-odl_l2-nofeature-ha-cengn-pod1-master-trigger'
triggers:
- - timed: ''
-# os-odl_l2-nofeature-ha trigger - branch: danube
-- trigger:
- name: 'joid-os-odl_l2-nofeature-ha-baremetal-danube-trigger'
- triggers:
- - timed: '0 7 * * *'
-- trigger:
- name: 'joid-os-odl_l2-nofeature-ha-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-odl_l2-nofeature-ha-orange-pod1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-odl_l2-nofeature-ha-cengn-pod1-danube-trigger'
- triggers:
- - timed: ''
+ - timed: ''
# os-onos-nofeature-ha trigger - branch: master
- trigger:
name: 'joid-os-onos-nofeature-ha-baremetal-master-trigger'
triggers:
- - timed: '5 12 * * *'
+ - timed: '5 12 * * *'
- trigger:
name: 'joid-os-onos-nofeature-ha-virtual-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: 'joid-os-onos-nofeature-ha-orange-pod1-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: 'joid-os-onos-nofeature-ha-cengn-pod1-master-trigger'
triggers:
- - timed: ''
-# os-onos-nofeature-ha trigger - branch: danube
-- trigger:
- name: 'joid-os-onos-nofeature-ha-baremetal-danube-trigger'
- triggers:
- - timed: '0 12 * * *'
-- trigger:
- name: 'joid-os-onos-nofeature-ha-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-onos-nofeature-ha-orange-pod1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-onos-nofeature-ha-cengn-pod1-danube-trigger'
- triggers:
- - timed: ''
+ - timed: ''
# os-onos-sfc-ha trigger - branch: master
- trigger:
name: 'joid-os-onos-sfc-ha-baremetal-master-trigger'
triggers:
- - timed: '5 17 * * *'
+ - timed: '5 17 * * *'
- trigger:
name: 'joid-os-onos-sfc-ha-virtual-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: 'joid-os-onos-sfc-ha-orange-pod1-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: 'joid-os-onos-sfc-ha-cengn-pod1-master-trigger'
triggers:
- - timed: ''
-# os-onos-sfc-ha trigger - branch: danube
-- trigger:
- name: 'joid-os-onos-sfc-ha-baremetal-danube-trigger'
- triggers:
- - timed: '0 17 * * *'
-- trigger:
- name: 'joid-os-onos-sfc-ha-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-onos-sfc-ha-orange-pod1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-onos-sfc-ha-cengn-pod1-danube-trigger'
- triggers:
- - timed: ''
+ - timed: ''
# os-nosdn-lxd-noha trigger - branch: master
- trigger:
name: 'joid-os-nosdn-lxd-noha-baremetal-master-trigger'
triggers:
- - timed: '5 22 * * *'
+ - timed: '5 22 * * *'
- trigger:
name: 'joid-os-nosdn-lxd-noha-virtual-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: 'joid-os-nosdn-lxd-noha-orange-pod1-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: 'joid-os-nosdn-lxd-noha-cengn-pod1-master-trigger'
triggers:
- - timed: ''
-# os-nosdn-lxd-noha trigger - branch: danube
-- trigger:
- name: 'joid-os-nosdn-lxd-noha-baremetal-danube-trigger'
- triggers:
- - timed: '0 22 * * *'
-- trigger:
- name: 'joid-os-nosdn-lxd-noha-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-nosdn-lxd-noha-orange-pod1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-nosdn-lxd-noha-cengn-pod1-danube-trigger'
- triggers:
- - timed: ''
+ - timed: ''
# os-nosdn-lxd-ha trigger - branch: master
- trigger:
name: 'joid-os-nosdn-lxd-ha-baremetal-master-trigger'
triggers:
- - timed: '5 10 * * *'
+ - timed: '5 10 * * *'
- trigger:
name: 'joid-os-nosdn-lxd-ha-virtual-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: 'joid-os-nosdn-lxd-ha-orange-pod1-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: 'joid-os-nosdn-lxd-ha-cengn-pod1-master-trigger'
triggers:
- - timed: ''
-# os-nosdn-lxd-ha trigger - branch: danube
-- trigger:
- name: 'joid-os-nosdn-lxd-ha-baremetal-danube-trigger'
- triggers:
- - timed: '0 10 * * *'
-- trigger:
- name: 'joid-os-nosdn-lxd-ha-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-nosdn-lxd-ha-orange-pod1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-nosdn-lxd-ha-cengn-pod1-danube-trigger'
- triggers:
- - timed: ''
+ - timed: ''
# os-nosdn-nofeature-noha trigger - branch: master
- trigger:
name: 'joid-os-nosdn-nofeature-noha-baremetal-master-trigger'
triggers:
- - timed: '5 4 * * *'
+ - timed: '5 4 * * *'
- trigger:
name: 'joid-os-nosdn-nofeature-noha-virtual-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: 'joid-os-nosdn-nofeature-noha-orange-pod1-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: 'joid-os-nosdn-nofeature-noha-cengn-pod1-master-trigger'
triggers:
- - timed: ''
-# os-nosdn-nofeature-noha trigger - branch: danube
-- trigger:
- name: 'joid-os-nosdn-nofeature-noha-baremetal-danube-trigger'
- triggers:
- - timed: '0 4 * * *'
-- trigger:
- name: 'joid-os-nosdn-nofeature-noha-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-nosdn-nofeature-noha-orange-pod1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-nosdn-nofeature-noha-cengn-pod1-danube-trigger'
- triggers:
- - timed: ''
+ - timed: ''
# k8-nosdn-nofeature-noha trigger - branch: master
- trigger:
name: 'joid-k8-nosdn-nofeature-noha-baremetal-master-trigger'
triggers:
- - timed: '5 15 * * *'
+ - timed: '5 15 * * *'
- trigger:
name: 'joid-k8-nosdn-nofeature-noha-virtual-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: 'joid-k8-nosdn-nofeature-noha-orange-pod1-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
name: 'joid-k8-nosdn-nofeature-noha-cengn-pod1-master-trigger'
triggers:
- - timed: ''
-# k8-nosdn-nofeature-noha trigger - branch: danube
+ - timed: ''
+# k8-nosdn-lb-noha trigger - branch: master
- trigger:
- name: 'joid-k8-nosdn-nofeature-noha-baremetal-danube-trigger'
+ name: 'joid-k8-nosdn-lb-noha-baremetal-master-trigger'
triggers:
- - timed: '0 15 * * *'
+ - timed: '5 20 * * *'
- trigger:
- name: 'joid-k8-nosdn-nofeature-noha-virtual-danube-trigger'
+ name: 'joid-k8-nosdn-lb-noha-virtual-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
- name: 'joid-k8-nosdn-nofeature-noha-orange-pod1-danube-trigger'
+ name: 'joid-k8-nosdn-lb-noha-orange-pod1-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
- name: 'joid-k8-nosdn-nofeature-noha-cengn-pod1-danube-trigger'
+ name: 'joid-k8-nosdn-lb-noha-cengn-pod1-master-trigger'
triggers:
- - timed: ''
-# k8-nosdn-lb-noha trigger - branch: master
+ - timed: ''
+# k8-ovn-lb-noha trigger - branch: master
- trigger:
- name: 'joid-k8-nosdn-lb-noha-baremetal-master-trigger'
+ name: 'joid-k8-ovn-lb-noha-baremetal-master-trigger'
triggers:
- - timed: '5 20 * * *'
+ - timed: '5 17 * * *'
- trigger:
- name: 'joid-k8-nosdn-lb-noha-virtual-master-trigger'
+ name: 'joid-k8-ovn-lb-noha-virtual-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
- name: 'joid-k8-nosdn-lb-noha-orange-pod1-master-trigger'
+ name: 'joid-k8-ovn-lb-noha-orange-pod1-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
- name: 'joid-k8-nosdn-lb-noha-cengn-pod1-master-trigger'
+ name: 'joid-k8-ovn-lb-noha-cengn-pod1-master-trigger'
triggers:
- - timed: ''
-# k8-nosdn-lb-noha trigger - branch: danube
+ - timed: ''
+
+# os-nosdn-openbaton-ha trigger - branch: master
- trigger:
- name: 'joid-k8-nosdn-lb-noha-baremetal-danube-trigger'
+ name: 'joid-os-nosdn-openbaton-ha-baremetal-master-trigger'
triggers:
- - timed: '0 20 * * *'
+ - timed: '5 25 * * *'
- trigger:
- name: 'joid-k8-nosdn-lb-noha-virtual-danube-trigger'
+ name: 'joid-os-nosdn-openbaton-ha-virtual-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
- name: 'joid-k8-nosdn-lb-noha-orange-pod1-danube-trigger'
+ name: 'joid-os-nosdn-openbaton-ha-orange-pod1-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
- trigger:
- name: 'joid-k8-nosdn-lb-noha-cengn-pod1-danube-trigger'
+ name: 'joid-os-nosdn-openbaton-ha-cengn-pod1-master-trigger'
triggers:
- - timed: ''
+ - timed: ''
diff --git a/jjb/joid/joid-deploy.sh b/jjb/joid/joid-deploy.sh
index e197dbd8c..9740d38bf 100644
--- a/jjb/joid/joid-deploy.sh
+++ b/jjb/joid/joid-deploy.sh
@@ -94,9 +94,6 @@ EXTRA=${DEPLOY_OPTIONS[4]}
if [ "$SDN_CONTROLLER" == 'odl_l2' ] || [ "$SDN_CONTROLLER" == 'odl_l3' ]; then
SDN_CONTROLLER='odl'
fi
-if [ "$HA_MODE" == 'noha' ]; then
- HA_MODE='nonha'
-fi
# Add extra to features
if [ "$EXTRA" != "" ];then
diff --git a/jjb/joid/joid-verify-jobs.yml b/jjb/joid/joid-verify-jobs.yml
index 03fab553e..3b30cb6c9 100644
--- a/jjb/joid/joid-verify-jobs.yml
+++ b/jjb/joid/joid-verify-jobs.yml
@@ -1,37 +1,38 @@
+---
- project:
name: 'joid-verify-jobs'
project: 'joid'
installer: 'joid'
-#####################################
-# branch definitions
-#####################################
+ #####################################
+ # branch definitions
+ #####################################
stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - danube:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-#####################################
-# patch verification phases
-#####################################
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+ - danube:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: false
+ #####################################
+ # patch verification phases
+ #####################################
phase:
- - 'basic':
- slave-label: 'opnfv-build-ubuntu'
- - 'deploy-virtual':
- slave-label: 'joid-virtual'
- - 'smoke-test':
- slave-label: 'joid-virtual'
-#####################################
-# jobs
-#####################################
+ - 'basic':
+ slave-label: 'opnfv-build-ubuntu'
+ - 'deploy-virtual':
+ slave-label: 'joid-virtual'
+ - 'smoke-test':
+ slave-label: 'joid-virtual'
+ #####################################
+ # jobs
+ #####################################
jobs:
- - 'joid-verify-{stream}'
- - 'joid-verify-{phase}-{stream}'
+ - 'joid-verify-{stream}'
+ - 'joid-verify-{phase}-{stream}'
#####################################
# job templates
#####################################
@@ -45,103 +46,103 @@
concurrent: true
properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'joid-verify-master'
- - 'joid-verify-danube'
- block-level: 'NODE'
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 4
+ max-per-node: 1
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'joid-verify-master'
+ - 'joid-verify-danube'
+ block-level: 'NODE'
scm:
- - git-scm-gerrit
+ - git-scm-gerrit
wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
+ - ssh-agent-wrapper
+ - timeout:
+ timeout: 360
+ fail: true
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
- readable-message: true
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**|.gitignore'
+ readable-message: true
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'joid-virtual-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'joid-virtual-defaults'
builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: basic
- condition: SUCCESSFUL
- projects:
- - name: 'joid-verify-basic-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'joid-verify-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: smoke-test
- condition: SUCCESSFUL
- projects:
- - name: 'joid-verify-smoke-test-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - multijob:
+ name: basic
+ condition: SUCCESSFUL
+ projects:
+ - name: 'joid-verify-basic-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ BRANCH=$BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: deploy-virtual
+ condition: SUCCESSFUL
+ projects:
+ - name: 'joid-verify-deploy-virtual-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ BRANCH=$BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: smoke-test
+ condition: SUCCESSFUL
+ projects:
+ - name: 'joid-verify-smoke-test-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ BRANCH=$BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
- job-template:
name: 'joid-verify-{phase}-{stream}'
@@ -151,62 +152,62 @@
concurrent: true
properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'joid-verify-deploy-.*'
- - 'joid-verify-test-.*'
- block-level: 'NODE'
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 4
+ max-per-node: 1
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'joid-verify-deploy-.*'
+ - 'joid-verify-test-.*'
+ block-level: 'NODE'
scm:
- - git-scm-gerrit
+ - git-scm-gerrit
wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
+ - ssh-agent-wrapper
+ - timeout:
+ timeout: 360
+ fail: true
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - '{installer}-defaults'
+ - '{slave-label}-defaults'
builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-verify-{phase}-macro'
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - '{project}-verify-{phase}-macro'
#####################################
# builder macros
#####################################
- builder:
name: 'joid-verify-basic-macro'
builders:
- - shell: |
- #!/bin/bash
+ - shell: |
+ #!/bin/bash
- echo "Not activated!"
+ echo "Not activated!"
- builder:
name: 'joid-verify-deploy-virtual-macro'
builders:
- - shell: |
- #!/bin/bash
+ - shell: |
+ #!/bin/bash
- echo "Not activated!"
+ echo "Not activated!"
- builder:
name: 'joid-verify-smoke-test-macro'
builders:
- - shell: |
- #!/bin/bash
+ - shell: |
+ #!/bin/bash
- echo "Not activated!"
+ echo "Not activated!"
diff --git a/jjb/kvmfornfv/kvmfornfv.yml b/jjb/kvmfornfv/kvmfornfv.yml
index e5b56bf9b..a39249ad2 100644
--- a/jjb/kvmfornfv/kvmfornfv.yml
+++ b/jjb/kvmfornfv/kvmfornfv.yml
@@ -26,6 +26,7 @@
testname:
- 'cyclictest'
- 'packet_forward'
+ - 'livemigration'
#####################################
# patch verification phases
#####################################
@@ -236,6 +237,17 @@
git-revision: true
kill-phase-on: FAILURE
abort-all-job: true
+ - multijob:
+ name: livemigration-test
+ condition: SUCCESSFUL
+ projects:
+ - name: 'kvmfornfv-livemigration-daily-test-{stream}'
+ current-parameters: false
+ node-parameters: false
+ git-revision: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+
- job-template:
name: 'kvmfornfv-daily-build-{stream}'
@@ -344,6 +356,14 @@
!include-raw: ./kvmfornfv-download-artifact.sh
- shell:
!include-raw: ./kvmfornfv-test.sh
+- builder:
+ name: 'kvmfornfv-livemigration-daily-test-macro'
+ builders:
+ - shell:
+ !include-raw: ./kvmfornfv-download-artifact.sh
+ - shell:
+ !include-raw: ./kvmfornfv-test.sh
+
#####################################
# parameter macros
#####################################
diff --git a/jjb/multisite/fuel-deploy-for-multisite.sh b/jjb/multisite/fuel-deploy-for-multisite.sh
deleted file mode 100755
index 71c6cc11d..000000000
--- a/jjb/multisite/fuel-deploy-for-multisite.sh
+++ /dev/null
@@ -1,124 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o nounset
-set -o pipefail
-
-# do not continue with the deployment if FRESH_INSTALL is not requested
-if [[ "$FRESH_INSTALL" == "true" ]]; then
- echo "Fresh install requested. Proceeding with the installation."
-else
- echo "Fresh install is not requested. Skipping the installation."
- exit 0
-fi
-
-export TERM="vt220"
-export BRANCH=$(echo $BRANCH | sed 's/stable\///g')
-# get the latest successful job console log and extract the properties filename
-FUEL_DEPLOY_BUILD_URL="https://build.opnfv.org/ci/job/fuel-deploy-virtual-daily-$BRANCH/lastSuccessfulBuild/consoleText"
-FUEL_PROPERTIES_FILE=$(curl -s -L ${FUEL_DEPLOY_BUILD_URL} | grep 'ISO:' | awk '{print $2}' | sed 's/iso/properties/g')
-if [[ -z "FUEL_PROPERTIES_FILE" ]]; then
- echo "Unable to extract the url to Fuel ISO properties from ${FUEL_DEPLOY_URL}"
- exit 1
-fi
-
-# use known/working version of fuel
-#FUEL_PROPERTIES_FILE="opnfv-2017-03-06_16-00-15.properties"
-curl -L -s -o $WORKSPACE/latest.properties $GS_PATH/$FUEL_PROPERTIES_FILE
-
-# source the file so we get OPNFV vars
-source latest.properties
-
-# echo the info about artifact that is used during the deployment
-echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
-
-# download the iso
-echo "Downloading the ISO using the link http://$OPNFV_ARTIFACT_URL"
-curl -L -s -o $WORKSPACE/opnfv.iso http://$OPNFV_ARTIFACT_URL > gsutil.iso.log 2>&1
-
-
-# set deployment parameters
-DEPLOY_SCENARIO="os-nosdn-nofeature-noha"
-export TMPDIR=$HOME/tmpdir
-BRIDGE=${BRIDGE:-pxebr}
-LAB_NAME=${NODE_NAME/-*}
-POD_NAME=${NODE_NAME/*-}
-
-if [[ "$NODE_NAME" =~ "virtual" ]]; then
- POD_NAME="virtual_kvm"
-fi
-
-# we currently support ericsson, intel, lf and zte labs
-if [[ ! "$LAB_NAME" =~ (ericsson|intel|lf|zte) ]]; then
- echo "Unsupported/unidentified lab $LAB_NAME. Cannot continue!"
- exit 1
-else
- echo "Using configuration for $LAB_NAME"
-fi
-
-# create TMPDIR if it doesn't exist
-export TMPDIR=$HOME/tmpdir
-mkdir -p $TMPDIR
-
-# change permissions down to TMPDIR
-chmod a+x $HOME
-chmod a+x $TMPDIR
-
-# clone fuel repo and checkout the sha1 that corresponds to the ISO
-echo "Cloning fuel repo"
-git clone https://gerrit.opnfv.org/gerrit/p/fuel.git fuel
-cd $WORKSPACE/fuel
-echo "Checking out $OPNFV_GIT_SHA1"
-git checkout $OPNFV_GIT_SHA1 --quiet
-
-# clone the securedlab repo
-cd $WORKSPACE
-echo "Cloning securedlab repo ${GIT_BRANCH##origin/}"
-git clone ssh://jenkins-ericsson@gerrit.opnfv.org:29418/securedlab --quiet \
- --branch ${GIT_BRANCH##origin/}
-
-# log file name
-FUEL_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}.log.tar.gz"
-
-# construct the command
-DEPLOY_COMMAND="sudo $WORKSPACE/fuel/ci/deploy.sh -b file://$WORKSPACE/securedlab \
- -l $LAB_NAME -p $POD_NAME -s $DEPLOY_SCENARIO -i file://$WORKSPACE/opnfv.iso \
- -H -B $BRIDGE -S $TMPDIR -L $WORKSPACE/$FUEL_LOG_FILENAME"
-
-# log info to console
-echo "Deployment parameters"
-echo "--------------------------------------------------------"
-echo "Scenario: $DEPLOY_SCENARIO"
-echo "Lab: $LAB_NAME"
-echo "POD: $POD_NAME"
-echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
-echo
-echo "Starting the deployment using $INSTALLER_TYPE. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-# start the deployment
-echo "Issuing command"
-echo "$DEPLOY_COMMAND"
-echo
-
-$DEPLOY_COMMAND
-exit_code=$?
-
-echo
-echo "--------------------------------------------------------"
-echo "Deployment is done!"
-
-if [[ $exit_code -ne 0 ]]; then
- echo "Deployment failed!"
- exit $exit_code
-else
- echo "Deployment is successful!"
- exit 0
-fi
diff --git a/jjb/multisite/multisite-daily-jobs.yml b/jjb/multisite/multisite-daily-jobs.yml
deleted file mode 100644
index 06cefb646..000000000
--- a/jjb/multisite/multisite-daily-jobs.yml
+++ /dev/null
@@ -1,305 +0,0 @@
-- project:
- name: kingbird
-
- project: 'multisite'
-
- jobs:
- - 'multisite-kingbird-virtual-daily-{stream}'
- - 'multisite-{phase}-{stream}'
-
- phase:
- - 'fuel-deploy-regionone-virtual':
- slave-label: ericsson-virtual12
- - 'fuel-deploy-regiontwo-virtual':
- slave-label: ericsson-virtual13
- - 'register-endpoints':
- slave-label: ericsson-virtual12
- - 'update-auth':
- slave-label: ericsson-virtual13
- - 'kingbird-deploy-virtual':
- slave-label: ericsson-virtual12
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- timed: '0 12 * * *'
- - danube:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- timed: '0 0 * * *'
-
-- job-template:
- name: 'multisite-kingbird-virtual-daily-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - choice:
- name: FRESH_INSTALL
- choices:
- - 'true'
- - 'false'
- - string:
- name: KINGBIRD_LOG_FILE
- default: $WORKSPACE/kingbird.log
- - 'opnfv-build-defaults'
-
- triggers:
- - timed: '{timed}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: fuel-deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'multisite-fuel-deploy-regionone-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- FUEL_VERSION=latest
- DEPLOY_SCENARIO=os-nosdn-nofeature-noha
- OS_REGION=RegionOne
- REGIONONE_IP=100.64.209.10
- REGIONTWO_IP=100.64.209.11
- FRESH_INSTALL=$FRESH_INSTALL
- node-parameters: false
- node-label-name: SLAVE_LABEL
- node-label: ericsson-virtual12
- kill-phase-on: FAILURE
- abort-all-job: true
- - name: 'multisite-fuel-deploy-regiontwo-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- FUEL_VERSION=latest
- DEPLOY_SCENARIO=os-nosdn-nofeature-noha
- OS_REGION=RegionTwo
- REGIONONE_IP=100.64.209.10
- REGIONTWO_IP=100.64.209.11
- FRESH_INSTALL=$FRESH_INSTALL
- node-parameters: false
- node-label-name: SLAVE_LABEL
- node-label: ericsson-virtual13
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: centralize-keystone
- condition: SUCCESSFUL
- projects:
- - name: 'multisite-register-endpoints-{stream}'
- current-parameters: false
- predefined-parameters: |
- OS_REGION=RegionOne
- REGIONONE_IP=100.64.209.10
- REGIONTWO_IP=100.64.209.11
- FRESH_INSTALL=$FRESH_INSTALL
- node-parameters: false
- node-label-name: SLAVE_LABEL
- node-label: ericsson-virtual12
- kill-phase-on: FAILURE
- abort-all-job: true
- - name: 'multisite-update-auth-{stream}'
- current-parameters: false
- predefined-parameters: |
- OS_REGION=RegionTwo
- REGIONONE_IP=100.64.209.10
- REGIONTWO_IP=100.64.209.11
- FRESH_INSTALL=$FRESH_INSTALL
- node-parameters: false
- node-label-name: SLAVE_LABEL
- node-label: ericsson-virtual13
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: kingbird-deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'multisite-kingbird-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- OS_REGION=RegionOne
- REGIONONE_IP=100.64.209.10
- REGIONTWO_IP=100.64.209.11
- FRESH_INSTALL=$FRESH_INSTALL
- node-parameters: false
- node-label-name: SLAVE_LABEL
- node-label: ericsson-virtual12
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: kingbird-functest
- condition: SUCCESSFUL
- projects:
- - name: 'functest-fuel-virtual-suite-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-multisite-noha
- FUNCTEST_SUITE_NAME=multisite
- OS_REGION=RegionOne
- REGIONONE_IP=100.64.209.10
- REGIONTWO_IP=100.64.209.11
- FRESH_INSTALL=$FRESH_INSTALL
- node-parameters: false
- node-label-name: SLAVE_LABEL
- node-label: ericsson-virtual12
- kill-phase-on: NEVER
- abort-all-job: false
-
-- job-template:
- name: 'multisite-{phase}-{stream}'
-
- concurrent: false
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: KINGBIRD_LOG_FILE
- default: $WORKSPACE/kingbird.log
- - string:
- name: GS_PATH
- default: 'http://artifacts.opnfv.org/fuel{gs-pathname}'
- - 'fuel-defaults'
- - '{slave-label}-defaults'
- - choice:
- name: FRESH_INSTALL
- choices:
- - 'true'
- - 'false'
-
- scm:
- - git-scm
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - 'multisite-{phase}-builder':
- stream: '{stream}'
-
- publishers:
- - 'multisite-{phase}-publisher'
-
-########################
-# builder macros
-########################
-- builder:
- name: 'multisite-fuel-deploy-regionone-virtual-builder'
- builders:
- - shell:
- !include-raw-escape: ./fuel-deploy-for-multisite.sh
- - shell: |
- #!/bin/bash
-
- echo "This is where we deploy fuel, extract passwords and save into file"
-
- cd $WORKSPACE/tools/keystone/
- ./run.sh -t controller -r fetchpass.sh -o servicepass.ini
-
-- builder:
- name: 'multisite-fuel-deploy-regiontwo-virtual-builder'
- builders:
- - shell:
- !include-raw-escape: ./fuel-deploy-for-multisite.sh
- - shell: |
- #!/bin/bash
-
- echo "This is where we deploy fuel, extract publicUrl, privateUrl, and adminUrl and save into file"
-
- cd $WORKSPACE/tools/keystone/
- ./run.sh -t controller -r endpoint.sh -o endpoints.ini
-- builder:
- name: 'multisite-register-endpoints-builder'
- builders:
- - copyartifact:
- project: 'multisite-fuel-deploy-regiontwo-virtual-{stream}'
- which-build: multijob-build
- filter: "endpoints.ini"
- - shell: |
- #!/bin/bash
-
- echo "This is where we register RegionTwo in RegionOne keystone using endpoints.ini"
-
- cd $WORKSPACE/tools/keystone/
- ./run.sh -t controller -r region.sh -d $WORKSPACE/endpoints.ini
-- builder:
- name: 'multisite-update-auth-builder'
- builders:
- - copyartifact:
- project: 'multisite-fuel-deploy-regionone-virtual-{stream}'
- which-build: multijob-build
- filter: "servicepass.ini"
- - shell: |
- #!/bin/bash
-
- echo "This is where we read passwords from servicepass.ini and replace passwords in RegionTwo"
-
- cd $WORKSPACE/tools/keystone/
- ./run.sh -t controller -r writepass.sh -d $WORKSPACE/servicepass.ini
- ./run.sh -t compute -r writepass.sh -d $WORKSPACE/servicepass.ini
-- builder:
- name: 'multisite-kingbird-deploy-virtual-builder'
- builders:
- - shell: |
- #!/bin/bash
-
- echo "This is where we install kingbird"
- cd $WORKSPACE/tools/kingbird
- ./deploy.sh
-########################
-# publisher macros
-########################
-- publisher:
- name: 'multisite-fuel-deploy-regionone-virtual-publisher'
- publishers:
- - archive:
- artifacts: 'servicepass.ini'
- allow-empty: false
- only-if-success: true
- fingerprint: true
-- publisher:
- name: 'multisite-fuel-deploy-regiontwo-virtual-publisher'
- publishers:
- - archive:
- artifacts: 'endpoints.ini'
- allow-empty: false
- only-if-success: true
- fingerprint: true
-- publisher:
- name: 'multisite-register-endpoints-publisher'
- publishers:
- - archive:
- artifacts: 'dummy.txt'
- allow-empty: true
-- publisher:
- name: 'multisite-update-auth-publisher'
- publishers:
- - archive:
- artifacts: 'dummy.txt'
- allow-empty: true
-- publisher:
- name: 'multisite-kingbird-deploy-virtual-publisher'
- publishers:
- - archive:
- artifacts: 'dummy.txt'
- allow-empty: true
-- publisher:
- name: 'multisite-kingbird-functest-publisher'
- publishers:
- - archive:
- artifacts: 'dummy.txt'
- allow-empty: true
diff --git a/jjb/nfvbench/nfvbench.yml b/jjb/nfvbench/nfvbench.yml
new file mode 100644
index 000000000..62776d566
--- /dev/null
+++ b/jjb/nfvbench/nfvbench.yml
@@ -0,0 +1,93 @@
+---
+- project:
+ name: nfvbench
+
+ project: '{name}'
+
+ jobs:
+ - 'nfvbench-build-{stream}'
+ - 'nfvbench-verify-{stream}'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+ docker-tag: 'latest'
+
+- job-template:
+ name: 'nfvbench-build-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ # yamllint disable rule:line-length
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: "Used for overriding the GIT URL coming from Global Jenkins configuration in case if the stuff is done on none-LF HW."
+ # yamllint enable rule:line-length
+ - 'opnfv-build-ubuntu-defaults'
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - comment-added-contains-event:
+ comment-contains-value: 'buildvm'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+
+ builders:
+ - shell: |
+ cd $WORKSPACE/nfvbenchvm/dib
+ bash build-image.sh
+
+- job-template:
+ name: 'nfvbench-verify-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ # yamllint disable rule:line-length
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: "Used for overriding the GIT URL coming from Global Jenkins configuration in case if the stuff is done on none-LF HW."
+ # yamllint enable rule:line-length
+ - 'opnfv-build-ubuntu-defaults'
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+
+ builders:
+ - shell: |
+ echo "pass"
diff --git a/jjb/opera/opera-daily-jobs.yml b/jjb/opera/opera-daily-jobs.yml
index 596d3771f..d0dd0525a 100644
--- a/jjb/opera/opera-daily-jobs.yml
+++ b/jjb/opera/opera-daily-jobs.yml
@@ -1,31 +1,32 @@
+---
- project:
name: 'opera-daily-jobs'
project: 'opera'
-#####################################
-# branch definitions
-#####################################
+ #####################################
+ # branch definitions
+ #####################################
master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
+ stream: master
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
-#####################################
-# pod definitions
-#####################################
+ #####################################
+ # pod definitions
+ #####################################
pod:
- - virtual:
- slave-label: 'huawei-virtual7'
- os-version: 'xenial'
- <<: *master
+ - virtual:
+ slave-label: 'huawei-virtual7'
+ os-version: 'xenial'
+ <<: *master
-#####################################
-# jobs
-#####################################
+ #####################################
+ # jobs
+ #####################################
jobs:
- - 'opera-{pod}-daily-{stream}'
+ - 'opera-{pod}-daily-{stream}'
#####################################
# job templates
@@ -40,59 +41,58 @@
concurrent: false
properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 1
+ max-per-node: 1
+ option: 'project'
scm:
- - git-scm
+ - git-scm
wrappers:
- - ssh-agent-wrapper
-
- - timeout:
- timeout: 240
- fail: true
+ - ssh-agent-wrapper
+ - timeout:
+ timeout: 240
+ fail: true
triggers:
- - timed: '@midnight'
+ - timed: '@midnight'
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: DEPLOY_SCENARIO
- default: os-nosdn-openo-ha
- - '{slave-label}-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: os-nosdn-openo-ha
+ - '{slave-label}-defaults'
builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: deploy
- condition: SUCCESSFUL
- projects:
- - name: 'compass-deploy-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-openo-ha
- COMPASS_OS_VERSION=xenial
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: functest
- condition: SUCCESSFUL
- projects:
- - name: 'functest-compass-{pod}-suite-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-openo-ha
- FUNCTEST_SUITE_NAME=opera_vims
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - multijob:
+ name: deploy
+ condition: SUCCESSFUL
+ projects:
+ - name: 'compass-deploy-{pod}-daily-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-openo-ha
+ COMPASS_OS_VERSION=xenial
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: functest
+ condition: SUCCESSFUL
+ projects:
+ - name: 'functest-compass-{pod}-suite-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-openo-ha
+ FUNCTEST_SUITE_NAME=opera_vims
+ node-parameters: true
+ kill-phase-on: NEVER
+ abort-all-job: true
diff --git a/jjb/opera/opera-project-jobs.yml b/jjb/opera/opera-project-jobs.yml
index 38efbc159..f866342a4 100644
--- a/jjb/opera/opera-project-jobs.yml
+++ b/jjb/opera/opera-project-jobs.yml
@@ -1,3 +1,4 @@
+---
- project:
name: opera-project
@@ -5,12 +6,12 @@
project: 'opera'
stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
jobs:
- - 'opera-build-{stream}'
+ - 'opera-build-{stream}'
########################
# job templates
@@ -21,27 +22,27 @@
concurrent: true
properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 1
+ max-per-node: 1
+ option: 'project'
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- - git-scm
+ - git-scm
triggers:
- - timed: 'H 23 * * *'
+ - timed: 'H 23 * * *'
builders:
- - 'opera-build-macro'
+ - 'opera-build-macro'
#####################################
# builder macros
@@ -49,9 +50,7 @@
- builder:
name: 'opera-build-macro'
builders:
- - shell: |
- #!/bin/bash
-
- echo "Hello world!"
-
+ - shell: |
+ #!/bin/bash
+ echo "Hello world!"
diff --git a/jjb/opera/opera-verify-jobs.yml b/jjb/opera/opera-verify-jobs.yml
index 4da41d8d9..ad93d4653 100644
--- a/jjb/opera/opera-verify-jobs.yml
+++ b/jjb/opera/opera-verify-jobs.yml
@@ -1,30 +1,31 @@
+---
- project:
name: 'opera-verify-jobs'
project: 'opera'
-#####################################
-# branch definitions
-#####################################
+ #####################################
+ # branch definitions
+ #####################################
stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
-#####################################
-# patch verification phases
-#####################################
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+
+ #####################################
+ # patch verification phases
+ #####################################
phase:
- - 'basic'
- - 'deploy'
+ - 'basic'
+ - 'deploy'
-#####################################
-# jobs
-#####################################
+ #####################################
+ # jobs
+ #####################################
jobs:
- - 'opera-verify-{stream}'
- - 'opera-verify-{phase}-{stream}'
+ - 'opera-verify-{stream}'
+ - 'opera-verify-{phase}-{stream}'
#####################################
# job templates
#####################################
@@ -38,77 +39,77 @@
concurrent: true
properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 1
+ max-per-node: 1
+ option: 'project'
scm:
- - git-scm-gerrit
+ - git-scm-gerrit
wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 120
- fail: true
+ - ssh-agent-wrapper
+ - timeout:
+ timeout: 120
+ fail: true
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '**/*'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
- readable-message: true
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: ANT
+ pattern: '**/*'
+ disable-strict-forbidden-file-verification: 'true'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**'
+ readable-message: true
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'huawei-pod7-defaults'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'huawei-pod7-defaults'
builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: basic
- condition: SUCCESSFUL
- projects:
- - name: 'opera-verify-basic-{stream}'
- current-parameters: true
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: deploy
- condition: SUCCESSFUL
- projects:
- - name: 'opera-verify-deploy-{stream}'
- current-parameters: true
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - multijob:
+ name: basic
+ condition: SUCCESSFUL
+ projects:
+ - name: 'opera-verify-basic-{stream}'
+ current-parameters: true
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: deploy
+ condition: SUCCESSFUL
+ projects:
+ - name: 'opera-verify-deploy-{stream}'
+ current-parameters: true
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
- job-template:
name: 'opera-verify-{phase}-{stream}'
@@ -118,25 +119,25 @@
concurrent: true
properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-per-node: 1
+ option: 'project'
scm:
- - git-scm-gerrit
+ - git-scm-gerrit
wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 120
- fail: true
+ - ssh-agent-wrapper
+ - timeout:
+ timeout: 120
+ fail: true
builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-verify-{phase}-macro'
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - '{project}-verify-{phase}-macro'
#####################################
# builder macros
@@ -144,14 +145,13 @@
- builder:
name: 'opera-verify-basic-macro'
builders:
- - shell: |
- #!/bin/bash
- echo "Hello world!"
+ - shell: |
+ #!/bin/bash
+ echo "Hello world!"
- builder:
name: 'opera-verify-deploy-macro'
builders:
- - shell: |
- #!/bin/bash
- echo "Hello world!"
-
+ - shell: |
+ #!/bin/bash
+ echo "Hello world!"
diff --git a/jjb/opnfvdocs/docs-rtd.yaml b/jjb/opnfvdocs/docs-rtd.yaml
index 864626b06..f81feab22 100644
--- a/jjb/opnfvdocs/docs-rtd.yaml
+++ b/jjb/opnfvdocs/docs-rtd.yaml
@@ -1,14 +1,15 @@
+---
- project:
name: docs-rtd
jobs:
- - 'docs-merge-rtd-{stream}'
- - 'docs-verify-rtd-{stream}'
+ - 'docs-merge-rtd-{stream}'
+ - 'docs-verify-rtd-{stream}'
stream:
- - master:
- branch: 'master'
- - danube:
- branch: 'stable/{stream}'
+ - master:
+ branch: 'master'
+ - danube:
+ branch: 'stable/{stream}'
project: 'opnfvdocs'
rtdproject: 'opnfv'
@@ -20,28 +21,28 @@
project-type: freestyle
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'lf-build1'
- description: 'Slave label on Jenkins'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
+ - label:
+ name: SLAVE_LABEL
+ default: 'lf-build1'
+ description: 'Slave label on Jenkins'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
triggers:
- - gerrit-trigger-change-merged:
- project: '**'
- branch: '{branch}'
- files: 'docs/**/*.*'
+ - gerrit-trigger-change-merged:
+ project: '**'
+ branch: '{branch}'
+ files: 'docs/**/*.*'
builders:
- - shell: |
- if [ $GERRIT_BRANCH == "master" ]; then
- RTD_BUILD_VERSION=latest
- else
- RTD_BUILD_VERSION=${{GERRIT_BRANCH/\//-}}
- fi
- curl -X POST --data "version_slug=$RTD_BUILD_VERSION" https://readthedocs.org/build/opnfvdocsdemo
+ - shell: |
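+ # Double braces are JJB escaping: ${{GERRIT_BRANCH/\//-}} is rendered as ${GERRIT_BRANCH/\//-} (slashes replaced with dashes) in the generated job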
+ if [ $GERRIT_BRANCH == "master" ]; then
+ RTD_BUILD_VERSION=latest
+ else
+ RTD_BUILD_VERSION=${{GERRIT_BRANCH/\//-}}
+ fi
+ curl -X POST --data "version_slug=$RTD_BUILD_VERSION" https://readthedocs.org/build/opnfvdocsdemo
- job-template:
@@ -50,42 +51,43 @@
project-type: freestyle
parameters:
- - label:
- name: SLAVE_LABEL
- default: 'lf-build2'
- description: 'Slave label on Jenkins'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/opnfvdocs
- description: 'Git URL to use on this Jenkins Slave'
+ - label:
+ name: SLAVE_LABEL
+ default: 'lf-build2'
+ description: 'Slave label on Jenkins'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/opnfvdocs
+ description: 'Git URL to use on this Jenkins Slave'
+
scm:
- - git-scm-with-submodules:
- branch: '{branch}'
+ - git-scm-with-submodules:
+ branch: '{branch}'
triggers:
- - gerrit-trigger-patchset-created:
- server: 'gerrit.opnfv.org'
- project: '**'
- branch: '{branch}'
- files: 'docs/**/*.*'
- - timed: 'H H * * *'
+ - gerrit-trigger-patchset-created:
+ server: 'gerrit.opnfv.org'
+ project: '**'
+ branch: '{branch}'
+ files: 'docs/**/*.*'
+ - timed: 'H H * * *'
builders:
- - shell: |
- if [ "$GERRIT_PROJECT" != "opnfvdocs" ]; then
- cd docs/submodules/$GERRIT_PROJECT
- git fetch origin $GERRIT_REFSPEC && git checkout FETCH_HEAD
- else
- git fetch origin $GERRIT_REFSPEC && git checkout FETCH_HEAD
- fi
- - shell: |
- sudo pip install virtualenv
- virtualenv $WORKSPACE/venv
- . $WORKSPACE/venv/bin/activate
- pip install --upgrade pip
- pip freeze
- pip install tox
- tox -edocs
+ - shell: |
+ if [ "$GERRIT_PROJECT" != "opnfvdocs" ]; then
+ cd docs/submodules/$GERRIT_PROJECT
+ git fetch origin $GERRIT_REFSPEC && git checkout FETCH_HEAD
+ else
+ git fetch origin $GERRIT_REFSPEC && git checkout FETCH_HEAD
+ fi
+ - shell: |
+ sudo pip install virtualenv
+ virtualenv $WORKSPACE/venv
+ . $WORKSPACE/venv/bin/activate
+ pip install --upgrade pip
+ pip freeze
+ pip install tox
+ tox -edocs
diff --git a/jjb/opnfvdocs/opnfvdocs.yml b/jjb/opnfvdocs/opnfvdocs.yml
index fc825ff64..6f63db96b 100644
--- a/jjb/opnfvdocs/opnfvdocs.yml
+++ b/jjb/opnfvdocs/opnfvdocs.yml
@@ -1,3 +1,4 @@
+---
########################
# Job configuration for opnfvdocs
########################
@@ -8,19 +9,19 @@
project: '{name}'
jobs:
- - 'opnfvdocs-verify-shellcheck-{stream}'
- - 'opnfvdocs-merge-shellcheck-{stream}'
- - 'opnfvdocs-daily-{stream}'
+ - 'opnfvdocs-verify-shellcheck-{stream}'
+ - 'opnfvdocs-merge-shellcheck-{stream}'
+ - 'opnfvdocs-daily-{stream}'
stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - danube:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+ - danube:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: false
########################
# job templates
@@ -32,44 +33,44 @@
disabled: '{obj:disabled}'
parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - string:
- name: GIT_CLONE_BASE
- default: ssh://gerrit.opnfv.org:29418
- description: "Used for overriding the GIT URL coming from parameters macro."
+ - project-parameter:
+ project: $GERRIT_PROJECT
+ branch: '{branch}'
+ - string:
+ name: GIT_CLONE_BASE
+ default: ssh://gerrit.opnfv.org:29418
+ description: "Used for overriding the GIT URL coming from parameters macro."
scm:
- - git-scm-gerrit
+ - git-scm-gerrit
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- skip-vote:
- successful: true
- failed: true
- unstable: true
- notbuilt: true
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'REG_EXP'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ skip-vote:
+ successful: true
+ failed: true
+ unstable: true
+ notbuilt: true
builders:
- - check-bash-syntax
+ - check-bash-syntax
- job-template:
name: 'opnfvdocs-merge-shellcheck-{stream}'
@@ -77,37 +78,37 @@
disabled: '{obj:disabled}'
parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - string:
- name: GIT_CLONE_BASE
- default: ssh://gerrit.opnfv.org:29418
- description: "Used for overriding the GIT URL coming from parameters macro."
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "Directory where the build artifact will be located upon the completion of the build."
+ - project-parameter:
+ project: $GERRIT_PROJECT
+ branch: '{branch}'
+ - string:
+ name: GIT_CLONE_BASE
+ default: ssh://gerrit.opnfv.org:29418
+ description: "Used for overriding the GIT URL coming from parameters macro."
+ - string:
+ name: GS_URL
+ default: '$GS_BASE{gs-pathname}'
+ description: "Directory where the build artifact will be located upon the completion of the build."
scm:
- - git-scm
+ - git-scm
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - change-merged-event
+ - comment-added-contains-event:
+ comment-contains-value: 'remerge'
+ projects:
+ - project-compare-type: 'REG_EXP'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
builders:
- - check-bash-syntax
+ - check-bash-syntax
- job-template:
name: 'opnfvdocs-daily-{stream}'
@@ -115,24 +116,24 @@
disabled: true
parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: GIT_CLONE_BASE
- default: ssh://gerrit.opnfv.org:29418
- description: "Used for overriding the GIT URL coming from parameters macro."
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: GS_URL
+ default: '$GS_BASE{gs-pathname}'
+ description: "Directory where the build artifact will be located upon the completion of the build."
+ - string:
+ name: GIT_CLONE_BASE
+ default: ssh://gerrit.opnfv.org:29418
+ description: "Used for overriding the GIT URL coming from parameters macro."
scm:
- - git-scm
+ - git-scm
triggers:
- - timed: '0 H/6 * * *'
+ - timed: '0 H/6 * * *'
builders:
- - build-html-and-pdf-docs-output
-# - upload-generated-docs-to-opnfv-artifacts
+ - build-html-and-pdf-docs-output
+# - upload-generated-docs-to-opnfv-artifacts
diff --git a/jjb/opnfvdocs/project.cfg b/jjb/opnfvdocs/project.cfg
index 1ea05c1d4..0722b4036 100644
--- a/jjb/opnfvdocs/project.cfg
+++ b/jjb/opnfvdocs/project.cfg
@@ -5,6 +5,7 @@ bottlenecks
compass4nfv
copper
conductor
+container4nfv
daisy
doctor
domino
@@ -24,7 +25,6 @@ movie
multisite
octopus
onosfw
-openretriever
ovno
ovsnfv
parser
diff --git a/jjb/orchestra/orchestra-daily-jobs.yml b/jjb/orchestra/orchestra-daily-jobs.yml
new file mode 100644
index 000000000..74c997cad
--- /dev/null
+++ b/jjb/orchestra/orchestra-daily-jobs.yml
@@ -0,0 +1,99 @@
+---
+###################################
+# job configuration for orchestra
+###################################
+- project:
+ name: 'orchestra-daily-jobs'
+
+ project: 'orchestra'
+
+ # -------------------------------
+ # BRANCH ANCHORS
+ # -------------------------------
+ master: &master
+ stream: master
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+
+ # ------------------------------------------------------
+ # POD, INSTALLER, AND BRANCH MAPPING
+ # ------------------------------------------------------
+ pod:
+ - virtual:
+ slave-label: 'joid-virtual'
+ os-version: 'xenial'
+ <<: *master
+
+ jobs:
+ - 'orchestra-{pod}-daily-{stream}'
+
+################################
+# job template
+################################
+- job-template:
+ name: 'orchestra-{pod}-daily-{stream}'
+
+ project-type: multijob
+
+ disabled: '{obj:disabled}'
+
+ concurrent: false
+
+ properties:
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 1
+ max-per-node: 1
+ option: 'project'
+
+ scm:
+ - git-scm
+
+ wrappers:
+ - ssh-agent-wrapper
+
+ - timeout:
+ timeout: 240
+ fail: true
+
+ triggers:
+ - timed: '@daily'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: os-nosdn-openbaton-ha
+ - '{slave-label}-defaults'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - multijob:
+ name: deploy
+ condition: SUCCESSFUL
+ projects:
+ - name: 'joid-deploy-{pod}-daily-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-openbaton-ha
+ COMPASS_OS_VERSION=xenial
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: functest
+ condition: SUCCESSFUL
+ projects:
+ - name: 'functest-joid-{pod}-daily-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-openbaton-ha
+ FUNCTEST_SUITE_NAME=orchestra_ims
+ node-parameters: true
+ kill-phase-on: NEVER
+ abort-all-job: true
diff --git a/jjb/orchestra/orchestra-project-jobs.yml b/jjb/orchestra/orchestra-project-jobs.yml
new file mode 100644
index 000000000..60732ce03
--- /dev/null
+++ b/jjb/orchestra/orchestra-project-jobs.yml
@@ -0,0 +1,49 @@
+---
+- project:
+
+ name: orchestra-project
+
+ project: 'orchestra'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+
+ jobs:
+ - 'orchestra-build-{stream}'
+
+- job-template:
+ name: 'orchestra-build-{stream}'
+
+ concurrent: true
+
+ properties:
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 1
+ max-per-node: 1
+ option: 'project'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+
+ scm:
+ - git-scm
+
+ triggers:
+ - timed: 'H 23 * * *'
+
+ builders:
+ - 'orchestra-build-macro'
+
+- builder:
+ name: 'orchestra-build-macro'
+ builders:
+ - shell: |
+ #!/bin/bash
+
+ echo "Hello world!"
diff --git a/jjb/ovn4nfv/ovn4nfv-daily-jobs.yml b/jjb/ovn4nfv/ovn4nfv-daily-jobs.yml
new file mode 100644
index 000000000..a4c5865c0
--- /dev/null
+++ b/jjb/ovn4nfv/ovn4nfv-daily-jobs.yml
@@ -0,0 +1,87 @@
+---
+- project:
+ name: 'ovn4nfv-daily-jobs'
+
+ project: 'ovn4nfv'
+
+ master: &master
+ stream: master
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+
+ pod:
+ - virtual:
+ slave-label: 'joid-virtual'
+ os-version: 'xenial'
+ <<: *master
+
+ jobs:
+ - 'ovn4nfv-{pod}-daily-{stream}'
+
+- job-template:
+ name: 'ovn4nfv-{pod}-daily-{stream}'
+
+ project-type: multijob
+
+ disabled: '{obj:disabled}'
+
+ concurrent: false
+
+ properties:
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 1
+ max-per-node: 1
+ option: 'project'
+
+ scm:
+ - git-scm
+
+ wrappers:
+ - ssh-agent-wrapper
+
+ - timeout:
+ timeout: 240
+ fail: true
+
+ triggers:
+ - timed: '@daily'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: os-ovn-nofeature-noha
+ - '{slave-label}-defaults'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - multijob:
+ name: deploy
+ condition: SUCCESSFUL
+ projects:
+ - name: 'joid-deploy-{pod}-daily-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-ovn-nofeature-noha
+ COMPASS_OS_VERSION=xenial
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: functest
+ condition: SUCCESSFUL
+ projects:
+ - name: 'functest-joid-{pod}-daily-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-ovn-nofeature-noha
+ FUNCTEST_SUITE_NAME=ovn4nfv_test_suite
+ node-parameters: true
+ kill-phase-on: NEVER
+ abort-all-job: true
diff --git a/jjb/ovn4nfv/ovn4nfv-project-jobs.yml b/jjb/ovn4nfv/ovn4nfv-project-jobs.yml
new file mode 100644
index 000000000..8c82cac5e
--- /dev/null
+++ b/jjb/ovn4nfv/ovn4nfv-project-jobs.yml
@@ -0,0 +1,52 @@
+---
+- project:
+ name: ovn4nfv
+
+ project: '{name}'
+
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+
+ jobs:
+ - 'ovn4nfv-build-{stream}'
+
+- job-template:
+ name: 'ovn4nfv-build-{stream}'
+
+ concurrent: true
+
+ disabled: '{obj:disabled}'
+
+ properties:
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 1
+ max-per-node: 1
+ option: 'project'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+
+ scm:
+ - git-scm
+
+ triggers:
+ - timed: 'H 23 * * *'
+
+ builders:
+ - 'ovn4nfv-build-macro'
+
+- builder:
+ name: 'ovn4nfv-build-macro'
+ builders:
+ - shell: |
+ #!/bin/bash
+
+ echo "hello world"
diff --git a/jjb/ovsnfv/ovsnfv.yml b/jjb/ovsnfv/ovsnfv.yml
index 0e8c713fd..62f6de05a 100644
--- a/jjb/ovsnfv/ovsnfv.yml
+++ b/jjb/ovsnfv/ovsnfv.yml
@@ -141,6 +141,7 @@
publishers:
- email:
recipients: therbert@redhat.com mark.d.gray@intel.com billy.o.mahony@intel.com
+ - email-jenkins-admins-on-failure
- builder:
name: build-rpms
diff --git a/jjb/qtip/helpers/cleanup-deploy.sh b/jjb/qtip/helpers/cleanup-deploy.sh
deleted file mode 100644
index 9cb19a583..000000000
--- a/jjb/qtip/helpers/cleanup-deploy.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2016 ZTE and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-# Remove previous running containers if exist
-if [[ ! -z $(docker ps -a | grep "opnfv/qtip:$DOCKER_TAG") ]]; then
- echo "Removing existing opnfv/qtip containers..."
- # workaround: sometimes it throws an error when stopping qtip container.
- # To make sure ci job unblocked, remove qtip container by force without stopping it.
- docker rm -f $(docker ps -a | grep "opnfv/qtip:$DOCKER_TAG" | awk '{print $1}')
-fi
-
-# Remove existing images if exist
-if [[ $(docker images opnfv/qtip:${DOCKER_TAG} | wc -l) -gt 1 ]]; then
- echo "Removing docker image opnfv/qtip:$DOCKER_TAG..."
- docker rmi opnfv/qtip:$DOCKER_TAG
-fi
diff --git a/jjb/qtip/helpers/validate-deploy.sh b/jjb/qtip/helpers/validate-deploy.sh
deleted file mode 100644
index af8f8c200..000000000
--- a/jjb/qtip/helpers/validate-deploy.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2017 ZTE and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -e
-
-echo "--------------------------------------------------------"
-echo "POD: $NODE_NAME"
-echo "Scenario: $DEPLOY_SCENARIO"
-echo "INSTALLER: $INSTALLER_TYPE"
-echo "INSTALLER_IP: $INSTALLER_IP"
-echo "--------------------------------------------------------"
-
-echo "Qtip: Pulling docker image: opnfv/qtip:${DOCKER_TAG}"
-docker pull opnfv/qtip:$DOCKER_TAG >/dev/null
-
-envs="INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP}
--e POD_NAME=${NODE_NAME} -e SCENARIO=${DEPLOY_SCENARIO}"
-
-cmd=" docker run -id -e $envs opnfv/qtip:${DOCKER_TAG} /bin/bash"
-echo "Qtip: Running docker command: ${cmd}"
-${cmd}
-
-container_id=$(docker ps | grep "opnfv/qtip:${DOCKER_TAG}" | awk '{print $1}' | head -1)
-if [ $(docker ps | grep 'opnfv/qtip' | wc -l) == 0 ]; then
- echo "The container opnfv/qtip with ID=${container_id} has not been properly started. Exiting..."
- exit 1
-fi
-
-echo "The container ID is: ${container_id}"
-QTIP_REPO=/home/opnfv/repos/qtip
-
-docker exec -t ${container_id} bash -c "bash ${QTIP_REPO}/tests/ci/run_ci.sh"
-
-echo "Qtip done!"
-exit 0
\ No newline at end of file
diff --git a/jjb/qtip/helpers/validate-setup.sh b/jjb/qtip/helpers/validate-setup.sh
deleted file mode 100644
index 8d84e120c..000000000
--- a/jjb/qtip/helpers/validate-setup.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env bash
-##############################################################################
-# Copyright (c) 2017 ZTE and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-set -e
-
-# setup virtualenv
-sudo pip install -u virtualenv virtualenvwrapper
-export WORKON_HOME=$HOME/.virtualenvs
-source /usr/local/bin/virtualenvwrapper.sh
-mkvirtualenv qtip
-workon qtip
-
-# setup qtip
-sudo pip install $HOME/repos/qtip
-
-# testing
-qtip --version
-qtip --help
diff --git a/jjb/qtip/qtip-experimental-jobs.yml b/jjb/qtip/qtip-experimental-jobs.yml
new file mode 100644
index 000000000..05445d898
--- /dev/null
+++ b/jjb/qtip/qtip-experimental-jobs.yml
@@ -0,0 +1,44 @@
+###########################################
+# Experimental jobs for development purposes
+###########################################
+
+- project:
+ name: qtip-experimental-jobs
+ project: qtip
+ jobs:
+ - 'qtip-experimental-{stream}'
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+
+################################
+# job templates
+################################
+
+- job-template:
+ name: 'qtip-experimental-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ # Pin the tests on zte-pod4 with apex deployment
+ - apex-defaults
+ - zte-pod4-defaults
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - experimental:
+ project: '{project}'
+ branch: '{branch}'
+ files: '**'
+
+ builders:
+ - shell: |
+ #!/bin/bash
+ source tests/ci/experimental.sh
diff --git a/jjb/qtip/qtip-validate-jobs.yml b/jjb/qtip/qtip-validate-jobs.yml
index 8dd97de1d..e64173ca7 100644
--- a/jjb/qtip/qtip-validate-jobs.yml
+++ b/jjb/qtip/qtip-validate-jobs.yml
@@ -13,48 +13,31 @@
branch: '{stream}'
gs-pathname: ''
docker-tag: latest
- danube: &danube
- stream: danube
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- docker-tag: 'stable'
#--------------------------------
# JOB VARIABLES
#--------------------------------
- pod:
- - zte-pod1:
- installer: fuel
- scenario: os-odl_l2-nofeature-ha
+ qpi:
+ - compute:
+ installer: apex
+ pod: zte-pod4
<<: *master
- - zte-pod3:
- installer: fuel
- scenario: os-nosdn-kvm-ha
+ - storage:
+ installer: apex
+ pod: zte-pod4
<<: *master
- - zte-pod1:
- installer: fuel
- scenario: os-odl_l2-nofeature-ha
- <<: *danube
- - zte-pod3:
- installer: fuel
- scenario: os-nosdn-nofeature-ha
- <<: *danube
- - zte-pod3:
- installer: fuel
- scenario: os-nosdn-kvm-ha
- <<: *danube
#--------------------------------
# JOB LIST
#--------------------------------
jobs:
- - 'qtip-{scenario}-{pod}-daily-{stream}'
+ - 'qtip-{qpi}-{installer}-{stream}'
################################
# job templates
################################
- job-template:
- name: 'qtip-{scenario}-{pod}-daily-{stream}'
+ name: 'qtip-{qpi}-{installer}-{stream}'
disabled: false
parameters:
- project-parameter:
@@ -64,7 +47,7 @@
- '{pod}-defaults'
- string:
name: DEPLOY_SCENARIO
- default: '{scenario}'
+ default: generic
- string:
name: DOCKER_TAG
default: '{docker-tag}'
@@ -73,16 +56,22 @@
name: CI_DEBUG
default: 'false'
description: "Show debug output information"
+ - string:
+ name: TEST_SUITE
+ default: '{qpi}'
scm:
- git-scm
triggers:
- - 'qtip-{scenario}-{pod}-daily-{stream}-trigger'
+ - 'qtip-daily'
builders:
- description-setter:
description: "POD: $NODE_NAME"
- - qtip-validate-deploy
+ - shell: |
+ #!/bin/bash
+ source tests/ci/periodic.sh
publishers:
- qtip-common-publishers
+ - email-jenkins-admins-on-failure
################
# MARCOS
@@ -91,14 +80,6 @@
#---------
# builder
#---------
-- builder:
- name: qtip-validate-deploy
- builders:
- - shell:
- !include-raw: ./helpers/cleanup-deploy.sh
- - shell:
- !include-raw: ./helpers/validate-deploy.sh
-
#-----------
# parameter
@@ -119,26 +100,6 @@
#---------
- trigger:
- name: 'qtip-os-odl_l2-nofeature-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: '0 15 * * *'
-
-- trigger:
- name: 'qtip-os-nosdn-kvm-ha-zte-pod3-daily-master-trigger'
+ name: 'qtip-daily'
triggers:
- timed: '0 15 * * *'
-
-- trigger:
- name: 'qtip-os-odl_l2-nofeature-ha-zte-pod1-daily-danube-trigger'
- triggers:
- - timed: '0 7 * * *'
-
-- trigger:
- name: 'qtip-os-nosdn-kvm-ha-zte-pod3-daily-danube-trigger'
- triggers:
- - timed: '0 7 * * *'
-
-- trigger:
- name: 'qtip-os-nosdn-nofeature-ha-zte-pod3-daily-danube-trigger'
- triggers:
- - timed: '30 0 * * *'
diff --git a/jjb/qtip/qtip-verify-jobs.yml b/jjb/qtip/qtip-verify-jobs.yml
index dd444c7a5..a273c858f 100644
--- a/jjb/qtip/qtip-verify-jobs.yml
+++ b/jjb/qtip/qtip-verify-jobs.yml
@@ -7,6 +7,8 @@
project: qtip
jobs:
- 'qtip-verify-{stream}'
+ - 'qtip-review-notebook-{stream}'
+ - 'qtip-merge-{stream}'
stream:
- master:
branch: '{stream}'
@@ -62,6 +64,90 @@
- qtip-unit-tests-and-docs-build
publishers:
- publish-coverage
+ - email-jenkins-admins-on-failure
+
+# upload Jupyter notebooks to artifacts for review
+- job-template:
+ name: 'qtip-review-notebook-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'true'
+ file-paths:
+ - compare-type: ANT
+ pattern: 'examples/**'
+ builders:
+ - upload-under-review-notebooks-to-opnfv-artifacts
+ - report-build-result-to-gerrit
+
+- job-template:
+ name: 'qtip-merge-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ parameters:
+ - project-parameter:
+ project: $GERRIT_PROJECT
+ branch: '{branch}'
+ - string:
+ name: GS_URL
+ default: '$GS_BASE{gs-pathname}'
+ description: "Directory where the build artifact will be located upon the completion of the build."
+ - string:
+ name: GERRIT_REFSPEC
+ default: 'refs/heads/{branch}'
+ description: "JJB configured GERRIT_REFSPEC parameter"
+
+ scm:
+ - git-scm
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - change-merged-event
+ - comment-added-contains-event:
+ comment-contains-value: 'remerge'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '*'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: ANT
+ pattern: examples/**
+
+ builders:
+ - remove-old-docs-from-opnfv-artifacts
################################
## job builders
@@ -76,3 +162,35 @@
set -o xtrace
tox
+
+# modified from upload-under-review-docs-to-opnfv-artifacts in global/releng-macro.yml
+- builder:
+ name: upload-under-review-notebooks-to-opnfv-artifacts
+ builders:
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o pipefail
+ set -o xtrace
+ export PATH=$PATH:/usr/local/bin/
+
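+ # abort (errexit) if GERRIT_CHANGE_NUMBER is empty; exit quietly when the change has no examples/ directory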
+ [[ $GERRIT_CHANGE_NUMBER =~ .+ ]]
+ [[ -d examples ]] || exit 0
+
+ echo
+ echo "###########################"
+ echo "UPLOADING DOCS UNDER REVIEW"
+ echo "###########################"
+ echo
+
+ gs_base="artifacts.opnfv.org/$PROJECT/review"
+ gs_path="$gs_base/$GERRIT_CHANGE_NUMBER"
+ local_path="upload/$GERRIT_CHANGE_NUMBER"
+
+ mkdir -p upload
+ cp -r examples "$local_path"
+ gsutil -m cp -r "$local_path" "gs://$gs_base/"
+
+ echo "Document link(s):" >> gerrit_comment.txt
+ find "$local_path" | grep -e 'ipynb$' | \
+ sed -e "s|^$local_path| https://nbviewer.jupyter.org/url/$gs_path|" >> gerrit_comment.txt
diff --git a/jjb/releng/automate.yml b/jjb/releng/automate.yml
index 9eb281de3..c6ca37fa9 100644
--- a/jjb/releng/automate.yml
+++ b/jjb/releng/automate.yml
@@ -82,26 +82,15 @@
builders:
- shell: |
- bash ./utils/test/{module}/run_test.sh
+ cd ./utils/test/{module}/
+ tox
+ if [ -e *.xml ];then
+ cp *.xml $WORKSPACE
+ fi
publishers:
- - junit:
- results: nosetests.xml
- - cobertura:
- report-file: "coverage.xml"
- only-stable: "true"
- health-auto-update: "false"
- stability-auto-update: "false"
- zoom-coverage-chart: "true"
- targets:
- - files:
- healthy: 10
- unhealthy: 20
- failing: 30
- - method:
- healthy: 50
- unhealthy: 40
- failing: 30
+ - publish-coverage
+ - email-jenkins-admins-on-failure
- job-template:
name: '{module}-automate-{stream}'
@@ -182,6 +171,7 @@
publishers:
- 'email-publisher'
+ - email-jenkins-admins-on-failure
- job-template:
name: '{module}-automate-{phase}-{stream}'
@@ -247,12 +237,17 @@
name: 'testapi-automate-docker-deploy-macro'
builders:
- shell: |
- bash ./jjb/releng/docker-deploy.sh 'sudo docker run -dti -p 8082:8000 -e mongodb_url=mongodb://172.17.0.1:27017 -e swagger_url=http://testresults.opnfv.org/test opnfv/testapi' "http://testresults.opnfv.org/test/swagger/APIs"
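+ # docker-deploy.sh arguments: <docker run command> <health-check URL> <container/module name>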
+ sudo bash ./jjb/releng/docker-deploy.sh "sudo docker run -dti --name testapi -p 8082:8000
+ -e mongodb_url=mongodb://172.17.0.1:27017
+ -e base_url=http://testresults.opnfv.org/test opnfv/testapi" \
+ "http://testresults.opnfv.org/test/" "testapi"
+
- builder:
name: 'reporting-automate-docker-deploy-macro'
builders:
- shell: |
- bash ./jjb/releng/docker-deploy.sh 'sudo docker run -itd -p 8084:8000 -e SERVER_URL=http://testresults.opnfv.org/reporting2:8084 opnfv/reporting' "http://testresults.opnfv.org/reporting2/reporting/index.html"
+ sudo bash ./jjb/releng/docker-deploy.sh "sudo docker run -itd --name reporting -p 8084:8000 opnfv/reporting" \
+ "http://testresults.opnfv.org/reporting2/reporting/index.html" "reporting"
- builder:
name: mongodb-backup
diff --git a/jjb/releng/docker-deploy.sh b/jjb/releng/docker-deploy.sh
index b3b930fc4..1e8357717 100644
--- a/jjb/releng/docker-deploy.sh
+++ b/jjb/releng/docker-deploy.sh
@@ -16,86 +16,137 @@
# specific language governing permissions and limitations *
# under the License. *
-# Assigning Variables
+
command=$1
url=$2
+module=$3
+
+REPO="opnfv"
+latest_image=$REPO/$module:latest
+old_image=$REPO/$module:old
+latest_container_name=$module
+old_container_name=$module"_old"
+latest_container_id=
+old_container_id=
+new_start_container=
+
+function DEBUG() {
+ echo `date "+%Y-%m-%d %H:%M:%S.%N"` ": $1"
+}
-function check() {
-
- # Verify hosted
+function check_connectivity() {
+ # check update status by testing the connectivity of the provided url
sleep 5
cmd=`curl -s --head --request GET ${url} | grep '200 OK' > /dev/null`
rc=$?
- echo $rc
-
- if [[ $rc == 0 ]]
- then
+ DEBUG $rc
+ if [[ $rc == 0 ]]; then
return 0
else
return 1
fi
-
}
-echo "Getting contianer Id of the currently running one"
-contId=$(sudo docker ps | grep "opnfv/testapi:latest" | awk '{print $1}')
-echo "Pulling the latest image"
-sudo docker pull opnfv/testapi:latest
+function pull_latest_image() {
+ DEBUG "pull latest image $latest_image"
+ docker pull $latest_image
+}
-echo "Deleting old containers of opnfv/testapi:old"
-sudo docker ps -a | grep "opnfv/testapi" | grep "old" | awk '{print $1}' | xargs -r sudo docker rm -f
+function get_latest_running_container() {
+ latest_container_id=`docker ps -q --filter name=^/$latest_container_name$`
+}
-echo "Deleting old images of opnfv/testapi:latest"
-sudo docker images | grep "opnfv/testapi" | grep "old" | awk '{print $3}' | xargs -r sudo docker rmi -f
+function get_old_running_container() {
+ old_container_id=`docker ps -q --filter name=^/$old_container_name$`
+}
+function delete_old_image() {
+ DEBUG "delete old image: $old_image"
+ docker rmi -f $old_image
+}
-if [[ -z "$contId" ]]
-then
- echo "No running testapi container"
+function delete_old_container() {
+ DEBUG "delete old container: $old_container_name"
+ docker ps -a -q --filter name=^/$old_container_name$ | xargs docker rm -f &>/dev/null
+}
- echo "Removing stopped testapi containers in the previous iterations"
- sudo docker ps -f status=exited | grep "opnfv_testapi" | awk '{print $1}' | xargs -r sudo docker rm -f
-else
- echo $contId
+function delete_latest_container() {
+ DEBUG "delete latest container: $module"
+ docker ps -a -q --filter name=^/$latest_container_name$ | xargs docker rm -f &>/dev/null
+}
- echo "Get the image id of the currently running conatiner"
- currImgId=$(sudo docker ps | grep "$contId" | awk '{print $2}')
- echo $currImgId
+function delete_latest_image() {
+ DEBUG "delete latest image: $REPO/$module:latest"
+ docker rmi -f $latest_image
+}
- if [[ -z "$currImgId" ]]
- then
- echo "No image id found for the container id"
- exit 1
- fi
+function change_image_tag_2_old() {
+ DEBUG "change image tag 2 old"
+ docker tag $latest_image $old_image
+ docker rmi -f $latest_image
+}
- echo "Changing current image tag to old"
- sudo docker tag "$currImgId" opnfv/testapi:old
+function mark_latest_container_2_old() {
+ DEBUG "mark latest container to be old"
+ docker rename "$latest_container_name" "$old_container_name"
+}
- echo "Removing stopped testapi containers in the previous iteration"
- sudo docker ps -f status=exited | grep "opnfv_testapi" | awk '{print $1}' | xargs -r sudo docker rm -f
+function stop_old_container() {
+ DEBUG "stop old container"
+ docker stop "$old_container_name"
+}
- echo "Renaming the running container name to opnfv_testapi as to identify it."
- sudo docker rename $contId opnfv_testapi
+function run_latest_image() {
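+ # $command is a detached 'docker run', so it prints the ID of the newly started container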
+ new_start_container=`$command`
+ DEBUG "run latest image: $new_start_container"
+}
- echo "Stop the currently running container"
- sudo docker stop $contId
+get_latest_running_container
+get_old_running_container
+
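+# Three cases: the latest container is running (normal rolling update), only the old one is running (e.g. a previous update did not complete), or nothing is running (fresh start)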
+if [[ ! -z $latest_container_id ]]; then
+ DEBUG "latest container is running: $latest_container_id"
+ delete_old_container
+ delete_old_image
+ change_image_tag_2_old
+ mark_latest_container_2_old
+ pull_latest_image
+ stop_old_container
+ run_latest_image
+
+elif [[ ! -z $old_container_id ]]; then
+ DEBUG "old container is running: $old_container_id"
+ delete_latest_container
+ delete_latest_image
+ pull_latest_image
+ stop_old_container
+ run_latest_image
+else
+ DEBUG "no container is running"
+ delete_old_container
+ delete_old_image
+ delete_latest_container
+ delete_latest_image
+ pull_latest_image
+ run_latest_image
fi
-echo "Running a container with the new image"
-$command:latest
-
-if check; then
- echo "TestResults Hosted."
+if check_connectivity; then
+ DEBUG "CONGRATS: $module update successfully"
else
- echo "TestResults Hosting Failed"
- if [[ $(sudo docker images | grep "opnfv/testapi" | grep "old" | awk '{print $3}') ]]; then
- echo "Running old Image"
- $command:old
- exit 1
+ DEBUG "ATTENTION: $module update failed"
+ id=`docker ps -a -q --filter name=^/$old_container_name$`
+ if [[ ! -z $id ]]; then
+ DEBUG "start old container instead"
+ docker stop $new_start_container
+ docker start $id
+ fi
+ if ! check_connectivity; then
+ DEBUG "BIG ISSUE: no container is running normally"
fi
+ exit 1
fi
-# Echo Images and Containers
-sudo docker images
-sudo docker ps -a
+docker images
+docker ps -a
diff --git a/jjb/releng/generate-job-list.sh b/jjb/releng/generate-job-list.sh
new file mode 100755
index 000000000..4bf8974e4
--- /dev/null
+++ b/jjb/releng/generate-job-list.sh
@@ -0,0 +1,70 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+set -o errexit
+set -o pipefail
+
+# Job Number Formatter
+function JOBS {
+ local NUMS=$1
+ if [ $NUMS == 1 ]; then
+ echo -n "Job"
+ else
+ echo -n "Jobs"
+ fi
+}
+
+# Activate the virtualenv so we have access to 'jenkins-jobs'
+source /opt/virtualenv/jenkins-job-builder/bin/activate
+
+echo "> Generating list of current JJB jobs..."
+jenkins-jobs -l ERROR test -r jjb -o job_output
+
+echo "> Generating list of previous JJB jobs..."
+git checkout -b previous-commit HEAD^
+jenkins-jobs -l ERROR test -r jjb -o job_output_prev
+git checkout - && git branch -d previous-commit
+
+echo "> Finding job changes ..."
+diff -r -q job_output job_output_prev &> job_diff.txt || true
+
+# Only in (job_output) = NEW
+# Only in (job_output_prev) = DELETED
+# Files ... differ = MODIFIED
+
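+# Parse the diff summary into lists of added, modified and removed job names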
+declare -a JOBS_ADDED=($(grep 'job_output:' job_diff.txt | cut -d':' -f2- | sed 's/^[ \t]*//;s/[ \t]*$//'))
+declare -a JOBS_MODIFIED=($(grep 'differ$' job_diff.txt | sed "s/Files job_output\/\(.*\) and.*/\1/g"))
+declare -a JOBS_REMOVED=($(grep 'job_output_prev:' job_diff.txt | cut -d ':' -f2- | sed 's/^[ \t]*//;s/[ \t]*$//'))
+
+NUM_JOBS_ADDED=${#JOBS_ADDED[@]}
+NUM_JOBS_MODIFIED=${#JOBS_MODIFIED[@]}
+NUM_JOBS_REMOVED=${#JOBS_REMOVED[@]}
+
+echo "> Writing gerrit comment ..."
+if [ $NUM_JOBS_ADDED -gt 0 ]; then
+ JOB_STRING="$(JOBS $NUM_JOBS_ADDED)"
+ { printf "Added %s %s:\n\n" "${NUM_JOBS_ADDED}" "$JOB_STRING";
+ printf '* %s\n' "${JOBS_ADDED[@]}";
+ printf "\n"; } >> gerrit_comment.txt
+fi
+
+if [ $NUM_JOBS_MODIFIED -gt 0 ]; then
+ JOB_STRING="$(JOBS $NUM_JOBS_MODIFIED)"
+ { printf "Modified %s %s:\n\n" "${NUM_JOBS_MODIFIED}" "$JOB_STRING";
+ printf '* %s\n' "${JOBS_MODIFIED[@]}";
+ printf "\n"; } >> gerrit_comment.txt
+fi
+
+if [ $NUM_JOBS_REMOVED -gt 0 ]; then
+ JOB_STRING="$(JOBS $NUM_JOBS_REMOVED)"
+ { printf "Removed %s %s:\n\n" "${NUM_JOBS_REMOVED}" "$JOB_STRING";
+ printf '* %s\n' "${JOBS_REMOVED[@]}";
+ printf "\n"; } >> gerrit_comment.txt
+fi
diff --git a/jjb/releng/opnfv-docker-arm.yml b/jjb/releng/opnfv-docker-arm.yml
index 417fc702c..842d4967d 100644
--- a/jjb/releng/opnfv-docker-arm.yml
+++ b/jjb/releng/opnfv-docker-arm.yml
@@ -23,6 +23,12 @@
cristina.pauna@enea.com
alexandru.avadanii@enea.com
alexandru.nemes@enea.com
+ yardstick-arm-receivers: &yardstick-arm-receivers
+ receivers: >
+ cristina.pauna@enea.com
+ alexandru.avadanii@enea.com
+ alexandru.nemes@enea.com
+ catalina.focsa@enea.com
other-receivers: &other-receivers
receivers: ''
@@ -34,6 +40,9 @@
- 'dovetail':
<<: *master
<<: *dovetail-arm-receivers
+ - 'yardstick':
+ <<: *master
+ <<: *yardstick-arm-receivers
# projects with jobs for stable
jobs:
@@ -65,6 +74,10 @@
default: ""
description: "Release version, e.g. 1.0, 2.0, 3.0"
- string:
+ name: DOCKER_DIR
+ default: "docker"
+ description: "Directory containing files needed by the Dockerfile"
+ - string:
name: DOCKERFILE
default: "Dockerfile.aarch64"
description: "Dockerfile to use for creating the image."
@@ -83,3 +96,4 @@
publishers:
- email:
recipients: '{receivers}'
+ - email-jenkins-admins-on-failure
diff --git a/jjb/releng/opnfv-docker.sh b/jjb/releng/opnfv-docker.sh
index ebd0c9f3d..0de3df28e 100644
--- a/jjb/releng/opnfv-docker.sh
+++ b/jjb/releng/opnfv-docker.sh
@@ -54,7 +54,7 @@ if [[ -n "$(docker images | grep $DOCKER_REPO_NAME)" ]]; then
done
fi
-cd $WORKSPACE/docker
+cd $WORKSPACE/$DOCKER_DIR
HOST_ARCH=$(uname -m)
if [ ! -f "${DOCKERFILE}" ]; then
# If this is expected to be a Dockerfile for other arch than x86
diff --git a/jjb/releng/opnfv-docker.yml b/jjb/releng/opnfv-docker.yml
index 095ba4129..414eba255 100644
--- a/jjb/releng/opnfv-docker.yml
+++ b/jjb/releng/opnfv-docker.yml
@@ -23,62 +23,107 @@
other-receivers: &other-receivers
receivers: ''
- project:
+ dockerfile: "Dockerfile"
+ dockerdir: "docker"
+
+ # This is the dockerhub repo the image will be pushed to as
+ # 'opnfv/{dockerrepo}'. See: DOCKER_REPO_NAME parameter.
+ # 'project' is the OPNFV repo we expect to contain the Dockerfile
+ dockerrepo:
# projects with jobs for master
- 'releng-anteater':
+ project: 'releng-anteater'
<<: *master
<<: *other-receivers
- 'bottlenecks':
+ project: 'bottlenecks'
<<: *master
<<: *other-receivers
- 'cperf':
+ project: 'cperf'
<<: *master
<<: *other-receivers
- 'dovetail':
+ project: 'dovetail'
<<: *master
<<: *other-receivers
- 'functest':
+ project: 'functest'
<<: *master
<<: *functest-receivers
- 'qtip':
+ project: 'qtip'
<<: *master
<<: *other-receivers
- - 'storperf':
+ - 'storperf-master':
+ project: 'storperf'
+ dockerdir: 'docker/storperf-master'
+ <<: *master
+ <<: *other-receivers
+ - 'storperf-graphite':
+ project: 'storperf'
+ dockerdir: 'docker/storperf-graphite'
+ <<: *master
+ <<: *other-receivers
+ - 'storperf-httpfrontend':
+ project: 'storperf'
+ dockerdir: 'docker/storperf-httpfrontend'
+ <<: *master
+ <<: *other-receivers
+ - 'storperf-reporting':
+ project: 'storperf'
+ dockerdir: 'docker/storperf-reporting'
+ <<: *master
+ <<: *other-receivers
+ - 'storperf-swaggerui':
+ project: 'storperf'
+ dockerdir: 'docker/storperf-swaggerui'
<<: *master
<<: *other-receivers
- 'yardstick':
+ project: 'yardstick'
<<: *master
<<: *other-receivers
# projects with jobs for stable
- 'bottlenecks':
+ project: 'bottlenecks'
<<: *danube
<<: *other-receivers
- 'functest':
+ project: 'functest'
<<: *danube
<<: *functest-receivers
- 'qtip':
+ project: 'qtip'
<<: *danube
<<: *other-receivers
- 'storperf':
+ project: 'storperf'
<<: *danube
<<: *other-receivers
- 'yardstick':
+ project: 'yardstick'
<<: *danube
<<: *other-receivers
jobs:
- - '{project}-docker-build-push-{stream}'
+ - "{dockerrepo}-docker-build-push-{stream}"
- project:
name: opnfv-monitor-docker # projects which only monitor dedicated file or path
+ dockerfile: "Dockerfile"
+ dockerdir: "docker"
+
project:
# projects with jobs for master
- 'daisy':
+ dockerrepo: 'daisy'
<<: *master
- 'escalator':
+ dockerrepo: 'escalator'
<<: *master
jobs:
@@ -88,7 +133,7 @@
# job templates
########################
- job-template:
- name: '{project}-docker-build-push-{stream}'
+ name: '{dockerrepo}-docker-build-push-{stream}'
disabled: '{obj:disabled}'
@@ -103,9 +148,13 @@
description: "To enable/disable pushing the image to Dockerhub."
- string:
name: DOCKER_REPO_NAME
- default: "opnfv/{project}"
+ default: "opnfv/{dockerrepo}"
description: "Dockerhub repo to be pushed to."
- string:
+ name: DOCKER_DIR
+ default: "{dockerdir}"
+ description: "Directory containing files needed by the Dockerfile"
+ - string:
name: COMMIT_ID
default: ""
description: "commit id to make a snapshot docker image"
@@ -115,7 +164,7 @@
description: "Release version, e.g. 1.0, 2.0, 3.0"
- string:
name: DOCKERFILE
- default: "Dockerfile"
+ default: "{dockerfile}"
description: "Dockerfile to use for creating the image."
scm:
@@ -132,6 +181,7 @@
publishers:
- email:
recipients: '{receivers}'
+ - email-jenkins-admins-on-failure
- job-template:
name: '{project}-docker-build-push-monitor-{stream}'
diff --git a/jjb/releng/opnfv-lint.yml b/jjb/releng/opnfv-lint.yml
index 8c231c3e8..d253da027 100644
--- a/jjb/releng/opnfv-lint.yml
+++ b/jjb/releng/opnfv-lint.yml
@@ -53,7 +53,7 @@
comment-contains-value: 'reverify'
projects:
- project-compare-type: 'REG_EXP'
- project-pattern: 'functest|sdnvpn|qtip|daisy|sfc|escalator|releng|pharos|octopus|securedlab'
+ project-pattern: 'functest|sdnvpn|qtip|daisy|sfc|escalator|releng'
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
@@ -102,7 +102,7 @@
comment-contains-value: 'reverify'
projects:
- project-compare-type: 'REG_EXP'
- project-pattern: ''
+ project-pattern: 'octopus|releng-anteater|pharos'
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
diff --git a/jjb/releng/opnfv-repo-archiver.sh b/jjb/releng/opnfv-repo-archiver.sh
new file mode 100644
index 000000000..c9fdba379
--- /dev/null
+++ b/jjb/releng/opnfv-repo-archiver.sh
@@ -0,0 +1,66 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+set -o errexit
+set -o pipefail
+export PATH=$PATH:/usr/local/bin/
+
+DATE="$(date +%Y%m%d)"
+
+declare -a PROJECT_LIST
+EXCLUDE_PROJECTS="All-Projects|All-Users|securedlab"
+CLONE_PATH="$WORKSPACE/opnfv-repos"
+
+# Generate project list from gerrit
+PROJECT_LIST=($(ssh -p 29418 jenkins-ci@gerrit.opnfv.org gerrit ls-projects | egrep -v $EXCLUDE_PROJECTS))
+
+echo "Cloning all OPNFV repositories"
+echo "------------------------------"
+
+for PROJECT in "${PROJECT_LIST[@]}"; do
+ echo "> Cloning $PROJECT"
+ if [ ! -d "$CLONE_PATH/$PROJECT" ]; then
+ git clone "https://gerrit.opnfv.org/gerrit/$PROJECT.git" $CLONE_PATH/$PROJECT
+ else
+ pushd "$CLONE_PATH/$PROJECT" &>/dev/null
+ git pull -f
+ popd &> /dev/null
+ fi
+
+ # Don't license scan kernel or qemu in kvmfornfv
+ if [ "$PROJECT" == "kvmfornfv" ]; then
+ rm -rf "$CLONE_PATH/$PROJECT/{kernel,qemu}"
+ fi
+done
+
+echo "Finished cloning OPNFV repositories"
+echo "-----------------------------------"
+
+# Copy repos and clear git data
+echo "Copying repos to $WORKSPACE/opnfv-archive and removing .git files"
+cp -R $CLONE_PATH $WORKSPACE/opnfv-archive
+find $WORKSPACE/opnfv-archive -type d -iname '.git' -exec rm -rf {} +
+find $WORKSPACE/opnfv-archive -type f -iname '.git*' -exec rm -rf {} +
+
+# Create archive
+echo "Creating archive: opnfv-archive-$DATE.tar.gz"
+echo "--------------------------------------"
+cd $WORKSPACE
+tar -czf "opnfv-archive-$DATE.tar.gz" opnfv-archive && rm -rf opnfv-archive
+echo "Archiving Complete."
+
+echo "Uploading artifacts"
+echo "--------------------------------------"
+
+gsutil cp "$WORKSPACE/opnfv-archive-$DATE.tar.gz" \
+ "gs://opnfv-archive/opnfv-archive-$DATE.tar.gz" 2>&1
+
+rm -f opnfv-archive-$DATE.tar.gz
+
+echo "Finished"
diff --git a/jjb/releng/opnfv-utils.yml b/jjb/releng/opnfv-utils.yml
index 717bb3cbc..721b5dede 100644
--- a/jjb/releng/opnfv-utils.yml
+++ b/jjb/releng/opnfv-utils.yml
@@ -4,6 +4,9 @@
jobs:
- 'prune-docker-images'
+ - 'archive-repositories'
+ - 'check-status-of-slaves'
+
########################
# job templates
########################
@@ -37,3 +40,50 @@
triggers:
- timed: '@midnight'
+
+- job-template:
+ name: 'archive-repositories'
+
+ disabled: false
+
+ concurrent: true
+
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: Where to create the archive
+ default-slaves:
+ - master
+ allowed-multiselect: false
+ ignore-offline-nodes: true
+
+ triggers:
+ - timed: '@monthly'
+
+ builders:
+ - shell:
+ !include-raw-escape: opnfv-repo-archiver.sh
+
+- job-template:
+ name: 'check-status-of-slaves'
+
+ disabled: false
+
+ concurrent: true
+
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: The workspace should not be wiped, so this script runs directly on the Jenkins master
+ default-slaves:
+ - master
+ allowed-multiselect: false
+ ignore-offline-nodes: true
+
+ triggers:
+ - timed: '@midnight'
+
+ builders:
+ - shell: |
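+ # the monitoring script lives on the Jenkins master (see the node
+ # parameter above); sort the output so the slave report is easier to scan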
+ cd /opt/jenkins-ci/slavemonitor
+ bash slave-monitor-0.1.sh | sort
diff --git a/jjb/releng/releng-ci-jobs.yml b/jjb/releng/releng-ci-jobs.yml
index ecc87303f..fd58ef2b8 100644
--- a/jjb/releng/releng-ci-jobs.yml
+++ b/jjb/releng/releng-ci-jobs.yml
@@ -3,6 +3,7 @@
jobs:
- 'releng-verify-jjb'
- 'releng-merge-jjb'
+ - 'releng-comment-jjb'
- 'releng-generate-artifacts-api'
project: 'releng'
@@ -52,6 +53,28 @@
publishers:
- archive-artifacts:
artifacts: 'job_output/*'
+ - email-jenkins-admins-on-failure
+
+- job-template:
+ name: releng-comment-jjb
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: 'master'
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - experimental:
+ project: '{project}'
+ branch: 'master'
+ files: 'jjb/**'
+
+ builders:
+ - shell:
+ !include-raw-escape: generate-job-list.sh
+ - report-build-result-to-gerrit
- job-template:
name: 'releng-merge-jjb'
diff --git a/jjb/securedlab/check-jinja2.yml b/jjb/securedlab/check-jinja2.yml
index 1e85536e7..430ced560 100644
--- a/jjb/securedlab/check-jinja2.yml
+++ b/jjb/securedlab/check-jinja2.yml
@@ -70,6 +70,12 @@
pattern: '**/*.jinja2'
- compare-type: ANT
pattern: '**/*.yaml'
+ skip-vote:
+ successful: true
+ failed: true
+ unstable: true
+ notbuilt: true
+
builders:
- check-jinja
diff --git a/jjb/multisite/multisite-verify-jobs.yml b/jjb/sfc/sfc-project-jobs.yml
index 9431e0bac..379fe793a 100644
--- a/jjb/multisite/multisite-verify-jobs.yml
+++ b/jjb/sfc/sfc-project-jobs.yml
@@ -3,38 +3,32 @@
# They will only be enabled on request by projects!
###################################################
- project:
- name: multisite
+ name: sfc-project-jobs
- project: '{name}'
+ project: 'sfc'
jobs:
- - 'multisite-verify-{stream}'
-
+ - 'sfc-verify-{stream}'
stream:
- master:
branch: '{stream}'
gs-pathname: ''
disabled: false
- timed: '@midnight'
- danube:
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
- disabled: false
- timed: ''
+ disabled: true
- job-template:
- name: 'multisite-verify-{stream}'
+ name: 'sfc-verify-{stream}'
disabled: '{obj:disabled}'
- concurrent: true
-
parameters:
- project-parameter:
project: '{project}'
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
-
scm:
- git-scm-gerrit
@@ -63,7 +57,14 @@
pattern: 'docs/**|.gitignore'
builders:
- - shell: |
- #!/bin/bash
+ - sfc-unit-tests
+
+################################
+# job builders
+################################
- echo "Hello World"
+- builder:
+ name: sfc-unit-tests
+ builders:
+ - shell: |
+ cd $WORKSPACE && yamllint $(git ls-tree -r HEAD --name-only | egrep 'yml$|yaml$')
diff --git a/jjb/snaps/snaps.yml b/jjb/snaps/snaps-verify-jobs.yml
index 50b7c3070..01ea3e41e 100644
--- a/jjb/snaps/snaps.yml
+++ b/jjb/snaps/snaps-verify-jobs.yml
@@ -15,21 +15,27 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
- - danube:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- job-template:
name: 'snaps-verify-{stream}'
disabled: '{obj:disabled}'
+ concurrent: false
+
parameters:
- project-parameter:
project: '{project}'
branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
+ - string:
+ name: DEPLOYMENT_HOST_IP
+ default: 192.168.122.2
+ description: 'IP of the deployment node'
+ - string:
+ name: CONTROLLER_IP
+ default: 192.168.122.3
+ description: 'IP of the controller node'
+ - 'intel-virtual10-defaults'
scm:
- git-scm-gerrit
@@ -60,4 +66,7 @@
builders:
- shell: |
- echo "Nothing to verify!"
+ #!/bin/bash
+
+ cd $WORKSPACE/ci
+ ./run_tests.sh $DEPLOYMENT_HOST_IP $CONTROLLER_IP
diff --git a/jjb/storperf/storperf-daily-jobs.yml b/jjb/storperf/storperf-daily-jobs.yml
new file mode 100644
index 000000000..e849e29e1
--- /dev/null
+++ b/jjb/storperf/storperf-daily-jobs.yml
@@ -0,0 +1,175 @@
+###################################
+# job configuration for storperf
+###################################
+- project:
+ name: storperf-daily
+
+ project: storperf
+
+#--------------------------------
+# BRANCH ANCHORS
+#--------------------------------
+ master: &master
+ stream: master
+ branch: '{stream}'
+ gs-pathname: ''
+ docker-tag: 'latest'
+#--------------------------------
+# POD, INSTALLER, AND BRANCH MAPPING
+#--------------------------------
+# Installers using labels
+# CI PODs
+# This section should only contain the installers
+# that have been switched using labels for slaves
+#--------------------------------
+ pod:
+## fuel CI PODs
+# - baremetal:
+# slave-label: fuel-baremetal
+# installer: fuel
+# <<: *master
+# - virtual:
+# slave-label: fuel-virtual
+# installer: fuel
+# <<: *master
+## joid CI PODs
+# - baremetal:
+# slave-label: joid-baremetal
+# installer: joid
+# <<: *master
+# - virtual:
+# slave-label: joid-virtual
+# installer: joid
+# <<: *master
+## compass CI PODs
+# - baremetal:
+# slave-label: compass-baremetal
+# installer: compass
+# <<: *master
+# - virtual:
+# slave-label: compass-virtual
+# installer: compass
+# <<: *master
+## apex CI PODs
+# - virtual:
+# slave-label: apex-virtual-master
+# installer: apex
+# <<: *master
+ - baremetal:
+ slave-label: apex-baremetal-master
+ installer: apex
+ <<: *master
+## armband CI PODs
+# - armband-baremetal:
+# slave-label: armband-baremetal
+# installer: fuel
+# <<: *master
+# - armband-virtual:
+# slave-label: armband-virtual
+# installer: fuel
+# <<: *master
+## daisy CI PODs
+# - baremetal:
+# slave-label: daisy-baremetal
+# installer: daisy
+# <<: *master
+# - virtual:
+# slave-label: daisy-virtual
+# installer: daisy
+# <<: *master
+
+ jobs:
+ - 'storperf-{installer}-{pod}-daily-{stream}'
+
+################################
+# job template
+################################
+- job-template:
+ name: 'storperf-{installer}-{pod}-daily-{stream}'
+
+ concurrent: true
+
+ properties:
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-per-node: 1
+ option: 'project'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER Scenario: $DEPLOY_SCENARIO'
+ - timeout:
+ timeout: '30'
+ abort: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - '{installer}-defaults'
+ - '{slave-label}-defaults'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-odl_l2-nofeature-ha'
+ - string:
+ name: DOCKER_TAG
+ default: '{docker-tag}'
+ description: 'Tag to pull docker image'
+ - string:
+ name: CLEAN_DOCKER_IMAGES
+ default: 'false'
+ description: 'Remove downloaded docker images (opnfv/storperf*:*)'
+ - string:
+ name: GS_PATHNAME
+ default: '{gs-pathname}'
+ description: "Version directory where the opnfv documents will be stored in gs repository"
+ - string:
+ name: DISK_TYPE
+ default: 'HDD'
+ description: 'The type of hard disk that Cinder uses'
+ - string:
+ name: VOLUME_SIZE
+ default: '2'
+ description: 'Size of Cinder volume (in GB)'
+ - string:
+ name: WORKLOADS
+ default: 'rw'
+ description: 'Workloads to run'
+ - string:
+ name: BLOCK_SIZES
+ default: '16384'
+ description: 'Block sizes for VM I/O operations'
+ - string:
+ name: QUEUE_DEPTHS
+ default: '4'
+ description: 'Number of simultaneous I/O operations to keep active'
+ - string:
+ name: STEADY_STATE_SAMPLES
+ default: '10'
+ description: 'Number of samples to use (1 per minute) to measure steady state'
+ - choice:
+ name: TEST_CASE
+ choices:
+ - 'snia_steady_state'
+ description: 'The test case to run'
+
+ scm:
+ - git-scm
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - 'storperf-daily-builder'
+
+########################
+# builder macros
+########################
+- builder:
+ name: storperf-daily-builder
+ builders:
+ - shell: |
+ #!/bin/bash
+
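+ # run the StorPerf daily suite from the checked-out project repository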
+ cd $WORKSPACE
+ ./ci/daily.sh
diff --git a/jjb/storperf/storperf-verify-jobs.yml b/jjb/storperf/storperf-verify-jobs.yml
new file mode 100644
index 000000000..f99ceeacb
--- /dev/null
+++ b/jjb/storperf/storperf-verify-jobs.yml
@@ -0,0 +1,190 @@
+- project:
+ name: storperf-verify
+
+ project: 'storperf'
+
+#--------------------------------
+# branches
+#--------------------------------
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+ docker-tag: 'latest'
+#--------------------------------
+# patch verification phases
+#--------------------------------
+ phase:
+ - 'unit-test':
+ slave-label: 'opnfv-build-ubuntu'
+ - 'build-x86_64':
+ slave-label: 'opnfv-build-ubuntu'
+ - 'build-aarch64':
+ slave-label: 'opnfv-build-ubuntu-arm'
+#--------------------------------
+# jobs
+#--------------------------------
+ jobs:
+ - 'storperf-verify-{stream}'
+ - 'storperf-verify-{phase}-{stream}'
+#--------------------------------
+# job templates
+#--------------------------------
+- job-template:
+ name: 'storperf-verify-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ project-type: 'multijob'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-defaults'
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+
+ builders:
+ - shell: |
+ #!/bin/bash
+
+ # nothing to do here; the actual work is done in the phase jobs
+ echo "Triggering phase jobs!"
+ - multijob:
+ name: 'storperf-build-and-unittest'
+ execution-type: PARALLEL
+ projects:
+ - name: 'storperf-verify-unit-test-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ git-revision: true
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: false
+ - name: 'storperf-verify-build-x86_64-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ ARCH=x86_64
+ git-revision: true
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: false
+ - name: 'storperf-verify-build-aarch64-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ ARCH=aarch64
+ git-revision: true
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: false
+
+- job-template:
+ name: 'storperf-verify-{phase}-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ wrappers:
+ - ssh-agent-wrapper
+ - build-timeout:
+ timeout: 60
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - '{slave-label}-defaults'
+
+ scm:
+ - git-scm-gerrit
+
+ builders:
+ - 'storperf-verify-{phase}-builders-macro'
+
+ publishers:
+ - 'storperf-verify-{phase}-publishers-macro'
+#--------------------------------
+# builder macros
+#--------------------------------
+- builder:
+ name: 'storperf-verify-unit-test-builders-macro'
+ builders:
+ - shell: |
+ $WORKSPACE/ci/verify.sh
+- builder:
+ name: 'storperf-verify-build-x86_64-builders-macro'
+ builders:
+ - shell: |
+ $WORKSPACE/ci/verify-build.sh
+- builder:
+ name: 'storperf-verify-build-aarch64-builders-macro'
+ builders:
+ - shell: |
+ $WORKSPACE/ci/verify-build.sh
+#--------------------------------
+# publisher macros
+#--------------------------------
+- publisher:
+ name: 'storperf-verify-unit-test-publishers-macro'
+ publishers:
+ - junit:
+ results: nosetests.xml
+ - cobertura:
+ report-file: "coverage.xml"
+ only-stable: "true"
+ health-auto-update: "true"
+ stability-auto-update: "true"
+ zoom-coverage-chart: "true"
+ targets:
+ - files:
+ healthy: 10
+ unhealthy: 20
+ failing: 30
+ - method:
+ healthy: 50
+ unhealthy: 40
+ failing: 30
+ - email-jenkins-admins-on-failure
+- publisher:
+ name: 'storperf-verify-build-x86_64-publishers-macro'
+ publishers:
+ - email-jenkins-admins-on-failure
+- publisher:
+ name: 'storperf-verify-build-aarch64-publishers-macro'
+ publishers:
+ - email-jenkins-admins-on-failure
diff --git a/jjb/storperf/storperf.yml b/jjb/storperf/storperf.yml
index 13186a1ad..307becfed 100644
--- a/jjb/storperf/storperf.yml
+++ b/jjb/storperf/storperf.yml
@@ -4,9 +4,7 @@
project: '{name}'
jobs:
- - 'storperf-verify-{stream}'
- 'storperf-merge-{stream}'
- - 'storperf-daily-{stream}'
stream:
- master:
@@ -21,68 +19,6 @@
docker-tag: 'stable'
- job-template:
- name: 'storperf-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- node: opnfv-build-ubuntu
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from Global Jenkins configuration in case if the stuff is done on none-LF HW."
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- builders:
- - shell: |
- $WORKSPACE/ci/verify.sh
-
- publishers:
- - junit:
- results: nosetests.xml
- - cobertura:
- report-file: "coverage.xml"
- only-stable: "true"
- health-auto-update: "true"
- stability-auto-update: "true"
- zoom-coverage-chart: "true"
- targets:
- - files:
- healthy: 10
- unhealthy: 20
- failing: 30
- - method:
- healthy: 50
- unhealthy: 40
- failing: 30
-
-- job-template:
name: 'storperf-merge-{stream}'
node: opnfv-build-ubuntu
@@ -135,75 +71,5 @@
healthy: 50
unhealthy: 40
failing: 30
-
-- job-template:
- name: 'storperf-daily-{stream}'
-
- # Job template for daily builders
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'intel-pod9-defaults'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-noha'
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: 'Tag to pull docker image'
- - choice:
- name: DISK_TYPE
- choices:
- - 'SSD'
- - 'HDD'
- default: 'HDD'
- description: 'The type of hard disk that Cinder uses'
- - string:
- name: AGENT_COUNT
- description: 'The number of slave agents to start. Defaults to the cinder node count'
- - string:
- name: VOLUME_SIZE
- default: '4'
- description: 'Size of Cinder volume (in GB)'
- - string:
- name: WORKLOADS
- default: 'wr,rr,rw'
- description: 'Workloads to run'
- - string:
- name: BLOCK_SIZES
- default: '2048,16384'
- description: 'Block sizes for VM I/O operations'
- - string:
- name: QUEUE_DEPTHS
- default: '1,4'
- description: 'Number of simultaneous I/O operations to keep active'
- - string:
- name: STEADY_STATE_SAMPLES
- default: '10'
- description: 'Number of samples to use (1 per minute) to measure steady state'
- - string:
- name: DEADLINE
- description: 'Maximum run time in minutes if steady state cannot be found. Defaults to 3 times steady state samples'
- - choice:
- name: TEST_CASE
- choices:
- - 'snia_steady_state'
- description: 'The test case to run'
-
- scm:
- - git-scm
-
- triggers:
- - timed: '0 22 * * *'
-
- builders:
- - shell: |
- $WORKSPACE/ci/daily.sh
+ - email-jenkins-admins-on-failure
diff --git a/jjb/test-requirements.txt b/jjb/test-requirements.txt
deleted file mode 100644
index 6b700dcfc..000000000
--- a/jjb/test-requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
-jenkins-job-builder
diff --git a/jjb/xci/bifrost-cleanup-job.yml b/jjb/xci/bifrost-cleanup-job.yml
index d5a444d09..6f3f51a6d 100644
--- a/jjb/xci/bifrost-cleanup-job.yml
+++ b/jjb/xci/bifrost-cleanup-job.yml
@@ -15,8 +15,8 @@
project-repo: 'https://git.openstack.org/openstack/bifrost'
clone-location: '/opt/bifrost'
- 'opnfv':
- project-repo: 'https://gerrit.opnfv.org/gerrit/releng'
- clone-location: '/opt/releng'
+ project-repo: 'https://gerrit.opnfv.org/gerrit/releng-xci'
+ clone-location: '/opt/releng-xci'
#--------------------------------
# jobs
@@ -83,6 +83,7 @@
publishers:
- email:
recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com zhang.jun3g@zte.com.cn
+ - email-jenkins-admins-on-failure
#--------------------------------
# trigger macros
#--------------------------------
@@ -131,11 +132,11 @@
silent-start: true
projects:
- project-compare-type: 'ANT'
- project-pattern: 'releng'
+ project-pattern: 'releng-xci'
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
file-paths:
- compare-type: ANT
- pattern: 'prototypes/bifrost/**'
+ pattern: 'bifrost/**'
readable-message: true
diff --git a/jjb/xci/bifrost-periodic-jobs.yml b/jjb/xci/bifrost-periodic-jobs.yml
index 9773cfd9e..a1892fc5f 100644
--- a/jjb/xci/bifrost-periodic-jobs.yml
+++ b/jjb/xci/bifrost-periodic-jobs.yml
@@ -1,5 +1,5 @@
- project:
- project: 'releng'
+ project: 'releng-xci'
name: 'bifrost-periodic'
#--------------------------------
@@ -49,7 +49,7 @@
- 'suse':
disabled: true
slave-label: xci-suse-virtual
- dib-os-release: '42.2'
+ dib-os-release: '42.3'
dib-os-element: 'opensuse-minimal'
dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
extra-dib-elements: 'openssh-server'
diff --git a/jjb/xci/bifrost-provision.sh b/jjb/xci/bifrost-provision.sh
index b37da9059..4d646a676 100755
--- a/jjb/xci/bifrost-provision.sh
+++ b/jjb/xci/bifrost-provision.sh
@@ -38,7 +38,7 @@ if [[ ! "$DISTRO" =~ (xenial|centos7|suse) ]]; then
fi
# remove previously cloned repos
-sudo /bin/rm -rf /opt/bifrost /opt/openstack-ansible /opt/releng /opt/functest
+sudo /bin/rm -rf /opt/bifrost /opt/openstack-ansible /opt/releng-xci /opt/functest
# Fix up permissions
fix_ownership
@@ -65,16 +65,16 @@ cd /opt/bifrost && sudo git checkout --quiet $OPENSTACK_BIFROST_VERSION
echo "xci: using bifrost commit"
git show --oneline -s --pretty=format:'%h - %s (%cr) <%an>'
-sudo git clone --quiet https://gerrit.opnfv.org/gerrit/releng /opt/releng
-cd /opt/releng && sudo git checkout --quiet $OPNFV_RELENG_VERSION
+sudo git clone --quiet https://gerrit.opnfv.org/gerrit/releng-xci /opt/releng-xci
+cd /opt/releng-xci && sudo git checkout --quiet $OPNFV_RELENG_VERSION
echo "xci: using releng commit"
git show --oneline -s --pretty=format:'%h - %s (%cr) <%an>'
# source flavor vars
-source "$WORKSPACE/prototypes/xci/config/${XCI_FLAVOR}-vars"
+source "$WORKSPACE/xci/config/${XCI_FLAVOR}-vars"
# combine opnfv and upstream scripts/playbooks
-sudo /bin/cp -rf /opt/releng/prototypes/bifrost/* /opt/bifrost/
+sudo /bin/cp -rf /opt/releng-xci/bifrost/* /opt/bifrost/
# cleanup remnants of previous deployment
cd /opt/bifrost
diff --git a/jjb/xci/bifrost-verify-jobs.yml b/jjb/xci/bifrost-verify-jobs.yml
index 319f8eb28..af51c2b79 100644
--- a/jjb/xci/bifrost-verify-jobs.yml
+++ b/jjb/xci/bifrost-verify-jobs.yml
@@ -14,8 +14,8 @@
project-repo: 'https://git.openstack.org/openstack/bifrost'
clone-location: '$WORKSPACE/bifrost'
- 'opnfv':
- project-repo: 'https://gerrit.opnfv.org/gerrit/releng'
- clone-location: '$WORKSPACE/releng'
+ project-repo: 'https://gerrit.opnfv.org/gerrit/releng-xci'
+ clone-location: '$WORKSPACE/releng-xci'
#--------------------------------
# distros
#--------------------------------
@@ -29,12 +29,12 @@
- 'centos7':
disabled: false
dib-os-release: '7'
- dib-os-element: 'centos7'
+ dib-os-element: 'centos-minimal'
dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
extra-dib-elements: 'openssh-server'
- 'suse':
disabled: false
- dib-os-release: '42.2'
+ dib-os-release: '42.3'
dib-os-element: 'opensuse-minimal'
dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
extra-dib-elements: 'openssh-server'
@@ -165,10 +165,13 @@
wrappers:
- fix-workspace-permissions
+ - build-timeout:
+ timeout: 90
publishers:
- email:
recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com julienjut@gmail.com
+ - email-jenkins-admins-on-failure
#--------------------------------
# trigger macros
#--------------------------------
@@ -219,13 +222,13 @@
custom-url: '* $JOB_NAME $BIFROST_LOG_URL/index.html'
projects:
- project-compare-type: 'ANT'
- project-pattern: 'releng'
+ project-pattern: 'releng-xci'
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
file-paths:
- compare-type: ANT
- pattern: 'prototypes/bifrost/**'
+ pattern: 'bifrost/**'
readable-message: true
#---------------------------
diff --git a/jjb/xci/bifrost-verify.sh b/jjb/xci/bifrost-verify.sh
index b522b8969..03d9afccd 100755
--- a/jjb/xci/bifrost-verify.sh
+++ b/jjb/xci/bifrost-verify.sh
@@ -57,16 +57,17 @@ EOF
</html>
EOF
+ # Upload landing page
+ echo "Uploading the landing page"
+ gsutil -q cp ${WORKSPACE}/index.html ${BIFROST_GS_URL}/index.html
+ rm -f ${WORKSPACE}/index.html
+
# Finally, download and upload the entire build log so we can retain
# as much build information as possible
echo "Uploading the final console output"
curl -s -L ${BIFROST_CONSOLE_LOG} > ${WORKSPACE}/build_log.txt
gsutil -q cp -Z ${WORKSPACE}/build_log.txt ${BIFROST_GS_URL}/build_log.txt
- rm ${WORKSPACE}/build_log.txt
-
- # Upload landing page
- gsutil -q cp ${WORKSPACE}/index.html ${BIFROST_GS_URL}/index.html
- rm ${WORKSPACE}/index.html
+ rm -f ${WORKSPACE}/build_log.txt
}
function fix_ownership() {
@@ -83,6 +84,9 @@ function fix_ownership() {
function cleanup_and_upload() {
original_exit=$?
+ echo "Job exit code: $original_exit"
+ # Turn off errexit
+ set +o errexit
fix_ownership
upload_logs
exit $original_exit
@@ -95,21 +99,21 @@ if [[ ! "$DISTRO" =~ (xenial|centos7|suse) ]]; then
fi
# remove previously cloned repos
-/bin/rm -rf $WORKSPACE/bifrost $WORKSPACE/releng
+/bin/rm -rf $WORKSPACE/bifrost $WORKSPACE/releng-xci
# Fix up permissions
fix_ownership
# clone all the repos first and checkout the patch afterwards
git clone https://git.openstack.org/openstack/bifrost $WORKSPACE/bifrost
-git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng
+git clone https://gerrit.opnfv.org/gerrit/releng-xci $WORKSPACE/releng-xci
# checkout the patch
cd $CLONE_LOCATION
git fetch $PROJECT_REPO $GERRIT_REFSPEC && sudo git checkout FETCH_HEAD
# combine opnfv and upstream scripts/playbooks
-/bin/cp -rf $WORKSPACE/releng/prototypes/bifrost/* $WORKSPACE/bifrost/
+/bin/cp -rf $WORKSPACE/releng-xci/bifrost/* $WORKSPACE/bifrost/
# cleanup remnants of previous deployment
cd $WORKSPACE/bifrost
diff --git a/jjb/xci/osa-periodic-jobs.yml b/jjb/xci/osa-periodic-jobs.yml
index 722b077ac..7311baab4 100644
--- a/jjb/xci/osa-periodic-jobs.yml
+++ b/jjb/xci/osa-periodic-jobs.yml
@@ -1,149 +1,227 @@
- project:
- project: 'releng'
+ name: 'opnfv-osa-periodic'
- name: 'os-periodic'
+ project: 'releng-xci'
#--------------------------------
-# Branch Anchors
+# branches
#--------------------------------
-# the versions stated here default to branches which then later
-# on used for checking out the branches, pulling in head of the branch.
- master: &master
- stream: master
- openstack-osa-version: '{stream}'
- opnfv-releng-version: 'master'
- gs-pathname: ''
- ocata: &ocata
- stream: ocata
- openstack-osa-version: 'stable/{stream}'
- opnfv-releng-version: 'master'
- gs-pathname: '/{stream}'
+ stream:
+ - master:
+ branch: '{stream}'
#--------------------------------
-# XCI PODs
-#--------------------------------
- pod:
- - virtual:
- <<: *master
- - virtual:
- <<: *ocata
-#--------------------------------
-# Supported Distros
+# distros
#--------------------------------
distro:
- 'xenial':
disabled: false
- slave-label: xci-xenial-virtual
- dib-os-release: 'xenial'
- dib-os-element: 'ubuntu-minimal'
- dib-os-packages: 'vlan,vim,less,bridge-utils,sudo,language-pack-en,iputils-ping,rsyslog,curl,python,debootstrap,ifenslave,ifenslave-2.6,lsof,lvm2,tcpdump,nfs-kernel-server,chrony,iptables'
- extra-dib-elements: 'openssh-server'
- 'centos7':
disabled: true
- slave-label: xci-centos7-virtual
- dib-os-release: '7'
- dib-os-element: 'centos7'
- dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
- extra-dib-elements: 'openssh-server'
- 'suse':
disabled: true
- slave-label: xci-suse-virtual
- dib-os-release: '42.2'
- dib-os-element: 'opensuse-minimal'
- dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
- extra-dib-elements: 'openssh-server'
-
+#--------------------------------
+# type
+#--------------------------------
+ type:
+ - virtual
+#--------------------------------
+# phases
+#--------------------------------
+ phase:
+ - 'deploy'
+ - 'healthcheck'
#--------------------------------
# jobs
#--------------------------------
jobs:
- - 'osa-deploy-{pod}-{distro}-periodic-{stream}'
-
+ - 'osa-periodic-{distro}-{type}-{stream}'
+ - 'osa-periodic-{phase}-{type}-{stream}'
#--------------------------------
# job templates
#--------------------------------
- job-template:
- name: 'osa-deploy-{pod}-{distro}-periodic-{stream}'
+ name: 'osa-periodic-{distro}-{type}-{stream}'
+
+ project-type: multijob
disabled: '{obj:disabled}'
concurrent: false
properties:
+ - logrotate-default
- build-blocker:
use-build-blocker: true
blocking-jobs:
- - '^xci-os.*'
- - '^xci-deploy.*'
- - '^xci-functest.*'
- - '^bifrost-.*periodic.*'
- - '^osa-.*periodic.*'
+ - 'xci-verify-.*'
+ - 'bifrost-verify-.*'
+ - 'bifrost-periodic-.*'
+ - 'osa-verify-.*'
+ - 'osa-periodic-.*'
block-level: 'NODE'
+
+ wrappers:
+ - ssh-agent-wrapper
+ - build-timeout:
+ timeout: 240
+ - fix-workspace-permissions
+
+ scm:
+ - git-scm-osa
+
+ triggers:
+ - pollscm:
+ cron: "@midnight"
+ ignore-post-commit-hooks: True
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - label:
+ name: SLAVE_LABEL
+ default: 'xci-virtual-{distro}'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - multijob:
+ name: deploy
+ condition: SUCCESSFUL
+ projects:
+ - name: 'osa-periodic-deploy-{type}-{stream}'
+ current-parameters: true
+ predefined-parameters: |
+ DISTRO={distro}
+ DEPLOY_SCENARIO=os-nosdn-nofeature-noha
+ git-revision: true
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: healthcheck
+ condition: SUCCESSFUL
+ projects:
+ - name: 'osa-periodic-healthcheck-{type}-{stream}'
+ current-parameters: true
+ predefined-parameters: |
+ DISTRO={distro}
+ DEPLOY_SCENARIO=os-nosdn-nofeature-noha
+ FUNCTEST_SUITE_NAME=healthcheck
+ node-parameters: true
+ kill-phase-on: NEVER
+ abort-all-job: false
+
+- job-template:
+ name: 'osa-periodic-{phase}-{type}-{stream}'
+
+ disabled: false
+
+ concurrent: true
+
+ properties:
- logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'xci-verify-deploy-.*'
+ - 'xci-verify-healthcheck-.*'
+ - 'bifrost-verify-.*'
+ - 'bifrost-periodic-.*'
+ - 'osa-verify-deploy-.*'
+ - 'osa-verify-healthcheck-.*'
+ - 'osa-periodic-deploy-.*'
+ - 'osa-periodic-healthcheck-.*'
+ block-level: 'NODE'
parameters:
- project-parameter:
project: '{project}'
- branch: '{opnfv-releng-version}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- - string:
- name: XCI_FLAVOR
- default: 'ha'
+ branch: '{branch}'
+ - label:
+ name: SLAVE_LABEL
+ default: 'xci-virtual-{distro}'
- string:
name: OPENSTACK_OSA_VERSION
- default: '{openstack-osa-version}'
- - string:
- name: OPNFV_RELENG_VERSION
- default: '{opnfv-releng-version}'
+ default: 'master'
- string:
name: DISTRO
- default: '{distro}'
+ default: 'xenial'
- string:
- name: DIB_OS_RELEASE
- default: '{dib-os-release}'
+ name: DEPLOY_SCENARIO
+ default: 'os-nosdn-nofeature-noha'
- string:
- name: DIB_OS_ELEMENT
- default: '{dib-os-element}'
+ name: XCI_FLAVOR
+ default: 'mini'
- string:
- name: DIB_OS_PACKAGES
- default: '{dib-os-packages}'
+ name: XCI_LOOP
+ default: 'periodic'
- string:
- name: EXTRA_DIB_ELEMENTS
- default: '{extra-dib-elements}'
+ name: OPNFV_RELENG_DEV_PATH
+ default: $WORKSPACE/releng-xci
- string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - label:
- name: SLAVE_LABEL
- default: '{slave-label}'
+ name: FUNCTEST_SUITE_NAME
+ default: 'healthcheck'
- string:
name: ANSIBLE_VERBOSITY
- default: ''
+ default: '-vvvv'
- string:
- name: XCI_LOOP
- default: 'periodic'
-
- wrappers:
- - fix-workspace-permissions
+ name: FORCE_MASTER
+ default: 'true'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
scm:
- - git-scm
+ - git-scm-osa
- # trigger is disabled until we know which jobs we will have
- # and adjust stuff accordingly
- triggers:
- - timed: '' # '@midnight'
+ wrappers:
+ - ssh-agent-wrapper
+ - build-timeout:
+ timeout: 240
+ - fix-workspace-permissions
builders:
- description-setter:
- description: "Built on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - 'osa-deploy-builder'
+ description: "Built on $NODE_NAME"
+ - 'osa-periodic-{phase}-macro'
-#---------------------------
+#--------------------------------
# builder macros
-#---------------------------
+#--------------------------------
- builder:
- name: osa-deploy-builder
+ name: 'osa-periodic-deploy-macro'
builders:
- - shell:
- !include-raw: ./xci-deploy.sh
+ - shell: |
+ #!/bin/bash
+
+ # here we will
+ # - clone releng-xci repo as the jobs are running against openstack gerrit
+ # and we need to clone releng-xci ourselves to $OPNFV_RELENG_DEV_PATH
+ # - run sources-branch-updater.sh from osa to update/pin the role versions
+ # at the time this job gets triggered against osa master, in case the
+ # deployment succeeds and we decide to bump the version used by xci
+ # - copy generated role versions into $OPNFV_RELENG_DEV_PATH/xci/file
+ # - start the deployment by executing xci-deploy.sh as usual
+ #
+ # we might need to pin the versions of the openstack services as well.
+
+ echo "Hello World!"
+
+- builder:
+ name: 'osa-periodic-healthcheck-macro'
+ builders:
+ - shell: |
+ #!/bin/bash
+
+ echo "Hello World!"
+#--------------------------------
+# scm macro
+#--------------------------------
+- scm:
+ name: git-scm-osa
+ scm:
+ - git:
+ url: https://review.openstack.org/p/openstack/openstack-ansible.git
+ branches:
+ - master
+ timeout: 15
diff --git a/jjb/xci/xci-daily-jobs.yml b/jjb/xci/xci-daily-jobs.yml
index 64e13d3eb..0835e6709 100644
--- a/jjb/xci/xci-daily-jobs.yml
+++ b/jjb/xci/xci-daily-jobs.yml
@@ -6,7 +6,7 @@
# are checked out based on what is configured.
#--------------------------------
- project:
- project: 'releng'
+ project: 'releng-xci'
name: 'xci-daily'
#--------------------------------
@@ -16,10 +16,6 @@
stream: master
opnfv-releng-version: master
gs-pathname: ''
- ocata: &ocata
- stream: ocata
- opnfv-releng-version: master
- gs-pathname: '/{stream}'
#--------------------------------
# Scenarios
#--------------------------------
@@ -30,14 +26,18 @@
- 'os-nosdn-nofeature-noha':
auto-trigger-name: 'daily-trigger-disabled'
xci-flavor: 'noha'
+ - 'os-odl-sfc-ha':
+ auto-trigger-name: 'daily-trigger-disabled'
+ xci-flavor: 'ha'
+ - 'os-odl-sfc-noha':
+ auto-trigger-name: 'daily-trigger-disabled'
+ xci-flavor: 'noha'
#--------------------------------
# XCI PODs
#--------------------------------
pod:
- virtual:
<<: *master
- - virtual:
- <<: *ocata
#--------------------------------
# Supported Distros
#--------------------------------
@@ -59,7 +59,7 @@
- 'suse':
disabled: true
slave-label: xci-suse-virtual
- dib-os-release: '42.2'
+ dib-os-release: '42.3'
dib-os-element: 'opensuse-minimal'
dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
extra-dib-elements: 'openssh-server'
@@ -148,6 +148,7 @@
publishers:
- email:
recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com julienjut@gmail.com
+ - email-jenkins-admins-on-failure
- job-template:
name: 'xci-{phase}-{pod}-{distro}-daily-{stream}'
@@ -213,7 +214,13 @@
- string:
name: XCI_LOOP
default: 'daily'
-
+ - string:
+ name: INSTALLER_TYPE
+ default: 'osa'
+ - string:
+ name: FUNCTEST_SUITE_NAME
+ default: 'daily'
+ description: "Daily suite name to run"
builders:
- description-setter:
description: "Built on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
@@ -231,5 +238,16 @@
- builder:
name: xci-functest-builder
builders:
- - shell:
- !include-raw: ./xci-functest.sh
+ - shell: |
+ #!/bin/bash
+
+ echo "Hello World!"
+
+# this will be enabled once the xci is prepared
+#- builder:
+# name: xci-functest-builder
+# builders:
+# - shell:
+# !include-raw:
+# - ../../utils/fetch_os_creds.sh
+# - ../functest/functest-alpine.sh
diff --git a/jjb/xci/xci-deploy.sh b/jjb/xci/xci-deploy.sh
index 8ad637805..211d282d3 100755
--- a/jjb/xci/xci-deploy.sh
+++ b/jjb/xci/xci-deploy.sh
@@ -11,7 +11,7 @@ set -o errexit
set -o nounset
set -o pipefail
-cd $WORKSPACE/prototypes/xci
+cd $WORKSPACE/xci
# for daily jobs, we want to use working versions
# for periodic jobs, we will use whatever is set in the job, probably master
@@ -53,7 +53,7 @@ if [[ "$XCI_LOOP" == "periodic" && "$OPENSTACK_OSA_VERSION" == "master" ]]; then
fi
# proceed with the deployment
-cd $WORKSPACE/prototypes/xci
+cd $WORKSPACE/xci
./xci-deploy.sh
if [[ "$JOB_NAME" =~ "periodic" && "$OPENSTACK_OSA_VERSION" == "master" ]]; then
diff --git a/jjb/xci/xci-functest.sh b/jjb/xci/xci-functest.sh
deleted file mode 100755
index 0f58dfefc..000000000
--- a/jjb/xci/xci-functest.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-echo "Functional testing with functest"
diff --git a/jjb/xci/xci-verify-jobs.yml b/jjb/xci/xci-verify-jobs.yml
new file mode 100644
index 000000000..2cdecb208
--- /dev/null
+++ b/jjb/xci/xci-verify-jobs.yml
@@ -0,0 +1,236 @@
+- project:
+ name: 'opnfv-xci-verify'
+
+ project: releng-xci
+#--------------------------------
+# branches
+#--------------------------------
+ stream:
+ - master:
+ branch: '{stream}'
+#--------------------------------
+# distros
+#--------------------------------
+ distro:
+ - 'xenial':
+ disabled: false
+ - 'centos7':
+ disabled: true
+ - 'suse':
+ disabled: true
+#--------------------------------
+# type
+#--------------------------------
+ type:
+ - virtual
+#--------------------------------
+# patch verification phases
+#--------------------------------
+ phase:
+ - 'deploy'
+ - 'healthcheck'
+#--------------------------------
+# jobs
+#--------------------------------
+ jobs:
+ - 'xci-verify-{distro}-{type}-{stream}'
+ - 'xci-verify-{phase}-{type}-{stream}'
+#--------------------------------
+# job templates
+#--------------------------------
+- job-template:
+ name: 'xci-verify-{distro}-{type}-{stream}'
+
+ project-type: multijob
+
+ disabled: '{obj:disabled}'
+
+ concurrent: true
+
+ properties:
+ - logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'xci-verify-.*'
+ - 'bifrost-verify-.*'
+ - 'bifrost-periodic-.*'
+ - 'osa-verify-.*'
+ - 'osa-periodic-.*'
+ block-level: 'NODE'
+
+ wrappers:
+ - ssh-agent-wrapper
+ - build-timeout:
+ timeout: 240
+ - fix-workspace-permissions
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'true'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ disable-strict-forbidden-file-verification: 'false'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'bifrost/**'
+ - compare-type: ANT
+ pattern: 'prototypes/**'
+ - compare-type: ANT
+ pattern: 'upstream/**'
+ - compare-type: ANT
+ pattern: '**/README.rst'
+ readable-message: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - label:
+ name: SLAVE_LABEL
+ default: 'xci-virtual-{distro}'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - multijob:
+ name: deploy
+ condition: SUCCESSFUL
+ projects:
+ - name: 'xci-verify-deploy-{type}-{stream}'
+ current-parameters: true
+ predefined-parameters: |
+ DISTRO={distro}
+ DEPLOY_SCENARIO=os-nosdn-nofeature-noha
+ node-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: healthcheck
+ condition: SUCCESSFUL
+ projects:
+ - name: 'xci-verify-healthcheck-{type}-{stream}'
+ current-parameters: true
+ predefined-parameters: |
+ DISTRO={distro}
+ DEPLOY_SCENARIO=os-nosdn-nofeature-noha
+ FUNCTEST_SUITE_NAME=healthcheck
+ node-parameters: true
+ kill-phase-on: NEVER
+ abort-all-job: true
+
+- job-template:
+ name: 'xci-verify-{phase}-{type}-{stream}'
+
+ disabled: false
+
+ concurrent: true
+
+ properties:
+ - logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'xci-verify-deploy-.*'
+ - 'xci-verify-healthcheck-.*'
+ - 'bifrost-verify-.*'
+ - 'bifrost-periodic-.*'
+ - 'osa-verify-.*'
+ - 'osa-periodic-.*'
+ block-level: 'NODE'
+
+ parameters:
+ - string:
+ name: DISTRO
+ default: 'xenial'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-nosdn-nofeature-noha'
+ - string:
+ name: FUNCTEST_SUITE_NAME
+ default: 'healthcheck'
+ - string:
+ name: XCI_FLAVOR
+ default: 'mini'
+ - string:
+ name: OPNFV_RELENG_DEV_PATH
+ default: $WORKSPACE/
+ - string:
+ name: ANSIBLE_VERBOSITY
+ default: '-vvvv'
+ - string:
+ name: INSTALLER_TYPE
+ default: 'osa'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+
+ wrappers:
+ - ssh-agent-wrapper
+ - build-timeout:
+ timeout: 240
+ - fix-workspace-permissions
+
+ scm:
+ - git-scm-gerrit
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - 'xci-verify-{phase}-macro'
+
+#--------------------------------
+# builder macros
+#--------------------------------
+- builder:
+ name: 'xci-verify-deploy-macro'
+ builders:
+ - shell: |
+ #!/bin/bash
+
+ # for some reason, the PATH is not set correctly
+ # setting PATH for ansible stuff
+ export PATH=/home/jenkins/.local/bin:$PATH
+
+ cd $WORKSPACE/xci
+ ./xci-deploy.sh
+
+
+- builder:
+ name: 'xci-verify-healthcheck-macro'
+ builders:
+ - shell: |
+ #!/bin/bash
+
+ echo "Hello World!"
+
+# this will be enabled once the xci is prepared
+#- builder:
+# name: 'xci-verify-healthcheck-macro'
+# builders:
+# - shell:
+# !include-raw: ../../utils/fetch_os_creds.sh
+# - shell:
+# !include-raw: ../functest/functest-alpine.sh
diff --git a/jjb/yardstick/yardstick-daily-jobs.yml b/jjb/yardstick/yardstick-daily-jobs.yml
index ff1d47eb4..5d38a0b7d 100644
--- a/jjb/yardstick/yardstick-daily-jobs.yml
+++ b/jjb/yardstick/yardstick-daily-jobs.yml
@@ -28,6 +28,27 @@
# that have been switched using labels for slaves
#--------------------------------
pod:
+# apex CI PODs
+ - virtual:
+ slave-label: apex-virtual-master
+ installer: apex
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
+ - baremetal:
+ slave-label: apex-baremetal-master
+ installer: apex
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
+ - virtual:
+ slave-label: apex-virtual-danube
+ installer: apex
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *danube
+ - baremetal:
+ slave-label: apex-baremetal-danube
+ installer: apex
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *danube
# fuel CI PODs
- baremetal:
slave-label: fuel-baremetal
@@ -91,7 +112,6 @@
installer: joid
auto-trigger-name: 'daily-trigger-disabled'
<<: *danube
-
# compass CI PODs
- baremetal:
slave-label: compass-baremetal
@@ -114,22 +134,6 @@
auto-trigger-name: 'daily-trigger-disabled'
<<: *danube
#--------------------------------
-# Installers not using labels
-# CI PODs
-# This section should only contain the installers
-# that have not been switched using labels for slaves
-#--------------------------------
- - lf-pod1:
- slave-label: '{pod}'
- installer: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - lf-pod1:
- slave-label: '{pod}'
- installer: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *danube
-#--------------------------------
# None-CI PODs
#--------------------------------
- orange-pod1:
@@ -162,36 +166,6 @@
installer: fuel
auto-trigger-name: 'daily-trigger-disabled'
<<: *danube
- - arm-pod2:
- slave-label: '{pod}'
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - arm-pod2:
- slave-label: '{pod}'
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *danube
- - arm-pod3:
- slave-label: '{pod}'
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - arm-pod3:
- slave-label: '{pod}'
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *danube
- - arm-virtual1:
- slave-label: '{pod}'
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - arm-virtual1:
- slave-label: '{pod}'
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *danube
- orange-pod2:
slave-label: '{pod}'
installer: joid
@@ -212,6 +186,11 @@
installer: compass
auto-trigger-name: 'daily-trigger-disabled'
<<: *master
+ - flex-pod1:
+ slave-label: '{pod}'
+ installer: apex
+ auto-trigger-name: 'daily-trigger-disabled'
+ <<: *master
#--------------------------------
testsuite:
- 'daily'
@@ -276,13 +255,15 @@
- description-setter:
description: "POD: $NODE_NAME"
- 'yardstick-cleanup'
- #- 'yardstick-fetch-os-creds'
+ - 'yardstick-fetch-os-creds'
+ - 'yardstick-fetch-k8s-conf'
- 'yardstick-{testsuite}'
- 'yardstick-store-results'
publishers:
- email:
recipients: jean.gaoliang@huawei.com limingjiang@huawei.com ross.b.brattain@intel.com
+ - email-jenkins-admins-on-failure
########################
# builder macros
@@ -306,6 +287,12 @@
!include-raw: ../../utils/fetch_os_creds.sh
- builder:
+ name: yardstick-fetch-k8s-conf
+ builders:
+ - shell:
+ !include-raw: ./yardstick-get-k8s-conf.sh
+
+- builder:
name: yardstick-store-results
builders:
- shell:
@@ -320,94 +307,91 @@
# parameter macros
########################
- parameter:
- name: 'yardstick-params-fuel-baremetal'
+ name: 'yardstick-params-apex-virtual-master'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
default: '-i 104.197.68.199:8086'
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-fuel-virtual'
+ name: 'yardstick-params-apex-baremetal-master'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
default: '-i 104.197.68.199:8086'
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-armband-baremetal'
+ name: 'yardstick-params-apex-virtual-danube'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
default: '-i 104.197.68.199:8086'
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-armband-virtual'
+ name: 'yardstick-params-apex-baremetal-danube'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
default: '-i 104.197.68.199:8086'
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-arm-virtual1'
+ name: 'yardstick-params-fuel-baremetal'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
default: '-i 104.197.68.199:8086'
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-joid-baremetal'
+ name: 'yardstick-params-fuel-virtual'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
default: '-i 104.197.68.199:8086'
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-joid-virtual'
+ name: 'yardstick-params-armband-baremetal'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
default: '-i 104.197.68.199:8086'
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-intel-pod8'
+ name: 'yardstick-params-armband-virtual'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
default: '-i 104.197.68.199:8086'
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-lf-pod1'
+ name: 'yardstick-params-joid-baremetal'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
default: '-i 104.197.68.199:8086'
description: 'Arguments to use in order to choose the backend DB'
-
- parameter:
- name: 'yardstick-params-lf-pod2'
+ name: 'yardstick-params-joid-virtual'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
default: '-i 104.197.68.199:8086'
description: 'Arguments to use in order to choose the backend DB'
-
- parameter:
- name: 'yardstick-params-compass-baremetal'
+ name: 'yardstick-params-intel-pod8'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
default: '-i 104.197.68.199:8086'
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-zte-pod1'
+ name: 'yardstick-params-compass-baremetal'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
default: '-i 104.197.68.199:8086'
description: 'Arguments to use in order to choose the backend DB'
-
- parameter:
- name: 'yardstick-params-zte-pod2'
+ name: 'yardstick-params-zte-pod1'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
@@ -415,7 +399,7 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-zte-pod3'
+ name: 'yardstick-params-zte-pod2'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
@@ -423,7 +407,7 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-orange-pod1'
+ name: 'yardstick-params-zte-pod3'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
@@ -431,7 +415,7 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-orange-pod2'
+ name: 'yardstick-params-orange-pod1'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
@@ -439,7 +423,7 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-arm-pod2'
+ name: 'yardstick-params-orange-pod2'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
@@ -447,7 +431,7 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-arm-pod3'
+ name: 'yardstick-params-virtual'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
@@ -455,7 +439,7 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-virtual'
+ name: 'yardstick-params-compass-virtual'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
@@ -463,7 +447,7 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-compass-virtual'
+ name: 'yardstick-params-huawei-pod3'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
@@ -471,7 +455,7 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-huawei-pod3'
+ name: 'yardstick-params-huawei-pod4'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
@@ -479,7 +463,7 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-huawei-pod4'
+ name: 'yardstick-params-flex-pod1'
parameters:
- string:
name: YARDSTICK_DB_BACKEND
@@ -498,4 +482,4 @@
- trigger:
name: 'yardstick-daily-huawei-pod4-trigger'
triggers:
- - timed: '0 1 * * *'
+ - timed: ''
diff --git a/jjb/yardstick/yardstick-daily.sh b/jjb/yardstick/yardstick-daily.sh
index 1c2abad3f..aaefba26e 100755
--- a/jjb/yardstick/yardstick-daily.sh
+++ b/jjb/yardstick/yardstick-daily.sh
@@ -2,9 +2,10 @@
set -e
[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-# labconfig is used only for joid
-labconfig=""
+rc_file_vol=""
+cacert_file_vol=""
sshkey=""
+
if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
instack_mac=$(sudo virsh domiflist undercloud | grep default | \
grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
@@ -15,12 +16,24 @@ if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
fi
-elif [[ ${INSTALLER_TYPE} == 'joid' ]]; then
- # If production lab then creds may be retrieved dynamically
- # creds are on the jumphost, always in the same folder
- labconfig="-v $LAB_CONFIG/admin-openrc:/etc/yardstick/openstack.creds"
- # If dev lab, credentials may not be the default ones, just provide a path to put them into docker
- # replace the default one by the customized one provided by jenkins config
+fi
+
+if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
+ if [[ "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then
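+ # kubernetes scenarios mount the cluster kubeconfig instead of OpenStack credentials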
+ rc_file_vol="-v /home/ubuntu/config:/etc/yardstick/admin.conf"
+ else
+ # If production lab then creds may be retrieved dynamically
+ # creds are on the jumphost, always in the same folder
+ rc_file_vol="-v $LAB_CONFIG/admin-openrc:/etc/yardstick/openstack.creds"
+ # If dev lab, credentials may not be the default ones, just provide a path to put them into docker
+ # replace the default one by the customized one provided by jenkins config
+ fi
+elif [[ ${INSTALLER_TYPE} == 'compass' && ${BRANCH} == 'master' ]]; then
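+ # compass (master) also mounts the CA bundle and points OS_CACERT at it
+ # inside the container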
+ cacert_file_vol="-v ${HOME}/os_cacert:/etc/yardstick/os_cacert"
+ echo "export OS_CACERT=/etc/yardstick/os_cacert" >> ${HOME}/opnfv-openrc.sh
+ rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/etc/yardstick/openstack.creds"
+else
+ rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/etc/yardstick/openstack.creds"
fi
# Set iptables rule to allow forwarding return traffic for container
@@ -46,8 +59,14 @@ sudo rm -rf ${dir_result}/*
map_log_dir="-v ${dir_result}:/tmp/yardstick"
# Run docker
-cmd="sudo docker run ${opts} ${envs} ${labconfig} ${map_log_dir} ${sshkey} opnfv/yardstick:${DOCKER_TAG} \
+if [[ ${INSTALLER_TYPE} == "joid" && "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then
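+ # for joid k8s scenarios the yardstick container is started on the
+ # kubernetes-master unit via juju ssh, so install docker there first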
+ juju ssh kubernetes-master/0 sudo apt-get install -y docker.io
+ cmd="juju ssh kubernetes-master/0 sudo docker run ${opts} ${envs} ${rc_file_vol} ${cacert_file_vol} ${map_log_dir} ${sshkey} opnfv/yardstick:${DOCKER_TAG} exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
+else
+ cmd="sudo docker run ${opts} ${envs} ${rc_file_vol} ${cacert_file_vol} ${map_log_dir} ${sshkey} opnfv/yardstick:${DOCKER_TAG} \
exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
+fi
+
echo "Yardstick: Running docker cmd: ${cmd}"
${cmd}
diff --git a/jjb/yardstick/yardstick-get-k8s-conf.sh b/jjb/yardstick/yardstick-get-k8s-conf.sh
new file mode 100755
index 000000000..e93367f9a
--- /dev/null
+++ b/jjb/yardstick/yardstick-get-k8s-conf.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+set -e
+
+dest_path="$HOME/admin.conf"
+
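+# for kubernetes scenarios, copy the kubeconfig generated on the
+# kubernetes-master unit so yardstick can reach the cluster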
+if [[ "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then
+ juju scp kubernetes-master/0:config "${dest_path}"
+fi