-rw-r--r--.gitmodules3
-rw-r--r--INFO.yaml14
-rw-r--r--docs/ci/index.rst9
-rw-r--r--docs/ci/resources.rst84
-rw-r--r--docs/ci/tables/ci-baremetal-servers.rst148
-rw-r--r--docs/ci/tables/ci-build-servers.rst31
-rw-r--r--docs/ci/tables/ci-labels.rst13
-rw-r--r--docs/ci/tables/ci-virtual-servers.rst164
-rw-r--r--docs/ci/tables/none-ci-servers.rst53
-rw-r--r--docs/ci/user-guide.rst100
-rw-r--r--docs/conf.py11
-rw-r--r--docs/conf.yaml3
-rw-r--r--docs/infra/jenkins/connect-to-jenkins.rst116
-rw-r--r--docs/requirements.txt3
-rw-r--r--gitlab-templates/Docker.gitlab-ci.yml70
-rw-r--r--gitlab-templates/GoogleStorage.gitlab-ci.yml35
-rw-r--r--gitlab-templates/RTD.gitlab-ci.yml104
-rwxr-xr-xjjb/3rd_party_ci/create-apex-vms.sh12
-rwxr-xr-xjjb/3rd_party_ci/detect-snapshot.sh31
-rwxr-xr-xjjb/3rd_party_ci/download-netvirt-artifact.sh39
-rwxr-xr-xjjb/3rd_party_ci/install-netvirt.sh32
-rw-r--r--jjb/3rd_party_ci/odl-netvirt.yaml278
-rwxr-xr-xjjb/3rd_party_ci/postprocess-netvirt.sh11
-rwxr-xr-xjjb/apex/apex-build.sh98
-rwxr-xr-xjjb/apex/apex-deploy.sh161
-rwxr-xr-xjjb/apex/apex-download-artifact.sh89
-rwxr-xr-xjjb/apex/apex-fetch-logs.sh25
-rwxr-xr-xjjb/apex/apex-iso-verify.sh75
-rw-r--r--jjb/apex/apex-jjb-renderer.py49
-rw-r--r--jjb/apex/apex-project-jobs.yaml136
-rw-r--r--jjb/apex/apex-snapshot-create.sh108
-rw-r--r--jjb/apex/apex-snapshot-deploy.sh174
-rwxr-xr-xjjb/apex/apex-unit-test.sh33
-rwxr-xr-xjjb/apex/apex-upload-artifact.sh165
-rw-r--r--jjb/apex/apex-verify-jobs.yaml396
-rw-r--r--jjb/apex/apex.yaml1793
-rw-r--r--jjb/apex/apex.yaml.j21105
-rw-r--r--jjb/apex/scenarios.yaml.hidden72
-rw-r--r--jjb/apex/update-build-result.groovy5
-rw-r--r--jjb/armband/armband-ci-jobs.yaml373
-rw-r--r--jjb/armband/armband-verify-jobs.yaml210
-rw-r--r--jjb/auto/auto.yaml127
-rw-r--r--jjb/availability/availability.yaml8
-rw-r--r--jjb/barometer/barometer-build.sh22
-rw-r--r--jjb/barometer/barometer-upload-artifact.sh74
-rw-r--r--jjb/barometer/barometer.yaml163
-rw-r--r--jjb/bottlenecks/bottlenecks-ci-jobs.yaml204
-rw-r--r--jjb/bottlenecks/bottlenecks-cleanup.sh17
-rw-r--r--jjb/bottlenecks/bottlenecks-project-jobs.yaml224
-rw-r--r--jjb/bottlenecks/bottlenecks-run-suite.sh147
-rw-r--r--jjb/calipso/calipso.yaml65
-rw-r--r--jjb/ci_gate_security/anteater-report-to-gerrit.sh28
-rw-r--r--jjb/ci_gate_security/anteater-security-audit-weekly.sh50
-rw-r--r--jjb/ci_gate_security/anteater-security-audit.sh32
-rw-r--r--jjb/ci_gate_security/opnfv-ci-gate-security.yaml196
-rw-r--r--jjb/clover/clover-project.yaml172
-rw-r--r--jjb/cntt/cntt.yaml97
-rw-r--r--jjb/compass4nfv/compass-build.sh52
-rw-r--r--jjb/compass4nfv/compass-ci-jobs.yaml1009
-rw-r--r--jjb/compass4nfv/compass-deploy.sh80
-rw-r--r--jjb/compass4nfv/compass-dovetail-jobs.yaml192
-rw-r--r--jjb/compass4nfv/compass-download-artifact.sh35
-rw-r--r--jjb/compass4nfv/compass-logs.sh28
-rw-r--r--jjb/compass4nfv/compass-makeppa.sh17
-rw-r--r--jjb/compass4nfv/compass-project-jobs.yaml135
-rw-r--r--jjb/compass4nfv/compass-upload-artifact.sh67
-rw-r--r--jjb/compass4nfv/compass-verify-jobs.yaml345
-rw-r--r--jjb/compass4nfv/compass-workspace-precleanup.sh8
-rw-r--r--jjb/conductor/conductor.yaml8
-rwxr-xr-xjjb/container4nfv/arm64/compass-build.sh14
-rwxr-xr-xjjb/container4nfv/arm64/compass-deploy.sh18
-rwxr-xr-xjjb/container4nfv/arm64/yardstick-arm64.sh93
-rw-r--r--jjb/container4nfv/container4nfv-arm64.yaml95
-rw-r--r--jjb/container4nfv/container4nfv-project.yaml169
-rw-r--r--jjb/container4nfv/yardstick-arm64.yaml130
-rw-r--r--jjb/copper/copper.yaml8
-rw-r--r--jjb/cperf/cperf-ci-jobs.yaml190
-rwxr-xr-xjjb/cperf/cperf-prepare-robot.sh32
-rwxr-xr-xjjb/cperf/cperf-robot-netvirt-csit.sh105
-rw-r--r--jjb/cperf/parse-node-yaml.py71
-rw-r--r--jjb/daisy4nfv/daisy-daily-jobs.yaml423
-rwxr-xr-xjjb/daisy4nfv/daisy-deploy.sh75
-rw-r--r--jjb/daisy4nfv/daisy-project-jobs.yaml312
-rwxr-xr-xjjb/daisy4nfv/daisy4nfv-basic.sh6
-rwxr-xr-xjjb/daisy4nfv/daisy4nfv-build-kolla-image.sh68
-rwxr-xr-xjjb/daisy4nfv/daisy4nfv-build.sh41
-rwxr-xr-xjjb/daisy4nfv/daisy4nfv-download-artifact.sh86
-rw-r--r--jjb/daisy4nfv/daisy4nfv-merge-jobs.yaml226
-rwxr-xr-xjjb/daisy4nfv/daisy4nfv-smoke-test.sh6
-rwxr-xr-xjjb/daisy4nfv/daisy4nfv-upload-artifact.sh96
-rw-r--r--jjb/daisy4nfv/daisy4nfv-verify-jobs.yaml225
-rw-r--r--jjb/doctor/doctor.yaml275
-rw-r--r--jjb/domino/domino.yaml57
-rw-r--r--jjb/dovetail-webportal/dovetail-webportal-project-jobs.yaml101
-rwxr-xr-xjjb/dovetail/dovetail-artifacts-upload.sh94
-rw-r--r--jjb/dovetail/dovetail-artifacts-upload.yaml115
-rw-r--r--jjb/dovetail/dovetail-ci-jobs.yaml271
-rwxr-xr-xjjb/dovetail/dovetail-cleanup.sh49
-rw-r--r--jjb/dovetail/dovetail-project-jobs.yaml116
-rwxr-xr-xjjb/dovetail/dovetail-run.sh366
-rw-r--r--jjb/dovetail/dovetail-weekly-jobs.yaml139
-rw-r--r--jjb/dpacc/dpacc.yaml8
-rwxr-xr-xjjb/escalator/escalator-basic.sh5
-rwxr-xr-xjjb/escalator/escalator-build.sh33
-rwxr-xr-xjjb/escalator/escalator-upload-artifact.sh89
-rw-r--r--jjb/escalator/escalator.yaml317
-rw-r--r--jjb/fuel/fuel-daily-jobs.yaml766
-rwxr-xr-xjjb/fuel/fuel-deploy.sh142
-rwxr-xr-xjjb/fuel/fuel-download-artifact.sh63
-rwxr-xr-xjjb/fuel/fuel-lab-reconfig.sh80
-rwxr-xr-xjjb/fuel/fuel-logs.sh36
-rw-r--r--jjb/fuel/fuel-project-jobs.yaml80
-rw-r--r--jjb/fuel/fuel-verify-jobs.yaml208
-rw-r--r--jjb/fuel/fuel-weekly-jobs.yaml191
-rwxr-xr-xjjb/functest/functest-alpine.sh183
-rwxr-xr-xjjb/functest/functest-cleanup.sh40
-rw-r--r--jjb/functest/functest-daily-jobs.yaml416
-rw-r--r--jjb/functest/functest-docker.yaml307
-rwxr-xr-xjjb/functest/functest-env-presetup.sh58
-rw-r--r--jjb/functest/functest-exit.sh11
-rwxr-xr-xjjb/functest/functest-k8.sh64
-rw-r--r--jjb/functest/functest-kubernetes-docker.yaml182
-rw-r--r--jjb/functest/functest-kubernetes-pi.yaml891
-rw-r--r--jjb/functest/functest-kubernetes-project-jobs.yaml257
-rw-r--r--jjb/functest/functest-kubernetes.yaml2198
-rw-r--r--jjb/functest/functest-pi.yaml1239
-rw-r--r--jjb/functest/functest-project-jobs.yaml257
-rwxr-xr-xjjb/functest/functest-suite.sh21
-rw-r--r--jjb/functest/functest-weekly-jobs.yaml128
-rw-r--r--jjb/functest/functest.yaml2663
-rw-r--r--jjb/functest/xtesting-ci-docker.yaml189
-rw-r--r--jjb/functest/xtesting-ci-vm.yaml233
-rw-r--r--jjb/functest/xtesting-ci.yaml182
-rw-r--r--jjb/functest/xtesting-docker.yaml224
-rw-r--r--jjb/functest/xtesting-pi.yaml425
-rw-r--r--jjb/functest/xtesting-project-jobs.yaml257
-rw-r--r--jjb/functest/xtesting.yaml986
m---------jjb/global-jjb0
-rw-r--r--jjb/global/basic-jobs.yaml46
-rw-r--r--jjb/global/installer-params.yaml156
-rwxr-xr-xjjb/global/installer-report.sh46
-rw-r--r--jjb/global/releng-defaults.yaml13
-rw-r--r--jjb/global/releng-macros.yaml611
-rw-r--r--jjb/global/slave-params.yaml1119
-rw-r--r--jjb/ipv6/ipv6.yaml8
-rw-r--r--jjb/joid/joid-daily-jobs.yaml384
-rw-r--r--jjb/joid/joid-deploy.sh160
-rw-r--r--jjb/joid/joid-verify-jobs.yaml213
-rwxr-xr-xjjb/kvmfornfv/kvmfornfv-download-artifact.sh39
-rwxr-xr-xjjb/kvmfornfv/kvmfornfv-test.sh33
-rwxr-xr-xjjb/kvmfornfv/kvmfornfv-upload-artifact.sh85
-rw-r--r--jjb/kvmfornfv/kvmfornfv.yaml386
-rw-r--r--jjb/laas/laas.yml72
-rw-r--r--jjb/laas/shell/backup-dashboard.sh28
-rw-r--r--jjb/laas/shell/deploy-dashboard.sh19
-rw-r--r--jjb/models/models.yaml8
-rw-r--r--jjb/moon/moon.yaml60
-rwxr-xr-xjjb/netready/netready-gluon-build.sh42
-rwxr-xr-xjjb/netready/netready-upload-gluon-packages.sh27
-rw-r--r--jjb/netready/netready.yaml69
-rw-r--r--jjb/nfvbench/nfvbench-views.yaml12
-rw-r--r--jjb/nfvbench/nfvbench.yaml44
-rw-r--r--jjb/onosfw/onosfw.yaml192
-rwxr-xr-xjjb/openci/create-ane.sh26
-rwxr-xr-xjjb/openci/create-cde.sh27
-rwxr-xr-xjjb/openci/create-clme.sh27
-rw-r--r--jjb/openci/openci-odl-daily-jobs.yaml99
-rw-r--r--jjb/openci/openci-onap-daily-jobs.yaml99
-rw-r--r--jjb/openci/openci-opnfv-daily-jobs.yaml149
-rw-r--r--jjb/opera/opera-daily-jobs.yaml99
-rw-r--r--jjb/opera/opera-project-jobs.yaml56
-rw-r--r--jjb/opera/opera-verify-jobs.yaml157
-rw-r--r--jjb/opnfvdocs/docs-rtd.yaml99
-rw-r--r--jjb/opnfvdocs/opnfvdocs-views.yaml12
-rw-r--r--jjb/opnfvdocs/opnfvdocs.yaml141
-rw-r--r--jjb/opnfvdocs/project.cfg42
-rw-r--r--jjb/orchestra/orchestra-daily-jobs.yaml100
-rw-r--r--jjb/orchestra/orchestra-project-jobs.yaml49
-rw-r--r--jjb/ovn4nfv/ovn4nfv-daily-jobs.yaml88
-rw-r--r--jjb/ovn4nfv/ovn4nfv-project-jobs.yaml56
-rw-r--r--jjb/ovsnfv/ovsnfv.yaml157
-rw-r--r--jjb/parser/parser.yaml78
-rw-r--r--jjb/pharos/check-jinja2.yaml98
-rw-r--r--jjb/pharos/pharos.yaml88
-rw-r--r--jjb/promise/promise.yaml8
-rw-r--r--jjb/qtip/qtip-experimental-jobs.yaml49
-rw-r--r--jjb/qtip/qtip-validate-jobs.yaml159
-rw-r--r--jjb/qtip/qtip-verify-jobs.yaml195
-rw-r--r--jjb/releng/artifact-cleanup.yaml41
-rwxr-xr-xjjb/releng/branch-or-tag.sh40
-rw-r--r--jjb/releng/compass4nfv-docker.yaml264
-rw-r--r--jjb/releng/opnfv-docker-arm.yaml198
-rw-r--r--jjb/releng/opnfv-docker-custom.yaml111
-rw-r--r--jjb/releng/opnfv-docker.sh158
-rw-r--r--jjb/releng/opnfv-docker.yaml405
-rw-r--r--jjb/releng/opnfv-lint.yaml186
-rw-r--r--jjb/releng/opnfv-repo-archiver.sh68
-rw-r--r--jjb/releng/opnfv-utils.yaml210
-rw-r--r--jjb/releng/releng-ci-jobs.yaml20
-rw-r--r--jjb/releng/releng-jobs.yaml496
-rw-r--r--jjb/releng/releng-release-create-branch.sh66
-rw-r--r--jjb/releng/releng-release-jobs.yaml119
-rw-r--r--jjb/releng/releng-release-tagging.sh9
-rw-r--r--jjb/releng/releng-views.yaml12
-rw-r--r--jjb/releng/testresults-automate.yaml2
-rw-r--r--jjb/releng/testresults-verify.yaml63
-rw-r--r--jjb/securedlab/check-jinja2.yaml85
-rw-r--r--jjb/sfc/sfc-project-jobs.yaml102
-rw-r--r--jjb/snaps/snaps-verify-jobs.yaml77
-rw-r--r--jjb/stor4nfv/stor4nfv-project.yaml8
-rw-r--r--jjb/storperf/storperf-daily-jobs.yaml187
-rw-r--r--jjb/storperf/storperf-verify-jobs.yaml200
-rw-r--r--jjb/storperf/storperf.yaml78
-rw-r--r--jjb/ves/ves.yaml8
-rw-r--r--jjb/vnf_forwarding_graph/vnf_forwarding_graph.yaml8
-rw-r--r--jjb/vswitchperf/vswitchperf.yaml199
-rw-r--r--jjb/xci/bifrost-cleanup-job.yaml146
-rw-r--r--jjb/xci/bifrost-periodic-jobs.yaml152
-rwxr-xr-xjjb/xci/bifrost-provision.sh109
-rw-r--r--jjb/xci/bifrost-verify-jobs.yaml223
-rwxr-xr-xjjb/xci/bifrost-verify.sh54
-rw-r--r--jjb/xci/osa-periodic-jobs.yaml261
-rwxr-xr-xjjb/xci/xci-cleanup.sh28
-rw-r--r--jjb/xci/xci-daily-jobs.yaml271
-rwxr-xr-xjjb/xci/xci-deploy.sh75
-rw-r--r--jjb/xci/xci-merge-jobs.yaml476
-rwxr-xr-xjjb/xci/xci-promote.sh51
-rwxr-xr-xjjb/xci/xci-run-functest.sh73
-rwxr-xr-xjjb/xci/xci-set-scenario.sh214
-rwxr-xr-xjjb/xci/xci-start-deployment.sh45
-rwxr-xr-xjjb/xci/xci-start-new-vm.sh93
-rw-r--r--jjb/xci/xci-verify-jobs.yaml307
-rwxr-xr-xjjb/yardstick/yardstick-cleanup.sh36
-rw-r--r--jjb/yardstick/yardstick-daily-jobs.yaml548
-rwxr-xr-xjjb/yardstick/yardstick-daily.sh82
-rwxr-xr-xjjb/yardstick/yardstick-get-k8s-conf.sh13
-rw-r--r--jjb/yardstick/yardstick-project-jobs.yaml238
-rw-r--r--modules/opnfv/deployment/example.py10
-rw-r--r--modules/opnfv/deployment/factory.py2
-rw-r--r--modules/opnfv/deployment/fuel/adapter.py230
-rw-r--r--modules/opnfv/deployment/manager.py6
-rw-r--r--modules/opnfv/utils/Credentials.py11
-rw-r--r--modules/opnfv/utils/ssh_utils.py3
-rw-r--r--modules/requirements.txt1
-rw-r--r--releases/2023.1/functest.yaml9
-rw-r--r--releases/2023.2/functest.yaml9
-rw-r--r--releases/gambia/apex.yaml29
-rw-r--r--releases/gambia/armband.yaml30
-rw-r--r--releases/gambia/auto.yaml25
-rw-r--r--releases/gambia/availability.yaml9
-rw-r--r--releases/gambia/barometer.yaml14
-rw-r--r--releases/gambia/bottlenecks.yaml14
-rw-r--r--releases/gambia/clover.yaml22
-rw-r--r--releases/gambia/compass4nfv.yaml26
-rw-r--r--releases/gambia/container4nfv.yaml9
-rw-r--r--releases/gambia/doctor.yaml27
-rw-r--r--releases/gambia/edgecloud.yaml9
-rw-r--r--releases/gambia/fuel.yaml30
-rw-r--r--releases/gambia/functest.yaml35
-rw-r--r--releases/gambia/ipv6.yaml30
-rw-r--r--releases/gambia/nfvbench.yaml9
-rw-r--r--releases/gambia/opnfvdocs.yaml9
-rw-r--r--releases/gambia/ovn4nfv.yaml9
-rw-r--r--releases/gambia/samplevnf.yaml25
-rw-r--r--releases/gambia/sandbox.yaml17
-rw-r--r--releases/gambia/sdnvpn.yaml27
-rw-r--r--releases/gambia/sfc.yaml11
-rw-r--r--releases/gambia/stor4nfv.yaml9
-rw-r--r--releases/gambia/storperf.yaml9
-rw-r--r--releases/gambia/vswitchperf.yaml14
-rw-r--r--releases/gambia/yardstick.yaml24
-rw-r--r--releases/hunter/apex.yaml23
-rw-r--r--releases/hunter/availability.yaml9
-rw-r--r--releases/hunter/barometer.yaml (renamed from jjb/releng/releng-release-create-venv.sh)22
-rw-r--r--releases/hunter/bottlenecks.yaml14
-rw-r--r--releases/hunter/clover.yaml16
-rw-r--r--releases/hunter/container4nfv.yaml9
-rw-r--r--releases/hunter/cran.yaml9
-rw-r--r--releases/hunter/doctor.yaml24
-rw-r--r--releases/hunter/dovetail.yaml9
-rw-r--r--releases/hunter/edgecloud.yaml9
-rw-r--r--releases/hunter/fds.yaml9
-rw-r--r--releases/hunter/fuel.yaml27
-rw-r--r--releases/hunter/functest.yaml29
-rw-r--r--releases/hunter/ipv6.yaml27
-rw-r--r--releases/hunter/opnfvdocs.yaml9
-rw-r--r--releases/hunter/samplevnf.yaml11
-rw-r--r--releases/hunter/sfc.yaml9
-rw-r--r--releases/hunter/snaps.yaml9
-rw-r--r--releases/hunter/stor4nfv.yaml11
-rw-r--r--releases/hunter/storperf.yaml9
-rw-r--r--releases/hunter/vswitchperf.yaml14
-rw-r--r--releases/hunter/yardstick.yaml16
-rw-r--r--releases/iruya/airship.yaml27
-rw-r--r--releases/iruya/barometer.yaml25
-rw-r--r--releases/iruya/bottlenecks.yaml9
-rw-r--r--releases/iruya/doctor.yaml24
-rw-r--r--releases/iruya/edgecloud.yaml9
-rw-r--r--releases/iruya/fds.yaml9
-rw-r--r--releases/iruya/fuel.yaml24
-rw-r--r--releases/iruya/functest.yaml23
-rw-r--r--releases/iruya/opnfvdocs.yaml9
-rw-r--r--releases/iruya/rocket.yaml9
-rw-r--r--releases/iruya/stor4nfv.yaml11
-rw-r--r--releases/iruya/storperf.yaml14
-rw-r--r--releases/iruya/vswitchperf.yaml14
-rw-r--r--releases/iruya/yardstick.yaml17
-rw-r--r--releases/jerma/airship.yaml24
-rw-r--r--releases/jerma/barometer.yaml21
-rw-r--r--releases/jerma/cirv.yaml34
-rw-r--r--releases/jerma/doctor.yaml24
-rw-r--r--releases/jerma/dovetail.yaml29
-rw-r--r--releases/jerma/functest.yaml15
-rw-r--r--releases/jerma/kuberef.yaml22
-rw-r--r--releases/jerma/moon.yaml9
-rw-r--r--releases/jerma/opnfvdocs.yaml14
-rw-r--r--releases/jerma/samplevnf.yaml24
-rw-r--r--releases/jerma/vswitchperf.yaml14
-rw-r--r--releases/kali/airship.yaml19
-rw-r--r--releases/kali/barometer.yaml17
-rw-r--r--releases/kali/cirv.yaml (renamed from utils/build-server-ansible/vars/docker-compose-CentOS.yml)17
-rw-r--r--releases/kali/functest.yaml15
-rw-r--r--releases/kali/kuberef.yaml17
-rw-r--r--releases/kali/vineperf.yaml17
-rw-r--r--releases/lakelse/barometer.yaml17
-rw-r--r--releases/lakelse/cirv.yaml17
-rw-r--r--releases/lakelse/kuberef.yaml17
-rw-r--r--releases/lakelse/opnfvdocs.yaml17
-rw-r--r--releases/lakelse/vineperf.yaml17
-rw-r--r--releases/leguer/functest.yaml15
-rw-r--r--releases/moselle/barometer.yaml17
-rw-r--r--releases/moselle/kuberef.yaml17
-rw-r--r--releases/moselle/opnfvdocs.yaml17
-rw-r--r--releases/moselle/thoth.yaml (renamed from utils/build-server-ansible/inventory.ini)15
-rw-r--r--releases/moselle/vineperf.yaml17
-rw-r--r--releases/nile/barometer.yaml17
-rw-r--r--releases/nile/opnfvdocs.yaml17
-rw-r--r--[-rwxr-xr-x]releases/nile/thoth.yaml (renamed from jjb/kvmfornfv/kvmfornfv-build.sh)19
-rw-r--r--releases/nile/vineperf.yaml17
-rw-r--r--releases/orinoco/barometer.yaml17
-rw-r--r--releases/orinoco/kuberef.yaml17
-rw-r--r--releases/orinoco/opnfvdocs.yaml9
-rw-r--r--releases/orinoco/thoth.yaml (renamed from utils/build-server-ansible/vars/docker-compose-Ubuntu.yml)19
-rw-r--r--releases/schema.yaml2
-rw-r--r--releases/scripts/create_branch.py143
-rwxr-xr-xreleases/scripts/release-status.sh2
-rw-r--r--releases/scripts/repos.py31
-rw-r--r--releases/scripts/requirements.txt5
-rw-r--r--releases/v1.21/functest.yaml9
-rw-r--r--releases/v1.22/functest.yaml9
-rw-r--r--releases/v1.23/functest.yaml9
-rw-r--r--releases/v1.24/functest.yaml9
-rw-r--r--releases/v1.25/functest.yaml9
-rw-r--r--releases/v1.26/functest.yaml9
-rw-r--r--releases/v1.27/functest.yaml9
-rw-r--r--releases/v1.28/functest.yaml9
-rw-r--r--releases/v1.29/functest.yaml9
-rw-r--r--releases/wallaby/functest.yaml12
-rw-r--r--releases/xena/functest.yaml12
-rw-r--r--releases/yoga/functest.yaml12
-rw-r--r--releases/zed/functest.yaml12
-rw-r--r--test-requirements.txt3
-rw-r--r--tox.ini23
-rw-r--r--upper-constraints.txt7
-rwxr-xr-xutils/artifacts.opnfv.org.sh8
-rw-r--r--utils/build-server-ansible/main.yml41
-rw-r--r--utils/build-server-ansible/vars/CentOS.yml76
-rw-r--r--utils/build-server-ansible/vars/Ubuntu.yml88
-rw-r--r--utils/build-server-ansible/vars/defaults.yml23
-rwxr-xr-xutils/fetch_k8_conf.sh3
-rwxr-xr-xutils/fetch_os_creds.sh2
-rwxr-xr-xutils/jenkins-jnlp-connect.sh24
-rw-r--r--utils/push-test-logs.sh9
-rwxr-xr-xutils/retention_script.sh5
374 files changed, 12871 insertions, 30180 deletions
diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index 07b28be4b..000000000
--- a/.gitmodules
+++ /dev/null
@@ -1,3 +0,0 @@
-[submodule "jjb/global-jjb"]
- path = jjb/global-jjb
- url = https://github.com/lfit/releng-global-jjb
diff --git a/INFO.yaml b/INFO.yaml
index dae36c9b4..1c9b4693c 100644
--- a/INFO.yaml
+++ b/INFO.yaml
@@ -47,7 +47,7 @@ committers:
email: 'agardner@linuxfoundation.org'
company: 'The Linux Foundation'
id: 'agardner'
- timezone: 'Canada/Atlantic'
+ timezone: 'America/New_York'
- name: 'Tim Rozet'
email: 'trozet@redhat.com'
company: 'Red Hat'
@@ -93,6 +93,15 @@ committers:
email: 'lhinds@redhat.com'
id: 'lukehinds'
timezone: 'Europe/London'
+ - name: 'Cédric Ollivier'
+ email: 'cedric.ollivier@orange.com'
+ company: 'orange'
+ id: 'ollivier'
+ timezone: 'Europe/Paris'
+ - name: 'Alexandru Avadanii'
+ email: 'Alexandru.Avadanii@enea.com'
+ company: 'enea.com'
+ id: 'AlexandruAvadanii'
tsc:
approval: 'http://ircbot.wl.linuxfoundation.org/meetings/opnfv-meeting/2015/opnfv-meeting.2015-07-14-14.00.html'
changes:
@@ -123,3 +132,6 @@ tsc:
- type: 'promotion'
name: 'Luke Hinds'
link: ''
+ - type: 'promotion'
+ name: 'Alexandru Avadanii'
+ link: 'https://gerrit.opnfv.org/gerrit/#/c/64499/'
diff --git a/docs/ci/index.rst b/docs/ci/index.rst
index 08f23ac2f..445552a82 100644
--- a/docs/ci/index.rst
+++ b/docs/ci/index.rst
@@ -8,10 +8,17 @@
OPNFV CI
========
-TBD
+OPNFV continuous integration (CI) is run on a variety of :doc:`hardware <resources>`
+connected to Jenkins and managed through YAML files in the `Releng`_
+repository. These YAML files are read by `Jenkins Job Builder`_ to
+generate and upload Jenkins jobs to the server. See the :doc:`User Guide
+<user-guide>` for resources on getting started with CI for your project.
.. toctree::
:maxdepth: 2
user-guide
resources
+
+.. _Releng: https://gerrit.opnfv.org/gerrit/admin/repos/releng
+.. _Jenkins Job Builder: https://docs.openstack.org/infra/jenkins-job-builder/
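As context for the new index text above: the YAML that Jenkins Job Builder reads pairs job templates with project definitions, and every project listed under a template gets its own expanded Jenkins job. A minimal, hypothetical sketch (the template and project names are illustrative and not taken from this repository)::

    ---
    - job-template:
        # Expanded once for every project that lists it under 'jobs'
        name: '{project}-verify-basic'
        builders:
          - shell: |
              echo "Running basic verification for {project}"

    - project:
        name: exampleproject
        project: 'exampleproject'
        jobs:
          - '{project}-verify-basic'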
diff --git a/docs/ci/resources.rst b/docs/ci/resources.rst
index 572852c48..b4efe9704 100644
--- a/docs/ci/resources.rst
+++ b/docs/ci/resources.rst
@@ -13,49 +13,81 @@ verification needs. Each resource must meet a set of criteria in order
to be part of CI for an OPNFV release. There are three types of
resources:
- - Baremetal PODs (PODs)
- - Virtual PODs (vPODs)
- - Build Servers
+- Baremetal PODs (PODs)
+- Virtual PODs (vPODs)
+- Build Servers
+
+
+.. _ci-resources-baremetal-pods:
---------------
Baremetal PODs
--------------
-TBD
+Baremetal PODs are used to deploy OPNFV onto baremetal hardware through
+one of the installer projects. They enable the full range of scenarios
+to be deployed and tested.
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Baremetal PODs Requirements
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
+**Requirements**
In order for a POD to be considered CI-Ready the following requirements
must be met:
- #. Pharos Compliant and has a PDF
- #. Connected to Jenkins
- #. 24/7 Uptime
- #. No Development
- #. No manual intervention
+#. Pharos Compliant and has a PDF
+#. Connected to Jenkins
+#. 24/7 Uptime
+#. No Development
+#. No manual intervention
-------------
-Virtual PODS
+.. include:: tables/ci-baremetal-servers.rst
+
+
+.. _ci-resources-virtual-pods:
+
+Virtual PODs
------------
-TBD
+Virtual PODs are used to deploy OPNFV in a virtualized environment,
+generally on top of KVM through libvirt.
-~~~~~~~~~~~~
-Requirements
-~~~~~~~~~~~~
+**Requirements**
-TBD
+#. Have required virtualization packages installed
+#. Meet the Pharos resource specification for virtual PODs
+#. Connected to Jenkins
+#. 24/7 Uptime
+
+.. include:: tables/ci-virtual-servers.rst
+
+.. _ci-resources-build-servers:
--------------
Build Servers
-------------
-TBD
+Build servers are used to build projects, run basic verifications (such
+as unit tests and linting), and generate documentation.
+
+**Requirements**
+
+#. Have the required packages_ installed
+#. 24/7 Uptime
+#. Connected to Jenkins
+
+.. include:: tables/ci-build-servers.rst
+
+.. _dev-resources:
+
+=====================
+Development Resources
+=====================
+
+.. include:: tables/none-ci-servers.rst
+
+.. _ci-lables:
+
+===================
+CI Resources Labels
+===================
-~~~~~~~~~~~~
-Requirements
-~~~~~~~~~~~~
+.. include:: tables/ci-labels.rst
-TBD
+.. _packages: https://wiki.opnfv.org/display/INF/Continuous+Integration#ContinuousIntegration-BuildServers
diff --git a/docs/ci/tables/ci-baremetal-servers.rst b/docs/ci/tables/ci-baremetal-servers.rst
new file mode 100644
index 000000000..cccf75092
--- /dev/null
+++ b/docs/ci/tables/ci-baremetal-servers.rst
@@ -0,0 +1,148 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. SPDX-License-Identifier: CC-BY-4.0
+.. (c) Open Platform for NFV Project, Inc. and its contributors
+
+.. list-table:: CI Servers for Baremetal Deployment
+ :header-rows: 1
+ :stub-columns: 1
+
+ * - Node
+ - Usage
+ - Jumphost OS / Version
+ - PDF
+ - IDF
+ * - `arm-pod9 <https://build.opnfv.org/ci/computer/arm-pod9>`_
+ - Armband
+ - Ubuntu 16.04
+ - `PDF <https://git.opnfv.org/pharos/plain/labs/arm/pod9.yaml>`__
+ - `IDF <https://git.opnfv.org/pharos/plain/labs/arm/idf-pod9.yaml>`__
+ * - `arm-pod10 <https://build.opnfv.org/ci/computer/arm-pod10>`_
+ - Fuel
+ - Ubuntu 16.04
+ - `PDF <https://git.opnfv.org/pharos/plain/labs/arm/pod10.yaml>`__
+ - `IDF <https://git.opnfv.org/pharos/plain/labs/arm/idf-pod10.yaml>`__
+ * - `ericsson-pod1 <https://build.opnfv.org/ci/computer/ericsson-pod1>`_
+ - Fuel
+ - Ubuntu 16.04
+ - `PDF <https://git.opnfv.org/pharos/plain/labs/ericsson/pod1.yaml>`__
+ - `IDF <https://git.opnfv.org/pharos/plain/labs/ericsson/idf-pod1.yaml>`__
+ * - `ericsson-pod2 <https://build.opnfv.org/ci/computer/ericsson-pod2>`_
+ - XCI
+ - Ubuntu 16.04
+ - `PDF <https://git.opnfv.org/pharos/plain/labs/ericsson/pod2.yaml>`__
+ - `IDF <https://git.opnfv.org/pharos/plain/labs/ericsson/idf-pod2.yaml>`__
+ * - `flex-pod1 <https://build.opnfv.org/ci/computer/flex-pod1>`_
+ - Yardstick
+ -
+ - PDF
+ - IDF
+ * - `flex-pod2 <https://build.opnfv.org/ci/computer/flex-pod2>`_
+ - Apex
+ -
+ - PDF
+ - IDF
+ * - `huawei-pod1 <https://build.opnfv.org/ci/computer/huawei-pod1>`_
+ - Compass4NFV
+ -
+ - `PDF <https://git.opnfv.org/pharos/plain/labs/huawei/pod1.yaml>`__
+ - `IDF <https://git.opnfv.org/pharos/plain/labs/huawei/idf-pod1.yaml>`__
+ * - `huawei-pod2 <https://build.opnfv.org/ci/computer/huawei-pod2>`_
+ - Compass4NFV
+ - Ubuntu 14.04
+ - PDF
+ - IDF
+ * - `huawei-pod3 <https://build.opnfv.org/ci/computer/huawei-pod3>`_
+ - Yardstick
+ - Ubuntu 14.04
+ - PDF
+ - IDF
+ * - `huawei-pod4 <https://build.opnfv.org/ci/computer/huawei-pod4>`_
+ - Dovetail
+ -
+ - PDF
+ - IDF
+ * - `huawei-pod6 <https://build.opnfv.org/ci/computer/huawei-pod6>`_
+ -
+ - Ubuntu 14.04
+ - PDF
+ - IDF
+ * - `huawei-pod7 <https://build.opnfv.org/ci/computer/huawei-pod7>`_
+ - Dovetail
+ - Ubuntu 14.04
+ - PDF
+ - IDF
+ * - `huawei-pod8 <https://build.opnfv.org/ci/computer/huawei-pod8>`_
+ - Compass4NFV
+ - Ubuntu 16.04 (aarch64)
+ - PDF
+ - IDF
+ * - `huawei-pod12 <https://build.opnfv.org/ci/computer/huawei-pod12>`_
+ - JOID
+ - Ubuntu 16.04
+ - PDF
+ - IDF
+ * - `intel-pod10 <https://build.opnfv.org/ci/computer/intel-pod10>`_
+ - KVMforNFV
+ - CentOS 7
+ - PDF
+ - IDF
+ * - `intel-pod11 <https://build.opnfv.org/ci/computer/intel-pod11>`_
+ - Apex
+ -
+ - PDF
+ - IDF
+ * - `intel-pod12 <https://build.opnfv.org/ci/computer/intel-pod12>`_
+ - VSPerf
+ - CentOS 7
+ - PDF
+ - IDF
+ * - `intel-pod17 <https://build.opnfv.org/ci/computer/intel-pod17>`_
+ - Airship
+ -
+ - PDF
+ - IDF
+ * - `intel-pod18 <https://build.opnfv.org/ci/computer/intel-pod18>`_
+ - Airship
+ -
+ - PDF
+ - IDF
+ * - `lf-pod1 <https://build.opnfv.org/ci/computer/lf-pod1>`_
+ - Apex
+ - CentOS 7
+ - `PDF <https://git.opnfv.org/pharos/plain/labs/lf/pod1.yaml>`__
+ - IDF
+ * - `lf-pod2 <https://build.opnfv.org/ci/computer/lf-pod2>`_
+ - Fuel
+ - CentOS 7
+ - `PDF <https://git.opnfv.org/pharos/plain/labs/lf/pod2.yaml>`__
+ - `IDF <https://git.opnfv.org/pharos/plain/labs/lf/idf-pod2.yaml>`__
+ * - `unh-pod1 <https://build.opnfv.org/ci/computer/unh-pod1>`_
+ - Auto
+ - Ubuntu 16.04 (aarch64)
+ - PDF
+ - IDF
+ * - `zte-pod1 <https://build.opnfv.org/ci/computer/zte-pod1>`_
+ -
+ -
+ - `PDF <https://git.opnfv.org/pharos/plain/labs/zte/pod1.yaml>`__
+ - `IDF <https://git.opnfv.org/pharos/plain/labs/zte/idf-pod1.yaml>`__
+ * - `zte-pod2 <https://build.opnfv.org/ci/computer/zte-pod2>`_
+ -
+ -
+ - `PDF <https://git.opnfv.org/pharos/plain/labs/zte/pod2.yaml>`__
+ - `IDF <https://git.opnfv.org/pharos/plain/labs/zte/idf-pod2.yaml>`__
+ * - `zte-pod3 <https://build.opnfv.org/ci/computer/zte-pod3>`_
+ -
+ -
+ - `PDF <https://git.opnfv.org/pharos/plain/labs/zte/pod3.yaml>`__
+ - `IDF <https://git.opnfv.org/pharos/plain/labs/zte/idf-pod3.yaml>`__
+ * - `zte-pod4 <https://build.opnfv.org/ci/computer/zte-pod4>`_
+ -
+ -
+ - PDF
+ - IDF
+ * - `zte-pod9 <https://build.opnfv.org/ci/computer/zte-pod9>`_
+ -
+ -
+ - `PDF <https://git.opnfv.org/pharos/plain/labs/zte/pod9.yaml>`__
+ - `IDF <https://git.opnfv.org/pharos/plain/labs/zte/idf-pod9.yaml>`__
diff --git a/docs/ci/tables/ci-build-servers.rst b/docs/ci/tables/ci-build-servers.rst
new file mode 100644
index 000000000..b2f4893e3
--- /dev/null
+++ b/docs/ci/tables/ci-build-servers.rst
@@ -0,0 +1,31 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. SPDX-License-Identifier: CC-BY-4.0
+.. (c) Open Platform for NFV Project, Inc. and its contributors
+
+.. list-table:: CI Build Servers
+ :header-rows: 1
+ :stub-columns: 1
+
+ * - Node
+ - Architecture
+ - OS
+ - Contact
+ * - `arm-build3 <https://build.opnfv.org/ci/computer/arm-build3>`_
+ - aarch64
+ - CentOS 7.4
+ - `Armband ENEA Team`_
+ * - `arm-build4 <https://build.opnfv.org/ci/computer/arm-build4>`_
+ - aarch64
+ - Ubuntu 16.04
+ - `Armband ENEA Team`_
+ * - `lf-build5 <https://build.opnfv.org/ci/computer/lf-build5>`_
+ - x86_64
+ - Ubuntu 18.04
+ - `Linux Foundation`_
+ * - `lf-build6 <https://build.opnfv.org/ci/computer/lf-build6>`_
+ - x86_64
+ - CentOS 8
+ - `Linux Foundation`_
+
+.. _Linux Foundation: helpdesk@opnfv.org
+.. _Armband ENEA Team: armband@enea.com
diff --git a/docs/ci/tables/ci-labels.rst b/docs/ci/tables/ci-labels.rst
new file mode 100644
index 000000000..2865cc22f
--- /dev/null
+++ b/docs/ci/tables/ci-labels.rst
@@ -0,0 +1,13 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. SPDX-License-Identifier: CC-BY-4.0
+.. (c) Open Platform for NFV Project, Inc. and its contributors
+
+:ci-resource: Resource devoted to CI
+:ci-pod: POD devoted to CI
+:opnfv-build: Node is for builds - independent of OS
+:opnfv-build-centos: Node is for builds needing CentOS
+:opnfv-build-centos-arm: Node is for ARM builds on CentOS
+:opnfv-build-ubuntu: Node is for builds needing Ubuntu
+:opnfv-build-ubuntu-arm: Node is for ARM builds on Ubuntu
+:{installer}-baremetal: POD is devoted to {installer} for baremetal deployments
+:{installer}-virtual: Server is devoted to {installer} for virtual deployments
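The labels above are what job definitions reference when pinning a build to a class of node. One way this is commonly done in Jenkins Job Builder is through the job's ``node`` attribute; a minimal, hypothetical sketch (the job name and build step are illustrative only)::

    - job-template:
        name: '{project}-docs-build'
        # Restrict this job to build servers carrying the Ubuntu build label
        node: 'opnfv-build-ubuntu'
        builders:
          - shell: 'tox -e docs'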
diff --git a/docs/ci/tables/ci-virtual-servers.rst b/docs/ci/tables/ci-virtual-servers.rst
new file mode 100644
index 000000000..e87c46396
--- /dev/null
+++ b/docs/ci/tables/ci-virtual-servers.rst
@@ -0,0 +1,164 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. SPDX-License-Identifier: CC-BY-4.0
+.. (c) Open Platform for NFV Project, Inc. and its contributors
+
+.. list-table:: CI Servers for Virtual Deployment
+ :header-rows: 1
+ :stub-columns: 1
+
+ * - Node
+ - Architecture
+ - OS
+ - Contact
+ * - `arm-virtual2 <https://build.opnfv.org/ci/computer/arm-virtual2>`_
+ - aarch64
+ - Ubuntu 16.04
+ - `Armband ENEA Team`_
+ * - `arm-virtual3 <https://build.opnfv.org/ci/computer/arm-virtual3>`_
+ - aarch64
+ - Ubuntu 16.04
+ - `Xuan Jia`_
+ * - `arm-virtual4 <https://build.opnfv.org/ci/computer/arm-virtual4>`_
+ - aarch64
+ - Ubuntu 16.04
+ - `Xuan Jia`_
+ * - `ericsson-virtual-pod1bl01 <https://build.opnfv.org/ci/computer/ericsson-virtual-pod1bl01>`_
+ - x86_64
+ - CentOS 7
+ -
+ * - `ericsson-virtual1 <https://build.opnfv.org/ci/computer/ericsson-virtual1>`_
+ - x86_64
+ - Ubuntu 16.04
+ -
+ * - `ericsson-virtual2 <https://build.opnfv.org/ci/computer/ericsson-virtual2>`_
+ - x86_64
+ - Ubuntu 16.04
+ -
+ * - `ericsson-virtual3 <https://build.opnfv.org/ci/computer/ericsson-virtual3>`_
+ - x86_64
+ - Ubuntu 16.04
+ -
+ * - `ericsson-virtual4 <https://build.opnfv.org/ci/computer/ericsson-virtual4>`_
+ - x86_64
+ - Ubuntu 16.04
+ -
+ * - `ericsson-virtual5 <https://build.opnfv.org/ci/computer/ericsson-virtual5>`_
+ - x86_64
+ - Ubuntu 16.04
+ -
+ * - `huawei-virtual1 <https://build.opnfv.org/ci/computer/huawei-virtual1>`_
+ - x86_64
+ - Ubuntu 14.04
+ -
+ * - `huawei-virtual2 <https://build.opnfv.org/ci/computer/huawei-virtual2>`_
+ - x86_64
+ - Ubuntu 14.04
+ -
+ * - `huawei-virtual3 <https://build.opnfv.org/ci/computer/huawei-virtual3>`_
+ - x86_64
+ - Ubuntu 14.04
+ -
+ * - `huawei-virtual4 <https://build.opnfv.org/ci/computer/huawei-virtual4>`_
+ - x86_64
+ - Ubuntu 14.04
+ -
+ * - `huawei-virtual5 <https://build.opnfv.org/ci/computer/huawei-virtual5>`_
+ - x86_64
+ -
+ -
+ * - `huawei-virtual6 <https://build.opnfv.org/ci/computer/huawei-virtual6>`_
+ - x86_64
+ - Ubuntu 16.04
+ -
+ * - `huawei-virtual7 <https://build.opnfv.org/ci/computer/huawei-virtual7>`_
+ - x86_64
+ - Ubuntu 14.04
+ -
+ * - `huawei-virtual8 <https://build.opnfv.org/ci/computer/huawei-virtual8>`_
+ - x86_64
+ - Ubuntu 14.04
+ -
+ * - `huawei-virtual9 <https://build.opnfv.org/ci/computer/huawei-virtual9>`_
+ - x86_64
+ - Ubuntu 14.04
+ -
+ * - `intel-virtual3 <https://build.opnfv.org/ci/computer/intel-virtual3>`_
+ - x86_64
+ -
+ -
+ * - `intel-virtual11 <https://build.opnfv.org/ci/computer/intel-virtual11>`_
+ - x86_64
+ -
+ -
+ * - `intel-virtual12 <https://build.opnfv.org/ci/computer/intel-virtual12>`_
+ - x86_64
+ -
+ -
+ * - `intel-virtual13 <https://build.opnfv.org/ci/computer/intel-virtual13>`_
+ - x86_64
+ -
+ -
+ * - `intel-virtual14 <https://build.opnfv.org/ci/computer/intel-virtual14>`_
+ - x86_64
+ -
+ -
+ * - `intel-virtual15 <https://build.opnfv.org/ci/computer/intel-virtual15>`_
+ - x86_64
+ -
+ -
+ * - `intel-virtual16 <https://build.opnfv.org/ci/computer/intel-virtual16>`_
+ - x86_64
+ -
+ -
+ * - `lf-virtual1 <https://build.opnfv.org/ci/computer/lf-virtual1>`_
+ - x86_64
+ - Ubuntu 14.04
+ - `Linux Foundation`_
+ * - `lf-virtual2 <https://build.opnfv.org/ci/computer/lf-virtual2>`_
+ - x86_64
+ - CentOS 7
+ - `Linux Foundation`_
+ * - `lf-virtual3 <https://build.opnfv.org/ci/computer/lf-virtual3>`_
+ - x86_64
+ - CentOS 7
+ - `Linux Foundation`_
+ * - `ool-virtual1 <https://build.opnfv.org/ci/computer/ool-virtual1>`_
+ - x86_64
+ -
+ -
+ * - `ool-virtual2 <https://build.opnfv.org/ci/computer/ool-virtual2>`_
+ - x86_64
+ -
+ -
+ * - `ool-virtual3 <https://build.opnfv.org/ci/computer/ool-virtual3>`_
+ - x86_64
+ -
+ -
+ * - `zte-virtual1 <https://build.opnfv.org/ci/computer/zte-virtual1>`_
+ - x86_64
+ -
+ -
+ * - `zte-virtual2 <https://build.opnfv.org/ci/computer/zte-virtual2>`_
+ - x86_64
+ -
+ -
+ * - `zte-virtual3 <https://build.opnfv.org/ci/computer/zte-virtual3>`_
+ - x86_64
+ -
+ -
+ * - `zte-virtual4 <https://build.opnfv.org/ci/computer/zte-virtual4>`_
+ - x86_64
+ -
+ -
+ * - `zte-virtual5 <https://build.opnfv.org/ci/computer/zte-virtual5>`_
+ - x86_64
+ -
+ -
+ * - `zte-virtual6 <https://build.opnfv.org/ci/computer/zte-virtual6>`_
+ - x86_64
+ -
+ -
+
+.. _Armband ENEA Team: armband@enea.com
+.. _Linux Foundation: helpdesk@opnfv.org
+.. _Xuan Jia: jason.jiaxuan@gmail.com
diff --git a/docs/ci/tables/none-ci-servers.rst b/docs/ci/tables/none-ci-servers.rst
new file mode 100644
index 000000000..48cf97b50
--- /dev/null
+++ b/docs/ci/tables/none-ci-servers.rst
@@ -0,0 +1,53 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. SPDX-License-Identifier: CC-BY-4.0
+.. (c) Open Platform for NFV Project, Inc. and its contributors
+
+.. list-table:: Baremetal Development Servers
+ :header-rows: 1
+ :stub-columns: 1
+
+ * - Node
+ - Usage
+ - Jumphost OS / Version
+ - PDF
+ - IDF
+ * - cacti-pod1
+ -
+ -
+ -
+ -
+ * - cengn-pod1
+ -
+ -
+ -
+ -
+ * - itri-pod1
+ -
+ -
+ -
+ -
+ * - lf-pod4
+ -
+ -
+ -
+ -
+ * - lf-pod5
+ -
+ -
+ -
+ -
+ * - nokia-pod1
+ -
+ -
+ -
+ -
+ * - ool-pod1
+ -
+ -
+ -
+ -
+ * - bii-pod1
+ -
+ -
+ -
+ -
diff --git a/docs/ci/user-guide.rst b/docs/ci/user-guide.rst
index 9c9b74a41..a56a3a1b0 100644
--- a/docs/ci/user-guide.rst
+++ b/docs/ci/user-guide.rst
@@ -8,4 +8,102 @@
CI User Guide
=============
-TBD
+Structure of the Releng Repository
+----------------------------------
+
+jjb/<projects>
+ Individual project CI configurations.
+
+jjb/global
+ Collection of JJB defaults and templates shared by all projects.
+
+global-jjb/
+ Git submodule pointing to `Global-JJB`_, which provides a variety of
+ common `CI jobs`_ such as ReadTheDocs
+ (RTD) builds.
+
+docs/
+ This documentation.
+
+releases/
+ Release configuration files for creating stable branches and tagging
+ repositories and related automation scripts.
+
+utils/
+ Collection of common utilities used by projects.
+
+utils/build-server-ansible
+ Ansible configuration for managing build servers. This is where
+ projects can add packages they need for their CI to the servers.
+
+
+CI Setup
+--------
+
+Basic Setup
+~~~~~~~~~~~
+
+All projects are required to have a **+1 Verified** vote in Gerrit in
+order to merge their code. Since a new project may not yet know how it
+wants to set up CI, it can pass this validation by configuring a
+'no-op' job to run against its changesets.
+
+1. Clone the `Releng`_ repository, using the *Clone with commit-msg
+ hook* command under the *SSH* tab (displayed after logging in and
+ uploading an SSH key):
+
+ .. note::
+ <gerrit username> in the command below will be your username in
+ Gerrit when viewing the command on the website.
+
+ For example::
+
+ git clone "ssh://<gerrit username>@gerrit.opnfv.org:29418/releng" && \
+ scp -p -P 29418 <gerrit username>@gerrit.opnfv.org:hooks/commit-msg "releng/.git/hooks/"
+
+
+2. Create a project directory under the *jjb/* directory, and an initial
+ project YAML file::
+
+ mkdir jjb/myproject
+ touch jjb/myproject/myproject-ci-jobs.yaml
+
+3. Modify the project YAML file to add the basic validation job::
+
+ $EDITOR jjb/myproject/myproject-ci-jobs.yaml
+
+ ::
+
+ ---
+ - project:
+ name: myproject
+ project:
+ - '{name}'
+ jobs:
+ - '{project}-verify-basic'
+
+Docker Builds
+~~~~~~~~~~~~~
+
+Docker builds are managed through the **jjb/releng/opnfv-docker.yaml**
+file. Modify this file with your project details to enable docker builds
+on merges and tags to your project repository::
+
+ ---
+ - project:
+ name: opnfv-docker
+
+ [...]
+
+ dockerrepo:
+ [...]
+ - 'myproject':
+ project: 'myproject'
+ <<: *master
+
+.. _Jenkins Job Builder: https://docs.openstack.org/infra/jenkins-job-builder/
+.. _Releng: https://gerrit.opnfv.org/gerrit/admin/repos/releng
+.. _Global-JJB: https://docs.releng.linuxfoundation.org/projects/global-jjb/en/latest/index.html
+.. _CI jobs: https://docs.releng.linuxfoundation.org/projects/global-jjb/en/latest/index.html#global-jjb-templates
+.. _opnfvdocs: https://docs.opnfv.org/en/latest/how-to-use-docs/index.html
+.. _support.linuxfoundation.org: https://jira.linuxfoundation.org/plugins/servlet/theme/portal/2/create/145
diff --git a/docs/conf.py b/docs/conf.py
index 86ab8c577..6cfaf6985 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1 +1,10 @@
-from docs_conf.conf import * # flake8: noqa
+project = 'Releng'
+extensions = [
+ 'sphinx.ext.autosectionlabel'
+]
+html_theme = "piccolo_theme"
+autosectionlabel_prefix_document = True
+autosectionlabel_maxdepth = 4
+numfig = True
+numfig_format = {'figure': 'Figure %s', 'table': 'Table %s',
+ 'code-block': 'Listing %s', 'section': 'Section %s'}
diff --git a/docs/conf.yaml b/docs/conf.yaml
deleted file mode 100644
index 749a4b1cf..000000000
--- a/docs/conf.yaml
+++ /dev/null
@@ -1,3 +0,0 @@
----
-project_cfg: opnfv
-project: releng
diff --git a/docs/infra/jenkins/connect-to-jenkins.rst b/docs/infra/jenkins/connect-to-jenkins.rst
index ddf345fa3..8c5218659 100644
--- a/docs/infra/jenkins/connect-to-jenkins.rst
+++ b/docs/infra/jenkins/connect-to-jenkins.rst
@@ -111,86 +111,108 @@ Servers connecting to OPNFV Jenkins using this method must have access to intern
Please follow the steps below to connect a slave to OPNFV Jenkins.
- 1. Create a user named **jenkins** on the machine you want to connect to OPNFV Jenkins and give the user sudo rights.
- 2. Install needed software on the machine you want to connect to OPNFV Jenkins as slave.
+#. Create a user named **jenkins** on the machine you want to connect to
+ OPNFV Jenkins and give the user sudo rights.
- - openjdk 8
- - monit
+#. Install the needed software on the machine you want to connect to OPNFV
+ Jenkins as a slave.
- 3. If the slave will be used for running virtual deployments, Functest, and Yardstick, install below software and make jenkins user the member of the groups.
+ - openjdk 8
+ - monit
- - docker
- - libvirt
+#. If the slave will be used for running virtual deployments, Functest,
+ and Yardstick, install the software below and make the jenkins user
+ a member of the corresponding groups.
- 4. Create slave root in Jenkins user home directory.
+ - docker
+ - libvirt
- ``mkdir -p /home/jenkins/opnfv/slave_root``
+#. Create slave root in Jenkins user home directory.
- 5. Clone OPNFV Releng Git repository.
+ ``mkdir -p /home/jenkins/opnfv/slave_root``
- ``mkdir -p /home/jenkins/opnfv/repos``
+5. Clone the OPNFV Releng Git repository.
- ``cd /home/jenkins/opnfv/repos``
+ .. code::
- ``git clone https://gerrit.opnfv.org/gerrit/p/releng.git``
+ mkdir -p /home/jenkins/opnfv/repos
+ cd /home/jenkins/opnfv/repos
+ git clone https://gerrit.opnfv.org/gerrit/p/releng.git
- 6. Contact LF by sending mail to `OPNFV LF Helpdesk <opnfv-helpdesk@rt.linuxfoundation.org>`_ and request creation of a slave on OPNFV Jenkins. Include below information in your mail.
+#. Contact LF by creating a `Connect my 3rd party CI/Lab
+ <https://jira.linuxfoundation.org/servicedesk/customer/portal/2/create/135>`_
+ ticket. Include the following information in your ticket.
- - Slave root (/home/jenkins/opnfv/slave_root)
- - Public IP of the slave (You can get the IP by executing ``curl http://icanhazip.com/``)
- - PGP Key (attached to the mail or exported to a key server)
+ - Slave root (/home/jenkins/opnfv/slave_root)
+ - Public IP of the slave (You can get the IP by executing ``curl http://icanhazip.com/``)
+ - PGP Key (attached to the mail or exported to a key server)
- 7. Once you get confirmation from LF stating that your slave is created on OPNFV Jenkins, check if the firewall on LF is open for the server you are trying to connect to Jenkins.
+#. Once you get confirmation from LF stating that your slave is created
+ on OPNFV Jenkins, check if the firewall on LF is open for the server
+ you are trying to connect to Jenkins.
- ``cp /home/jenkins/opnfv/repos/releng/utils/jenkins-jnlp-connect.sh /home/jenkins/``
- ``cd /home/jenkins/``
- ``sudo ./jenkins-jnlp-connect.sh -j /home/jenkins -u jenkins -n <slave name on OPNFV Jenkins> -s <the token you received from LF> -f``
+ .. code::
- - If you receive an error, follow the steps listed on the command output.
+ cp /home/jenkins/opnfv/repos/releng/utils/jenkins-jnlp-connect.sh /home/jenkins/
+ cd /home/jenkins/
+ sudo ./jenkins-jnlp-connect.sh -j /home/jenkins -u jenkins -n <slave name on OPNFV Jenkins> -s <the token you received from LF> -f
- 8. Run the same script with test(-t) on foreground in order to make sure no problem on connection. You should see **INFO: Connected** in the console log.
+ - If you receive an error, follow the steps listed on the command output.
- ``sudo ./jenkins-jnlp-connect.sh -j /home/jenkins -u jenkins -n <slave name on OPNFV Jenkins> -s <the token you received from LF> -t``
+#. Run the same script with the test flag (-t) in the foreground to
+ make sure there is no problem with the connection. You should see
+ **INFO: Connected** in the console log.
- - If you receive an error similar to the one shown `on this link <http://hastebin.com/ozadagirax.avrasm>`_, you need to check your firewall and allow outgoing connections for the port.
+ ``sudo ./jenkins-jnlp-connect.sh -j /home/jenkins -u jenkins -n <slave name on OPNFV Jenkins> -s <the token you received from LF> -t``
- 9. Kill the Java slave.jar process.
- 10. Run the same script normally without test(-t) in order to get monit script created.
+ - If you receive an error similar to the one shown `on this link
+ <http://hastebin.com/ozadagirax.avrasm>`_, you need to check your
+ firewall and allow outgoing connections for the port.
- ``sudo ./jenkins-jnlp-connect.sh -j /home/jenkins -u jenkins -n <slave name on OPNFV Jenkins> -s <the token you received from LF>``
+#. Kill the Java slave.jar process.
- 11. Edit monit configuration and enable http interface. The file to edit is /etc/monit/monitrc on Ubuntu systems. Uncomment below lines.
+#. Run the same script normally, without the test flag (-t), in order
+ to get the monit script created.
- set httpd port 2812 and
- use address localhost # only accept connection from localhost
- allow localhost # allow localhost to connect to the server and
+ ``sudo ./jenkins-jnlp-connect.sh -j /home/jenkins -u jenkins -n <slave name on OPNFV Jenkins> -s <the token you received from LF>``
- 12. Restart monit service.
+#. Edit the monit configuration and enable the HTTP interface. The file
+ to edit is /etc/monit/monitrc on Ubuntu systems. Uncomment the lines below.
- - Without systemd:
+ .. code::
- ``sudo service monit restart``
+ set httpd port 2812 and
+ use address localhost # only accept connection from localhost
+ allow localhost # allow localhost to connect to the server and
- - With systemd: you have to enable monit service first and then restart it.
+#. Restart monit service.
- ``sudo systemctl enable monit``
+ - Without systemd:
- ``sudo systemctl restart monit``
+ ``sudo service monit restart``
- 13. Check to see if jenkins comes up as managed service in monit.
+ - With systemd: you have to enable monit service first and then restart it.
- ``sudo monit status``
+ .. code::
- 14. Connect slave to OPNFV Jenkins using monit.
+ sudo systemctl enable monit
+ sudo systemctl restart monit
- ``sudo monit start jenkins``
+#. Check to see if jenkins comes up as a managed service in monit.
- 15. Check slave on OPNFV Jenkins to verify the slave is reported as connected.
+ ``sudo monit status``
- - The slave on OPNFV Jenkins should have some executors in “Idle” state if the connection is successful.
+#. Connect slave to OPNFV Jenkins using monit.
+
+ ``sudo monit start jenkins``
+
+#. Check the slave on OPNFV Jenkins to verify the slave is reported as connected.
+
+ - The slave on OPNFV Jenkins should have some executors in “Idle”
+ state if the connection is successful.
Notes
-==========
+=====
PGP Key Instructions
--------------------
@@ -198,12 +220,12 @@ PGP Key Instructions
Public PGP Key can be uploaded to public key server so it can be taken from
there using your mail address. Example command to upload the key to key server is
- ``gpg --keyserver hkp://keys.gnupg.net:80 --send-keys XXXXXXX``
+``gpg --keyserver hkp://keys.gnupg.net:80 --send-keys XXXXXXX``
The Public PGP Key can also be attached to the email by storing the key in a file and then
attaching it to the email.
- ``gpg --export -a '<your email address>' > pgp.pubkey``
+``gpg --export -a '<your email address>' > pgp.pubkey``
References
==========
diff --git a/docs/requirements.txt b/docs/requirements.txt
deleted file mode 100644
index f26b04141..000000000
--- a/docs/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-lfdocs-conf
-sphinxcontrib-httpdomain
-sphinx-opnfv-theme
diff --git a/gitlab-templates/Docker.gitlab-ci.yml b/gitlab-templates/Docker.gitlab-ci.yml
new file mode 100644
index 000000000..8acb5a00e
--- /dev/null
+++ b/gitlab-templates/Docker.gitlab-ci.yml
@@ -0,0 +1,70 @@
+# Build and push a Docker image with CI/CD.
+# Docker-in-Docker documentation: https://docs.gitlab.com/ee/ci/docker/using_docker_build.html
+#
+# By default builds are tagged with their branch name and pushed to the
+# Gitlab Docker Registry. If DOCKER_TAG_LATEST is set to true, builds on
+# the $DOCKER_LATEST_BRANCH are also tagged and pushed as ":latest"
+#
+# Scheduled builds can be enabled on a Gitlab schedule by specifying
+# DOCKER_SCHEDULE = "true" in variables
+---
+variables:
+ # Docker registry where images will be pushed
+ DOCKER_REGISTRY: "$CI_REGISTRY"
+ DOCKER_USERNAME: "$CI_REGISTRY_USER"
+ DOCKER_TOKEN: "$CI_REGISTRY_PASSWORD"
+ # Whether or not to push images after they're built
+ DOCKER_PUSH: "true"
+ # TODO: Conditionally include '--file' to docker build to reduce need
+ # to always define FILEPATH when BUILDCONTEXT is set
+ DOCKER_FILEPATH: "Dockerfile"
+ DOCKER_BUILDCONTEXT: "."
+ DOCKER_IMAGE: "$CI_REGISTRY_IMAGE"
+ # If DOCKER_LATEST_TAG is set to true, builds on the $DOCKER_LATEST_BRANCH
+ # will be tagged and pushed with ":latest"
+ DOCKER_LATEST_TAG: "true"
+ DOCKER_LATEST_BRANCH: "$CI_DEFAULT_BRANCH"
+
+.docker-build-and-push: &docker-build-and-push
+ image: docker:latest
+ stage: deploy
+ interruptible: true
+ services:
+ - docker:dind
+ before_script:
+ - docker login -u "$DOCKER_USERNAME" -p "$DOCKER_TOKEN" $DOCKER_REGISTRY
+ script:
+ # Warm the cache by fetching the latest image. There's no guarantee
+ # the image will already exist on the runner.
+ - docker pull "$DOCKER_IMAGE:${CI_COMMIT_REF_SLUG}" || true
+ - >
+ docker build
+ --pull
+ --cache-from "$DOCKER_IMAGE:${CI_COMMIT_REF_SLUG}"
+ --file "$DOCKER_FILEPATH"
+ --tag "$DOCKER_IMAGE:${CI_COMMIT_REF_SLUG}"
+ $DOCKER_BUILDCONTEXT
+ - |
+ if [[ "$CI_COMMIT_BRANCH" == "$DOCKER_LATEST_BRANCH" && "$DOCKER_LATEST_TAG" == "true" ]]; then
+ docker tag "$DOCKER_IMAGE:${CI_COMMIT_REF_SLUG}" "$DOCKER_IMAGE"
+ fi
+ - |
+ # Push docker images if DOCKER_PUSH is set
+ if [[ "$DOCKER_PUSH" == "true" ]]; then
+ docker push "$DOCKER_IMAGE:${CI_COMMIT_REF_SLUG}"
+ # Push ':latest' if DOCKER_LATEST_TAG is true
+ if [[ "$CI_COMMIT_BRANCH" == "$DOCKER_LATEST_BRANCH" && "$DOCKER_LATEST_TAG" == "true" ]]; then
+ docker push "$DOCKER_IMAGE"
+ fi
+ fi
+ rules:
+ - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+ when: never
+ # Gitlab does not have a way of specifying which jobs are scheduled,
+ # so an extra variable is needed in order to signify that the docker
+ # build should be picked up by the scheduled run.
+ - if: $CI_PIPELINE_SOURCE == "schedule" && $DOCKER_SCHEDULE != "true"
+ when: never
+ - if: '$CI_COMMIT_BRANCH == $DOCKER_LATEST_BRANCH'
+ - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+ - if: $CI_COMMIT_TAG
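The template above only defines a hidden job, so a consuming project has to include the file and build a concrete job on top of it with ``extends`` (YAML anchors such as ``*docker-build-and-push`` do not cross ``include`` boundaries in GitLab CI). A hedged sketch of a project-side .gitlab-ci.yml; the include path follows the pattern documented in the GoogleStorage template below, and the Dockerfile location is an assumed project layout::

    include:
      - project: anuket/releng
        file: '/gitlab-templates/Docker.gitlab-ci.yml'

    # Concrete job built on the hidden template job
    docker-build-and-push:
      extends: .docker-build-and-push
      variables:
        # Assumed location of this project's Dockerfile and build context
        DOCKER_FILEPATH: "docker/Dockerfile"
        DOCKER_BUILDCONTEXT: "docker"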
diff --git a/gitlab-templates/GoogleStorage.gitlab-ci.yml b/gitlab-templates/GoogleStorage.gitlab-ci.yml
new file mode 100644
index 000000000..4ddf313d6
--- /dev/null
+++ b/gitlab-templates/GoogleStorage.gitlab-ci.yml
@@ -0,0 +1,35 @@
+# Template for uploading artifacts to Google Storage
+#
+# To upload artifacts to Google Storage, include this file in your
+# .gitlab-ci.yml file with the following stanza:
+#
+# include:
+# - project: anuket/releng
+# file: '/gitlab-templates/GoogleStorage.gitlab-ci.yml'
+#
+# And append the following "- !reference.." line to the script portion
+# of a job where artifacts should be uploaded:
+#
+# script:
+# ...
+# - !reference [.gsutil-install, script]
+# ...
+#
+# After the script has been included `gsutil` will have access to the
+# necessary Google Storage bucket.
+---
+variables:
+ GS_URL: "artifacts.opnfv.org/$CI_PROJECT_NAME"
+ WORKSPACE: $CI_PROJECT_DIR
+
+.gsutil-install: &gsutil-install
+ script:
+ - |
+ if command -v dnf &> /dev/null; then
+ dnf -y install python3-pip
+ else
+ yum -y install python3-pip
+ fi
+ - python3 -m pip install -U pip
+ - python3 -m pip install gsutil
+ - echo "$GSUTIL_CONFIG" > ~/.boto
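Turning the include/!reference instructions from the file header into a concrete form, a hypothetical upload job could look as follows (the job name, image, and artifact path are illustrative; GS_URL and the hidden .gsutil-install job come from the template itself)::

    include:
      - project: anuket/releng
        file: '/gitlab-templates/GoogleStorage.gitlab-ci.yml'

    upload-artifacts:
      stage: deploy
      # Assumed image providing dnf/yum, which the install snippet expects
      image: centos:8
      script:
        # Installs gsutil and writes ~/.boto from $GSUTIL_CONFIG
        - !reference [.gsutil-install, script]
        # Copy a build artifact into the project's bucket path
        - gsutil cp artifact.tar.gz "gs://$GS_URL/artifact.tar.gz"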
diff --git a/gitlab-templates/RTD.gitlab-ci.yml b/gitlab-templates/RTD.gitlab-ci.yml
new file mode 100644
index 000000000..59b455d69
--- /dev/null
+++ b/gitlab-templates/RTD.gitlab-ci.yml
@@ -0,0 +1,104 @@
+# ReadTheDocs Workflow
+#
+# This workflow adds these builds to projects:
+#
+# docs-build:
+# Generate a html sphinx-build from the $DOCS_DIRECTORY
+#
+# docs-link-check:
+# Run a non-blocking sphinx-build linkcheck against
+# the $DOCS_DIRECTORY
+#
+# pages:
+# Serve the built documentation as the Gitlab Pages site for
+# the project
+#
+# Both docs-build and docs-link-check run on merge requests and merges
+# to the default branch that modify files under the $DOCS_DIRECTORY,
+# while pages only runs on merges.
+#
+# Scheduled builds can be enabled when creating a pipeline schedule and
+# specifying DOCS_SCHEDULE = "true" in build variables
+#
+# If extra dependencies are needed for builds they will be installed
+# from the $DOCS_REQUIREMENTS location.
+---
+variables:
+ PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
+ DOCS_DIRECTORY: "docs"
+ DOCS_REQUIREMENTS: "$DOCS_DIRECTORY/requirements.txt"
+ STABLE_BRANCH: "stable/*"
+
+.docs-cache: &docs-cache
+ paths:
+ - .cache/pip
+ - venv/
+
+.docs-before-script: &docs-before-script
+ - python -V
+ - pip install virtualenv
+ - virtualenv venv
+ - source venv/bin/activate
+ - pip install Sphinx
+ - |
+ if [ -f "$DOCS_REQUIREMENTS" ]; then
+ pip install -r "$DOCS_REQUIREMENTS"
+ fi
+
+docs-build:
+ stage: build
+ image: python:3
+ before_script:
+ - *docs-before-script
+ script: |
+ sphinx-build -T -b html -D language=en $DOCS_DIRECTORY _build/html
+ cache: *docs-cache
+ artifacts:
+ paths:
+ - _build/html
+ rules:
+ - if: $CI_PIPELINE_SOURCE == "schedule" && $DOCS_SCHEDULE != "true"
+ when: never
+ - if: $CI_PIPELINE_SOURCE == "merge_request_event"
+ changes:
+ - $DOCS_DIRECTORY/**/*
+ - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+ - if: $CI_COMMIT_BRANCH == $STABLE_BRANCH
+
+docs-link-check:
+ stage: test
+ allow_failure: true
+ needs: []
+ image: python:3
+ before_script:
+ - *docs-before-script
+ script: |
+ sphinx-build -T -b linkcheck $DOCS_DIRECTORY _build/linkcheck
+ cache: *docs-cache
+ artifacts:
+ paths:
+ - _build/linkcheck
+ rules:
+ - if: $CI_PIPELINE_SOURCE == "schedule" && $DOCS_SCHEDULE != "true"
+ when: never
+ - if: $CI_PIPELINE_SOURCE == "merge_request_event"
+ changes:
+ - $DOCS_DIRECTORY/**/*
+ - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+ - if: $CI_COMMIT_BRANCH == $STABLE_BRANCH
+
+pages:
+ stage: deploy
+ image: python:3
+ script: |
+ mkdir public
+ mv _build/html/* public/
+ artifacts:
+ paths:
+ - public
+ rules:
+ - if: $CI_PIPELINE_SOURCE == "schedule"
+ when: never
+ - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+ changes:
+ - $DOCS_DIRECTORY/**/*
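Unlike the Docker template, the RTD template defines complete jobs, so a consuming project normally only needs the include plus any variable overrides. A hedged sketch of such a .gitlab-ci.yml; the include path mirrors the pattern shown in the GoogleStorage template above, and the overridden values are examples only::

    include:
      - project: anuket/releng
        file: '/gitlab-templates/RTD.gitlab-ci.yml'

    variables:
      # Only needed when the documentation does not live under 'docs'
      DOCS_DIRECTORY: "documentation"
      DOCS_REQUIREMENTS: "documentation/requirements.txt"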
diff --git a/jjb/3rd_party_ci/create-apex-vms.sh b/jjb/3rd_party_ci/create-apex-vms.sh
deleted file mode 100755
index 0744ac89a..000000000
--- a/jjb/3rd_party_ci/create-apex-vms.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# clone opnfv sdnvpn repo
-git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
-
-. $WORKSPACE/sdnvpn/odl-pipeline/odl-pipeline-common.sh
-pushd $LIB
-./test_environment.sh --env-number $APEX_ENV_NUMBER --cloner-info $CLONER_INFO --snapshot-disks $SNAPSHOT_DISKS --vjump-hosts $VIRTUAL_JUMPHOSTS
-popd
diff --git a/jjb/3rd_party_ci/detect-snapshot.sh b/jjb/3rd_party_ci/detect-snapshot.sh
deleted file mode 100755
index 46d4dfa2d..000000000
--- a/jjb/3rd_party_ci/detect-snapshot.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-echo "Detecting requested OpenStack branch and topology type in gerrit comment"
-parsed_comment=$(echo $GERRIT_EVENT_COMMENT_TEXT | sed -n 's/^opnfv-check\s*//p')
-parsed_comment=$(echo $parsed_comment | sed -n 's/\s*$//p')
-if [ ! -z "$parsed_comment" ]; then
- if echo $parsed_comment | grep -E '^[a-z]+-(no)?ha'; then
- IFS='-' read -r -a array <<< "$parsed_comment"
- os_version=${array[0]}
- topo=${array[1]}
- echo "OS version detected in gerrit comment: ${os_version}"
- echo "Topology type detected in gerrit comment: ${topo}"
- else
- echo "Invalid format given for scenario in gerrit comment: ${parsed_comment}...aborting"
- exit 1
- fi
-else
- echo "No scenario given in gerrit comment, will use default (master OpenStack, noha)"
- os_version='master'
- topo='noha'
-fi
-
-echo "Writing variables to file"
-echo > detected_snapshot << EOI
-OS_VERSION=$os_version
-TOPOLOGY=$topo
-SNAP_CACHE=$HOME/snap_cache/$os_version/$topo
-EOI
diff --git a/jjb/3rd_party_ci/download-netvirt-artifact.sh b/jjb/3rd_party_ci/download-netvirt-artifact.sh
deleted file mode 100755
index b1f977a6f..000000000
--- a/jjb/3rd_party_ci/download-netvirt-artifact.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-echo "Attempting to fetch the artifact location from ODL Jenkins"
-if [ "$ODL_BRANCH" != 'master' ]; then
- DIST=$(echo ${ODL_BRANCH} | sed -rn 's#([a-zA-Z]+)/([a-zA-Z]+)#\2#p')
- ODL_BRANCH=$(echo ${ODL_BRANCH} | sed -rn 's#([a-zA-Z]+)/([a-zA-Z]+)#\1%2F\2#p')
-else
- DIST='fluorine'
-fi
-
-echo "ODL Distribution is ${DIST}"
-ODL_ZIP="karaf-SNAPSHOT.zip"
-CHANGE_DETAILS_URL="https://git.opendaylight.org/gerrit/changes/netvirt~${ODL_BRANCH}~${GERRIT_CHANGE_ID}/detail"
-# due to limitation with the Jenkins Gerrit Trigger, we need to use Gerrit REST API to get the change details
-ODL_BUILD_JOB_NUM=$(curl --fail ${CHANGE_DETAILS_URL} | grep -Eo "netvirt-distribution-check-${DIST}/[0-9]+" | tail -1 | grep -Eo [0-9]+)
-DISTRO_CHECK_CONSOLE_LOG="https://logs.opendaylight.org/releng/vex-yul-odl-jenkins-1/netvirt-distribution-check-${DIST}/${ODL_BUILD_JOB_NUM}/console.log.gz"
-NETVIRT_ARTIFACT_URL=$(curl --fail --compressed ${DISTRO_CHECK_CONSOLE_LOG} | grep 'BUNDLE_URL' | cut -d = -f 2)
-
-echo -e "URL to artifact is\n\t$NETVIRT_ARTIFACT_URL"
-
-echo "Downloading the artifact. This could take time..."
-wget -q -O $ODL_ZIP $NETVIRT_ARTIFACT_URL
-if [[ $? -ne 0 ]]; then
- echo "The artifact does not exist! Probably removed due to ODL Jenkins artifact retention policy."
- echo "Rerun netvirt-patch-test-current-carbon to get artifact rebuilt."
- exit 1
-fi
-
-#TODO(trozet) remove this once odl-pipeline accepts zip files
-echo "Converting artifact zip to tar.gz"
-UNZIPPED_DIR=`dirname $(unzip -qql ${ODL_ZIP} | head -n1 | tr -s ' ' | cut -d' ' -f5-)`
-unzip ${ODL_ZIP}
-tar czf /tmp/${NETVIRT_ARTIFACT} ${UNZIPPED_DIR}
-
-echo "Download complete"
-ls -al /tmp/${NETVIRT_ARTIFACT}
diff --git a/jjb/3rd_party_ci/install-netvirt.sh b/jjb/3rd_party_ci/install-netvirt.sh
deleted file mode 100755
index 232d60e5c..000000000
--- a/jjb/3rd_party_ci/install-netvirt.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-SNAP_CACHE=$HOME/snap_cache/$OS_VERSION/$TOPOLOGY
-# clone opnfv sdnvpn repo
-git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
-
-if [ ! -f "/tmp/${NETVIRT_ARTIFACT}" ]; then
- echo "ERROR: /tmp/${NETVIRT_ARTIFACT} specified as NetVirt Artifact, but file does not exist"
- exit 1
-fi
-
-if [ ! -f "${SNAP_CACHE}/node.yaml" ]; then
- echo "ERROR: node.yaml pod config missing in ${SNAP_CACHE}"
- exit 1
-fi
-
-if [ ! -f "${SNAP_CACHE}/id_rsa" ]; then
- echo "ERROR: id_rsa ssh creds missing in ${SNAP_CACHE}"
- exit 1
-fi
-
-# TODO (trozet) snapshot should have already been unpacked into cache folder
-# but we really should check the cache here, and not use a single cache folder
-# for when we support multiple jobs on a single slave
-pushd sdnvpn/odl-pipeline/lib > /dev/null
-git fetch https://gerrit.opnfv.org/gerrit/sdnvpn refs/changes/17/59017/5 && git checkout FETCH_HEAD
-./odl_reinstaller.sh --pod-config ${SNAP_CACHE}/node.yaml \
- --odl-artifact /tmp/${NETVIRT_ARTIFACT} --ssh-key-file ${SNAP_CACHE}/id_rsa
-popd > /dev/null
diff --git a/jjb/3rd_party_ci/odl-netvirt.yaml b/jjb/3rd_party_ci/odl-netvirt.yaml
deleted file mode 100644
index 77263d0a7..000000000
--- a/jjb/3rd_party_ci/odl-netvirt.yaml
+++ /dev/null
@@ -1,278 +0,0 @@
----
-- project:
- name: 'netvirt'
-
- project: 'netvirt'
-
- installer: 'netvirt'
- #####################################
- # branch definitions
- #####################################
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - oxygen:
- branch: 'stable/oxygen'
- gs-pathname: ''
- disabled: false
- #####################################
- # patch verification phases
- #####################################
- phase:
- - 'create-apex-vms':
- slave-label: 'apex-virtual-master'
- - 'install-netvirt':
- slave-label: 'apex-virtual-master'
- - 'postprocess':
- slave-label: 'apex-virtual-master'
- #####################################
- # jobs
- #####################################
- jobs:
- - 'odl-netvirt-verify-virtual-{stream}'
- - 'odl-netvirt-verify-virtual-{phase}-{stream}'
-
-#####################################
-# job templates
-#####################################
-- job-template:
- name: 'odl-netvirt-verify-virtual-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 5
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-.*-promote.*'
- - 'apex-virtual.*'
- - 'odl-netvirt-verify-virtual-create-apex-vms-.*'
- - 'odl-netvirt-verify-virtual-install-netvirt-.*'
- - 'functest-netvirt-virtual-suite-.*'
- - 'odl-netvirt-verify-virtual-postprocess-.*'
- block-level: 'NODE'
- scm:
- - git:
- url: https://gerrit.opnfv.org/gerrit/apex
- branches:
- - 'origin/master'
- timeout: 15
- wipe-workspace: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: NETVIRT_ARTIFACT
- default: distribution-karaf.tar.gz
- - 'apex-virtual-master-defaults'
-
- triggers:
- - gerrit:
- server-name: 'git.opendaylight.org'
- trigger-on:
- # yamllint disable rule:line-length
- # - comment-added-contains-event:
- # comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : SUCCESS'
- # - comment-added-contains-event:
- # comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : UNSTABLE'
- # yamllint enable rule:line-length
- - comment-added-contains-event:
- comment-contains-value: 'check-opnfv'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- readable-message: true
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - detect-opnfv-snapshot
- - inject:
- properties-file: detected_snapshot
- - multijob:
- name: create-apex-vms
- condition: SUCCESSFUL
- projects:
- - name: 'odl-netvirt-verify-virtual-create-apex-vms-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
- GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER
- GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
- NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
- APEX_ENV_NUMBER=$APEX_ENV_NUMBER
- GERRIT_EVENT_COMMENT_TEXT=$GERRIT_EVENT_COMMENT_TEXT
- TOPOLOGY=$TOPOLOGY
- OS_VERSION=$OS_VERSION
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: install-netvirt
- condition: SUCCESSFUL
- projects:
- - name: 'odl-netvirt-verify-virtual-install-netvirt-{stream}'
- current-parameters: false
- predefined-parameters: |
- ODL_BRANCH=$BRANCH
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
- GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER
- GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
- NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
- TOPOLOGY=$TOPOLOGY
- OS_VERSION=$OS_VERSION
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: functest
- condition: SUCCESSFUL
- projects:
- - name: 'functest-netvirt-virtual-suite-master'
- predefined-parameters: |
- DEPLOY_SCENARIO=os-odl-nofeature-$TOPOLOGY
- FUNCTEST_MODE=testcase
- FUNCTEST_SUITE_NAME=tempest_smoke
- RC_FILE_PATH=$HOME/cloner-info/overcloudrc
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: false
- - multijob:
- name: csit
- condition: ALWAYS
- projects:
- - name: cperf-apex-csit-{stream}
- predefined-parameters: |
- ODL_BRANCH=$BRANCH
- RC_FILE_PATH=$SNAP_CACHE/overcloudrc
- NODE_FILE_PATH=$SNAP_CACHE/node.yaml
- SSH_KEY_PATH=$SNAP_CACHE/id_rsa
- ODL_CONTAINERIZED=false
- OS_VERSION=$OS_VERSION
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: false
- - multijob:
- name: postprocess
- condition: ALWAYS
- projects:
- - name: 'odl-netvirt-verify-virtual-postprocess-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
- GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER
- GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
- NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: false
-
-- job-template:
- name: 'odl-netvirt-verify-virtual-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 5
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'odl-netvirt-verify-virtual-create-apex-vms-.*'
- - 'odl-netvirt-verify-virtual-install-netvirt-.*'
- - 'functest-netvirt-virtual-suite-.*'
- - 'odl-netvirt-verify-virtual-postprocess-.*'
- block-level: 'NODE'
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- scm:
- - git:
- url: https://gerrit.opnfv.org/gerrit/apex
- branches:
- - 'origin/master'
- timeout: 15
- wipe-workspace: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-defaults'
- - '{installer}-defaults'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl-nofeature-noha'
- description: 'Scenario to deploy and test'
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/apex
- description: "URL to Google Storage with snapshot artifacts."
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-verify-{phase}-builder'
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'netvirt-verify-create-apex-vms-builder'
- builders:
- - shell:
- !include-raw: ../apex/apex-snapshot-deploy.sh
-- builder:
- name: 'netvirt-verify-install-netvirt-builder'
- builders:
- - shell:
- !include-raw: ./download-netvirt-artifact.sh
- - shell:
- !include-raw: ./install-netvirt.sh
-- builder:
- name: 'netvirt-verify-postprocess-builder'
- builders:
- - shell:
- !include-raw: ./postprocess-netvirt.sh
-
-- builder:
- name: 'detect-opnfv-snapshot'
- builders:
- - shell:
- !include-raw-escape: ./detect-snapshot.sh
diff --git a/jjb/3rd_party_ci/postprocess-netvirt.sh b/jjb/3rd_party_ci/postprocess-netvirt.sh
deleted file mode 100755
index 796514259..000000000
--- a/jjb/3rd_party_ci/postprocess-netvirt.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# clone opnfv sdnvpn repo
-git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
-. $WORKSPACE/sdnvpn/odl-pipeline/odl-pipeline-common.sh
-pushd $LIB
-./post_process.sh
-popd
diff --git a/jjb/apex/apex-build.sh b/jjb/apex/apex-build.sh
deleted file mode 100755
index 09aa716be..000000000
--- a/jjb/apex/apex-build.sh
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-# log info to console
-echo "Starting the build of Apex using OpenStack Master packages. This will take some time..."
-echo "---------------------------------------------------------------------------------------"
-echo
-# create the cache directory if it doesn't exist
-[[ -d $CACHE_DIRECTORY ]] || mkdir -p $CACHE_DIRECTORY
-# set OPNFV_ARTIFACT_VERSION
-if echo $ARTIFACT_VERSION | grep "dev" 1> /dev/null; then
- GERRIT_PATCHSET_NUMBER=$(echo $GERRIT_REFSPEC | grep -Eo '[0-9]+$')
- export OPNFV_ARTIFACT_VERSION="dev${GERRIT_CHANGE_NUMBER}_${GERRIT_PATCHSET_NUMBER}"
- if [ "$BRANCH" == 'master' ]; then
- # build rpm
- export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --rpms"
- else
- export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY"
- fi
-elif echo $BUILD_TAG | grep "csit" 1> /dev/null; then
- export OPNFV_ARTIFACT_VERSION=csit${BUILD_NUMBER}
- export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY"
-elif [ "$ARTIFACT_VERSION" == "daily" ]; then
- export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d")
- if [ "$BRANCH" == 'master' ]; then
- export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --rpms"
- else
- export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --iso"
- fi
-else
- export OPNFV_ARTIFACT_VERSION=${ARTIFACT_VERSION}
- if [ "$BRANCH" == 'master' ]; then
- export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --rpms"
- else
- export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --iso"
- fi
-fi
-
-# Temporary hack until we fix apex build script
-BUILD_DIRECTORY=${WORKSPACE}/build
-
-# start the build
-pushd ${BUILD_DIRECTORY}
-make clean
-popd
-export PYTHONPATH=${WORKSPACE}
-python3 apex/build.py $BUILD_ARGS
-RPM_VERSION=$(grep Version: $WORKSPACE/build/rpm_specs/opnfv-apex.spec | awk '{ print $2 }')-$(echo $OPNFV_ARTIFACT_VERSION | tr -d '_-')
-# list the contents of BUILD_OUTPUT directory
-echo "Build Directory is ${BUILD_DIRECTORY}/../.build"
-echo "Build Directory Contents:"
-echo "-------------------------"
-ls -al ${BUILD_DIRECTORY}/../.build
-
-# list the contents of CACHE directory
-echo "Cache Directory is ${CACHE_DIRECTORY}"
-echo "Cache Directory Contents:"
-echo "-------------------------"
-ls -al $CACHE_DIRECTORY
-
-if [[ "$BUILD_ARGS" =~ '--iso' && "$BRANCH" != 'master' ]]; then
- mkdir -p /tmp/apex-iso/
- rm -f /tmp/apex-iso/*.iso
- cp -f $BUILD_DIRECTORY/../.build/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso /tmp/apex-iso/
-fi
-
-if ! echo $ARTIFACT_VERSION | grep "dev" 1> /dev/null; then
- echo "Writing opnfv.properties file"
- if [ "$BRANCH" != master ]; then
- # save information regarding artifact into file
- (
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
- echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/../.build/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
- echo "OPNFV_SRPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.src.rpm"
- echo "OPNFV_RPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.noarch.rpm"
- echo "OPNFV_RPM_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/../.build/noarch/opnfv-apex-$RPM_VERSION.noarch.rpm | cut -d' ' -f1)"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
- ) > $WORKSPACE/opnfv.properties
- else
- # save information regarding artifact into file
- # we only generate the python package for master
- (
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_SRPM_URL=$GS_URL/python34-opnfv-apex-$RPM_VERSION.src.rpm"
- echo "OPNFV_RPM_URL=$GS_URL/python34-opnfv-apex-$RPM_VERSION.noarch.rpm"
- echo "OPNFV_RPM_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/../.build/noarch/python34-opnfv-apex-$RPM_VERSION.noarch.rpm | cut -d' ' -f1)"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
- ) > $WORKSPACE/opnfv.properties
- fi
-fi
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh
deleted file mode 100755
index f5c6ab108..000000000
--- a/jjb/apex/apex-deploy.sh
+++ /dev/null
@@ -1,161 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-IPV6_FLAG=False
-
-# log info to console
-echo "Starting the Apex deployment."
-echo "--------------------------------------------------------"
-echo
-
-if [ -z "$DEPLOY_SCENARIO" ]; then
- echo "Deploy scenario not set!"
- exit 1
-else
- echo "Deploy scenario: ${DEPLOY_SCENARIO}"
-fi
-
-# Dev or RPM/ISO build
-if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
- # Settings for deploying from git workspace
- DEPLOY_SETTINGS_DIR="${WORKSPACE}/config/deploy"
- NETWORK_SETTINGS_DIR="${WORKSPACE}/config/network"
- CLEAN_CMD="opnfv-clean"
- # if we are using master, then we are downloading/caching upstream images.
- # We want to use that built-in mechanism to avoid re-downloading on every job,
- # so we use a dedicated folder to hold the upstream cache.
- UPSTREAM_CACHE=$HOME/upstream_cache
- if [ "$BRANCH" == 'master' ]; then
- mkdir -p ${UPSTREAM_CACHE}
- RESOURCES=$UPSTREAM_CACHE
- else
- RESOURCES="${WORKSPACE}/.build/"
- fi
- CONFIG="${WORKSPACE}/build"
- BASE=$CONFIG
- IMAGES=$RESOURCES
- LIB="${WORKSPACE}/lib"
- DEPLOY_CMD="opnfv-deploy --image-dir ${RESOURCES}"
- # Ensure artifacts were downloaded and extracted correctly
- # TODO(trozet) add verification here
-
- # Install dev build
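- # move the large .build directory out of the workspace while 'pip3 install .' runs
- # (likely to keep it out of the source copy pip makes), then restore it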
- sudo rm -rf /tmp/.build
- mv -f .build /tmp/
- sudo pip3 install --upgrade --force-reinstall .
- mv -f /tmp/.build ${WORKSPACE}/
-else
- DEPLOY_SETTINGS_DIR="/etc/opnfv-apex/"
- NETWORK_SETTINGS_DIR="/etc/opnfv-apex/"
- CLEAN_CMD="opnfv-clean"
- # use a different directory here because the RPM-owned directory would be
- # wiped upon RPM removal in the daily job
- UPSTREAM_CACHE=$HOME/upstream_cache
- if [ "$BRANCH" == 'master' ]; then
- mkdir -p ${UPSTREAM_CACHE}
- RESOURCES=$UPSTREAM_CACHE
- else
- RESOURCES="/var/opt/opnfv/images"
- fi
- DEPLOY_CMD="opnfv-deploy --image-dir ${RESOURCES}"
- CONFIG="/var/opt/opnfv"
- BASE=$CONFIG
- IMAGES=$RESOURCES
- LIB="/var/opt/opnfv/lib"
- sudo mkdir -p /var/log/apex
- sudo chmod 777 /var/log/apex
- cd /var/log/apex
-fi
-
-# Install Dependencies
-# Make sure python34 dependencies are installed
-dependencies="epel-release python34 python34-devel libvirt-devel python34-pip \
-ansible python34-PyYAML python34-jinja2 python34-setuptools python-tox"
-
-for dep_pkg in $dependencies; do
- if ! rpm -q ${dep_pkg} > /dev/null; then
- if ! sudo yum install -y ${dep_pkg}; then
- echo "Failed to install ${dep_pkg}"
- exit 1
- fi
- fi
-done
-
-if [[ "$JOB_NAME" =~ "virtual" ]]; then
- # Make sure ipxe-roms-qemu package is updated to latest.
- # This package is needed for multi virtio nic PXE boot in virtual environment.
- sudo yum update -y ipxe-roms-qemu
-fi
-
-if [ "$OPNFV_CLEAN" == 'yes' ]; then
- if sudo test -e '/root/inventory/pod_settings.yaml'; then
- clean_opts='-i /root/inventory/pod_settings.yaml'
- else
- clean_opts=''
- fi
-
- sudo ${CLEAN_CMD} ${clean_opts}
-fi
-
-if echo ${DEPLOY_SCENARIO} | grep ipv6; then
- IPV6_FLAG=True
- DEPLOY_SCENARIO=$(echo ${DEPLOY_SCENARIO} | sed 's/-ipv6//')
- echo "INFO: IPV6 Enabled"
-fi
-
-echo "Deploy Scenario set to ${DEPLOY_SCENARIO}"
-DEPLOY_FILE="${DEPLOY_SETTINGS_DIR}/${DEPLOY_SCENARIO}.yaml"
-
-if [ ! -e "$DEPLOY_FILE" ]; then
- echo "ERROR: Required settings file missing: Deploy settings file ${DEPLOY_FILE}"
-fi
-
-if [[ "$JOB_NAME" =~ "virtual" ]]; then
- # settings for virtual deployment
- DEPLOY_CMD="${DEPLOY_CMD} -v"
- if [[ "${DEPLOY_SCENARIO}" =~ fdio|ovs ]]; then
- DEPLOY_CMD="${DEPLOY_CMD} --virtual-default-ram 12 --virtual-compute-ram 7"
- fi
- if [[ "$PROMOTE" == "True" ]]; then
- DEPLOY_CMD="${DEPLOY_CMD} --virtual-computes 2 -e csit-environment.yaml"
- fi
-else
- # settings for bare metal deployment
- NETWORK_SETTINGS_DIR="/root/network"
- INVENTORY_FILE="/root/inventory/pod_settings.yaml"
-
- if ! sudo test -e "$INVENTORY_FILE"; then
- echo "ERROR: Required settings file missing: Inventory settings file ${INVENTORY_FILE}"
- exit 1
- fi
- # include inventory file for bare metal deployment
- DEPLOY_CMD="${DEPLOY_CMD} -i ${INVENTORY_FILE}"
-fi
-
-if [[ "$BRANCH" == "master" ]]; then
- echo "Upstream deployment detected"
- DEPLOY_CMD="${DEPLOY_CMD} --upstream"
-fi
-
-if [ "$IPV6_FLAG" == "True" ]; then
- NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_v6.yaml"
-elif [[ "$PROMOTE" == "True" ]]; then
- NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_csit.yaml"
-else
- NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings.yaml"
-fi
-
-# Check that network settings file exists
-if ! sudo test -e "$NETWORK_FILE"; then
- echo "ERROR: Required settings file missing: Network Settings file ${NETWORK_FILE}"
- exit 1
-fi
-
-# start deployment
-sudo ${DEPLOY_CMD} -d ${DEPLOY_FILE} -n ${NETWORK_FILE} --debug
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/apex/apex-download-artifact.sh b/jjb/apex/apex-download-artifact.sh
deleted file mode 100755
index e1e51b3b6..000000000
--- a/jjb/apex/apex-download-artifact.sh
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# log info to console
-echo "Downloading the Apex artifact. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-[[ -d $BUILD_DIRECTORY ]] || mkdir -p $BUILD_DIRECTORY
-
-if [ -z "$DEPLOY_SCENARIO" ]; then
- echo "Deploy scenario not set!"
- exit 1
-else
- echo "Deploy scenario: ${DEPLOY_SCENARIO}"
-fi
-
-if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
- if [ "$BRANCH" == 'master' ]; then
- echo "Skipping download of artifacts for master branch"
- else
- # dev build
- GERRIT_PATCHSET_NUMBER=$(echo $GERRIT_REFSPEC | grep -Eo '[0-9]+$')
- export OPNFV_ARTIFACT_VERSION="dev${GERRIT_CHANGE_NUMBER}_${GERRIT_PATCHSET_NUMBER}"
- # get build artifact
- pushd ${BUILD_DIRECTORY} > /dev/null
- echo "Downloading packaged dev build: apex-${OPNFV_ARTIFACT_VERSION}.tar.gz"
- curl --fail -s -o $BUILD_DIRECTORY/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz http://$GS_URL/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz
- tar -xvf apex-${OPNFV_ARTIFACT_VERSION}.tar.gz
- popd > /dev/null
- fi
-else
- echo "Will use RPMs..."
-
- # Must be RPMs/ISO
- echo "Downloading latest properties file"
-
- # get the properties file in order to get info regarding artifacts
- curl --fail -s -o $BUILD_DIRECTORY/opnfv.properties http://$GS_URL/latest.properties
-
- # source the file so we get OPNFV vars
- source $BUILD_DIRECTORY/opnfv.properties
-
- RPM_INSTALL_PATH=$(echo "http://"$OPNFV_RPM_URL | sed 's/\/'"$(basename $OPNFV_RPM_URL)"'//')
- RPM_LIST=$(basename $OPNFV_RPM_URL)
- # find version of RPM
- VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-([0-9]{8}|[a-z]+-[0-9]\.[0-9]+)')
- if [ "$BRANCH" != 'master' ]; then
- # build RPM List which already includes base Apex RPM
- RPM_LIST+=" opnfv-apex-undercloud-${VERSION_EXTENSION}.noarch.rpm"
- RPM_LIST+=" python34-opnfv-apex-${VERSION_EXTENSION}.noarch.rpm"
- fi
-
- # remove old / install new RPMs
- if rpm -q python34-opnfv-apex > /dev/null; then
- INSTALLED_RPMS=$(rpm -qa | grep apex)
- if [ -n "$INSTALLED_RPMS" ]; then
- sudo yum remove -y ${INSTALLED_RPMS}
- fi
- fi
- # Create an rpms dir on slave
- mkdir -p ~/apex_rpms
- pushd ~/apex_rpms
- # Remove older rpms which do not match this version
- find . ! -name "*${VERSION_EXTENSION}.noarch.rpm" -type f -exec rm -f {} +
- # Download RPM only if changed on server
- for rpm in $RPM_LIST; do
- wget -N ${RPM_INSTALL_PATH}/${rpm}
- done
- if ! sudo yum install -y $RPM_LIST; then
- echo "Unable to install new RPMs: $RPM_LIST"
- exit 1
- fi
- popd
-fi
-
-# TODO: Uncomment these lines to verify SHA512SUMs once the sums are
-# fixed.
-# echo "$OPNFV_ARTIFACT_SHA512SUM $BUILD_DIRECTORY/apex.iso" | sha512sum -c
-# echo "$OPNFV_RPM_SHA512SUM $BUILD_DIRECTORY/$(basename $OPNFV_RPM_URL)" | sha512sum -c
-
-# list the files
-ls -al $BUILD_DIRECTORY
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/apex/apex-fetch-logs.sh b/jjb/apex/apex-fetch-logs.sh
deleted file mode 100755
index bdb2252b3..000000000
--- a/jjb/apex/apex-fetch-logs.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# log info to console
-echo "Fetching logs from overcloud. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-if sudo opnfv-pyutil --fetch-logs; then
- LOG_LOCATION=$(grep 'Log retrieval complete' apex_util.log | grep -Eo '/tmp/.+$' || true)
- if [ -z "$LOG_LOCATION" ]; then
- echo "WARNING: Unable to determine log location. Logs will not be uploaded"
- exit 0
- else
- sudo chmod 777 ${LOG_LOCATION}
- UPLOAD_LOCATION="${GS_URL}/logs/${JOB_NAME}/${BUILD_NUMBER}/"
- gsutil -m cp -r ${LOG_LOCATION} gs://${UPLOAD_LOCATION} > gsutil.latest_logs.log
- echo -e "Logs available at: \n$(find ${LOG_LOCATION} -type f | sed -n 's#^/tmp/#http://'$UPLOAD_LOCATION'#p')"
- fi
-else
- echo "WARNING: Log retrieval failed. No logs will be uploaded"
- exit 0
-fi
diff --git a/jjb/apex/apex-iso-verify.sh b/jjb/apex/apex-iso-verify.sh
deleted file mode 100755
index f34937619..000000000
--- a/jjb/apex/apex-iso-verify.sh
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# log info to console
-echo "Starting the Apex iso verify."
-echo "--------------------------------------------------------"
-echo
-
-if [ "$BRANCH" == 'master' ]; then
- echo "Skipping Apex iso verify for master branch"
- exit 0
-fi
-
-# Must be RPMs/ISO
-echo "Downloading latest properties file"
-
-# get the properties file in order to get info regarding artifacts
-curl --fail -s -o opnfv.properties http://$GS_URL/latest.properties
-
-# source the file so we get OPNFV vars
-source opnfv.properties
-
-if ! rpm -q virt-install > /dev/null; then
- sudo yum -y install virt-install
-fi
-
-# define a clean function
-rm_apex_iso_verify () {
-if sudo virsh list --all | grep apex-iso-verify | grep running; then
- sudo virsh destroy apex-iso-verify
-fi
-if sudo virsh list --all | grep apex-iso-verify; then
- sudo virsh undefine apex-iso-verify
-fi
-}
-
-# Make sure a pre-existing iso-verify isn't there
-rm_apex_iso_verify
-
-# make sure there is not an existing console log file for the VM
-sudo rm -f /var/log/libvirt/qemu/apex-iso-verify-console.log
-
-# run an install from the iso
-# This streams a serial console to tcp port 3737 on localhost
-sudo virt-install -n apex-iso-verify -r 4096 --vcpus 4 --os-variant=rhel7 \
- --accelerate -v --noautoconsole \
- --disk path=/var/lib/libvirt/images/apex-iso-verify.qcow2,size=30,format=qcow2 \
- -l /tmp/apex-iso/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso \
- --extra-args 'console=ttyS0 console=ttyS0,115200n8 serial inst.ks=file:/iso-verify.ks inst.stage2=hd:LABEL=OPNFV\x20CentOS\x207\x20x86_64:/' \
- --initrd-inject ci/iso-verify.ks \
- --serial file,path=/var/log/libvirt/qemu/apex-iso-verify-console.log
-
-echo "Waiting for install to finish..."
-sleep 10
-end_time=$(($SECONDS+1500))
-while ! [[ `sudo tail -n1 /var/log/libvirt/qemu/apex-iso-verify-console.log` =~ 'Power down' ]]; do
- if [ $SECONDS -gt $end_time ] || ! sudo virsh list --all | grep apex-iso-verify | grep running > /dev/null; then
- sudo cat /var/log/libvirt/qemu/apex-iso-verify-console.log
- sudo virsh list --all
- echo "Error: Failed to find power down message after install"
- exit 1
- fi
- sleep 10
-done
-
-sudo cat /var/log/libvirt/qemu/apex-iso-verify-console.log
-
-# clean up
-rm_apex_iso_verify
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/apex/apex-jjb-renderer.py b/jjb/apex/apex-jjb-renderer.py
deleted file mode 100644
index 0da47b518..000000000
--- a/jjb/apex/apex-jjb-renderer.py
+++ /dev/null
@@ -1,49 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Tim Rozet (trozet@redhat.com) and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import pprint
-import yaml
-from jinja2 import Environment
-from jinja2 import FileSystemLoader
-
-
-def render_jjb():
- """Render JJB output from scenarios.yaml.hidden file and jinja
- template"""
-
- gspathname = dict()
- branch = dict()
- build_slave = dict()
- env = Environment(loader=FileSystemLoader('./'), autoescape=True)
-
- with open('scenarios.yaml.hidden') as _:
- scenarios = yaml.safe_load(_)
-
- template = env.get_template('apex.yaml.j2')
-
- print("Scenarios are: ")
- pprint.pprint(scenarios)
-
- for stream in scenarios:
- if stream == 'master':
- gspathname['master'] = ''
- branch[stream] = stream
- else:
- gspathname[stream] = '/' + stream
- branch[stream] = 'stable/' + stream
- build_slave[stream] = 'apex-baremetal-{}'.format(stream)
-
- output = template.render(scenarios=scenarios, gspathname=gspathname,
- branch=branch, build_slave=build_slave)
-
- with open('./apex.yaml', 'w') as fh:
- fh.write(output)
-
-if __name__ == "__main__":
- render_jjb()
diff --git a/jjb/apex/apex-project-jobs.yaml b/jjb/apex/apex-project-jobs.yaml
deleted file mode 100644
index 700ff60e8..000000000
--- a/jjb/apex/apex-project-jobs.yaml
+++ /dev/null
@@ -1,136 +0,0 @@
----
-- project:
- name: 'apex-project-jobs'
- project: 'apex'
-
- stream:
- - master: &master
- branch: 'master'
- gs-pathname: ''
- concurrent-builds: 3
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- concurrent-builds: 3
- disabled: false
- - euphrates: &euphrates
- branch: 'stable/euphrates'
- gs-pathname: '/euphrates'
- concurrent-builds: 3
- disabled: true
- - danube: &danube
- branch: 'stable/danube'
- gs-pathname: '/danube'
- concurrent-builds: 1
- disabled: true
-
- jobs:
- - 'apex-build-{stream}'
- - 'apex-verify-iso-{stream}'
-
-# Build phase
-- job-template:
- name: 'apex-build-{stream}'
-
- # Job template for builds
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: 'apex-build-master'
-
- disabled: false
-
- concurrent: true
-
- parameters:
- - '{project}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - timeout:
- timeout: 150
- fail: true
-
- properties:
- - logrotate-default
- - throttle:
- max-per-node: '{concurrent-builds}'
- max-total: 10
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify-iso-{stream}'
-
- builders:
- - 'apex-build'
- - inject:
- properties-content: ARTIFACT_TYPE=rpm
- - 'apex-upload-artifact'
-
-# ISO verify job
-- job-template:
- name: 'apex-verify-iso-{stream}'
-
- # Job template for ISO verification
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: 'apex-virtual-master'
-
- disabled: false
-
- concurrent: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from parameters macro."
-
- scm:
- - git-scm
-
- properties:
- - logrotate-default
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - 'apex-iso-verify'
- - inject:
- properties-content: ARTIFACT_TYPE=iso
- - 'apex-upload-artifact'
-
-########################
-# builder macros
-########################
-- builder:
- name: 'apex-build'
- builders:
- - shell:
- !include-raw: ./apex-build.sh
-
-- builder:
- name: 'apex-iso-verify'
- builders:
- - shell:
- !include-raw: ./apex-iso-verify.sh
diff --git a/jjb/apex/apex-snapshot-create.sh b/jjb/apex/apex-snapshot-create.sh
deleted file mode 100644
index b73931470..000000000
--- a/jjb/apex/apex-snapshot-create.sh
+++ /dev/null
@@ -1,108 +0,0 @@
-#!/usr/bin/env bash
-##############################################################################
-# Copyright (c) 2016 Tim Rozet (Red Hat) and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-SSH_OPTIONS=(-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o LogLevel=error)
-
-if [ -z "$SNAP_TYPE" ]; then
- echo "ERROR: SNAP_TYPE not provided...exiting"
- exit 1
-fi
-
-echo "Creating Apex snapshot..."
-echo "-------------------------"
-echo
-
-# create tmp directory
-tmp_dir=$(pwd)/.tmp
-mkdir -p ${tmp_dir}
-
-# TODO(trozet) remove this after fix goes in for tripleo_inspector to copy these
-pushd ${tmp_dir} > /dev/null
-echo "Copying overcloudrc and ssh key from Undercloud..."
-# Store overcloudrc
-UNDERCLOUD=$(sudo virsh domifaddr undercloud | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+')
-sudo scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:overcloudrc ./
-# Copy out ssh key of stack from undercloud
-sudo scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:.ssh/id_rsa ./
-popd > /dev/null
-
-echo "Gathering introspection information"
-git clone https://gerrit.opnfv.org/gerrit/sdnvpn.git
-pushd sdnvpn/odl-pipeline/lib > /dev/null
-sudo ./tripleo_introspector.sh --out-file ${tmp_dir}/node.yaml
-popd > /dev/null
-sudo rm -rf sdnvpn
-
-echo "Shutting down nodes"
-# Shut down nodes
-nodes=$(sudo virsh list | grep -Eo "baremetal[0-9]")
-for node in $nodes; do
- sudo virsh shutdown ${node} --mode acpi
-done
-
-for node in $nodes; do
- count=0
- while [ "$count" -lt 10 ]; do
- sleep 10
- if sudo virsh list | grep ${node}; then
- echo "Waiting for $node to shutdown, try $count"
- else
- break
- fi
- count=$((count+1))
- done
-
- if [ "$count" -ge 10 ]; then
- echo "Node $node failed to shutdown"
- exit 1
- fi
-done
-
-pushd ${tmp_dir} > /dev/null
-echo "Gathering virsh definitions"
-# copy qcow2s, virsh definitions
-for node in $nodes; do
- sudo cp -f /var/lib/libvirt/images/${node}.qcow2 ./
- sudo virsh dumpxml ${node} > ${node}.xml
-done
-
-# copy virsh net definitions
-sudo virsh net-dumpxml admin > admin.xml
-
-sudo chown jenkins-ci:jenkins-ci *
-
-# tar up artifacts
-DATE=`date +%Y-%m-%d`
-tar czf ../apex-${SNAP_TYPE}-snap-${DATE}.tar.gz .
-popd > /dev/null
-sudo rm -rf ${tmp_dir}
-echo "Snapshot saved as apex-${SNAP_TYPE}-snap-${DATE}.tar.gz"
-
-# update opnfv properties file
-if [ "$SNAP_TYPE" == 'csit' ]; then
- snap_sha=$(sha512sum apex-csit-snap-${DATE}.tar.gz | cut -d' ' -f1)
- if curl --fail -O -L http://$GS_URL/snapshot.properties; then
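- # update the OPNFV_SNAP_URL/OPNFV_SNAP_SHA512SUM values in place if the keys exist, otherwise append them to the file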
- sed -i '/^OPNFV_SNAP_URL=/{h;s#=.*#='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#};${x;/^$/{s##OPNFV_SNAP_URL='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#;H};x}' snapshot.properties
- sed -i '/^OPNFV_SNAP_SHA512SUM=/{h;s/=.*/='${snap_sha}'/};${x;/^$/{s//OPNFV_SNAP_SHA512SUM='${snap_sha}'/;H};x}' snapshot.properties
- else
- cat << EOF > snapshot.properties
-OPNFV_SNAP_URL=${GS_URL}/apex-csit-snap-${DATE}.tar.gz
-OPNFV_SNAP_SHA512SUM=${snap_sha}
-EOF
- fi
- echo "OPNFV_SNAP_URL=$GS_URL/apex-csit-snap-${DATE}.tar.gz"
- echo "OPNFV_SNAP_SHA512SUM=$(sha512sum apex-csit-snap-${DATE}.tar.gz | cut -d' ' -f1)"
- echo "Updated properties file: "
- cat snapshot.properties
-fi
diff --git a/jjb/apex/apex-snapshot-deploy.sh b/jjb/apex/apex-snapshot-deploy.sh
deleted file mode 100644
index 9738ecb19..000000000
--- a/jjb/apex/apex-snapshot-deploy.sh
+++ /dev/null
@@ -1,174 +0,0 @@
-#!/usr/bin/env bash
-##############################################################################
-# Copyright (c) 2016 Tim Rozet (Red Hat) and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-SSH_OPTIONS=(-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o LogLevel=error)
-SNAP_CACHE=$HOME/snap_cache
-
-
-echo "Deploying Apex snapshot..."
-echo "--------------------------"
-echo
-
-echo "Cleaning server"
-pushd ci > /dev/null
-sudo opnfv-clean
-popd > /dev/null
-
-full_snap_url=http://$GS_URL/${OS_VERSION}/${TOPOLOGY}
-
-echo "Downloading latest snapshot properties file"
-if ! wget -O $WORKSPACE/opnfv.properties ${full_snap_url}/snapshot.properties; then
- echo "ERROR: Unable to find snapshot.properties at ${full_snap_url}...exiting"
- exit 1
-fi
-
-# find latest check sum
-latest_snap_checksum=$(cat opnfv.properties | grep OPNFV_SNAP_SHA512SUM | awk -F "=" '{print $2}')
-if [ -z "$latest_snap_checksum" ]; then
- echo "ERROR: checksum of latest snapshot from snapshot.properties is null!"
- exit 1
-fi
-
-local_snap_checksum=""
-SNAP_CACHE=${SNAP_CACHE}/${OS_VERSION}/${TOPOLOGY}
-
-# check snap cache directory exists
-# if snapshot cache exists, find the checksum
-if [ -d "$SNAP_CACHE" ]; then
- latest_snap=$(ls ${SNAP_CACHE} | grep tar.gz | tail -n 1)
- if [ -n "$latest_snap" ]; then
- local_snap_checksum=$(sha512sum ${SNAP_CACHE}/${latest_snap} | cut -d' ' -f1)
- fi
-else
- mkdir -p ${SNAP_CACHE}
-fi
-
-# compare check sum and download latest snap if not up to date
-if [ "$local_snap_checksum" != "$latest_snap_checksum" ]; then
- snap_url=$(cat opnfv.properties | grep OPNFV_SNAP_URL | awk -F "=" '{print $2}')
- if [ -z "$snap_url" ]; then
- echo "ERROR: Snap URL from snapshot.properties is null!"
- exit 1
- fi
- echo "INFO: SHA mismatch, will download latest snapshot"
- # wipe cache
- rm -rf ${SNAP_CACHE}/*
- wget --directory-prefix=${SNAP_CACHE}/ ${snap_url}
- snap_tar=$(basename ${snap_url})
-else
- snap_tar=${latest_snap}
-fi
-
-echo "INFO: Snapshot to be used is ${snap_tar}"
-
-# move to snap cache dir and unpack
-pushd ${SNAP_CACHE} > /dev/null
-tar xvf ${snap_tar}
-
-# create each network
-virsh_networks=$(ls *.xml | grep -v baremetal)
-
-if [ -z "$virsh_networks" ]; then
- echo "ERROR: no virsh networks found in snapshot unpack"
- exit 1
-fi
-
-echo "Checking overcloudrc"
-if ! stat overcloudrc; then
- echo "ERROR: overcloudrc does not exist in snap unpack"
- exit 1
-fi
-
-for network_def in ${virsh_networks}; do
- sudo virsh net-create ${network_def}
- network=$(echo ${network_def} | awk -F '.' '{print $1}')
- if ! sudo virsh net-list | grep ${network}; then
- sudo virsh net-start ${network}
- fi
- echo "Checking if OVS bridge is missing for network: ${network}"
- if ! sudo ovs-vsctl show | grep "br-${network}"; then
- sudo ovs-vsctl add-br br-${network}
- echo "OVS Bridge created: br-${network}"
- if [ "br-${network}" == 'br-admin' ]; then
- echo "Configuring IP 192.0.2.99 on br-admin"
- sudo ip addr add 192.0.2.99/24 dev br-admin
- sudo ip link set up dev br-admin
- elif [ "br-${network}" == 'br-external' ]; then
- echo "Configuring IP 192.168.37.1 on br-external"
- sudo ip addr add 192.168.37.1/24 dev br-external
- sudo ip link set up dev br-external
- # Routes for admin network
- # The overcloud controller is multi-homed and will fail to respond
- # to traffic from the functest container due to reverse-path-filtering
- # This route allows reverse traffic, by forcing admin network destined
- # traffic through the external network for controller IPs only.
- # Compute nodes have no ip on external interfaces.
- controller_ips=$(cat overcloudrc | grep -Eo "192.0.2.[0-9]+")
- for ip in $controller_ips; do
- sudo ip route add ${ip}/32 dev br-external
- done
- fi
- fi
-done
-
-echo "Virsh networks up: $(sudo virsh net-list)"
-echo "Bringing up Overcloud VMs..."
-virsh_vm_defs=$(ls baremetal*.xml)
-
-if [ -z "$virsh_vm_defs" ]; then
- echo "ERROR: no virsh VMs found in snapshot unpack"
- exit 1
-fi
-
-for node_def in ${virsh_vm_defs}; do
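- # normalize the machine type to the generic 'pc' alias so the snapshot XML defines cleanly on this host's QEMU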
- sed -ri "s/machine='[^\s]+'/machine='pc'/" ${node_def}
- sudo virsh define ${node_def}
- node=$(echo ${node_def} | awk -F '.' '{print $1}')
- sudo cp -f ${node}.qcow2 /var/lib/libvirt/images/
- # FIXME (trozet) install java on each disk image as required to upgrade ODL
- # should be added to Apex as part of the deployment. Remove this after that
- # is complete
- sudo LIBGUESTFS_BACKEND=direct virt-customize --install java-1.8.0-openjdk -a /var/lib/libvirt/images/${node}.qcow2
- sudo virsh start ${node}
- echo "Node: ${node} started"
-done
-
-# copy overcloudrc for functest
-mkdir -p $HOME/cloner-info
-cp -f overcloudrc $HOME/cloner-info/
-
-admin_controller_ip=$(cat overcloudrc | grep -Eo -m 1 "192.0.2.[0-9]+" | head -1)
-netvirt_url="http://${admin_controller_ip}:8081/restconf/operational/network-topology:network-topology/topology/netvirt:1"
-
-source overcloudrc
-counter=1
-while [ "$counter" -le 10 ]; do
- echo "Checking if OpenStack is up"
- if nc -z ${admin_controller_ip} 9696 > /dev/null; then
- echo "Overcloud Neutron is up...Checking if OpenDaylight NetVirt is up..."
- if curl --fail --silent -u admin:admin ${netvirt_url} > /dev/null; then
- echo "OpenDaylight is up. Overcloud deployment complete"
- exit 0
- else
- echo "OpenDaylight not yet up, try ${counter}"
- fi
- else
- echo "Neutron not yet up, try ${counter}"
- fi
- counter=$((counter+1))
- sleep 60
-done
-
-echo "ERROR: Deployment not up after 10 minutes...exiting."
-exit 1
diff --git a/jjb/apex/apex-unit-test.sh b/jjb/apex/apex-unit-test.sh
deleted file mode 100755
index 3f15847f2..000000000
--- a/jjb/apex/apex-unit-test.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-# log info to console
-echo "Starting unit tests for Apex..."
-echo "---------------------------------------------------------------------------------------"
-echo
-
-PATH=$PATH:/usr/sbin
-
-
-pushd build/ > /dev/null
-for pkg in yamllint rpmlint iproute epel-release python34-devel python34-nose python34-PyYAML python-pep8 python34-mock python34-pip; do
- if ! rpm -q ${pkg} > /dev/null; then
- if ! sudo yum install -y ${pkg}; then
- echo "Failed to install ${pkg} package..."
- exit 1
- fi
- fi
-done
-
-# Make sure coverage is installed
-if ! python3 -c "import coverage" &> /dev/null; then sudo pip3 install coverage; fi
-
-make rpmlint
-make python-pep8-check
-make yamllint
-make python-tests
-popd > /dev/null
-
-echo "--------------------------------------------------------"
-echo "Unit Tests Done!"
diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh
deleted file mode 100755
index 5c777a824..000000000
--- a/jjb/apex/apex-upload-artifact.sh
+++ /dev/null
@@ -1,165 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-if [ -z "$ARTIFACT_TYPE" ]; then
- echo "ERROR: ARTIFACT_TYPE not provided...exiting"
- exit 1
-fi
-
-# log info to console
-echo "Uploading the Apex ${ARTIFACT_TYPE} artifact. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-if [[ ! "$ARTIFACT_VERSION" =~ dev ]]; then
- source $BUILD_DIRECTORY/../opnfv.properties
-fi
-
-importkey () {
- # clone releng repository
- echo "Cloning releng repository..."
- [ -d releng ] && rm -rf releng
- git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
- # this is where we import the signing key
- if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
- if ! $WORKSPACE/releng/utils/gpg_import_key.sh; then
- echo "WARNING: Failed to run gpg key import"
- fi
- fi
-}
-
-signrpm () {
- for artifact in $RPM_LIST $SRPM_LIST; do
- echo "Signing artifact: ${artifact}"
- gpg2 -vvv --batch --yes --no-tty \
- --default-key opnfv-helpdesk@rt.linuxfoundation.org \
- --passphrase besteffort \
- --detach-sig $artifact
- gsutil cp "$artifact".sig gs://$GS_URL/$(basename "$artifact".sig)
- echo "Upload complete for ${artifact} signature"
- done
-}
-
-signiso () {
- gpg2 -vvv --batch --yes --no-tty \
- --default-key opnfv-helpdesk@rt.linuxfoundation.org \
- --passphrase besteffort \
- --detach-sig $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso
-
- gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig
- echo "ISO signature Upload Complete!"
-}
-
-uploadiso () {
- gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log
- echo "ISO Upload Complete!"
-}
-
-uploadrpm () {
- for artifact in $RPM_LIST $SRPM_LIST; do
- echo "Uploading artifact: ${artifact}"
- gsutil cp $artifact gs://$GS_URL/$(basename $artifact) > gsutil.iso.log
- echo "Upload complete for ${artifact}"
- done
- gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log
- gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log
-
- # Make the property files viewable on the artifact site
- gsutil -m setmeta \
- -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/latest.properties \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1
-}
-
-uploadsnap () {
- # Uploads snapshot artifact and updated properties file
- echo "Uploading snapshot artifacts"
- # the snapshot dir is in the create job's workspace on the same node;
- # only one promotion job can run at a time on a slave
- snapshot_dir="${WORKSPACE}/../apex-create-snapshot"
- if [ -z "$SNAP_TYPE" ]; then
- echo "ERROR: SNAP_TYPE not provided...exiting"
- exit 1
- fi
- gsutil cp ${snapshot_dir}/apex-${SNAP_TYPE}-snap-`date +%Y-%m-%d`.tar.gz gs://$GS_URL/ > gsutil.iso.log
- if [ "$SNAP_TYPE" == 'csit' ]; then
- gsutil cp ${snapshot_dir}/snapshot.properties gs://$GS_URL/snapshot.properties > gsutil.latest.log
- fi
- echo "Upload complete for Snapshot"
-}
-
-uploadimages () {
- # Uploads dev tarball
- GERRIT_PATCHSET_NUMBER=$(echo $GERRIT_REFSPEC | grep -Eo '[0-9]+$')
- export OPNFV_ARTIFACT_VERSION="dev${GERRIT_CHANGE_NUMBER}_${GERRIT_PATCHSET_NUMBER}"
- echo "Uploading development build tarball"
- pushd $BUILD_DIRECTORY > /dev/null
- tar czf apex-${OPNFV_ARTIFACT_VERSION}.tar.gz *.qcow2 *.vmlinuz *.initrd
- gsutil cp apex-${OPNFV_ARTIFACT_VERSION}.tar.gz gs://$GS_URL/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz > gsutil.latest.log
- popd > /dev/null
-}
-
-# Always import the signing key, if it's available the artifacts will be
-# signed before being uploaded
-importkey
-
-if gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then
- echo "Signing Key avaliable"
- SIGN_ARTIFACT="true"
-fi
-
-if [ "$ARTIFACT_TYPE" == 'snapshot' ]; then
- uploadsnap
-elif [ "$ARTIFACT_TYPE" == 'iso' ]; then
- if [[ "$ARTIFACT_VERSION" =~ dev || "$BRANCH" == 'master' ]]; then
- echo "Skipping ISO artifact upload for ${ARTIFACT_TYPE} due to dev/master build"
- exit 0
- fi
- if [[ -n "$SIGN_ARTIFACT" && "$SIGN_ARTIFACT" == "true" ]]; then
- signiso
- fi
- uploadiso
-elif [ "$ARTIFACT_TYPE" == 'rpm' ]; then
- if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
- if [ "$BRANCH" == 'master' ]; then
- echo "will not upload artifacts, master uses upstream"
- ARTIFACT_TYPE=none
- else
- echo "dev build detected, will upload image tarball"
- ARTIFACT_TYPE=tarball
- uploadimages
- fi
- else
- RPM_INSTALL_PATH=$BUILD_DIRECTORY/noarch
- # the RPM URL should be the python package for master, and it is the only package we need
- RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
- SRPM_INSTALL_PATH=$BUILD_DIRECTORY
- SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL)
- if [ "$BRANCH" != 'master' ]; then
- VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//')
- RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}"
- RPM_LIST+=" ${RPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}"
- VERSION_EXTENSION=$(echo $(basename $OPNFV_SRPM_URL) | sed 's/opnfv-apex-//')
- SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}"
- SRPM_LIST+=" ${SRPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}"
- fi
-
- if [[ -n "$SIGN_ARTIFACT" && "$SIGN_ARTIFACT" == "true" ]]; then
- signrpm
- fi
- uploadrpm
- fi
-else
- echo "ERROR: Unknown artifact type ${ARTIFACT_TYPE} to upload...exiting"
- exit 1
-fi
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
-if [ "$ARTIFACT_TYPE" == 'iso' ]; then echo "ISO Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"; fi
-if [ "$ARTIFACT_TYPE" == 'rpm' ]; then echo "RPM Artifact is available as http://$GS_URL/$(basename $OPNFV_RPM_URL)"; fi
-if [ "$ARTIFACT_TYPE" == 'tarball' ]; then echo "Dev tarball Artifact is available as http://$GS_URL/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz)"; fi
diff --git a/jjb/apex/apex-verify-jobs.yaml b/jjb/apex/apex-verify-jobs.yaml
deleted file mode 100644
index 819e17b76..000000000
--- a/jjb/apex/apex-verify-jobs.yaml
+++ /dev/null
@@ -1,396 +0,0 @@
----
-- project:
- name: 'apex-verify-jobs'
- project: 'apex'
- jobs:
- - 'apex-verify-{stream}'
- - 'apex-verify-gate-{stream}'
- - 'apex-verify-unit-tests-{stream}'
- stream:
- - master: &master
- branch: '{stream}'
- gs-pathname: ''
- verify-scenario: 'os-nosdn-nofeature-ha'
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- verify-scenario: 'os-nosdn-nofeature-ha'
- disabled: false
- - danube: &danube
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- verify-scenario: 'os-odl_l3-nofeature-ha'
- disabled: true
- - euphrates: &euphrates
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- verify-scenario: 'os-odl-nofeature-ha'
- disabled: false
-
-# Unit Test
-- job-template:
- name: 'apex-verify-unit-tests-{stream}'
-
- node: 'apex-build-master'
-
- concurrent: true
-
- disabled: '{obj:disabled}'
-
- parameters:
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'true'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'apex'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'false'
- file-paths:
- - compare-type: ANT
- pattern: 'apex/tests/**'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: '*'
- - compare-type: ANT
- pattern: 'apex/*'
- - compare-type: ANT
- pattern: 'build/**'
- - compare-type: ANT
- pattern: 'lib/**'
- - compare-type: ANT
- pattern: 'config/**'
- - compare-type: ANT
- pattern: 'apex/build/**'
- - compare-type: ANT
- pattern: 'apex/common/**'
- - compare-type: ANT
- pattern: 'apex/inventory/**'
- - compare-type: ANT
- pattern: 'apex/network/**'
- - compare-type: ANT
- pattern: 'apex/overcloud/**'
- - compare-type: ANT
- pattern: 'apex/settings/**'
- - compare-type: ANT
- pattern: 'apex/undercloud/**'
- - compare-type: ANT
- pattern: 'apex/virtual/**'
-
- properties:
- - logrotate-default
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - apex-unit-test
-
-# Verify
-- job-template:
- name: 'apex-verify-{stream}'
-
- concurrent: true
-
- disabled: '{obj:disabled}'
-
- project-type: 'multijob'
-
- parameters:
- - '{project}-virtual-{stream}-defaults'
- - apex-parameter:
- gs-pathname: '{gs-pathname}/dev'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: ARTIFACT_VERSION
- default: dev
- description: "Used for overriding the ARTIFACT_VERSION"
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'true'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'apex'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- file-paths:
- - compare-type: ANT
- pattern: '*'
- - compare-type: ANT
- pattern: 'apex/*'
- - compare-type: ANT
- pattern: 'build/**'
- - compare-type: ANT
- pattern: 'ci/**'
- - compare-type: ANT
- pattern: 'lib/**'
- - compare-type: ANT
- pattern: 'config/**'
- - compare-type: ANT
- pattern: 'apex/build/**'
- - compare-type: ANT
- pattern: 'apex/common/**'
- - compare-type: ANT
- pattern: 'apex/inventory/**'
- - compare-type: ANT
- pattern: 'apex/network/**'
- - compare-type: ANT
- pattern: 'apex/overcloud/**'
- - compare-type: ANT
- pattern: 'apex/settings/**'
- - compare-type: ANT
- pattern: 'apex/undercloud/**'
- - compare-type: ANT
- pattern: 'apex/virtual/**'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'apex/tests/**'
- - compare-type: ANT
- pattern: 'docs/**'
-
- properties:
- - logrotate-default
- - throttle:
- max-per-node: 3
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: basic
- condition: SUCCESSFUL
- projects:
- - name: 'apex-verify-unit-tests-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'apex-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- ARTIFACT_VERSION=$ARTIFACT_VERSION
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'apex-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- ARTIFACT_VERSION=$ARTIFACT_VERSION
- DEPLOY_SCENARIO={verify-scenario}
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- FUNCTEST_SUITE_NAME=tempest_smoke
- FUNCTEST_MODE=testcase
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
-
-# Verify Scenario Gate
-- job-template:
- name: 'apex-verify-gate-{stream}'
-
- concurrent: true
-
- disabled: '{obj:disabled}'
-
- project-type: 'multijob'
-
- parameters:
- - '{project}-virtual-{stream}-defaults'
- - apex-parameter:
- gs-pathname: '{gs-pathname}/dev'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from parameters macro."
- - string:
- name: ARTIFACT_VERSION
- default: dev
- description: "Used for overriding the ARTIFACT_VERSION"
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - comment-added-contains-event:
- comment-contains-value: '^Patch Set [0-9]+: Code-Review\+2.*start-gate-scenario:.*'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'apex'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'ci/**'
- - compare-type: ANT
- pattern: 'build/**'
- - compare-type: ANT
- pattern: 'lib/**'
- - compare-type: ANT
- pattern: 'config/**'
- - compare-type: ANT
- pattern: 'apex/**'
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-virtual.*'
- - 'apex-.*-promote.*'
- - 'odl-netvirt.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- # yamllint disable rule:line-length
- - shell: |
- echo DEPLOY_SCENARIO=$(echo $GERRIT_EVENT_COMMENT_TEXT | grep start-gate-scenario | grep -Eo '(os|k8s)-.*$') > detected_scenario
- # yamllint enable rule:line-length
- - inject:
- properties-file: detected_scenario
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- ARTIFACT_VERSION=$ARTIFACT_VERSION
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- GERRIT_EVENT_COMMENT_TEXT=$GERRIT_EVENT_COMMENT_TEXT
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - shell: |
- if echo $DEPLOY_SCENARIO | grep queens &> /dev/null; then
- echo 'REPO=opnfv' > functest_repo
- else
- echo 'REPO=ollivier' > functest_repo
- fi
- - inject:
- properties-file: functest_repo
- - multijob:
- name: functest-smoke
- condition: ALWAYS
- projects:
- - name: 'functest-apex-virtual-suite-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- FUNCTEST_SUITE_NAME=tempest_smoke
- FUNCTEST_MODE=testcase
- REPO=$REPO
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- enable-condition: "def m = '$DEPLOY_SCENARIO' != k8s-nosdn-nofeature-noha"
- abort-all-job: true
- git-revision: false
- - multijob:
- name: apex-fetch-logs
- projects:
- - name: 'apex-fetch-logs-{stream}'
- current-parameters: true
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-
-########################
-# builder macros
-########################
-- builder:
- name: apex-unit-test
- builders:
- - shell:
- !include-raw: ./apex-unit-test.sh
diff --git a/jjb/apex/apex.yaml b/jjb/apex/apex.yaml
deleted file mode 100644
index 720d5bdd9..000000000
--- a/jjb/apex/apex.yaml
+++ /dev/null
@@ -1,1793 +0,0 @@
----
-- project:
- name: 'apex'
- project: 'apex'
- jobs:
- - 'apex-fetch-logs-{stream}'
- - 'apex-runner-cperf-{stream}'
- - 'apex-virtual-{stream}'
- - 'apex-deploy-{platform}-{stream}'
- - 'apex-daily-{stream}'
- - 'apex-csit-promote-daily-{stream}-os-{os_version}-{topology}'
- - 'apex-fdio-promote-daily-{stream}'
- - 'apex-{scenario}-baremetal-{scenario_stream}'
- - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
- - 'apex-upload-snapshot'
- - 'apex-create-snapshot'
- - 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
- - 'apex-dovetail-daily-os-nosdn-nofeature-ha-baremetal-danube'
- # stream: branch with - in place of / (eg. stable-arno)
- # branch: branch (eg. stable/arno)
- stream:
- - master: &master
- branch: 'master'
- gs-pathname: ''
- build-slave: 'apex-build-master'
- virtual-slave: 'apex-virtual-master'
- baremetal-slave: 'apex-baremetal-master'
- verify-scenario: 'os-nosdn-nofeature-ha'
- scenario_stream: 'master'
- disable_daily: false
- disable_promote: false
- - fraser: &fraser
- branch: 'stable/fraser'
- gs-pathname: '/fraser'
- build-slave: 'apex-build-master'
- virtual-slave: 'apex-virtual-master'
- baremetal-slave: 'apex-baremetal-master'
- verify-scenario: 'os-nosdn-nofeature-ha'
- scenario_stream: 'fraser'
- disable_daily: true
- disable_promote: true
- - euphrates: &euphrates
- branch: 'stable/euphrates'
- gs-pathname: '/euphrates'
- build-slave: 'apex-build-master'
- virtual-slave: 'apex-virtual-master'
- baremetal-slave: 'apex-baremetal-master'
- verify-scenario: 'os-odl-nofeature-ha'
- scenario_stream: 'euphrates'
- disable_daily: true
- disable_promote: true
- - danube: &danube
- branch: 'stable/danube'
- gs-pathname: '/danube'
- build-slave: 'apex-build-danube'
- virtual-slave: 'apex-virtual-danube'
- baremetal-slave: 'apex-baremetal-danube'
- verify-scenario: 'os-odl_l3-nofeature-ha'
- scenario_stream: 'danube'
- disabled: true
- disable_daily: true
- disable_promote: true
-
- scenario:
- - 'os-nosdn-nofeature-noha':
- <<: *fraser
- - 'os-nosdn-nofeature-ha':
- <<: *fraser
- - 'os-odl-nofeature-ha':
- <<: *fraser
- - 'os-odl-nofeature-noha':
- <<: *fraser
- - 'os-odl-bgpvpn-ha':
- <<: *fraser
- - 'os-ovn-nofeature-noha':
- <<: *fraser
- - 'os-nosdn-fdio-noha':
- <<: *fraser
- - 'os-nosdn-fdio-ha':
- <<: *fraser
- - 'os-nosdn-bar-ha':
- <<: *fraser
- - 'os-nosdn-bar-noha':
- <<: *fraser
- - 'os-nosdn-ovs_dpdk-noha':
- <<: *fraser
- - 'os-nosdn-ovs_dpdk-ha':
- <<: *fraser
- - 'os-odl-sfc-noha':
- <<: *fraser
- - 'os-odl-sfc-ha':
- <<: *fraser
- - 'os-nosdn-calipso-noha':
- <<: *fraser
- - 'os-nosdn-nofeature-noha':
- <<: *danube
- - 'os-nosdn-nofeature-ha':
- <<: *danube
- - 'os-nosdn-nofeature-ha-ipv6':
- <<: *danube
- - 'os-nosdn-ovs-noha':
- <<: *danube
- - 'os-nosdn-ovs-ha':
- <<: *danube
- - 'os-nosdn-fdio-noha':
- <<: *danube
- - 'os-nosdn-fdio-ha':
- <<: *danube
- - 'os-nosdn-kvm-ha':
- <<: *danube
- - 'os-nosdn-kvm-noha':
- <<: *danube
- - 'os-odl_l2-fdio-noha':
- <<: *danube
- - 'os-odl_l2-fdio-ha':
- <<: *danube
- - 'os-odl_netvirt-fdio-noha':
- <<: *danube
- - 'os-odl_l2-sfc-noha':
- <<: *danube
- - 'os-odl_l3-nofeature-noha':
- <<: *danube
- - 'os-odl_l3-nofeature-ha':
- <<: *danube
- - 'os-odl_l3-ovs-noha':
- <<: *danube
- - 'os-odl_l3-ovs-ha':
- <<: *danube
- - 'os-odl-bgpvpn-ha':
- <<: *danube
- - 'os-odl-gluon-noha':
- <<: *danube
- - 'os-odl_l3-fdio-noha':
- <<: *danube
- - 'os-odl_l3-fdio-ha':
- <<: *danube
- - 'os-odl_l3-fdio_dvr-noha':
- <<: *danube
- - 'os-odl_l3-fdio_dvr-ha':
- <<: *danube
- - 'os-odl_l3-csit-noha':
- <<: *danube
- - 'os-onos-nofeature-ha':
- <<: *danube
- - 'os-ovn-nofeature-noha':
- <<: *danube
- - 'os-nosdn-nofeature-noha':
- <<: *master
- - 'os-nosdn-nofeature-ha':
- <<: *master
- - 'os-odl-nofeature-noha':
- <<: *master
- - 'os-odl-nofeature-ha':
- <<: *master
- - 'os-nosdn-queens-noha':
- <<: *master
- - 'os-nosdn-queens-ha':
- <<: *master
- - 'os-odl-queens-noha':
- <<: *master
- - 'os-odl-queens-ha':
- <<: *master
- - 'k8s-nosdn-nofeature-noha':
- <<: *master
- - 'os-nosdn-nofeature-noha':
- <<: *euphrates
- - 'os-nosdn-nofeature-ha':
- <<: *euphrates
- - 'os-odl-nofeature-ha':
- <<: *euphrates
- - 'os-odl-nofeature-noha':
- <<: *euphrates
- - 'os-odl-bgpvpn-ha':
- <<: *euphrates
- - 'os-ovn-nofeature-noha':
- <<: *euphrates
- - 'os-nosdn-fdio-noha':
- <<: *euphrates
- - 'os-nosdn-fdio-ha':
- <<: *euphrates
- - 'os-nosdn-bar-ha':
- <<: *euphrates
- - 'os-nosdn-bar-noha':
- <<: *euphrates
- - 'os-nosdn-nofeature-ha-ipv6':
- <<: *euphrates
- - 'os-nosdn-ovs_dpdk-noha':
- <<: *euphrates
- - 'os-nosdn-ovs_dpdk-ha':
- <<: *euphrates
- - 'os-nosdn-kvm_ovs_dpdk-noha':
- <<: *euphrates
- - 'os-nosdn-kvm_ovs_dpdk-ha':
- <<: *euphrates
- - 'os-odl-sfc-noha':
- <<: *euphrates
- - 'os-odl-sfc-ha':
- <<: *euphrates
- - 'os-nosdn-calipso-noha':
- <<: *euphrates
-
- platform:
- - 'baremetal'
- - 'virtual'
-
- os_version:
- - 'pike':
- os_scenario: 'pike'
- - 'queens':
- os_scenario: 'queens'
- - 'master':
- os_scenario: 'nofeature'
-
- topology:
- - 'noha'
- - 'ha'
-
-# Fetch Logs Job
-- job-template:
- name: 'apex-fetch-logs-{stream}'
-
- concurrent: true
-
- disabled: false
- scm:
- - git-scm-gerrit
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- # yamllint enable rule:line-length
- properties:
- - logrotate-default
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - 'apex-fetch-logs'
-
-- job-template:
- name: 'apex-runner-cperf-{stream}'
-
- # runner cperf job
- project-type: 'multijob'
- node: 'intel-pod2'
-
- disabled: false
-
- parameters:
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from the parameters macro."
-
- scm:
- - git-scm
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: false
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-deploy.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME"
- - multijob:
- name: 'Baremetal Deploy'
- condition: ALWAYS
- projects:
- - name: 'apex-deploy-baremetal-{stream}'
- node-parameters: false
- current-parameters: true
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- OPNFV_CLEAN=yes
- DEPLOY_SCENARIO={verify-scenario}
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: CPERF
- condition: SUCCESSFUL
- projects:
- - name: 'cperf-apex-intel-pod2-daily-master'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={verify-scenario}
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
-
-# Deploy job
-- job-template:
- name: 'apex-deploy-{platform}-{stream}'
-
- concurrent: true
-
- disabled: false
- quiet-period: 30
- scm:
- - git-scm-gerrit
-
- wrappers:
- - timeout:
- timeout: 140
- fail: true
-
- parameters:
- - '{project}-{platform}-{stream}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{verify-scenario}'
- description: "Scenario to deploy with."
- # yamllint disable rule:line-length
- - string:
- name: OPNFV_CLEAN
- default: 'no'
- description: "Use yes in lower case to invoke clean. Indicates whether the deploy environment should be cleaned before deployment."
-
- # yamllint enable rule:line-length
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-deploy.*'
- - 'functest.*'
- - 'yardstick.*'
- - 'dovetail.*'
- - 'storperf.*'
- - 'odl-netvirt.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - 'apex-download-artifact'
- - 'apex-deploy'
- - 'clean-workspace'
-
-
-# Virtual Deploy and Test
-- job-template:
- name: 'apex-virtual-{stream}'
-
- project-type: 'multijob'
-
- concurrent: true
-
- disabled: false
-
- scm:
- - git-scm-gerrit
-
- parameters:
- - '{project}-defaults'
- - '{project}-virtual-{stream}-defaults'
- - 'functest-suite-parameter'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{verify-scenario}'
- description: "Scenario to deploy with."
- - string:
- name: ARTIFACT_VERSION
- default: dev
- description: "Used for overriding the ARTIFACT_VERSION"
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-runner.*'
- - 'apex-run.*'
- - 'apex-virtual-.*'
- - 'apex-verify-gate-.*'
- - 'odl-netvirt.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- ARTIFACT_VERSION=$ARTIFACT_VERSION
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- PROMOTE=$PROMOTE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - shell: |
- if echo $DEPLOY_SCENARIO | grep queens &> /dev/null; then
- echo 'REPO=opnfv' > functest_repo
- else
- echo 'REPO=ollivier' > functest_repo
- fi
- - inject:
- properties-file: functest_repo
- - multijob:
- name: functest-smoke
- condition: ALWAYS
- projects:
- - name: 'functest-apex-virtual-suite-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- REPO=$REPO
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - multijob:
- name: apex-fetch-logs
- projects:
- - name: 'apex-fetch-logs-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-
-# Baremetal Deploy and Test
-- job-template:
- name: 'apex-{scenario}-baremetal-{scenario_stream}'
-
- project-type: 'multijob'
-
- disabled: false
-
- scm:
- - git-scm
-
- parameters:
- - '{project}-defaults'
- - '{project}-baremetal-{scenario_stream}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
- description: "Scenario to deploy with."
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-.*-promote.*'
- - 'apex-run.*'
- - 'apex-.+-baremetal-.+'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: 'Baremetal Deploy'
- condition: SUCCESSFUL
- execution-type: SEQUENTIALLY
- projects:
- - name: 'apex-deploy-baremetal-{scenario_stream}'
- node-parameters: true
- current-parameters: true
- predefined-parameters: |
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-fetch-logs-{scenario_stream}'
- current-parameters: true
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - multijob:
- name: 'OPNFV Test Suite'
- condition: ALWAYS
- projects:
- - name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - multijob:
- name: apex-fetch-logs
- projects:
- - name: 'apex-fetch-logs-{scenario_stream}'
- current-parameters: true
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- publishers:
- - groovy-postbuild:
- script:
- !include-raw-escape: ./update-build-result.groovy
-
-# Baremetal test job
-- job-template:
- name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
-
- project-type: 'multijob'
-
- disabled: false
-
- parameters:
- - '{project}-defaults'
- - '{project}-baremetal-{scenario_stream}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
- description: "Scenario to deploy with."
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-run.*'
- - 'apex-testsuite-.+-baremetal-.+'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Testing on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: Functest
- condition: ALWAYS
- projects:
- - name: 'functest-apex-baremetal-daily-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
- - multijob:
- name: Yardstick
- condition: ALWAYS
- projects:
- - name: 'yardstick-apex-baremetal-daily-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
- - multijob:
- name: Dovetail-proposed_tests
- condition: ALWAYS
- projects:
- - name: 'dovetail-apex-baremetal-proposed_tests-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
- abort-all-job: false
- git-revision: false
- - multijob:
- name: Dovetail-default
- condition: ALWAYS
- projects:
- - name: 'dovetail-apex-baremetal-default-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
- abort-all-job: false
- git-revision: false
- - multijob:
- name: StorPerf
- condition: ALWAYS
- projects:
- - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-nosdn-nofeature-ha/"
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
-# Build status is always success because the conditional plugin prefetches the
-# build status before the multijob phases execute
-# - conditional-step:
-# condition-kind: current-status
-# condition-worst: SUCCESS
-# condition-best: SUCCESS
-# on-evaluation-failure: mark-unstable
-# steps:
-# - shell: 'echo "Tests Passed"'
-
-- job-template:
- name: 'apex-daily-{stream}'
-
- # Job template for daily build
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- project-type: 'multijob'
-
- disabled: '{obj:disable_daily}'
-
- scm:
- - git-scm
-
- parameters:
- - '{project}-defaults'
- - '{project}-baremetal-{stream}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-daily.*'
-
- triggers:
- - 'apex-{stream}'
-
- builders:
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'apex-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: 'Verify and upload ISO'
- condition: SUCCESSFUL
- projects:
- - name: 'apex-verify-iso-{stream}'
- current-parameters: false
- predefined-parameters: |
- BUILD_DIRECTORY=$WORKSPACE/../apex-build-{stream}/.build
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - apex-builder-{stream}
-
-# snapshot create
-- job-template:
- name: 'apex-create-snapshot'
-
- disabled: false
-
- parameters:
- - '{project}-defaults'
-
- builders:
- - shell:
- !include-raw-escape: ./apex-snapshot-create.sh
-
-# snapshot upload
-- job-template:
- name: 'apex-upload-snapshot'
-
- disabled: false
-
- parameters:
- - '{project}-defaults'
-
- builders:
- - inject:
- properties-content: ARTIFACT_TYPE=snapshot
- - 'apex-upload-artifact'
-
-# CSIT promote
-- job-template:
- name: 'apex-csit-promote-daily-{stream}-os-{os_version}-{topology}'
-
- # Job template for promoting CSIT Snapshots
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: '{virtual-slave}'
- project-type: 'multijob'
- disabled: '{disable_promote}'
-
- scm:
- - git-scm
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: ARTIFACT_VERSION
- default: dev
- description: "Used for overriding the ARTIFACT_VERSION"
- - string:
- name: PROMOTE
- default: 'True'
- description: "Used for overriding the PROMOTE"
- - string:
- name: GS_URL
- default: $GS_BASE{gs-pathname}/{os_version}/{topology}
- description: "Used for overriding GS_URL from apex params"
-
- properties:
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-daily.*'
- - 'apex-csit-promote.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- triggers:
- - timed: '0 12 * * 0'
-
- builders:
- - multijob:
- name: apex-virtual-deploy-test
- condition: SUCCESSFUL
- projects:
- - name: 'apex-virtual-{stream}'
- current-parameters: true
- predefined-parameters: |
- DEPLOY_SCENARIO=os-odl-{os_scenario}-{topology}
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- FUNCTEST_SUITE_NAME=tempest_smoke
- FUNCTEST_MODE=testcase
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: create snapshot
- condition: SUCCESSFUL
- projects:
- - name: 'apex-create-snapshot'
- current-parameters: true
- predefined-parameters: |
- SNAP_TYPE=csit
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: upload snapshot
- condition: SUCCESSFUL
- projects:
- - name: 'apex-upload-snapshot'
- current-parameters: true
- predefined-parameters: |
- SNAP_TYPE=csit
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
-
-# FDIO promote
-- job-template:
- name: 'apex-fdio-promote-daily-{stream}'
-
- # Job template for promoting FDIO Snapshots
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: '{virtual-slave}'
- project-type: 'multijob'
- disabled: false
-
- scm:
- - git-scm
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
-
- properties:
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-deploy.*'
- - 'apex-runner.*'
- - 'apex-daily.*'
-
- builders:
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'apex-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_netvirt-fdio-noha
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- PROMOTE=True
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: create snapshot
- condition: SUCCESSFUL
- projects:
- - name: 'apex-create-snapshot'
- current-parameters: false
- predefined-parameters: |
- SNAP_TYPE=fdio
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: upload snapshot
- condition: SUCCESSFUL
- projects:
- - name: 'apex-upload-snapshot'
- current-parameters: false
- predefined-parameters: |
- SNAP_TYPE=fdio
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
-
-# Flex job
-- job-template:
- name: 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
-
- project-type: 'multijob'
-
- disabled: false
-
- node: 'flex-pod2'
-
- scm:
- - git-scm
-
- triggers:
- - 'apex-{stream}'
-
- parameters:
- - '{project}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
- description: "Scenario to deploy with."
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-.*-promote.*'
- - 'apex-run.*'
- - 'apex-.+-baremetal-.+'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: 'Baremetal Deploy'
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-baremetal-{stream}'
- node-parameters: true
- current-parameters: true
- predefined-parameters: |
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: Yardstick
- condition: ALWAYS
- projects:
- - name: 'yardstick-apex-baremetal-daily-{stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
-
-# Dovetail Danube test job
-- job-template:
- name: 'apex-dovetail-daily-os-nosdn-nofeature-ha-baremetal-danube'
-
- project-type: 'multijob'
-
- node: 'huawei-pod4'
-
- disabled: false
-
- parameters:
- - '{project}-defaults'
- - project-parameter:
- project: '{project}'
- branch: 'stable/danube'
- - apex-parameter:
- gs-pathname: '/danube'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
- description: "Scenario to deploy with."
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-.*-promote.*'
- - 'apex-run.*'
-
- triggers:
- - timed: '' # '0 1 * * *'
-
- builders:
- - description-setter:
- description: "Testing on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: 'Baremetal Deploy'
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-baremetal-danube'
- node-parameters: true
- current-parameters: true
- predefined-parameters: |
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: Dovetail
- condition: ALWAYS
- projects:
- - name: 'dovetail-apex-baremetal-proposed_tests-danube'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
-
-########################
-# parameter macros
-########################
-- parameter:
- name: apex-parameter
- parameters:
- - string:
- name: ARTIFACT_NAME
- default: 'latest'
- description: "RPM Artifact name that will be appended to GS_URL to deploy a specific artifact"
- - string:
- name: ARTIFACT_VERSION
- default: 'daily'
- description: "Artifact version type"
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/.build
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/apex-cache{gs-pathname}
- description: "Directory where the cache to be used during the build is located."
- # yamllint disable rule:line-length
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from the Global Jenkins configuration in case the job runs on non-LF hardware."
- # yamllint enable rule:line-length
- - string:
- name: GS_PATHNAME
- default: '{gs-pathname}'
- description: "Version directory where opnfv artifacts are stored in gs repository"
- - string:
- name: GS_URL
- default: $GS_BASE{gs-pathname}
- description: "URL to Google Storage."
- - string:
- name: PROMOTE
- default: 'False'
- description: "Flag indicating whether snapshot artifacts should be promoted/uploaded."
-
-########################
-# builder macros
-########################
-
-# fraser Builder
-- builder:
- name: apex-builder-fraser
- builders:
- - multijob:
- name: Baremetal Deploy and Test Phase
- condition: SUCCESSFUL
- projects:
- - name: 'apex-os-nosdn-nofeature-noha-baremetal-fraser'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-nofeature-ha-baremetal-fraser'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-nofeature-ha-baremetal-fraser'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-nofeature-noha-baremetal-fraser'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-bgpvpn-ha-baremetal-fraser'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-ovn-nofeature-noha-baremetal-fraser'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-fdio-noha-baremetal-fraser'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-fdio-ha-baremetal-fraser'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-bar-ha-baremetal-fraser'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-bar-noha-baremetal-fraser'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-ovs_dpdk-noha-baremetal-fraser'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-ovs_dpdk-ha-baremetal-fraser'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-sfc-noha-baremetal-fraser'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-sfc-ha-baremetal-fraser'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-calipso-noha-baremetal-fraser'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-
-# danube Builder
-- builder:
- name: apex-builder-danube
- builders:
- - multijob:
- name: Baremetal Deploy and Test Phase
- condition: SUCCESSFUL
- projects:
- - name: 'apex-os-nosdn-nofeature-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-nofeature-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-nofeature-ha-ipv6-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-ovs-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-ovs-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-fdio-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-fdio-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-kvm-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-kvm-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l2-fdio-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l2-fdio-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_netvirt-fdio-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l2-sfc-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-nofeature-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-nofeature-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-ovs-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-ovs-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-bgpvpn-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-gluon-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-fdio-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-fdio-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-fdio_dvr-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-fdio_dvr-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-csit-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-onos-nofeature-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-ovn-nofeature-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-
-# master Builder
-- builder:
- name: apex-builder-master
- builders:
- - multijob:
- name: Baremetal Deploy and Test Phase
- condition: SUCCESSFUL
- projects:
- - name: 'apex-os-nosdn-nofeature-noha-baremetal-master'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-nofeature-ha-baremetal-master'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-nofeature-noha-baremetal-master'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-nofeature-ha-baremetal-master'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-queens-noha-baremetal-master'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-queens-ha-baremetal-master'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-queens-noha-baremetal-master'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-queens-ha-baremetal-master'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-k8s-nosdn-nofeature-noha-baremetal-master'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-
-# euphrates Builder
-- builder:
- name: apex-builder-euphrates
- builders:
- - multijob:
- name: Baremetal Deploy and Test Phase
- condition: SUCCESSFUL
- projects:
- - name: 'apex-os-nosdn-nofeature-noha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-nofeature-ha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-nofeature-ha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-nofeature-noha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-bgpvpn-ha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-ovn-nofeature-noha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-fdio-noha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-fdio-ha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-bar-ha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-bar-noha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-nofeature-ha-ipv6-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-ovs_dpdk-noha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-ovs_dpdk-ha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-kvm_ovs_dpdk-noha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-kvm_ovs_dpdk-ha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-sfc-noha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-sfc-ha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-calipso-noha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-- builder:
- name: 'apex-upload-artifact'
- builders:
- - shell:
- !include-raw: ./apex-upload-artifact.sh
-
-- builder:
- name: 'apex-download-artifact'
- builders:
- - shell:
- !include-raw: ./apex-download-artifact.sh
-
-- builder:
- name: 'apex-deploy'
- builders:
- - shell:
- !include-raw: ./apex-deploy.sh
-
-- builder:
- name: 'apex-fetch-logs'
- builders:
- - shell:
- !include-raw: ./apex-fetch-logs.sh
-
-########################
-# trigger macros
-########################
-- trigger:
- name: 'apex-master'
- triggers:
- - timed: '0 0 1-31/2 * *'
-
-- trigger:
- name: 'apex-fraser'
- triggers:
- - timed: '0 0 2-30/2 * *'
-
-- trigger:
- name: 'apex-euphrates'
- triggers:
- - timed: '0 0 2-30/2 * *'
-
-- trigger:
- name: 'apex-danube'
- triggers:
- - timed: '0 3 1 1 7'
diff --git a/jjb/apex/apex.yaml.j2 b/jjb/apex/apex.yaml.j2
deleted file mode 100644
index ffe8c5780..000000000
--- a/jjb/apex/apex.yaml.j2
+++ /dev/null
@@ -1,1105 +0,0 @@
----
-- project:
- name: 'apex'
- project: 'apex'
- jobs:
- - 'apex-fetch-logs-{stream}'
- - 'apex-runner-cperf-{stream}'
- - 'apex-virtual-{stream}'
- - 'apex-deploy-{platform}-{stream}'
- - 'apex-daily-{stream}'
- - 'apex-csit-promote-daily-{stream}-os-{os_version}-{topology}'
- - 'apex-fdio-promote-daily-{stream}'
- - 'apex-{scenario}-baremetal-{scenario_stream}'
- - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
- - 'apex-upload-snapshot'
- - 'apex-create-snapshot'
- - 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
- - 'apex-dovetail-daily-os-nosdn-nofeature-ha-baremetal-danube'
- # stream: branch with - in place of / (eg. stable-arno)
- # branch: branch (eg. stable/arno)
- stream:
- - master: &master
- branch: 'master'
- gs-pathname: ''
- build-slave: 'apex-build-master'
- virtual-slave: 'apex-virtual-master'
- baremetal-slave: 'apex-baremetal-master'
- verify-scenario: 'os-nosdn-nofeature-ha'
- scenario_stream: 'master'
- disable_daily: false
- disable_promote: false
- - fraser: &fraser
- branch: 'stable/fraser'
- gs-pathname: '/fraser'
- build-slave: 'apex-build-master'
- virtual-slave: 'apex-virtual-master'
- baremetal-slave: 'apex-baremetal-master'
- verify-scenario: 'os-nosdn-nofeature-ha'
- scenario_stream: 'fraser'
- disable_daily: true
- disable_promote: true
- - euphrates: &euphrates
- branch: 'stable/euphrates'
- gs-pathname: '/euphrates'
- build-slave: 'apex-build-master'
- virtual-slave: 'apex-virtual-master'
- baremetal-slave: 'apex-baremetal-master'
- verify-scenario: 'os-odl-nofeature-ha'
- scenario_stream: 'euphrates'
- disable_daily: true
- disable_promote: true
- - danube: &danube
- branch: 'stable/danube'
- gs-pathname: '/danube'
- build-slave: 'apex-build-danube'
- virtual-slave: 'apex-virtual-danube'
- baremetal-slave: 'apex-baremetal-danube'
- verify-scenario: 'os-odl_l3-nofeature-ha'
- scenario_stream: 'danube'
- disabled: true
- disable_daily: true
- disable_promote: true
-
- scenario:
- {%- for stream in scenarios %}
- {%- for scenario in scenarios[stream] %}
- - '{{scenario}}':
- <<: *{{stream}}
- {%- endfor %}
- {%- endfor %}
-
- platform:
- - 'baremetal'
- - 'virtual'
-
- os_version:
- - 'pike':
- os_scenario: 'pike'
- - 'queens':
- os_scenario: 'queens'
- - 'master':
- os_scenario: 'nofeature'
-
- topology:
- - 'noha'
- - 'ha'
-
-# Fetch Logs Job
-- job-template:
- name: 'apex-fetch-logs-{stream}'
-
- concurrent: true
-
- disabled: false
- scm:
- - git-scm-gerrit
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- # yamllint enable rule:line-length
- properties:
- - logrotate-default
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - 'apex-fetch-logs'
-
-- job-template:
- name: 'apex-runner-cperf-{stream}'
-
- # runner cperf job
- project-type: 'multijob'
- node: 'intel-pod2'
-
- disabled: false
-
- parameters:
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from the parameters macro."
-
- scm:
- - git-scm
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: false
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-deploy.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME"
- - multijob:
- name: 'Baremetal Deploy'
- condition: ALWAYS
- projects:
- - name: 'apex-deploy-baremetal-{stream}'
- node-parameters: false
- current-parameters: true
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- OPNFV_CLEAN=yes
- DEPLOY_SCENARIO={verify-scenario}
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: CPERF
- condition: SUCCESSFUL
- projects:
- - name: 'cperf-apex-intel-pod2-daily-master'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={verify-scenario}
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
-
-# Deploy job
-- job-template:
- name: 'apex-deploy-{platform}-{stream}'
-
- concurrent: true
-
- disabled: false
- quiet-period: 30
- scm:
- - git-scm-gerrit
-
- wrappers:
- - timeout:
- timeout: 140
- fail: true
-
- parameters:
- - '{project}-{platform}-{stream}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{verify-scenario}'
- description: "Scenario to deploy with."
- # yamllint disable rule:line-length
- - string:
- name: OPNFV_CLEAN
- default: 'no'
- description: "Use yes in lower case to invoke clean. Indicates whether the deploy environment should be cleaned before deployment."
-
- # yamllint enable rule:line-length
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-deploy.*'
- - 'functest.*'
- - 'yardstick.*'
- - 'dovetail.*'
- - 'storperf.*'
- - 'odl-netvirt.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - 'apex-download-artifact'
- - 'apex-deploy'
- - 'clean-workspace'
-
-
-# Virtual Deploy and Test
-- job-template:
- name: 'apex-virtual-{stream}'
-
- project-type: 'multijob'
-
- concurrent: true
-
- disabled: false
-
- scm:
- - git-scm-gerrit
-
- parameters:
- - '{project}-defaults'
- - '{project}-virtual-{stream}-defaults'
- - 'functest-suite-parameter'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{verify-scenario}'
- description: "Scenario to deploy with."
- - string:
- name: ARTIFACT_VERSION
- default: dev
- description: "Used for overriding the ARTIFACT_VERSION"
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-runner.*'
- - 'apex-run.*'
- - 'apex-virtual-.*'
- - 'apex-verify-gate-.*'
- - 'odl-netvirt.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- ARTIFACT_VERSION=$ARTIFACT_VERSION
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- PROMOTE=$PROMOTE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - shell: |
- if echo $DEPLOY_SCENARIO | grep queens &> /dev/null; then
- echo 'REPO=opnfv' > functest_repo
- else
- echo 'REPO=ollivier' > functest_repo
- fi
- - inject:
- properties-file: functest_repo
- - multijob:
- name: functest-smoke
- condition: ALWAYS
- projects:
- - name: 'functest-apex-virtual-suite-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- REPO=$REPO
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - multijob:
- name: apex-fetch-logs
- projects:
- - name: 'apex-fetch-logs-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-
-# Baremetal Deploy and Test
-- job-template:
- name: 'apex-{scenario}-baremetal-{scenario_stream}'
-
- project-type: 'multijob'
-
- disabled: false
-
- scm:
- - git-scm
-
- parameters:
- - '{project}-defaults'
- - '{project}-baremetal-{scenario_stream}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
- description: "Scenario to deploy with."
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-.*-promote.*'
- - 'apex-run.*'
- - 'apex-.+-baremetal-.+'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: 'Baremetal Deploy'
- condition: SUCCESSFUL
- execution-type: SEQUENTIALLY
- projects:
- - name: 'apex-deploy-baremetal-{scenario_stream}'
- node-parameters: true
- current-parameters: true
- predefined-parameters: |
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-fetch-logs-{scenario_stream}'
- current-parameters: true
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - multijob:
- name: 'OPNFV Test Suite'
- condition: ALWAYS
- projects:
- - name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - multijob:
- name: apex-fetch-logs
- projects:
- - name: 'apex-fetch-logs-{scenario_stream}'
- current-parameters: true
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- publishers:
- - groovy-postbuild:
- script:
- !include-raw-escape: ./update-build-result.groovy
-
-# Baremetal test job
-- job-template:
- name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
-
- project-type: 'multijob'
-
- disabled: false
-
- parameters:
- - '{project}-defaults'
- - '{project}-baremetal-{scenario_stream}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
- description: "Scenario to deploy with."
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-run.*'
- - 'apex-testsuite-.+-baremetal-.+'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Testing on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: Functest
- condition: ALWAYS
- projects:
- - name: 'functest-apex-baremetal-daily-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
- - multijob:
- name: Yardstick
- condition: ALWAYS
- projects:
- - name: 'yardstick-apex-baremetal-daily-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
- - multijob:
- name: Dovetail-proposed_tests
- condition: ALWAYS
- projects:
- - name: 'dovetail-apex-baremetal-proposed_tests-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
- abort-all-job: false
- git-revision: false
- - multijob:
- name: Dovetail-default
- condition: ALWAYS
- projects:
- - name: 'dovetail-apex-baremetal-default-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
- abort-all-job: false
- git-revision: false
- - multijob:
- name: StorPerf
- condition: ALWAYS
- projects:
- - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-nosdn-nofeature-ha/"
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
-# Build status is always success because the conditional plugin prefetches the
-# build status before the multijob phases execute
-# - conditional-step:
-# condition-kind: current-status
-# condition-worst: SUCCESS
-# condition-best: SUCCESS
-# on-evaluation-failure: mark-unstable
-# steps:
-# - shell: 'echo "Tests Passed"'
-
-- job-template:
- name: 'apex-daily-{stream}'
-
- # Job template for daily build
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- project-type: 'multijob'
-
- disabled: '{obj:disable_daily}'
-
- scm:
- - git-scm
-
- parameters:
- - '{project}-defaults'
- - '{project}-baremetal-{stream}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-daily.*'
-
- triggers:
- - 'apex-{stream}'
-
- builders:
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'apex-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: 'Verify and upload ISO'
- condition: SUCCESSFUL
- projects:
- - name: 'apex-verify-iso-{stream}'
- current-parameters: false
- predefined-parameters: |
- BUILD_DIRECTORY=$WORKSPACE/../apex-build-{stream}/.build
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - apex-builder-{stream}
-
-# snapshot create
-- job-template:
- name: 'apex-create-snapshot'
-
- disabled: false
-
- parameters:
- - '{project}-defaults'
-
- builders:
- - shell:
- !include-raw-escape: ./apex-snapshot-create.sh
-
-# snapshot upload
-- job-template:
- name: 'apex-upload-snapshot'
-
- disabled: false
-
- parameters:
- - '{project}-defaults'
-
- builders:
- - inject:
- properties-content: ARTIFACT_TYPE=snapshot
- - 'apex-upload-artifact'
-
-# CSIT promote
-- job-template:
- name: 'apex-csit-promote-daily-{stream}-os-{os_version}-{topology}'
-
- # Job template for promoting CSIT Snapshots
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: '{virtual-slave}'
- project-type: 'multijob'
- disabled: '{disable_promote}'
-
- scm:
- - git-scm
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: ARTIFACT_VERSION
- default: dev
- description: "Used for overriding the ARTIFACT_VERSION"
- - string:
- name: PROMOTE
- default: 'True'
- description: "Used for overriding the PROMOTE"
- - string:
- name: GS_URL
- default: $GS_BASE{gs-pathname}/{os_version}/{topology}
- description: "User for overriding GS_URL from apex params"
-
- properties:
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-daily.*'
- - 'apex-csit-promote.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- triggers:
- - timed: '0 12 * * 0'
-
- builders:
- - multijob:
- name: apex-virtual-deploy-test
- condition: SUCCESSFUL
- projects:
- - name: 'apex-virtual-{stream}'
- current-parameters: true
- predefined-parameters: |
- DEPLOY_SCENARIO=os-odl-{os_scenario}-{topology}
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- FUNCTEST_SUITE_NAME=tempest_smoke
- FUNCTEST_MODE=testcase
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: create snapshot
- condition: SUCCESSFUL
- projects:
- - name: 'apex-create-snapshot'
- current-parameters: true
- predefined-parameters: |
- SNAP_TYPE=csit
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: upload snapshot
- condition: SUCCESSFUL
- projects:
- - name: 'apex-upload-snapshot'
- current-parameters: true
- predefined-parameters: |
- SNAP_TYPE=csit
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
-
-# FDIO promote
-- job-template:
- name: 'apex-fdio-promote-daily-{stream}'
-
- # Job template for promoting FDIO Snapshots
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: '{virtual-slave}'
- project-type: 'multijob'
- disabled: false
-
- scm:
- - git-scm
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
-
- properties:
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-deploy.*'
- - 'apex-runner.*'
- - 'apex-daily.*'
-
- builders:
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'apex-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_netvirt-fdio-noha
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- PROMOTE=True
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: create snapshot
- condition: SUCCESSFUL
- projects:
- - name: 'apex-create-snapshot'
- current-parameters: false
- predefined-parameters: |
- SNAP_TYPE=fdio
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: upload snapshot
- condition: SUCCESSFUL
- projects:
- - name: 'apex-upload-snapshot'
- current-parameters: false
- predefined-parameters: |
- SNAP_TYPE=fdio
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
-
-# Flex job
-- job-template:
- name: 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
-
- project-type: 'multijob'
-
- disabled: false
-
- node: 'flex-pod2'
-
- scm:
- - git-scm
-
- triggers:
- - 'apex-{stream}'
-
- parameters:
- - '{project}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
- description: "Scenario to deploy with."
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-.*-promote.*'
- - 'apex-run.*'
- - 'apex-.+-baremetal-.+'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: 'Baremetal Deploy'
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-baremetal-{stream}'
- node-parameters: true
- current-parameters: true
- predefined-parameters: |
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: Yardstick
- condition: ALWAYS
- projects:
- - name: 'yardstick-apex-baremetal-daily-{stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
-
-# Dovetail Danube test job
-- job-template:
- name: 'apex-dovetail-daily-os-nosdn-nofeature-ha-baremetal-danube'
-
- project-type: 'multijob'
-
- node: 'huawei-pod4'
-
- disabled: false
-
- parameters:
- - '{project}-defaults'
- - project-parameter:
- project: '{project}'
- branch: 'stable/danube'
- - apex-parameter:
- gs-pathname: '/danube'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
- description: "Scenario to deploy with."
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- block-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-.*-promote.*'
- - 'apex-run.*'
-
- triggers:
- - timed: '' # '0 1 * * *'
-
- builders:
- - description-setter:
- description: "Testing on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: 'Baremetal Deploy'
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-baremetal-danube'
- node-parameters: true
- current-parameters: true
- predefined-parameters: |
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: Dovetail
- condition: ALWAYS
- projects:
- - name: 'dovetail-apex-baremetal-proposed_tests-danube'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
-
-########################
-# parameter macros
-########################
-- parameter:
- name: apex-parameter
- parameters:
- - string:
- name: ARTIFACT_NAME
- default: 'latest'
- description: "RPM Artifact name that will be appended to GS_URL to deploy a specific artifact"
- - string:
- name: ARTIFACT_VERSION
- default: 'daily'
- description: "Artifact version type"
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/.build
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/apex-cache{gs-pathname}
- description: "Directory where the cache to be used during the build is located."
- # yamllint disable rule:line-length
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from Global Jenkins configuration in case if the stuff is done on none-LF HW."
- # yamllint enable rule:line-length
- - string:
- name: GS_PATHNAME
- default: '{gs-pathname}'
- description: "Version directory where opnfv artifacts are stored in gs repository"
- - string:
- name: GS_URL
- default: $GS_BASE{gs-pathname}
- description: "URL to Google Storage."
- - string:
- name: PROMOTE
- default: 'False'
- description: "Flag to know if we should promote/upload snapshot artifacts."
-
-########################
-# builder macros
-########################
-{% for stream in scenarios %}
-# {{ stream }} Builder
-- builder:
- name: apex-builder-{{ stream }}
- builders:
- - multijob:
- name: Baremetal Deploy and Test Phase
- condition: SUCCESSFUL
- projects:
-{%- for scenario in scenarios[stream] %}
- - name: 'apex-{{ scenario }}-baremetal-{{ stream }}'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-{%- endfor %}
-{% endfor -%}
-
-- builder:
- name: 'apex-upload-artifact'
- builders:
- - shell:
- !include-raw: ./apex-upload-artifact.sh
-
-- builder:
- name: 'apex-download-artifact'
- builders:
- - shell:
- !include-raw: ./apex-download-artifact.sh
-
-- builder:
- name: 'apex-deploy'
- builders:
- - shell:
- !include-raw: ./apex-deploy.sh
-
-- builder:
- name: 'apex-fetch-logs'
- builders:
- - shell:
- !include-raw: ./apex-fetch-logs.sh
-
-########################
-# trigger macros
-########################
-- trigger:
- name: 'apex-master'
- triggers:
- - timed: '0 0 1-31/2 * *'
-
-- trigger:
- name: 'apex-fraser'
- triggers:
- - timed: '0 0 2-30/2 * *'
-
-- trigger:
- name: 'apex-euphrates'
- triggers:
- - timed: '0 0 2-30/2 * *'
-
-- trigger:
- name: 'apex-danube'
- triggers:
- - timed: '0 3 1 1 7'
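
The four timed macros above interleave the daily multijobs instead of letting them compete for the same baremetal resources: apex-master fires at midnight UTC on odd days of the month, apex-fraser and apex-euphrates on even days, and apex-danube is limited to a narrow January window. A minimal bash sketch (not part of the original change) that reports which stream's daily these cron expressions would start on the current date:

    #!/bin/bash
    # Sketch: report which apex daily stream the cron macros above would start
    # today, mirroring '0 0 1-31/2 * *' (odd days -> master) and
    # '0 0 2-30/2 * *' (even days -> fraser/euphrates).
    dom=$(date -u +%d)
    dom=$((10#$dom))    # force base 10 so a leading zero is not read as octal
    if (( dom % 2 == 1 )); then
        echo "day ${dom}: apex-daily-master fires at 00:00 UTC"
    else
        echo "day ${dom}: apex-daily-fraser and apex-daily-euphrates fire at 00:00 UTC"
    fi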
-
diff --git a/jjb/apex/scenarios.yaml.hidden b/jjb/apex/scenarios.yaml.hidden
deleted file mode 100644
index 3474fdbbb..000000000
--- a/jjb/apex/scenarios.yaml.hidden
+++ /dev/null
@@ -1,72 +0,0 @@
-master:
- - 'os-nosdn-nofeature-noha'
- - 'os-nosdn-nofeature-ha'
- - 'os-odl-nofeature-noha'
- - 'os-odl-nofeature-ha'
- - 'os-nosdn-queens-noha'
- - 'os-nosdn-queens-ha'
- - 'os-odl-queens-noha'
- - 'os-odl-queens-ha'
- - 'k8s-nosdn-nofeature-noha'
-fraser:
- - 'os-nosdn-nofeature-noha'
- - 'os-nosdn-nofeature-ha'
- - 'os-odl-nofeature-ha'
- - 'os-odl-nofeature-noha'
- - 'os-odl-bgpvpn-ha'
- - 'os-ovn-nofeature-noha'
- - 'os-nosdn-fdio-noha'
- - 'os-nosdn-fdio-ha'
- - 'os-nosdn-bar-ha'
- - 'os-nosdn-bar-noha'
- - 'os-nosdn-ovs_dpdk-noha'
- - 'os-nosdn-ovs_dpdk-ha'
- - 'os-odl-sfc-noha'
- - 'os-odl-sfc-ha'
- - 'os-nosdn-calipso-noha'
-euphrates:
- - 'os-nosdn-nofeature-noha'
- - 'os-nosdn-nofeature-ha'
- - 'os-odl-nofeature-ha'
- - 'os-odl-nofeature-noha'
- - 'os-odl-bgpvpn-ha'
- - 'os-ovn-nofeature-noha'
- - 'os-nosdn-fdio-noha'
- - 'os-nosdn-fdio-ha'
- - 'os-nosdn-bar-ha'
- - 'os-nosdn-bar-noha'
- - 'os-nosdn-nofeature-ha-ipv6'
- - 'os-nosdn-ovs_dpdk-noha'
- - 'os-nosdn-ovs_dpdk-ha'
- - 'os-nosdn-kvm_ovs_dpdk-noha'
- - 'os-nosdn-kvm_ovs_dpdk-ha'
- - 'os-odl-sfc-noha'
- - 'os-odl-sfc-ha'
- - 'os-nosdn-calipso-noha'
-danube:
- - 'os-nosdn-nofeature-noha'
- - 'os-nosdn-nofeature-ha'
- - 'os-nosdn-nofeature-ha-ipv6'
- - 'os-nosdn-ovs-noha'
- - 'os-nosdn-ovs-ha'
- - 'os-nosdn-fdio-noha'
- - 'os-nosdn-fdio-ha'
- - 'os-nosdn-kvm-ha'
- - 'os-nosdn-kvm-noha'
- - 'os-odl_l2-fdio-noha'
- - 'os-odl_l2-fdio-ha'
- - 'os-odl_netvirt-fdio-noha'
- - 'os-odl_l2-sfc-noha'
- - 'os-odl_l3-nofeature-noha'
- - 'os-odl_l3-nofeature-ha'
- - 'os-odl_l3-ovs-noha'
- - 'os-odl_l3-ovs-ha'
- - 'os-odl-bgpvpn-ha'
- - 'os-odl-gluon-noha'
- - 'os-odl_l3-fdio-noha'
- - 'os-odl_l3-fdio-ha'
- - 'os-odl_l3-fdio_dvr-noha'
- - 'os-odl_l3-fdio_dvr-ha'
- - 'os-odl_l3-csit-noha'
- - 'os-onos-nofeature-ha'
- - 'os-ovn-nofeature-noha'
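
This scenario map is the data the Jinja2 builder-macro loop earlier in this patch ('{% for stream in scenarios %}') iterates over, emitting one apex-{scenario}-baremetal-{stream} multijob phase per entry. A small bash sketch, assuming the one-space/dash indentation shown here, that lists the scenarios defined for a single stream:

    #!/bin/bash
    # Sketch: print the scenario names for one stream from scenarios.yaml.hidden,
    # e.g. "./list-scenarios.sh fraser".
    stream="${1:-master}"
    awk -v s="${stream}:" '
        $0 == s         { found = 1; next }   # entered the requested stream
        /^[A-Za-z]/     { found = 0 }         # any other top-level key ends it
        found && /- /   { print $2 }          # "- <name>": second field is the name
    ' scenarios.yaml.hidden | tr -d "'"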
diff --git a/jjb/apex/update-build-result.groovy b/jjb/apex/update-build-result.groovy
deleted file mode 100644
index 9edca6b6b..000000000
--- a/jjb/apex/update-build-result.groovy
+++ /dev/null
@@ -1,5 +0,0 @@
-import hudson.model.*
-if (manager.logContains('^.*apex-deploy-baremetal.*SUCCESS$')
- && manager.build.@result == hudson.model.Result.FAILURE) {
- manager.build.@result = hudson.model.Result.UNSTABLE
-}
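
This Groovy postbuild hook downgrades a FAILURE result to UNSTABLE whenever the console log shows that the apex-deploy-baremetal phase itself succeeded. A hedged bash sketch for trying the same log pattern outside Jenkins, for example against a saved consoleText file:

    #!/bin/bash
    # Sketch: check a saved Jenkins console log for a successful
    # apex-deploy-baremetal phase, using the same regex as the Groovy hook.
    log="${1:-consoleText}"
    if grep -Eq '^.*apex-deploy-baremetal.*SUCCESS$' "${log}"; then
        echo "deploy phase reported SUCCESS: the build could be downgraded to UNSTABLE"
    else
        echo "no successful deploy phase found in ${log}"
    fi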
diff --git a/jjb/armband/armband-ci-jobs.yaml b/jjb/armband/armband-ci-jobs.yaml
deleted file mode 100644
index 140f84b03..000000000
--- a/jjb/armband/armband-ci-jobs.yaml
+++ /dev/null
@@ -1,373 +0,0 @@
----
-# jenkins job templates for Armband
-- project:
- name: 'armband-ci'
- project: 'armband'
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- fraser: &fraser
- stream: fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- # -------------------------------
- # DEPLOY TYPE ANCHORS
- # -------------------------------
- baremetal: &baremetal
- installer: 'fuel'
- deploy-type: 'baremetal'
- slave-label: 'armband-{deploy-type}'
- virtual: &virtual
- installer: 'fuel'
- deploy-type: 'virtual'
- slave-label: 'armband-{deploy-type}'
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # CI POD's
- # -------------------------------
- # fraser
- # -------------------------------
- pod:
- # yamllint disable rule:key-duplicates
- - armband-baremetal:
- <<: *baremetal
- <<: *fraser
- - armband-virtual:
- <<: *virtual
- <<: *fraser
- # -------------------------------
- # master
- # -------------------------------
- - armband-baremetal:
- <<: *baremetal
- <<: *master
- - armband-virtual:
- <<: *virtual
- <<: *master
- # yamllint enable rule:key-duplicates
- # -------------------------------
- # scenarios
- # -------------------------------
- scenario:
- # HA scenarios
- - 'os-nosdn-nofeature-ha':
- auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
- - 'os-odl-nofeature-ha':
- auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
- - 'os-ovn-nofeature-ha':
- auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-ovs-ha':
- auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
-
- # NOHA scenarios
- - 'os-nosdn-nofeature-noha':
- auto-trigger-name: '{installer}-{scenario}-{pod}-{stream}-trigger'
-
- jobs:
- - '{installer}-{scenario}-{pod}-daily-{stream}'
- - '{installer}-deploy-{pod}-daily-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
- name: '{installer}-{scenario}-{pod}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '{installer}-os-.*?-{pod}-daily-.*'
- - 'armband-verify-.*'
- block-level: 'NODE'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
- triggers:
- - '{auto-trigger-name}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
-
- builders:
- - trigger-builds:
- - project: '{installer}-deploy-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- same-node: true
- block: true
- - trigger-builds:
- - project: 'functest-{installer}-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- same-node: true
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-{installer}-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- # 1. here the stream means the SUT stream; the dovetail stream is defined in its own job
- # 2. the proposed_tests testsuite here is for new test cases planned to be added into OVP
- # 3. run proposed_tests on Saturday against ha scenarios (matching the day-of-week conditions below)
- # 4. the default testsuite here is for the test cases already added into OVP
- # 5. run the default testsuite on Sunday against ha scenarios
- # 6. not used for release criteria or compliance,
- # only to debug dovetail tool bugs with arm pods
- - conditional-step:
- condition-kind: and
- condition-operands:
- - condition-kind: regex-match
- regex: '.*-ha'
- label: '{scenario}'
- - condition-kind: day-of-week
- day-selector: select-days
- days:
- SAT: true
- use-build-time: true
- steps:
- - trigger-builds:
- - project: 'dovetail-{installer}-{pod}-proposed_tests-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - conditional-step:
- condition-kind: and
- condition-operands:
- - condition-kind: regex-match
- regex: '.*-ha'
- label: '{scenario}'
- - condition-kind: day-of-week
- day-selector: select-days
- days:
- SUN: true
- use-build-time: true
- steps:
- - trigger-builds:
- - project: 'dovetail-{installer}-{pod}-default-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- # Armband uses Fuel's log collection project job, no need to duplicate
- - conditional-step:
- condition-kind: not
- condition-operand:
- condition-kind: regex-match
- regex: 'danube'
- label: '{stream}'
- steps:
- - trigger-builds:
- - project: 'fuel-collect-logs-{deploy-type}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
-
-- job-template:
- name: '{installer}-deploy-{pod}-daily-{stream}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '{installer}-deploy-{pod}-daily-{stream}'
- - '{installer}-deploy-generic-daily-.*'
- block-level: 'NODE'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - testapi-parameter
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl-nofeature-ha'
- - string:
- name: PROJECT
- default: '{project}'
-
- scm:
- - git-scm
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
- builders:
- - track-begin-timestamp
- - shell:
- !include-raw-escape: ../fuel/fuel-deploy.sh
-
- publishers:
- - email:
- recipients: armband@enea.com
- - email-jenkins-admins-on-failure
- - report-provision-result
-
-########################
-# trigger macros
-########################
-# CI PODs
-# ----------------------------------------------------------------
-# Enea Armband CI Baremetal Triggers running against master branch
-# ----------------------------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-master-trigger'
- triggers:
- - timed: '0 1 * * 1'
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-armband-baremetal-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-nofeature-ha-armband-baremetal-master-trigger'
- triggers:
- - timed: '0 1 * * 3,7'
-- trigger:
- name: 'fuel-os-ovn-nofeature-ha-armband-baremetal-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-armband-baremetal-master-trigger'
- triggers:
- - timed: '0 1 * * 4'
-# ---------------------------------------------------------------------
-# Enea Armband CI Baremetal Triggers running against fraser branch
-# ---------------------------------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-fraser-trigger'
- triggers:
- - timed: '0 1 * * 2'
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-armband-baremetal-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-nofeature-ha-armband-baremetal-fraser-trigger'
- triggers:
- - timed: '0 1 * * 5'
-- trigger:
- name: 'fuel-os-ovn-nofeature-ha-armband-baremetal-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-armband-baremetal-fraser-trigger'
- triggers:
- - timed: '0 1 * * 6'
-
-# --------------------------------------------------------------
-# Enea Armband CI Virtual Triggers running against master branch
-# --------------------------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-armband-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-armband-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-nofeature-ha-armband-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-ovn-nofeature-ha-armband-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-armband-virtual-master-trigger'
- triggers:
- - timed: ''
-# -------------------------------------------------------------------
-# Enea Armband CI Virtual Triggers running against fraser branch
-# -------------------------------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-armband-virtual-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-armband-virtual-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-nofeature-ha-armband-virtual-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-ovn-nofeature-ha-armband-virtual-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-armband-virtual-fraser-trigger'
- triggers:
- - timed: ''
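
The conditional-step builders in this file gate Dovetail on two things: the scenario must match '.*-ha' and the build day must be Saturday (proposed_tests) or Sunday (default). A bash sketch (an illustration, not taken from the repository) that mirrors that gate:

    #!/bin/bash
    # Sketch: mirror the Dovetail gate above -- HA scenarios only,
    # proposed_tests on Saturday and the default suite on Sunday.
    scenario="${DEPLOY_SCENARIO:-os-odl-nofeature-ha}"
    day=$(LC_ALL=C date -u +%a)    # Sat, Sun, ...
    if [[ "${scenario}" == *-ha ]]; then
        case "${day}" in
            Sat) echo "would trigger dovetail-fuel-<pod>-proposed_tests-<stream>" ;;
            Sun) echo "would trigger dovetail-fuel-<pod>-default-<stream>" ;;
            *)   echo "HA scenario, but no Dovetail suite is scheduled today" ;;
        esac
    else
        echo "non-HA scenario: Dovetail is skipped"
    fi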
diff --git a/jjb/armband/armband-verify-jobs.yaml b/jjb/armband/armband-verify-jobs.yaml
deleted file mode 100644
index 6f0016348..000000000
--- a/jjb/armband/armband-verify-jobs.yaml
+++ /dev/null
@@ -1,210 +0,0 @@
----
-- project:
- name: 'armband-verify-jobs'
-
- project: 'armband'
-
- installer: 'fuel'
- #####################################
- # branch definitions
- #####################################
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- #####################################
- # patch verification phases
- #####################################
- phase:
- - 'deploy-virtual':
- slave-label: 'armband-virtual'
- #####################################
- # jobs
- #####################################
- jobs:
- - 'armband-verify-{stream}'
- - 'armband-verify-{phase}-{stream}'
-#####################################
-# job templates
-#####################################
-- job-template:
- name: 'armband-verify-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-os-.*?-virtual-daily-.*'
- - 'armband-verify-.*'
- block-level: 'NODE'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'ci/**'
- - compare-type: ANT
- pattern: 'patches/**'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
- readable-message: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'armband-virtual-defaults':
- installer: '{installer}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'armband-verify-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
-
- - multijob:
- name: smoke-test
- condition: SUCCESSFUL
- projects:
- # Use Functest job definition from jjb/functest/functest-daily-jobs
- - name: 'functest-fuel-armband-virtual-suite-{stream}'
- current-parameters: false
- predefined-parameters: |
- FUNCTEST_MODE=tier
- FUNCTEST_TIER=healthcheck
- # Should be in sync with fuel-deploy.sh default scenario
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- - name: 'functest-fuel-armband-virtual-suite-{stream}'
- current-parameters: false
- predefined-parameters: |
- FUNCTEST_MODE=testcase
- FUNCTEST_SUITE_NAME=vping_ssh
- # Should be in sync with fuel-deploy.sh default scenario
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
-
-- job-template:
- name: 'armband-verify-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 2
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'armband-verify-deploy-.*'
- block-level: 'NODE'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-defaults'
- - 'armband-virtual-defaults':
- installer: '{installer}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-verify-{phase}-macro'
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'armband-verify-deploy-virtual-macro'
- builders:
- - shell:
- !include-raw: ../fuel/fuel-deploy.sh
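
The Gerrit trigger above only starts armband-verify when a patchset touches ci/** or patches/**, so documentation-only changes never tie up a virtual pod. A bash sketch that approximates the same file-path filter for the HEAD commit of a local armband clone:

    #!/bin/bash
    # Sketch: approximate the Gerrit file-path filter used by armband-verify --
    # trigger on changes under ci/ or patches/, ignore everything else (docs/...).
    trigger=false
    while read -r changed_file; do
        case "${changed_file}" in
            ci/*|patches/*) trigger=true ;;
        esac
    done < <(git diff-tree --no-commit-id --name-only -r HEAD)
    if ${trigger}; then
        echo "armband-verify would be triggered"
    else
        echo "no armband-verify trigger for this change"
    fi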
diff --git a/jjb/auto/auto.yaml b/jjb/auto/auto.yaml
deleted file mode 100644
index ee7208205..000000000
--- a/jjb/auto/auto.yaml
+++ /dev/null
@@ -1,127 +0,0 @@
----
-# jenkins job templates for Auto
-- project:
- name: 'auto-ci-jobs'
- project: 'auto'
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- fraser: &fraser
- stream: fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- # -------------------------------
- # DEPLOY TYPE ANCHORS
- # -------------------------------
- baremetal: &baremetal
- installer: 'fuel'
- slave-label: 'auto-baremetal'
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # CI POD's
- # -------------------------------
- # fraser
- # -------------------------------
- pod:
- # yamllint disable rule:key-duplicates
- - auto-baremetal:
- <<: *baremetal
- <<: *fraser
- # -------------------------------
- # master
- # -------------------------------
- - auto-baremetal:
- <<: *baremetal
- <<: *master
- # yamllint enable rule:key-duplicates
- # -------------------------------
- # scenarios
- # -------------------------------
- scenario:
- # HA scenarios
- - 'os-nosdn-onap-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger'
-
- jobs:
- - '{installer}-{scenario}-{pod}-auto-daily-{stream}'
- - '{installer}-deploy-{pod}-daily-{stream}'
- - '{project}-verify-basic'
-
-########################
-# job templates
-########################
-- job-template:
- name: '{installer}-{scenario}-{pod}-auto-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '{installer}-os-.*?-{pod}-auto-daily-.*'
- block-level: 'NODE'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
- triggers:
- - '{auto-trigger-name}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
-
- builders:
- - trigger-builds:
- - project: '{installer}-deploy-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-nofeature-ha
- PROJECT=armband
- same-node: true
- block: true
-
-########################
-# trigger macros
-########################
-# CI PODs
-# ----------------------------------------------------------------
-# Auto CI Baremetal Triggers running against master branch
-# ----------------------------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-onap-ha-auto-baremetal-master-trigger'
- triggers:
- - timed: ''
-# ---------------------------------------------------------------------
-# Auto CI Baremetal Triggers running against fraser branch
-# ---------------------------------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-onap-ha-auto-baremetal-fraser-trigger'
- triggers:
- - timed: ''
diff --git a/jjb/availability/availability.yaml b/jjb/availability/availability.yaml
deleted file mode 100644
index 2d3473499..000000000
--- a/jjb/availability/availability.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- project:
- name: availability
-
- project: '{name}'
-
- jobs:
- - '{project}-verify-basic'
diff --git a/jjb/barometer/barometer-build.sh b/jjb/barometer/barometer-build.sh
deleted file mode 100644
index 5f78aae7a..000000000
--- a/jjb/barometer/barometer-build.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-set -x
-
-OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-OPNFV_ARTIFACT_URL="$GS_URL/$OPNFV_ARTIFACT_VERSION/"
-
-# log info to console
-echo "Starting the build of Barometer RPMs"
-echo "------------------------------------"
-echo
-
-cd ci
-./install_dependencies.sh
-./build_rpm.sh
-cp utility/rpms_list $WORKSPACE
-cd $WORKSPACE
-
-# save information regarding artifact into file
-(
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_ARTIFACT_URL=$OPNFV_ARTIFACT_URL"
-) > $WORKSPACE/opnfv.properties
-
diff --git a/jjb/barometer/barometer-upload-artifact.sh b/jjb/barometer/barometer-upload-artifact.sh
deleted file mode 100644
index f05dc2af8..000000000
--- a/jjb/barometer/barometer-upload-artifact.sh
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/bin/bash
-set -o nounset
-set -o pipefail
-
-RPM_LIST=$WORKSPACE/rpms_list
-RPM_WORKDIR=$WORKSPACE/rpmbuild
-RPM_DIR=$RPM_WORKDIR/RPMS/x86_64/
-cd $WORKSPACE/
-
-# source the opnfv.properties to get ARTIFACT_VERSION
-source $WORKSPACE/opnfv.properties
-
-# Check if all the appropriate RPMs were generated
-echo "Checking if all the Barometer RPMs were created"
-echo "-----------------------------------------------"
-echo
-
-if [ -d $RPM_DIR ]
-then
- ls $RPM_DIR > list_of_gen_pack
-else
- echo "Can't access folder $RPM_DIR with rpm packages"
- echo "Barometer nightly build FAILED"
- exit 1
-fi
-
-for PACKAGENAME in `cat $RPM_LIST`
-do
- if ! grep -q $PACKAGENAME list_of_gen_pack
- then
- echo "$PACKAGENAME is missing"
- echo "Barometer nightly build FAILED"
- exit 2
- fi
-done
-
-# Remove the list file that is no longer needed.
-rm list_of_gen_pack
-
-echo "Uploading the barometer RPMs to artifacts.opnfv.org"
-echo "---------------------------------------------------"
-echo
-
-gsutil -m cp -r $RPM_DIR/* gs://$OPNFV_ARTIFACT_URL > $WORKSPACE/gsutil.log 2>&1
-
-# Check if the RPMs were pushed
-gsutil ls gs://$OPNFV_ARTIFACT_URL > /dev/null 2>&1
-if [[ $? -ne 0 ]]; then
- echo "Problem while uploading barometer RPMs to gs://$OPNFV_ARTIFACT_URL!"
- echo "Check log $WORKSPACE/gsutil.log on the appropriate build server"
- exit 1
-else
- # upload property files only if build is successful
- gsutil cp $WORKSPACE/opnfv.properties gs://$OPNFV_ARTIFACT_URL/opnfv.properties > gsutil.properties.log 2>&1
- gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
-fi
-
-gsutil -m setmeta \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$OPNFV_ARTIFACT_URL/*.rpm > /dev/null 2>&1
-
-gsutil -m setmeta \
- -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/latest.properties \
- gs://$OPNFV_ARTIFACT_URL/opnfv.properties > /dev/null 2>&1
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
-echo "Artifact is available at $OPNFV_ARTIFACT_URL"
-
-# Clean up the RPM build directory on the build machine.
-rm -rf $RPM_WORKDIR
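
Because the script publishes both a versioned opnfv.properties and a latest.properties pointer, a consumer can resolve the newest RPM directory from the pointer alone. A hedged sketch, assuming GS_URL resolves to artifacts.opnfv.org/barometer and the bucket is publicly readable:

    #!/bin/bash
    # Sketch: locate the most recent barometer RPM upload via latest.properties.
    set -o errexit
    curl -sf -o latest.properties http://artifacts.opnfv.org/barometer/latest.properties
    source latest.properties                 # defines OPNFV_ARTIFACT_URL and VERSION
    echo "Newest artifact directory: ${OPNFV_ARTIFACT_URL}"
    gsutil ls "gs://${OPNFV_ARTIFACT_URL}"   # list the RPMs uploaded by the job above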
diff --git a/jjb/barometer/barometer.yaml b/jjb/barometer/barometer.yaml
deleted file mode 100644
index 92176207f..000000000
--- a/jjb/barometer/barometer.yaml
+++ /dev/null
@@ -1,163 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: barometer
-
- project: '{name}'
-
- jobs:
- - 'barometer-verify-{stream}'
- - 'barometer-merge-{stream}'
- - 'barometer-daily-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-- job-template:
- name: 'barometer-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
- builders:
- - shell: |
- pwd
- cd src
- make clobber
- make
-
-- job-template:
- name: 'barometer-merge-{stream}'
-
- project-type: freestyle
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 3
- max-per-node: 2
- option: 'project'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
-
- builders:
- - shell: |
- pwd
- cd src
- make clobber
- make
-
-- job-template:
- name: 'barometer-daily-{stream}'
-
- project-type: freestyle
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - barometer-project-parameter:
- gs-pathname: '{gs-pathname}'
- - 'opnfv-build-centos-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - timed: '@midnight'
-
- builders:
- - shell:
- !include-raw-escape: ./barometer-build.sh
- - shell:
- !include-raw-escape: ./barometer-upload-artifact.sh
-
-########################
-# parameter macros
-########################
-- parameter:
- name: barometer-project-parameter
- parameters:
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "URL to Google Storage."
diff --git a/jjb/bottlenecks/bottlenecks-ci-jobs.yaml b/jjb/bottlenecks/bottlenecks-ci-jobs.yaml
deleted file mode 100644
index 41b73a290..000000000
--- a/jjb/bottlenecks/bottlenecks-ci-jobs.yaml
+++ /dev/null
@@ -1,204 +0,0 @@
----
-####################################
-# job configuration for bottlenecks
-####################################
-- project:
- name: bottlenecks-ci-jobs
-
- project: 'bottlenecks'
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- # This is used for common project file storage
- gs-pathname: ''
- # This is used for storing test-suite-dependent packages
- gs-packagepath: '/{suite}'
- # docker tag used for version control
- docker-tag: 'latest'
- fraser: &fraser
- stream: fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- gs-packagepath: '/{stream}/{suite}'
- docker-tag: 'stable'
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # Installers using labels
- # CI PODs
- # This section should only contain the installers
- # that have been switched using labels for slaves
- # -------------------------------
- pod:
- # compass CI PODs
- - baremetal:
- slave-label: compass-baremetal-master
- installer: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - virtual:
- slave-label: compass-virtual-master
- installer: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - baremetal:
- slave-label: compass-baremetal-branch
- installer: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- - virtual:
- slave-label: compass-virtual-branch
- installer: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
-
- # -------------------------------
- # None-CI PODs
- # -------------------------------
- # - orange-pod2:
- # slave-label: '{pod}'
- # installer: joid
- # auto-trigger-name: 'daily-trigger-disabled'
- # <<: *fraser
- # - orange-pod2:
- # slave-label: '{pod}'
- # installer: joid
- # auto-trigger-name: 'daily-trigger-disabled'
- # <<: *master
- # -------------------------------------------
- suite:
- - 'posca_stress_traffic'
- - 'posca_stress_ping'
- - 'posca_factor_multistack_storage'
- - 'posca_factor_multistack_storage_parallel'
- - 'posca_feature_moon_resources'
- - 'posca_feature_moon_tenants'
- - 'posca_feature_vnf_scale_out'
- - 'posca_factor_soak_throughputs'
-
- jobs:
- - 'bottlenecks-{installer}-{suite}-{pod}-daily-{stream}'
-
-################################
-# job templates
-################################
-- job-template:
- name: 'bottlenecks-{installer}-{suite}-{pod}-daily-{stream}'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: 180
- abort: true
- - fix-workspace-permissions
-
- triggers:
- - '{auto-trigger-name}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-defaults'
- - '{installer}-defaults'
- - 'bottlenecks-params-{slave-label}'
- - string:
- name: REPO_DIR
- default: "/home/opnfv/bottlenecks"
- description: "Directory where the repository is cloned"
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl_l2-nofeature-ha'
- - string:
- name: GERRIT_REFSPEC_DEBUG
- default: 'true'
- description: "Gerrit refspec for debug."
- - string:
- name: SUITE_NAME
- default: '{suite}'
- description: "test suite name."
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: "docker image tag used for version control"
-
- scm:
- - git-scm
-
- builders:
- - 'bottlenecks-env-cleanup'
- - 'bottlenecks-run-suite'
- - 'bottlenecks-workspace-cleanup'
-
- publishers:
- - email:
- recipients: gabriel.yuyang@huawei.com, liyin11@huawei.com
- - email-jenkins-admins-on-failure
-
-########################
-# builder macros
-########################
-- builder:
- name: bottlenecks-env-cleanup
- builders:
- - shell:
- !include-raw: ./bottlenecks-cleanup.sh
-
-- builder:
- name: bottlenecks-run-suite
- builders:
- - shell:
- !include-raw: ./bottlenecks-run-suite.sh
-
-- builder:
- name: bottlenecks-workspace-cleanup
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o nounset
- set -o pipefail
-
- # delete everything that is in $WORKSPACE
- sudo rm -rf $WORKSPACE
-
-####################
-# parameter macros
-####################
-
-- parameter:
- name: 'bottlenecks-params-compass-baremetal-master'
- parameters:
- - string:
- name: BOTTLENECKS_DB_TARGET
- default: 'http://testresults.opnfv.org/test/api/v1/results'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'bottlenecks-params-compass-virtual-master'
- parameters:
- - string:
- name: BOTTLENECKS_DB_TARGET
- default: 'http://testresults.opnfv.org/test/api/v1/results'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'bottlenecks-params-compass-baremetal-branch'
- parameters:
- - string:
- name: BOTTLENECKS_DB_TARGET
- default: 'http://testresults.opnfv.org/test/api/v1/results'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'bottlenecks-params-compass-virtual-branch'
- parameters:
- - string:
- name: BOTTLENECKS_DB_TARGET
- default: 'http://testresults.opnfv.org/test/api/v1/results'
- description: 'Arguments to use in order to choose the backend DB'
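
BOTTLENECKS_DB_TARGET points the suite at the community results API, and reported runs can be queried back from the same endpoint. A hedged sketch; the query parameter names (project, case, last) are assumptions about that API rather than something defined in this repository:

    #!/bin/bash
    # Sketch: query recent bottlenecks results back from the DB target above.
    DB_TARGET="${BOTTLENECKS_DB_TARGET:-http://testresults.opnfv.org/test/api/v1/results}"
    curl -s "${DB_TARGET}?project=bottlenecks&case=posca_factor_ping&last=5" \
        | python -m json.tool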
diff --git a/jjb/bottlenecks/bottlenecks-cleanup.sh b/jjb/bottlenecks/bottlenecks-cleanup.sh
deleted file mode 100644
index d0e2088c7..000000000
--- a/jjb/bottlenecks/bottlenecks-cleanup.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-#clean up correlated dockers and their images
-bash $WORKSPACE/docker/docker_cleanup.sh -d bottlenecks --debug
-bash $WORKSPACE/docker/docker_cleanup.sh -d Bottlenecks --debug
-bash $WORKSPACE/docker/docker_cleanup.sh -d yardstick --debug
-bash $WORKSPACE/docker/docker_cleanup.sh -d kibana --debug
-bash $WORKSPACE/docker/docker_cleanup.sh -d elasticsearch --debug
-bash $WORKSPACE/docker/docker_cleanup.sh -d influxdb --debug
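
docker_cleanup.sh lives in the Bottlenecks repository itself ($WORKSPACE/docker). For orientation only, a rough bash sketch of what one "-d <name>" pass amounts to with plain docker commands; this is an illustration, not the actual docker_cleanup.sh logic:

    #!/bin/bash
    # Sketch: remove containers and images whose name or image reference
    # matches the given name (case-insensitive).
    name="${1:-bottlenecks}"
    ids=$(docker ps -a --format '{{.ID}} {{.Image}} {{.Names}}' | grep -i "${name}" | awk '{print $1}')
    if [[ -n "${ids}" ]]; then
        docker rm -f ${ids}      # intentionally unquoted: may hold several IDs
    fi
    imgs=$(docker images --format '{{.ID}} {{.Repository}}' | grep -i "${name}" | awk '{print $1}')
    if [[ -n "${imgs}" ]]; then
        docker rmi -f ${imgs}
    fi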
diff --git a/jjb/bottlenecks/bottlenecks-project-jobs.yaml b/jjb/bottlenecks/bottlenecks-project-jobs.yaml
deleted file mode 100644
index d4930d745..000000000
--- a/jjb/bottlenecks/bottlenecks-project-jobs.yaml
+++ /dev/null
@@ -1,224 +0,0 @@
----
-###################################################
-# Non-ci jobs for Bottlenecks project
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: bottlenecks-project-jobs
-
- project: 'bottlenecks'
-
- jobs:
- - 'bottlenecks-verify-{stream}'
- - 'bottlenecks-merge-{stream}'
- - 'bottlenecks-{suite}-upload-artifacts-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- # This is used for common project file storage
- gs-pathname: ''
- # This is used for different test suite dependent packages storage
- gs-packagepath: '/{suite}'
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- gs-packagepath: '/{stream}/{suite}'
- disabled: false
-
- suite:
- - 'posca_stress_traffic'
- - 'posca_stress_ping'
-
-################################
-# job templates
-################################
-
-- job-template:
- name: 'bottlenecks-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- builders:
- # - bottlenecks-hello
- - bottlenecks-unit-tests
-
-- job-template:
- name: 'bottlenecks-merge-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- builders:
- - bottlenecks-hello
- # - bottlenecks-unit-tests
-
-- job-template:
- name: 'bottlenecks-{suite}-upload-artifacts-{stream}'
-
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - bottlenecks-parameter:
- gs-packagepath: '{gs-packagepath}'
-
- scm:
- - git-scm
-
- builders:
- - 'bottlenecks-builder-upload-artifact'
- - 'bottlenecks-artifact-workspace-cleanup'
-
-####################
-# parameter macros
-####################
-- parameter:
- name: bottlenecks-parameter
- parameters:
- - string:
- name: CACHE_DIR
- default: $WORKSPACE/cache{gs-packagepath}
- description: "the cache to store packages downloaded from public IP"
- - string:
- name: SUITE_URL
- default: gs://artifacts.opnfv.org/bottlenecks{gs-packagepath}
- description: "LF artifacts url for storage of bottlenecks packages"
- - string:
- name: PACKAGE_URL
- default: http://205.177.226.237:9999/bottlenecks{gs-packagepath}/
- description: "the url where we store the packages used for bottlenecks\
- \ rubbos"
-
-####################################
-# builders for bottlenecks project
-####################################
-- builder:
- name: bottlenecks-builder-upload-artifact
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
-
- echo "Bottlenecks: upload to artifacts from the public IP"
-
- [[ -d $CACHE_DIR ]] || mkdir -p $CACHE_DIR
-
- for file in $(curl -s $PACKAGE_URL |
- grep href |
- sed 's/.*href="//' |
- sed 's/".*//' |
- grep '^[a-zA-Z].*'); do
- curl --connect-timeout 10 -o $CACHE_DIR/$file $PACKAGE_URL$file -v
- echo "bottlenecks: copy file $CACHE_DIR/$file to $SUITE_URL"
- gsutil cp $CACHE_DIR/$file $SUITE_URL
- done
-
-- builder:
- name: bottlenecks-artifact-workspace-cleanup
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
-
- echo "Bottlenecks: cleanup cache used for storage downloaded packages"
-
- /bin/rm -rf $CACHE_DIR
-
-- builder:
- name: bottlenecks-unit-tests
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
-
- echo "Running unit tests..."
- cd $WORKSPACE
- virtualenv $WORKSPACE/bottlenecks_venv
- source $WORKSPACE/bottlenecks_venv/bin/activate
-
- # install python packages
- easy_install -U setuptools
- easy_install -U pip
- pip install -r $WORKSPACE/requirements/verify.txt
-
- # unit tests
- /bin/bash $WORKSPACE/verify.sh
-
- deactivate
-
-- builder:
- name: bottlenecks-hello
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
-
- echo -e "Wellcome to Bottlenecks! \nMerge event is planning to support more functions! "
diff --git a/jjb/bottlenecks/bottlenecks-run-suite.sh b/jjb/bottlenecks/bottlenecks-run-suite.sh
deleted file mode 100644
index 7a3db00c4..000000000
--- a/jjb/bottlenecks/bottlenecks-run-suite.sh
+++ /dev/null
@@ -1,147 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-#set -e
-[[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-BOTTLENECKS_IMAGE=opnfv/bottlenecks
-REPORT="True"
-
-RELENG_REPO=${WORKSPACE}/releng
-[ -d ${RELENG_REPO} ] && rm -rf ${RELENG_REPO}
-git clone https://gerrit.opnfv.org/gerrit/releng ${RELENG_REPO} >${redirect}
-
-YARDSTICK_REPO=${WORKSPACE}/yardstick
-[ -d ${YARDSTICK_REPO} ] && rm -rf ${YARDSTICK_REPO}
-git clone https://gerrit.opnfv.org/gerrit/yardstick ${YARDSTICK_REPO} >${redirect}
-
-OPENRC=/tmp/admin_rc.sh
-OS_CACERT=/tmp/os_cacert
-
-BOTTLENECKS_CONFIG=/tmp
-
-if [[ $SUITE_NAME == *posca* ]]; then
- POSCA_SCRIPT=/home/opnfv/bottlenecks/testsuites/posca
- sudo rm -f ${OPENRC}
-
- # Preparing OpenStack RC and Cacert files
- echo "BOTTLENECKS INFO: fetching os credentials from $INSTALLER_TYPE"
- if [[ $INSTALLER_TYPE == 'compass' ]]; then
- ${RELENG_REPO}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} -o ${OS_CACERT} >${redirect}
- if [[ -f ${OS_CACERT} ]]; then
- echo "BOTTLENECKS INFO: successfully fetching os_cacert for openstack: ${OS_CACERT}"
- else
- echo "BOTTLENECKS ERROR: couldn't find os_cacert file: ${OS_CACERT}, please check if the it's been properly provided."
- exit 1
- fi
- fi
-
- if [[ -f ${OPENRC} ]]; then
- echo "BOTTLENECKS INFO: openstack credentials path is ${OPENRC}"
- if [[ $INSTALLER_TYPE == 'compass' ]]; then
- echo "BOTTLENECKS INFO: writing ${OS_CACERT} to ${OPENRC}"
- echo "export OS_CACERT=${OS_CACERT}" >> ${OPENRC}
- fi
- cat ${OPENRC}
- else
- echo "BOTTLENECKS ERROR: couldn't find openstack rc file: ${OPENRC}, please check if the it's been properly provided."
- exit 1
- fi
-
- # Finding and creating POD description files from different deployments
- ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
-
- if [ "$INSTALLER_TYPE" == "fuel" ]; then
- echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
- sshpass -p r00tme sudo scp $ssh_options root@${INSTALLER_IP}:~/.ssh/id_rsa ${BOTTLENECKS_CONFIG}/id_rsa
- fi
-
- if [ "$INSTALLER_TYPE" == "apex" ]; then
- echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
- sudo scp $ssh_options stack@${INSTALLER_IP}:~/.ssh/id_rsa ${BOTTLENECKS_CONFIG}/id_rsa
- fi
-
- set +e
-
- sudo -H pip install virtualenv
-
- cd ${RELENG_REPO}/modules
- sudo virtualenv venv
- source venv/bin/activate
- sudo -H pip install -e ./ >/dev/null
- sudo -H pip install netaddr
-
- if [[ ${INSTALLER_TYPE} == compass ]]; then
- options="-u root -p root"
- elif [[ ${INSTALLER_TYPE} == fuel ]]; then
- options="-u root -p r00tme"
- elif [[ ${INSTALLER_TYPE} == apex ]]; then
- options="-u stack -k /root/.ssh/id_rsa"
- else
- echo "Don't support to generate pod.yaml on ${INSTALLER_TYPE} currently."
- fi
-
- if [[ ${INSTALLER_TYPE} != compass ]]; then
- cmd="sudo python ${RELENG_REPO}/utils/create_pod_file.py -t ${INSTALLER_TYPE} \
- -i ${INSTALLER_IP} ${options} -f ${BOTTLENECKS_CONFIG}/pod.yaml \
- -s ${BOTTLENECKS_CONFIG}/id_rsa"
- echo ${cmd}
- ${cmd}
- else
- cmd="sudo cp ${YARDSTICK_REPO}/etc/yardstick/nodes/compass_sclab_virtual/pod.yaml \
- ${BOTTLENECKS_CONFIG}"
- echo ${cmd}
- ${cmd}
- fi
-
- deactivate
-
- sudo rm -rf ${RELENG_REPO}/modules/venv
- sudo rm -rf ${RELENG_REPO}/modules/opnfv.egg-info
-
- set -e
-
- cd ${WORKSPACE}
-
- if [ -f ${BOTTLENECKS_CONFIG}/pod.yaml ]; then
- echo "FILE: ${BOTTLENECKS_CONFIG}/pod.yaml:"
- cat ${BOTTLENECKS_CONFIG}/pod.yaml
- else
- echo "ERROR: cannot find file ${BOTTLENECKS_CONFIG}/pod.yaml. Please check if it is existing."
- sudo ls -al ${BOTTLENECKS_CONFIG}
- fi
-
- # Pulling the Bottlenecks docker image and passing environment variables
- echo "INFO: pulling Bottlenecks docker image ${DOCKER_TAG}"
- docker pull opnfv/bottlenecks:${DOCKER_TAG} >$redirect
-
- opts="--privileged=true -id"
- envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
- -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NET=${EXTERNAL_NETWORK} \
- -e BRANCH=${BRANCH} -e GERRIT_REFSPEC_DEBUG=${GERRIT_REFSPEC_DEBUG} \
- -e BOTTLENECKS_DB_TARGET=${BOTTLENECKS_DB_TARGET} -e PACKAGE_URL=${PACKAGE_URL} \
- -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} -e BUILD_TAG=${BUILD_TAG}"
- docker_volume="-v /var/run/docker.sock:/var/run/docker.sock -v /tmp:/tmp"
-
- cmd="docker run ${opts} ${envs} --name bottlenecks-load-master ${docker_volume} opnfv/bottlenecks:${DOCKER_TAG} /bin/bash"
- echo "BOTTLENECKS INFO: running docker run commond: ${cmd}"
- ${cmd} >$redirect
- sleep 5
-
- # Running test cases through Bottlenecks docker
- if [[ $SUITE_NAME == posca_stress_traffic ]]; then
- TEST_CASE=posca_factor_system_bandwidth
- elif [[ $SUITE_NAME == posca_stress_ping ]]; then
- TEST_CASE=posca_factor_ping
- else
- TEST_CASE=$SUITE_NAME
- fi
- testcase_cmd="docker exec bottlenecks-load-master python ${POSCA_SCRIPT}/../run_testsuite.py testcase $TEST_CASE $REPORT"
- echo "BOTTLENECKS INFO: running test case ${TEST_CASE} with report indicator: ${testcase_cmd}"
- ${testcase_cmd} >$redirect
-fi
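
The suite relies on a fixed "sleep 5" between starting the bottlenecks-load-master container and the docker exec call. A defensive wait one could use instead (a sketch, not part of the job above):

    #!/bin/bash
    # Sketch: wait up to ~60s for the bottlenecks-load-master container to be
    # running before any "docker exec" is attempted.
    for _ in $(seq 1 12); do
        if docker ps --format '{{.Names}}' | grep -qx 'bottlenecks-load-master'; then
            echo "bottlenecks-load-master is up"
            exit 0
        fi
        sleep 5
    done
    echo "bottlenecks-load-master did not come up in time" >&2
    exit 1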
diff --git a/jjb/calipso/calipso.yaml b/jjb/calipso/calipso.yaml
deleted file mode 100644
index 6701e7ca2..000000000
--- a/jjb/calipso/calipso.yaml
+++ /dev/null
@@ -1,65 +0,0 @@
----
-- project:
- name: calipso
-
- project: '{name}'
-
- jobs:
- - 'calipso-verify-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-- job-template:
- name: 'calipso-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-centos-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- builders:
- - verify-unit-tests
-
-- builder:
- name: verify-unit-tests
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o nounset
- set -o pipefail
- cd $WORKSPACE
- PYTHONPATH=$PWD/app app/test/verify.sh
diff --git a/jjb/ci_gate_security/anteater-report-to-gerrit.sh b/jjb/ci_gate_security/anteater-report-to-gerrit.sh
deleted file mode 100644
index 51a8ac47b..000000000
--- a/jjb/ci_gate_security/anteater-report-to-gerrit.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-set -o pipefail
-export PATH=$PATH:/usr/local/bin/
-EXITSTATUS=0
-
-# This Log should always exist
-if [[ -e securityaudit.log ]] ; then
-
- # check if the log has errors
- if grep ERROR securityaudit.log; then
- EXITSTATUS=1
- fi
-
- grep 'ERROR' securityaudit.log | awk -F"ERROR - " '{ print $2 }' | tr -d "\'\"" > shortlog
-
- # Only report to Gerrit when there are errors to report.
- if [[ -s shortlog ]]; then
- echo -e "\nposting security audit report to gerrit...\n"
- ssh -p 29418 gerrit.opnfv.org \
- "gerrit review -p $GERRIT_PROJECT \
- -m \"$(cat shortlog)\" \
- $GERRIT_PATCHSET_REVISION \
- --notify NONE"
- fi
-
- exit $EXITSTATUS
-fi
diff --git a/jjb/ci_gate_security/anteater-security-audit-weekly.sh b/jjb/ci_gate_security/anteater-security-audit-weekly.sh
deleted file mode 100644
index 6caa13117..000000000
--- a/jjb/ci_gate_security/anteater-security-audit-weekly.sh
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2017 The Linux Foundation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-ANTEATER_SCAN_PATCHSET="${ANTEATER_SCAN_PATCHSET:-true}"
-
-cd $WORKSPACE
-REPORTDIR='.reports'
-mkdir -p $REPORTDIR
-# Ensure any user can read the reports directory
-chmod 777 $REPORTDIR
-
-ANTEATER_FILES="--patchset /home/opnfv/anteater/$PROJECT/patchset"
-
-if [[ "$ANTEATER_SCAN_PATCHSET" == "true" ]]; then
- echo "Generating patchset file to list changed files"
- git diff HEAD^1 --name-only | sed "s#^#/home/opnfv/anteater/$PROJECT/#" > $WORKSPACE/patchset
- echo "Changed files are"
- echo "--------------------------------------------------------"
- cat $WORKSPACE/patchset
- echo "--------------------------------------------------------"
-else
- echo "Checking full project $PROJECT"
- ANTEATER_FILES="--path /home/opnfv/anteater/$PROJECT"
-fi
-
-vols="-v $WORKSPACE:/home/opnfv/anteater/$PROJECT -v $WORKSPACE/$REPORTDIR:/home/opnfv/anteater/$REPORTDIR"
-envs="-e PROJECT=$PROJECT"
-
-echo "Pulling releng-anteater docker image"
-echo "--------------------------------------------------------"
-docker pull opnfv/releng-anteater
-echo "--------------------------------------------------------"
-
-cmd="docker run -i $envs $vols --rm opnfv/releng-anteater \
-/home/opnfv/venv/bin/anteater --project $PROJECT $ANTEATER_FILES"
-echo "Running docker container"
-echo "$cmd"
-$cmd > $WORKSPACE/securityaudit.log 2>&1
-exit_code=$?
-echo "--------------------------------------------------------"
-echo "Docker container exited with code: $exit_code"
-echo "--------------------------------------------------------"
-exit 0
diff --git a/jjb/ci_gate_security/anteater-security-audit.sh b/jjb/ci_gate_security/anteater-security-audit.sh
deleted file mode 100644
index 8a170b044..000000000
--- a/jjb/ci_gate_security/anteater-security-audit.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/bash
-cd $WORKSPACE
-REPORTDIR='.reports'
-mkdir -p $REPORTDIR
-# Ensure any user can read the reports directory
-chmod 777 $REPORTDIR
-echo "Generating patchset file to list changed files"
-git diff HEAD^1 --name-only | sed "s#^#/home/opnfv/anteater/$PROJECT/#" > $WORKSPACE/patchset
-echo "Changed files are"
-echo "--------------------------------------------------------"
-cat $WORKSPACE/patchset
-echo "--------------------------------------------------------"
-
-vols="-v $WORKSPACE:/home/opnfv/anteater/$PROJECT -v $WORKSPACE/$REPORTDIR:/home/opnfv/anteater/$REPORTDIR"
-envs="-e PROJECT=$PROJECT"
-
-echo "Pulling releng-anteater docker image"
-echo "--------------------------------------------------------"
-docker pull opnfv/releng-anteater
-echo "--------------------------------------------------------"
-
-cmd="docker run -i $envs $vols --rm opnfv/releng-anteater \
-/home/opnfv/venv/bin/anteater --project $PROJECT --patchset /home/opnfv/anteater/$PROJECT/patchset"
-echo "Running docker container"
-echo "$cmd"
-$cmd > $WORKSPACE/securityaudit.log 2>&1
-exit_code=$?
-echo "--------------------------------------------------------"
-echo "Docker container exited with code: $exit_code"
-echo "--------------------------------------------------------"
-cat securityaudit.log
-exit 0
diff --git a/jjb/ci_gate_security/opnfv-ci-gate-security.yaml b/jjb/ci_gate_security/opnfv-ci-gate-security.yaml
deleted file mode 100644
index 2e500befb..000000000
--- a/jjb/ci_gate_security/opnfv-ci-gate-security.yaml
+++ /dev/null
@@ -1,196 +0,0 @@
----
-# SPDX-license-identifier: Apache-2.0
-########################
-# Job configuration for opnfv-anteater (security audit)
-########################
-- project:
-
- name: anteaterfw
-
- project: anteaterfw
-
- repo:
- - apex
- - apex-os-net-config
- - apex-puppet-tripleo
- - apex-tripleo-heat-templates
- - armband
- - auto
- - availability
- - bamboo
- - barometer
- - bottlenecks
- - calipso
- - clover
- - compass-containers
- - compass4nfv
- - conductor
- - container4nfv
- - copper
- - cperf
- - daisy
- - doctor
- - domino
- - dovetail
- - dpacc
- - enfv
- - fds
- - fuel
- - functest
- - ipv6
- - joid
- - kvmfornfv
- - models
- - moon
- - netready
- - nfvbench
- - onosfw
- - opera
- - opnfvdocs
- - orchestra
- - ovn4nfv
- - ovno
- - ovsnfv
- - parser
- - pharos
- - pharos-tools
- - promise
- - qtip
- - releng
- - releng-anteater
- - releng-testresults
- - releng-utils
- - releng-xci
- - samplevnf
- - sdnvpn
- - securityscanning
- - sfc
- - snaps
- - stor4nfv
- - storperf
- - ves
- - vswitchperf
- - yardstick
-
- jobs:
- - 'opnfv-security-audit-verify-{stream}'
- - 'opnfv-security-audit-{repo}-weekly-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
-########################
-# job templates
-########################
-- job-template:
- name: 'opnfv-security-audit-{repo}-weekly-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - ericsson-build3-defaults
- - string:
- name: ANTEATER_SCAN_PATCHSET
- default: "false"
- description: "Have anteater scan patchsets (true) or full project (false)"
- - project-parameter:
- project: '{repo}'
- branch: '{branch}'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - timed: '@weekly'
-
- builders:
- - anteater-security-audit-weekly
-
- publishers:
- # defined in jjb/global/releng-macros.yml
- - 'email-{repo}-ptl':
- subject: 'OPNFV Security Scan Result: {repo}'
- - workspace-cleanup:
- fail-build: false
-
-- job-template:
- name: 'opnfv-security-audit-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build'
- description: 'Slave label on Jenkins'
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- # yamllint disable rule:line-length
- description: "Used for overriding the GIT URL coming from Global Jenkins configuration in case if the stuff is done on none-LF HW."
- # yamllint enable rule:line-length
-
- scm:
- - git-scm-gerrit
-
- # yamllint disable rule:line-length
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: 'apex|armband|bamboo|barometer|bottlenecks|calipso|compass4nfv|conductor|copper|cperf|daisy|doctor|dovetail|dpacc|enfv|fds|fuel|functest|pharos|releng|sandbox|yardstick|infra|ipv6|kvmfornfv|models|moon|netready'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '**'
- skip-vote:
- successful: true
- failed: true
- unstable: true
- notbuilt: true
- # yamllint enable rule:line-length
-
- builders:
- - anteater-security-audit
- - report-security-audit-result-to-gerrit
- publishers:
- - archive-artifacts:
- artifacts: ".reports/*"
-
-########################
-# builder macros
-########################
-- builder:
- name: anteater-security-audit
- builders:
- - shell:
- !include-raw: ./anteater-security-audit.sh
-
-- builder:
- name: report-security-audit-result-to-gerrit
- builders:
- - shell:
- !include-raw: ./anteater-report-to-gerrit.sh
-
-- builder:
- name: anteater-security-audit-weekly
- builders:
- - shell:
- !include-raw: ./anteater-security-audit-weekly.sh
diff --git a/jjb/clover/clover-project.yaml b/jjb/clover/clover-project.yaml
deleted file mode 100644
index 31eed8f14..000000000
--- a/jjb/clover/clover-project.yaml
+++ /dev/null
@@ -1,172 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: clover
-
- project: '{name}'
-
- jobs:
- - 'clover-verify-{stream}'
- - 'clover-daily-upload-{stream}'
- - 'clover-daily-deploy-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-- job-template:
- name: 'clover-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- # yamllint disable rule:line-length
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from Global Jenkins\
- \ configuration in case if the stuff is done on none-LF HW."
- # yamllint enable rule:line-length
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
- builders:
- - shell: |
- cd $WORKSPACE/ci
- ./verify.sh
-
-- job-template:
- name: 'clover-daily-upload-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- scm:
- - git-scm
-
- triggers:
- - timed: '0 8 * * *'
-
- wrappers:
- - fix-workspace-permissions
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - 'clover-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - shell: |
- cd $WORKSPACE/ci
- ./upload.sh
-
-- job-template:
- name: 'clover-daily-deploy-{stream}'
-
- project-type: freestyle
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'clover-daily-deploy-.*?'
- - 'container4nfv-daily-deploy-.*?'
- block-level: 'NODE'
-
- wrappers:
- - timeout:
- timeout: 180
- abort: true
- - fix-workspace-permissions
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- # yamllint disable rule:line-length
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from Global Jenkins\
- \ configuration in case if the stuff is done on none-LF HW."
- # yamllint enable rule:line-length
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-virtual6
- default-slaves:
- - huawei-virtual6
-
- scm:
- - git-scm
-
- triggers:
- - timed: '0 9 * * *'
-
- builders:
- - shell: |
- cd $WORKSPACE/ci
- ./deploy.sh
-
-###################
-# parameter macros
-###################
-- parameter:
- name: 'clover-defaults'
- parameters:
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/cntt/cntt.yaml b/jjb/cntt/cntt.yaml
new file mode 100644
index 000000000..9a60cf1f3
--- /dev/null
+++ b/jjb/cntt/cntt.yaml
@@ -0,0 +1,97 @@
+---
+- builder:
+ name: cntt-tox
+ builders:
+ - shell: |
+ set +x
+ if [ -d {dir} ] && [ -f {dir}/tox.ini ]; then
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install tox texlive \
+ texlive-latex-extra latexmk pandoc -y
+ (cd {dir} && tox
+ wget -q -N https://dl.min.io/client/mc/release/linux-amd64/mc && chmod a+x mc
+ ./mc cp -r -q build/ opnfv/artifacts.opnfv.org/{stream}/$BUILD_TAG
+ echo "\nhttps://artifacts.opnfv.org/{stream}/$BUILD_TAG/index.html\n")
+ fi
+ if [ -d {dir}/build ] && [ -f {dir}/build/Makefile ]; then
+ (cd {dir}/build && make
+ wget -q -N https://dl.min.io/client/mc/release/linux-amd64/mc && chmod a+x mc
+ for i in *.pdf; do
+ ./mc cp -r -q $i opnfv/artifacts.opnfv.org/{stream}/$BUILD_TAG
+ echo "\nhttps://artifacts.opnfv.org/{stream}/$BUILD_TAG/$i\n"
+ done)
+ fi
+ if [ -d {dir}/gsma ]; then
+ (cd {dir}/gsma
+ wget -q -N https://dl.min.io/client/mc/release/linux-amd64/mc && chmod a+x mc
+ for i in *.docx *.rst; do
+ ./mc cp -r -q $i opnfv/artifacts.opnfv.org/{stream}/$BUILD_TAG/gsma
+ echo "\nhttps://artifacts.opnfv.org/{stream}/$BUILD_TAG/gsma/$i\n"
+ done)
+ fi
+
+- scm:
+ name: cntt-scm
+ scm:
+ - git:
+ url: https://github.com/cntt-n/CNTT
+ refspec: '+refs/heads/*:refs/remotes/origin/* +refs/pull/*/head:refs/remotes/origin/pr/*'
+
+- parameter:
+ name: cntt-parameter
+ parameters:
+ - label:
+ name: node
+ default: 'opnfv-build'
+
+- job-template:
+ name: cntt-tox-{stream}
+ scm:
+ - cntt-scm
+ triggers:
+ - github
+ parameters:
+ - cntt-parameter
+ builders:
+ - cntt-tox:
+ stream: '{stream}'
+ dir: '{dir}'
+ publishers:
+ - github-notifier
+
+- project:
+ name: cntt
+ stream:
+ - rm:
+ dir: doc/ref_model
+ - ra1:
+ dir: doc/ref_arch/openstack
+ - ra2:
+ dir: doc/ref_arch/kubernetes
+ - rc:
+ dir: doc/ref_cert
+ - rc1:
+ dir: doc/ref_cert/RC1
+ - rc2:
+ dir: doc/ref_cert/RC2
+ - ri1:
+ dir: doc/ref_impl/cntt-ri
+ - ri2:
+ dir: doc/ref_impl/cntt-ri2
+ jobs:
+ - cntt-tox-{stream}
+
+- view:
+ name: cntt
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^cntt.*$
diff --git a/jjb/compass4nfv/compass-build.sh b/jjb/compass4nfv/compass-build.sh
deleted file mode 100644
index 673a9f106..000000000
--- a/jjb/compass4nfv/compass-build.sh
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-set -x
-
-# log info to console
-echo "Starting the build of $INSTALLER_TYPE. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-# create the cache directory if it doesn't exist
-[[ -d $CACHE_DIRECTORY ]] || mkdir -p $CACHE_DIRECTORY
-[[ -d $BUILD_DIRECTORY ]] || mkdir -p $BUILD_DIRECTORY
-
-# set OPNFV_ARTIFACT_VERSION
-export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-export PACKAGE_URL=$PPA_REPO
-
-# start the build
-if [ -d $PPA_CACHE ]
-then
- cp $PPA_CACHE/*.tar.gz $PPA_CACHE/*.iso $PPA_CACHE/*.img $CACHE_DIRECTORY/ -f
-fi
-
-cd $WORKSPACE/
-
-if [[ "$BRANCH" == 'stable/danube' ]]; then
- ./build.sh --iso-dir $BUILD_DIRECTORY/ --iso-name compass.iso -c $CACHE_DIRECTORY
- OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/compass.iso | cut -d' ' -f1)
- OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso
-else
- ./build.sh --tar-dir $BUILD_DIRECTORY/ --tar-name compass.tar.gz -c $CACHE_DIRECTORY
- OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/compass.tar.gz | cut -d' ' -f1)
- OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.tar.gz
-fi
-
-# list the build artifacts
-ls -al $BUILD_DIRECTORY
-
-# save information regarding artifact into file
-(
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_ARTIFACT_URL=$OPNFV_ARTIFACT_URL"
- echo "OPNFV_ARTIFACT_SHA512SUM=$OPNFV_ARTIFACT_SHA512SUM"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
-) > $BUILD_DIRECTORY/opnfv.properties
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/compass4nfv/compass-ci-jobs.yaml b/jjb/compass4nfv/compass-ci-jobs.yaml
deleted file mode 100644
index 298173e85..000000000
--- a/jjb/compass4nfv/compass-ci-jobs.yaml
+++ /dev/null
@@ -1,1009 +0,0 @@
----
-- project:
-
- name: compass-ci
-
- installer: 'compass'
-
- project: 'compass4nfv'
-
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- ppa-pathname: '/{stream}'
- disabled: false
- openstack-version: pike
- fraser: &fraser
- stream: fraser
- branch: 'stable/{stream}'
- disabled: false
- gs-pathname: '/{stream}'
- ppa-pathname: '/{stream}'
- openstack-version: pike
- danube: &danube
- stream: danube
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- ppa-pathname: '/{stream}'
- disabled: false
- openstack-version: newton
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # CI PODs
- # -------------------------------
- pod:
- - baremetal:
- slave-label: compass-baremetal-master
- os-version: 'xenial'
- <<: *master
- - virtual:
- slave-label: compass-virtual-master
- os-version: 'xenial'
- <<: *master
- - baremetal:
- slave-label: compass-baremetal-branch
- os-version: 'xenial'
- <<: *fraser
- - virtual:
- slave-label: compass-virtual-branch
- os-version: 'xenial'
- <<: *fraser
- # -------------------------------
- # master
- # -------------------------------
- - baremetal-centos:
- slave-label: 'intel-pod17'
- os-version: 'centos7'
- <<: *master
- # -------------------------------
- # danube for dovetail
- # -------------------------------
- - huawei-pod7:
- slave-label: 'huawei-pod7'
- os-version: 'xenial'
- <<: *danube
-
- scenario:
- - 'os-nosdn-nofeature-ha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'os-odl_l3-nofeature-ha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'os-onos-nofeature-ha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'os-ocl-nofeature-ha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'os-onos-sfc-ha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'os-odl_l2-moon-ha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-kvm-ha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-openo-ha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'os-odl-sfc-ha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-ovs_dpdk-ha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'k8-nosdn-nofeature-ha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-nofeature-noha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'os-odl_l3-nofeature-noha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'os-odl_l2-moon-noha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-kvm-noha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'os-odl-sfc-noha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-ovs_dpdk-noha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-bar-ha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-bar-noha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'k8-nosdn-stor4nfv-ha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
- - 'k8-nosdn-stor4nfv-noha':
- disabled: false
- auto-trigger-name: 'compass-{scenario}-{pod}-{stream}-trigger'
-
- jobs:
- - 'compass-{scenario}-{pod}-daily-{stream}'
- - 'compass-deploy-{pod}-daily-{stream}'
- - 'compass-collect-logs-{pod}-daily-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
- name: 'compass-{scenario}-{pod}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'compass-os-.*?-{pod}-daily-.*?'
- - 'compass-k8-.*?-{pod}-daily-.*?'
- - 'compass-os-.*?-baremetal-daily-.*?'
- - 'compass-k8-.*?-baremetal-daily-.*?'
- - 'compass-verify-[^-]*-[^-]*'
- block-level: 'NODE'
-
- wrappers:
- - fix-workspace-permissions
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - compass-ci-parameter:
- installer: '{installer}'
- gs-pathname: '{gs-pathname}'
- ppa-pathname: '{ppa-pathname}'
- - '{slave-label}-defaults'
- - '{installer}-defaults'
-
- triggers:
- - '{auto-trigger-name}'
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - trigger-builds:
- - project: 'compass-deploy-{pod}-daily-{stream}'
- current-parameters: true
- predefined-parameters: |
- DEPLOY_SCENARIO={scenario}
- COMPASS_OS_VERSION={os-version}
- COMPASS_OPENSTACK_VERSION={openstack-version}
- same-node: true
- block: true
- - trigger-builds:
- - project: 'functest-compass-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- same-node: true
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-compass-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- # here the stream means the SUT stream; the dovetail stream is defined in its own job
- # only run on os-(nosdn|odl_l3)-nofeature-ha scenarios
- # run with testsuite default, dovetail docker image with the latest tag (Monday, Tuesday)
- # run with testsuite proposed_tests, dovetail docker image with the latest tag (Thursday, Friday)
- - conditional-step:
- condition-kind: and
- condition-operands:
- - condition-kind: regex-match
- regex: os-(nosdn|odl_l3)-nofeature-ha
- label: '{scenario}'
- steps:
- - trigger-builds:
- - project: 'dovetail-compass-{pod}-default-{stream}'
- current-parameters: false
- predefined-parameters: |
- DOCKER_TAG=latest
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - conditional-step:
- condition-kind: and
- condition-operands:
- - condition-kind: regex-match
- regex: os-(nosdn|odl_l3)-nofeature-ha
- label: '{scenario}'
- steps:
- - trigger-builds:
- - project: 'dovetail-compass-{pod}-proposed_tests-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - conditional-step:
- condition-kind: and
- condition-operands:
- - condition-kind: regex-match
- regex: os-nosdn-nofeature-ha
- label: '{scenario}'
- steps:
- - trigger-builds:
- - project: 'bottlenecks-compass-posca_factor_multistack_storage_parallel-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - project: 'bottlenecks-compass-posca_factor_soak_throughputs-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - project: 'bottlenecks-compass-posca_stress_ping-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - conditional-step:
- condition-kind: and
- condition-operands:
- - condition-kind: regex-match
- regex: master
- label: '{stream}'
- steps:
- - trigger-builds:
- - project: 'compass-collect-logs-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
-
-
-- job-template:
- name: 'compass-deploy-{pod}-daily-{stream}'
-
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'compass-deploy-{pod}-daily-.*?'
- - 'compass-verify-deploy-.*?'
- block-level: 'NODE'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: 360
- abort: true
- - fix-workspace-permissions
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - compass-ci-parameter:
- installer: '{installer}'
- gs-pathname: '{gs-pathname}'
- ppa-pathname: '{ppa-pathname}'
- - '{slave-label}-defaults'
- - '{installer}-defaults'
-
- scm:
- - git-scm
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - conditional-step:
- condition-kind: regex-match
- regex: master
- label: '{stream}'
- steps:
- - shell:
- !include-raw-escape: ./compass-build.sh
- - shell:
- !include-raw-escape: ./compass-deploy.sh
- - conditional-step:
- condition-kind: regex-match
- regex: (danube|fraser)
- label: '{stream}'
- steps:
- - shell:
- !include-raw-escape: ./compass-download-artifact.sh
- - shell:
- !include-raw-escape: ./compass-deploy.sh
-
-- job-template:
- name: 'compass-collect-logs-{pod}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - compass-ci-parameter:
- installer: '{installer}'
- gs-pathname: '{gs-pathname}'
- ppa-pathname: '{ppa-pathname}'
- - '{slave-label}-defaults'
- - '{installer}-defaults'
-
-
- scm:
- - git-scm
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
- - fix-workspace-permissions
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - shell:
- !include-raw-escape: ./compass-logs.sh
-
-########################
-# parameter macros
-########################
-- parameter:
- name: compass-ci-parameter
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "URL to Google Storage."
- - string:
- name: CACHE_DIRECTORY
- default: "$HOME/opnfv/cache/$PROJECT{gs-pathname}"
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: PPA_REPO
- default: "http://artifacts.opnfv.org/compass4nfv/package{ppa-pathname}"
- - string:
- name: PPA_CACHE
- default: "$WORKSPACE/work/repo/"
- - string:
- name: LOG_DIRECTORY
- default: $WORKSPACE/log_output
- description: "Directory where the logs will be located upon the completion of the collection."
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
- description: "Scenario to deploy with."
- - string:
- name: COMPASS_OS_VERSION
- default: ''
- - string:
- name: COMPASS_OPENSTACK_VERSION
- default: ''
-
-
-########################
-# trigger macros
-########################
-
-# --------------------------
-# ha-baremetal-centos-master
-# --------------------------
-- trigger:
- name: 'compass-os-nosdn-nofeature-ha-baremetal-centos-master-trigger'
- triggers:
- - timed: '0 19 * * *'
-- trigger:
- name: 'compass-os-nosdn-openo-ha-baremetal-centos-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l3-nofeature-ha-baremetal-centos-master-trigger'
- triggers:
- - timed: '0 15 * * *'
-- trigger:
- name: 'compass-os-onos-nofeature-ha-baremetal-centos-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-ocl-nofeature-ha-baremetal-centos-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-onos-sfc-ha-baremetal-centos-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l2-moon-ha-baremetal-centos-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-kvm-ha-baremetal-centos-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-ovs_dpdk-ha-baremetal-centos-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl-sfc-ha-baremetal-centos-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-k8-nosdn-nofeature-ha-baremetal-centos-master-trigger'
- triggers:
- - timed: '0 6 * * *'
-- trigger:
- name: 'compass-os-nosdn-bar-ha-baremetal-centos-master-trigger'
- triggers:
- - timed: '' # '0 19 * * *'
-- trigger:
- name: 'compass-k8-nosdn-stor4nfv-ha-baremetal-centos-master-trigger'
- triggers:
- - timed: ''
-
-# ----------------------------
-# noha-baremetal-centos-master
-# ----------------------------
-- trigger:
- name: 'compass-os-nosdn-nofeature-noha-baremetal-centos-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l3-nofeature-noha-baremetal-centos-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l2-moon-noha-baremetal-centos-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-kvm-noha-baremetal-centos-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl-sfc-noha-baremetal-centos-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-ovs_dpdk-noha-baremetal-centos-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-bar-noha-baremetal-centos-master-trigger'
- triggers:
- - timed: '' # '0 19 * * *'
-- trigger:
- name: 'compass-k8-nosdn-stor4nfv-noha-baremetal-centos-master-trigger'
- triggers:
- - timed: ''
-
-
-# --------------------------
-# ha-huawei-pod7-danube
-# --------------------------
-- trigger:
- name: 'compass-os-nosdn-nofeature-ha-huawei-pod7-danube-trigger'
- triggers:
- - timed: '' # '0 19 * * *'
-- trigger:
- name: 'compass-os-nosdn-openo-ha-huawei-pod7-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l3-nofeature-ha-huawei-pod7-danube-trigger'
- triggers:
- - timed: '' # '0 15 * * *'
-- trigger:
- name: 'compass-os-onos-nofeature-ha-huawei-pod7-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-ocl-nofeature-ha-huawei-pod7-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-onos-sfc-ha-huawei-pod7-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l2-moon-ha-huawei-pod7-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-kvm-ha-huawei-pod7-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-ovs_dpdk-ha-huawei-pod7-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl-sfc-ha-huawei-pod7-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-k8-nosdn-nofeature-ha-huawei-pod7-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-bar-ha-huawei-pod7-danube-trigger'
- triggers:
- - timed: '' # '0 19 * * *'
-- trigger:
- name: 'compass-k8-nosdn-stor4nfv-ha-huawei-pod7-danube-trigger'
- triggers:
- - timed: ''
-
-# ----------------------------
-# noha-huawei-pod7-danube
-# ----------------------------
-- trigger:
- name: 'compass-os-nosdn-nofeature-noha-huawei-pod7-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l3-nofeature-noha-huawei-pod7-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l2-moon-noha-huawei-pod7-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-kvm-noha-huawei-pod7-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl-sfc-noha-huawei-pod7-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-ovs_dpdk-noha-huawei-pod7-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-bar-noha-huawei-pod7-danube-trigger'
- triggers:
- - timed: '' # '0 19 * * *'
-- trigger:
- name: 'compass-k8-nosdn-stor4nfv-noha-huawei-pod7-danube-trigger'
- triggers:
- - timed: ''
-
-# -------------------
-# ha-baremetal-master
-# -------------------
-- trigger:
- name: 'compass-os-nosdn-nofeature-ha-baremetal-master-trigger'
- triggers:
- - timed: '0 20 2-30/2 * *'
-- trigger:
- name: 'compass-os-nosdn-openo-ha-baremetal-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l3-nofeature-ha-baremetal-master-trigger'
- triggers:
- - timed: '0 18 1-29/2 * *'
-- trigger:
- name: 'compass-os-onos-nofeature-ha-baremetal-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-ocl-nofeature-ha-baremetal-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-onos-sfc-ha-baremetal-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l2-moon-ha-baremetal-master-trigger'
- triggers:
- - timed: '' # '0 12 2-30/2 * *'
-- trigger:
- name: 'compass-os-nosdn-kvm-ha-baremetal-master-trigger'
- triggers:
- - timed: '0 14 1-29/2 * *'
-- trigger:
- name: 'compass-os-nosdn-ovs_dpdk-ha-baremetal-master-trigger'
- triggers:
- - timed: '0 16 2-30/2 * *'
-- trigger:
- name: 'compass-k8-nosdn-nofeature-ha-baremetal-master-trigger'
- triggers:
- - timed: '0 10 2-30/2 * *'
-- trigger:
- name: 'compass-os-odl-sfc-ha-baremetal-master-trigger'
- triggers:
- - timed: '0 10 1-29/2 * *'
-- trigger:
- name: 'compass-os-nosdn-bar-ha-baremetal-master-trigger'
- triggers:
- - timed: '0 2 2-30/2 * *'
-- trigger:
- name: 'compass-k8-nosdn-stor4nfv-ha-baremetal-master-trigger'
- triggers:
- - timed: '0 16 1-29/2 * *'
-
-# ---------------------
-# noha-baremetal-master
-# ---------------------
-- trigger:
- name: 'compass-os-nosdn-kvm-noha-baremetal-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-nofeature-noha-baremetal-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l3-nofeature-noha-baremetal-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l2-moon-noha-baremetal-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl-sfc-noha-baremetal-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-ovs_dpdk-noha-baremetal-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-bar-noha-baremetal-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-k8-nosdn-stor4nfv-noha-baremetal-master-trigger'
- triggers:
- - timed: ''
-
-# -------------------
-# ha-baremetal-fraser
-# -------------------
-- trigger:
- name: 'compass-os-nosdn-nofeature-ha-baremetal-fraser-trigger'
- triggers:
- - timed: '0 1 1-29/2 * *'
-- trigger:
- name: 'compass-os-nosdn-openo-ha-baremetal-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l3-nofeature-ha-baremetal-fraser-trigger'
- triggers:
- - timed: '0 21 2-30/2 * *'
-- trigger:
- name: 'compass-os-onos-nofeature-ha-baremetal-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-ocl-nofeature-ha-baremetal-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-onos-sfc-ha-baremetal-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l2-moon-ha-baremetal-fraser-trigger'
- triggers:
- - timed: '' # '0 5 1-29/2 * *'
-- trigger:
- name: 'compass-os-nosdn-kvm-ha-baremetal-fraser-trigger'
- triggers:
- - timed: '0 13 2-30/2 * *'
-- trigger:
- name: 'compass-os-nosdn-ovs_dpdk-ha-baremetal-fraser-trigger'
- triggers:
- - timed: '0 9 1-29/2 * *'
-- trigger:
- name: 'compass-k8-nosdn-nofeature-ha-baremetal-fraser-trigger'
- triggers:
- - timed: '0 5 1-29/2 * *'
-- trigger:
- name: 'compass-os-odl-sfc-ha-baremetal-fraser-trigger'
- triggers:
- - timed: '0 17 2-30/2 * *'
-- trigger:
- name: 'compass-os-nosdn-bar-ha-baremetal-fraser-trigger'
- triggers:
- - timed: '0 21 1-29/2 * *'
-- trigger:
- name: 'compass-k8-nosdn-stor4nfv-ha-baremetal-fraser-trigger'
- triggers:
- - timed: '0 7 2-30/2 * *'
-
-# ---------------------
-# noha-baremetal-fraser
-# ---------------------
-- trigger:
- name: 'compass-os-nosdn-kvm-noha-baremetal-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-nofeature-noha-baremetal-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l3-nofeature-noha-baremetal-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l2-moon-noha-baremetal-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl-sfc-noha-baremetal-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-ovs_dpdk-noha-baremetal-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-bar-noha-baremetal-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-k8-nosdn-stor4nfv-noha-baremetal-fraser-trigger'
- triggers:
- - timed: ''
-
-# -----------------
-# ha-virtual-master
-# -----------------
-- trigger:
- name: 'compass-os-nosdn-nofeature-ha-virtual-master-trigger'
- triggers:
- - timed: '0 21 * * *'
-- trigger:
- name: 'compass-os-nosdn-openo-ha-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l3-nofeature-ha-virtual-master-trigger'
- triggers:
- - timed: '0 19 2-30/2 * *'
-- trigger:
- name: 'compass-os-onos-nofeature-ha-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-ocl-nofeature-ha-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-onos-sfc-ha-virtual-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l2-moon-ha-virtual-master-trigger'
- triggers:
- - timed: '' # '30 12 1-29/2 * *'
-- trigger:
- name: 'compass-os-nosdn-kvm-ha-virtual-master-trigger'
- triggers:
- - timed: '0 13 1-29/2 * *'
-- trigger:
- name: 'compass-os-nosdn-ovs_dpdk-ha-virtual-master-trigger'
- triggers:
- - timed: '0 17 2-30/2 * *'
-- trigger:
- name: 'compass-k8-nosdn-nofeature-ha-virtual-master-trigger'
- triggers:
- - timed: '5 2 * * *'
-- trigger:
- name: 'compass-os-odl-sfc-ha-virtual-master-trigger'
- triggers:
- - timed: '0 16 2-30/2 * *'
-- trigger:
- name: 'compass-os-nosdn-bar-ha-virtual-master-trigger'
- triggers:
- - timed: '0 17 1-29/2 * *'
-- trigger:
- name: 'compass-k8-nosdn-stor4nfv-ha-virtual-master-trigger'
- triggers:
- - timed: '0 15 2-30/2 * *'
-
-# -------------------
-# noha-virtual-master
-# -------------------
-- trigger:
- name: 'compass-os-nosdn-kvm-noha-virtual-master-trigger'
- triggers:
- - timed: '30 13 1-29/2 * *'
-- trigger:
- name: 'compass-os-nosdn-nofeature-noha-virtual-master-trigger'
- triggers:
- - timed: '0 14 2-30/2 * *'
-- trigger:
- name: 'compass-os-odl_l3-nofeature-noha-virtual-master-trigger'
- triggers:
- - timed: '0 15 1-29/2 * *'
-- trigger:
- name: 'compass-os-odl_l2-moon-noha-virtual-master-trigger'
- triggers:
- - timed: '' # '0 18 2-30/2 * *'
-- trigger:
- name: 'compass-os-odl-sfc-noha-virtual-master-trigger'
- triggers:
- - timed: '0 20 1-29/2 * *'
-- trigger:
- name: 'compass-os-nosdn-ovs_dpdk-noha-virtual-master-trigger'
- triggers:
- - timed: '0 11 2-30/2 * *'
-- trigger:
- name: 'compass-os-nosdn-bar-noha-virtual-master-trigger'
- triggers:
- - timed: '0 22 1-29/2 * *'
-- trigger:
- name: 'compass-k8-nosdn-stor4nfv-noha-virtual-master-trigger'
- triggers:
- - timed: '0 10 2-30/2 * *'
-
-# -----------------
-# ha-virtual-fraser
-# -----------------
-- trigger:
- name: 'compass-os-nosdn-nofeature-ha-virtual-fraser-trigger'
- triggers:
- - timed: '0 23 1-29/2 * *'
-- trigger:
- name: 'compass-os-nosdn-openo-ha-virtual-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l3-nofeature-ha-virtual-fraser-trigger'
- triggers:
- - timed: '0 22 2-30/2 * *'
-- trigger:
- name: 'compass-os-onos-nofeature-ha-virtual-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-ocl-nofeature-ha-virtual-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-onos-sfc-ha-virtual-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-odl_l2-moon-ha-virtual-fraser-trigger'
- triggers:
- - timed: '0 20 1-29/2 * *'
-- trigger:
- name: 'compass-os-nosdn-kvm-ha-virtual-fraser-trigger'
- triggers:
- - timed: '0 16 2-30/2 * *'
-- trigger:
- name: 'compass-os-nosdn-ovs_dpdk-ha-virtual-fraser-trigger'
- triggers:
- - timed: '0 14 1-29/2 * *'
-- trigger:
- name: 'compass-os-odl-sfc-ha-virtual-fraser-trigger'
- triggers:
- - timed: '0 18 2-30/2 * *'
-- trigger:
- name: 'compass-k8-nosdn-nofeature-ha-virtual-fraser-trigger'
- triggers:
- - timed: '5 1 2-30/2 * *'
-- trigger:
- name: 'compass-os-nosdn-bar-ha-virtual-fraser-trigger'
- triggers:
- - timed: '0 19 1-29/2 * *'
-- trigger:
- name: 'compass-k8-nosdn-stor4nfv-ha-virtual-fraser-trigger'
- triggers:
- - timed: '0 15 1-29/2 * *'
-
-# -------------------
-# noha-virtual-fraser
-# -------------------
-- trigger:
- name: 'compass-os-nosdn-kvm-noha-virtual-fraser-trigger'
- triggers:
- - timed: '0 15 1-29/2 * *'
-- trigger:
- name: 'compass-os-nosdn-nofeature-noha-virtual-fraser-trigger'
- triggers:
- - timed: '0 17 2-30/2 * *'
-- trigger:
- name: 'compass-os-odl_l3-nofeature-noha-virtual-fraser-trigger'
- triggers:
- - timed: '0 23 1-29/2 * *'
-- trigger:
- name: 'compass-os-odl_l2-moon-noha-virtual-fraser-trigger'
- triggers:
- - timed: '0 21 2-30/2 * *'
-- trigger:
- name: 'compass-os-odl-sfc-noha-virtual-fraser-trigger'
- triggers:
- - timed: '0 19 1-29/2 * *'
-- trigger:
- name: 'compass-os-nosdn-ovs_dpdk-noha-virtual-fraser-trigger'
- triggers:
- - timed: '0 12 2-30/2 * *'
-- trigger:
- name: 'compass-os-nosdn-bar-noha-virtual-fraser-trigger'
- triggers:
- - timed: '0 12 1-29/2 * *'
-- trigger:
- name: 'compass-k8-nosdn-stor4nfv-noha-virtual-fraser-trigger'
- triggers:
- - timed: '0 13 2-30/2 * *'
diff --git a/jjb/compass4nfv/compass-deploy.sh b/jjb/compass4nfv/compass-deploy.sh
deleted file mode 100644
index ac649b992..000000000
--- a/jjb/compass4nfv/compass-deploy.sh
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/bin/bash
-set -x
-
-# log info to console
-echo "Starting the deployment on baremetal environment using $INSTALLER_TYPE. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-echo 1 > /proc/sys/vm/drop_caches
-
-export CONFDIR=$WORKSPACE/deploy/conf
-if [[ "$BRANCH" = 'stable/danube' ]]; then
- # source the properties file so we get OPNFV vars
- source $BUILD_DIRECTORY/latest.properties
- # echo the info about artifact that is used during the deployment
- echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
-
- if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
- # for non-merge deployments
- # checkout the commit that was used for building the downloaded artifact
- # to make sure the ISO and deployment mechanism uses same versions
- echo "Checking out $OPNFV_GIT_SHA1"
- git checkout $OPNFV_GIT_SHA1 --quiet
- fi
-
- export ISO_URL=file://$BUILD_DIRECTORY/compass.iso
-else
- export ISO_URL=file://$BUILD_DIRECTORY/compass.tar.gz
-fi
-
-cd $WORKSPACE
-
-export OS_VERSION=${COMPASS_OS_VERSION}
-export OPENSTACK_VERSION=${COMPASS_OPENSTACK_VERSION}
-
-if [[ "${DEPLOY_SCENARIO}" =~ "-ocl" ]]; then
- export NETWORK_CONF_FILE=network_ocl.yml
-elif [[ "${DEPLOY_SCENARIO}" =~ "-onos" ]]; then
- export NETWORK_CONF_FILE=network_onos.yml
-elif [[ "${DEPLOY_SCENARIO}" =~ "-openo" ]]; then
- export NETWORK_CONF_FILE=network_openo.yml
-elif [[ "${DEPLOY_SCENARIO}" =~ "-ovs_dpdk" ]]; then
- export NETWORK_CONF_FILE=network_dpdk.yml
-else
- export NETWORK_CONF_FILE=network.yml
-fi
-
-if [[ "$NODE_NAME" =~ "-virtual" ]]; then
- export NETWORK_CONF=$CONFDIR/vm_environment/$NODE_NAME/${NETWORK_CONF_FILE}
- export DHA_CONF=$CONFDIR/vm_environment/${DEPLOY_SCENARIO}.yml
- if [[ "${DEPLOY_SCENARIO}" =~ "-moon-noha" ]]; then
- export VIRT_NUMBER=3
- elif [[ "${DEPLOY_SCENARIO}" =~ "-noha" ]]; then
- export VIRT_NUMBER=2
- fi
-else
- if [[ "$NODE_NAME" =~ "intel-pod17" ]]; then
- export INSTALL_NIC=eno2
- else
- export INSTALL_NIC=eth1
- fi
- export NETWORK_CONF=$CONFDIR/hardware_environment/$NODE_NAME/${NETWORK_CONF_FILE}
- export DHA_CONF=$CONFDIR/hardware_environment/$NODE_NAME/${DEPLOY_SCENARIO}.yml
-fi
-
-export DHA=${DHA_CONF}
-export NETWORK=${NETWORK_CONF}
-
-source ./ci/deploy_ci.sh
-
-if [ $? -ne 0 ]; then
- echo "depolyment failed!"
- deploy_ret=1
-fi
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
-
-exit $deploy_ret
diff --git a/jjb/compass4nfv/compass-dovetail-jobs.yaml b/jjb/compass4nfv/compass-dovetail-jobs.yaml
deleted file mode 100644
index c09086348..000000000
--- a/jjb/compass4nfv/compass-dovetail-jobs.yaml
+++ /dev/null
@@ -1,192 +0,0 @@
----
-- project:
-
- name: 'compass-dovetail-jobs'
- installer: 'compass'
- project: 'compass4nfv'
- # ---------------------------------
- # BRANCH ANCHORS
- # ---------------------------------
- danube: &danube
- stream: danube
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- dovetail-branch: master
- # -----------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -----------------------------------
- # CI PODs
- # -----------------------------------
- pod:
- - baremetal:
- slave-label: compass-baremetal-branch
- os-version: 'xenial'
- <<: *danube
- # ----------------------------------
- # scenarios
- # ----------------------------------
- scenario:
- - 'os-nosdn-nofeature-ha':
- disabled: true
- auto-trigger-name: 'compass-{scenario}-{pod}-weekly-{stream}-trigger'
-
- jobs:
- - 'compass-{scenario}-{pod}-weekly-{stream}'
- - 'compass-deploy-{pod}-weekly-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
- name: 'compass-{scenario}-{pod}-weekly-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'compass-os-.*?-{pod}-daily-.*?'
- - 'compass-os-.*?-{pod}-weekly-.*?'
- block-level: 'NODE'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
- - fix-workspace-permissions
-
- triggers:
- - '{auto-trigger-name}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - compass-dovetail-parameter:
- installer: '{installer}'
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
- - '{slave-label}-defaults'
- - '{installer}-defaults'
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - trigger-builds:
- - project: 'compass-deploy-{pod}-weekly-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO={scenario}
- COMPASS_OS_VERSION={os-version}
- same-node: true
- block: true
- - trigger-builds:
- - project: 'dovetail-compass-{pod}-compliance_set-weekly-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'dovetail-compass-{pod}-proposed_tests-weekly-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
-
-- job-template:
- name: 'compass-deploy-{pod}-weekly-{stream}'
-
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'compass-deploy-{pod}-daily-.*?'
- - 'compass-deploy-{pod}-weekly-.*'
- - 'compass-verify-deploy-.*?'
- block-level: 'NODE'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: 240
- abort: true
- - fix-workspace-permissions
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - compass-dovetail-parameter:
- installer: '{installer}'
- gs-pathname: '{gs-pathname}'
- - '{slave-label}-defaults'
- - '{installer}-defaults'
-
- scm:
- - git-scm
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - shell:
- !include-raw-escape: ./compass-download-artifact.sh
- - shell:
- !include-raw-escape: ./compass-deploy.sh
-
-########################
-# parameter macros
-########################
-- parameter:
- name: compass-dovetail-parameter
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "URL to Google Storage."
- - choice:
- name: COMPASS_OPENSTACK_VERSION
- choices:
- - 'newton'
-
-########################
-# trigger macros
-########################
-- trigger:
- name: 'compass-os-nosdn-nofeature-ha-baremetal-weekly-danube-trigger'
- triggers:
- - timed: '' # 'H H * * 0'
-
-- trigger:
- name: 'dovetail-weekly-trigger'
- triggers:
- - timed: '' # 'H H * * 0'
diff --git a/jjb/compass4nfv/compass-download-artifact.sh b/jjb/compass4nfv/compass-download-artifact.sh
deleted file mode 100644
index f8915643f..000000000
--- a/jjb/compass4nfv/compass-download-artifact.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# log info to console
-echo "Downloading the $INSTALLER_TYPE artifact. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-# get the latest.properties file in order to get info regarding latest artifact
-[[ -d $BUILD_DIRECTORY ]] || mkdir -p $BUILD_DIRECTORY
-curl -s -o $BUILD_DIRECTORY/latest.properties http://$GS_URL/latest.properties
-
-# check if we got the file
-[[ -f $BUILD_DIRECTORY/latest.properties ]] || exit 1
-
-# source the file so we get OPNFV vars
-source $BUILD_DIRECTORY/latest.properties
-
-if [[ "$BRANCH" == 'stable/danube' ]]; then
- # download the file
- curl -s -o $BUILD_DIRECTORY/compass.iso http://$OPNFV_ARTIFACT_URL > gsutil.iso.log 2>&1
- # list the file
- ls -al $BUILD_DIRECTORY/compass.iso
-else
- # download the file
- curl -s -o $BUILD_DIRECTORY/compass.tar.gz http://$OPNFV_ARTIFACT_URL > gsutil.tar.gz.log 2>&1
- # list the file
- ls -al $BUILD_DIRECTORY/compass.tar.gz
-fi
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/compass4nfv/compass-logs.sh b/jjb/compass4nfv/compass-logs.sh
deleted file mode 100644
index c028194e7..000000000
--- a/jjb/compass4nfv/compass-logs.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-set -o nounset
-set -o pipefail
-
-# log info to console
-echo "Uploading the logs $INSTALLER_TYPE artifact. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-# create the log directory if it doesn't exist
-[[ -d $LOG_DIRECTORY ]] || mkdir -p $LOG_DIRECTORY
-
-OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-COMPASS_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}_${OPNFV_ARTIFACT_VERSION}.log.tar.gz"
-
-
-sudo docker exec compass-tasks /bin/bash /opt/collect-log.sh
-sudo docker cp compass-tasks:/opt/log.tar.gz ${LOG_DIRECTORY}/${COMPASS_LOG_FILENAME}
-
-sudo chown $(whoami):$(whoami) ${LOG_DIRECTORY}/${COMPASS_LOG_FILENAME}
-
-gsutil cp "${LOG_DIRECTORY}/${COMPASS_LOG_FILENAME}" \
- "gs://${GS_URL}/logs/${COMPASS_LOG_FILENAME}" > /dev/null 2>&1
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
-echo "Artifact is available as http://${GS_URL}/logs/${COMPASS_LOG_FILENAME}"
diff --git a/jjb/compass4nfv/compass-makeppa.sh b/jjb/compass4nfv/compass-makeppa.sh
deleted file mode 100644
index fc5db2389..000000000
--- a/jjb/compass4nfv/compass-makeppa.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash
-set -x
-set -o errexit
-set -o nounset
-set -o pipefail
-# make ppa
-cd $WORKSPACE/
-./build/make_repo.sh
-# calc SHA512 of ppa
-cd $PPA_CACHE
-for i in $(find *.gz *.iso *.img -type f)
-do
- sha512sum=$(sha512sum $i | cut -d ' ' -f1)
- echo $sha512sum > $i.sha512
- curl -T $i $PPA_REPO
- curl -T $i.sha512 $PPA_REPO
-done
diff --git a/jjb/compass4nfv/compass-project-jobs.yaml b/jjb/compass4nfv/compass-project-jobs.yaml
deleted file mode 100644
index dc935f024..000000000
--- a/jjb/compass4nfv/compass-project-jobs.yaml
+++ /dev/null
@@ -1,135 +0,0 @@
----
-- project:
-
- name: compass-project
-
- installer: 'compass'
-
- project: 'compass4nfv'
-
- slave-label: 'compass-virtual'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- ppa-pathname: '/{stream}'
- disabled: false
- - danube:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- ppa-pathname: '/{stream}'
- disabled: false
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- ppa-pathname: '/{stream}'
- disabled: false
-
- jobs:
- - '{installer}-build-daily-{stream}'
- - 'compass-build-ppa-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
- name: '{installer}-build-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - compass-project-parameter:
- installer: '{installer}'
- gs-pathname: '{gs-pathname}'
- ppa-pathname: '{ppa-pathname}'
- - 'opnfv-build-ubuntu-defaults'
- - '{installer}-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - timed: 'H 8 * * *'
-
- builders:
- - shell:
- !include-raw-escape: ./compass-build.sh
- - shell:
- !include-raw-escape: ./compass-upload-artifact.sh
- - 'clean-workspace'
-
-- job-template:
- name: 'compass-build-ppa-{stream}'
-
- description: "build ppa(using docker) in huawei lab"
-
- disabled: '{obj:disabled}'
-
- node: huawei-build
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - compass-project-parameter:
- installer: '{installer}'
- gs-pathname: '{gs-pathname}'
- ppa-pathname: '{ppa-pathname}'
- - '{node}-defaults'
- - '{installer}-defaults'
- scm:
- - git-scm
-
- builders:
- - shell:
- !include-raw-escape: ./compass-makeppa.sh
-
-
-########################
-# parameter macros
-########################
-- parameter:
- name: compass-project-parameter
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: "$HOME/opnfv/cache/$PROJECT{gs-pathname}"
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "URL to Google Storage."
- - string:
- name: PPA_REPO
- default: "http://artifacts.opnfv.org/compass4nfv/package{ppa-pathname}"
- - string:
- name: PPA_CACHE
- default: "$WORKSPACE/work/repo/"
diff --git a/jjb/compass4nfv/compass-upload-artifact.sh b/jjb/compass4nfv/compass-upload-artifact.sh
deleted file mode 100644
index 87a9334b2..000000000
--- a/jjb/compass4nfv/compass-upload-artifact.sh
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/bin/bash
-set -o nounset
-set -o pipefail
-
-# log info to console
-echo "Uploading the $INSTALLER_TYPE artifact. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-if [[ "$BRANCH" == 'stable/danube' ]]; then
- FILETYPE='iso'
-else
- FILETYPE='tar.gz'
-fi
-# source the opnfv.properties to get ARTIFACT_VERSION
-source $BUILD_DIRECTORY/opnfv.properties
-
-# clone releng repository
-echo "Cloning releng repository..."
-[ -d releng ] && rm -rf releng
-git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
-# this is where we import the signing key
-if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
- source $WORKSPACE/releng/utils/gpg_import_key.sh
-fi
-
-signiso () {
-time gpg2 -vvv --batch --yes --no-tty \
- --default-key opnfv-helpdesk@rt.linuxfoundation.org \
- --passphrase besteffort \
- --detach-sig $BUILD_DIRECTORY/compass.$FILETYPE
-
-gsutil cp $BUILD_DIRECTORY/compass.$FILETYPE.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE.sig
-echo "ISO signature Upload Complete!"
-}
-
-signiso
-
-# upload artifact and additional files to google storage
-gsutil cp $BUILD_DIRECTORY/compass.$FILETYPE gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE > gsutil.$FILETYPE.log 2>&1
-gsutil cp $BUILD_DIRECTORY/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
-gsutil cp $BUILD_DIRECTORY/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
-
-gsutil -m setmeta \
- -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/latest.properties \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1
-
-gsutil -m setmeta \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE > /dev/null 2>&1
-
-# disabled errexit due to gsutil setmeta complaints
-# BadRequestException: 400 Invalid argument
-# check if we uploaded the file successfully to see if things are fine
-gsutil ls gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE > /dev/null 2>&1
-if [[ $? -ne 0 ]]; then
- echo "Problem while uploading artifact!"
- echo "Check log $WORKSPACE/gsutil.$FILETYPE.log on the machine where this build is done."
- exit 1
-fi
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
-echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.$FILETYPE"
diff --git a/jjb/compass4nfv/compass-verify-jobs.yaml b/jjb/compass4nfv/compass-verify-jobs.yaml
deleted file mode 100644
index beb857d94..000000000
--- a/jjb/compass4nfv/compass-verify-jobs.yaml
+++ /dev/null
@@ -1,345 +0,0 @@
----
-- project:
- name: 'compass-verify-jobs'
-
- project: 'compass4nfv'
-
- installer: 'compass'
- #####################################
- # branch definitions
- #####################################
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- ppa-pathname: '/{stream}'
- disabled: false
- openstack-version: 'pike'
- branch-type: 'master'
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- ppa-pathname: '/{stream}'
- disabled: false
- openstack-version: 'pike'
- branch-type: 'master'
-
- distro:
- - 'xenial':
- disabled: false
- os-version: 'xenial'
- openstack-os-version: ''
- - 'centos7':
- disabled: true
- os-version: 'centos7'
- openstack-os-version: ''
- #####################################
- # patch verification phases
- #####################################
- phase:
- - 'basic'
- - 'deploy-virtual'
- #####################################
- # jobs
- #####################################
- jobs:
- - 'compass-verify-{distro}-{stream}'
- - 'compass-verify-k8-{distro}-{stream}'
- - 'compass-verify-{phase}-{distro}-{stream}'
-#####################################
-# job templates
-#####################################
-- job-template:
- name: 'compass-verify-{distro}-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'compass-verify-[^-]*-[^-]*'
- - 'compass-os-.*?-virtual-daily-.*?'
- - 'compass-k8-.*?-virtual-daily-.*?'
- block-level: 'NODE'
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
- - fix-workspace-permissions
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '**/*'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
- readable-message: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'compass-virtual-{branch-type}-defaults'
- - '{installer}-defaults'
- - 'compass-verify-defaults':
- installer: '{installer}'
- gs-pathname: '{gs-pathname}'
- ppa-pathname: '{ppa-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: basic
- condition: SUCCESSFUL
- projects:
- - name: 'opnfv-lint-verify-{stream}'
- current-parameters: true
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'compass-verify-deploy-virtual-{distro}-{stream}'
- current-parameters: true
- predefined-parameters: |
- COMPASS_OS_VERSION={os-version}
- COMPASS_OPENSTACK_VERSION={openstack-version}
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: smoke-test
- condition: SUCCESSFUL
- projects:
- - name: 'functest-compass-virtual-suite-{stream}'
- current-parameters: false
- predefined-parameters: |
- FUNCTEST_MODE=tier
- FUNCTEST_TIER=healthcheck
- DEPLOY_SCENARIO=os-nosdn-nofeature-ha
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
-
-- job-template:
- name: 'compass-verify-k8-{distro}-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'compass-verify-[^-]*-[^-]*'
- - 'compass-os-.*?-virtual-daily-.*?'
- block-level: 'NODE'
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 240
- fail: true
- - fix-workspace-permissions
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - comment-added-contains-event:
- comment-contains-value: 'check k8'
- - comment-added-contains-event:
- comment-contains-value: 'verify k8'
- - comment-added-contains-event:
- comment-contains-value: 'check kubernetes'
- - comment-added-contains-event:
- comment-contains-value: 'verify kubernetes'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '**/*'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
- readable-message: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'compass-virtual-{branch-type}-defaults'
- - '{installer}-defaults'
- - 'compass-verify-defaults':
- installer: '{installer}'
- gs-pathname: '{gs-pathname}'
- ppa-pathname: '{ppa-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: 'k8-nosdn-nofeature-ha'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: basic
- condition: SUCCESSFUL
- projects:
- - name: 'opnfv-lint-verify-{stream}'
- current-parameters: true
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'compass-verify-deploy-virtual-{distro}-{stream}'
- current-parameters: true
- predefined-parameters: |
- COMPASS_OS_VERSION={os-version}
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
-
-- job-template:
- name: 'compass-verify-{phase}-{distro}-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'compass-os-.*?-virtual-daily-.*?'
- - 'compass-verify-deploy-.*'
- - 'functest-compass-virtual.*'
- block-level: 'NODE'
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
- - fix-workspace-permissions
-
- scm:
- - git-scm-gerrit
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-verify-{phase}-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'compass4nfv-verify-basic-macro'
- builders:
- - shell: |
- #!/bin/bash
-
- echo "Not activated!"
-
-- builder:
- name: 'compass4nfv-verify-deploy-virtual-macro'
- builders:
- - shell:
- !include-raw: ./compass-build.sh
- - shell:
- !include-raw: ./compass-deploy.sh
-#####################################
-# parameter macros
-#####################################
-- parameter:
- name: 'compass-verify-defaults'
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: "$HOME/opnfv/cache/$PROJECT{gs-pathname}"
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "URL to Google Storage."
- - string:
- name: PPA_REPO
- default: "http://artifacts.opnfv.org/compass4nfv/package{ppa-pathname}"
- - string:
- name: PPA_CACHE
- default: "$WORKSPACE/work/repo/"
- - choice:
- name: COMPASS_OS_VERSION
- choices:
- - 'xenial'
- - 'centos7'
diff --git a/jjb/compass4nfv/compass-workspace-precleanup.sh b/jjb/compass4nfv/compass-workspace-precleanup.sh
deleted file mode 100644
index e4a301bab..000000000
--- a/jjb/compass4nfv/compass-workspace-precleanup.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-cd $WORKSPACE/..
-sudo rm $WORKSPACE -rf
-git clone $GIT_BASE $WORKSPACE \ No newline at end of file
diff --git a/jjb/conductor/conductor.yaml b/jjb/conductor/conductor.yaml
deleted file mode 100644
index 8a128da37..000000000
--- a/jjb/conductor/conductor.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- project:
- name: conductor
-
- project: '{name}'
-
- jobs:
- - '{project}-verify-basic'
diff --git a/jjb/container4nfv/arm64/compass-build.sh b/jjb/container4nfv/arm64/compass-build.sh
deleted file mode 100755
index 696f7ff76..000000000
--- a/jjb/container4nfv/arm64/compass-build.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-set -e
-
-cd compass4nfv
-
-COMPASS_WORK_DIR=$WORKSPACE/../compass-work
-mkdir -p $COMPASS_WORK_DIR
-ln -s $COMPASS_WORK_DIR work
-
-#TODO: remove workaround after all arm64 patches merged
-curl -s http://people.linaro.org/~yibo.cai/compass/compass4nfv-arm64-fixup.sh | bash -s {scenario}
-
-# build tarball
-COMPASS_ISO_REPO='http://people.linaro.org/~yibo.cai/compass' ./build.sh
diff --git a/jjb/container4nfv/arm64/compass-deploy.sh b/jjb/container4nfv/arm64/compass-deploy.sh
deleted file mode 100755
index a22af09ba..000000000
--- a/jjb/container4nfv/arm64/compass-deploy.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/bash
-set -e
-
-cd compass4nfv
-
-export ADAPTER_OS_PATTERN='(?i)CentOS-7.*arm.*'
-export OS_VERSION="centos7"
-export KUBERNETES_VERSION="v1.7.5"
-if [[ "$NODE_NAME" =~ "-virtual" ]]; then
- export DHA="deploy/conf/vm_environment/k8-nosdn-nofeature-noha.yml"
- export NETWORK="deploy/conf/vm_environment/network.yml"
- export VIRT_NUMBER=2 VIRT_CPUS=4 VIRT_MEM=8192 VIRT_DISK=50G
-else
- export DHA="deploy/conf/hardware_environment/huawei-pod8/k8-nosdn-nofeature-noha.yml"
- export NETWORK="deploy/conf/hardware_environment/huawei-pod8/network.yml"
-fi
-
-./deploy.sh
diff --git a/jjb/container4nfv/arm64/yardstick-arm64.sh b/jjb/container4nfv/arm64/yardstick-arm64.sh
deleted file mode 100755
index 26c6fdcfe..000000000
--- a/jjb/container4nfv/arm64/yardstick-arm64.sh
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/bin/bash
-set -e
-
-sshpass -p root ssh root@10.1.0.50 \
- "mkdir -p /etc/yardstick; rm -rf /etc/yardstick/admin.conf"
-
-
-sshpass -p root ssh root@10.1.0.50 \
- kubectl config set-cluster yardstick --server=127.0.0.1:8080 --insecure-skip-tls-verify=true --kubeconfig=/etc/yardstick/admin.conf
-sshpass -p root ssh root@10.1.0.50 \
- kubectl config set-context yardstick --cluster=yardstick --kubeconfig=/etc/yardstick/admin.conf
-sshpass -p root ssh root@10.1.0.50 \
- kubectl config use-context yardstick --kubeconfig=/etc/yardstick/admin.conf
-
-
-
-if [ ! -n "$redirect" ]; then
- redirect="/dev/stdout"
-fi
-
-if [ ! -n "$DOCKER_TAG" ]; then
- DOCKER_TAG='latest'
-fi
-
-if [ ! -n "$NODE_NAME" ]; then
- NODE_NAME='arm-virutal03'
-fi
-
-if [ ! -n "$DEPLOY_SCENARIO" ]; then
- DEPLOY_SCENARIO='k8-nosdn-lb-noha_daily'
-fi
-
-if [ ! -n "$YARDSTICK_DB_BACKEND" ]; then
- YARDSTICK_DB_BACKEND='-i 104.197.68.199:8086'
-fi
-
-# Pull the image with correct tag
-DOCKER_REPO='opnfv/yardstick'
-if [ "$(uname -m)" = 'aarch64' ]; then
- DOCKER_REPO="${DOCKER_REPO}_$(uname -m)"
-fi
-echo "Yardstick: Pulling image ${DOCKER_REPO}:${DOCKER_TAG}"
-sshpass -p root ssh root@10.1.0.50 \
- docker pull ${DOCKER_REPO}:$DOCKER_TAG >$redirect
-
-if [ ! -n "$BRANCH" ]; then
- BRANCH=master
-fi
-
-opts="--name=yardstick --privileged=true --net=host -d -it "
-envs="-e YARDSTICK_BRANCH=${BRANCH} -e BRANCH=${BRANCH} \
- -e NODE_NAME=${NODE_NAME} \
- -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO}"
-rc_file_vol="-v /etc/yardstick/admin.conf:/etc/yardstick/admin.conf"
-cacert_file_vol=""
-map_log_dir=""
-sshkey=""
-YARDSTICK_SCENARIO_SUITE_NAME="opnfv_k8-nosdn-lb-noha_daily.yaml"
-
-# map log directory
-branch=${BRANCH##*/}
-#branch="master"
-dir_result="${HOME}/opnfv/yardstick/results/${branch}"
-mkdir -p ${dir_result}
-sudo rm -rf ${dir_result}/*
-map_log_dir="-v ${dir_result}:/tmp/yardstick"
-
-# Run docker
-cmd="docker rm -f yardstick || true"
-sshpass -p root ssh root@10.1.0.50 \
- ${cmd}
-
-cmd="sudo docker run ${opts} ${envs} ${rc_file_vol} ${cacert_file_vol} ${map_log_dir} ${sshkey} ${DOCKER_REPO}:${DOCKER_TAG} /bin/bash"
-echo "Yardstick: Running docker cmd: ${cmd}"
-sshpass -p root ssh root@10.1.0.50 \
- ${cmd}
-
-
-cmd='sudo docker exec yardstick sed -i.bak "/# execute tests/i\sed -i.bak \"s/openretriever\\\/yardstick/openretriever\\\/yardstick_aarch64/g\" \
- $\{YARDSTICK_REPO_DIR\}/tests/opnfv/test_cases/opnfv_yardstick_tc080.yaml" /usr/local/bin/exec_tests.sh'
-sshpass -p root ssh root@10.1.0.50 \
- ${cmd}
-
-echo "Yardstick: run tests: ${YARDSTICK_SCENARIO_SUITE_NAME}"
-cmd="sudo docker exec yardstick exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
-sshpass -p root ssh root@10.1.0.50 \
- ${cmd}
-
-cmd="docker rm -f yardstick"
-sshpass -p root ssh root@10.1.0.50 \
- ${cmd}
-
-echo "Yardstick: done!"
diff --git a/jjb/container4nfv/container4nfv-arm64.yaml b/jjb/container4nfv/container4nfv-arm64.yaml
deleted file mode 100644
index 5f5bc8627..000000000
--- a/jjb/container4nfv/container4nfv-arm64.yaml
+++ /dev/null
@@ -1,95 +0,0 @@
----
-
-- project:
- name: 'container4nfv-arm64'
- project: 'container4nfv'
- installer: 'compass'
- stream:
- - master:
- branch: master
- - fraser:
- branch: stable/fraser
- scenario:
- - 'k8-multus-nofeature-noha':
- disabled: false
- - 'k8-sriov-nofeature-noha':
- disabled: false
- pod:
- - virtual:
- slave-label: arm-packet01
- - baremetal:
- slave-label: compass-baremetal-arm
- jobs:
- - 'container4nfv-{scenario}-{pod}-daily-{stream}'
-
-
-- job-template:
- name: 'container4nfv-{scenario}-{pod}-daily-{stream}'
- disabled: '{obj:disabled}'
- concurrent: false
- node: '{slave-label}'
-
- scm:
- - git:
- url: https://gerrit.opnfv.org/gerrit/compass4nfv
- branches:
- - '{branch}'
- basedir: compass4nfv
- wipe-workspace: true
-
- triggers:
- - 'trigger-{scenario}-{pod}-{stream}'
-
- wrappers:
- - timeout:
- timeout: 150
- fail: true
-
- builders:
- - shell:
- !include-raw: arm64/compass-build.sh
- - shell:
- !include-raw: arm64/compass-deploy.sh
- - trigger-builds:
- - project: yardstick-arm64-compass-arm-virtual03-daily-master
- current-parameters: false
- same-node: true
- block: false
-
-
-- trigger:
- name: 'trigger-k8-multus-nofeature-noha-virtual-master'
- triggers:
- - timed: '0 12 * * *'
-- trigger:
- name: 'trigger-k8-sriov-nofeature-noha-virtual-master'
- triggers:
- - timed: '0 15 * * *'
-
-- trigger:
- name: 'trigger-k8-multus-nofeature-noha-virtual-fraser'
- triggers:
- - timed: '0 18 * * *'
-- trigger:
- name: 'trigger-k8-sriov-nofeature-noha-virtual-fraser'
- triggers:
- - timed: '0 21 * * *'
-
-
-- trigger:
- name: 'trigger-k8-multus-nofeature-noha-baremetal-master'
- triggers:
- - timed: '0 12 * * *'
-- trigger:
- name: 'trigger-k8-sriov-nofeature-noha-baremetal-master'
- triggers:
- - timed: '0 15 * * *'
-
-- trigger:
- name: 'trigger-k8-multus-nofeature-noha-baremetal-fraser'
- triggers:
- - timed: '0 18 * * *'
-- trigger:
- name: 'trigger-k8-sriov-nofeature-noha-baremetal-fraser'
- triggers:
- - timed: '0 21 * * *'
diff --git a/jjb/container4nfv/container4nfv-project.yaml b/jjb/container4nfv/container4nfv-project.yaml
deleted file mode 100644
index 194a1a989..000000000
--- a/jjb/container4nfv/container4nfv-project.yaml
+++ /dev/null
@@ -1,169 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: container4nfv
-
- project: '{name}'
-
- jobs:
- - 'container4nfv-verify-{stream}'
- - 'container4nfv-daily-upload-{stream}'
- - 'container4nfv-daily-deploy-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-- job-template:
- name: 'container4nfv-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- # yamllint disable rule:line-length
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from Global Jenkins\
-            \ configuration in case the job is run on non-LF hardware."
- # yamllint enable rule:line-length
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
- builders:
- - shell: |
- cd $WORKSPACE/ci
- ./build.sh
-
-- job-template:
- name: 'container4nfv-daily-upload-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- scm:
- - git-scm
-
- wrappers:
- - fix-workspace-permissions
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - 'container4nfv-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - shell: |
- cd $WORKSPACE/ci
- ./upload.sh
-
-- job-template:
- name: 'container4nfv-daily-deploy-{stream}'
-
- project-type: freestyle
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'clover-daily-deploy-.*?'
- - 'container4nfv-daily-deploy-.*?'
- block-level: 'NODE'
-
- wrappers:
- - timeout:
- timeout: 240
- abort: true
- - fix-workspace-permissions
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- # yamllint disable rule:line-length
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from Global Jenkins\
-            \ configuration in case the job is run on non-LF hardware."
- # yamllint enable rule:line-length
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-virtual6
- default-slaves:
- - huawei-virtual6
-
- scm:
- - git-scm
-
- triggers:
- - timed: '@midnight'
-
- builders:
- - shell: |
- cd $WORKSPACE/ci
- ./deploy.sh
-
-###################
-# parameter macros
-###################
-- parameter:
- name: 'container4nfv-defaults'
- parameters:
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/container4nfv/yardstick-arm64.yaml b/jjb/container4nfv/yardstick-arm64.yaml
deleted file mode 100644
index 6ed345ecc..000000000
--- a/jjb/container4nfv/yardstick-arm64.yaml
+++ /dev/null
@@ -1,130 +0,0 @@
----
-###################################
-# job configuration for yardstick
-###################################
-- project:
- name: yardstick-arm64
-
- project: 'yardstick'
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- docker-tag: 'latest'
- fraser: &fraser
- stream: fraser
- branch: '{stream}'
- gs-pathname: 'stable/{stream}'
- docker-tag: 'stable'
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # Installers using labels
- # CI PODs
- # This section should only contain the installers
- # that have been switched using labels for slaves
- # -------------------------------
- pod:
- # apex CI PODs
- - arm-virtual03:
- slave-label: arm-packet01
- installer: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- # -------------------------------
- testsuite:
- - 'daily'
-
- jobs:
- - 'yardstick-arm64-{installer}-{pod}-{testsuite}-{stream}'
-
-################################
-# job templates
-################################
-- job-template:
- name: 'yardstick-arm64-{installer}-{pod}-{testsuite}-{stream}'
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: 60
- abort: true
-
- triggers:
- - '{auto-trigger-name}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - 'yardstick-params-{slave-label}'
- - string:
- name: DEPLOY_SCENARIO
- default: 'k8-nosdn-lb-noha_daily'
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: 'Tag to pull docker image'
- - string:
- name: YARDSTICK_SCENARIO_SUITE_NAME
- default: opnfv_${{DEPLOY_SCENARIO}}_{testsuite}.yaml
- description: 'Path to test scenario suite'
- - string:
- name: CI_DEBUG
- default: 'false'
-          description: "Show debug output information"
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from Global Jenkins configuration."
-
- scm:
- - git-scm
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - 'yardstick-arm64'
-
- publishers:
- - email:
- recipients: trevor.tao@arm.com yibo.cai@arm.com
- - email-jenkins-admins-on-failure
-
-########################
-# builder macros
-########################
-- builder:
- name: yardstick-arm64
- builders:
- - shell:
- !include-raw: arm64/yardstick-arm64.sh
-
-########################
-# parameter macros
-########################
-
-
-- parameter:
- name: 'yardstick-params-arm-packet01'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
diff --git a/jjb/copper/copper.yaml b/jjb/copper/copper.yaml
deleted file mode 100644
index 620aefd9c..000000000
--- a/jjb/copper/copper.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- project:
- name: copper
-
- project: '{name}'
-
- jobs:
- - '{project}-verify-basic'
diff --git a/jjb/cperf/cperf-ci-jobs.yaml b/jjb/cperf/cperf-ci-jobs.yaml
deleted file mode 100644
index 59afb89c8..000000000
--- a/jjb/cperf/cperf-ci-jobs.yaml
+++ /dev/null
@@ -1,190 +0,0 @@
----
-###################################
-# job configuration for cperf
-###################################
-- project:
- name: cperf-ci-jobs
- project: cperf
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- docker-tag: 'latest'
-
- installer: apex
-
- testsuite:
- - csit
- - cbench
-
- jobs:
- - 'cperf-{installer}-{testsuite}-{stream}'
-
-################################
-# job template
-################################
-- job-template:
- name: 'cperf-{installer}-{testsuite}-{stream}'
-
- concurrent: true
-
- properties:
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER Suite: $CPERF_SUITE_NAME ODL BRANCH: $ODL_BRANCH'
- - timeout:
- timeout: 400
- abort: true
-
- parameters:
- - cperf-parameter:
- testsuite: '{testsuite}'
- gs-pathname: '{gs-pathname}'
- docker-tag: '{docker-tag}'
- stream: '{stream}'
-
- builders:
- - 'cperf-{testsuite}-builder'
-
-########################
-# parameter macros
-########################
-- parameter:
- name: cperf-parameter
- parameters:
- - string:
- name: CPERF_SUITE_NAME
- default: '{testsuite}'
- description: "Suite name to run"
- - string:
- name: ODL_BRANCH
- default: 'master'
- description: "Branch that OpenDaylight is running"
- - string:
- name: OS_VERSION
- default: 'master'
- description: "OpenStack version (short name, no stable/ prefix)"
- - string:
- name: GS_PATHNAME
- default: '{gs-pathname}'
- description: "Version directory where the opnfv documents will be stored in gs repository"
- - string:
- name: CI_DEBUG
- default: 'false'
- description: "Show debug output information"
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: 'Tag to pull docker image'
- - string:
- name: RC_FILE_PATH
- default: ''
- description: "Path to the OS credentials file if given"
- - string:
- name: SSH_KEY_PATH
- default: ''
- description: "Path to the private SSH key to access OPNFV nodes"
- - string:
- name: NODE_FILE_PATH
- default: ''
- description: "Path to the yaml file describing overcloud nodes"
- - string:
- name: ODL_CONTAINERIZED
- default: 'true'
- description: "boolean set true if ODL on overcloud is a container"
-
-########################
-# trigger macros
-########################
-
-########################
-# builder macros
-########################
-- builder:
- name: cperf-csit-builder
- builders:
- - 'cperf-cleanup'
- - 'cperf-prepare-robot'
- - 'cperf-robot-netvirt-csit'
-
-- builder:
- name: cperf-cbench-builder
- builders:
- - 'cperf-cleanup'
- - 'cperf-prepare-robot'
- - 'cperf-robot-cbench'
-
-- builder:
- name: cperf-prepare-robot
- builders:
- - shell:
- !include-raw: ./cperf-prepare-robot.sh
-
-- builder:
- name: cperf-robot-cbench
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o nounset
- set -o pipefail
-
- # cbench requires the openflow drop test feature to be installed.
- sshpass -p karaf ssh -o StrictHostKeyChecking=no \
- -o UserKnownHostsFile=/dev/null \
- -o LogLevel=error \
- -p 8101 karaf@$SDN_CONTROLLER_IP \
- feature:install odl-openflowplugin-flow-services-ui odl-openflowplugin-drop-test
-
- robot_cmd="pybot -e exclude -L TRACE -d /tmp \
- -v ODL_SYSTEM_1_IP:${SDN_CONTROLLER_IP} \
- -v ODL_SYSTEM_IP:${SDN_CONTROLLER_IP} \
- -v BUNDLEFOLDER:/opt/opendaylight \
- -v RESTCONFPORT:8081 \
- -v USER_HOME:/tmp \
- -v USER:heat-admin \
- -v ODL_SYSTEM_USER:heat-admin \
- -v TOOLS_SYSTEM_IP:localhost \
- -v of_port:6653"
- robot_suite="/home/opnfv/repos/odl_test/csit/suites/openflowplugin/Performance/010_Cbench.robot"
-
- docker run -i -v /tmp:/tmp opnfv/cperf:$DOCKER_TAG ${robot_cmd} ${robot_suite}
-
-- builder:
- name: cperf-robot-netvirt-csit
- builders:
- - shell:
- !include-raw: ./cperf-robot-netvirt-csit.sh
-
-- builder:
- name: cperf-cleanup
- builders:
- - shell: |
- #!/bin/bash
- [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
- echo "Cleaning up docker containers/images..."
- # Remove previous running containers if exist
- if [[ ! -z $(docker ps -a | grep opnfv/cperf) ]]; then
- echo "Removing existing opnfv/cperf containers..."
- docker ps -a | grep opnfv/cperf | awk '{print $1}' | xargs docker rm -f >${redirect}
- fi
-
- # Remove existing images if exist
- if [[ ! -z $(docker images | grep opnfv/cperf) ]]; then
- echo "Docker images to remove:"
- docker images | head -1 && docker images | grep opnfv/cperf >${redirect}
- image_tags=($(docker images | grep opnfv/cperf | awk '{print $2}'))
- for tag in "${image_tags[@]}"; do
- echo "Removing docker image opnfv/cperf:$tag..."
- docker rmi opnfv/cperf:$tag >/dev/null
- done
- fi
diff --git a/jjb/cperf/cperf-prepare-robot.sh b/jjb/cperf/cperf-prepare-robot.sh
deleted file mode 100755
index d88c6d510..000000000
--- a/jjb/cperf/cperf-prepare-robot.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-if [ -z ${RC_FILE_PATH+x} ]; then
- undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
- grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
- INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
-  sudo scp -o StrictHostKeyChecking=no root@$INSTALLER_IP:/home/stack/overcloudrc ${WORKSPACE}/overcloudrc
-else
- cp -f $RC_FILE_PATH ${WORKSPACE}/overcloudrc
-fi
-
-sudo chmod 755 ${WORKSPACE}/overcloudrc
-source ${WORKSPACE}/overcloudrc
-
-# copy ssh key for robot
-
-if [ -z ${SSH_KEY_PATH+x} ]; then
- sudo scp -o StrictHostKeyChecking=no root@$INSTALLER_IP:/home/stack/.ssh/id_rsa ${WORKSPACE}/
- sudo chown -R jenkins-ci:jenkins-ci ${WORKSPACE}/
- # done with sudo. jenkins-ci is the user from this point
- chmod 0600 ${WORKSPACE}/id_rsa
-else
- cp -f ${SSH_KEY_PATH} ${WORKSPACE}/
-fi
-
-docker pull opnfv/cperf:$DOCKER_TAG
-
-sudo mkdir -p /tmp/robot_results
diff --git a/jjb/cperf/cperf-robot-netvirt-csit.sh b/jjb/cperf/cperf-robot-netvirt-csit.sh
deleted file mode 100755
index 3ef747109..000000000
--- a/jjb/cperf/cperf-robot-netvirt-csit.sh
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/usr/bin/env bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-source ${WORKSPACE}/overcloudrc
-# note SDN_CONTROLLER_IP is set in overcloudrc, which is the VIP
-# for admin/public network (since we are running single network deployment)
-
-if [ "$OS_VERSION" == 'master' ]; then
- FULL_OS_VER='master'
-else
- FULL_OS_VER="stable/${OS_VERSION}"
-fi
-
-if [ "$ODL_BRANCH" == 'master' ]; then
-  ODL_STREAM='fluorine'
-else
- ODL_STREAM=${ODL_BRANCH}
-fi
-
-NUM_CONTROL_NODES=$(python ./parse-node-yaml.py num_nodes --file $NODE_FILE_PATH)
-NUM_COMPUTE_NODES=$(python ./parse-node-yaml.py num_nodes --node-type compute --file $NODE_FILE_PATH)
-
-idx=1
-EXTRA_ROBOT_ARGS=""
-for idx in `seq 1 $NUM_CONTROL_NODES`; do
- CONTROLLER_IP=$(python ./parse-node-yaml.py get_value -k address --node-number ${idx} --file $NODE_FILE_PATH)
- EXTRA_ROBOT_ARGS+=" -v ODL_SYSTEM_${idx}_IP:${CONTROLLER_IP} \
- -v OS_CONTROL_NODE_${idx}_IP:${CONTROLLER_IP} \
- -v ODL_SYSTEM_${idx}_IP:${CONTROLLER_IP} \
- -v HA_PROXY_${idx}_IP:${SDN_CONTROLLER_IP}"
-done
-
-idx=1
-for idx in `seq 1 $NUM_COMPUTE_NODES`; do
- COMPUTE_IP=$(python ./parse-node-yaml.py get_value -k address --node-type compute --node-number ${idx} --file $NODE_FILE_PATH)
- EXTRA_ROBOT_ARGS+=" -v OS_COMPUTE_${idx}_IP:${COMPUTE_IP}"
-done
-
-CONTROLLER_1_IP=$(python ./parse-node-yaml.py get_value -k address --node-number 1 --file $NODE_FILE_PATH)
-
-if [ "$ODL_CONTAINERIZED" == 'false' ]; then
- EXTRA_ROBOT_ARGS+=" -v NODE_KARAF_COUNT_COMMAND:'ps axf | grep org.apache.karaf | grep -v grep | wc -l || echo 0' \
- -v NODE_START_COMMAND:'sudo systemctl start opendaylight_api' \
- -v NODE_KILL_COMMAND:'sudo systemctl stop opendaylight_api' \
- -v NODE_STOP_COMMAND:'sudo systemctl stop opendaylight_api' \
- -v NODE_FREEZE_COMMAND:'sudo systemctl stop opendaylight_api' "
-else
- EXTRA_ROBOT_ARGS+=" -v NODE_KARAF_COUNT_COMMAND:\"sudo docker exec opendaylight_api /bin/bash -c 'ps axf | \
- grep org.apache.karaf | grep -v grep | wc -l' || echo 0\" \
- -v NODE_START_COMMAND:\"sudo docker start opendaylight_api\" \
- -v NODE_KILL_COMMAND:\"sudo docker stop opendaylight_api\" \
- -v NODE_STOP_COMMAND:\"sudo docker stop opendaylight_api\" \
- -v NODE_FREEZE_COMMAND:\"sudo docker stop opendaylight_api\" "
-fi
-
-robot_cmd="pybot \
- --removekeywords wuks \
- --xunit robotxunit.xml \
- -c critical \
- -e exclude \
- -d /tmp/robot_results \
- -v BUNDLEFOLDER:/opt/opendaylight \
- -v CONTROLLER_USER:heat-admin \
- -v DEFAULT_LINUX_PROMPT:\$ \
- -v DEFAULT_LINUX_PROMPT_STRICT:]\$ \
- -v DEFAULT_USER:heat-admin \
- -v DEVSTACK_DEPLOY_PATH:/tmp \
- -v HA_PROXY_IP:$SDN_CONTROLLER_IP \
- -v NUM_ODL_SYSTEM:$NUM_CONTROL_NODES \
- -v NUM_OS_SYSTEM:$NUM_CONTROL_NODES \
- -v NUM_TOOLS_SYSTEM:0 \
- -v ODL_SNAT_MODE:conntrack \
- -v ODL_STREAM:$ODL_STREAM \
-  -v ODL_SYSTEM_IP:$CONTROLLER_1_IP \
- -v OS_CONTROL_NODE_IP:$CONTROLLER_1_IP \
- -v OPENSTACK_BRANCH:$FULL_OS_VER \
- -v OS_USER:heat-admin \
- -v ODL_ENABLE_L3_FWD:yes \
- -v ODL_SYSTEM_USER:heat-admin \
- -v ODL_SYSTEM_PROMPT:\$ \
- -v PRE_CLEAN_OPENSTACK_ALL:True \
- -v PUBLIC_PHYSICAL_NETWORK:datacentre \
- -v RESTCONFPORT:8081 \
- -v ODL_RESTCONF_USER:admin \
- -v ODL_RESTCONF_PASSWORD:admin \
- -v KARAF_PROMPT_LOGIN:'opendaylight-user' \
- -v KARAF_PROMPT:'opendaylight-user.*root.*>' \
- -v SECURITY_GROUP_MODE:stateful \
- -v USER:heat-admin \
- -v USER_HOME:\$HOME \
- -v TOOLS_SYSTEM_IP:'' \
- -v NODE_ROLE_INDEX_START:0 \
- -v WORKSPACE:/tmp \
- $EXTRA_ROBOT_ARGS \
- -v of_port:6653 "
-
-docker run -i --net=host \
- -v ${WORKSPACE}/id_rsa:/tmp/id_rsa \
- -v ${WORKSPACE}/overcloudrc:/tmp/overcloudrc \
- opnfv/cperf:$DOCKER_TAG \
- /bin/bash -c "source /tmp/overcloudrc; mkdir -p \$HOME/.ssh; cp /tmp/id_rsa \$HOME/.ssh; \
- $robot_cmd /home/opnfv/repos/odl_test/csit/suites/openstack/connectivity/l2.robot;"
diff --git a/jjb/cperf/parse-node-yaml.py b/jjb/cperf/parse-node-yaml.py
deleted file mode 100644
index 5a7575540..000000000
--- a/jjb/cperf/parse-node-yaml.py
+++ /dev/null
@@ -1,71 +0,0 @@
-##############################################################################
-# Copyright (c) 2018 Tim Rozet (trozet@redhat.com) and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import argparse
-import sys
-import yaml
-
-
-def get_node_data_by_number(node_type, node_number):
- node_idx = 1
- for node_name, node_data in data['servers'].items():
- if node_type == node_data['type']:
- if node_idx == node_number:
- return node_name, node_data
- else:
- node_idx += 1
-
-
-def get_node_value(node_type, node_number, key):
- node_name, node_data = get_node_data_by_number(node_type, node_number)
- if not key and node_name is not None:
- return node_name
- elif node_data and isinstance(node_data, dict) and key in node_data:
- return node_data[key]
-
-
-def get_number_of_nodes(node_type):
- nodes = data['servers']
- num_nodes = 0
- for node_name, node_data in nodes.items():
- if node_data['type'] == node_type:
- num_nodes += 1
- return num_nodes
-
-
-FUNCTION_MAP = {'num_nodes':
- {'func': get_number_of_nodes,
- 'args': ['node_type']},
- 'get_value':
- {'func': get_node_value,
- 'args': ['node_type', 'node_number', 'key']},
- }
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser()
- parser.add_argument('command', choices=FUNCTION_MAP.keys())
- parser.add_argument('-f', '--file',
- dest='node_file',
- required=True)
- parser.add_argument('--node-type',
- default='controller',
- required=False)
- parser.add_argument('--node-number',
- default=1,
- type=int,
- required=False)
- parser.add_argument('-k', '--key',
- required=False)
- args = parser.parse_args(sys.argv[1:])
- with open(args.node_file, 'r') as fh:
- data = yaml.safe_load(fh)
- assert 'servers' in data
- func = FUNCTION_MAP[args.command]['func']
- args = [getattr(args, x) for x in FUNCTION_MAP[args.command]['args']]
- print(func(*args))
diff --git a/jjb/daisy4nfv/daisy-daily-jobs.yaml b/jjb/daisy4nfv/daisy-daily-jobs.yaml
deleted file mode 100644
index ba03bc3f2..000000000
--- a/jjb/daisy4nfv/daisy-daily-jobs.yaml
+++ /dev/null
@@ -1,423 +0,0 @@
----
-# jenkins job templates for Daisy
-# TODO
-# [ ] enable baremetal jobs after baremetal deployment finishes
-# [ ] enable jobs in danube
-# [ ] add more scenarios
-# [ ] integration with yardstick
-
-- project:
-
- name: 'daisy'
- project: '{name}'
- installer: '{name}'
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- disabled: true
- gs-pathname: ''
- fraser: &fraser
- stream: fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: true
-
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- pod:
- # -------------------------------
- # CI PODs
- # -------------------------------
- - baremetal:
- slave-label: daisy-baremetal
- <<: *master
- - virtual:
- slave-label: daisy-virtual
- <<: *master
- - baremetal:
- slave-label: daisy-baremetal
- <<: *fraser
- - virtual:
- slave-label: daisy-virtual
- <<: *fraser
- # -------------------------------
- # None-CI PODs
- # -------------------------------
- - zte-pod3:
- slave-label: zte-pod3
- <<: *master
- - zte-pod3:
- slave-label: zte-pod3
- <<: *fraser
- - zte-pod9:
- slave-label: zte-pod9
- <<: *master
- - zte-pod9:
- slave-label: zte-pod9
- <<: *fraser
-
- # -------------------------------
- # scenarios
- # -------------------------------
- scenario:
- # HA scenarios
- - 'os-nosdn-nofeature-ha':
- auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
- # NOHA scenarios
- - 'os-nosdn-nofeature-noha':
- auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
- # ODL_L3 scenarios
- - 'os-odl-nofeature-ha':
- auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
- # ovs_dpdk scenarios
- - 'os-nosdn-ovs_dpdk-noha':
- auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
-
- jobs:
- - '{project}-{scenario}-{pod}-daily-{stream}'
- - '{project}-deploy-{pod}-daily-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
- name: '{project}-{scenario}-{pod}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'daisy-os-.*?-{pod}-daily-.*?'
- - 'daisy-daily-.*'
- - 'daisy-kolla-build-.*'
- block-level: 'NODE'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
- triggers:
- - '{auto-trigger-name}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - 'testapi-parameter'
- - 'daisy-project-parameter':
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - trigger-builds:
- - project: 'daisy-deploy-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO={scenario}
- INSTALLER_VERSION={stream}
- UPSTREAM_JOB_NAME=$JOB_NAME
- UPSTREAM_BUILD_ID=$BUILD_ID
- same-node: true
- block: true
- - trigger-builds:
- - project: 'functest-daisy-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO={scenario}
- INSTALLER_VERSION={stream}
- UPSTREAM_JOB_NAME=$JOB_NAME
- UPSTREAM_BUILD_ID=$BUILD_ID
- same-node: true
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-daisy-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO={scenario}
- INSTALLER_VERSION={stream}
- UPSTREAM_JOB_NAME=$JOB_NAME
- UPSTREAM_BUILD_ID=$BUILD_ID
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
-
-- job-template:
- name: '{project}-deploy-{pod}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'daisy-kolla-build-.*'
- - '{installer}-(build|deploy|test)-daily-(fraser|master)'
- - '{installer}-deploy-(baremetal|virtual|zte-pod3|zte-pod9)-daily-(fraser|master)'
- - '(functest|yardstick)-{installer}-(baremetal|virtual|zte-pod3|zte-pod9)-daily-(fraser|master)'
- block-level: 'NODE'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - 'testapi-parameter'
- - 'daisy-project-parameter':
- gs-pathname: '{gs-pathname}'
- - 'deploy-scenario'
- - string:
- name: DEPLOY_TIMEOUT
- default: '150'
- description: 'Deployment timeout in minutes'
-
- scm:
- - git-scm
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - 'track-begin-timestamp'
- - shell:
- !include-raw-escape: ./daisy4nfv-download-artifact.sh
- - shell:
- !include-raw-escape: ./daisy-deploy.sh
- publishers:
- - 'report-provision-result'
-
-########################
-# trigger macros
-########################
-# ----------------------------------------------
-# Triggers for job running on daisy-baremetal against master branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-# Basic NOHA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-# ODL Scenarios
-- trigger:
- name: 'daisy-os-odl-nofeature-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-# ovs_dpdk Scenarios
-- trigger:
- name: 'daisy-os-nosdn-ovs_dpdk-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-
-# ----------------------------------------------
-# Triggers for job running on daisy-virtual against master branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-ha-virtual-daily-master-trigger'
- triggers:
- - timed: '0 16 * * *'
-# Basic NOHA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-noha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-# ODL Scenarios
-- trigger:
- name: 'daisy-os-odl-nofeature-ha-virtual-daily-master-trigger'
- triggers:
- - timed: '0 12 * * *'
-# ovs_dpdk Scenarios
-- trigger:
- name: 'daisy-os-nosdn-ovs_dpdk-noha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-
-# ----------------------------------------------
-# Triggers for job running on daisy-baremetal against fraser branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-ha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: '0 0,6 * * *'
-# Basic NOHA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-noha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: ''
-# ODL Scenarios
-- trigger:
- name: 'daisy-os-odl-nofeature-ha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: '0 12,18 * * *'
-# ovs_dpdk Scenarios
-- trigger:
- name: 'daisy-os-nosdn-ovs_dpdk-noha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: ''
-
-# ----------------------------------------------
-# Triggers for job running on daisy-virtual against fraser branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-ha-virtual-daily-fraser-trigger'
- triggers:
- - timed: '0 0 * * *'
-# Basic NOHA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-noha-virtual-daily-fraser-trigger'
- triggers:
- - timed: ''
-# ODL Scenarios
-- trigger:
- name: 'daisy-os-odl-nofeature-ha-virtual-daily-fraser-trigger'
- triggers:
- - timed: '0 20 * * *'
-# ovs_dpdk Scenarios
-- trigger:
- name: 'daisy-os-nosdn-ovs_dpdk-noha-virtual-daily-fraser-trigger'
- triggers:
- - timed: ''
-
-# ----------------------------------------------
-# Triggers for job running on zte-pod3 against master branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-ha-zte-pod3-daily-master-trigger'
- triggers:
- - timed: ''
-# Basic NOHA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-noha-zte-pod3-daily-master-trigger'
- triggers:
- - timed: ''
-# ODL Scenarios
-- trigger:
- name: 'daisy-os-odl-nofeature-ha-zte-pod3-daily-master-trigger'
- triggers:
- - timed: ''
-# ovs_dpdk Scenarios
-- trigger:
- name: 'daisy-os-nosdn-ovs_dpdk-noha-zte-pod3-daily-master-trigger'
- triggers:
- - timed: ''
-
-# ----------------------------------------------
-# Triggers for job running on zte-pod3 against fraser branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-ha-zte-pod3-daily-fraser-trigger'
- triggers:
- - timed: ''
-# Basic NOHA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-noha-zte-pod3-daily-fraser-trigger'
- triggers:
- - timed: ''
-# ODL Scenarios
-- trigger:
- name: 'daisy-os-odl-nofeature-ha-zte-pod3-daily-fraser-trigger'
- triggers:
- - timed: '0 16,22 * * *'
-# ovs_dpdk Scenarios
-- trigger:
- name: 'daisy-os-nosdn-ovs_dpdk-noha-zte-pod3-daily-fraser-trigger'
- triggers:
- - timed: ''
-
-# ----------------------------------------------
-# ZTE POD9 Triggers running against master branch
-# ----------------------------------------------
-# ovs_dpdk Scenarios
-- trigger:
- name: 'daisy-os-nosdn-ovs_dpdk-noha-zte-pod9-daily-master-trigger'
- triggers:
- - timed: '0 20 * * *'
-# Basic HA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-ha-zte-pod9-daily-master-trigger'
- triggers:
- - timed: ''
-# Basic NOHA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-noha-zte-pod9-daily-master-trigger'
- triggers:
- - timed: ''
-# ODL Scenarios
-- trigger:
- name: 'daisy-os-odl-nofeature-ha-zte-pod9-daily-master-trigger'
- triggers:
- - timed: ''
-
-# ----------------------------------------------
-# ZTE POD9 Triggers running against fraser branch
-# ----------------------------------------------
-# ovs_dpdk Scenarios
-- trigger:
- name: 'daisy-os-nosdn-ovs_dpdk-noha-zte-pod9-daily-fraser-trigger'
- triggers:
- - timed: '0 10 * * *'
-# Basic HA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-ha-zte-pod9-daily-fraser-trigger'
- triggers:
- - timed: ''
-# Basic NOHA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-noha-zte-pod9-daily-fraser-trigger'
- triggers:
- - timed: ''
-# ODL Scenarios
-- trigger:
- name: 'daisy-os-odl-nofeature-ha-zte-pod9-daily-fraser-trigger'
- triggers:
- - timed: ''
diff --git a/jjb/daisy4nfv/daisy-deploy.sh b/jjb/daisy4nfv/daisy-deploy.sh
deleted file mode 100755
index ea57ab1b4..000000000
--- a/jjb/daisy4nfv/daisy-deploy.sh
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/bin/bash
-set -o nounset
-set -o pipefail
-
-echo "--------------------------------------------------------"
-echo "This is $INSTALLER_TYPE deploy job!"
-echo "--------------------------------------------------------"
-
-DEPLOY_SCENARIO=${DEPLOY_SCENARIO:-"os-nosdn-nofeature-noha"}
-BRIDGE=${BRIDGE:-pxebr}
-LAB_NAME=${NODE_NAME/-*}
-POD_NAME=${NODE_NAME/*-}
-deploy_ret=0
-
-if [[ ! "$NODE_NAME" =~ "-virtual" ]] && [[ ! "$LAB_NAME" =~ (zte) ]]; then
-    echo "Unsupported lab $LAB_NAME for now, cannot continue!"
- exit $deploy_ret
-fi
-
-# clone the securedlab repo
-cd $WORKSPACE
-
-# There are no PDFs in euphrates branch of pharos repo.
-if [[ "$BRANCH" =~ "euphrates" ]]; then
- CONFIG_REPO_NAME=securedlab
-else
- CONFIG_REPO_NAME=pharos
-fi
-
-if [[ "$BRANCH" =~ "master" ]]; then
- DOCTOR_OPT="-d 1"
-else
- DOCTOR_OPT=""
-fi
-
-LABS_DIR=/var/tmp/opnfv-${CONFIG_REPO_NAME}
-
-echo "Cloning ${CONFIG_REPO_NAME} repo $BRANCH to $LABS_DIR"
-sudo rm -rf $LABS_DIR
-git clone ssh://jenkins-zte@gerrit.opnfv.org:29418/${CONFIG_REPO_NAME} \
- --quiet --branch $BRANCH $LABS_DIR
-
-DEPLOY_COMMAND="sudo -E ./ci/deploy/deploy.sh -L $LABS_DIR \
- -l $LAB_NAME -p $POD_NAME -B $BRIDGE -s $DEPLOY_SCENARIO \
- $DOCTOR_OPT"
-
-# log info to console
-echo """
-Deployment parameters
---------------------------------------------------------
-Scenario: $DEPLOY_SCENARIO
-LAB: $LAB_NAME
-POD: $POD_NAME
-BRIDGE: $BRIDGE
-
-Starting the deployment using $INSTALLER_TYPE. This could take some time...
---------------------------------------------------------
-Issuing command
-$DEPLOY_COMMAND
-"""
-
-# start the deployment
-$DEPLOY_COMMAND
-
-if [ $? -ne 0 ]; then
- echo
-    echo "Deployment failed!"
- deploy_ret=1
-else
- echo
- echo "--------------------------------------------------------"
- echo "Deployment done!"
-fi
-
-exit $deploy_ret
diff --git a/jjb/daisy4nfv/daisy-project-jobs.yaml b/jjb/daisy4nfv/daisy-project-jobs.yaml
deleted file mode 100644
index ec5ba71f2..000000000
--- a/jjb/daisy4nfv/daisy-project-jobs.yaml
+++ /dev/null
@@ -1,312 +0,0 @@
----
-######################################################################
-# Add daily jobs for building, deploying and testing
-# TODO:
-# - [ ] Add yardstick and functest for test stage
-# - [x] Use daisy-baremetal-defaults for choosing baremetal deployment
-######################################################################
-
-#############################
-# Job configuration for daisy
-#############################
-- project:
- name: daisy-project-jobs
-
- project: 'daisy'
-
- installer: 'daisy'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: true
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: true
-
- phase:
- - 'build':
- slave-label: 'opnfv-build-centos'
- - 'deploy':
- slave-label: 'daisy-baremetal'
- - 'test':
- slave-label: 'opnfv-build-centos'
-
- jobs:
- - '{installer}-daily-{stream}'
- - '{installer}-{phase}-daily-{stream}'
- - '{installer}-kolla-build-{stream}'
-
-#############################
-# docker build job templates
-#############################
-- job-template:
- name: '{installer}-kolla-build-{stream}'
- disabled: false
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- # Note: Need to block all jobs which may create daisy VM.
- blocking-jobs:
- - '{installer}-kolla-build-.*'
- - 'daisy-deploy-.*'
- - 'daisy-daily-.*'
- block-level: 'NODE'
-
- scm:
- - git-scm
-
- triggers:
- - 'daisy-kolla-build-{stream}-trigger'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'daisy-virtual-defaults'
- - '{installer}-defaults'
- - '{installer}-project-parameter':
- gs-pathname: '{gs-pathname}'
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 720
- fail: true
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - shell:
- !include-raw-escape: ./daisy4nfv-build-kolla-image.sh
-
- publishers:
- - '{installer}-recipients'
- - email-jenkins-admins-on-failure
-
-- trigger:
- name: 'daisy-kolla-build-fraser-trigger'
- triggers:
- - timed: '0 0 * * 0'
-
-- trigger:
- name: 'daisy-kolla-build-master-trigger'
- triggers:
- - timed: '0 12 * * 0'
-
-
-########################
-# job templates
-########################
-- job-template:
- name: '{installer}-daily-{stream}'
-
- project-type: multijob
-
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '{installer}-daily-.*'
- - '{installer}-kolla-build-.*'
- - 'daisy4nfv-merge-build-.*'
- - 'daisy4nfv-verify-build-.*'
- block-level: 'NODE'
-
- scm:
- - git-scm
-
- triggers:
- - timed: '0 8 * * *'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-centos-defaults'
- - '{installer}-defaults'
- - '{installer}-project-parameter':
- gs-pathname: '{gs-pathname}'
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: '{installer}-build-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: deploy
- condition: SUCCESSFUL
- projects:
- - name: '{installer}-deploy-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: test
- condition: SUCCESSFUL
- projects:
- - name: '{installer}-test-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
-
- publishers:
- - '{installer}-recipients'
- - email-jenkins-admins-on-failure
-
-- job-template:
- name: '{installer}-{phase}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 6
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '{installer}-daily-(build|deploy|test)-(fraser|master)'
- - '{installer}-.*-(baremetal|virtual|zte-pod3|zte-pod9)-daily-(fraser|master)'
- - '(functest|yardstick)-{installer}-(baremetal|virtual|zte-pod3|zte-pod9)-daily-(fraser|master)'
- block-level: 'NODE'
-
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
- - '{installer}-project-parameter':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{installer}-{phase}-daily-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'daisy-build-daily-macro'
- builders:
- - shell:
- !include-raw: ./daisy4nfv-basic.sh
- - shell:
- !include-raw: ./daisy4nfv-build.sh
- - shell:
- !include-raw: ./daisy4nfv-upload-artifact.sh
- - 'clean-workspace'
-
-- builder:
- name: 'daisy-deploy-daily-macro'
- builders:
- - shell:
- !include-raw: ./daisy4nfv-download-artifact.sh
- - shell:
- !include-raw: ./daisy-deploy.sh
-
-- builder:
- name: 'daisy-test-daily-macro'
- builders:
- - shell: |
- #!/bin/bash
-
- echo "Not activated!"
-
-#####################################
-# parameter macros
-#####################################
-- publisher:
- name: 'daisy-recipients'
- publishers:
- # yamllint disable rule:line-length
- - email:
- recipients: hu.zhijiang@zte.com.cn lu.yao135@zte.com.cn zhou.ya@zte.com.cn yangyang1@zte.com.cn julienjut@gmail.com
- # yamllint enable rule:line-length
- - email-jenkins-admins-on-failure
-
-- parameter:
- name: 'daisy-project-parameter'
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/cache/$INSTALLER_TYPE
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/daisy4nfv/daisy4nfv-basic.sh b/jjb/daisy4nfv/daisy4nfv-basic.sh
deleted file mode 100755
index 87f5482e0..000000000
--- a/jjb/daisy4nfv/daisy4nfv-basic.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-
-echo "--------------------------------------------------------"
-echo "This is the daisy4nfv basic job!"
-echo "--------------------------------------------------------"
-
diff --git a/jjb/daisy4nfv/daisy4nfv-build-kolla-image.sh b/jjb/daisy4nfv/daisy4nfv-build-kolla-image.sh
deleted file mode 100755
index 0441ea159..000000000
--- a/jjb/daisy4nfv/daisy4nfv-build-kolla-image.sh
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2016 ZTE Corporation and others.
-# hu.zhijiang@zte.com.cn
-# sun.jing22@zte.com.cn
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-upload_image_to_opnfv () {
- image=$1
-
- sha512sum -b $image > $image.sha512sum
- gsutil cp $image.sha512sum gs://$GS_URL/upstream/$image.sha512sum
-
- echo "Uploading $INSTALLER_TYPE artifact. This could take some time..."
- echo
- gsutil cp $image gs://$GS_URL/upstream/$image
- gsutil -m setmeta \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/upstream/$image
-
- # check if we uploaded the file successfully to see if things are fine
- gsutil ls gs://$GS_URL/upstream/$image
- if [[ $? -ne 0 ]]; then
- echo "Problem while uploading artifact!"
- exit 1
- fi
-}
-
-
-
-echo "--------------------------------------------------------"
-echo "This is the daisy4nfv kolla image build job!"
-echo "--------------------------------------------------------"
-
-# start the build
-cd $WORKSPACE
-rm -rf docker_build_dir
-mkdir -p docker_build_dir
-
-# The -j option decides which branch is used when building;
-# it applies to OPNFV only.
-sudo -E ./ci/kolla-build.sh -j $JOB_NAME -w $WORKSPACE/docker_build_dir
-
-if [ $? -ne 0 ]; then
- echo
- echo "Kolla build failed!"
- deploy_ret=1
-else
- echo
- echo "--------------------------------------------------------"
- echo "Kolla build done!"
-fi
-
-image=$(ls $WORKSPACE/docker_build_dir/kolla-build-output/kolla-image-*.tgz)
-upload_image_to_opnfv $image
-
-echo
-echo "--------------------------------------------------------"
-echo "All done!"
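
For reference, a minimal sketch of the consumer side of the upload above, assuming gsutil is available and GS_URL points at the same bucket path; the image file name is purely illustrative:

#!/bin/bash
# Hedged sketch (not part of the original job): fetch a previously uploaded
# kolla image and check it against its published sha512sum.
set -o errexit -o nounset -o pipefail

image="kolla-image-example.tgz"                  # illustrative name
gsutil cp "gs://$GS_URL/upstream/$image" .
gsutil cp "gs://$GS_URL/upstream/$image.sha512sum" .

expected=$(cut -d' ' -f1 "$image.sha512sum")
actual=$(sha512sum -b "$image" | cut -d' ' -f1)
if [ "$expected" != "$actual" ]; then
    echo "Checksum mismatch for $image!"
    exit 1
fi
echo "Checksum verified."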
diff --git a/jjb/daisy4nfv/daisy4nfv-build.sh b/jjb/daisy4nfv/daisy4nfv-build.sh
deleted file mode 100755
index a081b3bc6..000000000
--- a/jjb/daisy4nfv/daisy4nfv-build.sh
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/bin/bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-echo "--------------------------------------------------------"
-echo "This is the daisy4nfv build job!"
-echo "--------------------------------------------------------"
-
-# set OPNFV_ARTIFACT_VERSION
-if [[ "$JOB_NAME" =~ "merge" ]]; then
- echo "Building Daisy4nfv ISO for a merged change"
- export OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
-else
- export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-fi
-
-# build output directory
-OUTPUT_DIR=$WORKSPACE/build_output
-mkdir -p $OUTPUT_DIR
-
-# start the build
-cd $WORKSPACE
-./ci/build.sh $OUTPUT_DIR $OPNFV_ARTIFACT_VERSION
-
-# save information regarding artifact into file
-(
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin"
- echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $OUTPUT_DIR/opnfv-$OPNFV_ARTIFACT_VERSION.bin | cut -d' ' -f1)"
- echo "OPNFV_ARTIFACT_URL_ISO=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
- echo "OPNFV_ARTIFACT_SHA512SUM_ISO=$(sha512sum $OUTPUT_DIR/opnfv-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
-) > $WORKSPACE/opnfv.properties
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
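
As a hedged illustration, a downstream step could read the metadata file written above roughly like this; only the keys emitted by the script are assumed:

#!/bin/bash
# Hedged sketch (not part of the original job): consume opnfv.properties.
set -o errexit -o nounset

source "$WORKSPACE/opnfv.properties"
echo "Artifact version: $OPNFV_ARTIFACT_VERSION"
echo "Artifact URL    : $OPNFV_ARTIFACT_URL"
echo "Git SHA1        : $OPNFV_GIT_SHA1"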
diff --git a/jjb/daisy4nfv/daisy4nfv-download-artifact.sh b/jjb/daisy4nfv/daisy4nfv-download-artifact.sh
deleted file mode 100755
index ae5ca3813..000000000
--- a/jjb/daisy4nfv/daisy4nfv-download-artifact.sh
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2016 ZTE Corporation and others.
-# hu.zhijiang@zte.com.cn
-# sun.jing22@zte.com.cn
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o pipefail
-
-# Use the proxy URL instead of the normal one, since googleusercontent.com may be blocked intermittently.
-[[ "$NODE_NAME" =~ (zte) ]] && GS_URL=${GS_BASE_PROXY%%/*}/$GS_URL
-
-if [[ "$JOB_NAME" =~ "merge" ]]; then
- echo "Downloading http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties"
- # get the properties file for the Daisy4nfv BIN built for a merged change
- curl -L -s -o $WORKSPACE/latest.properties http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties
-else
- # get the latest.properties file in order to get info regarding latest artifact
- echo "Downloading http://$GS_URL/latest.properties"
- curl -L -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
-fi
-
-# check if we got the file
-[[ -f $WORKSPACE/latest.properties ]] || exit 1
-
-# source the file so we get artifact metadata
-source $WORKSPACE/latest.properties
-
-# echo the info about artifact that is used during the deployment
-OPNFV_ARTIFACT=${OPNFV_ARTIFACT_URL/*\/}
-echo "Using $OPNFV_ARTIFACT for deployment"
-
-[[ "$NODE_NAME" =~ (zte) ]] && OPNFV_ARTIFACT_URL=${GS_BASE_PROXY%%/*}/$OPNFV_ARTIFACT_URL
-
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
- # check if we already have the image to avoid redownload
- BINSTORE="/bin_mount/opnfv_ci/${BRANCH##*/}"
- if [[ -f "$BINSTORE/$OPNFV_ARTIFACT" && ! -z $OPNFV_ARTIFACT_SHA512SUM ]]; then
- echo "BIN exists locally. Starting to check the sha512sum."
- if [[ $OPNFV_ARTIFACT_SHA512SUM = $(sha512sum -b $BINSTORE/$OPNFV_ARTIFACT | cut -d' ' -f1) ]]; then
- echo "Sha512sum is verified. Skipping the download and using the file from BIN store."
- ln -s $BINSTORE/$OPNFV_ARTIFACT $WORKSPACE/opnfv.bin
- echo "--------------------------------------------------------"
- echo
- ls -al $WORKSPACE/opnfv.bin
- echo
- echo "--------------------------------------------------------"
- echo "Done!"
- exit 0
- fi
- fi
-fi
-
-# log info to console
-echo "Downloading the $INSTALLER_TYPE artifact using URL http://$OPNFV_ARTIFACT_URL"
-echo "This could take some time... Now the time is $(date -u)"
-echo "--------------------------------------------------------"
-echo
-
-# download the file
-if [[ "$NODE_NAME" =~ (zte) ]] && [ -x "$(command -v aria2c)" ]; then
- DOWNLOAD_CMD="aria2c -x 3 --allow-overwrite=true -d $WORKSPACE -o opnfv.bin"
-else
- DOWNLOAD_CMD="curl -L -s -o $WORKSPACE/opnfv.bin"
-fi
-
-maxretries=3
-cnt=0
-rc=1
-while [ $cnt -lt $maxretries ] && [ $rc -ne 0 ]
-do
- cnt=$[cnt + 1]
- $DOWNLOAD_CMD http://$OPNFV_ARTIFACT_URL > gsutil.bin.log 2>&1
- rc=$?
-done
-
-# list the file
-ls -al $WORKSPACE/opnfv.bin
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
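
A hedged companion sketch: the sha512sum check the script applies to cached copies could equally be applied to the freshly downloaded file, assuming latest.properties has been sourced as above:

#!/bin/bash
# Hedged sketch (not part of the original job): verify the downloaded
# opnfv.bin against the checksum published in latest.properties.
set -o errexit -o nounset

if [ -n "${OPNFV_ARTIFACT_SHA512SUM:-}" ]; then
    actual=$(sha512sum -b "$WORKSPACE/opnfv.bin" | cut -d' ' -f1)
    if [ "$actual" != "$OPNFV_ARTIFACT_SHA512SUM" ]; then
        echo "Downloaded artifact checksum mismatch!"
        exit 1
    fi
    echo "Downloaded artifact checksum verified."
fi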
diff --git a/jjb/daisy4nfv/daisy4nfv-merge-jobs.yaml b/jjb/daisy4nfv/daisy4nfv-merge-jobs.yaml
deleted file mode 100644
index 7e03fabaf..000000000
--- a/jjb/daisy4nfv/daisy4nfv-merge-jobs.yaml
+++ /dev/null
@@ -1,226 +0,0 @@
----
-- project:
- name: 'daisy4nfv-merge-jobs'
-
- project: 'daisy'
-
- installer: 'daisy'
-
- ###########################################################
-  # use an alias to keep the existing job names unchanged
- ###########################################################
- alias: 'daisy4nfv'
-
- #####################################
- # branch definitions
- #####################################
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: true
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: true
- #####################################
- # patch merge phases
- #####################################
- phase:
- - 'build':
- slave-label: 'opnfv-build-centos'
- - 'deploy-virtual':
- slave-label: 'daisy-virtual'
-
- #####################################
- # jobs
- #####################################
- jobs:
- - '{alias}-merge-{stream}'
- - '{alias}-merge-{phase}-{stream}'
-
-#####################################
-# job templates
-#####################################
-- job-template:
- name: '{alias}-merge-{stream}'
-
- project-type: multijob
-
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '{alias}-merge-(master|fraser)'
- block-level: 'NODE'
-
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'ci/**'
- - compare-type: ANT
- pattern: 'code/**'
- - compare-type: ANT
- pattern: 'deploy/**'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
- - compare-type: ANT
- pattern: '.gitignore'
- readable-message: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-centos-defaults'
- - '{alias}-merge-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: '{alias}-merge-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: '{alias}-merge-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
-
-- job-template:
- name: '{alias}-merge-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '{alias}-merge-{phase}-.*'
- - '{installer}-daily-.*'
- block-level: 'NODE'
-
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults'
- - '{alias}-merge-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-merge-{phase}-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'daisy-merge-build-macro'
- builders:
- - shell:
- !include-raw: ./daisy4nfv-basic.sh
- - shell:
- !include-raw: ./daisy4nfv-build.sh
- - shell:
- !include-raw: ./daisy4nfv-upload-artifact.sh
- - 'clean-workspace'
-
-- builder:
- name: 'daisy-merge-deploy-virtual-macro'
- builders:
- - shell:
- !include-raw: ./daisy4nfv-download-artifact.sh
- - shell:
- !include-raw: ./daisy-deploy.sh
- - 'clean-workspace'
-
-#####################################
-# parameter macros
-#####################################
-- parameter:
- name: 'daisy4nfv-merge-defaults'
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/cache/$INSTALLER_TYPE
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/daisy4nfv/daisy4nfv-smoke-test.sh b/jjb/daisy4nfv/daisy4nfv-smoke-test.sh
deleted file mode 100755
index bd6eb7ee0..000000000
--- a/jjb/daisy4nfv/daisy4nfv-smoke-test.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-
-echo "--------------------------------------------------------"
-echo "This is the daisy4nfv smoke test job!"
-echo "--------------------------------------------------------"
-
diff --git a/jjb/daisy4nfv/daisy4nfv-upload-artifact.sh b/jjb/daisy4nfv/daisy4nfv-upload-artifact.sh
deleted file mode 100755
index def4f6a75..000000000
--- a/jjb/daisy4nfv/daisy4nfv-upload-artifact.sh
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2016 ZTE Corporation and others.
-# hu.zhijiang@zte.com.cn
-# sun.jing22@zte.com.cn
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o pipefail
-
-# check if we built something
-if [ -f $WORKSPACE/.noupload ]; then
- echo "Nothing new to upload. Exiting."
- /bin/rm -f $WORKSPACE/.noupload
- exit 0
-fi
-
-# source the opnfv.properties to get ARTIFACT_VERSION
-source $WORKSPACE/opnfv.properties
-
-importkey () {
-# clone releng repository
-echo "Cloning releng repository..."
-[ -d releng ] && rm -rf releng
-git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
-# this is where we import the signing key
-if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
- source $WORKSPACE/releng/utils/gpg_import_key.sh
-fi
-}
-
-signbin () {
-gpg2 -vvv --batch --yes --no-tty \
- --default-key opnfv-helpdesk@rt.linuxfoundation.org \
- --passphrase besteffort \
- --detach-sig $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.bin
-
-gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.bin.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin.sig
-echo "BIN signature Upload Complete!"
-}
-
-uploadbin () {
-# log info to console
-echo "Uploading $INSTALLER_TYPE artifact. This could take some time..."
-echo
-
-cd $WORKSPACE
-# upload artifact and additional files to google storage
-gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.bin \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin > gsutil.bin.log 2>&1
-gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso >> gsutil.bin.log 2>&1
-gsutil cp $WORKSPACE/opnfv.properties \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
- gsutil cp $WORKSPACE/opnfv.properties \
- gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
-elif [[ "$JOB_NAME" =~ "merge" ]]; then
- echo "Uploaded Daisy4nfv artifacts for a merged change"
-fi
-
-gsutil -m setmeta \
- -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/latest.properties \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1
-
-gsutil -m setmeta \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin > /dev/null 2>&1
-
-# disabled errexit due to gsutil setmeta complaints
-# BadRequestException: 400 Invalid argument
-# check if we uploaded the file successfully to see if things are fine
-gsutil ls gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin > /dev/null 2>&1
-if [[ $? -ne 0 ]]; then
- echo "Problem while uploading artifact!"
- echo "Check log $WORKSPACE/gsutil.bin.log on the machine where this build is done."
- exit 1
-fi
-
-echo "Done!"
-echo
-echo "--------------------------------------------------------"
-echo
-echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin"
-echo
-echo "--------------------------------------------------------"
-echo
-}
-
-importkey
-signbin
-uploadbin
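
For completeness, a hedged sketch of how a consumer might check the detached signature created by signbin(), assuming the corresponding public key is already in the local keyring; the artifact name is illustrative:

#!/bin/bash
# Hedged sketch (not part of the original job): verify a .bin against its
# detached signature downloaded from the same bucket.
set -o errexit -o nounset

artifact="opnfv-example.bin"                     # illustrative name
curl -L -s -o "$artifact"     "http://$GS_URL/$artifact"
curl -L -s -o "$artifact.sig" "http://$GS_URL/$artifact.sig"
gpg2 --verify "$artifact.sig" "$artifact"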
diff --git a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yaml b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yaml
deleted file mode 100644
index 033beeb6d..000000000
--- a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yaml
+++ /dev/null
@@ -1,225 +0,0 @@
----
-- project:
- name: 'daisy4nfv-verify-jobs'
- project: 'daisy'
- installer: 'daisy'
- ##########################################################
-  # use an alias to keep the existing job names unchanged
- ##########################################################
- alias: 'daisy4nfv'
-
- #####################################
- # branch definitions
- #####################################
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: true
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: true
-
- #####################################
- # patch verification phases
- #####################################
- phase:
- - unit:
- slave-label: 'opnfv-build'
- - build:
- slave-label: 'opnfv-build-centos'
-
- #####################################
- # jobs
- #####################################
- jobs:
- - '{alias}-verify-{stream}'
- - '{alias}-verify-{phase}-{stream}'
-
-#####################################
-# job templates
-#####################################
-- job-template:
- name: '{alias}-verify-{stream}'
- project-type: multijob
- disabled: false
- concurrent: true
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '{alias}-merge-build-.*'
- block-level: 'NODE'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'ci/**'
- - compare-type: ANT
- pattern: 'code/**'
- - compare-type: ANT
- pattern: 'deploy/**'
- - compare-type: ANT
- pattern: 'tests/**'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
- - compare-type: ANT
- pattern: '.gitignore'
- readable-message: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-centos-defaults'
- - '{alias}-verify-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: unit
- condition: SUCCESSFUL
- projects:
- - name: '{alias}-verify-unit-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: '{alias}-verify-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
-
-- job-template:
- name: '{alias}-verify-{phase}-{stream}'
- disabled: '{obj:disabled}'
- concurrent: true
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 6
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '{alias}-merge-build-.*'
- - '{alias}-verify-build-.*'
- - '{installer}-daily-.*'
- block-level: 'NODE'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults'
- - '{alias}-verify-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-verify-{phase}-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'daisy-verify-build-macro'
- builders:
- - shell:
- !include-raw: ./daisy4nfv-basic.sh
- - shell:
- !include-raw: ./daisy4nfv-build.sh
- - 'clean-workspace'
-
-- builder:
- name: daisy-verify-unit-macro
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
- set -o xtrace
- tox -e py27
-
-#####################################
-# parameter macros
-#####################################
-- parameter:
- name: 'daisy4nfv-verify-defaults'
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/cache/$INSTALLER_TYPE
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/doctor/doctor.yaml b/jjb/doctor/doctor.yaml
deleted file mode 100644
index 0d7b781a7..000000000
--- a/jjb/doctor/doctor.yaml
+++ /dev/null
@@ -1,275 +0,0 @@
----
-- project:
- name: doctor
-
- project: '{name}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- docker-tag: 'latest'
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- docker-tag: 'stable'
- disabled: false
-
-  # feature projects' tests are not triggered by functest
-  # the doctor verify PODs need to be deployed with these scenarios
- installer:
- - 'apex':
- scenario: 'os-nosdn-kvm-ha'
- - 'fuel':
- scenario: 'os-nosdn-ovs-ha'
- - 'daisy':
- scenario: 'os-nosdn-ovs_dpdk-noha'
-
- arch:
- - 'x86_64'
- - 'aarch64'
-
- inspector:
- - 'sample'
- - 'congress'
-
- exclude:
- - installer: 'apex'
- arch: 'aarch64'
- - installer: 'daisy'
- arch: 'aarch64'
-    # disabling the following tests due to limitations on the PoD owners' side
-    # these will be enabled again once the PoDs are ready
- - installer: 'fuel'
- arch: 'x86_64'
- - installer: 'daisy'
- arch: 'x86_64'
-
- jobs:
- - 'doctor-verify-{inspector}-{stream}'
- - 'doctor-verify-{installer}-{inspector}-{arch}-{stream}'
-
-- job-template:
- name: 'doctor-verify-{inspector}-{stream}'
- disabled: '{obj:disabled}'
- project-type: 'multijob'
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'doctor-slave-parameter'
- scm:
- - git-scm-gerrit
- triggers:
- - 'doctor-verify':
- project: '{project}'
- branch: '{branch}'
- files: 'doctor_tests/**'
-
- builders:
- - shell: |
- #!/bin/bash
- # we do nothing here as the main stuff will be done
- # in phase jobs
- echo "Triggering phase jobs!"
- - multijob:
- name: 'doctor-verify'
- execution-type: PARALLEL
- projects:
- - name: 'doctor-verify-apex-{inspector}-x86_64-{stream}'
- predefined-parameters: |
- PROJECT=$PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- kill-phase-on: FAILURE
- git-revision: true
- - name: 'doctor-verify-fuel-{inspector}-x86_64-{stream}'
- predefined-parameters: |
- PROJECT=$PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- kill-phase-on: FAILURE
- git-revision: true
- - name: 'doctor-verify-fuel-{inspector}-aarch64-{stream}'
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- kill-phase-on: FAILURE
- git-revision: true
- - name: 'doctor-verify-daisy-{inspector}-x86_64-{stream}'
- predefined-parameters: |
- PROJECT=$PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- kill-phase-on: FAILURE
- git-revision: true
-
-- job-template:
- name: 'doctor-verify-{installer}-{inspector}-{arch}-{stream}'
- disabled: '{obj:disabled}'
- node: 'doctor-{installer}-{arch}'
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 30
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - 'doctor-slave-parameter'
- - 'doctor-parameter':
- docker-tag: '{docker-tag}'
- scenario: '{scenario}'
- - 'doctor-functest-parameter':
- gs-pathname: '{gs-pathname}'
- inspector: '{inspector}'
- scm:
- - git-scm-gerrit
- builders:
- - 'doctor-verify-installer-inspector-builders-macro'
- publishers:
- - 'doctor-verify-publishers-macro'
-
-
-# -------------------------------
-# parameter macros
-# -------------------------------
-- parameter:
- name: 'doctor-parameter'
- parameters:
- - string:
- name: OS_CREDS
- default: /home/jenkins/openstack.creds
- description: 'OpenStack credentials'
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: 'Tag to pull docker image'
- - string:
- name: CLEAN_DOCKER_IMAGES
- default: 'false'
- description: 'Remove downloaded docker images (opnfv/functest:*)'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
- description: 'Scenario to deploy and test'
-
-- parameter:
- name: 'doctor-functest-parameter'
- parameters:
- # functest-suite-parameter
- - string:
- name: FUNCTEST_MODE
- default: 'testcase'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'doctor-notification'
- - string:
- name: TESTCASE_OPTIONS
- # yamllint disable rule:line-length
- default: '-e INSPECTOR_TYPE={inspector} -v $WORKSPACE:/home/opnfv/repos/doctor'
- # yamllint enable rule:line-length
- description: 'Addtional parameters specific to test case(s)'
- # functest-parameter
- - string:
- name: GS_PATHNAME
- default: '{gs-pathname}'
- # yamllint disable rule:line-length
- description: "Version directory where the opnfv documents will be stored in gs repository"
- # yamllint enable rule:line-length
- - string:
- name: FUNCTEST_REPO_DIR
- default: "/home/opnfv/repos/functest"
- description: "Directory where the Functest repository is cloned"
- - string:
- name: PUSH_RESULTS_TO_DB
- default: "true"
- description: "Push the results of all the tests to the resultDB"
- - string:
- name: CI_DEBUG
- default: 'true'
- description: "Show debug output information"
-# -------------------------------
-# builder macros
-# -------------------------------
-
-- builder:
- name: 'doctor-verify-installer-inspector-builders-macro'
- builders:
- - 'clean-workspace-log'
- # yamllint disable rule:line-length
- - shell: |
-        # NOTE: Create a symbolic link so that we can archive files outside
-        #       of $WORKSPACE.
- # NOTE: We are printing all logs under 'tests/' during test run,
- # so this symbolic link should not be in 'tests/'. Otherwise,
- # we'll have the same log twice in jenkins console log.
- ln -sfn $HOME/opnfv/functest/results/{stream} functest_results
-        # NOTE: Get the functest script into $WORKSPACE. This script is
-        #       needed to perform the VM image download in set-functest-env.sh
-        #       from the E release cycle onwards.
- mkdir -p functest/ci
- wget https://git.opnfv.org/functest/plain/functest/ci/download_images.sh -O functest/ci/download_images.sh
- - 'functest-suite-builder'
- - shell: |
- functest_log="$HOME/opnfv/functest/results/{stream}/$FUNCTEST_SUITE_NAME.log"
-        # NOTE: check the test result, as the previous job could return
-        #       0 regardless of the result of the doctor test scenario.
- grep -e 'doctor test successfully' $functest_log || exit 1
- # yamllint enable rule:line-length
-
-# -------------------------------
-# publisher macros
-# -------------------------------
-- publisher:
- name: 'doctor-verify-publishers-macro'
- publishers:
- - archive:
- artifacts: 'doctor_tests/*.log'
- - archive:
- artifacts: 'functest_results/$FUNCTEST_SUITE_NAME.log'
- - email-jenkins-admins-on-failure
-
-
-#####################################
-# trigger macros
-#####################################
-- trigger:
- name: 'doctor-verify'
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '{files}'
- skip-vote:
- successful: true
- failed: true
- unstable: true
- notbuilt: true
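
The result check embedded in the builder macro above greps the functest log because the upstream job can return 0 regardless of the doctor scenario outcome; as a hedged sketch, that check could be factored into a small helper:

#!/bin/bash
# Hedged sketch (not part of the original macro): fail unless the doctor
# functest log contains the success marker grepped for above.
set -o errexit -o nounset

functest_log="$1"    # path to the doctor-notification log
if ! grep -q 'doctor test successfully' "$functest_log"; then
    echo "Doctor test did not report success in $functest_log"
    exit 1
fi
echo "Doctor test passed."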
diff --git a/jjb/domino/domino.yaml b/jjb/domino/domino.yaml
deleted file mode 100644
index e91260463..000000000
--- a/jjb/domino/domino.yaml
+++ /dev/null
@@ -1,57 +0,0 @@
----
-- project:
- name: domino
-
- project: '{name}'
-
- jobs:
- - 'domino-verify-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
-- job-template:
- name: 'domino-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
- builders:
- - shell: |
- #!/bin/bash
- ./tests/run.sh
diff --git a/jjb/dovetail-webportal/dovetail-webportal-project-jobs.yaml b/jjb/dovetail-webportal/dovetail-webportal-project-jobs.yaml
deleted file mode 100644
index 8e09942e3..000000000
--- a/jjb/dovetail-webportal/dovetail-webportal-project-jobs.yaml
+++ /dev/null
@@ -1,101 +0,0 @@
----
-###################################################
-# Non-CI jobs for the Dovetail web portal project
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: dovetail-webportal-project-jobs
-
- project: 'dovetail-webportal'
-
- jobs:
- - 'dovetail-webportal-verify-{stream}'
- - 'dovetail-webportal-merge-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
-
-################################
-# job templates
-################################
-
-- job-template:
- name: 'dovetail-webportal-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- builders:
- - dovetail-unit-tests
-
-- job-template:
- name: 'dovetail-webportal-merge-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- builders:
- - dovetail-webportal-hello-world
-
-################################
-# builders for dovetail project
-###############################
-- builder:
- name: dovetail-webportal-hello-world
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
-
- echo "hello world"
diff --git a/jjb/dovetail/dovetail-artifacts-upload.sh b/jjb/dovetail/dovetail-artifacts-upload.sh
deleted file mode 100755
index f1a9e7222..000000000
--- a/jjb/dovetail/dovetail-artifacts-upload.sh
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o pipefail
-
-echo "dovetail: pull and save the images"
-
-[[ -d ${CACHE_DIR} ]] || mkdir -p ${CACHE_DIR}
-
-cd ${CACHE_DIR}
-sudo docker pull ${DOCKER_REPO_NAME}:${DOCKER_TAG}
-sudo docker save -o ${STORE_FILE_NAME} ${DOCKER_REPO_NAME}:${DOCKER_TAG}
-sudo chmod og+rw ${STORE_FILE_NAME}
-
-OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-GS_UPLOAD_LOCATION="${STORE_URL}/${OPNFV_ARTIFACT_VERSION}"
-(
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_ARTIFACT_URL=$GS_UPLOAD_LOCATION"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
-) > $WORKSPACE/opnfv.properties
-source $WORKSPACE/opnfv.properties
-
-importkey () {
-# clone releng repository
-echo "Cloning releng repository..."
-[ -d releng ] && rm -rf releng
-git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
-# this is where we import the signing key
-if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
- source $WORKSPACE/releng/utils/gpg_import_key.sh
-fi
-}
-
-sign () {
-gpg2 -vvv --batch --yes --no-tty \
- --default-key opnfv-helpdesk@rt.linuxfoundation.org \
- --passphrase besteffort \
- --detach-sig ${CACHE_DIR}/${STORE_FILE_NAME}
-
-gsutil cp ${CACHE_DIR}/${STORE_FILE_NAME}.sig ${STORE_URL}/${STORE_FILE_NAME}.sig
-echo "signature Upload Complete!"
-}
-
-upload () {
-# log info to console
-echo "Uploading ${STORE_FILE_NAME} to artifact. This could take some time..."
-echo
-
-cd $WORKSPACE
-# upload artifact and additional files to google storage
-gsutil cp ${CACHE_DIR}/${STORE_FILE_NAME} \
-${STORE_URL}/${STORE_FILE_NAME} > gsutil.dockerfile.log 2>&1
-gsutil cp $WORKSPACE/opnfv.properties \
-${STORE_URL}/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
-gsutil cp $WORKSPACE/opnfv.properties \
- ${STORE_URL}/latest.properties > gsutil.latest.log 2>&1
-
-gsutil -m setmeta \
- -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- ${STORE_URL}/latest.properties \
- ${STORE_URL}/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1
-
-gsutil -m setmeta \
- -h "Cache-Control:private, max-age=0, no-transform" \
- ${STORE_URL}/${STORE_FILE_NAME} > /dev/null 2>&1
-
-# disabled errexit due to gsutil setmeta complaints
-# BadRequestException: 400 Invalid argument
-# check if we uploaded the file successfully to see if things are fine
-gsutil ls ${STORE_URL}/${STORE_FILE_NAME} > /dev/null 2>&1
-if [[ $? -ne 0 ]]; then
- echo "Problem while uploading artifact!"
- exit 1
-fi
-
-echo "dovetail: uploading Done!"
-echo
-echo "--------------------------------------------------------"
-echo
-}
-
-#importkey
-#sign
-upload
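
As a hedged sketch of the consumer side, a saved image archive can be pulled back from the store and reloaded with docker load; the variables match the job parameters used above, nothing else is assumed:

#!/bin/bash
# Hedged sketch (not part of the original job): fetch a saved docker image
# archive from the artifact store and load it back into the local daemon.
set -o errexit -o nounset -o pipefail

mkdir -p "${CACHE_DIR}"
gsutil cp "${STORE_URL}/${STORE_FILE_NAME}" "${CACHE_DIR}/${STORE_FILE_NAME}"
sudo docker load -i "${CACHE_DIR}/${STORE_FILE_NAME}"
sudo docker images | grep "${DOCKER_REPO_NAME}" || true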
diff --git a/jjb/dovetail/dovetail-artifacts-upload.yaml b/jjb/dovetail/dovetail-artifacts-upload.yaml
deleted file mode 100644
index 9a11c6e26..000000000
--- a/jjb/dovetail/dovetail-artifacts-upload.yaml
+++ /dev/null
@@ -1,115 +0,0 @@
----
-############################################
-# dovetail upload artifacts job
-############################################
-- project:
- name: dovetail-artifacts-upload
-
- project: 'dovetail'
-
- jobs:
- - 'dovetail-{image}-artifacts-upload-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
- image:
- - 'dovetail'
- - 'functest'
- - 'yardstick'
- - 'testapi'
- - 'mongo'
-
-#############################################
-# job template
-#############################################
-
-- job-template:
- name: 'dovetail-{image}-artifacts-upload-{stream}'
-
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - dovetail-parameter:
- gs-pathname: '{gs-pathname}'
- image: '{image}'
- branch: '{branch}'
-
- scm:
- - git-scm
-
- builders:
- - 'dovetail-builder-artifacts-upload'
- - 'dovetail-upload-artifacts-cache-cleanup'
- - 'dovetail-images-cleanup'
-
-####################
-# parameter macros
-####################
-- parameter:
- name: dovetail-parameter
- parameters:
- - string:
- name: CACHE_DIR
- default: $WORKSPACE/cache{gs-pathname}
-      description: "the cache directory used to store downloaded packages"
- - string:
- name: STORE_URL
- default: gs://artifacts.opnfv.org/dovetail{gs-pathname}
- description: "LF artifacts url for storage of dovetail packages"
- - string:
- name: DOCKER_REPO_NAME
- default: opnfv/{image}
- description: "docker repo name"
- - string:
- name: DOCKER_TAG
- default: latest
-      description: "tag of the docker image that will be uploaded to the artifact store"
- - string:
- name: STORE_FILE_NAME
- default: image_{image}_{branch}_$BUILD_ID.docker
- description: "stored file name"
-
-####################################
-# builders for dovetail project
-####################################
-- builder:
- name: dovetail-builder-artifacts-upload
- builders:
- - shell:
- !include-raw: ./dovetail-artifacts-upload.sh
-
-- builder:
- name: dovetail-upload-artifacts-cache-cleanup
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
-
-      echo "Dovetail: cleaning up the cache used to store downloaded packages"
-
- /bin/rm -rf $CACHE_DIR
-
-- builder:
- name: dovetail-images-cleanup
- builders:
- - shell:
- !include-raw: ./dovetail-cleanup.sh
diff --git a/jjb/dovetail/dovetail-ci-jobs.yaml b/jjb/dovetail/dovetail-ci-jobs.yaml
deleted file mode 100644
index a95617ae4..000000000
--- a/jjb/dovetail/dovetail-ci-jobs.yaml
+++ /dev/null
@@ -1,271 +0,0 @@
----
-###################################
-# job configuration for dovetail
-###################################
-- project:
- name: dovetail
-
- project: '{name}'
-
- # --------------------------------------
- # BRANCH ANCHORS
- # --------------------------------------
-  # 1) the stream/branch here represents the SUT (System Under Test) stream/branch
-  # 2) docker-tag is the docker tag of dovetail (only master for now, so 'latest' is used)
-  #    each dovetail stream maps one-to-one to a dovetail docker-tag
-  #    dovetail is not synced with the A/B/C releases
- master: &master
- stream: master
- branch: '{stream}'
- dovetail-branch: '{stream}'
- gs-pathname: ''
- docker-tag: 'latest'
- fraser: &fraser
- stream: fraser
- branch: 'stable/{stream}'
- dovetail-branch: master
- gs-pathname: '/{stream}'
- docker-tag: 'latest'
-
- # ----------------------------------
- # POD, PLATFORM, AND BRANCH MAPPING
- # ----------------------------------
- # CI PODs
- # This section should only contain the SUTs
- # that have been switched using labels for slaves
- # -----------------------------------------------
-  # the PODs and SUTs listed here are just examples that
-  # let the dovetail tool run; there are more ways besides CI to
-  # run the dovetail tool.
-  # PODs and SUTs will be added/adjusted when needed
- pod:
- # fuel CI PODs
- - baremetal:
- slave-label: fuel-baremetal
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - virtual:
- slave-label: fuel-virtual
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - baremetal:
- slave-label: fuel-baremetal
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- - virtual:
- slave-label: fuel-virtual
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- # compass CI PODs
- - baremetal:
- slave-label: compass-baremetal
- SUT: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - virtual:
- slave-label: compass-virtual
- SUT: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - baremetal:
- slave-label: compass-baremetal
- SUT: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- - virtual:
- slave-label: compass-virtual
- SUT: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- # -------------------------------
- # Installers not using labels
- # CI PODs
- # This section should only contain the installers
- # that have not been switched using labels for slaves
- # -------------------------------
- # apex PODs
- - virtual:
- slave-label: apex-virtual-master
- SUT: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - baremetal:
- slave-label: apex-baremetal-master
- SUT: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - virtual:
- slave-label: apex-virtual-master
- SUT: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- - baremetal:
- slave-label: apex-baremetal-master
- SUT: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- # armband CI PODs
- - armband-baremetal:
- slave-label: armband-baremetal
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - armband-virtual:
- slave-label: armband-virtual
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - armband-baremetal:
- slave-label: armband-baremetal
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- - armband-virtual:
- slave-label: armband-virtual
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- # -------------------------------
- # None-CI PODs
- # -------------------------------
- - baremetal-centos:
- slave-label: 'intel-pod8'
- SUT: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - zte-pod1:
- slave-label: zte-pod1
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - zte-pod2:
- slave-label: zte-pod2
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - zte-pod3:
- slave-label: zte-pod3
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - zte-pod1:
- slave-label: zte-pod1
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- - zte-pod3:
- slave-label: zte-pod3
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- - huawei-pod4:
- slave-label: huawei-pod4
- SUT: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- - huawei-pod7:
- slave-label: huawei-pod7
- SUT: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
-
- # -------------------------------
- testsuite:
- - 'default'
- - 'proposed_tests'
-
- jobs:
- - 'dovetail-{SUT}-{pod}-{testsuite}-{stream}'
-
-################################
-# job templates
-################################
-- job-template:
- name: 'dovetail-{SUT}-{pod}-{testsuite}-{stream}'
-
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: 300
- abort: true
- - fix-workspace-permissions
-
- triggers:
- - '{auto-trigger-name}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{dovetail-branch}'
- - '{SUT}-defaults'
- - '{slave-label}-defaults'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: 'Tag to pull dovetail docker image'
- - string:
- name: CI_DEBUG
- default: 'true'
- description: "Show debug output information"
- - string:
- name: TESTSUITE
- default: '{testsuite}'
- description: "dovetail testsuite to run"
- - string:
- name: DOVETAIL_REPO_DIR
- default: "/home/opnfv/dovetail"
- description: "Directory where the dovetail repository is cloned"
- - string:
- name: SUT_BRANCH
- default: '{branch}'
- description: "SUT branch"
-
- scm:
- - git-scm
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - 'dovetail-cleanup'
- - 'dovetail-run'
-
- publishers:
- - archive:
- artifacts: 'results/**/*'
- allow-empty: true
- fingerprint: true
- - email-jenkins-admins-on-failure
-
-# -------------------------
-# builder macros
-# -------------------------
-- builder:
- name: dovetail-run
- builders:
- - shell:
- !include-raw: ./dovetail-run.sh
-
-- builder:
- name: dovetail-cleanup
- builders:
- - shell:
- !include-raw: ./dovetail-cleanup.sh
diff --git a/jjb/dovetail/dovetail-cleanup.sh b/jjb/dovetail/dovetail-cleanup.sh
deleted file mode 100755
index 2d66fe022..000000000
--- a/jjb/dovetail/dovetail-cleanup.sh
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
-# clean up dependent projects' docker images that have no containers and whose image tag is None
-clean_images=(opnfv/functest opnfv/yardstick opnfv/testapi mongo)
-for clean_image in "${clean_images[@]}"; do
- dangling_images=($(docker images -f "dangling=true" | grep ${clean_image} | awk '{print $3}'))
- if [[ -n ${dangling_images} ]]; then
- for image_id in "${dangling_images[@]}"; do
-            echo "Removing image $image_id, which has no containers and whose image tag is None"
- docker rmi $image_id >${redirect}
- done
- fi
-done
-
-echo "Removing dovetail images with tag None and the containers using them ..."
-dangling_images=($(docker images -f "dangling=true" | grep opnfv/dovetail | awk '{print $3}'))
-if [[ -n ${dangling_images} ]]; then
- for image_id in "${dangling_images[@]}"; do
- echo "Removing image $image_id with tag None and its related containers"
- docker ps -a | grep $image_id | awk '{print $1}'| xargs docker rm -f >${redirect}
- docker rmi $image_id >${redirect}
- done
-fi
-
-echo "Cleaning up dovetail docker containers..."
-if [[ ! -z $(docker ps -a | grep opnfv/dovetail) ]]; then
- echo "Removing existing opnfv/dovetail containers..."
- docker ps -a | grep opnfv/dovetail | awk '{print $1}' | xargs docker rm -f >${redirect}
-fi
-
-#echo "Remove dovetail existing images if exist..."
-#if [[ ! -z $(docker images | grep opnfv/dovetail) ]]; then
-# echo "Docker images to remove:"
-# docker images | head -1 && docker images | grep opnfv/dovetail >${redirect}
-# image_tags=($(docker images | grep opnfv/dovetail | awk '{print $2}'))
-# for tag in "${image_tags[@]}"; do
-# echo "Removing docker image opnfv/dovetail:$tag..."
-# docker rmi opnfv/dovetail:$tag >${redirect}
-# done
-#fi
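
As a hedged aside, docker 1.13 and later offer built-in prune commands that cover the dangling-image case handled above, at the cost of being less selective (they prune every repository, not only the ones listed):

#!/bin/bash
# Hedged sketch (not part of the original script): one-shot cleanup of
# stopped containers and all dangling (untagged) images.
set -o errexit

docker container prune -f
docker image prune -f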
diff --git a/jjb/dovetail/dovetail-project-jobs.yaml b/jjb/dovetail/dovetail-project-jobs.yaml
deleted file mode 100644
index 1accffcdb..000000000
--- a/jjb/dovetail/dovetail-project-jobs.yaml
+++ /dev/null
@@ -1,116 +0,0 @@
----
-###################################################
-# Non-ci jobs for Dovetail project
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: dovetail-project-jobs
-
- project: 'dovetail'
-
- jobs:
- - 'dovetail-verify-{stream}'
- - 'dovetail-merge-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - danube:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-################################
-# job templates
-################################
-
-- job-template:
- name: 'dovetail-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- builders:
- - dovetail-unit-tests
-
-- job-template:
- name: 'dovetail-merge-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- builders:
- - dovetail-unit-tests
-
-################################
-# builders for dovetail project
-###############################
-- builder:
- name: dovetail-hello-world
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
-
- echo "hello world"
-
-
-- builder:
- name: dovetail-unit-tests
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
-
- tox
diff --git a/jjb/dovetail/dovetail-run.sh b/jjb/dovetail/dovetail-run.sh
deleted file mode 100755
index 2a9c73c2f..000000000
--- a/jjb/dovetail/dovetail-run.sh
+++ /dev/null
@@ -1,366 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-# The term INSTALLER is used in the community; this is just an example of how to run it.
-# Multiple platforms are supported.
-
-set -e
-[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
-DEPLOY_TYPE=baremetal
-[[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt
-
-DOVETAIL_HOME=${WORKSPACE}/cvp
-[ -d ${DOVETAIL_HOME} ] && sudo rm -rf ${DOVETAIL_HOME}
-
-mkdir -p ${DOVETAIL_HOME}
-
-DOVETAIL_CONFIG=${DOVETAIL_HOME}/pre_config
-mkdir -p ${DOVETAIL_CONFIG}
-
-DOVETAIL_IMAGES=${DOVETAIL_HOME}/images
-mkdir -p ${DOVETAIL_IMAGES}
-
-ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
-
-sshkey=""
-# The path of openrc.sh is defined in fetch_os_creds.sh
-OPENRC=${DOVETAIL_CONFIG}/env_config.sh
-CACERT=${DOVETAIL_CONFIG}/os_cacert
-if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
- instack_mac=$(sudo virsh domiflist undercloud | grep default | \
- grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
- INSTALLER_IP=$(/usr/sbin/arp -e | grep ${instack_mac} | awk {'print $1'})
- sshkey="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
- if [[ -n $(sudo iptables -L FORWARD |grep "REJECT"|grep "reject-with icmp-port-unreachable") ]]; then
- #note: this happens only in opnfv-lf-pod1
- sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
- sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
- fi
-elif [[ ${INSTALLER_TYPE} == 'joid' ]]; then
- # If production lab then creds may be retrieved dynamically
- # creds are on the jumphost, always in the same folder
- sudo cp $LAB_CONFIG/admin-openrc $OPENRC
-    # If it is a dev lab, the credentials may not be the default ones; just provide a path to put them into docker
-    # and replace the default one with the customized one provided by the jenkins config
-fi
-
-# Set iptables rule to allow forwarding return traffic for container
-if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FORWARD | awk 'NR==3' | grep RETURN 2> ${redirect}; then
- sudo iptables -I FORWARD -j RETURN
-fi
-
-releng_repo=${WORKSPACE}/releng
-[ -d ${releng_repo} ] && sudo rm -rf ${releng_repo}
-git clone https://gerrit.opnfv.org/gerrit/releng ${releng_repo} >/dev/null
-
-pharos_repo=${WORKSPACE}/pharos
-[ -d ${pharos_repo} ] && sudo rm -rf ${pharos_repo}
-git clone https://git.opnfv.org/pharos ${pharos_repo} >/dev/null
-
-if [[ ${INSTALLER_TYPE} != 'joid' ]]; then
- echo "SUT branch is $SUT_BRANCH"
- echo "dovetail branch is $BRANCH"
- BRANCH_BACKUP=$BRANCH
- export BRANCH=$SUT_BRANCH
- ${releng_repo}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} -o ${CACERT} >${redirect}
- export BRANCH=$BRANCH_BACKUP
-fi
-
-if [[ -f $OPENRC ]]; then
- echo "INFO: openstack credentials path is $OPENRC"
- if [[ ! "${SUT_BRANCH}" =~ "danube" && ${INSTALLER_TYPE} == "compass" ]]; then
- if [[ -f ${CACERT} ]]; then
- echo "INFO: ${INSTALLER_TYPE} openstack cacert file is ${CACERT}"
- echo "export OS_CACERT=${CACERT}" >> ${OPENRC}
- else
- echo "ERROR: Can't find ${INSTALLER_TYPE} openstack cacert file. Please check if it is existing."
- sudo ls -al ${DOVETAIL_CONFIG}
- exit 1
- fi
- fi
- echo "export EXTERNAL_NETWORK=${EXTERNAL_NETWORK}" >> ${OPENRC}
-else
- echo "ERROR: cannot find file $OPENRC. Please check if it is existing."
- sudo ls -al ${DOVETAIL_CONFIG}
- exit 1
-fi
-
-if [[ ! "${SUT_BRANCH}" =~ "danube" && ${INSTALLER_TYPE} == "fuel" ]]; then
- sed -i "s#/etc/ssl/certs/mcp_os_cacert#${CACERT}#g" ${OPENRC}
-fi
-cat $OPENRC
-
-# These packages are used for parsing YAML files and for decrypting the IPMI user and password.
-sudo pip install shyaml
-sudo yum install -y rubygems || sudo apt-get install -y ruby
-sudo gem install hiera-eyaml
-
-if [[ ! "${SUT_BRANCH}" =~ "danube" && ${INSTALLER_TYPE} == "compass" ]]; then
- compass_repo=${WORKSPACE}/compass4nfv/
- git clone https://github.com/opnfv/compass4nfv.git ${compass_repo} >/dev/null
- scenario_file=${compass_repo}/deploy/conf/hardware_environment/$NODE_NAME/os-nosdn-nofeature-ha.yml
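- # Illustrative layout assumed by the shyaml lookups below (the actual file may differ per pod):
- # hosts:
- #   - ipmiIp: <ipmi address>
- #     ipmiPass: <ipmi password>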
- ipmiIp=$(cat ${scenario_file} | shyaml get-value hosts.0.ipmiIp)
- ipmiPass=$(cat ${scenario_file} | shyaml get-value hosts.0.ipmiPass)
- ipmiUser=root
- jumpserver_ip=$(ifconfig | grep -A 5 docker0 | grep "inet addr" | cut -d ':' -f 2 | cut -d ' ' -f 1)
-
- cat << EOF >${DOVETAIL_CONFIG}/pod.yaml
-nodes:
-- {ip: ${jumpserver_ip}, name: node0, password: root, role: Jumpserver, user: root}
-- {ip: 10.1.0.50, name: node1, password: root, role: controller, user: root,
- ipmi_ip: ${ipmiIp}, ipmi_user: ${ipmiUser}, ipmi_password: ${ipmiPass}}
-- {ip: 10.1.0.51, name: node2, password: root, role: controller, user: root}
-- {ip: 10.1.0.52, name: node3, password: root, role: controller, user: root}
-- {ip: 10.1.0.53, name: node4, password: root, role: compute, user: root}
-- {ip: 10.1.0.54, name: node5, password: root, role: compute, user: root}
-
-EOF
-fi
-
-if [[ ! "${SUT_BRANCH}" =~ "danube" && ${INSTALLER_TYPE} == 'fuel' && ${DEPLOY_TYPE} == 'baremetal' ]]; then
- fuel_ctl_ssh_options="${ssh_options} -i ${SSH_KEY}"
- ssh_user="ubuntu"
- fuel_ctl_ip=$(ssh 2>/dev/null ${fuel_ctl_ssh_options} "${ssh_user}@${INSTALLER_IP}" \
- "sudo salt 'cfg*' pillar.get _param:openstack_control_address --out text| \
- cut -f2 -d' '")
- ipmi_index=$(ssh 2>/dev/null ${fuel_ctl_ssh_options} "${ssh_user}@${INSTALLER_IP}" \
- "sudo salt 'ctl*' network.ip_addrs cidr=${fuel_ctl_ip} --out text | grep ${fuel_ctl_ip} | cut -c 5")
- organization="$(cut -d'-' -f1 <<< "${NODE_NAME}")"
- pod_name="$(cut -d'-' -f2 <<< "${NODE_NAME}")"
- pdf_file=${pharos_repo}/labs/${organization}/${pod_name}.yaml
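- # Illustrative PDF (Pod Descriptor File) layout assumed by the lookups below; actual lab files may differ:
- # nodes:
- #   - remote_management:
- #       address: <ipmi address, optionally with a /prefix>
- #       user: <plain value or eyaml ENC[...] value>
- #       pass: <plain value or eyaml ENC[...] value>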
- ipmiIp=$(cat ${pdf_file} | shyaml get-value nodes.$((ipmi_index-1)).remote_management.address)
- ipmiIp="$(cut -d'/' -f1 <<< "${ipmiIp}")"
- ipmiPass=$(cat ${pdf_file} | shyaml get-value nodes.$((ipmi_index-1)).remote_management.pass)
- ipmiUser=$(cat ${pdf_file} | shyaml get-value nodes.$((ipmi_index-1)).remote_management.user)
- [[ $ipmiUser == ENC* ]] && ipmiUser=$(eyaml decrypt -s ${ipmiUser//[[:blank:]]/})
- [[ $ipmiPass == ENC* ]] && ipmiPass=$(eyaml decrypt -s ${ipmiPass//[[:blank:]]/})
-
- cat << EOF >${DOVETAIL_CONFIG}/pod.yaml
-nodes:
-- {ip: ${INSTALLER_IP}, name: node0, key_filename: /home/opnfv/userconfig/pre_config/id_rsa,
- role: Jumpserver, user: ${ssh_user}}
-- {ip: ${fuel_ctl_ip}, name: node1, key_filename: /home/opnfv/userconfig/pre_config/id_rsa,
- role: controller, user: ${ssh_user}, ipmi_ip: ${ipmiIp}, ipmi_user: ${ipmiUser}, ipmi_password: ${ipmiPass}}
-
-EOF
-fi
-
-if [[ ! -f ${DOVETAIL_CONFIG}/pod.yaml ]]; then
- set +e
-
- sudo pip install virtualenv
-
- cd ${releng_repo}/modules
- sudo virtualenv venv
- source venv/bin/activate
- sudo pip install -e ./ >/dev/null
- sudo pip install netaddr
-
- if [[ ${INSTALLER_TYPE} == compass ]]; then
- options="-u root -p root"
- elif [[ ${INSTALLER_TYPE} == fuel ]]; then
- options="-u root -p r00tme"
- elif [[ ${INSTALLER_TYPE} == apex ]]; then
- options="-u stack -k /root/.ssh/id_rsa"
- elif [[ ${INSTALLER_TYPE} == daisy ]]; then
- options="-u root -p r00tme"
- else
- echo "Don't support to generate pod.yaml on ${INSTALLER_TYPE} currently."
- echo "HA test cases may not run properly."
- fi
-
- cmd="sudo python ${releng_repo}/utils/create_pod_file.py -t ${INSTALLER_TYPE} \
- -i ${INSTALLER_IP} ${options} -f ${DOVETAIL_CONFIG}/pod.yaml \
- -s /home/opnfv/userconfig/pre_config/id_rsa"
- echo ${cmd}
- ${cmd}
-
- deactivate
-
- set -e
-
- cd ${WORKSPACE}
-fi
-
-if [ -f ${DOVETAIL_CONFIG}/pod.yaml ]; then
- sudo chmod 666 ${DOVETAIL_CONFIG}/pod.yaml
- echo "Adapt process info for $INSTALLER_TYPE ..."
- if [ "$INSTALLER_TYPE" == "apex" ]; then
- attack_process='rabbitmq_server'
- else
- attack_process='rabbitmq'
- fi
- cat << EOF >> ${DOVETAIL_CONFIG}/pod.yaml
-process_info:
-- {testcase_name: dovetail.ha.rabbitmq, attack_process: ${attack_process}}
-
-EOF
- echo "file ${DOVETAIL_CONFIG}/pod.yaml:"
- cat ${DOVETAIL_CONFIG}/pod.yaml
-else
- echo "Error: cannot find file ${DOVETAIL_CONFIG}/pod.yaml. Please check if it is existing."
- sudo ls -al ${DOVETAIL_CONFIG}
- echo "HA test cases may not run properly."
-fi
-
-if [ "$INSTALLER_TYPE" == "fuel" ]; then
- if [[ "${SUT_BRANCH}" =~ "danube" ]]; then
- echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
- sshpass -p r00tme sudo scp $ssh_options root@${INSTALLER_IP}:~/.ssh/id_rsa ${DOVETAIL_CONFIG}/id_rsa
- else
- cp ${SSH_KEY} ${DOVETAIL_CONFIG}/id_rsa
- fi
-fi
-
-if [ "$INSTALLER_TYPE" == "apex" ]; then
- echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
- sudo scp $ssh_options stack@${INSTALLER_IP}:~/.ssh/id_rsa ${DOVETAIL_CONFIG}/id_rsa
-fi
-
-if [ "$INSTALLER_TYPE" == "daisy" ]; then
- echo "Fetching id_dsa file from jump_server $INSTALLER_IP..."
- sshpass -p r00tme sudo scp $ssh_options root@${INSTALLER_IP}:~/.ssh/id_dsa ${DOVETAIL_CONFIG}/id_rsa
-fi
-
-
-image_path=${HOME}/opnfv/dovetail/images
-if [[ ! -d ${image_path} ]]; then
- mkdir -p ${image_path}
-fi
-# the sdnvpn test case needs this image to be downloaded before it runs
-ubuntu_image=${image_path}/ubuntu-16.04-server-cloudimg-amd64-disk1.img
-if [[ ! -f ${ubuntu_image} ]]; then
- echo "Download image ubuntu-16.04-server-cloudimg-amd64-disk1.img ..."
- wget -q -nc http://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${image_path}
-fi
-sudo cp ${ubuntu_image} ${DOVETAIL_IMAGES}
-
-# yardstick and bottlenecks need this image to be downloaded before they run
-cirros_image=${image_path}/cirros-0.3.5-x86_64-disk.img
-if [[ ! -f ${cirros_image} ]]; then
- echo "Download image cirros-0.3.5-x86_64-disk.img ..."
- wget -q -nc http://download.cirros-cloud.net/0.3.5/cirros-0.3.5-x86_64-disk.img -P ${image_path}
-fi
-sudo cp ${cirros_image} ${DOVETAIL_IMAGES}
-
-# functest needs this image to be downloaded before it runs
-cirros_image=${image_path}/cirros-0.4.0-x86_64-disk.img
-if [[ ! -f ${cirros_image} ]]; then
- echo "Download image cirros-0.4.0-x86_64-disk.img ..."
- wget -q -nc http://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img -P ${image_path}
-fi
-sudo cp ${cirros_image} ${DOVETAIL_IMAGES}
-
-# the snaps_smoke test case needs this image to be downloaded before it runs
-ubuntu14_image=${image_path}/ubuntu-14.04-server-cloudimg-amd64-disk1.img
-if [[ ! -f ${ubuntu14_image} ]]; then
- echo "Download image ubuntu-14.04-server-cloudimg-amd64-disk1.img ..."
- wget -q -nc https://cloud-images.ubuntu.com/releases/14.04/release/ubuntu-14.04-server-cloudimg-amd64-disk1.img -P ${image_path}
-fi
-sudo cp ${ubuntu14_image} ${DOVETAIL_IMAGES}
-
-# the cloudify_ims test case needs these two images to be downloaded before it runs
-cloudify_image=${image_path}/cloudify-manager-premium-4.0.1.qcow2
-if [[ ! -f ${cloudify_image} ]]; then
- echo "Download image cloudify-manager-premium-4.0.1.qcow2 ..."
- wget -q -nc http://repository.cloudifysource.org/cloudify/4.0.1/sp-release/cloudify-manager-premium-4.0.1.qcow2 -P ${image_path}
-fi
-sudo cp ${cloudify_image} ${DOVETAIL_IMAGES}
-trusty_image=${image_path}/trusty-server-cloudimg-amd64-disk1.img
-if [[ ! -f ${trusty_image} ]]; then
- echo "Download image trusty-server-cloudimg-amd64-disk1.img ..."
- wget -q -nc http://cloud-images.ubuntu.com/trusty/current/trusty-server-cloudimg-amd64-disk1.img -P ${image_path}
-fi
-sudo cp ${trusty_image} ${DOVETAIL_IMAGES}
-
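-# Docker options: run privileged, keep STDIN open (-i) and detach (-d) so the container stays up for later 'docker exec' calls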
-opts="--privileged=true -id"
-
-docker_volume="-v /var/run/docker.sock:/var/run/docker.sock"
-dovetail_home_volume="-v ${DOVETAIL_HOME}:${DOVETAIL_HOME}"
-
-# Pull the image with the correct tag
-DOCKER_REPO='opnfv/dovetail'
-if [ "$(uname -m)" = 'aarch64' ]; then
- DOCKER_REPO="${DOCKER_REPO}_$(uname -m)"
- DOCKER_TAG="latest"
-fi
-
-echo "Dovetail: Pulling image ${DOCKER_REPO}:${DOCKER_TAG}"
-docker pull ${DOCKER_REPO}:$DOCKER_TAG >$redirect
-
-cmd="docker run ${opts} -e DOVETAIL_HOME=${DOVETAIL_HOME} ${docker_volume} ${dovetail_home_volume} \
- ${sshkey} ${DOCKER_REPO}:${DOCKER_TAG} /bin/bash"
-echo "Dovetail: running docker run command: ${cmd}"
-${cmd} >${redirect}
-sleep 5
-container_id=$(docker ps | grep "${DOCKER_REPO}:${DOCKER_TAG}" | awk '{print $1}' | head -1)
-echo "Container ID=${container_id}"
-if [ -z "${container_id}" ]; then
- echo "Cannot find a container ID for ${DOCKER_REPO}. Please check whether the container exists."
- docker ps -a
- exit 1
-fi
-echo "Container Start: docker start ${container_id}"
-docker start ${container_id}
-sleep 5
-docker ps >${redirect}
-if [ $(docker ps | grep "${DOCKER_REPO}:${DOCKER_TAG}" | wc -l) == 0 ]; then
- echo "The container ${DOCKER_REPO} with ID=${container_id} has not been properly started. Exiting..."
- exit 1
-fi
-
-# Modify tempest_conf.yaml file
-tempest_conf_file=${DOVETAIL_CONFIG}/tempest_conf.yaml
-if [[ ${INSTALLER_TYPE} == 'compass' || ${INSTALLER_TYPE} == 'apex' ]]; then
- volume_device='vdb'
-else
- volume_device='vdc'
-fi
-
-cat << EOF >$tempest_conf_file
-
-compute:
- min_compute_nodes: 2
- volume_device_name: ${volume_device}
-
-EOF
-
-echo "${tempest_conf_file}..."
-cat ${tempest_conf_file}
-
-cp_tempest_cmd="docker cp ${DOVETAIL_CONFIG}/tempest_conf.yaml $container_id:/home/opnfv/dovetail/dovetail/userconfig"
-echo "exec command: ${cp_tempest_cmd}"
-$cp_tempest_cmd
-
-if [[ ${TESTSUITE} == 'default' ]]; then
- testsuite=''
-else
- testsuite="--testsuite ${TESTSUITE}"
-fi
-
-run_cmd="dovetail run ${testsuite} -d"
-echo "Container exec command: ${run_cmd}"
-docker exec $container_id ${run_cmd}
-
-sudo cp -r ${DOVETAIL_HOME}/results ./
-# Make sure the results files copied above are owned by the current user
-echo "Changing owner of result files ..."
-CURRENT_USER=${SUDO_USER:-$USER}
-PRIMARY_GROUP=$(id -gn $CURRENT_USER)
-echo "Current user is ${CURRENT_USER}, group is ${PRIMARY_GROUP}"
-sudo chown -R ${CURRENT_USER}:${PRIMARY_GROUP} ./results
-
-# remove unneeded files to save disk space
-sudo rm -rf ./results/workspace
-sudo rm -f ./results/yardstick.img
-sudo rm -f ./results/tmp*
-
-echo "Dovetail: done!"
-
diff --git a/jjb/dovetail/dovetail-weekly-jobs.yaml b/jjb/dovetail/dovetail-weekly-jobs.yaml
deleted file mode 100644
index 5a162bb7f..000000000
--- a/jjb/dovetail/dovetail-weekly-jobs.yaml
+++ /dev/null
@@ -1,139 +0,0 @@
----
-- project:
- name: dovetail-weekly-jobs
- project: dovetail
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- dovetail-branch: '{stream}'
- gs-pathname: ''
- docker-tag: 'latest'
- danube: &danube
- stream: danube
- branch: 'stable/{stream}'
- dovetail-branch: master
- gs-pathname: '/{stream}'
- docker-tag: 'latest'
-
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # Installers using labels
- # CI PODs
- # This section should only contain the installers
- # that have been switched over to using slave labels
- # -------------------------------
- pod:
- # - baremetal:
- # slave-label: apex-baremetal
- # SUT: apex
- # <<: *danube
- - baremetal:
- slave-label: compass-baremetal
- SUT: compass
- <<: *danube
- # - baremetal:
- # slave-label: fuel-baremetal
- # SUT: fuel
- # <<: *danube
- # - baremetal:
- # slave-label: joid-baremetal
- # SUT: joid
- # <<: *danube
-
- testsuite:
- - 'debug'
- - 'compliance_set'
- - 'proposed_tests'
-
- loop:
- - 'weekly':
- job-timeout: 180
-
- jobs:
- - 'dovetail-{SUT}-{pod}-{testsuite}-{loop}-{stream}'
-
-################################
-# job template
-################################
-- job-template:
- name: 'dovetail-{SUT}-{pod}-{testsuite}-{loop}-{stream}'
-
- disabled: true
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: '{job-timeout}'
- abort: true
- - fix-workspace-permissions
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{dovetail-branch}'
- - '{SUT}-defaults'
- - '{slave-label}-defaults'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: 'Tag of the dovetail Docker image to pull'
- - string:
- name: CI_DEBUG
- default: 'true'
- description: "Show debug output information"
- - string:
- name: TESTSUITE
- default: '{testsuite}'
- description: "dovetail testsuite to run"
- - string:
- name: DOVETAIL_REPO_DIR
- default: "/home/opnfv/dovetail"
- description: "Directory where the dovetail repository is cloned"
-
- scm:
- - git-scm
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - 'dovetail-cleanup'
- - 'dovetail-run'
-
- publishers:
- - archive:
- artifacts: 'results/**/*'
- allow-empty: true
- fingerprint: true
- - email-jenkins-admins-on-failure
-
-########################
-# builder macros
-########################
-- builder:
- name: dovetail-run-weekly
- builders:
- - shell:
- !include-raw: ./dovetail-run.sh
-
-- builder:
- name: dovetail-cleanup-weekly
- builders:
- - shell:
- !include-raw: ./dovetail-cleanup.sh
diff --git a/jjb/dpacc/dpacc.yaml b/jjb/dpacc/dpacc.yaml
deleted file mode 100644
index a9a091413..000000000
--- a/jjb/dpacc/dpacc.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- project:
- name: dpacc
-
- project: '{name}'
-
- jobs:
- - '{project}-verify-basic'
diff --git a/jjb/escalator/escalator-basic.sh b/jjb/escalator/escalator-basic.sh
deleted file mode 100755
index 9c739c422..000000000
--- a/jjb/escalator/escalator-basic.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/bash
-echo "--------------------------------------------------------"
-echo "This is escalator basic job!"
-echo "--------------------------------------------------------"
-
diff --git a/jjb/escalator/escalator-build.sh b/jjb/escalator/escalator-build.sh
deleted file mode 100755
index 0e35c27d9..000000000
--- a/jjb/escalator/escalator-build.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-echo "--------------------------------------------------------"
-echo "This is escalator build job!"
-echo "--------------------------------------------------------"
-
-# set OPNFV_ARTIFACT_VERSION
-if [[ "$JOB_NAME" =~ "merge" ]]; then
- echo "Building Escalator TAR for a merged change"
- export OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
-else
- export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-fi
-
-# build output directory
-OUTPUT_DIR=$WORKSPACE/build_output
-mkdir -p $OUTPUT_DIR
-
-# start the build
-cd $WORKSPACE
-./ci/build.sh $OUTPUT_DIR $OPNFV_ARTIFACT_VERSION
-
-# save information regarding the artifact into a file
-(
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
-) > $WORKSPACE/opnfv.properties
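-# the upload job sources this opnfv.properties file to pick up OPNFV_ARTIFACT_VERSION and the other values above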
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/escalator/escalator-upload-artifact.sh b/jjb/escalator/escalator-upload-artifact.sh
deleted file mode 100755
index 781fb3e3e..000000000
--- a/jjb/escalator/escalator-upload-artifact.sh
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/bin/bash
-echo "--------------------------------------------------------"
-echo "This is escalator upload job!"
-echo "--------------------------------------------------------"
-
-set -o pipefail
-
-# check if we built something
-if [ -f $WORKSPACE/.noupload ]; then
- echo "Nothing new to upload. Exiting."
- /bin/rm -f $WORKSPACE/.noupload
- exit 0
-fi
-
-# source the opnfv.properties to get ARTIFACT_VERSION
-source $WORKSPACE/opnfv.properties
-
-importkey () {
-# clone releng repository
-echo "Cloning releng repository..."
-[ -d releng ] && rm -rf releng
-git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
-# this is where we import the signing key
-if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
- source $WORKSPACE/releng/utils/gpg_import_key.sh
-fi
-}
-
-signtar () {
-gpg2 -vvv --batch --yes --no-tty \
- --default-key opnfv-helpdesk@rt.linuxfoundation.org \
- --passphrase besteffort \
- --detach-sig $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.tar.gz
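-# gpg writes the detached signature next to the tarball as opnfv-$OPNFV_ARTIFACT_VERSION.tar.gz.sig; it is uploaded below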
-
-gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.tar.gz.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.tar.gz.sig
-echo "TAR signature Upload Complete!"
-}
-
-uploadtar () {
-# log info to console
-echo "Uploading $INSTALLER_TYPE artifact. This could take some time..."
-echo
-
-cd $WORKSPACE
-# upload artifact and additional files to google storage
-gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.tar.gz \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.tar.gz > gsutil.tar.log 2>&1
-gsutil cp $WORKSPACE/opnfv.properties \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
- gsutil cp $WORKSPACE/opnfv.properties \
- gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
-elif [[ "$JOB_NAME" =~ "merge" ]]; then
- echo "Uploaded Escalator TAR for a merged change"
-fi
-
-gsutil -m setmeta \
- -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/latest.properties \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1
-
-gsutil -m setmeta \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.tar.gz > /dev/null 2>&1
-
-# disabled errexit due to gsutil setmeta complaints
-# BadRequestException: 400 Invalid argument
-# check if we uploaded the file successfully to see if things are fine
-gsutil ls gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.tar.gz > /dev/null 2>&1
-if [[ $? -ne 0 ]]; then
- echo "Problem while uploading artifact!"
- echo "Check log $WORKSPACE/gsutil.bin.log on the machine where this build is done."
- exit 1
-fi
-
-echo "Done!"
-echo
-echo "--------------------------------------------------------"
-echo
-echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.tar.gz"
-echo
-echo "--------------------------------------------------------"
-echo
-}
-
-importkey
-signtar
-uploadtar
diff --git a/jjb/escalator/escalator.yaml b/jjb/escalator/escalator.yaml
deleted file mode 100644
index d203dc113..000000000
--- a/jjb/escalator/escalator.yaml
+++ /dev/null
@@ -1,317 +0,0 @@
----
-- project:
- name: 'escalator'
-
- project: 'escalator'
-
- #####################################
- # branch definitions
- #####################################
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
- #####################################
- # phases
- #####################################
- phase:
- - 'basic':
- slave-label: 'opnfv-build-centos'
- - 'build':
- slave-label: 'opnfv-build-centos'
-
- #####################################
- # jobs
- #####################################
- jobs:
- - 'escalator-verify-{stream}'
- - 'escalator-verify-{phase}-{stream}'
- - 'escalator-merge-{stream}'
- - 'escalator-merge-{phase}-{stream}'
-
-#####################################
-# job templates
-#####################################
-- job-template:
- name: 'escalator-verify-{stream}'
-
- project-type: multijob
-
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- option: 'project'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
- readable-message: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-defaults'
- - 'escalator-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: basic
- condition: SUCCESSFUL
- projects:
- - name: 'escalator-verify-basic-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'escalator-verify-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
-
-- job-template:
- name: 'escalator-verify-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-defaults'
- - 'escalator-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-verify-{phase}-macro'
-
-- job-template:
- name: 'escalator-merge-{stream}'
-
- project-type: multijob
-
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- option: 'project'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
- readable-message: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-defaults'
- - 'escalator-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: basic
- condition: SUCCESSFUL
- projects:
- - name: 'escalator-merge-basic-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'escalator-merge-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
-
-- job-template:
- name: 'escalator-merge-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-defaults'
- - 'escalator-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-merge-{phase}-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'escalator-verify-basic-macro'
- builders:
- - shell:
- !include-raw: ./escalator-basic.sh
-
-- builder:
- name: 'escalator-verify-build-macro'
- builders:
- - shell:
- !include-raw: ./escalator-build.sh
-
-- builder:
- name: 'escalator-merge-basic-macro'
- builders:
- - shell:
- !include-raw: ./escalator-basic.sh
-
-# yamllint disable rule:indentation
-- builder:
- name: 'escalator-merge-build-macro'
- builders:
- - shell:
- !include-raw:
- - ./escalator-build.sh
- - ./escalator-upload-artifact.sh
-# yamllint enable rule:indentation
-
-#####################################
-# parameter macros
-#####################################
-- parameter:
- name: 'escalator-defaults'
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/cache/$INSTALLER_TYPE
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/fuel/fuel-daily-jobs.yaml b/jjb/fuel/fuel-daily-jobs.yaml
deleted file mode 100644
index e636538ef..000000000
--- a/jjb/fuel/fuel-daily-jobs.yaml
+++ /dev/null
@@ -1,766 +0,0 @@
----
-# jenkins job templates for Fuel
-- project:
-
- name: 'fuel'
-
- project: '{name}'
-
- installer: '{name}'
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- disabled: false
- gs-pathname: ''
- fraser: &fraser
- stream: fraser
- branch: 'stable/{stream}'
- disabled: false
- gs-pathname: '/{stream}'
- danube: &danube
- stream: danube
- branch: 'stable/{stream}'
- disabled: false
- gs-pathname: '/{stream}'
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # CI PODs
- # -------------------------------
- pod:
- - baremetal:
- slave-label: fuel-baremetal
- <<: *master
- - virtual:
- slave-label: fuel-virtual
- <<: *master
- - baremetal:
- slave-label: fuel-baremetal
- <<: *fraser
- - virtual:
- slave-label: fuel-virtual
- <<: *fraser
- # -------------------------------
- # Non-CI PODs
- # -------------------------------
- - zte-pod1:
- slave-label: zte-pod1
- <<: *master
- # -------------------------------
- # scenarios
- # -------------------------------
- scenario:
- # HA scenarios
- - 'os-nosdn-nofeature-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-odl-nofeature-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-onos-sfc-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-onos-nofeature-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-ovn-nofeature-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-nosdn-kvm-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-nosdn-ovs-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-nosdn-kvm_ovs-ha':
- auto-trigger-name: 'daily-trigger-disabled'
- - 'os-nosdn-kvm_ovs_dpdk-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-nosdn-kvm_ovs_dpdk_bar-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- # NOHA scenarios
- - 'os-nosdn-nofeature-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-odl-nofeature-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-onos-sfc-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-onos-nofeature-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-ovn-nofeature-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-odl-ovs-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-nosdn-kvm-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-nosdn-ovs-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-nosdn-kvm_ovs_dpdk-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-nosdn-kvm_ovs_dpdk_bar-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- # danube scenario for Dovetail only
- - 'os-odl_l2-bgpvpn-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
-
- exclude:
- - scenario: os-odl_l2-bgpvpn-ha
- stream: master
- - scenario: os-odl_l2-bgpvpn-ha
- stream: fraser
- - scenario: os-ovn-nofeature-noha
- stream: danube
- - scenario: os-ovn-nofeature-ha
- stream: danube
- - scenario: os-odl-ovs-noha
- stream: fraser
-
- jobs:
- - 'fuel-{scenario}-{pod}-daily-{stream}'
- - 'fuel-deploy-{pod}-daily-{stream}'
- - 'fuel-collect-logs-{pod}-daily-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
- name: 'fuel-{scenario}-{pod}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-os-.*?-{pod}-daily-.*'
- - 'fuel-os-.*?-{pod}-weekly-.*'
- - 'fuel-verify-.*'
- block-level: 'NODE'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
- triggers:
- - '{auto-trigger-name}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - trigger-builds:
- - project: 'fuel-deploy-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- same-node: true
- block: true
- - trigger-builds:
- - project: 'functest-fuel-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- same-node: true
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-fuel-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- # 1. here the stream means the SUT stream; the dovetail stream is defined in its own job
- # 2. the proposed_tests testsuite is for new test cases planned to be added to OVP
- # 3. run proposed_tests on Monday, Wednesday and Friday against the ha scenario
- # 4. the default testsuite is for the test cases already added to OVP
- # 5. run the default testsuite on Tuesday against the ha scenario
- # 6. not used for release criteria or compliance, only for debugging dovetail tool bugs
- - conditional-step:
- condition-kind: and
- condition-operands:
- - condition-kind: regex-match
- regex: '.*-ha'
- label: '{scenario}'
- - condition-kind: day-of-week
- day-selector: select-days
- days:
- MON: true
- WED: true
- FRI: true
- use-build-time: true
- steps:
- - trigger-builds:
- - project: 'dovetail-fuel-{pod}-proposed_tests-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - conditional-step:
- condition-kind: and
- condition-operands:
- - condition-kind: regex-match
- regex: '.*-ha'
- label: '{scenario}'
- - condition-kind: day-of-week
- day-selector: select-days
- days:
- TUES: true
- use-build-time: true
- steps:
- - trigger-builds:
- - project: 'dovetail-fuel-{pod}-default-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - conditional-step:
- condition-kind: not
- condition-operand:
- condition-kind: regex-match
- regex: 'danube'
- label: '{stream}'
- steps:
- - trigger-builds:
- - project: 'fuel-collect-logs-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
-
- publishers:
- - email:
- recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
- - email-jenkins-admins-on-failure
-
-- job-template:
- name: 'fuel-deploy-{pod}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-deploy-{pod}-daily-.*'
- - 'fuel-deploy-generic-daily-.*'
- - 'fuel-deploy-{pod}-weekly-.*'
- - 'fuel-deploy-generic-weekly-.*'
- block-level: 'NODE'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - testapi-parameter
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl-nofeature-ha'
-
- scm:
- - git-scm
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - track-begin-timestamp
- - shell:
- !include-raw-escape: ./fuel-download-artifact.sh
- - shell:
- !include-raw-escape: ./fuel-deploy.sh
-
- publishers:
- - email:
- recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
- - email-jenkins-admins-on-failure
- - report-provision-result
-
-- job-template:
- name: 'fuel-collect-logs-{pod}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl-nofeature-ha'
-
- scm:
- - git-scm
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - shell:
- !include-raw-escape: ./fuel-logs.sh
-
- publishers:
- - email-jenkins-admins-on-failure
-
-########################
-# trigger macros
-########################
-# ----------------------------------------------
-# Triggers for job running on fuel-baremetal against master branch
-# ----------------------------------------------
-# HA Scenarios
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: '5 20 * * *'
-- trigger:
- name: 'fuel-os-odl-nofeature-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: '5 2 * * *'
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: '5 5 * * *'
-- trigger:
- name: 'fuel-os-onos-sfc-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: '' # '5 5 * * *'
-- trigger:
- name: 'fuel-os-onos-nofeature-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: '' # '5 8 * * *'
-- trigger:
- name: 'fuel-os-ovn-nofeature-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: '' # '5 17 * * *'
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: '' # '30 12 * * *'
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: '' # '30 8 * * *'
-# NOHA Scenarios
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-nofeature-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-sfc-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-nofeature-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-ovn-nofeature-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-ovs-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-# ----------------------------------------------
-# Triggers for job running on fuel-baremetal against fraser branch
-# ----------------------------------------------
-# HA Scenarios
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: '' # '0 20 * * *'
-- trigger:
- name: 'fuel-os-odl-nofeature-ha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: '' # '0 2 * * *'
-- trigger:
- name: 'fuel-os-onos-sfc-ha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: '' # '0 5 * * *'
-- trigger:
- name: 'fuel-os-ovn-nofeature-ha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-nofeature-ha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: '' # '0 8 * * *'
-- trigger:
- name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: '' # '0 17 * * *'
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: '' # '0 20 * * *'
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: '' # '0 12 * * *'
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: '' # '0 8 * * *'
-# NOHA Scenarios
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-nofeature-noha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-sfc-noha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-nofeature-noha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-ovn-nofeature-noha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm-noha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-noha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: ''
-# ----------------------------------------------
-# Triggers for job running on fuel-virtual against master branch
-# ----------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-nofeature-ha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-sfc-ha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-nofeature-ha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-ovn-nofeature-ha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm-ha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-# NOHA Scenarios
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '5 13 * * *'
-- trigger:
- name: 'fuel-os-odl-nofeature-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '5 18 * * *'
-- trigger:
- name: 'fuel-os-onos-sfc-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '' # '35 20 * * *'
-- trigger:
- name: 'fuel-os-onos-nofeature-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '' # '5 23 * * *'
-- trigger:
- name: 'fuel-os-ovn-nofeature-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '5 23 * * *'
-- trigger:
- name: 'fuel-os-odl-ovs-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '5 2 * * *'
-- trigger:
- name: 'fuel-os-nosdn-kvm-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '' # '35 6 * * *'
-- trigger:
- name: 'fuel-os-nosdn-ovs-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '5 9 * * *'
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '' # '30 16 * * *'
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '' # '30 20 * * *'
-# ----------------------------------------------
-# Triggers for job running on fuel-virtual against fraser branch
-# ----------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-virtual-daily-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-nofeature-ha-virtual-daily-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-sfc-ha-virtual-daily-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-nofeature-ha-virtual-daily-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-ovn-nofeature-ha-virtual-daily-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm-ha-virtual-daily-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-virtual-daily-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-virtual-daily-fraser-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-virtual-daily-fraser-trigger'
- triggers:
- - timed: ''
-# NOHA Scenarios
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-fraser-trigger'
- triggers:
- - timed: '' # '0 13 * * *'
-- trigger:
- name: 'fuel-os-odl-nofeature-noha-virtual-daily-fraser-trigger'
- triggers:
- - timed: '' # '0 18 * * *'
-- trigger:
- name: 'fuel-os-ovn-nofeature-noha-virtual-daily-fraser-trigger'
- triggers:
- - timed: '5 23 * * *'
-- trigger:
- name: 'fuel-os-onos-sfc-noha-virtual-daily-fraser-trigger'
- triggers:
- - timed: '' # '30 20 * * *'
-- trigger:
- name: 'fuel-os-onos-nofeature-noha-virtual-daily-fraser-trigger'
- triggers:
- - timed: '' # '0 23 * * *'
-- trigger:
- name: 'fuel-os-nosdn-kvm-noha-virtual-daily-fraser-trigger'
- triggers:
- - timed: '' # '30 6 * * *'
-- trigger:
- name: 'fuel-os-nosdn-ovs-noha-virtual-daily-fraser-trigger'
- triggers:
- - timed: '' # '0 9 * * *'
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-fraser-trigger'
- triggers:
- - timed: '' # '0 16 * * *'
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-virtual-daily-fraser-trigger'
- triggers:
- - timed: '' # '0 20 * * *'
-# ----------------------------------------------
-# ZTE POD1 Triggers running against master branch
-# ----------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: '0 10 * * *'
-- trigger:
- name: 'fuel-os-odl-nofeature-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-sfc-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-nofeature-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-ovn-nofeature-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-# NOHA Scenarios
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-nofeature-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-sfc-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-onos-nofeature-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-ovn-nofeature-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-ovs-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
diff --git a/jjb/fuel/fuel-deploy.sh b/jjb/fuel/fuel-deploy.sh
deleted file mode 100755
index a4b40f44f..000000000
--- a/jjb/fuel/fuel-deploy.sh
+++ /dev/null
@@ -1,142 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2017 Ericsson AB, Mirantis Inc., Enea Software AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o nounset
-set -o pipefail
-
-export TERM="vt220"
-
-if [[ "$BRANCH" =~ 'danube' ]]; then
- # source the file so we get OPNFV vars
- # shellcheck disable=SC1091
- source latest.properties
-
- # echo info about the artifact that is used during the deployment
- echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
-
- # for Danube deployments (there is no artifact for current master or newer branches)
- # check out the commit that was used for building the downloaded artifact
- # to make sure the ISO and the deployment mechanism use the same versions
- echo "Checking out ${OPNFV_GIT_SHA1}"
- git checkout "${OPNFV_GIT_SHA1}" --quiet
-
- # releng wants us to use nothing else but opnfv.iso for now. We comply.
- ISO_FILE_ARG="-i file://${WORKSPACE}/opnfv.iso"
-fi
-
-# shellcheck disable=SC2153
-if [[ "${JOB_NAME}" =~ 'verify' ]]; then
- # set simplest scenario for virtual deploys to run for verify
- DEPLOY_SCENARIO="os-nosdn-nofeature-noha"
-fi
-
-# set deployment parameters
-export TMPDIR=${HOME}/tmpdir
-# shellcheck disable=SC2153
-LAB_NAME=${NODE_NAME/-*}
-# shellcheck disable=SC2153
-POD_NAME=${NODE_NAME/*-}
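-# e.g. a NODE_NAME of 'zte-pod1' yields LAB_NAME='zte' and POD_NAME='pod1'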
-# Armband might override LAB_CONFIG_URL; all others use the default
-LAB_CONFIG_URL=${LAB_CONFIG_URL:-'ssh://jenkins-ericsson@gerrit.opnfv.org:29418/securedlab'}
-
-# Fuel requires the deploy script to be run with sudo; Armband does not
-SUDO='sudo -E'
-if [ "${PROJECT}" = 'fuel' ]; then
- # Fuel currently supports ericsson, intel, lf and zte labs
- if [[ ! "${LAB_NAME}" =~ (ericsson|intel|lf|zte) ]]; then
- echo "Unsupported/unidentified lab ${LAB_NAME}. Cannot continue!"
- exit 1
- fi
-else
- SUDO=
- # Armband currently supports arm, enea, unh labs
- if [[ ! "${LAB_NAME}" =~ (arm|enea|unh) ]]; then
- echo "Unsupported/unidentified lab ${LAB_NAME}. Cannot continue!"
- exit 1
- fi
-fi
-
-echo "Using configuration for ${LAB_NAME}"
-
-# create TMPDIR if it doesn't exist, change permissions
-mkdir -p "${TMPDIR}"
-chmod a+x "${HOME}" "${TMPDIR}"
-
-cd "${WORKSPACE}" || exit 1
-if [[ "$BRANCH" =~ (danube|euphrates) ]]; then
- if [[ "${LAB_CONFIG_URL}" =~ ^(git|ssh):// ]]; then
- echo "Cloning securedlab repo ${BRANCH}"
- LOCAL_CFG="${TMPDIR}/securedlab"
- rm -rf "${LOCAL_CFG}"
- git clone --quiet --branch "${BRANCH}" "${LAB_CONFIG_URL}" "${LOCAL_CFG}"
- LAB_CONFIG_ARG="-b file://${LOCAL_CFG}"
- BRIDGE_ARG="-B ${BRIDGE:-pxebr}"
- else
- LAB_CONFIG_ARG="-b ${LAB_CONFIG_URL}"
- fi
-fi
-
-# log file name
-FUEL_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}.log.tar.gz"
-
-# construct the command
-DEPLOY_COMMAND="${SUDO} ${WORKSPACE}/ci/deploy.sh ${LAB_CONFIG_ARG:-} \
- -l ${LAB_NAME} -p ${POD_NAME} -s ${DEPLOY_SCENARIO} ${ISO_FILE_ARG:-} \
- -S ${TMPDIR} ${BRIDGE_ARG:-} \
- -L ${WORKSPACE}/${FUEL_LOG_FILENAME}"
-
-# log info to console
-echo "Deployment parameters"
-echo "--------------------------------------------------------"
-echo "Scenario: ${DEPLOY_SCENARIO}"
-echo "Lab: ${LAB_NAME}"
-echo "POD: ${POD_NAME}"
-[[ "${BRANCH}" =~ 'danube' ]] && echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
-echo
-echo "Starting the deployment using ${INSTALLER_TYPE}. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-# start the deployment
-echo "Issuing command"
-echo "${DEPLOY_COMMAND}"
-echo
-
-${DEPLOY_COMMAND}
-exit_code=$?
-
-# Temporary workaround for ericsson-virtual* PODs functest integration
-# See https://jira.opnfv.org/browse/FUNCTEST-985
-# Set iptables rule to allow forwarding return traffic for container
-redirect=/dev/stdout
-if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || \
- ! sudo iptables -L FORWARD | awk 'NR==3' | grep RETURN 2> ${redirect}; then
- sudo iptables -I FORWARD -j RETURN
-fi
-
-echo
-echo "--------------------------------------------------------"
-echo "Deployment is done!"
-
-# upload logs for baremetal deployments
-# work on virtual deployments is still ongoing, so skip those for now
-if [[ "${JOB_NAME}" =~ (baremetal-daily|baremetal-weekly) ]]; then
- echo "Uploading deployment logs"
- gsutil cp "${WORKSPACE}/${FUEL_LOG_FILENAME}" \
- "gs://${GS_URL}/logs/${FUEL_LOG_FILENAME}" > /dev/null 2>&1
- echo "Logs are available at http://${GS_URL}/logs/${FUEL_LOG_FILENAME}"
-fi
-
-if [[ "${exit_code}" -ne 0 ]]; then
- echo "Deployment failed!"
- exit "${exit_code}"
-fi
-
-echo "Deployment is successful!"
-exit 0
diff --git a/jjb/fuel/fuel-download-artifact.sh b/jjb/fuel/fuel-download-artifact.sh
deleted file mode 100755
index 02ca10305..000000000
--- a/jjb/fuel/fuel-download-artifact.sh
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o pipefail
-
-# the Fuel ISO is only downloaded for danube; skip it for master and newer branches
-[[ ! "$BRANCH" =~ (danube) ]] && exit 0
-
-# use the proxy URL instead of the normal URL, otherwise googleusercontent.com may be blocked intermittently
-[[ "$NODE_NAME" =~ (zte) ]] && GS_URL=${GS_BASE_PROXY%%/*}/$GS_URL
-
-# get the latest.properties file in order to get info about the latest artifact
-echo "Downloading http://$GS_URL/latest.properties"
-curl -L -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
-
-# check if we got the file
-[[ -f $WORKSPACE/latest.properties ]] || exit 1
-
-# source the file so we get artifact metadata
-source $WORKSPACE/latest.properties
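-# latest.properties is expected to provide at least OPNFV_ARTIFACT_VERSION, OPNFV_GIT_SHA1
-# and OPNFV_ARTIFACT_URL (mirroring the opnfv.properties written by the build job)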
-
-# echo info about the artifact that is used during the deployment
-OPNFV_ARTIFACT=${OPNFV_ARTIFACT_URL/*\/}
-echo "Using $OPNFV_ARTIFACT for deployment"
-
-# check if we already have the ISO to avoid redownload
-ISOSTORE="/iso_mount/opnfv_ci/${BRANCH##*/}"
-if [[ -f "$ISOSTORE/$OPNFV_ARTIFACT" ]]; then
- echo "ISO exists locally. Skipping the download and using the file from ISO store"
- ln -s $ISOSTORE/$OPNFV_ARTIFACT $WORKSPACE/opnfv.iso
- echo "--------------------------------------------------------"
- echo
- ls -al $WORKSPACE/opnfv.iso
- echo
- echo "--------------------------------------------------------"
- echo "Done!"
- exit 0
-fi
-
-[[ "$NODE_NAME" =~ (zte) ]] && OPNFV_ARTIFACT_URL=${GS_BASE_PROXY%%/*}/$OPNFV_ARTIFACT_URL
-
-# log info to console
-echo "Downloading the $INSTALLER_TYPE artifact using URL http://$OPNFV_ARTIFACT_URL"
-echo "This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-# download the file
-curl -L -s -o $WORKSPACE/opnfv.iso http://$OPNFV_ARTIFACT_URL > gsutil.iso.log 2>&1
-
-# list the file
-ls -al $WORKSPACE/opnfv.iso
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/fuel/fuel-lab-reconfig.sh b/jjb/fuel/fuel-lab-reconfig.sh
deleted file mode 100755
index 4b42a396e..000000000
--- a/jjb/fuel/fuel-lab-reconfig.sh
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# check to see if ucs login info file exists
-if [ -e ~/.ssh/ucs_creds ];then
- source ~/.ssh/ucs_creds
-else
- echo "Unable to find UCS credentials for LF lab reconfiguration...Exiting"
- exit 1
-fi
-
-# clone releng
-echo "Cloning releng repo..."
-if ! GIT_SSL_NO_VERIFY=true git clone https://gerrit.opnfv.org/gerrit/releng; then
- echo "Unable to clone releng repo...Exiting"
- exit 1
-fi
-
-# log info to console
-echo "Starting the lab reconfiguration for $INSTALLER_TYPE..."
-echo "--------------------------------------------------------"
-echo
-
-# create venv
-$WORKSPACE/releng/utils/lab-reconfiguration/create_venv.sh
-
-# disable nounset because 'activate' script contains unbound variable(s)
-set +o nounset
-# enter venv
-source $WORKSPACE/releng/utils/lab-reconfiguration/venv/bin/activate
-# set nounset back again
-set -o nounset
-
-# verify we are in venv
-if [[ ! $(which python | grep venv) ]]; then
- echo "Unable to activate venv...Exiting"
- exit 1
-fi
-
-python $WORKSPACE/releng/utils/lab-reconfiguration/reconfigUcsNet.py -i $ucs_host -u $ucs_user -p $ucs_password -f $WORKSPACE/releng/utils/lab-reconfiguration/fuel.yaml
-
-# wait while the reconfigured node reboots
-sleep 30
-
-# check to see if slave is back up
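-# (retry up to 20 times, roughly 10-15 seconds per attempt, before giving up)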
-ping_counter=0
-ping_flag=0
-while [ "$ping_counter" -lt 20 ]; do
- if [[ $(ping -c 5 172.30.10.72) ]]; then
- ping_flag=1
- break
- fi
- ((ping_counter++))
- sleep 10
-done
-
-if [ "$ping_flag" -eq 1 ]; then
- echo "Slave is pingable, now wait 180 seconds for services to start"
- sleep 180
-else
- echo "Slave did not come back up after reboot: please check lf-pod2"
- exit 1
-fi
-
-set +o nounset
-deactivate
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/fuel/fuel-logs.sh b/jjb/fuel/fuel-logs.sh
deleted file mode 100755
index a7d852685..000000000
--- a/jjb/fuel/fuel-logs.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2017 Ericsson AB, Mirantis Inc., Enea Software AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o nounset
-set -o pipefail
-
-# Fuel requires the deploy script to be run with sudo, Armband does not
-SUDO='sudo -E'
-[ "${PROJECT}" = 'fuel' ] || SUDO=
-
-# Log file name
-FUEL_PM_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}_pm.log.tar.gz"
-
-# Construct the command
-LOG_COMMAND="${SUDO} ${WORKSPACE}/mcp/scripts/log.sh \
- ${WORKSPACE}/${FUEL_PM_LOG_FILENAME}"
-
-# Log info to console
-echo "Collecting post mortem logs ..."
-echo "--------------------------------------------------------"
-echo "${LOG_COMMAND}"
-
-${LOG_COMMAND}
-
-# Upload logs for both baremetal and virtual deployments
-echo "Uploading deployment logs"
-echo "--------------------------------------------------------"
-gsutil cp "${WORKSPACE}/${FUEL_PM_LOG_FILENAME}" \
- "gs://${GS_URL}/logs/${FUEL_PM_LOG_FILENAME}" > /dev/null 2>&1
-echo "Logs are available at http://${GS_URL}/logs/${FUEL_PM_LOG_FILENAME}"
diff --git a/jjb/fuel/fuel-project-jobs.yaml b/jjb/fuel/fuel-project-jobs.yaml
deleted file mode 100644
index fc49f6348..000000000
--- a/jjb/fuel/fuel-project-jobs.yaml
+++ /dev/null
@@ -1,80 +0,0 @@
----
-########################
-# Job configuration for fuel
-########################
-- project:
- name: fuel-project-jobs
-
- project: 'fuel'
-
- installer: 'fuel'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
- jobs:
- - 'fuel-deploy-generic-daily-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
- name: 'fuel-deploy-generic-daily-{stream}'
-
- concurrent: true
-
- disabled: '{obj:disabled}'
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-os-.*?-daily-.*'
- - 'fuel-deploy-generic-daily-.*'
- block-level: 'NODE'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl-nofeature-ha'
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - ericsson-pod2
- - lf-pod2
- - ericsson-pod1
- default-slaves:
- - ericsson-pod2
-
- scm:
- - git-scm
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - POD: $NODE_NAME Scenario: $DEPLOY_SCENARIO'
-
- builders:
- - shell:
- !include-raw-escape: ./fuel-deploy.sh
diff --git a/jjb/fuel/fuel-verify-jobs.yaml b/jjb/fuel/fuel-verify-jobs.yaml
deleted file mode 100644
index f48822d7f..000000000
--- a/jjb/fuel/fuel-verify-jobs.yaml
+++ /dev/null
@@ -1,208 +0,0 @@
----
-- project:
- name: 'fuel-verify-jobs'
-
- project: 'fuel'
-
- installer: 'fuel'
- #####################################
- # branch definitions
- #####################################
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- #####################################
- # patch verification phases
- #####################################
- phase:
- - 'deploy-virtual':
- slave-label: 'fuel-virtual'
- #####################################
- # jobs
- #####################################
- jobs:
- - 'fuel-verify-{stream}'
- - 'fuel-verify-{phase}-{stream}'
-#####################################
-# job templates
-#####################################
-- job-template:
- name: 'fuel-verify-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-os-.*?-virtual-daily-.*'
- - 'fuel-verify-.*'
- block-level: 'NODE'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'ci/**'
- - compare-type: ANT
- pattern: 'mcp/**'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
- readable-message: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'fuel-virtual-defaults':
- installer: '{installer}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'fuel-verify-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
-
- - multijob:
- name: smoke-test
- condition: SUCCESSFUL
- projects:
- # Use Functest job definition from jjb/functest/functest-daily-jobs
- - name: 'functest-fuel-virtual-suite-{stream}'
- current-parameters: false
- predefined-parameters: |
- FUNCTEST_MODE=tier
- FUNCTEST_TIER=healthcheck
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- - name: 'functest-fuel-virtual-suite-{stream}'
- current-parameters: false
- predefined-parameters: |
- FUNCTEST_MODE=testcase
- FUNCTEST_SUITE_NAME=vping_ssh
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
-
-- job-template:
- name: 'fuel-verify-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 2
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-verify-deploy-.*'
- block-level: 'NODE'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-defaults'
- - 'fuel-virtual-defaults':
- installer: '{installer}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-verify-{phase}-macro'
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'fuel-verify-deploy-virtual-macro'
- builders:
- - shell:
- !include-raw: ./fuel-deploy.sh
diff --git a/jjb/fuel/fuel-weekly-jobs.yaml b/jjb/fuel/fuel-weekly-jobs.yaml
deleted file mode 100644
index aca625d2b..000000000
--- a/jjb/fuel/fuel-weekly-jobs.yaml
+++ /dev/null
@@ -1,191 +0,0 @@
----
-# jenkins job templates for Fuel
-- project:
-
- name: fuel-weekly
-
- project: fuel
-
- installer: fuel
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- disabled: false
- gs-pathname: ''
- fraser: &fraser
- stream: fraser
- branch: 'stable/{stream}'
- disabled: false
- gs-pathname: '/{stream}'
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # CI PODs
- # -------------------------------
- pod:
- - baremetal:
- slave-label: fuel-baremetal
- <<: *master
- - virtual:
- slave-label: fuel-virtual
- <<: *master
- - baremetal:
- slave-label: fuel-baremetal
- <<: *fraser
- - virtual:
- slave-label: fuel-virtual
- <<: *fraser
- # -------------------------------
- # scenarios
- # -------------------------------
- scenario:
- # HA scenarios
- - 'os-nosdn-nofeature-ha':
- auto-trigger-name: 'weekly-trigger-disabled'
-
- jobs:
- - 'fuel-{scenario}-{pod}-weekly-{stream}'
- - 'fuel-deploy-{pod}-weekly-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
- name: 'fuel-{scenario}-{pod}-weekly-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-os-.*?-{pod}-daily-.*'
- - 'fuel-os-.*?-{pod}-weekly-.*'
- - 'fuel-verify-.*'
- block-level: 'NODE'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
- triggers:
- - '{auto-trigger-name}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - trigger-builds:
- - project: 'fuel-deploy-{pod}-weekly-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- same-node: true
- block: true
- - trigger-builds:
- - project: 'functest-fuel-{pod}-weekly-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- same-node: true
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
-
- publishers:
- - email:
- recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
- - email-jenkins-admins-on-failure
-
-- job-template:
- name: 'fuel-deploy-{pod}-weekly-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-deploy-{pod}-daily-.*'
- - 'fuel-deploy-generic-daily-.*'
- - 'fuel-deploy-{pod}-weekly-.*'
- - 'fuel-deploy-generic-weekly-.*'
- block-level: 'NODE'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - testapi-parameter
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl-nofeature-ha'
-
- scm:
- - git-scm
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - track-begin-timestamp
- - shell:
- !include-raw-escape: ./fuel-deploy.sh
-
- publishers:
- - email:
- recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
- - email-jenkins-admins-on-failure
- - report-provision-result
-
-########################
-# trigger macros
-########################
-# ----------------------------------------------
-# Triggers for job running on fuel-baremetal against master branch
-# ----------------------------------------------
-# HA Scenarios
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-baremetal-weekly-master-trigger'
- triggers:
- - timed: ''
diff --git a/jjb/functest/functest-alpine.sh b/jjb/functest/functest-alpine.sh
deleted file mode 100755
index 7adfdffc7..000000000
--- a/jjb/functest/functest-alpine.sh
+++ /dev/null
@@ -1,183 +0,0 @@
-#!/bin/bash
-
-set -e
-set +u
-set +o pipefail
-
-REPO=${REPO:-opnfv}
-CI_LOOP=${CI_LOOP:-daily}
-TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results
-ENERGY_RECORDER_API_URL=http://energy.opnfv.fr/resources
-
-check_os_deployment() {
- FUNCTEST_IMAGE=${REPO}/functest-healthcheck:${DOCKER_TAG}
- echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
- docker pull ${FUNCTEST_IMAGE}>/dev/null
- cmd="docker run --rm ${volumes} ${FUNCTEST_IMAGE} check_deployment"
- echo "Checking deployment, CMD: ${cmd}"
- eval ${cmd}
- ret_value=$?
- if [ ${ret_value} != 0 ]; then
- echo "ERROR: Problem while checking OpenStack deployment."
- exit 1
- else
- echo "OpenStack deployment OK."
- fi
-
-}
-
-
-run_tiers() {
- tiers=$1
- cmd_opt="run_tests -r -t all"
- [[ $BUILD_TAG =~ "suite" ]] && cmd_opt="run_tests -t all"
- ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
- echo 0 > ${ret_val_file}
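- # functest-exit.sh reads this file at the end of the job to set the final build result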
-
- for tier in ${tiers[@]}; do
- FUNCTEST_IMAGE=${REPO}/functest-${tier}:${DOCKER_TAG}
- echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
- docker pull ${FUNCTEST_IMAGE}>/dev/null
- cmd="docker run --rm ${envs} ${volumes} ${TESTCASE_OPTIONS} ${FUNCTEST_IMAGE} /bin/bash -c '${cmd_opt}'"
- echo "Running Functest tier '${tier}'. CMD: ${cmd}"
- eval ${cmd}
- ret_value=$?
- if [ ${ret_value} != 0 ]; then
- echo ${ret_value} > ${ret_val_file}
- if [ ${tier} == 'healthcheck' ]; then
- echo "Healthcheck tier failed. Exiting Functest..."
- break
- fi
- fi
- done
-}
-
-run_test() {
- test_name=$1
- cmd_opt="run_tests -t ${test_name}"
- ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
- echo 0 > ${ret_val_file}
- # Determine which Functest image should be used for the test case
- case ${test_name} in
- connection_check|tenantnetwork1|tenantnetwork2|vmready1|vmready2|singlevm1|singlevm2|vping_ssh|vping_userdata|cinder_test|odl|api_check|snaps_health_check)
- FUNCTEST_IMAGE=${REPO}/functest-healthcheck:${DOCKER_TAG} ;;
- tempest_smoke|rally_sanity|refstack_defcore|patrole|shaker|snaps_smoke|neutron_trunk|barbican)
- FUNCTEST_IMAGE=${REPO}/functest-smoke:${DOCKER_TAG} ;;
- tempest_full|rally_full)
- FUNCTEST_IMAGE=${REPO}/functest-components:${DOCKER_TAG} ;;
- cloudify_ims|vyos_vrouter|juju_epc)
- FUNCTEST_IMAGE=${REPO}/functest-vnf:${DOCKER_TAG} ;;
- doctor-notification|bgpvpn|functest-odl-sfc|barometercollectd|fds)
- FUNCTEST_IMAGE=${REPO}/functest-features:${DOCKER_TAG} ;;
- *)
- echo "Unkown test case $test_name"
- exit 1
- ;;
- esac
- echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
- docker pull ${FUNCTEST_IMAGE}>/dev/null
- cmd="docker run --rm ${envs} ${volumes} ${TESTCASE_OPTIONS} ${FUNCTEST_IMAGE} /bin/bash -c '${cmd_opt}'"
- echo "Running Functest test case '${test_name}'. CMD: ${cmd}"
- eval ${cmd}
- ret_value=$?
- if [ ${ret_value} != 0 ]; then
- echo ${ret_value} > ${ret_val_file}
- fi
-}
-
-
-[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-FUNCTEST_DIR=/home/opnfv/functest
-DEPLOY_TYPE=baremetal
-[[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt
-HOST_ARCH=$(uname -m)
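-# use the "latest" Docker tag on master, otherwise the stable branch short name (e.g. stable/fraser -> fraser)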
-DOCKER_TAG=`[[ ${BRANCH##*/} == "master" ]] && echo "latest" || echo ${BRANCH##*/}`
-
-# Prepare OpenStack credentials volume
-rc_file=${HOME}/opnfv-openrc.sh
-
-if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
- rc_file=$LAB_CONFIG/admin-openrc
-elif [[ ${INSTALLER_TYPE} == 'compass' ]]; then
- cacert_file_vol="-v ${HOME}/os_cacert:${FUNCTEST_DIR}/conf/os_cacert"
- echo "export OS_CACERT=${FUNCTEST_DIR}/conf/os_cacert" >> ${HOME}/opnfv-openrc.sh
-elif [[ ${INSTALLER_TYPE} == 'fuel' && ${DEPLOY_TYPE} == 'baremetal' ]]; then
- cacert_file_vol="-v ${HOME}/os_cacert:/etc/ssl/certs/mcp_os_cacert"
-fi
-
-rc_file_vol="-v ${rc_file}:${FUNCTEST_DIR}/conf/env_file"
-
-echo "Functest: Start Docker and prepare environment"
-
-echo "Functest: Download images that will be used by test cases"
-images_dir="${HOME}/opnfv/functest/images"
-download_script=${WORKSPACE}/functest/ci/download_images.sh
-if [[ ! -f ${download_script} ]]; then
- # to support Danube as well
- wget https://git.opnfv.org/functest/plain/functest/ci/download_images.sh -O ${download_script} 2> ${redirect}
-fi
-chmod +x ${download_script}
-${download_script} ${images_dir} ${DEPLOY_SCENARIO} ${HOST_ARCH} 2> ${redirect}
-
-images_vol="-v ${images_dir}:${FUNCTEST_DIR}/images"
-
-dir_result="${HOME}/opnfv/functest/results/${BRANCH##*/}"
-mkdir -p ${dir_result}
-sudo rm -rf ${dir_result}/*
-results_vol="-v ${dir_result}:${FUNCTEST_DIR}/results"
-custom_params=
-test -f ${HOME}/opnfv/functest/custom/params_${DOCKER_TAG} && custom_params=$(cat ${HOME}/opnfv/functest/custom/params_${DOCKER_TAG})
-
-envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
- -e NODE_NAME=${NODE_NAME} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
- -e BUILD_TAG=${BUILD_TAG} -e DEPLOY_TYPE=${DEPLOY_TYPE} -e CI_LOOP=${CI_LOOP} \
- -e TEST_DB_URL=${TEST_DB_URL} -e ENERGY_RECORDER_API_URL=${ENERGY_RECORDER_API_URL}"
-
-ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
-
-
-if [ "${INSTALLER_TYPE}" == 'fuel' ]; then
- COMPUTE_ARCH=$(ssh -l ubuntu ${INSTALLER_IP} -i ${SSH_KEY} ${ssh_options} \
- "sudo salt 'cmp*' grains.get cpuarch --out yaml | awk '{print \$2; exit}'")
- envs="${envs} -e POD_ARCH=${COMPUTE_ARCH}"
-fi
-
-
-if [[ ${INSTALLER_TYPE} == 'compass' && ${DEPLOY_SCENARIO} =~ 'sfc' ]]; then
- ssh_key="/tmp/id_rsa"
- user_config="/tmp/openstack_user_config.yml"
- docker cp compass-tasks:/root/.ssh/id_rsa $ssh_key
- docker cp compass-tasks:/etc/openstack_deploy/openstack_user_config.yml $user_config
- sshkey_vol="-v ${ssh_key}:/root/.ssh/id_rsa"
- userconfig_vol="-v ${user_config}:${user_config}"
- envs="${envs} -e EXTERNAL_NETWORK=${EXTERNAL_NETWORK}"
-fi
-
-
-volumes="${images_vol} ${results_vol} ${sshkey_vol} ${userconfig_vol} ${rc_file_vol} ${cacert_file_vol}"
-
-set +e
-
-if [ ${FUNCTEST_MODE} == 'testcase' ]; then
- echo "FUNCTEST_MODE=testcase, FUNCTEST_SUITE_NAME=${FUNCTEST_SUITE_NAME}"
- run_test ${FUNCTEST_SUITE_NAME}
-elif [ ${FUNCTEST_MODE} == 'tier' ]; then
- echo "FUNCTEST_MODE=tier, FUNCTEST_TIER=${FUNCTEST_TIER}"
- tiers=(${FUNCTEST_TIER})
- run_tiers ${tiers}
-else
- if [ ${DEPLOY_TYPE} == 'baremetal' ] && [ "${HOST_ARCH}" != "aarch64" ]; then
- if [[ ${BRANCH} == "stable/fraser" ]]; then
- tiers=(healthcheck smoke features vnf parser)
- else
- tiers=(healthcheck smoke features vnf)
- fi
- else
- if [[ ${BRANCH} == "stable/fraser" ]]; then
- tiers=(healthcheck smoke features parser)
- else
- tiers=(healthcheck smoke features)
- fi
- fi
- run_tiers ${tiers}
-fi
diff --git a/jjb/functest/functest-cleanup.sh b/jjb/functest/functest-cleanup.sh
deleted file mode 100755
index c21b543a6..000000000
--- a/jjb/functest/functest-cleanup.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-
-[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
-echo "Cleaning up docker containers/images..."
-FUNCTEST_IMAGE=opnfv/functest
-
-# Remove containers along with image opnfv/functest*:<none>
-dangling_images=($(docker images -f "dangling=true" | grep $FUNCTEST_IMAGE | awk '{print $3}'))
-if [[ -n ${dangling_images} ]]; then
- echo " Removing $FUNCTEST_IMAGE:<none> images and their containers..."
- for image_id in "${dangling_images[@]}"; do
- echo " Removing image_id: $image_id and its containers"
- containers=$(docker ps -a | grep $image_id | awk '{print $1}')
- if [[ -n "$containers" ]];then
- docker rm -f $containers >${redirect}
- fi
- docker rmi $image_id >${redirect}
- done
-fi
-
-# Remove previous running containers if exist
-functest_containers=$(docker ps -a | grep $FUNCTEST_IMAGE | awk '{print $1}')
-if [[ -n ${functest_containers} ]]; then
- echo " Removing existing $FUNCTEST_IMAGE containers..."
- docker rm -f $functest_containers >${redirect}
-fi
-
-# Remove existing images if exist
-if [[ $CLEAN_DOCKER_IMAGES == true ]]; then
- functest_image_tags=($(docker images | grep $FUNCTEST_IMAGE | awk '{print $2}'))
- if [[ -n ${functest_image_tags} ]]; then
- echo " Docker images to be removed:" >${redirect}
- (docker images | head -1 && docker images | grep $FUNCTEST_IMAGE) >${redirect}
- for tag in "${functest_image_tags[@]}"; do
- echo " Removing docker image $FUNCTEST_IMAGE:$tag..."
- docker rmi $FUNCTEST_IMAGE:$tag >${redirect}
- done
- fi
-fi
diff --git a/jjb/functest/functest-daily-jobs.yaml b/jjb/functest/functest-daily-jobs.yaml
deleted file mode 100644
index 1239db5af..000000000
--- a/jjb/functest/functest-daily-jobs.yaml
+++ /dev/null
@@ -1,416 +0,0 @@
----
-###################################
-# job configuration for functest
-###################################
-- project:
- name: functest-daily
-
- project: functest
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- fraser: &fraser
- stream: fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # Installers using labels
- # CI PODs
- # This section should only contain the installers
- # that have been switched using labels for slaves
- # -------------------------------
- pod:
- # fuel CI PODs
- - baremetal:
- slave-label: fuel-baremetal
- installer: fuel
- <<: *master
- - virtual:
- slave-label: fuel-virtual
- installer: fuel
- <<: *master
- - baremetal:
- slave-label: fuel-baremetal
- installer: fuel
- <<: *fraser
- - virtual:
- slave-label: fuel-virtual
- installer: fuel
- <<: *fraser
- # joid CI PODs
- - baremetal:
- slave-label: joid-baremetal
- installer: joid
- <<: *master
- - virtual:
- slave-label: joid-virtual
- installer: joid
- <<: *master
- - baremetal:
- slave-label: joid-baremetal
- installer: joid
- <<: *fraser
- - virtual:
- slave-label: joid-virtual
- installer: joid
- <<: *fraser
- # compass CI PODs
- - baremetal:
- slave-label: compass-baremetal
- installer: compass
- <<: *master
- - virtual:
- slave-label: compass-virtual
- installer: compass
- <<: *master
- - baremetal:
- slave-label: compass-baremetal
- installer: compass
- <<: *fraser
- - virtual:
- slave-label: compass-virtual
- installer: compass
- <<: *fraser
- # apex CI PODs
- - virtual:
- slave-label: apex-virtual-master
- installer: apex
- <<: *master
- - baremetal:
- slave-label: apex-baremetal-master
- installer: apex
- <<: *master
- - virtual:
- slave-label: apex-virtual-fraser
- installer: apex
- <<: *fraser
- - baremetal:
- slave-label: apex-baremetal-fraser
- installer: apex
- <<: *fraser
- # armband CI PODs
- - armband-baremetal:
- slave-label: armband-baremetal
- installer: fuel
- <<: *master
- - armband-virtual:
- slave-label: armband-virtual
- installer: fuel
- <<: *master
- - armband-baremetal:
- slave-label: armband-baremetal
- installer: fuel
- <<: *fraser
- - armband-virtual:
- slave-label: armband-virtual
- installer: fuel
- <<: *fraser
- # daisy CI PODs
- - baremetal:
- slave-label: daisy-baremetal
- installer: daisy
- <<: *master
- - virtual:
- slave-label: daisy-virtual
- installer: daisy
- <<: *master
- - baremetal:
- slave-label: daisy-baremetal
- installer: daisy
- <<: *fraser
- - virtual:
- slave-label: daisy-virtual
- installer: daisy
- <<: *fraser
- # netvirt 3rd party ci
- - virtual:
- slave-label: odl-netvirt-virtual
- installer: netvirt
- <<: *master
- # -------------------------------
- # Non-CI PODs
- # -------------------------------
- - orange-pod1:
- slave-label: '{pod}'
- installer: joid
- <<: *master
- - orange-pod5:
- slave-label: '{pod}'
- installer: fuel
- <<: *master
- - orange-pod2:
- slave-label: '{pod}'
- installer: joid
- <<: *master
- - baremetal-centos:
- slave-label: 'intel-pod8'
- installer: compass
- <<: *master
- - nokia-pod1:
- slave-label: '{pod}'
- installer: apex
- <<: *master
- - zte-pod1:
- slave-label: '{pod}'
- installer: fuel
- <<: *master
- - zte-pod1:
- slave-label: '{pod}'
- installer: fuel
- <<: *fraser
- - zte-pod2:
- slave-label: '{pod}'
- installer: daisy
- <<: *master
- - zte-pod2:
- slave-label: '{pod}'
- installer: daisy
- <<: *fraser
- - zte-pod3:
- slave-label: '{pod}'
- installer: daisy
- <<: *master
- - zte-pod3:
- slave-label: '{pod}'
- installer: daisy
- <<: *fraser
- - zte-pod9:
- slave-label: '{pod}'
- installer: daisy
- <<: *master
- - zte-pod9:
- slave-label: '{pod}'
- installer: daisy
- <<: *fraser
- # -------------------------------
- # PODs for verify jobs triggered by each patch upload
- # - ool-virtual1:
- # slave-label: '{pod}'
- # installer: apex
- # <<: *master
- # -------------------------------
-
- testsuite:
- - 'suite':
- job-timeout: 60
- - 'daily':
- job-timeout: 480
-
- jobs:
- - 'functest-{installer}-{pod}-{testsuite}-{stream}'
-
-################################
-# job template
-################################
-- job-template:
- name: 'functest-{installer}-{pod}-{testsuite}-{stream}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER Suite: $FUNCTEST_MODE Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: '{job-timeout}'
- abort: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults'
- - 'functest-{testsuite}-parameter'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-noha'
- - string:
- name: CLEAN_DOCKER_IMAGES
- default: 'false'
- description: 'Remove downloaded docker images (opnfv/functest*:*)'
- - functest-parameter:
- gs-pathname: '{gs-pathname}'
-
- scm:
- - git-scm
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - 'functest-{testsuite}-builder'
-
-
-########################
-# parameter macros
-########################
-- parameter:
- name: functest-daily-parameter
- parameters:
- - string:
- name: FUNCTEST_MODE
- default: 'daily'
- description: "Daily suite name to run"
-
-- parameter:
- name: functest-suite-parameter
- parameters:
- - choice:
- name: FUNCTEST_MODE
- choices:
- - 'tier'
- - 'testcase'
- default: 'tier'
- description: "Test case or Tier to be run"
- - choice:
- name: FUNCTEST_SUITE_NAME
- choices:
- - 'connection_check'
- - 'api_check'
- - 'snaps_health_check'
- - 'vping_ssh'
- - 'vping_userdata'
- - 'cinder_test'
- - 'tempest_smoke'
- - 'rally_sanity'
- - 'refstack_defcore'
- - 'patrole'
- - 'odl'
- - 'snaps_smoke'
- - 'neutron_trunk'
- - 'doctor-notification'
- - 'bgpvpn'
- - 'functest-odl-sfc'
- - 'barometercollectd'
- - 'fds'
- - 'tempest_full'
- - 'rally_full'
- - 'cloudify_ims'
- - 'vyos_vrouter'
- - 'juju_epc'
- - 'parser'
- default: 'connection_check'
- - choice:
- name: FUNCTEST_TIER
- choices:
- - 'healthcheck'
- - 'smoke'
- - 'features'
- - 'components'
- - 'vnf'
- - 'parser'
- default: 'healthcheck'
- - string:
- name: TESTCASE_OPTIONS
- default: ''
- description: 'Additional parameters specific to test case(s)'
-
-- parameter:
- name: functest-parameter
- parameters:
- - string:
- name: GS_PATHNAME
- default: '{gs-pathname}'
- description: "Version directory where the opnfv documents will be stored in gs repository"
- - string:
- name: FUNCTEST_REPO_DIR
- default: "/home/opnfv/repos/functest"
- description: "Directory where the Functest repository is cloned"
- - string:
- name: PUSH_RESULTS_TO_DB
- default: "true"
- description: "Push the results of all the tests to the resultDB"
- - string:
- name: CI_DEBUG
- default: 'false'
- description: "Show debug output information"
- - string:
- name: RC_FILE_PATH
- default: ''
- description: "Path to the OS credentials file if given"
- - string:
- name: REPO
- default: "opnfv"
- description: "Repository name for functest images"
-########################
-# trigger macros
-########################
-- trigger:
- name: 'functest-master'
- triggers:
- - pollscm:
- cron: "H 9 * * *"
-########################
-# builder macros
-########################
-- builder:
- name: functest-daily-builder
- builders:
- - 'functest-cleanup'
- - 'functest-daily'
- - 'functest-store-results'
- - 'functest-exit'
-
-- builder:
- name: functest-suite-builder
- builders:
- - 'functest-cleanup'
- - 'functest-daily'
- - 'functest-store-results'
- - 'functest-exit'
-
-- builder:
- name: functest-daily
- builders:
- # yamllint disable rule:indentation
- - conditional-step:
- condition-kind: regex-match
- regex: "os-.*"
- label: '$DEPLOY_SCENARIO'
- steps:
- - shell:
- !include-raw:
- - ./functest-env-presetup.sh
- - ../../utils/fetch_os_creds.sh
- - ./functest-alpine.sh
- - conditional-step:
- condition-kind: regex-match
- regex: "k8-.*"
- label: '$DEPLOY_SCENARIO'
- steps:
- - shell:
- !include-raw:
- - ../../utils/fetch_k8_conf.sh
- - ./functest-k8.sh
-
-# yamllint enable rule:indentation
-- builder:
- name: functest-store-results
- builders:
- - shell:
- !include-raw: ../../utils/push-test-logs.sh
-
-- builder:
- name: functest-cleanup
- builders:
- - shell:
- !include-raw: ./functest-cleanup.sh
-
-- builder:
- name: functest-exit
- builders:
- - shell:
- !include-raw: ./functest-exit.sh
diff --git a/jjb/functest/functest-docker.yaml b/jjb/functest/functest-docker.yaml
deleted file mode 100644
index 74738f280..000000000
--- a/jjb/functest/functest-docker.yaml
+++ /dev/null
@@ -1,307 +0,0 @@
----
-##############################################
-# job configuration for docker build and push
-##############################################
-- project:
-
- name: functest-docker
-
- project: functest
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - fraser:
- branch: 'stable/{stream}'
- disabled: false
-
- arch_tag:
- - 'amd64':
- slave_label: 'opnfv-build-ubuntu'
- - 'arm64':
- slave_label: 'opnfv-build-ubuntu-arm'
-
- # yamllint disable rule:key-duplicates
- image:
- - 'core'
- - 'tempest'
- - 'healthcheck'
- - 'features'
- - 'components'
- - 'parser'
- - 'smoke'
- - 'vnf'
-
- exclude:
- - stream: 'master'
- image: 'parser'
- - stream: 'fraser'
- image: 'tempest'
-
- # settings for jobs run in multijob phases
- build-job-settings: &build-job-settings
- current-parameters: false
- git-revision: true
- node-parameters: false
- predefined-parameters: |
- PUSH_IMAGE=$PUSH_IMAGE
- COMMIT_ID=$COMMIT_ID
- GERRIT_REFNAME=$GERRIT_REFNAME
- DOCKERFILE=$DOCKERFILE
- kill-phase-on: FAILURE
- abort-all-jobs: false
-
- manifest-job-settings: &manifest-job-settings
- current-parameters: false
- git-revision: true
- node-parameters: false
- predefined-parameters:
- GERRIT_REFNAME=$GERRIT_REFNAME
- kill-phase-on: FAILURE
- abort-all-jobs: false
-
- # yamllint enable rule:key-duplicates
- jobs:
- - "functest-docker-{stream}"
- - "functest-{image}-docker-build-{arch_tag}-{stream}"
- - "functest-{image}-docker-manifest-{stream}"
-
-########################
-# job templates
-########################
-- job-template:
- name: 'functest-docker-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- parameters:
- - functest-job-parameters:
- project: '{project}'
- branch: '{branch}'
- slave_label: 'opnfv-build-ubuntu'
- arch_tag: 'amd64'
-
- properties:
- - throttle:
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm
-
- triggers:
- - pollscm:
- cron: "*/30 * * * *"
- - gerrit-trigger-tag-created:
- project: '{project}'
-
- builders:
- - multijob:
- name: 'build functest-core images'
- execution-type: PARALLEL
- projects:
- - name: 'functest-core-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-core-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - multijob:
- name: 'publish functest-core manifests'
- execution-type: PARALLEL
- projects:
- - name: 'functest-core-docker-manifest-{stream}'
- <<: *manifest-job-settings
- - multijob:
- name: 'build functest-tempest images'
- execution-type: PARALLEL
- projects:
- - name: 'functest-tempest-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-tempest-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - multijob:
- name: 'publish functest-tempest manifests'
- execution-type: PARALLEL
- projects:
- - name: 'functest-tempest-docker-manifest-{stream}'
- <<: *manifest-job-settings
- - multijob:
- name: 'build all functest images'
- condition: SUCCESSFUL
- execution-type: PARALLEL
- projects:
- - name: 'functest-healthcheck-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-healthcheck-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - name: 'functest-features-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-features-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - name: 'functest-components-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-components-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - name: 'functest-parser-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-parser-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - name: 'functest-smoke-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-smoke-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - name: 'functest-vnf-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-vnf-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - multijob:
- name: 'publish all manifests'
- condition: SUCCESSFUL
- execution-type: PARALLEL
- projects:
- - name: 'functest-healthcheck-docker-manifest-{stream}'
- <<: *manifest-job-settings
- - name: 'functest-features-docker-manifest-{stream}'
- <<: *manifest-job-settings
- - name: 'functest-components-docker-manifest-{stream}'
- <<: *manifest-job-settings
- - name: 'functest-parser-docker-manifest-{stream}'
- <<: *manifest-job-settings
- - name: 'functest-smoke-docker-manifest-{stream}'
- <<: *manifest-job-settings
- - name: 'functest-vnf-docker-manifest-{stream}'
- <<: *manifest-job-settings
-
- publishers:
- - 'functest-amd64-recipients'
- - 'functest-arm64-recipients'
-
-- job-template:
- name: 'functest-{image}-docker-build-{arch_tag}-{stream}'
- disabled: '{obj:disabled}'
- parameters:
- - functest-job-parameters:
- project: '{project}'
- branch: '{branch}'
- slave_label: '{slave_label}'
- arch_tag: '{arch_tag}'
- scm:
- - git-scm
- builders:
- - shell: |
- #!/bin/bash -ex
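- # build only the docker/{image} directory for the selected architecture; the other arch's dir list stays empty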
- case "{arch_tag}" in
- "arm64")
- sudo amd64_dirs= arm64_dirs=docker/{image} bash ./build.sh ;;
- *)
- sudo amd64_dirs=docker/{image} arm64_dirs= bash ./build.sh ;;
- esac
- exit $?
-
-- job-template:
- name: 'functest-{image}-docker-manifest-{stream}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build-ubuntu'
- description: 'Slave label on Jenkins'
- - string:
- name: PROJECT
- default: "{project}"
- description: "Project name used to enable job conditions"
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: REPO
- default: "opnfv"
- description: "Repository name for functest images"
-
-
- disabled: '{obj:disabled}'
-
- builders:
- - shell: |
- #!/bin/bash -ex
- case "{stream}" in
- "master")
- tag="latest" ;;
- *)
- tag="{stream}" ;;
- esac
- sudo manifest-tool push from-args \
- --platforms linux/amd64,linux/arm64 \
- --template $REPO/functest-{image}:ARCH-$tag \
- --target $REPO/functest-{image}:$tag
- exit $?
-
-# parameter macro
-- parameter:
- name: functest-job-parameters
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: '{slave_label}'
- description: 'Slave label on Jenkins'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: PUSH_IMAGE
- default: "true"
- description: "To enable/disable pushing the image to Dockerhub."
- - string:
- name: COMMIT_ID
- default: ""
- description: "commit id to make a snapshot docker image"
- - string:
- name: GERRIT_REFNAME
- default: ""
- description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
- - string:
- name: DOCKERFILE
- default: "Dockerfile"
- description: "Dockerfile to use for creating the image."
- - string:
- name: ARCH_TAG
- default: "{arch_tag}"
- description: "If set, this value will be added to the docker image tag as a prefix"
- - string:
- name: PROJECT
- default: "{project}"
- description: "Project name used to enable job conditions"
- - string:
- name: REPO
- default: "opnfv"
- description: "Repository name for functest images"
-
-# publisher macros
-- publisher:
- name: 'functest-arm64-recipients'
- publishers:
- - email:
- recipients: >
- cristina.pauna@enea.com
- alexandru.avadanii@enea.com
- delia.popescu@enea.com
-
-- publisher:
- name: 'functest-amd64-recipients'
- publishers:
- - email:
- recipients: >
- jalausuch@suse.com morgan.richomme@orange.com
- cedric.ollivier@orange.com feng.xiaowei@zte.com.cn
- juha.kosonen@nokia.com wangwulin@huawei.com
- valentin.boucher@kontron.com
diff --git a/jjb/functest/functest-env-presetup.sh b/jjb/functest/functest-env-presetup.sh
deleted file mode 100755
index 510670bc2..000000000
--- a/jjb/functest/functest-env-presetup.sh
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env bash
-set -o errexit
-set -o pipefail
-
-# Fetch INSTALLER_IP for APEX deployments
-if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
- if [ -n "$RC_FILE_PATH" ]; then
- echo "RC_FILE_PATH is set: ${RC_FILE_PATH}...skipping detecting UC IP"
- else
- echo "Gathering IP information for Apex installer VM"
- ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
- if sudo virsh list | grep undercloud; then
- echo "Installer VM detected"
- undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
- grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
- export INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
- export sshkey_vol="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
- sudo scp $ssh_options root@${INSTALLER_IP}:/home/stack/stackrc ${HOME}/stackrc
- export stackrc_vol="-v ${HOME}/stackrc:/home/opnfv/functest/conf/stackrc"
-
- if sudo iptables -C FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
- sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
- fi
- if sudo iptables -C FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
- sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
- fi
- echo "Installer ip is ${INSTALLER_IP}"
- else
- echo "No available installer VM exists and no credentials provided...exiting"
- exit 1
- fi
- fi
-
-elif [[ ${INSTALLER_TYPE} == 'daisy' ]]; then
- echo "Gathering IP information for Daisy installer VM"
- if sudo virsh list | grep daisy; then
- echo "Installer VM detected"
-
- bridge_name=$(sudo virsh domiflist daisy | grep vnet | awk '{print $3}')
- echo "Bridge is $bridge_name"
-
- installer_mac=$(sudo virsh domiflist daisy | grep vnet | \
- grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
- export INSTALLER_IP=$(/usr/sbin/arp -e -i $bridge_name | grep ${installer_mac} | head -n 1 | awk {'print $1'})
-
- echo "Installer ip is ${INSTALLER_IP}"
- else
- echo "No available installer VM exists...exiting"
- exit 1
- fi
-
-elif [[ ${INSTALLER_TYPE} == 'fuel' ]]; then
- if [[ ! "${BRANCH}" =~ "danube" ]]; then
- echo "Map mcp ssh_key"
- export sshkey_vol="-v ${SSH_KEY:-/var/lib/opnfv/mcp.rsa}:/root/.ssh/id_rsa"
- fi
-fi
-
diff --git a/jjb/functest/functest-exit.sh b/jjb/functest/functest-exit.sh
deleted file mode 100644
index 925a3cfbb..000000000
--- a/jjb/functest/functest-exit.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
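-# functest-alpine.sh / functest-k8.sh write the aggregated test return value into this file during the run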
-ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
-if [ ! -f ${ret_val_file} ]; then
- echo "Return value not found!"
- exit -1
-fi
-
-ret_val=`cat ${ret_val_file}`
-
-exit ${ret_val}
diff --git a/jjb/functest/functest-k8.sh b/jjb/functest/functest-k8.sh
deleted file mode 100755
index 419c4e051..000000000
--- a/jjb/functest/functest-k8.sh
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/bin/bash
-
-set -e
-set +u
-set +o pipefail
-
-[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-FUNCTEST_DIR=/home/opnfv/functest
-
-rc_file=${HOME}/k8.creds
-sudo rm -rf $rc_file
-
-if [[ ${INSTALLER_TYPE} == 'compass' ]]; then
- admin_conf_file_vol="-v ${HOME}/admin.conf:/root/.kube/config"
- echo "export KUBERNETES_PROVIDER=local" >> $rc_file
- KUBE_MASTER_URL=$(cat ${HOME}/admin.conf|grep server| awk '{print $2}')
- echo "export KUBE_MASTER_URL=$KUBE_MASTER_URL" >> $rc_file
- KUBE_MASTER_IP=$(echo $KUBE_MASTER_URL|awk -F'https://|:[0-9]+' '$0=$2')
- echo "export KUBE_MASTER_IP=$KUBE_MASTER_IP" >> $rc_file
-elif [[ ${INSTALLER_TYPE} == 'joid' && ${BRANCH} == 'master' ]]; then
- admin_conf_file_vol="-v ${HOME}/joid_config/config:/root/.kube/config"
- rc_file=${HOME}/joid_config/k8config
-else
- echo "Not supported by other installers yet"
- exit 1
-fi
-
-rc_file_vol="-v ${rc_file}:${FUNCTEST_DIR}/conf/env_file"
-
-dir_result="${HOME}/opnfv/functest/results/${BRANCH##*/}"
-mkdir -p ${dir_result}
-sudo rm -rf ${dir_result}/*
-results_vol="-v ${dir_result}:${FUNCTEST_DIR}/results"
-
-volumes="${rc_file_vol} ${results_vol} ${admin_conf_file_vol}"
-
-envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} \
- -e NODE_NAME=${NODE_NAME} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
- -e BUILD_TAG=${BUILD_TAG} -e DEPLOY_TYPE=${DEPLOY_TYPE}"
-
-DOCKER_TAG=`[[ ${BRANCH##*/} == "master" ]] && echo "latest" || echo ${BRANCH##*/}`
-
-set +e
-
-ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
-echo 0 > ${ret_val_file}
-
-FUNCTEST_IMAGES="\
-opnfv/functest-kubernetes-healthcheck:${DOCKER_TAG} \
-opnfv/functest-kubernetes-smoke:${DOCKER_TAG} \
-opnfv/functest-kubernetes-features:${DOCKER_TAG}"
-cmd_opt="run_tests -r -t all"
-
-for image in ${FUNCTEST_IMAGES}; do
- echo "Pulling Docker image ${image} ..."
- docker pull "${image}" >/dev/null
- cmd="docker run --rm ${envs} ${volumes} ${image} /bin/bash -c '${cmd_opt}'"
- echo "Running Functest k8s test cases, CMD: ${cmd}"
- eval ${cmd}
- ret_value=$?
- if [ ${ret_value} != 0 ]; then
- echo ${ret_value} > ${ret_val_file}
- fi
-done
diff --git a/jjb/functest/functest-kubernetes-docker.yaml b/jjb/functest/functest-kubernetes-docker.yaml
deleted file mode 100644
index 2085251c6..000000000
--- a/jjb/functest/functest-kubernetes-docker.yaml
+++ /dev/null
@@ -1,182 +0,0 @@
----
-##############################################
-# job configuration for docker build and push
-##############################################
-- project:
-
- name: functest-kubernetes-docker
-
- project: functest-kubernetes
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - fraser:
- branch: 'stable/{stream}'
- disabled: false
-
- arch_tag:
- - 'amd64':
- slave_label: 'opnfv-build-ubuntu'
-
- # yamllint disable rule:key-duplicates
- image:
- - 'core'
- - 'healthcheck'
- - 'smoke'
- - 'features'
-
- # settings for jobs run in multijob phases
- build-job-settings: &build-job-settings
- current-parameters: false
- git-revision: true
- node-parameters: false
- predefined-parameters: |
- PUSH_IMAGE=$PUSH_IMAGE
- COMMIT_ID=$COMMIT_ID
- GERRIT_REFNAME=$GERRIT_REFNAME
- DOCKERFILE=$DOCKERFILE
- kill-phase-on: FAILURE
- abort-all-jobs: false
-
- manifest-job-settings: &manifest-job-settings
- current-parameters: false
- git-revision: true
- node-parameters: false
- predefined-parameters:
- GERRIT_REFNAME=$GERRIT_REFNAME
- kill-phase-on: FAILURE
- abort-all-jobs: false
-
- # yamllint enable rule:key-duplicates
- jobs:
- - "functest-kubernetes-docker-{stream}"
- - "functest-kubernetes-{image}-docker-build-{arch_tag}-{stream}"
-
-########################
-# job templates
-########################
-- job-template:
- name: 'functest-kubernetes-docker-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- parameters:
- - functest-kubernetes-job-parameters:
- project: '{project}'
- branch: '{branch}'
- slave_label: 'opnfv-build-ubuntu'
- arch_tag: 'amd64'
-
- properties:
- - throttle:
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm
-
- triggers:
- - pollscm:
- cron: "*/30 * * * *"
- - gerrit-trigger-tag-created:
- project: '{project}'
-
- builders:
- - multijob:
- name: 'build functest-kubernetes-core images'
- execution-type: PARALLEL
- projects:
- - name: 'functest-kubernetes-core-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - multijob:
- name: 'build functest-kubernetes-[healthcheck,features] image'
- execution-type: PARALLEL
- projects:
- - name: 'functest-kubernetes-healthcheck-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-kubernetes-features-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - multijob:
- name: 'build functest-kubernetes-smoke image'
- execution-type: PARALLEL
- projects:
- - name: 'functest-kubernetes-smoke-docker-build-amd64-{stream}'
- <<: *build-job-settings
-
- publishers:
- - 'functest-kubernetes-amd64-recipients'
-
-- job-template:
- name: 'functest-kubernetes-{image}-docker-build-{arch_tag}-{stream}'
- disabled: '{obj:disabled}'
- parameters:
- - functest-kubernetes-job-parameters:
- project: '{project}'
- branch: '{branch}'
- slave_label: '{slave_label}'
- arch_tag: '{arch_tag}'
- scm:
- - git-scm
- builders:
- - shell: |
- #!/bin/bash -ex
- sudo amd64_dirs=docker/{image} bash ./build.sh
- exit $?
-
-# parameter macro
-- parameter:
- name: functest-kubernetes-job-parameters
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: '{slave_label}'
- description: 'Slave label on Jenkins'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: PUSH_IMAGE
- default: "true"
- description: "To enable/disable pushing the image to Dockerhub."
- - string:
- name: COMMIT_ID
- default: ""
- description: "commit id to make a snapshot docker image"
- - string:
- name: GERRIT_REFNAME
- default: ""
- description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
- - string:
- name: DOCKERFILE
- default: "Dockerfile"
- description: "Dockerfile to use for creating the image."
- - string:
- name: ARCH_TAG
- default: "{arch_tag}"
- description: "If set, this value will be added to the docker image tag as a prefix"
- - string:
- name: PROJECT
- default: "{project}"
- description: "Project name used to enable job conditions"
- - string:
- name: REPO
- default: "opnfv"
- description: "Repository name for functest images"
-
-- publisher:
- name: 'functest-kubernetes-amd64-recipients'
- publishers:
- - email:
- recipients: >
- jalausuch@suse.com morgan.richomme@orange.com
- cedric.ollivier@orange.com feng.xiaowei@zte.com.cn
- juha.kosonen@nokia.com wangwulin@huawei.com
- valentin.boucher@kontron.com
diff --git a/jjb/functest/functest-kubernetes-pi.yaml b/jjb/functest/functest-kubernetes-pi.yaml
new file mode 100644
index 000000000..efab56c05
--- /dev/null
+++ b/jjb/functest/functest-kubernetes-pi.yaml
@@ -0,0 +1,891 @@
+---
+- functest-kubernetes-pi-containers: &functest-kubernetes-pi-containers
+ name: 'functest-kubernetes-pi-containers'
+ repo: '{repo}'
+ port: '{port}'
+ container: '{container}'
+ tag: '{tag}'
+
+- functest-kubernetes-pi-params: &functest-kubernetes-pi-params
+ name: 'functest-kubernetes-pi-params'
+ tag:
+ - latest:
+ node: v1.30
+ - v1.29:
+ node: v1.29
+ - v1.28:
+ node: v1.28
+ - v1.27:
+ node: v1.27
+ - v1.26:
+ node: v1.26
+ - arm-latest:
+ node: v1.30
+ - arm-v1.29:
+ node: v1.29
+ - arm-v1.28:
+ node: v1.28
+ - arm-v1.27:
+ node: v1.27
+ - arm-v1.26:
+ node: v1.26
+ - arm64-latest:
+ node: v1.30
+ - arm64-v1.29:
+ node: v1.29
+ - arm64-v1.28:
+ node: v1.28
+ - arm64-v1.27:
+ node: v1.27
+ - arm64-v1.26:
+ node: v1.26
+
+- functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-params: &functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-params
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-params'
+ repo: 'ollivier'
+ container: 'functest-kubernetes-healthcheck'
+ port:
+ tag:
+ - latest:
+ node: v1.30
+ - v1.29:
+ node: v1.29
+ - v1.28:
+ node: v1.28
+ - v1.27:
+ node: v1.27
+ - v1.26:
+ node: v1.26
+ - arm-latest:
+ node: v1.30
+ - arm-v1.29:
+ node: v1.29
+ - arm-v1.28:
+ node: v1.28
+ - arm-v1.27:
+ node: v1.27
+ - arm-v1.26:
+ node: v1.26
+ - arm64-latest:
+ node: v1.30
+ - arm64-v1.29:
+ node: v1.29
+ - arm64-v1.28:
+ node: v1.28
+ - arm64-v1.27:
+ node: v1.27
+ - arm64-v1.26:
+ node: v1.26
+
+- functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-params: &functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-params
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-params'
+ repo: 'ollivier'
+ container: 'functest-kubernetes-smoke'
+ port:
+ tag:
+ - latest:
+ node: v1.30
+ - v1.29:
+ node: v1.29
+ - v1.28:
+ node: v1.28
+ - v1.27:
+ node: v1.27
+ - v1.26:
+ node: v1.26
+ - arm-latest:
+ node: v1.30
+ - arm-v1.29:
+ node: v1.29
+ - arm-v1.28:
+ node: v1.28
+ - arm-v1.27:
+ node: v1.27
+ - arm-v1.26:
+ node: v1.26
+ - arm64-latest:
+ node: v1.30
+ - arm64-v1.29:
+ node: v1.29
+ - arm64-v1.28:
+ node: v1.28
+ - arm64-v1.27:
+ node: v1.27
+ - arm64-v1.26:
+ node: v1.26
+
+- functest-kubernetes-pi-ollivier-functest-kubernetes-security-params: &functest-kubernetes-pi-ollivier-functest-kubernetes-security-params
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-params'
+ repo: 'ollivier'
+ container: 'functest-kubernetes-security'
+ port:
+ tag:
+ - latest:
+ node: v1.30
+ - v1.29:
+ node: v1.29
+ - v1.28:
+ node: v1.28
+ - v1.27:
+ node: v1.27
+ - v1.26:
+ node: v1.26
+ - arm-latest:
+ node: v1.30
+ - arm-v1.29:
+ node: v1.29
+ - arm-v1.28:
+ node: v1.28
+ - arm-v1.27:
+ node: v1.27
+ - arm-v1.26:
+ node: v1.26
+ - arm64-latest:
+ node: v1.30
+ - arm64-v1.29:
+ node: v1.29
+ - arm64-v1.28:
+ node: v1.28
+ - arm64-v1.27:
+ node: v1.27
+ - arm64-v1.26:
+ node: v1.26
+
+- functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-params: &functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-params
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-params'
+ repo: 'ollivier'
+ container: 'functest-kubernetes-benchmarking'
+ port:
+ tag:
+ - latest:
+ node: v1.30
+ - v1.29:
+ node: v1.29
+ - v1.28:
+ node: v1.28
+ - v1.27:
+ node: v1.27
+ - v1.26:
+ node: v1.26
+ - arm-latest:
+ node: v1.30
+ - arm-v1.29:
+ node: v1.29
+ - arm-v1.28:
+ node: v1.28
+ - arm-v1.27:
+ node: v1.27
+ - arm-v1.26:
+ node: v1.26
+ - arm64-latest:
+ node: v1.30
+ - arm64-v1.29:
+ node: v1.29
+ - arm64-v1.28:
+ node: v1.28
+ - arm64-v1.27:
+ node: v1.27
+ - arm64-v1.26:
+ node: v1.26
+
+- functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-params: &functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-params
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-params'
+ repo: 'ollivier'
+ container: 'functest-kubernetes-cnf'
+ port:
+ tag:
+ - latest:
+ node: v1.30
+ - v1.29:
+ node: v1.29
+ - v1.28:
+ node: v1.28
+ - v1.27:
+ node: v1.27
+ - v1.26:
+ node: v1.26
+ - arm-latest:
+ node: v1.30
+ - arm-v1.29:
+ node: v1.29
+ - arm-v1.28:
+ node: v1.28
+ - arm-v1.27:
+ node: v1.27
+ - arm-v1.26:
+ node: v1.26
+ - arm64-latest:
+ node: v1.30
+ - arm64-v1.29:
+ node: v1.29
+ - arm64-v1.28:
+ node: v1.28
+ - arm64-v1.27:
+ node: v1.27
+ - arm64-v1.26:
+ node: v1.26
+
+- functest-kubernetes-pi-jobs: &functest-kubernetes-pi-jobs
+ name: 'functest-kubernetes-pi-jobs'
+ current-parameters: true
+
+- parameter:
+ name: functest-kubernetes-pi-node
+ parameters:
+ - label:
+ name: node
+ default: '{node}'
+
+- parameter:
+ name: functest-kubernetes-pi-build_tag
+ parameters:
+ - random-string:
+ name: build_tag
+
+- parameter:
+ name: functest-kubernetes-pi-DEPLOY_SCENARIO
+ parameters:
+ - string:
+ name: DEPLOY_SCENARIO
+ default: k8-nosdn-nofeature-noha
+
+- functest-kubernetes-pi-run-containers: &functest-kubernetes-pi-run-containers
+ name: 'functest-kubernetes-pi-run-containers'
+ <<: *functest-kubernetes-pi-containers
+ privileged: '{privileged}'
+ volumes: '{volumes}'
+ env: '{env}'
+ network: '{network}'
+ uid: '{uid}'
+ gid: '{gid}'
+ published_ports: '{published_ports}'
+
+- builder:
+ name: functest-kubernetes-pi-pull-containers
+ builders:
+ - shell: |
+ set +x
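+ # build the fully-qualified image reference: a bare name, repo/name, or repo:port/name depending on the registry settings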
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ docker pull $image
+
+- builder:
+ name: functest-kubernetes-pi-run-containers
+ builders:
+ - shell: |
+ set +x
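+ # expand the JJB-provided list parameters ("[a, b]") into repeated docker CLI flags (-v / -e / -p)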
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ mkdir -p $WORKSPACE/results
+ chown {uid}:{gid} $WORKSPACE/results
+ docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/functest-kubernetes/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/functest-kubernetes/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -v /home/opnfv/functest-kubernetes/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ $image run_tests -t {test} -p -r
+
+- builder:
+ name: functest-kubernetes-pi-remove-images
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ docker rmi $image || true
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-pull'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-pull-containers:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-pull'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-remove-images:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-rmi'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-rmi'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-pull'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-pull-containers:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-pull'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-remove-images:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-rmi'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-rmi'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-pull'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-pull-containers:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-pull'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-security-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-remove-images:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-rmi'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-security-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-rmi'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-pull'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-pull-containers:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-pull'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-remove-images:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-rmi'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-rmi'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-pull'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-pull-containers:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-pull'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-remove-images:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-rmi'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-rmi'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ - functest-kubernetes-pi-build_tag:
+ build_tag: ''
+ - functest-kubernetes-pi-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-pi-run-containers:
+ <<: *functest-kubernetes-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-healthcheck'
+ test:
+ - k8s_quick
+ - k8s_smoke
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ - functest-kubernetes-pi-build_tag:
+ build_tag: ''
+ - functest-kubernetes-pi-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-pi-run-containers:
+ <<: *functest-kubernetes-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-smoke'
+ test:
+ - xrally_kubernetes
+ - k8s_conformance
+ - k8s_conformance_serial
+ - sig_api_machinery
+ - sig_api_machinery_serial
+ - sig_apps
+ - sig_apps_serial
+ - sig_auth
+ - sig_cluster_lifecycle
+ - sig_instrumentation
+ - sig_network
+ - sig_node
+ - sig_scheduling_serial
+ - sig_storage
+ - sig_storage_serial
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ - functest-kubernetes-pi-build_tag:
+ build_tag: ''
+ - functest-kubernetes-pi-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-pi-run-containers:
+ <<: *functest-kubernetes-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-security-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-security'
+ test:
+ - kube_hunter
+ - kube_bench_master
+ - kube_bench_node
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ - functest-kubernetes-pi-build_tag:
+ build_tag: ''
+ - functest-kubernetes-pi-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-pi-run-containers:
+ <<: *functest-kubernetes-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-benchmarking'
+ test:
+ - xrally_kubernetes_full
+ - netperf
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ - functest-kubernetes-pi-build_tag:
+ build_tag: ''
+ - functest-kubernetes-pi-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-pi-run-containers:
+ <<: *functest-kubernetes-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-cnf'
+ test:
+ - k8s_vims
+ - helm_vims
+ - cnf_testsuite
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-{test}-run'
+
+- builder:
+ name: functest-kubernetes-pi-zip
+ builders:
+ - shell: |
+ set +x
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ mkdir -p $WORKSPACE/results
+ chown {uid}:{gid} $WORKSPACE/results
+ docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/functest-kubernetes \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/functest-kubernetes \
+ -v /home/opnfv/functest-kubernetes/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ $image zip_campaign
+
+- job-template:
+ name: 'functest-kubernetes-pi-{tag}-zip'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ - functest-kubernetes-pi-build_tag:
+ build_tag: ''
+ - functest-kubernetes-pi-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-pi-zip:
+ <<: *functest-kubernetes-pi-run-containers
+
+- project:
+ name: 'functest-kubernetes-pi-zip'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-healthcheck'
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-pi-{tag}-zip'
+
+- job-template:
+ name: 'functest-kubernetes-pi-{tag}-daily'
+ project-type: multijob
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ - functest-kubernetes-pi-build_tag:
+ build_tag: ''
+ - functest-kubernetes-pi-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
+ properties:
+ - build-blocker:
+ blocking-jobs:
+ - ^functest-kubernetes-(pi-)*.*-(daily|docker|review)$
+ # yamllint enable rule:indentation
+ builders:
+ - multijob:
+ name: remove former images
+ projects:
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-rmi'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-rmi'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-rmi'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-rmi'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-rmi'
+ <<: *functest-kubernetes-pi-jobs
+ - multijob:
+ name: pull containers
+ projects:
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-pull'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-pull'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-pull'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-pull'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-pull'
+ <<: *functest-kubernetes-pi-jobs
+ - multijob:
+ name: ollivier/functest-kubernetes-healthcheck:{tag}
+ projects:
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-k8s_quick-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-k8s_smoke-run'
+ <<: *functest-kubernetes-pi-jobs
+ - multijob:
+ name: ollivier/functest-kubernetes-smoke:{tag}
+ execution-type: SEQUENTIALLY
+ projects:
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-xrally_kubernetes-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-k8s_conformance-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-k8s_conformance_serial-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_api_machinery-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_api_machinery_serial-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_apps-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_apps_serial-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_auth-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_cluster_lifecycle-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_instrumentation-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_network-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_node-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_scheduling_serial-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_storage-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_storage_serial-run'
+ <<: *functest-kubernetes-pi-jobs
+ - multijob:
+ name: ollivier/functest-kubernetes-security:{tag}
+ projects:
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-kube_hunter-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-kube_bench_master-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-kube_bench_node-run'
+ <<: *functest-kubernetes-pi-jobs
+ - multijob:
+ name: ollivier/functest-kubernetes-benchmarking:{tag}
+ projects:
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-xrally_kubernetes_full-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-netperf-run'
+ <<: *functest-kubernetes-pi-jobs
+ - multijob:
+ name: ollivier/functest-kubernetes-cnf:{tag}
+ projects:
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-k8s_vims-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-helm_vims-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-cnf_testsuite-run'
+ <<: *functest-kubernetes-pi-jobs
+ - multijob:
+ name: dump all campaign data
+ projects:
+ - name: 'functest-kubernetes-pi-{tag}-zip'
+ <<: *functest-kubernetes-pi-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-pi-daily'
+ <<: *functest-kubernetes-pi-params
+ jobs:
+ - 'functest-kubernetes-pi-{tag}-daily'
+
+- view:
+ name: functest-kubernetes-pi
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-kubernetes-pi-[a-z0-9.-]+-daily$
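
The pull/run/rmi builders in this file (and their opnfv twins in the new
functest-kubernetes.yaml below) all derive the image reference the same way:
a repo of "_" means a bare Docker Hub image, an empty port means
repo/container:tag, and otherwise the registry port is inserted between the
repo and the container. As a minimal sketch only (resolve_image and the
sample arguments are illustrative, not part of the job definitions), the
logic boils down to:

    #!/bin/sh
    # Sketch of the image-name resolution used by the builders above.
    resolve_image() {
        repo=$1; port=$2; container=$3; tag=$4
        if [ "$repo" = "_" ]; then
            echo "$container:$tag"              # bare Docker Hub image
        elif [ "$port" = "None" ] || [ -z "$port" ]; then
            echo "$repo/$container:$tag"        # user or org on Docker Hub
        else
            echo "$repo:$port/$container:$tag"  # private registry with port
        fi
    }
    resolve_image ollivier None functest-kubernetes-healthcheck latest
    # -> ollivier/functest-kubernetes-healthcheck:latest
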
diff --git a/jjb/functest/functest-kubernetes-project-jobs.yaml b/jjb/functest/functest-kubernetes-project-jobs.yaml
deleted file mode 100644
index 7f8dd8d53..000000000
--- a/jjb/functest/functest-kubernetes-project-jobs.yaml
+++ /dev/null
@@ -1,257 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: functest-kubernetes-project-jobs
-
- project: 'functest-kubernetes'
-
- jobs:
- - 'functest-kubernetes-verify-{stream}'
- - 'functest-kubernetes-verify-{phase}-{stream}'
- - 'functest-kubernetes-docs-upload-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
- phase:
- - 'unit-tests-and-docs':
- slave-label: 'opnfv-build-ubuntu'
- - 'build-x86_64':
- slave-label: 'opnfv-build-ubuntu'
- - 'build-aarch64':
- slave-label: 'opnfv-build-ubuntu-arm'
-
-- job-template:
- name: 'functest-kubernetes-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- project-type: 'multijob'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - 'functest-kubernetes-verify-triggers-macro':
- project: '{project}'
- branch: '{branch}'
-
- builders:
- - shell: |
- #!/bin/bash
- # we do nothing here as the main stuff will be done
- # in phase jobs
- echo "Triggering phase jobs!"
- - multijob:
- name: 'functest-kubernetes-build-and-unittest'
- execution-type: PARALLEL
- projects:
- - name: 'functest-kubernetes-verify-unit-tests-and-docs-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- git-revision: true
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: false
- - name: 'functest-kubernetes-verify-build-x86_64-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- ARCH=x86_64
- git-revision: true
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: false
- - name: 'functest-kubernetes-verify-build-aarch64-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- ARCH=aarch64
- git-revision: true
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: false
-
-- job-template:
- name: 'functest-kubernetes-verify-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 30
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-defaults'
-
- scm:
- - git-scm-gerrit
-
- builders:
- - 'functest-kubernetes-verify-{phase}-builders-macro'
-
- publishers:
- - 'functest-kubernetes-verify-{phase}-publishers-macro'
-
-- job-template:
- name: 'functest-kubernetes-docs-upload-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - 'functest-kubernetes-docs-upload-triggers-macro':
- project: '{project}'
- branch: '{branch}'
-
- builders:
- - functest-kubernetes-upload-doc-artifact
-
-################################
-# job triggers
-################################
-- trigger:
- name: 'functest-kubernetes-verify-triggers-macro'
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
-- trigger:
- name: 'functest-kubernetes-docs-upload-triggers-macro'
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-################################
-# job builders
-################################
-
-- builder:
- name: 'functest-kubernetes-verify-unit-tests-and-docs-builders-macro'
- builders:
- - shell: |
- cd $WORKSPACE && tox
-
-- builder:
- name: 'functest-kubernetes-verify-build-x86_64-builders-macro'
- builders:
- - shell: |
- echo "Not activated!"
-
-- builder:
- name: 'functest-kubernetes-verify-build-aarch64-builders-macro'
- builders:
- - shell: |
- echo "Not activated!"
-
-- builder:
- name: 'functest-kubernetes-upload-doc-artifact'
- builders:
- - shell: |
- cd $WORKSPACE && tox -edocs
- wget -O - https://git.opnfv.org/releng/plain/utils/upload-artifact.sh | bash -s "api/_build" "docs"
-################################
-# job publishers
-################################
-- publisher:
- name: 'functest-kubernetes-verify-unit-tests-and-docs-publishers-macro'
- publishers:
- - junit:
- results: nosetests.xml
- - cobertura:
- report-file: "coverage.xml"
- only-stable: "true"
- health-auto-update: "true"
- stability-auto-update: "true"
- zoom-coverage-chart: "true"
- targets:
- - files:
- healthy: 10
- unhealthy: 20
- failing: 30
- - method:
- healthy: 50
- unhealthy: 40
- failing: 30
- - email-jenkins-admins-on-failure
-
-- publisher:
- name: 'functest-kubernetes-verify-build-x86_64-publishers-macro'
- publishers:
- - email-jenkins-admins-on-failure
-
-- publisher:
- name: 'functest-kubernetes-verify-build-aarch64-publishers-macro'
- publishers:
- - email-jenkins-admins-on-failure
diff --git a/jjb/functest/functest-kubernetes.yaml b/jjb/functest/functest-kubernetes.yaml
new file mode 100644
index 000000000..dcab411cd
--- /dev/null
+++ b/jjb/functest/functest-kubernetes.yaml
@@ -0,0 +1,2198 @@
+---
+- functest-kubernetes-containers: &functest-kubernetes-containers
+ name: 'functest-kubernetes-containers'
+ repo: '{repo}'
+ port: '{port}'
+ container: '{container}'
+ tag: '{tag}'
+
+- functest-kubernetes-params: &functest-kubernetes-params
+ name: 'functest-kubernetes-params'
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: v1.30
+ dependency: 3.16
+ - v1.29:
+ from:
+ build_args:
+ branch: stable/v1.29
+ node: v1.29
+ dependency: 3.16
+ - v1.28:
+ from:
+ build_args:
+ branch: stable/v1.28
+ node: v1.28
+ dependency: 3.16
+ - v1.27:
+ from:
+ build_args:
+ branch: stable/v1.27
+ node: v1.27
+ dependency: 3.16
+ - v1.26:
+ from:
+ build_args:
+ branch: stable/v1.26
+ node: v1.26
+ dependency: 3.16
+
+- functest-kubernetes-opnfv-functest-kubernetes-healthcheck-params: &functest-kubernetes-opnfv-functest-kubernetes-healthcheck-params
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-params'
+ repo: 'opnfv'
+ container: 'functest-kubernetes-healthcheck'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: v1.30
+ dependency: 3.16
+ - v1.29:
+ from:
+ build_args:
+ branch: stable/v1.29
+ node: v1.29
+ dependency: 3.16
+ - v1.28:
+ from:
+ build_args:
+ branch: stable/v1.28
+ node: v1.28
+ dependency: 3.16
+ - v1.27:
+ from:
+ build_args:
+ branch: stable/v1.27
+ node: v1.27
+ dependency: 3.16
+ - v1.26:
+ from:
+ build_args:
+ branch: stable/v1.26
+ node: v1.26
+ dependency: 3.16
+
+- functest-kubernetes-opnfv-functest-kubernetes-smoke-params: &functest-kubernetes-opnfv-functest-kubernetes-smoke-params
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-params'
+ repo: 'opnfv'
+ container: 'functest-kubernetes-smoke'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: v1.30
+ dependency: 3.16
+ - v1.29:
+ from:
+ build_args:
+ branch: stable/v1.29
+ node: v1.29
+ dependency: 3.16
+ - v1.28:
+ from:
+ build_args:
+ branch: stable/v1.28
+ node: v1.28
+ dependency: 3.16
+ - v1.27:
+ from:
+ build_args:
+ branch: stable/v1.27
+ node: v1.27
+ dependency: 3.16
+ - v1.26:
+ from:
+ build_args:
+ branch: stable/v1.26
+ node: v1.26
+ dependency: 3.16
+
+- functest-kubernetes-opnfv-functest-kubernetes-security-params: &functest-kubernetes-opnfv-functest-kubernetes-security-params
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-params'
+ repo: 'opnfv'
+ container: 'functest-kubernetes-security'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: v1.30
+ dependency: 3.16
+ - v1.29:
+ from:
+ build_args:
+ branch: stable/v1.29
+ node: v1.29
+ dependency: 3.16
+ - v1.28:
+ from:
+ build_args:
+ branch: stable/v1.28
+ node: v1.28
+ dependency: 3.16
+ - v1.27:
+ from:
+ build_args:
+ branch: stable/v1.27
+ node: v1.27
+ dependency: 3.16
+ - v1.26:
+ from:
+ build_args:
+ branch: stable/v1.26
+ node: v1.26
+ dependency: 3.16
+
+- functest-kubernetes-opnfv-functest-kubernetes-benchmarking-params: &functest-kubernetes-opnfv-functest-kubernetes-benchmarking-params
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-params'
+ repo: 'opnfv'
+ container: 'functest-kubernetes-benchmarking'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: v1.30
+ dependency: 3.16
+ - v1.29:
+ from:
+ build_args:
+ branch: stable/v1.29
+ node: v1.29
+ dependency: 3.16
+ - v1.28:
+ from:
+ build_args:
+ branch: stable/v1.28
+ node: v1.28
+ dependency: 3.16
+ - v1.27:
+ from:
+ build_args:
+ branch: stable/v1.27
+ node: v1.27
+ dependency: 3.16
+ - v1.26:
+ from:
+ build_args:
+ branch: stable/v1.26
+ node: v1.26
+ dependency: 3.16
+
+- functest-kubernetes-opnfv-functest-kubernetes-cnf-params: &functest-kubernetes-opnfv-functest-kubernetes-cnf-params
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-params'
+ repo: 'opnfv'
+ container: 'functest-kubernetes-cnf'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: v1.30
+ dependency: 3.16
+ - v1.29:
+ from:
+ build_args:
+ branch: stable/v1.29
+ node: v1.29
+ dependency: 3.16
+ - v1.28:
+ from:
+ build_args:
+ branch: stable/v1.28
+ node: v1.28
+ dependency: 3.16
+ - v1.27:
+ from:
+ build_args:
+ branch: stable/v1.27
+ node: v1.27
+ dependency: 3.16
+ - v1.26:
+ from:
+ build_args:
+ branch: stable/v1.26
+ node: v1.26
+ dependency: 3.16
+
+- functest-kubernetes-jobs: &functest-kubernetes-jobs
+ name: 'functest-kubernetes-jobs'
+ current-parameters: true
+
+- parameter:
+ name: functest-kubernetes-node
+ parameters:
+ - label:
+ name: node
+ default: '{node}'
+
+- parameter:
+ name: functest-kubernetes-build_tag
+ parameters:
+ - random-string:
+ name: build_tag
+
+- parameter:
+ name: functest-kubernetes-DEPLOY_SCENARIO
+ parameters:
+ - string:
+ name: DEPLOY_SCENARIO
+ default: k8-nosdn-nofeature-noha
+
+- functest-kubernetes-run-containers: &functest-kubernetes-run-containers
+ name: 'functest-kubernetes-run-containers'
+ <<: *functest-kubernetes-containers
+ privileged: '{privileged}'
+ volumes: '{volumes}'
+ env: '{env}'
+ network: '{network}'
+ uid: '{uid}'
+ gid: '{gid}'
+ published_ports: '{published_ports}'
+
+- builder:
+ name: functest-kubernetes-pull-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ docker pull $image
+
+- builder:
+ name: functest-kubernetes-run-containers
+ builders:
+ - shell: |
+ set +x
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ mkdir -p $WORKSPACE/results
+ chown {uid}:{gid} $WORKSPACE/results
+ docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/functest-kubernetes/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/functest-kubernetes/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -v /home/opnfv/functest-kubernetes/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ $image run_tests -t {test} -p -r
+
+- builder:
+ name: functest-kubernetes-remove-images
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ docker rmi $image || true
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-pull'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pull-containers:
+ <<: *functest-kubernetes-containers
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-pull'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-healthcheck-params
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-remove-images:
+ <<: *functest-kubernetes-containers
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-rmi'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-healthcheck-params
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-rmi'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-pull'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pull-containers:
+ <<: *functest-kubernetes-containers
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-pull'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-smoke-params
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-remove-images:
+ <<: *functest-kubernetes-containers
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-rmi'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-smoke-params
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-rmi'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-pull'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pull-containers:
+ <<: *functest-kubernetes-containers
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-pull'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-security-params
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-remove-images:
+ <<: *functest-kubernetes-containers
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-rmi'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-security-params
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-rmi'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-pull'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pull-containers:
+ <<: *functest-kubernetes-containers
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-pull'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-benchmarking-params
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-remove-images:
+ <<: *functest-kubernetes-containers
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-rmi'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-benchmarking-params
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-rmi'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-pull'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pull-containers:
+ <<: *functest-kubernetes-containers
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-pull'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-cnf-params
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-remove-images:
+ <<: *functest-kubernetes-containers
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-rmi'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-cnf-params
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-rmi'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ - functest-kubernetes-build_tag:
+ build_tag: ''
+ - functest-kubernetes-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-run-containers:
+ <<: *functest-kubernetes-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-healthcheck-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-healthcheck'
+ test:
+ - k8s_quick
+ - k8s_smoke
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ - functest-kubernetes-build_tag:
+ build_tag: ''
+ - functest-kubernetes-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-run-containers:
+ <<: *functest-kubernetes-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-smoke-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-smoke'
+ test:
+ - xrally_kubernetes
+ - k8s_conformance
+ - k8s_conformance_serial
+ - sig_api_machinery
+ - sig_api_machinery_serial
+ - sig_apps
+ - sig_apps_serial
+ - sig_auth
+ - sig_cluster_lifecycle
+ - sig_instrumentation
+ - sig_network
+ - sig_node
+ - sig_scheduling_serial
+ - sig_storage
+ - sig_storage_serial
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ - functest-kubernetes-build_tag:
+ build_tag: ''
+ - functest-kubernetes-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-run-containers:
+ <<: *functest-kubernetes-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-security-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-security'
+ test:
+ - kube_hunter
+ - kube_bench_master
+ - kube_bench_node
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ - functest-kubernetes-build_tag:
+ build_tag: ''
+ - functest-kubernetes-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-run-containers:
+ <<: *functest-kubernetes-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-benchmarking-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-benchmarking'
+ test:
+ - xrally_kubernetes_full
+ - netperf
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ - functest-kubernetes-build_tag:
+ build_tag: ''
+ - functest-kubernetes-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-run-containers:
+ <<: *functest-kubernetes-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-cnf-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-cnf'
+ test:
+ - k8s_vims
+ - helm_vims
+ - cnf_testsuite
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-{test}-run'
+
+- builder:
+ name: functest-kubernetes-zip
+ builders:
+ - shell: |
+ set +x
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ mkdir -p $WORKSPACE/results
+ chown {uid}:{gid} $WORKSPACE/results
+ docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/functest-kubernetes \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/functest-kubernetes \
+ -v /home/opnfv/functest-kubernetes/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ $image zip_campaign
+
+- job-template:
+ name: 'functest-kubernetes-{tag}-zip'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ - functest-kubernetes-build_tag:
+ build_tag: ''
+ - functest-kubernetes-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-zip:
+ <<: *functest-kubernetes-run-containers
+
+- project:
+ name: 'functest-kubernetes-zip'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-healthcheck-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-healthcheck'
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-{tag}-zip'
+
+- job-template:
+ name: 'functest-kubernetes-{tag}-daily'
+ project-type: multijob
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ - functest-kubernetes-build_tag:
+ build_tag: ''
+ - functest-kubernetes-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
+ properties:
+ - build-blocker:
+ blocking-jobs:
+ - ^functest-kubernetes-(pi-)*.*-(daily|docker|review)$
+ # yamllint enable rule:indentation
+ builders:
+ - multijob:
+ name: remove former images
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-rmi'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-rmi'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-rmi'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-rmi'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-rmi'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: pull containers
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-pull'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-pull'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-pull'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-pull'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-pull'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: opnfv/functest-kubernetes-healthcheck:{tag}
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-k8s_quick-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-k8s_smoke-run'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: opnfv/functest-kubernetes-smoke:{tag}
+ execution-type: SEQUENTIALLY
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-xrally_kubernetes-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-k8s_conformance-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-k8s_conformance_serial-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_api_machinery-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_api_machinery_serial-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_apps-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_apps_serial-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_auth-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_cluster_lifecycle-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_instrumentation-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_network-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_node-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_scheduling_serial-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_storage-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_storage_serial-run'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: opnfv/functest-kubernetes-security:{tag}
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_hunter-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_bench_master-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_bench_node-run'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: opnfv/functest-kubernetes-benchmarking:{tag}
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-xrally_kubernetes_full-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-netperf-run'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: opnfv/functest-kubernetes-cnf:{tag}
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-k8s_vims-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-helm_vims-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-cnf_testsuite-run'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: dump all campaign data
+ projects:
+ - name: 'functest-kubernetes-{tag}-zip'
+ <<: *functest-kubernetes-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-daily'
+ <<: *functest-kubernetes-params
+ jobs:
+ - 'functest-kubernetes-{tag}-daily'
+
+- view:
+ name: functest-kubernetes
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-kubernetes-[a-z0-9.-]+-daily$
+
+- functest-kubernetes-build-containers: &functest-kubernetes-build-containers
+ name: 'functest-kubernetes-build-containers'
+ <<: *functest-kubernetes-containers
+ ref_arg: '{ref_arg}'
+ path: '{path}'
+ build_args: '{build_args}'
+ from: '{from}'
+
+- builder:
+ name: functest-kubernetes-build-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ build_args=""
+ if [ "{build_args}" != "None" ]; then
+ for i in $(echo {build_args} | tr -d '[]' |sed "s/, / /g" ); \
+ do build_args="--build-arg $i $build_args"; done
+ fi
+ if [ "{ref_arg}" != "None" ]; then
+ build_args="$build_args --build-arg {ref_arg}={ref}"
+ fi
+ cd {path}
+ if [ "{from}" != "None" ]; then
+ sed -i {from} Dockerfile
+ fi
+ docker build $build_args \
+ --pull=false --no-cache --force-rm=true \
+ -t $image .
+
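+ # For a concrete sense of what the builder above expands to, take the core
+ # gate job defined further below as an example (repo opnfv, empty port,
+ # container functest-kubernetes-core, tag latest, ref_arg BRANCH,
+ # path docker/core, no extra build_args and no "from" override); the
+ # generated step is then roughly:
+ #   cd docker/core
+ #   docker build --build-arg BRANCH=$GERRIT_REFSPEC \
+ #     --pull=false --no-cache --force-rm=true \
+ #     -t opnfv/functest-kubernetes-core:latest .
+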
+- scm:
+ name: functest-kubernetes-scm
+ scm:
+ - git:
+ url: 'https://gerrit.opnfv.org/gerrit/functest-kubernetes'
+ refspec: '+refs/heads/*:refs/remotes/origin/* +refs/changes/*:refs/changes/*'
+ branches:
+ - '{ref}'
+
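+ # The gate and tox jobs below pass $GERRIT_REFSPEC as the ref, so the SCM
+ # macro above checks out the change under review from refs/changes/*.
+ # Outside Jenkins, the equivalent manual steps would look roughly like:
+ #   git clone https://gerrit.opnfv.org/gerrit/functest-kubernetes
+ #   cd functest-kubernetes
+ #   git fetch origin "$GERRIT_REFSPEC"   # e.g. a refs/changes/NN/NNNNN/P refspec
+ #   git checkout FETCH_HEAD
+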
+- functest-kubernetes-dep: &functest-kubernetes-dep
+ name: 'functest-kubernetes-containers'
+ repo: '{repo}'
+ container: '{container}'
+ port: '{port}'
+ tag: '{dependency}'
+
+- job-template:
+ name: 'functest-kubernetes-{tag}-dep-pull'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pull-containers:
+ <<: *functest-kubernetes-dep
+
+- functest-kubernetes-dep-params: &functest-kubernetes-dep-params
+ name: 'functest-kubernetes-dep-params'
+ repo: '_'
+ container: 'alpine'
+ port:
+ tag:
+ - latest:
+ dependency: 3.16
+ - v1.29:
+ dependency: 3.16
+ - v1.28:
+ dependency: 3.16
+ - v1.27:
+ dependency: 3.16
+ - v1.26:
+ dependency: 3.16
+
+- project:
+ name: 'functest-kubernetes-dep-pull'
+ <<: *functest-kubernetes-dep-params
+ jobs:
+ - 'functest-kubernetes-{tag}-dep-pull'
+
+- job-template:
+ name: 'functest-kubernetes-{tag}-dep-rmi'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-remove-images:
+ <<: *functest-kubernetes-dep
+
+- project:
+ name: 'functest-kubernetes-dep-rmi'
+ <<: *functest-kubernetes-dep-params
+ jobs:
+ - 'functest-kubernetes-{tag}-dep-rmi'
+
+- builder:
+ name: functest-kubernetes-tox
+ builders:
+ - shell: |
+ set +x
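+ # Provision the interpreters needed by the tox environments: upgrade the
+ # node, add the deadsnakes PPA and install Python 3.8/3.9/3.10 plus the
+ # native build dependencies, then run the whole tox suite.
+ # DPkg::Lock::Timeout avoids failing when another apt process holds the lock.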
+ apt-get -o DPkg::Lock::Timeout=300 update && \
+ DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install software-properties-common gpg -y
+ add-apt-repository -y ppa:deadsnakes/ppa
+ apt-get -o DPkg::Lock::Timeout=300 update && \
+ DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install \
+ libxml2-dev libxslt-dev libffi-dev libjpeg-dev \
+ python3.10 python3.10-dev python3.10-distutils \
+ python3.9 python3.9-dev python3.9-distutils \
+ python3.8 python3.8-dev python3.8-distutils \
+ python3-pip enchant-2 -y
+ pip3 install tox
+
+ tox
+
+- job-template:
+ name: 'functest-kubernetes-{tag}-tox'
+ scm:
+ - functest-kubernetes-scm:
+ ref: $GERRIT_REFSPEC
+ triggers:
+ - functest-kubernetes-patchset-created:
+ branch: '{branch}'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-tox:
+
+- project:
+ name: functest-kubernetes-tox
+ <<: *functest-kubernetes-params
+ jobs:
+ - 'functest-kubernetes-{tag}-tox'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-gate'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-core-gate
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ port:
+ container: functest-kubernetes-core
+ ref_arg: BRANCH
+ path: docker/core
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-gate'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-gate'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-healthcheck-gate
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ port:
+ container: functest-kubernetes-healthcheck
+ ref_arg:
+ path: docker/healthcheck
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-gate'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-gate'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-cnf-gate
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ port:
+ container: functest-kubernetes-cnf
+ ref_arg: BRANCH
+ path: docker/cnf
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-gate'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-gate'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-security-gate
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ port:
+ container: functest-kubernetes-security
+ ref_arg: BRANCH
+ path: docker/security
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-gate'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-gate'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-smoke-gate
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ port:
+ container: functest-kubernetes-smoke
+ ref_arg:
+ path: docker/smoke
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-gate'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-gate'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-benchmarking-gate
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ port:
+ container: functest-kubernetes-benchmarking
+ ref_arg:
+ path: docker/benchmarking
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-gate'
+
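+# Descriptive note (added): shared Gerrit trigger for the review jobs — fires on
+# every new patchset of functest-kubernetes and on "recheck"/"reverify" review
+# comments, restricted to the matching branch.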
+- trigger:
+ name: functest-kubernetes-patchset-created
+ triggers:
+ - gerrit:
+ trigger-on:
+ - patchset-created-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'functest-kubernetes'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+
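+# Descriptive note (added): review pipeline — a multijob that removes stale
+# images, pulls the alpine dependency, rebuilds the container hierarchy from
+# functest-kubernetes-core outwards against the Gerrit refspec, then runs the
+# healthcheck, smoke, security, benchmarking and cnf suites in order.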
+- job-template:
+ name: 'functest-kubernetes-{tag}-review'
+ project-type: multijob
+ triggers:
+ - functest-kubernetes-patchset-created:
+ branch: '{branch}'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ - functest-kubernetes-build_tag:
+ build_tag: ''
+ - functest-kubernetes-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
+ properties:
+ - build-blocker:
+ blocking-jobs:
+ - ^functest-kubernetes-(pi-)*.*-(daily|docker|review)$
+ # yamllint enable rule:indentation
+ builders:
+ - multijob:
+ name: remove former images
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-rmi'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-rmi'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-rmi'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-rmi'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-rmi'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: remove dependency
+ projects:
+ - name: 'functest-kubernetes-{tag}-dep-rmi'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: pull dependency
+ projects:
+ - name: 'functest-kubernetes-{tag}-dep-pull'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: build opnfv/functest-kubernetes-core
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-gate'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: build containers
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-gate'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-gate'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-gate'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: build opnfv/functest-kubernetes-smoke
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-gate'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: build opnfv/functest-kubernetes-benchmarking
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-gate'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: opnfv/functest-kubernetes-healthcheck:{tag}
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-k8s_quick-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-k8s_smoke-run'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: opnfv/functest-kubernetes-smoke:{tag}
+ execution-type: SEQUENTIALLY
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-xrally_kubernetes-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-k8s_conformance-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-k8s_conformance_serial-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_api_machinery-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_api_machinery_serial-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_apps-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_apps_serial-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_auth-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_cluster_lifecycle-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_instrumentation-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_network-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_node-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_scheduling_serial-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_storage-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_storage_serial-run'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: opnfv/functest-kubernetes-security:{tag}
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_hunter-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_bench_master-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_bench_node-run'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: opnfv/functest-kubernetes-benchmarking:{tag}
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-xrally_kubernetes_full-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-netperf-run'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: opnfv/functest-kubernetes-cnf:{tag}
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-k8s_vims-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-helm_vims-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-cnf_testsuite-run'
+ <<: *functest-kubernetes-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-review'
+ <<: *functest-kubernetes-params
+ jobs:
+ - 'functest-kubernetes-{tag}-review'
+
+- view:
+ name: functest-kubernetes-review
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-kubernetes-[a-z0-9.-]+-review$
+
+- view:
+ name: functest-kubernetes-tox
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-kubernetes-[a-z0-9.-]+-tox$
+
+- builder:
+ name: functest-kubernetes-push-containers
+ builders:
+ - shell: |
+ set +x
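+ # Descriptive comment (added): derive the image reference from the macro
+ # parameters — a repo of "_" means a plain local image name, an unset port
+ # means a Docker Hub style repo/container:tag reference, otherwise a private
+ # registry with an explicit port — then docker push uploads that tag.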
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ docker push $image
+
+- trigger:
+ name: functest-kubernetes-commit
+ triggers:
+ - pollscm:
+ cron: "*/30 * * * *"
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-build'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: '{branch}'
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-kubernetes-push-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-core-build
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ container: functest-kubernetes-core
+ port:
+ ref_arg: BRANCH
+ path: docker/core
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-build'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-build'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: '{branch}'
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-kubernetes-push-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-healthcheck-build
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ container: functest-kubernetes-healthcheck
+ port:
+ ref_arg:
+ path: docker/healthcheck
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-build'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-build'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: '{branch}'
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-kubernetes-push-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-cnf-build
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ container: functest-kubernetes-cnf
+ port:
+ ref_arg: BRANCH
+ path: docker/cnf
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-build'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-build'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: '{branch}'
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-kubernetes-push-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-security-build
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ container: functest-kubernetes-security
+ port:
+ ref_arg: BRANCH
+ path: docker/security
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-build'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-build'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: '{branch}'
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-kubernetes-push-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-smoke-build
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ container: functest-kubernetes-smoke
+ port:
+ ref_arg:
+ path: docker/smoke
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-build'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-build'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: '{branch}'
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-kubernetes-push-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-benchmarking-build
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ container: functest-kubernetes-benchmarking
+ port:
+ ref_arg:
+ path: docker/benchmarking
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-build'
+
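+# Descriptive note (added): docker pipeline — triggered by the 30-minute SCM poll
+# (functest-kubernetes-commit), it rebuilds and pushes all container images from
+# the branch, starting with the core image the others derive from.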
+- job-template:
+ name: 'functest-kubernetes-{tag}-docker'
+ project-type: multijob
+ triggers:
+ - functest-kubernetes-commit
+ scm:
+ - functest-kubernetes-scm:
+ ref: '{branch}'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
+ properties:
+ - build-blocker:
+ blocking-jobs:
+ - ^functest-kubernetes-(pi-)*.*-(daily|docker|review)$
+ # yamllint enable rule:indentation
+ builders:
+ - multijob:
+ name: remove dependency
+ projects:
+ - name: 'functest-kubernetes-{tag}-dep-rmi'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: pull dependency
+ projects:
+ - name: 'functest-kubernetes-{tag}-dep-pull'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: build opnfv/functest-kubernetes-core
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-build'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: build containers
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-build'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-build'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-build'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: build opnfv/functest-kubernetes-smoke
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-build'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: build opnfv/functest-kubernetes-benchmarking
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-build'
+ <<: *functest-kubernetes-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-docker'
+ <<: *functest-kubernetes-params
+ jobs:
+ - 'functest-kubernetes-{tag}-docker'
+
+- builder:
+ name: functest-kubernetes-trivy
+ builders:
+ - shell: |
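+ # Descriptive comment (added): fetch the latest trivy release into the
+ # workspace and scan the published image; --exit-code 1 turns any reported
+ # vulnerability into a job failure.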
+ apt-get -o DPkg::Lock::Timeout=300 update && \
+ DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b .
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ ./trivy image --exit-code 1 $image
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-trivy:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-core-trivy'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-core'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-trivy'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-trivy:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-trivy'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-healthcheck'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-trivy'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-trivy:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-trivy'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-cnf'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-trivy'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-trivy:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-trivy'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-security'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-trivy'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-trivy:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-trivy'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-smoke'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-trivy'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-trivy:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-trivy'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-benchmarking'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-trivy'
+
+- builder:
+ name: functest-kubernetes-grype
+ builders:
+ - shell: |
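+ # Descriptive comment (added): install the latest grype release into the
+ # workspace and scan the published image; -q suppresses grype's log output so
+ # only the vulnerability report is printed.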
+ apt-get -o DPkg::Lock::Timeout=300 update && \
+ DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b .
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ ./grype -q $image
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-grype:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-core-grype'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-core'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-grype'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-grype:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-grype'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-healthcheck'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-grype'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-grype:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-grype'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-cnf'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-grype'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-grype:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-grype'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-security'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-grype'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-grype:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-grype'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-smoke'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-grype'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-grype:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-grype'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-benchmarking'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-grype'
+
+- builder:
+ name: functest-kubernetes-sbom
+ builders:
+ - shell: |
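+ # Descriptive comment (added): install the docker sbom CLI plugin under
+ # ~/.docker and print a software bill of materials for the published image.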
+ apt-get -o DPkg::Lock::Timeout=300 update && \
+ DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ mkdir -p ~/.docker
+ curl -sSfL https://raw.githubusercontent.com/docker/sbom-cli-plugin/main/install.sh | sh -s --
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ docker sbom $image
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-sbom:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-core-sbom'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-core'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-sbom'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-sbom:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-sbom'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-healthcheck'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-sbom'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-sbom:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-sbom'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-cnf'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-sbom'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-sbom:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-sbom'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-security'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-sbom'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-sbom:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-sbom'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-smoke'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sbom'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-sbom:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-sbom'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-benchmarking'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-sbom'
+
+- view:
+ name: functest-kubernetes-docker
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-kubernetes-[a-z0-9.-]+-docker$
+
+- view:
+ name: functest-kubernetes-trivy
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: (?!functest-kubernetes-pi)(?!functest-kubernetes-ng)^functest-kubernetes-[a-z0-9.-]+-trivy$
+
+- view:
+ name: functest-kubernetes-grype
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: (?!functest-kubernetes-pi)(?!functest-kubernetes-ng)^functest-kubernetes-[a-z0-9.-]+-grype$
+
+- view:
+ name: functest-kubernetes-sbom
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: (?!functest-kubernetes-pi)(?!functest-kubernetes-ng)^functest-kubernetes-[a-z0-9.-]+-sbom$
diff --git a/jjb/functest/functest-pi.yaml b/jjb/functest/functest-pi.yaml
new file mode 100644
index 000000000..1ac14f584
--- /dev/null
+++ b/jjb/functest/functest-pi.yaml
@@ -0,0 +1,1239 @@
+---
+- functest-pi-containers: &functest-pi-containers
+ name: 'functest-pi-containers'
+ repo: '{repo}'
+ port: '{port}'
+ container: '{container}'
+ tag: '{tag}'
+
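+# Descriptive note (added): each tag (an OpenStack release, optionally with an
+# arm/arm64 architecture prefix) is pinned to a dedicated lab node and its test
+# dashboard; the same mapping is repeated in every per-container params block below.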
+- functest-pi-params: &functest-pi-params
+ name: 'functest-pi-params'
+ tag:
+ - latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena-latest:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm64-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm64-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm64-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - arm64-xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm64-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+
+- functest-pi-ollivier-functest-healthcheck-params: &functest-pi-ollivier-functest-healthcheck-params
+ name: 'functest-pi-ollivier-functest-healthcheck-params'
+ repo: 'ollivier'
+ container: 'functest-healthcheck'
+ port:
+ tag:
+ - latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena-latest:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm64-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm64-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm64-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - arm64-xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm64-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+
+- functest-pi-ollivier-functest-smoke-params: &functest-pi-ollivier-functest-smoke-params
+ name: 'functest-pi-ollivier-functest-smoke-params'
+ repo: 'ollivier'
+ container: 'functest-smoke'
+ port:
+ tag:
+ - latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena-latest:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm64-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm64-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm64-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - arm64-xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm64-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+
+- functest-pi-ollivier-functest-smoke-cntt-params: &functest-pi-ollivier-functest-smoke-cntt-params
+ name: 'functest-pi-ollivier-functest-smoke-cntt-params'
+ repo: 'ollivier'
+ container: 'functest-smoke-cntt'
+ port:
+ tag:
+ - latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena-latest:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm64-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm64-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm64-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - arm64-xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm64-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+
+- functest-pi-ollivier-functest-benchmarking-params: &functest-pi-ollivier-functest-benchmarking-params
+ name: 'functest-pi-ollivier-functest-benchmarking-params'
+ repo: 'ollivier'
+ container: 'functest-benchmarking'
+ port:
+ tag:
+ - latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena-latest:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm64-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm64-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm64-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - arm64-xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm64-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+
+- functest-pi-ollivier-functest-benchmarking-cntt-params: &functest-pi-ollivier-functest-benchmarking-cntt-params
+ name: 'functest-pi-ollivier-functest-benchmarking-cntt-params'
+ repo: 'ollivier'
+ container: 'functest-benchmarking-cntt'
+ port:
+ tag:
+ - latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena-latest:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm64-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm64-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm64-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - arm64-xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm64-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+
+- functest-pi-ollivier-functest-vnf-params: &functest-pi-ollivier-functest-vnf-params
+ name: 'functest-pi-ollivier-functest-vnf-params'
+ repo: 'ollivier'
+ container: 'functest-vnf'
+ port:
+ tag:
+ - latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena-latest:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm64-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm64-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm64-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - arm64-xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm64-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+
+- functest-pi-jobs: &functest-pi-jobs
+ name: 'functest-pi-jobs'
+ current-parameters: true
+
+- parameter:
+ name: functest-pi-node
+ parameters:
+ - label:
+ name: node
+ default: '{node}'
+
+- parameter:
+ name: functest-pi-build_tag
+ parameters:
+ - random-string:
+ name: build_tag
+
+- parameter:
+ name: functest-pi-EXTERNAL_NETWORK
+ parameters:
+ - string:
+ name: EXTERNAL_NETWORK
+ default: public
+
+- parameter:
+ name: functest-pi-VOLUME_DEVICE_NAME
+ parameters:
+ - string:
+ name: VOLUME_DEVICE_NAME
+ default: sdb
+
+- parameter:
+ name: functest-pi-IMAGE_PROPERTIES
+ parameters:
+ - string:
+ name: IMAGE_PROPERTIES
+ default: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+
+- functest-pi-run-containers: &functest-pi-run-containers
+ name: 'functest-pi-run-containers'
+ <<: *functest-pi-containers
+ privileged: '{privileged}'
+ volumes: '{volumes}'
+ env: '{env}'
+ network: '{network}'
+ uid: '{uid}'
+ gid: '{gid}'
+ published_ports: '{published_ports}'
+ DASHBOARD_URL: '{DASHBOARD_URL}'
+
+- builder:
+ name: functest-pi-pull-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker pull $image
+
+- builder:
+ name: functest-pi-run-containers
+ builders:
+ - shell: |
+ set +x
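+ # Descriptive comment (added): expand the JJB list parameters (volumes, env,
+ # published_ports) into docker -v/-e/-p flags, then run the selected test inside
+ # the container with the results directory bind-mounted back into the workspace.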
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo mkdir -p $WORKSPACE/results
+ sudo chown {uid}:{gid} $WORKSPACE/results
+ sudo docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/functest/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/functest/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -v /home/opnfv/functest/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ -e DASHBOARD_URL={DASHBOARD_URL} \
+ $image run_tests -t {test} -p -r
+
+- builder:
+ name: functest-pi-remove-images
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker rmi $image || true
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-healthcheck-{tag}-pull'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-pull-containers:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-healthcheck-pull'
+ <<: *functest-pi-ollivier-functest-healthcheck-params
+ jobs:
+ - 'functest-pi-ollivier-functest-healthcheck-{tag}-pull'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-healthcheck-{tag}-rmi'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-remove-images:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-healthcheck-rmi'
+ <<: *functest-pi-ollivier-functest-healthcheck-params
+ jobs:
+ - 'functest-pi-ollivier-functest-healthcheck-{tag}-rmi'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-smoke-{tag}-pull'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-pull-containers:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-smoke-pull'
+ <<: *functest-pi-ollivier-functest-smoke-params
+ jobs:
+ - 'functest-pi-ollivier-functest-smoke-{tag}-pull'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-smoke-{tag}-rmi'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-remove-images:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-smoke-rmi'
+ <<: *functest-pi-ollivier-functest-smoke-params
+ jobs:
+ - 'functest-pi-ollivier-functest-smoke-{tag}-rmi'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-pull'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-pull-containers:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-smoke-cntt-pull'
+ <<: *functest-pi-ollivier-functest-smoke-cntt-params
+ jobs:
+ - 'functest-pi-ollivier-functest-smoke-cntt-{tag}-pull'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-rmi'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-remove-images:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-smoke-cntt-rmi'
+ <<: *functest-pi-ollivier-functest-smoke-cntt-params
+ jobs:
+ - 'functest-pi-ollivier-functest-smoke-cntt-{tag}-rmi'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-benchmarking-{tag}-pull'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-pull-containers:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-benchmarking-pull'
+ <<: *functest-pi-ollivier-functest-benchmarking-params
+ jobs:
+ - 'functest-pi-ollivier-functest-benchmarking-{tag}-pull'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-benchmarking-{tag}-rmi'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-remove-images:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-benchmarking-rmi'
+ <<: *functest-pi-ollivier-functest-benchmarking-params
+ jobs:
+ - 'functest-pi-ollivier-functest-benchmarking-{tag}-rmi'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-pull'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-pull-containers:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-benchmarking-cntt-pull'
+ <<: *functest-pi-ollivier-functest-benchmarking-cntt-params
+ jobs:
+ - 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-pull'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-rmi'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-remove-images:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-benchmarking-cntt-rmi'
+ <<: *functest-pi-ollivier-functest-benchmarking-cntt-params
+ jobs:
+ - 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-rmi'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-vnf-{tag}-pull'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-pull-containers:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-vnf-pull'
+ <<: *functest-pi-ollivier-functest-vnf-params
+ jobs:
+ - 'functest-pi-ollivier-functest-vnf-{tag}-pull'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-vnf-{tag}-rmi'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-remove-images:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-vnf-rmi'
+ <<: *functest-pi-ollivier-functest-vnf-params
+ jobs:
+ - 'functest-pi-ollivier-functest-vnf-{tag}-rmi'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-healthcheck-{tag}-{test}-run'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ - functest-pi-build_tag:
+ build_tag: ''
+ - functest-pi-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-pi-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-pi-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-pi-run-containers:
+ <<: *functest-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-pi-ollivier-functest-healthcheck'
+ <<: *functest-pi-ollivier-functest-healthcheck-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-healthcheck'
+ test:
+ - connection_check
+ - tenantnetwork1
+ - tenantnetwork2
+ - vmready1
+ - vmready2
+ - singlevm1
+ - singlevm2
+ - vping_ssh
+ - vping_userdata
+ - cinder_test
+ - odl
+ - tempest_smoke
+ - tempest_horizon
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-pi-ollivier-functest-healthcheck-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-smoke-{tag}-{test}-run'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ - functest-pi-build_tag:
+ build_tag: ''
+ - functest-pi-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-pi-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-pi-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-pi-run-containers:
+ <<: *functest-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-pi-ollivier-functest-smoke'
+ <<: *functest-pi-ollivier-functest-smoke-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-smoke'
+ test:
+ - tempest_neutron
+ - tempest_cinder
+ - tempest_keystone
+ - tempest_heat
+ - tempest_telemetry
+ - rally_sanity
+ - refstack_compute
+ - refstack_object
+ - refstack_platform
+ - tempest_full
+ - tempest_scenario
+ - tempest_slow
+ - patrole_admin
+ - patrole_member
+ - patrole_reader
+ - tempest_barbican
+ - tempest_octavia
+ - tempest_cyborg
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-pi-ollivier-functest-smoke-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-{test}-run'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ - functest-pi-build_tag:
+ build_tag: ''
+ - functest-pi-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-pi-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-pi-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-pi-run-containers:
+ <<: *functest-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-pi-ollivier-functest-smoke-cntt'
+ <<: *functest-pi-ollivier-functest-smoke-cntt-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-smoke-cntt'
+ test:
+ - tempest_neutron_cntt
+ - tempest_cinder_cntt
+ - tempest_keystone_cntt
+ - tempest_heat_cntt
+ - rally_sanity_cntt
+ - tempest_full_cntt
+ - tempest_scenario_cntt
+ - tempest_slow_cntt
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-pi-ollivier-functest-smoke-cntt-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-benchmarking-{tag}-{test}-run'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ - functest-pi-build_tag:
+ build_tag: ''
+ - functest-pi-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-pi-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-pi-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-pi-run-containers:
+ <<: *functest-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-pi-ollivier-functest-benchmarking'
+ <<: *functest-pi-ollivier-functest-benchmarking-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-benchmarking'
+ test:
+ - rally_full
+ - rally_jobs
+ - vmtp
+ - shaker
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-pi-ollivier-functest-benchmarking-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-{test}-run'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ - functest-pi-build_tag:
+ build_tag: ''
+ - functest-pi-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-pi-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-pi-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-pi-run-containers:
+ <<: *functest-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-pi-ollivier-functest-benchmarking-cntt'
+ <<: *functest-pi-ollivier-functest-benchmarking-cntt-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-benchmarking-cntt'
+ test:
+ - rally_full_cntt
+ - rally_jobs_cntt
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-vnf-{tag}-{test}-run'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ - functest-pi-build_tag:
+ build_tag: ''
+ - functest-pi-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-pi-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-pi-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-pi-run-containers:
+ <<: *functest-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-pi-ollivier-functest-vnf'
+ <<: *functest-pi-ollivier-functest-vnf-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-vnf'
+ test:
+ - cloudify
+ - cloudify_ims
+ - heat_ims
+ - vyos_vrouter
+ - juju_epc
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-pi-ollivier-functest-vnf-{tag}-{test}-run'
+
+- builder:
+ name: functest-pi-zip
+ builders:
+ - shell: |
+ set +x
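+ # Descriptive comment (added): same docker invocation as the run builder, but it
+ # calls zip_campaign instead of run_tests, archiving the whole campaign's results
+ # to the S3/HTTP artifact endpoints.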
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo mkdir -p $WORKSPACE/results
+ sudo chown {uid}:{gid} $WORKSPACE/results
+ sudo docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/functest \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/functest \
+ -v /home/opnfv/functest/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ -e DASHBOARD_URL={DASHBOARD_URL} \
+ $image zip_campaign
+
+- job-template:
+ name: 'functest-pi-{tag}-zip'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ - functest-pi-build_tag:
+ build_tag: ''
+ - functest-pi-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-pi-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-pi-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-pi-zip:
+ <<: *functest-pi-run-containers
+
+- project:
+ name: 'functest-pi-zip'
+ <<: *functest-pi-ollivier-functest-healthcheck-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-healthcheck'
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-pi-{tag}-zip'
+
+- job-template:
+ name: 'functest-pi-{tag}-daily'
+ project-type: multijob
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ - functest-pi-build_tag:
+ build_tag: ''
+ - functest-pi-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-pi-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-pi-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
+ properties:
+ - build-blocker:
+ blocking-jobs:
+ - ^functest-(pi-)*{tag}-(daily|docker|review)$
+ # yamllint enable rule:indentation
+ builders:
+ - multijob:
+ name: remove former images
+ projects:
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-rmi'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-rmi'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-rmi'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-rmi'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-rmi'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-vnf-{tag}-rmi'
+ <<: *functest-pi-jobs
+ - multijob:
+ name: pull containers
+ projects:
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-pull'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-pull'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-pull'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-pull'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-pull'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-vnf-{tag}-pull'
+ <<: *functest-pi-jobs
+ - multijob:
+ name: ollivier/functest-healthcheck:{tag}
+ projects:
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-connection_check-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-tenantnetwork1-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-tenantnetwork2-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-vmready1-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-vmready2-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-singlevm1-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-singlevm2-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-vping_ssh-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-vping_userdata-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-cinder_test-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-odl-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-tempest_smoke-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-tempest_horizon-run'
+ <<: *functest-pi-jobs
+ - multijob:
+ name: ollivier/functest-smoke:{tag}
+ projects:
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_neutron-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_cinder-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_keystone-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_heat-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_telemetry-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-rally_sanity-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-refstack_compute-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-refstack_object-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-refstack_platform-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_full-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_scenario-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_slow-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-patrole_admin-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-patrole_member-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-patrole_reader-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_barbican-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_octavia-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_cyborg-run'
+ <<: *functest-pi-jobs
+ - multijob:
+ name: ollivier/functest-smoke-cntt:{tag}
+ projects:
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_neutron_cntt-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_cinder_cntt-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_keystone_cntt-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_heat_cntt-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-rally_sanity_cntt-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_full_cntt-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_scenario_cntt-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_slow_cntt-run'
+ <<: *functest-pi-jobs
+ - multijob:
+ name: ollivier/functest-benchmarking:{tag}
+ projects:
+ - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-rally_full-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-rally_jobs-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-vmtp-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-shaker-run'
+ <<: *functest-pi-jobs
+ - multijob:
+ name: ollivier/functest-benchmarking-cntt:{tag}
+ projects:
+ - name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-rally_full_cntt-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-rally_jobs_cntt-run'
+ <<: *functest-pi-jobs
+ - multijob:
+ name: ollivier/functest-vnf:{tag}
+ projects:
+ - name: 'functest-pi-ollivier-functest-vnf-{tag}-cloudify-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-vnf-{tag}-cloudify_ims-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-vnf-{tag}-heat_ims-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-vnf-{tag}-vyos_vrouter-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-vnf-{tag}-juju_epc-run'
+ <<: *functest-pi-jobs
+ - multijob:
+ name: dump all campaign data
+ projects:
+ - name: 'functest-pi-{tag}-zip'
+ <<: *functest-pi-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-pi-daily'
+ <<: *functest-pi-params
+ jobs:
+ - 'functest-pi-{tag}-daily'
+
+- view:
+ name: functest-pi
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-pi-[a-z0-9.-]+-daily$
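The view regex only collects the per-tag multijob dailies; the many generated -pull, -rmi, -run and -zip jobs stay out of the list view. A quick sanity check of the pattern against a few hypothetical job names:

    printf '%s\n' \
        functest-pi-latest-daily \
        functest-pi-wallaby-daily \
        functest-pi-latest-zip \
        functest-pi-ollivier-functest-smoke-latest-rally_sanity-run \
      | grep -E '^functest-pi-[a-z0-9.-]+-daily$'
    # -> functest-pi-latest-daily
    #    functest-pi-wallaby-daily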
diff --git a/jjb/functest/functest-project-jobs.yaml b/jjb/functest/functest-project-jobs.yaml
deleted file mode 100644
index 9a123053f..000000000
--- a/jjb/functest/functest-project-jobs.yaml
+++ /dev/null
@@ -1,257 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: functest-project-jobs
-
- project: 'functest'
-
- jobs:
- - 'functest-verify-{stream}'
- - 'functest-verify-{phase}-{stream}'
- - 'functest-docs-upload-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
- phase:
- - 'unit-tests-and-docs':
- slave-label: 'opnfv-build-ubuntu'
- - 'build-x86_64':
- slave-label: 'opnfv-build-ubuntu'
- - 'build-aarch64':
- slave-label: 'opnfv-build-ubuntu-arm'
-
-- job-template:
- name: 'functest-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- project-type: 'multijob'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - 'functest-verify-triggers-macro':
- project: '{project}'
- branch: '{branch}'
-
- builders:
- - shell: |
- #!/bin/bash
- # we do nothing here as the main stuff will be done
- # in phase jobs
- echo "Triggering phase jobs!"
- - multijob:
- name: 'functest-build-and-unittest'
- execution-type: PARALLEL
- projects:
- - name: 'functest-verify-unit-tests-and-docs-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- git-revision: true
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: false
- - name: 'functest-verify-build-x86_64-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- ARCH=x86_64
- git-revision: true
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: false
- - name: 'functest-verify-build-aarch64-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- ARCH=aarch64
- git-revision: true
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: false
-
-- job-template:
- name: 'functest-verify-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 30
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-defaults'
-
- scm:
- - git-scm-gerrit
-
- builders:
- - 'functest-verify-{phase}-builders-macro'
-
- publishers:
- - 'functest-verify-{phase}-publishers-macro'
-
-- job-template:
- name: 'functest-docs-upload-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - 'functest-docs-upload-triggers-macro':
- project: '{project}'
- branch: '{branch}'
-
- builders:
- - functest-upload-doc-artifact
-
-################################
-# job triggers
-################################
-- trigger:
- name: 'functest-verify-triggers-macro'
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
-- trigger:
- name: 'functest-docs-upload-triggers-macro'
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-################################
-# job builders
-################################
-
-- builder:
- name: 'functest-verify-unit-tests-and-docs-builders-macro'
- builders:
- - shell: |
- cd $WORKSPACE && tox
-
-- builder:
- name: 'functest-verify-build-x86_64-builders-macro'
- builders:
- - shell: |
- echo "Not activated!"
-
-- builder:
- name: 'functest-verify-build-aarch64-builders-macro'
- builders:
- - shell: |
- echo "Not activated!"
-
-- builder:
- name: 'functest-upload-doc-artifact'
- builders:
- - shell: |
- cd $WORKSPACE && tox -edocs
- wget -O - https://git.opnfv.org/releng/plain/utils/upload-artifact.sh | bash -s "api/build" "docs"
-################################
-# job publishers
-################################
-- publisher:
- name: 'functest-verify-unit-tests-and-docs-publishers-macro'
- publishers:
- - junit:
- results: nosetests.xml
- - cobertura:
- report-file: "coverage.xml"
- only-stable: "true"
- health-auto-update: "true"
- stability-auto-update: "true"
- zoom-coverage-chart: "true"
- targets:
- - files:
- healthy: 10
- unhealthy: 20
- failing: 30
- - method:
- healthy: 50
- unhealthy: 40
- failing: 30
- - email-jenkins-admins-on-failure
-
-- publisher:
- name: 'functest-verify-build-x86_64-publishers-macro'
- publishers:
- - email-jenkins-admins-on-failure
-
-- publisher:
- name: 'functest-verify-build-aarch64-publishers-macro'
- publishers:
- - email-jenkins-admins-on-failure
diff --git a/jjb/functest/functest-suite.sh b/jjb/functest/functest-suite.sh
deleted file mode 100755
index 469a57726..000000000
--- a/jjb/functest/functest-suite.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-
-container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1)
-if [ -z $container_id ]; then
- echo "Functest container not found"
- exit 1
-fi
-
-global_ret_val=0
-
-tests=($(echo $FUNCTEST_SUITE_NAME | tr "," "\n"))
-for test in ${tests[@]}; do
- cmd="run_tests -t $test"
- docker exec $container_id $cmd
- let global_ret_val+=$?
-done
-
-ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
-echo ${global_ret_val}>${ret_val_file}
-
-exit 0
diff --git a/jjb/functest/functest-weekly-jobs.yaml b/jjb/functest/functest-weekly-jobs.yaml
deleted file mode 100644
index c88fa0050..000000000
--- a/jjb/functest/functest-weekly-jobs.yaml
+++ /dev/null
@@ -1,128 +0,0 @@
----
-###################################
-# job configuration for functest
-###################################
-- project:
- name: functest-weekly
-
- project: functest
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- docker-tag: 'latest'
- disabled: false
- fraser: &fraser
- stream: fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- docker-tag: '{stream}'
- disabled: false
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # Installers using labels
- # CI PODs
- # This section should only contain the installers
- # that have been switched using labels for slaves
- # -------------------------------
- pod:
- # fuel CI PODs
- - baremetal:
- slave-label: fuel-baremetal
- installer: fuel
- <<: *master
- - virtual:
- slave-label: fuel-virtual
- installer: fuel
- <<: *master
- - baremetal:
- slave-label: fuel-baremetal
- installer: fuel
- <<: *fraser
- - virtual:
- slave-label: fuel-virtual
- installer: fuel
- <<: *fraser
- # -------------------------------
- jobs:
- - 'functest-{installer}-{pod}-weekly-{stream}'
-
-################################
-# job template
-################################
-- job-template:
- name: 'functest-{installer}-{pod}-weekly-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER Suite: $FUNCTEST_SUITE_NAME Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: '400'
- abort: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'weekly'
- description: "Weekly suite name to run"
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl_l2-nofeature-ha'
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: 'Tag to pull docker image'
- - string:
- name: CLEAN_DOCKER_IMAGES
- default: 'false'
- description: 'Remove downloaded docker images (opnfv/functest*:*)'
- - functest-parameter:
- gs-pathname: '{gs-pathname}'
-
- scm:
- - git-scm
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - 'functest-weekly-builder'
-########################
-# builder macros
-########################
-- builder:
- name: functest-weekly-builder
- builders:
- - shell:
- !include-raw: ./functest-cleanup.sh
- # yamllint disable rule:indentation
- - shell:
- !include-raw:
- - ./functest-env-presetup.sh
- - ../../utils/fetch_os_creds.sh
- - ./functest-alpine.sh
- # yamllint enable rule:indentation
- - shell:
- !include-raw: ../../utils/push-test-logs.sh
- - shell:
- !include-raw: ./functest-exit.sh
diff --git a/jjb/functest/functest.yaml b/jjb/functest/functest.yaml
new file mode 100644
index 000000000..463dd9a0a
--- /dev/null
+++ b/jjb/functest/functest.yaml
@@ -0,0 +1,2663 @@
+---
+- functest-containers: &functest-containers
+ name: 'functest-containers'
+ repo: '{repo}'
+ port: '{port}'
+ container: '{container}'
+ tag: '{tag}'
+
+- functest-params: &functest-params
+ name: 'functest-params'
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ dependency: 3.14
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ dependency: 3.14
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ dependency: 3.14
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: lf-pod4-2
+ DASHBOARD_URL: http://172.30.12.85
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ dependency: 3.13
+
+- functest-opnfv-functest-healthcheck-params: &functest-opnfv-functest-healthcheck-params
+ name: 'functest-opnfv-functest-healthcheck-params'
+ repo: 'opnfv'
+ container: 'functest-healthcheck'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ dependency: 3.14
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ dependency: 3.14
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ dependency: 3.14
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: lf-pod4-2
+ DASHBOARD_URL: http://172.30.12.85
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ dependency: 3.13
+
+- functest-opnfv-functest-smoke-params: &functest-opnfv-functest-smoke-params
+ name: 'functest-opnfv-functest-smoke-params'
+ repo: 'opnfv'
+ container: 'functest-smoke'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ dependency: 3.14
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ dependency: 3.14
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ dependency: 3.14
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: lf-pod4-2
+ DASHBOARD_URL: http://172.30.12.85
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ dependency: 3.13
+
+- functest-opnfv-functest-smoke-cntt-params: &functest-opnfv-functest-smoke-cntt-params
+ name: 'functest-opnfv-functest-smoke-cntt-params'
+ repo: 'opnfv'
+ container: 'functest-smoke-cntt'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ dependency: 3.14
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ dependency: 3.14
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ dependency: 3.14
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: lf-pod4-2
+ DASHBOARD_URL: http://172.30.12.85
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ dependency: 3.13
+
+- functest-opnfv-functest-benchmarking-params: &functest-opnfv-functest-benchmarking-params
+ name: 'functest-opnfv-functest-benchmarking-params'
+ repo: 'opnfv'
+ container: 'functest-benchmarking'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ dependency: 3.14
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ dependency: 3.14
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ dependency: 3.14
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: lf-pod4-2
+ DASHBOARD_URL: http://172.30.12.85
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ dependency: 3.13
+
+- functest-opnfv-functest-benchmarking-cntt-params: &functest-opnfv-functest-benchmarking-cntt-params
+ name: 'functest-opnfv-functest-benchmarking-cntt-params'
+ repo: 'opnfv'
+ container: 'functest-benchmarking-cntt'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ dependency: 3.14
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ dependency: 3.14
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ dependency: 3.14
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: lf-pod4-2
+ DASHBOARD_URL: http://172.30.12.85
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ dependency: 3.13
+
+- functest-opnfv-functest-vnf-params: &functest-opnfv-functest-vnf-params
+ name: 'functest-opnfv-functest-vnf-params'
+ repo: 'opnfv'
+ container: 'functest-vnf'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ dependency: 3.14
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ dependency: 3.14
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ dependency: 3.14
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: lf-pod4-2
+ DASHBOARD_URL: http://172.30.12.85
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ dependency: 3.13
+
+- functest-jobs: &functest-jobs
+ name: 'functest-jobs'
+ current-parameters: true
+
+- parameter:
+ name: functest-node
+ parameters:
+ - label:
+ name: node
+ default: '{node}'
+
+- parameter:
+ name: functest-build_tag
+ parameters:
+ - random-string:
+ name: build_tag
+
+- parameter:
+ name: functest-EXTERNAL_NETWORK
+ parameters:
+ - string:
+ name: EXTERNAL_NETWORK
+ default: public
+
+- parameter:
+ name: functest-VOLUME_DEVICE_NAME
+ parameters:
+ - string:
+ name: VOLUME_DEVICE_NAME
+ default: sdb
+
+- parameter:
+ name: functest-IMAGE_PROPERTIES
+ parameters:
+ - string:
+ name: IMAGE_PROPERTIES
+ default: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+
+- functest-run-containers: &functest-run-containers
+ name: 'functest-run-containers'
+ <<: *functest-containers
+ privileged: '{privileged}'
+ volumes: '{volumes}'
+ env: '{env}'
+ network: '{network}'
+ uid: '{uid}'
+ gid: '{gid}'
+ published_ports: '{published_ports}'
+ DASHBOARD_URL: '{DASHBOARD_URL}'
+
+- builder:
+ name: functest-pull-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker pull $image
+
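The same repo/port/container/tag logic recurs in the pull, run, rmi, zip and build builders of this file: '_' selects an official library image (as the alpine dependency jobs further down do), an empty port a plain Docker Hub namespace, and a non-empty port a private registry host. A small sketch with hypothetical parameter values (the registry host in the last branch is made up):

    #!/bin/bash
    # Hypothetical stand-ins for the JJB parameters {repo}, {port}, {container} and {tag}.
    repo=opnfv; port=None; container=functest-healthcheck; tag=latest
    if [ "$repo" = "_" ]; then
      image=$container:$tag               # e.g. alpine:3.14 for the dep jobs
    elif [ "$port" = "None" ]; then
      image=$repo/$container:$tag         # e.g. opnfv/functest-healthcheck:latest
    else
      image=$repo:$port/$container:$tag   # e.g. registry.example.org:5000/functest-healthcheck:latest
    fi
    echo "$image"   # -> opnfv/functest-healthcheck:latest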
+- builder:
+ name: functest-run-containers
+ builders:
+ - shell: |
+ set +x
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo mkdir -p $WORKSPACE/results
+ sudo chown {uid}:{gid} $WORKSPACE/results
+ sudo docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/functest/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/functest/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -v /home/opnfv/functest/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ -e DASHBOARD_URL={DASHBOARD_URL} \
+ $image run_tests -t {test} -p -r
+
+- builder:
+ name: functest-remove-images
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker rmi $image || true
+
+- job-template:
+ name: 'functest-opnfv-functest-healthcheck-{tag}-pull'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-pull-containers:
+ <<: *functest-containers
+
+- project:
+ name: 'functest-opnfv-functest-healthcheck-pull'
+ <<: *functest-opnfv-functest-healthcheck-params
+ jobs:
+ - 'functest-opnfv-functest-healthcheck-{tag}-pull'
+
+- job-template:
+ name: 'functest-opnfv-functest-healthcheck-{tag}-rmi'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-remove-images:
+ <<: *functest-containers
+
+- project:
+ name: 'functest-opnfv-functest-healthcheck-rmi'
+ <<: *functest-opnfv-functest-healthcheck-params
+ jobs:
+ - 'functest-opnfv-functest-healthcheck-{tag}-rmi'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-{tag}-pull'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-pull-containers:
+ <<: *functest-containers
+
+- project:
+ name: 'functest-opnfv-functest-smoke-pull'
+ <<: *functest-opnfv-functest-smoke-params
+ jobs:
+ - 'functest-opnfv-functest-smoke-{tag}-pull'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-{tag}-rmi'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-remove-images:
+ <<: *functest-containers
+
+- project:
+ name: 'functest-opnfv-functest-smoke-rmi'
+ <<: *functest-opnfv-functest-smoke-params
+ jobs:
+ - 'functest-opnfv-functest-smoke-{tag}-rmi'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-cntt-{tag}-pull'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-pull-containers:
+ <<: *functest-containers
+
+- project:
+ name: 'functest-opnfv-functest-smoke-cntt-pull'
+ <<: *functest-opnfv-functest-smoke-cntt-params
+ jobs:
+ - 'functest-opnfv-functest-smoke-cntt-{tag}-pull'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-cntt-{tag}-rmi'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-remove-images:
+ <<: *functest-containers
+
+- project:
+ name: 'functest-opnfv-functest-smoke-cntt-rmi'
+ <<: *functest-opnfv-functest-smoke-cntt-params
+ jobs:
+ - 'functest-opnfv-functest-smoke-cntt-{tag}-rmi'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-{tag}-pull'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-pull-containers:
+ <<: *functest-containers
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-pull'
+ <<: *functest-opnfv-functest-benchmarking-params
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-{tag}-pull'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-{tag}-rmi'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-remove-images:
+ <<: *functest-containers
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-rmi'
+ <<: *functest-opnfv-functest-benchmarking-params
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-{tag}-rmi'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-pull'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-pull-containers:
+ <<: *functest-containers
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-cntt-pull'
+ <<: *functest-opnfv-functest-benchmarking-cntt-params
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-cntt-{tag}-pull'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rmi'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-remove-images:
+ <<: *functest-containers
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-cntt-rmi'
+ <<: *functest-opnfv-functest-benchmarking-cntt-params
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-cntt-{tag}-rmi'
+
+- job-template:
+ name: 'functest-opnfv-functest-vnf-{tag}-pull'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-pull-containers:
+ <<: *functest-containers
+
+- project:
+ name: 'functest-opnfv-functest-vnf-pull'
+ <<: *functest-opnfv-functest-vnf-params
+ jobs:
+ - 'functest-opnfv-functest-vnf-{tag}-pull'
+
+- job-template:
+ name: 'functest-opnfv-functest-vnf-{tag}-rmi'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-remove-images:
+ <<: *functest-containers
+
+- project:
+ name: 'functest-opnfv-functest-vnf-rmi'
+ <<: *functest-opnfv-functest-vnf-params
+ jobs:
+ - 'functest-opnfv-functest-vnf-{tag}-rmi'
+
+- job-template:
+ name: 'functest-opnfv-functest-healthcheck-{tag}-{test}-run'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ - functest-build_tag:
+ build_tag: ''
+ - functest-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-run-containers:
+ <<: *functest-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-opnfv-functest-healthcheck'
+ <<: *functest-opnfv-functest-healthcheck-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-healthcheck'
+ test:
+ - connection_check
+ - tenantnetwork1
+ - tenantnetwork2
+ - vmready1
+ - vmready2
+ - singlevm1
+ - singlevm2
+ - vping_ssh
+ - vping_userdata
+ - cinder_test
+ - odl
+ - tempest_smoke
+ - tempest_horizon
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-opnfv-functest-healthcheck-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-{tag}-{test}-run'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ - functest-build_tag:
+ build_tag: ''
+ - functest-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-run-containers:
+ <<: *functest-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-opnfv-functest-smoke'
+ <<: *functest-opnfv-functest-smoke-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-smoke'
+ test:
+ - tempest_neutron
+ - tempest_cinder
+ - tempest_keystone
+ - tempest_heat
+ - tempest_telemetry
+ - rally_sanity
+ - refstack_compute
+ - refstack_object
+ - refstack_platform
+ - tempest_full
+ - tempest_scenario
+ - tempest_slow
+ - patrole_admin
+ - patrole_member
+ - patrole_reader
+ - tempest_barbican
+ - tempest_octavia
+ - tempest_cyborg
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-opnfv-functest-smoke-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-cntt-{tag}-{test}-run'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ - functest-build_tag:
+ build_tag: ''
+ - functest-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-run-containers:
+ <<: *functest-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-opnfv-functest-smoke-cntt'
+ <<: *functest-opnfv-functest-smoke-cntt-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-smoke-cntt'
+ test:
+ - tempest_neutron_cntt
+ - tempest_cinder_cntt
+ - tempest_keystone_cntt
+ - tempest_heat_cntt
+ - rally_sanity_cntt
+ - tempest_full_cntt
+ - tempest_scenario_cntt
+ - tempest_slow_cntt
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-opnfv-functest-smoke-cntt-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-{tag}-{test}-run'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ - functest-build_tag:
+ build_tag: ''
+ - functest-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-run-containers:
+ <<: *functest-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking'
+ <<: *functest-opnfv-functest-benchmarking-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-benchmarking'
+ test:
+ - rally_full
+ - rally_jobs
+ - vmtp
+ - shaker
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-{test}-run'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ - functest-build_tag:
+ build_tag: ''
+ - functest-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-run-containers:
+ <<: *functest-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-cntt'
+ <<: *functest-opnfv-functest-benchmarking-cntt-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-benchmarking-cntt'
+ test:
+ - rally_full_cntt
+ - rally_jobs_cntt
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-cntt-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-opnfv-functest-vnf-{tag}-{test}-run'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ - functest-build_tag:
+ build_tag: ''
+ - functest-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-run-containers:
+ <<: *functest-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-opnfv-functest-vnf'
+ <<: *functest-opnfv-functest-vnf-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-vnf'
+ test:
+ - cloudify
+ - cloudify_ims
+ - heat_ims
+ - vyos_vrouter
+ - juju_epc
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-opnfv-functest-vnf-{tag}-{test}-run'
+
+- builder:
+ name: functest-zip
+ builders:
+ - shell: |
+ set +x
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo mkdir -p $WORKSPACE/results
+ sudo chown {uid}:{gid} $WORKSPACE/results
+ sudo docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/functest \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/functest \
+ -v /home/opnfv/functest/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ -e DASHBOARD_URL={DASHBOARD_URL} \
+ $image zip_campaign
+
+- job-template:
+ name: 'functest-{tag}-zip'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ - functest-build_tag:
+ build_tag: ''
+ - functest-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-zip:
+ <<: *functest-run-containers
+
+- project:
+ name: 'functest-zip'
+ <<: *functest-opnfv-functest-healthcheck-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-healthcheck'
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-{tag}-zip'
+
+- job-template:
+ name: 'functest-{tag}-daily'
+ project-type: multijob
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ - functest-build_tag:
+ build_tag: ''
+ - functest-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
+ properties:
+ - build-blocker:
+ blocking-jobs:
+ - ^functest-(pi-)*{tag}-(daily|docker|review)$
+ # yamllint enable rule:indentation
+ builders:
+ - multijob:
+ name: remove former images
+ projects:
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-rmi'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-rmi'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-rmi'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-{tag}-rmi'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rmi'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-vnf-{tag}-rmi'
+ <<: *functest-jobs
+ - multijob:
+ name: pull containers
+ projects:
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-pull'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-pull'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-pull'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-{tag}-pull'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-pull'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-vnf-{tag}-pull'
+ <<: *functest-jobs
+ - multijob:
+ name: opnfv/functest-healthcheck:{tag}
+ projects:
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-connection_check-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-tenantnetwork1-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-tenantnetwork2-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-vmready1-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-vmready2-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-singlevm1-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-singlevm2-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-vping_ssh-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-vping_userdata-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-cinder_test-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-odl-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-tempest_smoke-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-tempest_horizon-run'
+ <<: *functest-jobs
+ - multijob:
+ name: opnfv/functest-smoke:{tag}
+ projects:
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_neutron-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_cinder-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_keystone-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_heat-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_telemetry-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-rally_sanity-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-refstack_compute-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-refstack_object-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-refstack_platform-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_full-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_scenario-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_slow-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-patrole_admin-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-patrole_member-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-patrole_reader-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_barbican-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_octavia-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_cyborg-run'
+ <<: *functest-jobs
+ - multijob:
+ name: opnfv/functest-smoke-cntt:{tag}
+ projects:
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_neutron_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_cinder_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_keystone_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_heat_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-rally_sanity_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_full_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_scenario_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_slow_cntt-run'
+ <<: *functest-jobs
+ - multijob:
+ name: opnfv/functest-benchmarking:{tag}
+ projects:
+ - name: 'functest-opnfv-functest-benchmarking-{tag}-rally_full-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-{tag}-rally_jobs-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-{tag}-vmtp-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-{tag}-shaker-run'
+ <<: *functest-jobs
+ - multijob:
+ name: opnfv/functest-benchmarking-cntt:{tag}
+ projects:
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rally_full_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rally_jobs_cntt-run'
+ <<: *functest-jobs
+ - multijob:
+ name: opnfv/functest-vnf:{tag}
+ projects:
+ - name: 'functest-opnfv-functest-vnf-{tag}-cloudify-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-vnf-{tag}-cloudify_ims-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-vnf-{tag}-heat_ims-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-vnf-{tag}-vyos_vrouter-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-vnf-{tag}-juju_epc-run'
+ <<: *functest-jobs
+ - multijob:
+ name: dump all campaign data
+ projects:
+ - name: 'functest-{tag}-zip'
+ <<: *functest-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-daily'
+ <<: *functest-params
+ jobs:
+ - 'functest-{tag}-daily'
+
+- view:
+ name: functest
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-[a-z0-9.-]+-daily$
+
+- functest-build-containers: &functest-build-containers
+ name: 'functest-build-containers'
+ <<: *functest-containers
+ ref_arg: '{ref_arg}'
+ path: '{path}'
+ build_args: '{build_args}'
+ from: '{from}'
+
+- builder:
+ name: functest-build-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ build_args=""
+ if [ "{build_args}" != "None" ]; then
+ for i in $(echo {build_args} | tr -d '[]' |sed "s/, / /g" ); \
+ do build_args="--build-arg $i $build_args"; done
+ fi
+ if [ "{ref_arg}" != "None" ]; then
+ build_args="$build_args --build-arg {ref_arg}={ref}"
+ fi
+ cd {path}
+ if [ "{from}" != "None" ]; then
+ sed -i {from} Dockerfile
+ fi
+ sudo docker build $build_args \
+ --pull=false --no-cache --force-rm=true \
+ -t $image .
+
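In the gate jobs below, ref_arg is BRANCH (except for the vnf container, which leaves it empty) and ref is $GERRIT_REFSPEC, while build_args and the from sed expression stay empty with the parameters used in this file, so the only extra build argument is the refspec under review. A hedged sketch of the command the healthcheck gate ends up composing, with a made-up Gerrit change:

    #!/bin/bash
    # Hypothetical rendering for functest-opnfv-functest-healthcheck-latest-gate:
    # repo=opnfv, port=None, container=functest-healthcheck, tag=latest,
    # ref_arg=BRANCH, build_args=None, from=None, ref=$GERRIT_REFSPEC.
    GERRIT_REFSPEC=refs/changes/43/71243/1   # made-up change number/patchset
    image=opnfv/functest-healthcheck:latest
    build_args="--build-arg BRANCH=$GERRIT_REFSPEC"
    # The real builder cd's into docker/healthcheck of the checkout before building.
    echo sudo docker build $build_args \
      --pull=false --no-cache --force-rm=true \
      -t $image .
    # -> sudo docker build --build-arg BRANCH=refs/changes/43/71243/1 ... -t opnfv/functest-healthcheck:latest .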
+- scm:
+ name: functest-scm
+ scm:
+ - git:
+ url: 'https://gerrit.opnfv.org/gerrit/functest'
+ refspec: '+refs/heads/*:refs/remotes/origin/* +refs/changes/*:refs/changes/*'
+ branches:
+ - '{ref}'
+
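This refspec makes both branch heads and Gerrit change refs fetchable, which is what lets the tox and gate jobs below check out $GERRIT_REFSPEC directly. Roughly the equivalent plain-git sequence, with a hypothetical change 71243, patchset 1 (fetching every change ref as the wildcard does is heavier than strictly needed, but mirrors the configuration):

    #!/bin/bash
    git init functest && cd functest
    git fetch https://gerrit.opnfv.org/gerrit/functest \
      '+refs/heads/*:refs/remotes/origin/*' '+refs/changes/*:refs/changes/*'
    git checkout refs/changes/43/71243/1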
+- functest-dep: &functest-dep
+ name: 'functest-containers'
+ repo: '{repo}'
+ container: '{container}'
+ port: '{port}'
+ tag: '{dependency}'
+
+- job-template:
+ name: 'functest-{tag}-dep-pull'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-pull-containers:
+ <<: *functest-dep
+
+- functest-dep-params: &functest-dep-params
+ name: 'functest-dep-params'
+ repo: '_'
+ container: 'alpine'
+ port:
+ tag:
+ - latest:
+ dependency: 3.14
+ - zed:
+ dependency: 3.14
+ - yoga:
+ dependency: 3.14
+ - xena:
+ dependency: 3.14
+ - wallaby:
+ dependency: 3.13
+
+- project:
+ name: 'functest-dep-pull'
+ <<: *functest-dep-params
+ jobs:
+ - 'functest-{tag}-dep-pull'
+
+- job-template:
+ name: 'functest-{tag}-dep-rmi'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-remove-images:
+ <<: *functest-dep
+
+- project:
+ name: 'functest-dep-rmi'
+ <<: *functest-dep-params
+ jobs:
+ - 'functest-{tag}-dep-rmi'
+
+- builder:
+ name: functest-tox
+ builders:
+ - shell: |
+ set +x
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install software-properties-common gpg -y
+ sudo add-apt-repository -y ppa:deadsnakes/ppa
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install \
+ libxml2-dev libxslt-dev libffi-dev libjpeg-dev \
+ python3.10 python3.10-dev python3.10-distutils \
+ python3.9 python3.9-dev python3.9-distutils \
+ python3.8 python3.8-dev python3.8-distutils \
+ python3-pip enchant-2 -y
+ sudo pip3 install tox
+
+ tox
+
+- job-template:
+ name: 'functest-{tag}-tox'
+ scm:
+ - functest-scm:
+ ref: $GERRIT_REFSPEC
+ triggers:
+ - functest-patchset-created:
+ branch: '{branch}'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-tox:
+
+- project:
+ name: functest-tox
+ <<: *functest-params
+ jobs:
+ - 'functest-{tag}-tox'
+
+- job-template:
+ name: 'functest-opnfv-functest-core-{tag}-gate'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
+
+- project:
+ name: functest-opnfv-functest-core-gate
+ <<: *functest-params
+ repo: opnfv
+ port:
+ container: functest-core
+ ref_arg: BRANCH
+ path: docker/core
+ jobs:
+ - 'functest-opnfv-functest-core-{tag}-gate'
+
+- job-template:
+ name: 'functest-opnfv-functest-healthcheck-{tag}-gate'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
+
+- project:
+ name: functest-opnfv-functest-healthcheck-gate
+ <<: *functest-params
+ repo: opnfv
+ port:
+ container: functest-healthcheck
+ ref_arg: BRANCH
+ path: docker/healthcheck
+ jobs:
+ - 'functest-opnfv-functest-healthcheck-{tag}-gate'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-{tag}-gate'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
+
+- project:
+ name: functest-opnfv-functest-smoke-gate
+ <<: *functest-params
+ repo: opnfv
+ port:
+ container: functest-smoke
+ ref_arg: BRANCH
+ path: docker/smoke
+ jobs:
+ - 'functest-opnfv-functest-smoke-{tag}-gate'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-{tag}-gate'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
+
+- project:
+ name: functest-opnfv-functest-benchmarking-gate
+ <<: *functest-params
+ repo: opnfv
+ port:
+ container: functest-benchmarking
+ ref_arg: BRANCH
+ path: docker/benchmarking
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-{tag}-gate'
+
+- job-template:
+ name: 'functest-opnfv-functest-vnf-{tag}-gate'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
+
+- project:
+ name: functest-opnfv-functest-vnf-gate
+ <<: *functest-params
+ repo: opnfv
+ port:
+ container: functest-vnf
+ ref_arg:
+ path: docker/vnf
+ jobs:
+ - 'functest-opnfv-functest-vnf-{tag}-gate'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-cntt-{tag}-gate'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
+
+- project:
+ name: functest-opnfv-functest-smoke-cntt-gate
+ <<: *functest-params
+ repo: opnfv
+ port:
+ container: functest-smoke-cntt
+ ref_arg: BRANCH
+ path: docker/smoke-cntt
+ jobs:
+ - 'functest-opnfv-functest-smoke-cntt-{tag}-gate'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-gate'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
+
+- project:
+ name: functest-opnfv-functest-benchmarking-cntt-gate
+ <<: *functest-params
+ repo: opnfv
+ port:
+ container: functest-benchmarking-cntt
+ ref_arg: BRANCH
+ path: docker/benchmarking-cntt
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-cntt-{tag}-gate'
+
+- trigger:
+ name: functest-patchset-created
+ triggers:
+ - gerrit:
+ trigger-on:
+ - patchset-created-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'functest'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+
+- job-template:
+ name: 'functest-{tag}-review'
+ project-type: multijob
+ triggers:
+ - functest-patchset-created:
+ branch: '{branch}'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ - functest-build_tag:
+ build_tag: ''
+ - functest-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
+ properties:
+ - build-blocker:
+ blocking-jobs:
+ - ^functest-(pi-)*{tag}-(daily|docker|review)$
+ # yamllint enable rule:indentation
+ builders:
+ - multijob:
+ name: remove former images
+ projects:
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-rmi'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-rmi'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-rmi'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-{tag}-rmi'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rmi'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-vnf-{tag}-rmi'
+ <<: *functest-jobs
+ - multijob:
+ name: remove dependency
+ projects:
+ - name: 'functest-{tag}-dep-rmi'
+ <<: *functest-jobs
+ - multijob:
+ name: pull dependency
+ projects:
+ - name: 'functest-{tag}-dep-pull'
+ <<: *functest-jobs
+ - multijob:
+ name: build opnfv/functest-core
+ projects:
+ - name: 'functest-opnfv-functest-core-{tag}-gate'
+ <<: *functest-jobs
+ - multijob:
+ name: build containers
+ projects:
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-gate'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-gate'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-{tag}-gate'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-vnf-{tag}-gate'
+ <<: *functest-jobs
+ - multijob:
+ name: build cntt containers
+ projects:
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-gate'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-gate'
+ <<: *functest-jobs
+ - multijob:
+ name: opnfv/functest-healthcheck:{tag}
+ projects:
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-connection_check-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-tenantnetwork1-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-tenantnetwork2-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-vmready1-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-vmready2-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-singlevm1-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-singlevm2-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-vping_ssh-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-vping_userdata-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-cinder_test-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-odl-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-tempest_smoke-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-tempest_horizon-run'
+ <<: *functest-jobs
+ - multijob:
+ name: opnfv/functest-smoke:{tag}
+ projects:
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_neutron-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_cinder-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_keystone-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_heat-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_telemetry-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-rally_sanity-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-refstack_compute-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-refstack_object-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-refstack_platform-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_full-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_scenario-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_slow-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-patrole_admin-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-patrole_member-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-patrole_reader-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_barbican-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_octavia-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_cyborg-run'
+ <<: *functest-jobs
+ - multijob:
+ name: opnfv/functest-smoke-cntt:{tag}
+ projects:
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_neutron_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_cinder_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_keystone_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_heat_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-rally_sanity_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_full_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_scenario_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_slow_cntt-run'
+ <<: *functest-jobs
+ - multijob:
+ name: opnfv/functest-benchmarking:{tag}
+ projects:
+ - name: 'functest-opnfv-functest-benchmarking-{tag}-rally_full-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-{tag}-rally_jobs-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-{tag}-vmtp-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-{tag}-shaker-run'
+ <<: *functest-jobs
+ - multijob:
+ name: opnfv/functest-benchmarking-cntt:{tag}
+ projects:
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rally_full_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rally_jobs_cntt-run'
+ <<: *functest-jobs
+ - multijob:
+ name: opnfv/functest-vnf:{tag}
+ projects:
+ - name: 'functest-opnfv-functest-vnf-{tag}-cloudify-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-vnf-{tag}-cloudify_ims-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-vnf-{tag}-heat_ims-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-vnf-{tag}-vyos_vrouter-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-vnf-{tag}-juju_epc-run'
+ <<: *functest-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-review'
+ <<: *functest-params
+ jobs:
+ - 'functest-{tag}-review'
+
+- view:
+ name: functest-review
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-[a-z0-9.-]+-review$
+
+- view:
+ name: functest-tox
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-[a-z0-9.-]+-tox$
+
+- builder:
+ name: functest-push-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker push $image
+
+- trigger:
+ name: functest-commit
+ triggers:
+ - pollscm:
+ cron: "*/30 * * * *"
+
+- job-template:
+ name: 'functest-opnfv-functest-core-{tag}-build'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: '{branch}'
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-push-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-opnfv-functest-core-build
+ <<: *functest-params
+ repo: opnfv
+ container: functest-core
+ port:
+ ref_arg: BRANCH
+ path: docker/core
+ jobs:
+ - 'functest-opnfv-functest-core-{tag}-build'
+
+- job-template:
+ name: 'functest-opnfv-functest-healthcheck-{tag}-build'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: '{branch}'
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-push-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-opnfv-functest-healthcheck-build
+ <<: *functest-params
+ repo: opnfv
+ container: functest-healthcheck
+ port:
+ ref_arg: BRANCH
+ path: docker/healthcheck
+ jobs:
+ - 'functest-opnfv-functest-healthcheck-{tag}-build'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-{tag}-build'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: '{branch}'
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-push-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-opnfv-functest-smoke-build
+ <<: *functest-params
+ repo: opnfv
+ container: functest-smoke
+ port:
+ ref_arg: BRANCH
+ path: docker/smoke
+ jobs:
+ - 'functest-opnfv-functest-smoke-{tag}-build'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-{tag}-build'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: '{branch}'
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-push-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-opnfv-functest-benchmarking-build
+ <<: *functest-params
+ repo: opnfv
+ container: functest-benchmarking
+ port:
+ ref_arg: BRANCH
+ path: docker/benchmarking
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-{tag}-build'
+
+- job-template:
+ name: 'functest-opnfv-functest-vnf-{tag}-build'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: '{branch}'
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-push-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-opnfv-functest-vnf-build
+ <<: *functest-params
+ repo: opnfv
+ container: functest-vnf
+ port:
+ ref_arg:
+ path: docker/vnf
+ jobs:
+ - 'functest-opnfv-functest-vnf-{tag}-build'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-cntt-{tag}-build'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: '{branch}'
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-push-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-opnfv-functest-smoke-cntt-build
+ <<: *functest-params
+ repo: opnfv
+ container: functest-smoke-cntt
+ port:
+ ref_arg: BRANCH
+ path: docker/smoke-cntt
+ jobs:
+ - 'functest-opnfv-functest-smoke-cntt-{tag}-build'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-build'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: '{branch}'
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-push-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-opnfv-functest-benchmarking-cntt-build
+ <<: *functest-params
+ repo: opnfv
+ container: functest-benchmarking-cntt
+ port:
+ ref_arg: BRANCH
+ path: docker/benchmarking-cntt
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-cntt-{tag}-build'
+
+- job-template:
+ name: 'functest-{tag}-docker'
+ project-type: multijob
+ triggers:
+ - functest-commit
+ scm:
+ - functest-scm:
+ ref: '{branch}'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
+ properties:
+ - build-blocker:
+ blocking-jobs:
+ - ^functest-(pi-)*{tag}-(daily|docker|review)$
+ # yamllint enable rule:indentation
+ builders:
+ - multijob:
+ name: remove dependency
+ projects:
+ - name: 'functest-{tag}-dep-rmi'
+ <<: *functest-jobs
+ - multijob:
+ name: pull dependency
+ projects:
+ - name: 'functest-{tag}-dep-pull'
+ <<: *functest-jobs
+ - multijob:
+ name: build opnfv/functest-core
+ projects:
+ - name: 'functest-opnfv-functest-core-{tag}-build'
+ <<: *functest-jobs
+ - multijob:
+ name: build containers
+ projects:
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-build'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-build'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-{tag}-build'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-vnf-{tag}-build'
+ <<: *functest-jobs
+ - multijob:
+ name: build cntt containers
+ projects:
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-build'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-build'
+ <<: *functest-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-docker'
+ <<: *functest-params
+ jobs:
+ - 'functest-{tag}-docker'
+
+- builder:
+ name: functest-trivy
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sudo sh -s -- -b .
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ ./trivy image --exit-code 1 $image
+
+- job-template:
+ name: 'functest-opnfv-functest-core-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-trivy:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-core-trivy'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-core'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-core-{tag}-trivy'
+
+- job-template:
+ name: 'functest-opnfv-functest-healthcheck-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-trivy:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-healthcheck-trivy'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-healthcheck'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-healthcheck-{tag}-trivy'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-trivy:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-smoke-trivy'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-smoke'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-smoke-{tag}-trivy'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-trivy:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-trivy'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-benchmarking'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-{tag}-trivy'
+
+- job-template:
+ name: 'functest-opnfv-functest-vnf-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-trivy:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-vnf-trivy'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-vnf'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-vnf-{tag}-trivy'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-cntt-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-trivy:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-smoke-cntt-trivy'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-smoke-cntt'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-smoke-cntt-{tag}-trivy'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-trivy:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-cntt-trivy'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-benchmarking-cntt'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-cntt-{tag}-trivy'
+
+- builder:
+ name: functest-grype
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sudo sh -s -- -b .
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ ./grype -q $image
+
+- job-template:
+ name: 'functest-opnfv-functest-core-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-grype:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-core-grype'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-core'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-core-{tag}-grype'
+
+- job-template:
+ name: 'functest-opnfv-functest-healthcheck-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-grype:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-healthcheck-grype'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-healthcheck'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-healthcheck-{tag}-grype'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-grype:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-smoke-grype'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-smoke'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-smoke-{tag}-grype'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-grype:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-grype'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-benchmarking'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-{tag}-grype'
+
+- job-template:
+ name: 'functest-opnfv-functest-vnf-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-grype:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-vnf-grype'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-vnf'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-vnf-{tag}-grype'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-cntt-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-grype:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-smoke-cntt-grype'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-smoke-cntt'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-smoke-cntt-{tag}-grype'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-grype:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-cntt-grype'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-benchmarking-cntt'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-cntt-{tag}-grype'
+
+- builder:
+ name: functest-sbom
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ sudo mkdir -p ~/.docker
+ curl -sSfL https://raw.githubusercontent.com/docker/sbom-cli-plugin/main/install.sh | sudo sh -s --
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker sbom $image
+
+- job-template:
+ name: 'functest-opnfv-functest-core-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-sbom:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-core-sbom'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-core'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-core-{tag}-sbom'
+
+- job-template:
+ name: 'functest-opnfv-functest-healthcheck-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-sbom:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-healthcheck-sbom'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-healthcheck'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-healthcheck-{tag}-sbom'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-sbom:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-smoke-sbom'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-smoke'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-smoke-{tag}-sbom'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-sbom:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-sbom'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-benchmarking'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-{tag}-sbom'
+
+- job-template:
+ name: 'functest-opnfv-functest-vnf-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-sbom:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-vnf-sbom'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-vnf'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-vnf-{tag}-sbom'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-cntt-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-sbom:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-smoke-cntt-sbom'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-smoke-cntt'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-smoke-cntt-{tag}-sbom'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-sbom:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-cntt-sbom'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-benchmarking-cntt'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-cntt-{tag}-sbom'
+
+- view:
+ name: functest-docker
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-[a-z0-9.-]+-docker$
+
+- view:
+ name: functest-trivy
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: (?!functest-kubernetes)(?!functest-pi)^functest-[a-z-0-9.]+-trivy$
+
+- view:
+ name: functest-grype
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: (?!functest-kubernetes)(?!functest-pi)^functest-[a-z-0-9.]+-grype$
+
+- view:
+ name: functest-sbom
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: (?!functest-kubernetes)(?!functest-pi)^functest-[a-z-0-9.]+-sbom$
diff --git a/jjb/functest/xtesting-ci-docker.yaml b/jjb/functest/xtesting-ci-docker.yaml
new file mode 100644
index 000000000..c439d1484
--- /dev/null
+++ b/jjb/functest/xtesting-ci-docker.yaml
@@ -0,0 +1,189 @@
+---
+- builder:
+ name: xtesting-ci-docker-builder
+ builders:
+ - shell: |
+ cd {dir}
+ sudo docker build --pull -t {image} .
+ sudo docker push {image}
+ sudo docker system prune --all -f
+
+- builder:
+ name: xtesting-ci-docker-multiarch-builder
+ builders:
+ - shell: |
+ sudo docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
+ git clone https://github.com/estesp/manifest-tool
+ (cd manifest-tool && git checkout v0.9.0 && \
+ sudo -E make && sudo -E make install)
+ (cd {dir} && \
+ sudo docker build --pull -t {image}:amd64-{tag} . && \
+ sudo docker push {image}:amd64-{tag})
+ (cd {dir} && \
+ git checkout Dockerfile
+ sed -i -e "s|{from_amd64}|{to_arm64}|g" Dockerfile
+ sudo docker build --pull -t {image}:arm64-{tag} . && \
+ sudo docker push {image}:arm64-{tag})
+ (cd {dir} && \
+ git checkout Dockerfile
+ sed -i -e "s|{from_amd64}|{to_arm}|g" Dockerfile
+ sudo docker build --pull -t {image}:arm-{tag} . && \
+ sudo docker push {image}:arm-{tag})
+ sudo manifest-tool push from-args \
+ --platforms linux/amd64,linux/arm,linux/arm64 \
+ --template {image}:ARCH-{tag} \
+ --target {image}:{tag}
+ sudo docker system prune --all -f
+
+- scm:
+ name: xtesting-ci-docker-scm
+ scm:
+ - git:
+ url: https://github.com/collivier/xtesting-docker.git
+ git-config-name:
+ git-config-email:
+
+- trigger:
+ name: xtesting-ci-docker-trigger
+ triggers:
+ - pollscm:
+ cron: "H/30 * * * *"
+ - timed: '@daily'
+
+- parameter:
+ name: xtesting-ci-docker-parameter
+ parameters:
+ - label:
+ name: node
+ default: 'xtesting'
+
+- job-template:
+ name: xtesting-ci-docker-{stream}
+ scm:
+ - xtesting-ci-docker-scm
+ parameters:
+ - xtesting-ci-docker-parameter
+ triggers:
+ - xtesting-ci-docker-trigger
+ builders:
+ - xtesting-ci-docker-builder:
+ image: '{image}'
+ dir: '{dir}'
+
+- job-template:
+ name: xtesting-ci-docker-multitarch-{stream}
+ scm:
+ - xtesting-ci-docker-scm
+ parameters:
+ - xtesting-ci-docker-parameter
+ triggers:
+ - xtesting-ci-docker-trigger
+ builders:
+ - xtesting-ci-docker-multiarch-builder:
+ image: '{image}'
+ dir: '{dir}'
+ tag: '{tag}'
+ from_amd64: '{from_amd64}'
+ to_arm64: '{to_arm64}'
+ to_arm: '{to_arm}'
+
+- project:
+ name: xtesting-ci-docker
+ stream:
+ - jenkins-lts-slim:
+ dir: jenkins.debian
+ image: opnfv/xtesting-jenkins:lts-slim
+ - jenkins-lts-alpine:
+ dir: jenkins
+ image: opnfv/xtesting-jenkins:lts-alpine
+ - s3www-v0.5.3:
+ dir: s3www
+ image: opnfv/xtesting-s3www:v0.5.3
+ jobs:
+ - xtesting-ci-docker-{stream}
+
+- project:
+ name: xtesting-ci-docker-multiarch
+ stream:
+ - jenkins-agent-debian:
+ dir: jenkins-agent
+ image: opnfv/xtesting-jenkins-agent
+ tag: 4.9-bullseye
+ from_amd64: debian:bullseye
+ to_arm64: arm64v8/debian:bullseye
+ to_arm: arm32v7/debian:bullseye
+ - jenkins-agent-ubuntu:
+ dir: jenkins-agent-ubuntu
+ image: opnfv/xtesting-jenkins-agent
+ tag: 4.9-jammy
+ from_amd64: ubuntu:jammy
+ to_arm64: arm64v8/ubuntu:jammy
+ to_arm: arm32v7/ubuntu:jammy
+ - jenkins-agent-auto:
+ dir: jenkins-agent-auto
+ image: opnfv/xtesting-jenkins-agent-auto
+ tag: 4.9-bullseye
+ from_amd64: opnfv/xtesting-jenkins-agent:4.9-bullseye
+ to_arm64: opnfv/xtesting-jenkins-agent:arm64-4.9-bullseye
+ to_arm: opnfv/xtesting-jenkins-agent:arm-4.9-bullseye
+ jobs:
+ - xtesting-ci-docker-multitarch-{stream}
+
+- builder:
+ name: xtesting-ci-docker-trivy-builder
+ builders:
+ - shell: |
+ curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sudo sh -s -- -b .
+ ./trivy image --exit-code 1 {image}
+
+- trigger:
+ name: xtesting-ci-docker-trivy-trigger
+ triggers:
+ - timed: '@daily'
+
+- parameter:
+ name: xtesting-ci-docker-trivy-parameter
+ parameters:
+ - label:
+ name: node
+ default: 'xtesting'
+
+- job-template:
+ name: 'xtesting-ci-docker-trivy-{stream}'
+ triggers:
+ - xtesting-ci-docker-trivy-trigger
+ parameters:
+ - xtesting-ci-docker-trivy-parameter
+ builders:
+ - xtesting-ci-docker-trivy-builder:
+ image: '{image}'
+
+- project:
+ name: 'xtesting-ci-docker-trivy'
+ stream:
+ - jenkins-lts-slim:
+ image: opnfv/xtesting-jenkins:lts-slim
+ - jenkins-lts-alpine:
+ image: opnfv/xtesting-jenkins:lts-alpine
+ - s3www-v0.5.3:
+ image: opnfv/xtesting-s3www:v0.5.3
+ - xtesting-jenkins-agent-4.9-bullseye:
+ image: opnfv/xtesting-jenkins-agent:4.9-bullseye
+ - xtesting-jenkins-agent-4.9-jammy:
+ image: opnfv/xtesting-jenkins-agent:4.9-jammy
+ - xtesting-jenkins-agent-auto-4.9-bullseye:
+ image: opnfv/xtesting-jenkins-agent-auto:4.9-bullseye
+ jobs:
+ - 'xtesting-ci-docker-trivy-{stream}'
+
+- view:
+ name: xtesting-ci-docker
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^xtesting-ci-docker.*$
diff --git a/jjb/functest/xtesting-ci-vm.yaml b/jjb/functest/xtesting-ci-vm.yaml
new file mode 100644
index 000000000..367598fe4
--- /dev/null
+++ b/jjb/functest/xtesting-ci-vm.yaml
@@ -0,0 +1,233 @@
+---
+- scm:
+ name: xtesting-ci-vm-scm
+ scm:
+ - git:
+ url: '{url}'
+ branches:
+ - '{ref}'
+
+- builder:
+ name: xtesting-ci-vm-builder
+ builders:
+ - shell: |
+ curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add -
+ echo "deb https://packages.cloud.google.com/apt cloud-sdk main" | sudo tee \
+ /etc/apt/sources.list.d/google-cloud-sdk.list
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ DEBIAN_FRONTEND=noninteractive sudo apt-get \
+ -o DPkg::Lock::Timeout=300 install python3-diskimage-builder -y
+ if [ "{project}" == "xtestingci" ]; then
+ export ELEMENTS_PATH=$(pwd)/elements
+ else
+ git init ansible-role-xtesting
+ (cd ansible-role-xtesting &&
+ git fetch --tags https://github.com/collivier/ansible-role-xtesting.git {role-version} &&
+ git checkout FETCH_HEAD)
+ export ELEMENTS_PATH=$(pwd)/elements:$(pwd)/ansible-role-xtesting/elements
+ fi
+ export DIB_XTESTINGCI_VERSION={role-version}
+ disk-image-create --image-size 20 -o {project}-{version}.qcow2 debian vm {project}
+ gsutil cp {project}-{version}.qcow2 gs://artifacts.opnfv.org/{project}/{project}-{version}.qcow2
+ rm -rf {project}-{version}.qcow2 {project}-{version}.d
+
+- trigger:
+ name: xtesting-ci-vm-trigger
+ triggers:
+ - timed: '@weekly'
+
+- parameter:
+ name: xtesting-ci-vm-parameter
+ parameters:
+ - label:
+ name: node
+ default: 'opnfv-build'
+
+- job-template:
+ name: '{project}-{version}-vm'
+ scm:
+ - xtesting-ci-vm-scm:
+ url: '{url}'
+ ref: '{ref}'
+ triggers:
+ - xtesting-ci-vm-trigger
+ parameters:
+ - xtesting-ci-vm-parameter
+ properties:
+ - build-blocker:
+ use-build-blocker: true
+ blocking-level: 'NODE'
+ blocking-jobs:
+ - '^.*-vm$'
+ builders:
+ - xtesting-ci-vm-builder:
+ project: '{project}'
+ version: '{version}'
+ role-version: '{role-version}'
+
+- project:
+ name: 'xtesting-ci-vm'
+ project:
+ - xtestingci:
+ url: https://github.com/collivier/ansible-role-xtesting.git
+ - xtesting:
+ url: https://git.opnfv.org/functest-xtesting.git
+ - functest:
+ url: https://git.opnfv.org/functest.git
+ - functest-kubernetes:
+ url: https://git.opnfv.org/functest-kubernetes.git
+ version:
+ - latest:
+ ref: master
+ role-version: master
+ - '2023.2':
+ ref: stable/2023.2
+ role-version: master
+ - '2023.1':
+ ref: stable/2023.1
+ role-version: master
+ - zed:
+ ref: stable/zed
+ role-version: master
+ - yoga:
+ ref: stable/yoga
+ role-version: master
+ - xena:
+ ref: stable/xena
+ role-version: master
+ - wallaby:
+ ref: stable/wallaby
+ role-version: master
+ - leguer:
+ ref: stable/leguer
+ role-version: master
+ - kali:
+ ref: stable/kali
+ role-version: master
+ - jerma:
+ ref: stable/jerma
+ role-version: master
+ - v1.28:
+ ref: stable/v1.28
+ role-version: master
+ - v1.27:
+ ref: stable/v1.27
+ role-version: master
+ - v1.26:
+ ref: stable/v1.26
+ role-version: master
+ - v1.25:
+ ref: stable/v1.25
+ role-version: master
+ - v1.24:
+ ref: stable/v1.24
+ role-version: master
+ - v1.23:
+ ref: stable/v1.23
+ role-version: master
+ - v1.22:
+ ref: stable/v1.22
+ role-version: master
+ exclude:
+ - project: xtestingci
+ version: '2023.2'
+ - project: functest
+ version: '2023.2'
+ - project: functest-kubernetes
+ version: '2023.2'
+ - project: xtestingci
+ version: '2023.1'
+ - project: functest
+ version: '2023.1'
+ - project: functest-kubernetes
+ version: '2023.1'
+ - project: xtestingci
+ version: zed
+ - project: functest-kubernetes
+ version: zed
+ - project: xtestingci
+ version: yoga
+ - project: functest-kubernetes
+ version: yoga
+ - project: xtestingci
+ version: xena
+ - project: functest-kubernetes
+ version: xena
+ - project: xtestingci
+ version: wallaby
+ - project: functest-kubernetes
+ version: wallaby
+ - project: xtestingci
+ version: leguer
+ - project: xtesting
+ version: leguer
+ - project: functest-kubernetes
+ version: leguer
+ - project: xtestingci
+ version: kali
+ - project: xtesting
+ version: kali
+ - project: functest-kubernetes
+ version: kali
+ - project: xtestingci
+ version: jerma
+ - project: xtesting
+ version: jerma
+ - project: functest-kubernetes
+ version: jerma
+ - project: xtestingci
+ version: v1.28
+ - project: functest
+ version: v1.28
+ - project: xtesting
+ version: v1.28
+ - project: xtestingci
+ version: v1.27
+ - project: functest
+ version: v1.27
+ - project: xtesting
+ version: v1.27
+ - project: xtestingci
+ version: v1.26
+ - project: functest
+ version: v1.26
+ - project: xtesting
+ version: v1.26
+ - project: xtestingci
+ version: v1.25
+ - project: functest
+ version: v1.25
+ - project: xtesting
+ version: v1.25
+ - project: xtestingci
+ version: v1.24
+ - project: functest
+ version: v1.24
+ - project: xtesting
+ version: v1.24
+ - project: xtestingci
+ version: v1.23
+ - project: functest
+ version: v1.23
+ - project: xtesting
+ version: v1.23
+ - project: xtestingci
+ version: v1.22
+ - project: functest
+ version: v1.22
+ - project: xtesting
+ version: v1.22
+ jobs:
+ - '{project}-{version}-vm'
+
+- view:
+ name: xtesting-ci-vm
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^.*-vm$
diff --git a/jjb/functest/xtesting-ci.yaml b/jjb/functest/xtesting-ci.yaml
new file mode 100644
index 000000000..fa7fe8265
--- /dev/null
+++ b/jjb/functest/xtesting-ci.yaml
@@ -0,0 +1,182 @@
+---
+- builder:
+ name: xtesting-ci-tests
+ builders:
+ - shell: |
+ set +x
+ sudo apt-get -o DPkg::Lock::Timeout=300 update
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install python3-pip docker.io podman -y
+ curl -Lo ./kind https://kind.sigs.k8s.io/dl/v0.17.0/kind-linux-amd64
+ chmod +x ./kind
+ sudo mv ./kind /usr/local/bin/kind
+ kind delete clusters xtesting jenkins gitlab || true
+ sudo docker ps -aq |xargs sudo docker stop || true
+ sudo docker ps -aq |xargs sudo docker rm || true
+ sudo docker system prune -f --all || true
+ sudo rm -f /etc/systemd/system/docker.service.d/http-proxy.conf
+ sudo systemctl daemon-reload
+ sudo systemctl restart docker
+ sudo podman ps -aq |xargs sudo podman stop || true
+ sudo podman ps -aq |xargs sudo podman rm || true
+ sudo rm -rfv /data /tmp/xtesting*
+ sudo apt-get install ansible patch -y
+ rm -rf ~/.ansible/roles/collivier.xtesting
+ case {release} in
+ stable)
+ ansible-galaxy install -f collivier.xtesting ;;
+ *)
+ ansible-galaxy install -f git+https://github.com/collivier/ansible-role-xtesting.git,{release}
+ mv ~/.ansible/roles/ansible-role-xtesting ~/.ansible/roles/collivier.xtesting ;;
+ esac
+ (cd ~/.ansible/roles/collivier.xtesting; patch -p1 < tests/docker_config_json.patch)
+ ansible-galaxy collection install -f -r ~/.ansible/roles/collivier.xtesting/requirements.yml
+ ansible-playbook -vvvv ~/.ansible/roles/collivier.xtesting/tests/{playbook}.yml
+
+- builder:
+ name: xtesting-ci-tests-remote
+ builders:
+ - shell: |
+ set +x
+ ssh opnfv@10.200.140.224 << EOF
+ sudo apt-get -o DPkg::Lock::Timeout=300 update
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install python3-pip docker.io podman -y
+ curl -Lo ./kind https://kind.sigs.k8s.io/dl/v0.17.0/kind-linux-amd64
+ chmod +x ./kind
+ sudo mv ./kind /usr/local/bin/kind
+ kind delete clusters xtesting jenkins gitlab || true
+ sudo docker ps -aq |xargs sudo docker stop || true
+ sudo docker ps -aq |xargs sudo docker rm || true
+ sudo docker system prune -f --all || true
+ sudo rm -f /etc/systemd/system/docker.service.d/http-proxy.conf
+ sudo systemctl daemon-reload
+ sudo systemctl restart docker
+ sudo podman ps -aq |xargs sudo podman stop || true
+ sudo podman ps -aq |xargs sudo podman rm || true
+ sudo rm -rfv /data /tmp/xtesting*
+ EOF
+ sudo apt-get install ansible patch -y
+ rm -rf ~/.ansible/roles/collivier.xtesting
+ case {release} in
+ stable)
+ ansible-galaxy install -f collivier.xtesting ;;
+ *)
+ ansible-galaxy install -f git+https://github.com/collivier/ansible-role-xtesting.git,{release}
+ mv ~/.ansible/roles/ansible-role-xtesting ~/.ansible/roles/collivier.xtesting ;;
+ esac
+ (cd ~/.ansible/roles/collivier.xtesting; patch -p1 < tests/docker_config_json.patch)
+ ansible-galaxy collection install -f -r ~/.ansible/roles/collivier.xtesting/requirements.yml
+ sed -i "s/127.0.0.1/10.200.140.224/g" ~/.ansible/roles/collivier.xtesting/tests/{playbook}.yml
+ echo 10.200.140.224 ansible_host=10.200.140.224 ansible_user=opnfv > /tmp/inventory
+ ansible-playbook -i /tmp/inventory -vvvv ~/.ansible/roles/collivier.xtesting/tests/{playbook}.yml
+
+- parameter:
+ name: xtesting-ci-node
+ parameters:
+ - label:
+ name: node
+ default: '{node}'
+
+- job-template:
+ name: 'xtesting-ci-tests-{release}-{playbook}'
+ triggers:
+ - timed: '@daily'
+ parameters:
+ - xtesting-ci-node:
+ node: '{node}'
+ properties:
+ - build-blocker:
+ use-build-blocker: true
+ blocking-level: 'NODE'
+ blocking-jobs:
+ - '^xtesting-ci-tests-.*$'
+ builders:
+ - xtesting-ci-tests:
+ playbook: '{playbook}'
+ release: '{release}'
+
+- job-template:
+ name: 'xtesting-ci-tests-remote-{release}-{playbook}'
+ triggers:
+ - timed: '@daily'
+ parameters:
+ - xtesting-ci-node:
+ node: '{node}'
+ properties:
+ - build-blocker:
+ use-build-blocker: true
+ blocking-level: 'NODE'
+ blocking-jobs:
+ - '^xtesting-ci-tests-.*$'
+ builders:
+ - xtesting-ci-tests-remote:
+ playbook: '{playbook}'
+ release: '{release}'
+
+- playbook: &playbook
+ name: 'playbook'
+ playbook:
+ - all
+ - podman
+ - proxy
+ - proxy2
+ - radosgw
+ - nexus
+ - repo
+ - twice
+ - macro
+ - branch
+ - jjb
+ - gitlab.insert
+ - jenkins_kind
+ - jenkins_kind2
+ - gitlab_kind
+ - chainedci
+ - kubernetes0
+ - kubernetes1
+ - kubernetes2
+ - proxy_kubernetes
+ - proxy_kubernetes2
+ - k8s_jenkins_kind0
+ - k8s_jenkins_kind1
+ - k8s_jenkins_kind2
+ - k8s_jenkins_kind3
+ - k8s_gitlab0
+ - k8s_gitlab1
+ - k8s_gitlab_kind0
+ - k8s_gitlab_kind1
+ - k8s_gitlab_kind2
+ - k8s_gitlab_kind3
+
+- project:
+ name: xtesting-ci-tests
+ <<: *playbook
+ node: xtestingci
+ release:
+ - stable
+ - master
+ jobs:
+ - 'xtesting-ci-tests-{release}-{playbook}'
+
+- project:
+ name: xtesting-ci-tests-remote
+ <<: *playbook
+ node: xtesting
+ release:
+ - stable
+ - master
+ jobs:
+ - 'xtesting-ci-tests-remote-{release}-{playbook}'
+
+- view:
+ name: xtesting-ci
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^xtesting-ci-tests-.*$
diff --git a/jjb/functest/xtesting-docker.yaml b/jjb/functest/xtesting-docker.yaml
deleted file mode 100644
index 251c3954e..000000000
--- a/jjb/functest/xtesting-docker.yaml
+++ /dev/null
@@ -1,224 +0,0 @@
----
-##############################################
-# job configuration for docker build and push
-##############################################
-- project:
-
- name: xtesting-docker
-
- project: functest-xtesting
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - fraser:
- branch: 'stable/{stream}'
- disabled: false
-
- arch_tag:
- - 'amd64':
- slave_label: 'opnfv-build-ubuntu'
- - 'arm64':
- slave_label: 'opnfv-build-ubuntu-arm'
-
- # settings for jobs run in multijob phases
- build-job-settings: &build-job-settings
- current-parameters: false
- git-revision: true
- node-parameters: false
- predefined-parameters: |
- PUSH_IMAGE=$PUSH_IMAGE
- COMMIT_ID=$COMMIT_ID
- GERRIT_REFNAME=$GERRIT_REFNAME
- DOCKERFILE=$DOCKERFILE
- kill-phase-on: FAILURE
- abort-all-jobs: false
-
- manifest-job-settings: &manifest-job-settings
- current-parameters: false
- git-revision: true
- node-parameters: false
- predefined-parameters:
- GERRIT_REFNAME=$GERRIT_REFNAME
- kill-phase-on: FAILURE
- abort-all-jobs: false
-
- # yamllint enable rule:key-duplicates
- jobs:
- - "xtesting-docker-{stream}"
- - "xtesting-docker-build-{arch_tag}-{stream}"
- - "xtesting-docker-manifest-{stream}"
-
-########################
-# job templates
-########################
-- job-template:
- name: 'xtesting-docker-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- parameters:
- - xtesting-job-parameters:
- project: '{project}'
- branch: '{branch}'
- slave_label: 'opnfv-build-ubuntu'
- arch_tag: 'amd64'
-
- properties:
- - throttle:
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm
-
- triggers:
- - pollscm:
- cron: "*/30 * * * *"
- - gerrit-trigger-tag-created:
- project: '{project}'
-
- builders:
- - multijob:
- name: 'build xtesting images'
- execution-type: PARALLEL
- projects:
- - name: 'xtesting-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'xtesting-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - multijob:
- name: 'publish xtesting manifests'
- execution-type: PARALLEL
- projects:
- - name: 'xtesting-docker-manifest-{stream}'
- <<: *manifest-job-settings
-
- publishers:
- - 'xtesting-amd64-recipients'
- - 'xtesting-arm64-recipients'
-
-- job-template:
- name: 'xtesting-docker-build-{arch_tag}-{stream}'
- disabled: '{obj:disabled}'
- parameters:
- - xtesting-job-parameters:
- project: '{project}'
- branch: '{branch}'
- slave_label: '{slave_label}'
- arch_tag: '{arch_tag}'
- scm:
- - git-scm
- builders:
- - shell: |
- #!/bin/bash -ex
- sudo arch={arch_tag} bash ./build.sh
- exit $?
-
-- job-template:
- name: 'xtesting-docker-manifest-{stream}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build-ubuntu'
- description: 'Slave label on Jenkins'
- - string:
- name: PROJECT
- default: "{project}"
- description: "Project name used to enable job conditions"
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: REPO
- default: "opnfv"
- description: "Repository name for functest images"
-
-
- disabled: '{obj:disabled}'
-
- builders:
- - shell: |
- #!/bin/bash -ex
- case "{stream}" in
- "master")
- tag="latest" ;;
- *)
- tag="{stream}" ;;
- esac
- sudo manifest-tool push from-args \
- --platforms linux/amd64,linux/arm64 \
- --template $REPO/xtesting:ARCH-$tag \
- --target $REPO/xtesting:$tag
- exit $?
-
-- parameter:
- name: xtesting-job-parameters
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: '{slave_label}'
- description: 'Slave label on Jenkins'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: PUSH_IMAGE
- default: "true"
- description: "To enable/disable pushing the image to Dockerhub."
- - string:
- name: COMMIT_ID
- default: ""
- description: "commit id to make a snapshot docker image"
- - string:
- name: GERRIT_REFNAME
- default: ""
- description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
- - string:
- name: DOCKERFILE
- default: "Dockerfile"
- description: "Dockerfile to use for creating the image."
- - string:
- name: ARCH_TAG
- default: "{arch_tag}"
- description: "If set, this value will be added to the docker image tag as a prefix"
- - string:
- name: PROJECT
- default: "{project}"
- description: "Project name used to enable job conditions"
- - string:
- name: REPO
- default: "opnfv"
- description: "Repository name for functest images"
-
-# publisher macros
-- publisher:
- name: 'xtesting-arm64-recipients'
- publishers:
- - email:
- recipients: >
- cristina.pauna@enea.com
- alexandru.avadanii@enea.com
- delia.popescu@enea.com
-
-- publisher:
- name: 'xtesting-amd64-recipients'
- publishers:
- - email:
- recipients: >
- jalausuch@suse.com morgan.richomme@orange.com
- cedric.ollivier@orange.com feng.xiaowei@zte.com.cn
- juha.kosonen@nokia.com wangwulin@huawei.com
- valentin.boucher@kontron.com
diff --git a/jjb/functest/xtesting-pi.yaml b/jjb/functest/xtesting-pi.yaml
new file mode 100644
index 000000000..86ac98f30
--- /dev/null
+++ b/jjb/functest/xtesting-pi.yaml
@@ -0,0 +1,425 @@
+---
+- xtesting-pi-containers: &xtesting-pi-containers
+ name: 'xtesting-pi-containers'
+ repo: '{repo}'
+ port: '{port}'
+ container: '{container}'
+ tag: '{tag}'
+
+- xtesting-pi-params: &xtesting-pi-params
+ name: 'xtesting-pi-params'
+ tag:
+ - latest:
+ node: opnfv-build
+ - 2023.2:
+ node: opnfv-build
+ - 2023.1:
+ node: opnfv-build
+ - zed:
+ node: opnfv-build
+ - yoga:
+ node: opnfv-build
+ - xena:
+ node: opnfv-build
+ - wallaby:
+ node: opnfv-build
+ - arm-latest:
+ node: opnfv-build
+ - arm-2023.2:
+ node: opnfv-build
+ - arm-2023.1:
+ node: opnfv-build
+ - arm-zed:
+ node: opnfv-build
+ - arm-yoga:
+ node: opnfv-build
+ - arm-xena:
+ node: opnfv-build
+ - arm-wallaby:
+ node: opnfv-build
+ - arm64-latest:
+ node: opnfv-build
+ - arm64-2023.2:
+ node: opnfv-build
+ - arm64-2023.1:
+ node: opnfv-build
+ - arm64-zed:
+ node: opnfv-build
+ - arm64-yoga:
+ node: opnfv-build
+ - arm64-xena:
+ node: opnfv-build
+ - arm64-wallaby:
+ node: opnfv-build
+
+- xtesting-pi-ollivier-xtesting-params: &xtesting-pi-ollivier-xtesting-params
+ name: 'xtesting-pi-ollivier-xtesting-params'
+ repo: 'ollivier'
+ container: 'xtesting'
+ port:
+ tag:
+ - latest:
+ node: opnfv-build
+ - 2023.2:
+ node: opnfv-build
+ - 2023.1:
+ node: opnfv-build
+ - zed:
+ node: opnfv-build
+ - yoga:
+ node: opnfv-build
+ - xena:
+ node: opnfv-build
+ - wallaby:
+ node: opnfv-build
+ - arm-latest:
+ node: opnfv-build
+ - arm-2023.2:
+ node: opnfv-build
+ - arm-2023.1:
+ node: opnfv-build
+ - arm-zed:
+ node: opnfv-build
+ - arm-yoga:
+ node: opnfv-build
+ - arm-xena:
+ node: opnfv-build
+ - arm-wallaby:
+ node: opnfv-build
+ - arm64-latest:
+ node: opnfv-build
+ - arm64-2023.2:
+ node: opnfv-build
+ - arm64-2023.1:
+ node: opnfv-build
+ - arm64-zed:
+ node: opnfv-build
+ - arm64-yoga:
+ node: opnfv-build
+ - arm64-xena:
+ node: opnfv-build
+ - arm64-wallaby:
+ node: opnfv-build
+
+- xtesting-pi-jobs: &xtesting-pi-jobs
+ name: 'xtesting-pi-jobs'
+ current-parameters: true
+
+- parameter:
+ name: xtesting-pi-node
+ parameters:
+ - label:
+ name: node
+ default: '{node}'
+
+- parameter:
+ name: xtesting-pi-build_tag
+ parameters:
+ - random-string:
+ name: build_tag
+
+- xtesting-pi-run-containers: &xtesting-pi-run-containers
+ name: 'xtesting-pi-run-containers'
+ <<: *xtesting-pi-containers
+ privileged: '{privileged}'
+ volumes: '{volumes}'
+ env: '{env}'
+ network: '{network}'
+ uid: '{uid}'
+ gid: '{gid}'
+ published_ports: '{published_ports}'
+
+- builder:
+ name: xtesting-pi-pull-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker pull $image
+
+- builder:
+ name: xtesting-pi-run-containers
+ builders:
+ - shell: |
+ set +x
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo mkdir -p $WORKSPACE/results
+ sudo chown {uid}:{gid} $WORKSPACE/results
+ sudo docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/xtesting/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/xtesting/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -v /home/opnfv/xtesting/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ $image run_tests -t {test} -p -r
+
+- builder:
+ name: xtesting-pi-remove-images
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker rmi $image || true
+
+- job-template:
+ name: 'xtesting-pi-ollivier-xtesting-{tag}-pull'
+ parameters:
+ - xtesting-pi-node:
+ node: '{node}'
+ builders:
+ - xtesting-pi-pull-containers:
+ <<: *xtesting-pi-containers
+
+- project:
+ name: 'xtesting-pi-ollivier-xtesting-pull'
+ <<: *xtesting-pi-ollivier-xtesting-params
+ jobs:
+ - 'xtesting-pi-ollivier-xtesting-{tag}-pull'
+
+- job-template:
+ name: 'xtesting-pi-ollivier-xtesting-{tag}-rmi'
+ parameters:
+ - xtesting-pi-node:
+ node: '{node}'
+ builders:
+ - xtesting-pi-remove-images:
+ <<: *xtesting-pi-containers
+
+- project:
+ name: 'xtesting-pi-ollivier-xtesting-rmi'
+ <<: *xtesting-pi-ollivier-xtesting-params
+ jobs:
+ - 'xtesting-pi-ollivier-xtesting-{tag}-rmi'
+
+- job-template:
+ name: 'xtesting-pi-ollivier-xtesting-{tag}-{test}-run'
+ parameters:
+ - xtesting-pi-node:
+ node: '{node}'
+ - xtesting-pi-build_tag:
+ build_tag: ''
+ builders:
+ - xtesting-pi-run-containers:
+ <<: *xtesting-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'xtesting-pi-ollivier-xtesting'
+ <<: *xtesting-pi-ollivier-xtesting-params
+ volumes:
+ env:
+ published_ports:
+ container: 'xtesting'
+ test:
+ - first
+ - second
+ - third
+ - fourth
+ - fifth
+ - sixth
+ - eighth
+ - nineth
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ exclude:
+ - tag: wallaby
+ test: nineth
+ - tag: xena
+ test: nineth
+ jobs:
+ - 'xtesting-pi-ollivier-xtesting-{tag}-{test}-run'
+
+- builder:
+ name: xtesting-pi-zip
+ builders:
+ - shell: |
+ set +x
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo mkdir -p $WORKSPACE/results
+ sudo chown {uid}:{gid} $WORKSPACE/results
+ sudo docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/xtesting \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/xtesting \
+ -v /home/opnfv/xtesting/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ $image zip_campaign
+
+- job-template:
+ name: 'xtesting-pi-{tag}-zip'
+ parameters:
+ - xtesting-pi-node:
+ node: '{node}'
+ - xtesting-pi-build_tag:
+ build_tag: ''
+ builders:
+ - xtesting-pi-zip:
+ <<: *xtesting-pi-run-containers
+
+- project:
+ name: 'xtesting-pi-zip'
+ <<: *xtesting-pi-ollivier-xtesting-params
+ volumes:
+ env:
+ published_ports:
+ container: 'xtesting'
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'xtesting-pi-{tag}-zip'
+
+- job-template:
+ name: 'xtesting-pi-{tag}-daily'
+ project-type: multijob
+ triggers:
+ - timed: '@daily'
+ parameters:
+ - xtesting-pi-node:
+ node: '{node}'
+ - xtesting-pi-build_tag:
+ build_tag: ''
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
+ properties:
+ - build-blocker:
+ blocking-jobs:
+ - ^xtesting-(pi-)*{tag}-(daily|docker|review)$
+ # yamllint enable rule:indentation
+ builders:
+ - multijob:
+ name: remove former images
+ projects:
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-rmi'
+ <<: *xtesting-pi-jobs
+ - multijob:
+ name: pull containers
+ projects:
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-pull'
+ <<: *xtesting-pi-jobs
+ - multijob:
+ name: ollivier/xtesting:{tag}
+ projects:
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-first-run'
+ <<: *xtesting-pi-jobs
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-second-run'
+ <<: *xtesting-pi-jobs
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-third-run'
+ <<: *xtesting-pi-jobs
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-fourth-run'
+ <<: *xtesting-pi-jobs
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-fifth-run'
+ <<: *xtesting-pi-jobs
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-sixth-run'
+ <<: *xtesting-pi-jobs
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-eighth-run'
+ <<: *xtesting-pi-jobs
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-nineth-run'
+ <<: *xtesting-pi-jobs
+ - multijob:
+ name: dump all campaign data
+ projects:
+ - name: 'xtesting-pi-{tag}-zip'
+ <<: *xtesting-pi-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'xtesting-pi-daily'
+ <<: *xtesting-pi-params
+ jobs:
+ - 'xtesting-pi-{tag}-daily'
+
+- view:
+ name: xtesting-pi
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^xtesting-pi-[a-z-0-9.]+-daily$
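Every xtesting-pi builder above derives the Docker image reference from the {repo}, {port}, {container} and {tag} parameters with the same three-way test. A minimal standalone sketch of that resolution, using hypothetical values in place of the JJB substitutions:

    #!/bin/sh
    # Sketch of the image-reference resolution used by the xtesting-pi builders.
    # Values below are stand-ins for what JJB substitutes into {repo}/{port}/{container}/{tag}.
    repo="ollivier"; port="None"; container="xtesting"; tag="latest"
    if [ "$repo" = "_" ]; then
        image="$container:$tag"              # "_" means no registry/namespace prefix
    elif [ "$port" = "None" ]; then
        image="$repo/$container:$tag"        # plain Docker Hub namespace
    else
        image="$repo:$port/$container:$tag"  # private registry host:port
    fi
    echo "$image"                            # -> ollivier/xtesting:latest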
diff --git a/jjb/functest/xtesting-project-jobs.yaml b/jjb/functest/xtesting-project-jobs.yaml
deleted file mode 100644
index e109387c6..000000000
--- a/jjb/functest/xtesting-project-jobs.yaml
+++ /dev/null
@@ -1,257 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: xtesting-project-jobs
-
- project: 'functest-xtesting'
-
- jobs:
- - 'xtesting-verify-{stream}'
- - 'xtesting-verify-{phase}-{stream}'
- - 'xtesting-docs-upload-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
- phase:
- - 'unit-tests-and-docs':
- slave-label: 'opnfv-build-ubuntu'
- - 'build-x86_64':
- slave-label: 'opnfv-build-ubuntu'
- - 'build-aarch64':
- slave-label: 'opnfv-build-ubuntu-arm'
-
-- job-template:
- name: 'xtesting-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- project-type: 'multijob'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - 'xtesting-verify-triggers-macro':
- project: '{project}'
- branch: '{branch}'
-
- builders:
- - shell: |
- #!/bin/bash
- # we do nothing here as the main stuff will be done
- # in phase jobs
- echo "Triggering phase jobs!"
- - multijob:
- name: 'xtesting-build-and-unittest'
- execution-type: PARALLEL
- projects:
- - name: 'xtesting-verify-unit-tests-and-docs-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- git-revision: true
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: false
- - name: 'xtesting-verify-build-x86_64-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- ARCH=x86_64
- git-revision: true
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: false
- - name: 'xtesting-verify-build-aarch64-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- ARCH=aarch64
- git-revision: true
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: false
-
-- job-template:
- name: 'xtesting-verify-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 30
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-defaults'
-
- scm:
- - git-scm-gerrit
-
- builders:
- - 'xtesting-verify-{phase}-builders-macro'
-
- publishers:
- - 'xtesting-verify-{phase}-publishers-macro'
-
-- job-template:
- name: 'xtesting-docs-upload-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - 'xtesting-docs-upload-triggers-macro':
- project: '{project}'
- branch: '{branch}'
-
- builders:
- - xtesting-upload-doc-artifact
-
-################################
-# job triggers
-################################
-- trigger:
- name: 'xtesting-verify-triggers-macro'
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
-- trigger:
- name: 'xtesting-docs-upload-triggers-macro'
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-################################
-# job builders
-################################
-
-- builder:
- name: 'xtesting-verify-unit-tests-and-docs-builders-macro'
- builders:
- - shell: |
- cd $WORKSPACE && tox
-
-- builder:
- name: 'xtesting-verify-build-x86_64-builders-macro'
- builders:
- - shell: |
- echo "Not activated!"
-
-- builder:
- name: 'xtesting-verify-build-aarch64-builders-macro'
- builders:
- - shell: |
- echo "Not activated!"
-
-- builder:
- name: 'xtesting-upload-doc-artifact'
- builders:
- - shell: |
- cd $WORKSPACE && tox -edocs
- wget -O - https://git.opnfv.org/releng/plain/utils/upload-artifact.sh | bash -s "api/_build" "docs"
-################################
-# job publishers
-################################
-- publisher:
- name: 'xtesting-verify-unit-tests-and-docs-publishers-macro'
- publishers:
- - junit:
- results: nosetests.xml
- - cobertura:
- report-file: "coverage.xml"
- only-stable: "true"
- health-auto-update: "true"
- stability-auto-update: "true"
- zoom-coverage-chart: "true"
- targets:
- - files:
- healthy: 10
- unhealthy: 20
- failing: 30
- - method:
- healthy: 50
- unhealthy: 40
- failing: 30
- - email-jenkins-admins-on-failure
-
-- publisher:
- name: 'xtesting-verify-build-x86_64-publishers-macro'
- publishers:
- - email-jenkins-admins-on-failure
-
-- publisher:
- name: 'xtesting-verify-build-aarch64-publishers-macro'
- publishers:
- - email-jenkins-admins-on-failure
diff --git a/jjb/functest/xtesting.yaml b/jjb/functest/xtesting.yaml
new file mode 100644
index 000000000..fbd8ba445
--- /dev/null
+++ b/jjb/functest/xtesting.yaml
@@ -0,0 +1,986 @@
+---
+- xtesting-containers: &xtesting-containers
+ name: 'xtesting-containers'
+ repo: '{repo}'
+ port: '{port}'
+ container: '{container}'
+ tag: '{tag}'
+
+- xtesting-params: &xtesting-params
+ name: 'xtesting-params'
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: opnfv-build
+ dependency: 3.17
+ - 2023.2:
+ from:
+ build_args:
+ branch: stable/2023.2
+ node: opnfv-build
+ dependency: 3.17
+ - 2023.1:
+ from:
+ build_args:
+ branch: stable/2023.1
+ node: opnfv-build
+ dependency: 3.17
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: opnfv-build
+ dependency: 3.16
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: opnfv-build
+ dependency: 3.16
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: opnfv-build
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: opnfv-build
+ dependency: 3.13
+
+- xtesting-opnfv-xtesting-params: &xtesting-opnfv-xtesting-params
+ name: 'xtesting-opnfv-xtesting-params'
+ repo: 'opnfv'
+ container: 'xtesting'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: opnfv-build
+ dependency: 3.17
+ - 2023.2:
+ from:
+ build_args:
+ branch: stable/2023.2
+ node: opnfv-build
+ dependency: 3.17
+ - 2023.1:
+ from:
+ build_args:
+ branch: stable/2023.1
+ node: opnfv-build
+ dependency: 3.17
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: opnfv-build
+ dependency: 3.16
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: opnfv-build
+ dependency: 3.16
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: opnfv-build
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: opnfv-build
+ dependency: 3.13
+
+- xtesting-jobs: &xtesting-jobs
+ name: 'xtesting-jobs'
+ current-parameters: true
+
+- parameter:
+ name: xtesting-node
+ parameters:
+ - label:
+ name: node
+ default: '{node}'
+
+- parameter:
+ name: xtesting-build_tag
+ parameters:
+ - random-string:
+ name: build_tag
+
+- xtesting-run-containers: &xtesting-run-containers
+ name: 'xtesting-run-containers'
+ <<: *xtesting-containers
+ privileged: '{privileged}'
+ volumes: '{volumes}'
+ env: '{env}'
+ network: '{network}'
+ uid: '{uid}'
+ gid: '{gid}'
+ published_ports: '{published_ports}'
+
+- builder:
+ name: xtesting-pull-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker pull $image
+
+- builder:
+ name: xtesting-run-containers
+ builders:
+ - shell: |
+ set +x
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo mkdir -p $WORKSPACE/results
+ sudo chown {uid}:{gid} $WORKSPACE/results
+ sudo docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/xtesting/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/xtesting/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -v /home/opnfv/xtesting/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ $image run_tests -t {test} -p -r
+
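The xtesting-run-containers builder above (and the zip builder later in this file) expands the {volumes}, {env} and {published_ports} list parameters into repeated docker flags before invoking docker run. A small sketch of that expansion, using a hypothetical two-entry volume list:

    #!/bin/sh
    # Sketch of the list-to-flag expansion used in xtesting-run-containers;
    # the sample list value is hypothetical.
    volumes_param="[/tmp/a:/a, /tmp/b:/b]"
    volumes=
    for i in $(echo "$volumes_param" | tr -d '[]' | sed "s/, / /g"); do
        volumes="-v $i $volumes"
    done
    echo "$volumes"    # -> -v /tmp/b:/b -v /tmp/a:/a
    # The same pattern builds "-e KEY=VALUE" flags from {env}
    # and "-p host:container" flags from {published_ports}.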
+- builder:
+ name: xtesting-remove-images
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker rmi $image || true
+
+- job-template:
+ name: 'xtesting-opnfv-xtesting-{tag}-pull'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ builders:
+ - xtesting-pull-containers:
+ <<: *xtesting-containers
+
+- project:
+ name: 'xtesting-opnfv-xtesting-pull'
+ <<: *xtesting-opnfv-xtesting-params
+ jobs:
+ - 'xtesting-opnfv-xtesting-{tag}-pull'
+
+- job-template:
+ name: 'xtesting-opnfv-xtesting-{tag}-rmi'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ builders:
+ - xtesting-remove-images:
+ <<: *xtesting-containers
+
+- project:
+ name: 'xtesting-opnfv-xtesting-rmi'
+ <<: *xtesting-opnfv-xtesting-params
+ jobs:
+ - 'xtesting-opnfv-xtesting-{tag}-rmi'
+
+- job-template:
+ name: 'xtesting-opnfv-xtesting-{tag}-{test}-run'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ - xtesting-build_tag:
+ build_tag: ''
+ builders:
+ - xtesting-run-containers:
+ <<: *xtesting-run-containers
+ test: '{test}'
+
+- project:
+ name: 'xtesting-opnfv-xtesting'
+ <<: *xtesting-opnfv-xtesting-params
+ volumes:
+ env:
+ published_ports:
+ container: 'xtesting'
+ test:
+ - first
+ - second
+ - third
+ - fourth
+ - fifth
+ - sixth
+ - eighth
+ - nineth
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ exclude:
+ - tag: wallaby
+ test: nineth
+ - tag: xena
+ test: nineth
+ jobs:
+ - 'xtesting-opnfv-xtesting-{tag}-{test}-run'
+
+- builder:
+ name: xtesting-zip
+ builders:
+ - shell: |
+ set +x
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo mkdir -p $WORKSPACE/results
+ sudo chown {uid}:{gid} $WORKSPACE/results
+ sudo docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/xtesting \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/xtesting \
+ -v /home/opnfv/xtesting/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ $image zip_campaign
+
+- job-template:
+ name: 'xtesting-{tag}-zip'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ - xtesting-build_tag:
+ build_tag: ''
+ builders:
+ - xtesting-zip:
+ <<: *xtesting-run-containers
+
+- project:
+ name: 'xtesting-zip'
+ <<: *xtesting-opnfv-xtesting-params
+ volumes:
+ env:
+ published_ports:
+ container: 'xtesting'
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'xtesting-{tag}-zip'
+
+- job-template:
+ name: 'xtesting-{tag}-daily'
+ project-type: multijob
+ triggers:
+ - timed: '@daily'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ - xtesting-build_tag:
+ build_tag: ''
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
+ properties:
+ - build-blocker:
+ blocking-jobs:
+ - ^xtesting-(pi-)*{tag}-(daily|docker|review)$
+ # yamllint enable rule:indentation
+ builders:
+ - multijob:
+ name: remove former images
+ projects:
+ - name: 'xtesting-opnfv-xtesting-{tag}-rmi'
+ <<: *xtesting-jobs
+ - multijob:
+ name: pull containers
+ projects:
+ - name: 'xtesting-opnfv-xtesting-{tag}-pull'
+ <<: *xtesting-jobs
+ - multijob:
+ name: opnfv/xtesting:{tag}
+ projects:
+ - name: 'xtesting-opnfv-xtesting-{tag}-first-run'
+ <<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-second-run'
+ <<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-third-run'
+ <<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-fourth-run'
+ <<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-fifth-run'
+ <<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-sixth-run'
+ <<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-eighth-run'
+ <<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-nineth-run'
+ <<: *xtesting-jobs
+ - multijob:
+ name: dump all campaign data
+ projects:
+ - name: 'xtesting-{tag}-zip'
+ <<: *xtesting-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'xtesting-daily'
+ <<: *xtesting-params
+ jobs:
+ - 'xtesting-{tag}-daily'
+
+- view:
+ name: xtesting
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^xtesting-[a-z0-9.-]+-daily$
+
+- xtesting-build-containers: &xtesting-build-containers
+ name: 'xtesting-build-containers'
+ <<: *xtesting-containers
+ ref_arg: '{ref_arg}'
+ path: '{path}'
+ build_args: '{build_args}'
+ from: '{from}'
+
+- builder:
+ name: xtesting-build-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ build_args=""
+ if [ "{build_args}" != "None" ]; then
+ for i in $(echo {build_args} | tr -d '[]' |sed "s/, / /g" ); \
+ do build_args="--build-arg $i $build_args"; done
+ fi
+ if [ "{ref_arg}" != "None" ]; then
+ build_args="$build_args --build-arg {ref_arg}={ref}"
+ fi
+ cd {path}
+ if [ "{from}" != "None" ]; then
+ sed -i {from} Dockerfile
+ fi
+ sudo docker build $build_args \
+ --pull=false --no-cache --force-rm=true \
+ -t $image .
+
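xtesting-build-containers assembles --build-arg flags with the same list expansion, optionally rewrites the Dockerfile through the {from} sed expression, and then builds without cache. A rough expansion for the latest tag, where the concrete sed expression and branch value are assumptions based on the build_args and dependency parameters above:

    #!/bin/sh
    # Rough expansion of the xtesting-build-containers step; sed expression
    # and branch are assumed values, not taken from the job definitions.
    image="opnfv/xtesting:latest"
    build_args="--build-arg BRANCH=master"
    cd docker/core
    # {from}, when set, is a sed expression rewriting the FROM line,
    # e.g. pinning the alpine dependency listed per tag.
    sed -i 's|^FROM alpine:.*|FROM alpine:3.17|' Dockerfile
    sudo docker build $build_args \
        --pull=false --no-cache --force-rm=true \
        -t "$image" .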
+- scm:
+ name: xtesting-scm
+ scm:
+ - git:
+ url: 'https://gerrit.opnfv.org/gerrit/functest-xtesting'
+ refspec: '+refs/heads/*:refs/remotes/origin/* +refs/changes/*:refs/changes/*'
+ branches:
+ - '{ref}'
+
+- xtesting-dep: &xtesting-dep
+ name: 'xtesting-containers'
+ repo: '{repo}'
+ container: '{container}'
+ port: '{port}'
+ tag: '{dependency}'
+
+- job-template:
+ name: 'xtesting-{tag}-dep-pull'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ builders:
+ - xtesting-pull-containers:
+ <<: *xtesting-dep
+
+- xtesting-dep-params: &xtesting-dep-params
+ name: 'xtesting-dep-params'
+ repo: '_'
+ container: 'alpine'
+ port:
+ tag:
+ - latest:
+ dependency: 3.17
+ - 2023.2:
+ dependency: 3.17
+ - 2023.1:
+ dependency: 3.17
+ - zed:
+ dependency: 3.16
+ - yoga:
+ dependency: 3.16
+ - xena:
+ dependency: 3.14
+ - wallaby:
+ dependency: 3.13
+
+- project:
+ name: 'xtesting-dep-pull'
+ <<: *xtesting-dep-params
+ jobs:
+ - 'xtesting-{tag}-dep-pull'
+
+- job-template:
+ name: 'xtesting-{tag}-dep-rmi'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ builders:
+ - xtesting-remove-images:
+ <<: *xtesting-dep
+
+- project:
+ name: 'xtesting-dep-rmi'
+ <<: *xtesting-dep-params
+ jobs:
+ - 'xtesting-{tag}-dep-rmi'
+
+- builder:
+ name: xtesting-tox
+ builders:
+ - shell: |
+ set +x
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install software-properties-common gpg -y
+ sudo add-apt-repository -y ppa:deadsnakes/ppa
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install \
+ libxml2-dev libxslt-dev libffi-dev libjpeg-dev \
+ python3.10 python3.10-dev python3.10-distutils \
+ python3.9 python3.9-dev python3.9-distutils \
+ python3.8 python3.8-dev python3.8-distutils \
+ python3-pip enchant-2 -y
+ sudo pip3 install tox
+
+ tox
+
+- job-template:
+ name: 'xtesting-{tag}-tox'
+ scm:
+ - xtesting-scm:
+ ref: $GERRIT_REFSPEC
+ triggers:
+ - xtesting-patchset-created:
+ branch: '{branch}'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ builders:
+ - xtesting-tox:
+
+- project:
+ name: xtesting-tox
+ <<: *xtesting-params
+ jobs:
+ - 'xtesting-{tag}-tox'
+
+- job-template:
+ name: 'xtesting-opnfv-xtesting-{tag}-gate'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ scm:
+ - xtesting-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - xtesting-build-containers:
+ <<: *xtesting-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
+
+- project:
+ name: xtesting-opnfv-xtesting-gate
+ <<: *xtesting-params
+ repo: opnfv
+ port:
+ container: xtesting
+ ref_arg: BRANCH
+ path: docker/core
+ jobs:
+ - 'xtesting-opnfv-xtesting-{tag}-gate'
+
+- trigger:
+ name: xtesting-patchset-created
+ triggers:
+ - gerrit:
+ trigger-on:
+ - patchset-created-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'functest-xtesting'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+
+- job-template:
+ name: 'xtesting-{tag}-review'
+ project-type: multijob
+ triggers:
+ - xtesting-patchset-created:
+ branch: '{branch}'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ - xtesting-build_tag:
+ build_tag: ''
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
+ properties:
+ - build-blocker:
+ blocking-jobs:
+ - ^xtesting-(pi-)*{tag}-(daily|docker|review)$
+ # yamllint enable rule:indentation
+ builders:
+ - multijob:
+ name: remove former images
+ projects:
+ - name: 'xtesting-opnfv-xtesting-{tag}-rmi'
+ <<: *xtesting-jobs
+ - multijob:
+ name: remove dependency
+ projects:
+ - name: 'xtesting-{tag}-dep-rmi'
+ <<: *xtesting-jobs
+ - multijob:
+ name: pull dependency
+ projects:
+ - name: 'xtesting-{tag}-dep-pull'
+ <<: *xtesting-jobs
+ - multijob:
+ name: opnfv/xtesting
+ projects:
+ - name: 'xtesting-opnfv-xtesting-{tag}-gate'
+ <<: *xtesting-jobs
+ - multijob:
+ name: opnfv/xtesting:{tag}
+ projects:
+ - name: 'xtesting-opnfv-xtesting-{tag}-first-run'
+ <<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-second-run'
+ <<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-third-run'
+ <<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-fourth-run'
+ <<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-fifth-run'
+ <<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-sixth-run'
+ <<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-eighth-run'
+ <<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-nineth-run'
+ <<: *xtesting-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'xtesting-review'
+ <<: *xtesting-params
+ jobs:
+ - 'xtesting-{tag}-review'
+
+- view:
+ name: xtesting-review
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^xtesting-[a-z0-9.-]+-review$
+
+- view:
+ name: xtesting-tox
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^xtesting-[a-z0-9.-]+-tox$
+
+- builder:
+ name: xtesting-push-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker push $image
+
+- trigger:
+ name: xtesting-commit
+ triggers:
+ - pollscm:
+ cron: "*/30 * * * *"
+
+- job-template:
+ name: 'xtesting-opnfv-xtesting-{tag}-build'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ scm:
+ - xtesting-scm:
+ ref: '{branch}'
+ builders:
+ - xtesting-build-containers:
+ <<: *xtesting-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - xtesting-push-containers:
+ <<: *xtesting-build-containers
+ ref: '{branch}'
+
+- project:
+ name: xtesting-opnfv-xtesting-build
+ <<: *xtesting-params
+ repo: opnfv
+ container: xtesting
+ port:
+ ref_arg: BRANCH
+ path: docker/core
+ jobs:
+ - 'xtesting-opnfv-xtesting-{tag}-build'
+
+- job-template:
+ name: 'xtesting-{tag}-docker'
+ project-type: multijob
+ triggers:
+ - xtesting-commit
+ scm:
+ - xtesting-scm:
+ ref: '{branch}'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
+ properties:
+ - build-blocker:
+ blocking-jobs:
+ - ^xtesting-(pi-)*{tag}-(daily|docker|review)$
+ # yamllint enable rule:indentation
+ builders:
+ - multijob:
+ name: remove dependency
+ projects:
+ - name: 'xtesting-{tag}-dep-rmi'
+ <<: *xtesting-jobs
+ - multijob:
+ name: pull dependency
+ projects:
+ - name: 'xtesting-{tag}-dep-pull'
+ <<: *xtesting-jobs
+ - multijob:
+ name: opnfv/xtesting
+ projects:
+ - name: 'xtesting-opnfv-xtesting-{tag}-build'
+ <<: *xtesting-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'xtesting-docker'
+ <<: *xtesting-params
+ jobs:
+ - 'xtesting-{tag}-docker'
+
+- builder:
+ name: xtesting-trivy
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sudo sh -s -- -b .
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ ./trivy image --exit-code 1 $image
+
+- job-template:
+ name: 'xtesting-opnfv-xtesting-{tag}-trivy'
+ triggers:
+ - timed: '@daily'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ builders:
+ - xtesting-trivy:
+ <<: *xtesting-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'xtesting-opnfv-xtesting-trivy'
+ <<: *xtesting-params
+ repo: 'opnfv'
+ container: 'xtesting'
+ port:
+ jobs:
+ - 'xtesting-opnfv-xtesting-{tag}-trivy'
+
+- builder:
+ name: xtesting-grype
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sudo sh -s -- -b .
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ ./grype -q $image
+
+- job-template:
+ name: 'xtesting-opnfv-xtesting-{tag}-grype'
+ triggers:
+ - timed: '@daily'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ builders:
+ - xtesting-grype:
+ <<: *xtesting-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'xtesting-opnfv-xtesting-grype'
+ <<: *xtesting-params
+ repo: 'opnfv'
+ container: 'xtesting'
+ port:
+ jobs:
+ - 'xtesting-opnfv-xtesting-{tag}-grype'
+
+- builder:
+ name: xtesting-sbom
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ sudo mkdir -p ~/.docker
+ curl -sSfL https://raw.githubusercontent.com/docker/sbom-cli-plugin/main/install.sh | sudo sh -s --
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker sbom $image
+
+- job-template:
+ name: 'xtesting-opnfv-xtesting-{tag}-sbom'
+ triggers:
+ - timed: '@daily'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ builders:
+ - xtesting-sbom:
+ <<: *xtesting-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'xtesting-opnfv-xtesting-sbom'
+ <<: *xtesting-params
+ repo: 'opnfv'
+ container: 'xtesting'
+ port:
+ jobs:
+ - 'xtesting-opnfv-xtesting-{tag}-sbom'
+
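The trivy, grype and sbom jobs above all install their scanner into the workspace and run it against the published image. Reduced to manual commands, with the image name assumed and the tools installed as in the builders:

    #!/bin/sh
    # Manual equivalents of the three daily scan jobs (image name assumed).
    ./trivy image --exit-code 1 opnfv/xtesting:latest   # non-zero exit fails the build on findings
    ./grype -q opnfv/xtesting:latest                    # quiet vulnerability report
    sudo docker sbom opnfv/xtesting:latest              # print the software bill of materials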
+- view:
+ name: xtesting-docker
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^xtesting-[a-z0-9.-]+-docker$
+
+- view:
+ name: xtesting-trivy
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: (?!xtesting-pi)^xtesting-[a-z-0-9.]+-trivy$
+
+- view:
+ name: xtesting-grype
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: (?!xtesting-pi)^xtesting-[a-z-0-9.]+-grype$
+
+- view:
+ name: xtesting-sbom
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: (?!xtesting-pi)^xtesting-[a-z-0-9.]+-sbom$
diff --git a/jjb/global-jjb b/jjb/global-jjb
deleted file mode 160000
-Subproject 9b240453b91f3c4b0844ea1c593721a1c421caa
diff --git a/jjb/global/basic-jobs.yaml b/jjb/global/basic-jobs.yaml
deleted file mode 100644
index a8b9cffbc..000000000
--- a/jjb/global/basic-jobs.yaml
+++ /dev/null
@@ -1,46 +0,0 @@
----
-##
-# Basic Job Config
-#
-# This is used for projects which don't have any jobs of substance
-# defined yet, but still need 'Verified+1'.
-##
-- job-group:
- name: '{project}-verify-basic'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
- jobs:
- - '{project}-verify-{stream}'
-
-- job-template:
- name: '{project}-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit-trigger-patchset-created:
- project: '{project}'
- branch: '{branch}'
- files: 'docs/**|.gitignore'
-
- builders:
- - shell: |
- echo "Nothing to verify!"
diff --git a/jjb/global/installer-params.yaml b/jjb/global/installer-params.yaml
deleted file mode 100644
index f663c4556..000000000
--- a/jjb/global/installer-params.yaml
+++ /dev/null
@@ -1,156 +0,0 @@
----
-- parameter:
- name: 'apex-defaults'
- parameters:
- - string:
- name: INSTALLER_IP
- default: '192.168.X.X'
- description: 'IP of the installer'
- - string:
- name: INSTALLER_TYPE
- default: apex
- description: 'Installer used for deploying OPNFV on this POD'
- - string:
- name: EXTERNAL_NETWORK
- default: 'external'
- description: 'external network for test'
-
-- parameter:
- name: 'compass-defaults'
- parameters:
- - string:
- name: INSTALLER_IP
- default: '192.168.200.2'
- description: 'IP of the installer'
- - string:
- name: INSTALLER_TYPE
- default: compass
- description: 'Installer used for deploying OPNFV on this POD'
- - string:
- name: EXTERNAL_NETWORK
- default: 'ext-net'
- description: 'external network for test'
-
-- parameter:
- name: 'fuel-defaults'
- parameters:
- - string:
- name: INSTALLER_IP
- default: '10.20.0.2'
- description: 'IP of the installer'
- - string:
- name: SSH_KEY
- default: "/var/lib/opnfv/mcp.rsa"
- description: 'Path to private SSH key to access environment nodes'
- - string:
- name: INSTALLER_TYPE
- default: fuel
- description: 'Installer used for deploying OPNFV on this POD'
- - string:
- name: EXTERNAL_NETWORK
- default: 'floating_net'
- description: 'external network for test'
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "URL to Google Storage."
- - string:
- name: CI_DEBUG
- default: 'false'
- description: "Show debug output information"
-
-- parameter:
- name: 'joid-defaults'
- parameters:
- - string:
- name: INSTALLER_IP
- default: '192.168.122.5'
- description: 'IP of the installer'
- - string:
- name: INSTALLER_TYPE
- default: joid
- description: 'Installer used for deploying OPNFV on this POD'
- - string:
- name: MODEL
- default: 'os'
- description: 'Model to deploy (os|k8)'
- - string:
- name: OS_RELEASE
- default: 'pike'
- description: 'OpenStack release (mitaka|ocata|pike)'
- - string:
- name: EXTERNAL_NETWORK
- default: ext-net
- description: "External network used for Floating ips."
- - string:
- name: LAB_CONFIG
- default: "$HOME/joid_config"
- description: "Local lab config and Openstack openrc location"
- - string:
- name: MAAS_REINSTALL
- default: 'false'
- description: "Reinstall MAAS and Bootstrap before deploy [true/false]"
- - string:
- name: UBUNTU_DISTRO
- default: 'xenial'
- description: "Ubuntu distribution to use for Openstack (xenial)"
- - string:
- name: CPU_ARCHITECTURE
- default: 'amd64'
- description: "CPU Architecture to use for Ubuntu distro "
-
-- parameter:
- name: 'daisy-defaults'
- parameters:
- - string:
- name: INSTALLER_IP
- default: '10.20.7.3'
- description: 'IP of the installer'
- - string:
- name: INSTALLER_TYPE
- default: daisy
- description: 'Installer used for deploying OPNFV on this POD'
- - string:
- name: BRIDGE
- default: 'br7'
- description: 'pxe bridge for booting of Daisy master'
- - string:
- name: EXTERNAL_NETWORK
- default: 'admin_external'
- description: 'external network for test'
-
-- parameter:
- name: 'infra-defaults'
- parameters:
- - string:
- name: INSTALLER_IP
- default: '192.168.122.2'
- description: 'IP of the installer'
- - string:
- name: INSTALLER_TYPE
- default: infra
- description: 'Installer used for deploying OPNFV on this POD'
-
-- parameter:
- name: 'netvirt-defaults'
- parameters:
- - string:
- name: INSTALLER_IP
- default: '192.168.X.X'
- description: 'IP of the installer'
- - string:
- name: INSTALLER_TYPE
- default: apex
- description: 'Installer used for deploying OPNFV on this POD'
- - string:
- name: EXTERNAL_NETWORK
- default: 'external'
- description: 'external network for test'
-
-- parameter:
- name: 'deploy-scenario'
- parameters:
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
- description: "OPNFV deployment scenario"
diff --git a/jjb/global/installer-report.sh b/jjb/global/installer-report.sh
deleted file mode 100755
index 6cd83f1f1..000000000
--- a/jjb/global/installer-report.sh
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-source $WORKSPACE/installer_track.sh
-
-gen_content()
-{
- cat <<EOF
-{
- "installer": "$INSTALLER",
- "version": "$INSTALLER_VERSION",
- "pod_name": "$NODE_NAME",
- "job_name": "$JOB_NAME",
- "build_id": "$BUILD_ID",
- "scenario": "$DEPLOY_SCENARIO",
- "upstream_job_name": "$UPSTREAM_JOB_NAME",
- "upstream_build_id":"$UPSTREAM_BUILD_ID",
- "criteria": "$PROVISION_RESULT",
- "start_date": "$TIMESTAMP_START",
- "stop_date": "$TIMESTAMP_END",
- "details":""
-}
-EOF
-}
-
-echo "Installer: $INSTALLER provision result: $PROVISION_RESULT"
-echo $(gen_content)
-
-set -o xtrace
-curl -H "Content-Type: application/json" -X POST -v -d "$(gen_content)" \
- $TESTAPI_URL/deployresults || true
-
-# INFO
-# The postbuildscript plugin shall always return the original job's running status,
-# because the result returned from postbuildscript affects the CI pipeline.
-if [ "$PROVISION_RESULT" == "PASS" ]; then
- exit 0
-else
- exit 1
-fi
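For context, the deleted installer-report.sh rendered the variables exported by installer_track.sh into a JSON document and POSTed it to the TestAPI deployresults endpoint. A hypothetical fully rendered request (all field values invented) would be:

    #!/bin/sh
    # Hypothetical rendered POST produced by the removed gen_content/curl pair.
    curl -H "Content-Type: application/json" -X POST -v -d '{
        "installer": "fuel",
        "version": "master",
        "pod_name": "lf-pod2",
        "job_name": "fuel-daily-master",
        "build_id": "1234",
        "scenario": "os-nosdn-nofeature-ha",
        "upstream_job_name": "fuel-deploy-daily-master",
        "upstream_build_id": "1234",
        "criteria": "PASS",
        "start_date": "2018-06-01 10:00:00.000",
        "stop_date": "2018-06-01 11:30:00.000",
        "details": ""
    }' "$TESTAPI_URL/deployresults" || true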
diff --git a/jjb/global/releng-defaults.yaml b/jjb/global/releng-defaults.yaml
index 2e94767e8..a6ee7fa3d 100644
--- a/jjb/global/releng-defaults.yaml
+++ b/jjb/global/releng-defaults.yaml
@@ -11,6 +11,19 @@
node: master
+ # Defaults for global-jjb jobs
+ build-timeout: 60
+ build-node: 'opnfv-build'
+ gerrit-server-name: 'gerrit.opnfv.org'
+ jenkins-ssh-credential: 'd42411ac011ad6f3dd2e1fa34eaa5d87f910eb2e'
+ lftools-version: '<1.0.0'
+ throttle_categories: []
+ throttle-enabled: true
+ throttle-max-per-node: 1
+ throttle-max-total: 2
+ throttle-option: project
+
+
properties:
- logrotate-default
diff --git a/jjb/global/releng-macros.yaml b/jjb/global/releng-macros.yaml
index b282cff4f..ea48cc80d 100644
--- a/jjb/global/releng-macros.yaml
+++ b/jjb/global/releng-macros.yaml
@@ -101,7 +101,7 @@
name: git-scm
scm:
- git: &git-scm-defaults
- credentials-id: '$SSH_CREDENTIAL_ID'
+ credentials-id: 'd42411ac011ad6f3dd2e1fa34eaa5d87f910eb2e'
url: '$GIT_BASE'
branches:
- 'origin/$BRANCH'
@@ -136,7 +136,7 @@
name: git-scm-with-submodules
scm:
- git:
- credentials-id: '$SSH_CREDENTIAL_ID'
+ credentials-id: 'd42411ac011ad6f3dd2e1fa34eaa5d87f910eb2e'
url: '$GIT_BASE'
refspec: ''
branches:
@@ -200,6 +200,32 @@
notbuilt: false
- trigger:
+ name: gerrit-trigger-patchset-approved
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - comment-added-event:
+ approval-category: 'CRVW'
+ approval-value: 2
+ - comment-added-contains-event:
+ comment-contains-value: 'gate'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: 'ANT'
+ pattern: '{files}'
+ skip-vote:
+ successful: false
+ failed: false
+ unstable: false
+ notbuilt: false
+
+- trigger:
name: gerrit-trigger-change-merged
triggers:
- gerrit:
@@ -332,7 +358,8 @@
echo
cat gerrit_comment.txt
echo
- ssh -p 29418 gerrit.opnfv.org \
+ ssh -o 'PubkeyAcceptedKeyTypes +ssh-rsa' \
+ -p 29418 fbot@gerrit.opnfv.org \
"gerrit review -p $GERRIT_PROJECT \
-m '$(cat gerrit_comment.txt)' \
$GERRIT_PATCHSET_REVISION \
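The hunk above routes the review comment through the dedicated fbot account and re-enables ssh-rsa public keys, which newer OpenSSH clients reject by default. Stripped of the JJB context, the call amounts to the following sketch (comment file and environment variables as in the surrounding macro):

    #!/bin/sh
    # Post the prepared comment back to Gerrit over SSH, accepting legacy ssh-rsa keys.
    ssh -o 'PubkeyAcceptedKeyTypes +ssh-rsa' \
        -p 29418 fbot@gerrit.opnfv.org \
        "gerrit review -p $GERRIT_PROJECT \
         -m '$(cat gerrit_comment.txt)' \
         $GERRIT_PATCHSET_REVISION"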
@@ -373,93 +400,6 @@
- report-build-result-to-gerrit
- builder:
- name: lint-init
- builders:
- - shell: |
- #!/bin/bash
- # Ensure we start with a clean environment
- rm -f bash-violation.log python-violation.log yaml-violation.log violation.log
- git --no-pager diff --diff-filter=MCRAT --name-only HEAD^1 > modified_files
-
-- builder:
- name: lint-report
- builders:
- - shell: |
- #!/bin/bash
- if [[ -s violation.log ]]; then
- cat violation.log
- echo "Reporting lint result...."
- set -x
- msg="Found syntax error and/or coding style violation(s) in the files modified by your patchset."
- sed -i -e "1s#^#${msg}\n\n#" violation.log
- cmd="gerrit review -p $GERRIT_PROJECT -m \"$(cat violation.log)\" $GERRIT_PATCHSET_REVISION --notify NONE"
- ssh -p 29418 gerrit.opnfv.org "$cmd"
-
- # Make sure the caller job failed
- exit 1
- fi
-
-- builder:
- name: lint-bash-code
- builders:
- - shell: |
- #!/bin/bash
- echo "Checking bash code..."
- for f in $(egrep '\.sh$' modified_files)
- do
- bash -n "$f" 2>> bash-violation.log
- done
- if [[ -s bash-violation.log ]]; then
- echo -e "Bash syntax error(s)\n---" >> violation.log
- sed -e 's/^/ /g' bash-violation.log >> violation.log
- fi
-
-- builder:
- name: lint-python-code
- builders:
- - shell: |
- #!/bin/bash
- # Install python package
- sudo pip install "flake8==2.6.2"
-
- echo "Checking python code..."
- for f in $(egrep '\.py$' modified_files)
- do
- flake8 "$f" >> python-violation.log
- done
- if [[ -s python-violation.log ]]; then
- echo -e "Python violation(s)\n---" >> violation.log
- sed -e 's/^/ /g' python-violation.log >> violation.log
- fi
-
-- builder:
- name: lint-yaml-code
- builders:
- - shell: |
- #!/bin/bash
- # Install python packages
- sudo pip install "yamllint==1.8.2"
-
- echo "Checking yaml file..."
- for f in $(egrep '\.ya?ml$' modified_files)
- do
- yamllint "$f" >> yaml-violation.log
- done
- if [[ -s yaml-violation.log ]]; then
- echo -e "YAML violation(s)\n---" >> violation.log
- sed -e 's/^/ /g' yaml-violation.log >> violation.log
- fi
-
-- builder:
- name: lint-all-code
- builders:
- - lint-init
- - lint-bash-code
- - lint-python-code
- - lint-yaml-code
- - lint-report
-
-- builder:
name: clean-workspace
builders:
- shell: |
@@ -523,494 +463,3 @@
failure: true
send-to:
- recipients
-
-# Email PTL publishers
-- email_ptl_defaults: &email_ptl_defaults
- name: 'email_ptl_defaults'
- content-type: text
- attach-build-log: true
- attachments: '*.log'
- compress-log: true
- always: true
- subject: '{subject}'
-
-- publisher: &email_apex_ptl_defaults
- name: 'email-apex-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- trozet@redhat.com
-- publisher:
- name: 'email-apex-os-net-config-ptl'
- <<: *email_apex_ptl_defaults
-- publisher:
- name: 'email-apex-puppet-tripleo-ptl'
- <<: *email_apex_ptl_defaults
-- publisher:
- name: 'email-apex-tripleo-heat-templates-ptl'
- <<: *email_apex_ptl_defaults
-
-- publisher:
- name: 'email-armband-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- bob.monkman@arm.com
-
-- publisher:
- name: 'email-auto-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- tina.tsou@arm.com
-
-- publisher:
- name: 'email-availability-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- fuqiao@chinamobile.com
-
-- publisher:
- name: 'email-bamboo-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- donaldh@cisco.com
-
-- publisher:
- name: 'email-barometer-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- aasmith@redhat.com
-
-- publisher:
- name: 'email-bottlenecks-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- gabriel.yuyang@huawei.com
-
-- publisher:
- name: 'email-calipso-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- korlev@cisco.com
-
-- publisher:
- name: 'email-clover-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- stephen.wong1@huawei.com
-
-- publisher: &email_compass4nfv_ptl_defaults
- name: 'email-compass4nfv-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- chigang@huawei.com
-- publisher:
- name: 'email-compass-containers-ptl'
- <<: *email_compass4nfv_ptl_defaults
-
-- publisher:
- name: 'email-conductor-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- limingjiang@huawei.com
-
-- publisher:
- name: 'email-container4nfv-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- jiaxuan@chinamobile.com
-
-- publisher:
- name: 'email-copper-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- aimeeu.opensource@gmail.com
-
-- publisher:
- name: 'email-cperf-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- matt.welch@intel.com
-
-- publisher:
- name: 'email-daisy-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- hu.zhijiang@zte.com.cn
-
-- publisher:
- name: 'email-doctor-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- r-mibu@cq.jp.nec.com
-
-- publisher:
- name: 'email-domino-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- ulas.kozat@huawei.com
-
-- publisher:
- name: 'email-dovetail-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- georg.kunz@ericsson.com
-
-- publisher:
- name: 'email-dpacc-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- denglingli@chinamobile.com
-
-- publisher:
- name: 'email-enfv-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- JBuchanan@advaoptical.com
-
-- publisher:
- name: 'email-fds-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- fbrockne@cisco.com
-
-- publisher:
- name: 'email-fuel-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- gelkinbard@mirantis.com
-
-- publisher:
- name: 'email-functest-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- cedric.ollivier@orange.com
-
-- publisher:
- name: 'email-ipv6-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- bh526r@att.com
-
-- publisher:
- name: 'email-joid-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- artur.tyloch@canonical.com
-
-- publisher:
- name: 'email-kvmfornfv-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- raghuveer.reddy@intel.com
-
-- publisher:
- name: 'email-models-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- bs3131@att.com
-
-- publisher:
- name: 'email-moon-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- ruan.he@orange.com
-
-- publisher:
- name: 'email-netready-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- georg.kunz@ericsson.com
-
-- publisher:
- name: 'email-nfvbench-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- ahothan@cisco.com
-
-- publisher:
- name: 'email-onosfw-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- su.wei@huawei.com
-
-- publisher:
- name: 'email-opera-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- Yingjun.li@huawei.com
-
-- publisher:
- name: 'email-opnfvdocs-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- sofia.wallin@ericsson.com
-
-- publisher:
- name: 'email-orchestra-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- giuseppe.carella@fokus.fraunhofer.de
-
-- publisher:
- name: 'email-ovn4nfv-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- trinath.somanchi@gmail.com
-
-- publisher:
- name: 'email-ovno-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- wsmackie@juniper.net
-
-- publisher:
- name: 'email-ovsnfv-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- mark.d.gray@intel.com
-
-- publisher:
- name: 'email-parser-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- shang.xiaodong@zte.com.cn
-
-- publisher: &email_pharos_ptl_defaults
- name: 'email-pharos-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- zhang.jun3g@zte.com.cn
-- publisher:
- name: 'email-pharos-tools-ptl'
- <<: *email_pharos_ptl_defaults
-
-- publisher:
- name: 'email-promise-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- kunzmann@docomolab-euro.com
-
-- publisher:
- name: 'email-qtip-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- wu.zhihui1@zte.com.cn
-
-- publisher: &email_releng_ptl_defaults
- name: 'email-releng-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- fatih.degirmenci@ericsson.com
-- publisher:
- name: 'email-releng-anteater-ptl'
- <<: *email_releng_ptl_defaults
-- publisher:
- name: 'email-releng-testresults-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- fatih.degirmenci@ericsson.com
- feng.xiaowei@zte.com.cn
-- publisher:
- name: 'email-releng-utils-ptl'
- <<: *email_releng_ptl_defaults
-- publisher:
- name: 'email-releng-xci-ptl'
- <<: *email_releng_ptl_defaults
-
-- publisher:
- name: 'email-samplevnf-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- deepak.s@intel.com
-
-- publisher:
- name: 'email-sdnvpn-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- tim.irnich@ericsson.com
-
-- publisher:
- name: 'email-securityscanning-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- lhinds@redhat.com
-
-- publisher:
- name: 'email-sfc-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- mbuil@suse.com
-
-- publisher:
- name: 'email-snaps-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- s.pisarski@cablelabs.com
-
-- publisher:
- name: 'email-stor4nfv-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- shane.wang@intel.com
-
-- publisher:
- name: 'email-storperf-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- mark.beierl@emc.com
-
-- publisher:
- name: 'email-ves-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- bryan.sullivan@att.com
-
-- publisher:
- name: 'email-vswitchperf-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- sridhar.rao@spirent.com
-
-- publisher:
- name: 'email-yardstick-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- ross.b.brattain@intel.com
-
-- publisher:
- name: 'report-provision-result'
- publishers:
- - postbuildscript:
- builders:
- - role: BOTH
- build-on:
- - SUCCESS
- build-steps:
- - shell: |
- echo "export PROVISION_RESULT=PASS" >> $WORKSPACE/installer_track.sh
- echo "export INSTALLER=$INSTALLER_TYPE" >> $WORKSPACE/installer_track.sh
- echo "export TIMESTAMP_END="\'`date '+%Y-%m-%d %H:%M:%S.%3N'`\' >> $WORKSPACE/installer_track.sh
- - shell:
- !include-raw: installer-report.sh
- mark-unstable-if-failed: true
- - postbuildscript:
- builders:
- - role: BOTH
- build-on:
- - ABORTED
- - FAILURE
- - NOT_BUILT
- - UNSTABLE
- build-steps:
- - shell: |
- echo "export PROVISION_RESULT=FAIL" >> $WORKSPACE/installer_track.sh
- echo "export INSTALLER=$INSTALLER_TYPE" >> $WORKSPACE/installer_track.sh
- echo "export TIMESTAMP_END="\'`date '+%Y-%m-%d %H:%M:%S.%3N'`\' >> $WORKSPACE/installer_track.sh
- - shell:
- !include-raw: installer-report.sh
- mark-unstable-if-failed: true
diff --git a/jjb/global/slave-params.yaml b/jjb/global/slave-params.yaml
index 0219be54b..7d0996bb6 100644
--- a/jjb/global/slave-params.yaml
+++ b/jjb/global/slave-params.yaml
@@ -1,495 +1,4 @@
---
-#####################################################
-# Parameters for slaves using old labels
-# This will be cleaned up once the new job structure and
-# use of the new labels are in place
-#####################################################
-- parameter:
- name: 'apex-baremetal-master-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-baremetal-master'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'apex-baremetal-fraser-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-baremetal-master'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'apex-baremetal-euphrates-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-baremetal-master'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'apex-baremetal-danube-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-baremetal-danube'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-
-- parameter:
- name: 'apex-virtual-master-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-virtual-master'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'apex-virtual-fraser-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-virtual-master'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'apex-virtual-euphrates-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-virtual-master'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'apex-virtual-danube-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-virtual-danube'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'lf-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - lf-pod1
- default-slaves:
- - lf-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'lf-pod3-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - lf-pod3
- default-slaves:
- - lf-pod3
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'lf-pod4-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - lf-pod4
- default-slaves:
- - lf-pod4
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'lf-pod5-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - lf-pod5
- default-slaves:
- - lf-pod5
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-#####################################################
-# Parameters for CI baremetal PODs
-#####################################################
-- parameter:
- name: 'apex-baremetal-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-baremetal'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-- parameter:
- name: 'compass-baremetal-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'compass-baremetal'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'compass-baremetal-master-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'compass-baremetal-master'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'compass-baremetal-branch-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'compass-baremetal-branch'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'fuel-baremetal-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'fuel-baremetal'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'armband-baremetal-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'armband-baremetal'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: LAB_CONFIG_URL
- default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
- description: 'Base URI to the configuration directory'
-
-- parameter:
- name: 'auto-baremetal-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'auto-baremetal'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'joid-baremetal-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'joid-baremetal'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: EXTERNAL_NETWORK
- default: ext-net
- description: "External network floating ips"
-
-- parameter:
- name: 'daisy-baremetal-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-pod2
- default-slaves:
- - zte-pod2
- - label:
- name: SLAVE_LABEL
- default: 'daisy-baremetal'
- - string:
- name: INSTALLER_IP
- default: '10.20.7.3'
- description: 'IP of the installer'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-#####################################################
-# Parameters for CI virtual PODs
-#####################################################
-- parameter:
- name: 'apex-virtual-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-virtual'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'compass-virtual-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'compass-virtual'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'compass-virtual-master-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'compass-virtual-master'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'compass-virtual-branch-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'compass-virtual-branch'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'fuel-virtual-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'fuel-virtual'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'armband-virtual-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'armband-virtual'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: LAB_CONFIG_URL
- default: ssh://jenkins-enea@gerrit.opnfv.org:29418/securedlab
- description: 'Base URI to the configuration directory'
-
-- parameter:
- name: 'joid-virtual-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'joid-virtual'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'daisy-virtual-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-virtual1
- - zte-virtual2
- default-slaves:
- - zte-virtual1
- - label:
- name: SLAVE_LABEL
- default: 'daisy-virtual'
- - string:
- name: INSTALLER_IP
- default: '10.20.11.2'
- description: 'IP of the installer'
- - string:
- name: BRIDGE
- default: 'daisy1'
- description: 'pxe bridge for booting of Daisy master'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-#####################################################
-# Parameters for build slaves
-#####################################################
-- parameter:
- name: 'opnfv-build-centos-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build-centos'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
-
-- parameter:
- name: 'opnfv-build-ubuntu-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build-ubuntu'
- description: 'Slave label on Jenkins'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
-
-- parameter:
- name: 'pharos-dashboard-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'pharos-dashboard'
- description: 'Slave label on Jenkins'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
-
- parameter:
name: 'opnfv-build-defaults'
parameters:
@@ -497,6 +6,8 @@
name: SLAVE_LABEL
default: 'opnfv-build'
description: 'Slave label on Jenkins'
+ all-nodes: false
+ node-eligibility: 'ignore-offline'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -507,39 +18,14 @@
description: "Directory where the build artifact will be located upon the completion of the build."
- parameter:
- name: 'ericsson-build3-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'ericsson-build3'
- description: 'Slave label on Jenkins'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'huawei-build-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-build
- default-slaves:
- - huawei-build
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'opnfv-build-ubuntu-arm-defaults'
+ name: 'opnfv-build-ubuntu-defaults'
parameters:
- label:
name: SLAVE_LABEL
- default: 'opnfv-build-ubuntu-arm'
+ default: 'anuket-build'
description: 'Slave label on Jenkins'
+ all-nodes: false
+ node-eligibility: 'ignore-offline'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -549,88 +35,6 @@
default: $WORKSPACE/build_output
description: "Directory where the build artifact will be located upon the completion of the build."
-#####################################################
-# Parameters for none-CI PODs
-#####################################################
-- parameter:
- name: 'cengn-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - cengn-pod1
- default-slaves:
- - cengn-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'intel-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod1
- default-slaves:
- - intel-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'intel-pod2-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod2
- default-slaves:
- - intel-pod2
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'intel-pod9-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod9
- default-slaves:
- - intel-pod9
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'intel-pod10-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod10
- default-slaves:
- - intel-pod10
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- parameter:
name: 'intel-pod12-defaults'
parameters:
@@ -645,514 +49,3 @@
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'huawei-pod3-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-pod3
- default-slaves:
- - huawei-pod3
- - label:
- name: SLAVE_LABEL
- default: 'huawei-test'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'huawei-pod4-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-pod4
- default-slaves:
- - huawei-pod4
- - label:
- name: SLAVE_LABEL
- default: 'huawei-test'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'intel-pod8-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod8
- default-slaves:
- - intel-pod8
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-
-- parameter:
- name: 'intel-pod17-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod17
- default-slaves:
- - intel-pod17
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-
-- parameter:
- name: 'huawei-virtual5-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'huawei-virtual5'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-
-- parameter:
- name: 'huawei-virtual7-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-virtual7
- default-slaves:
- - huawei-virtual7
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-
-- parameter:
- name: 'huawei-pod7-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-pod7
- default-slaves:
- - huawei-pod7
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-
-- parameter:
- name: 'zte-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-pod1
- default-slaves:
- - zte-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: INSTALLER_IP
- default: '10.20.6.2'
- description: 'IP of the installer'
- - string:
- name: BRIDGE
- default: 'br6'
- description: 'pxe bridge for booting of Fuel master'
-
-- parameter:
- name: 'zte-pod2-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-pod2
- default-slaves:
- - zte-pod2
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: INSTALLER_IP
- default: '10.20.7.3'
- description: 'IP of the installer'
- - string:
- name: BRIDGE
- default: 'br7'
- description: 'pxe bridge for booting of Daisy master'
-
-- parameter:
- name: 'zte-pod3-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-pod3
- default-slaves:
- - zte-pod3
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: INSTALLER_IP
- default: '10.20.0.2'
- description: 'IP of the installer'
- - string:
- name: BRIDGE
- default: 'br0'
- description: 'pxe bridge for booting of Daisy master'
-
-- parameter:
- name: 'zte-pod9-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-pod9
- default-slaves:
- - zte-pod9
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: INSTALLER_IP
- default: '10.20.7.2'
- description: 'IP of the installer'
- - string:
- name: BRIDGE
- default: 'br0'
- description: 'pxe bridge for booting of Daisy master'
-
-- parameter:
- name: zte-virtual5-defaults
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-virtual5
- default-slaves:
- - zte-virtual5
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: zte-virtual6-defaults
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-virtual6
- default-slaves:
- - zte-virtual6
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'juniper-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - juniper-pod1
- default-slaves:
- - juniper-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: CEPH_DISKS
- default: /srv
- description: "Disks to use by ceph (comma separated list)"
-
-- parameter:
- name: 'orange-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - orange-pod1
- default-slaves:
- - orange-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'orange-pod2-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - orange-pod2
- default-slaves:
- - orange-pod2
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'orange-pod5-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - orange-pod5
- default-slaves:
- - orange-pod5
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'dell-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - dell-pod1
- default-slaves:
- - dell-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'dell-pod2-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - dell-pod2
- default-slaves:
- - dell-pod2
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'nokia-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - nokia-pod1
- default-slaves:
- - nokia-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'intel-virtual6-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-virtual6
- default-slaves:
- - intel-virtual6
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'intel-virtual10-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-virtual10
- default-slaves:
- - intel-virtual10
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'doctor-slave-parameter'
- parameters:
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to be used'
-
-- parameter:
- name: 'ericsson-virtual5-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'ericsson-virtual5'
- - string:
- name: GIT_BASE
- default: https://git.opendaylight.org/gerrit/p/$PROJECT.git
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'ericsson-virtual12-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'ericsson-virtual12'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'ericsson-virtual13-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'ericsson-virtual13'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'ericsson-virtual-pod1bl01-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'ericsson-virtual-pod1bl01'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'odl-netvirt-virtual-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'odl-netvirt-virtual'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'odl-netvirt-virtual-intel-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'odl-netvirt-virtual-intel'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'flex-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - flex-pod1
- default-slaves:
- - flex-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-#####################################################
-# These slaves are just dummy slaves for sandbox jobs
-#####################################################
-- parameter:
- name: 'sandbox-baremetal-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'sandbox-baremetal'
- description: 'Slave label on Jenkins'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
-
-- parameter:
- name: 'sandbox-virtual-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'sandbox-virtual'
- description: 'Slave label on Jenkins'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
-
-- parameter:
- name: 'dummy-pod1-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'dummy-pod1'
- description: 'Slave label on Jenkins'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
diff --git a/jjb/ipv6/ipv6.yaml b/jjb/ipv6/ipv6.yaml
deleted file mode 100644
index 2946ec77b..000000000
--- a/jjb/ipv6/ipv6.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- project:
- name: ipv6
-
- project: '{name}'
-
- jobs:
- - '{project}-verify-basic'
diff --git a/jjb/joid/joid-daily-jobs.yaml b/jjb/joid/joid-daily-jobs.yaml
deleted file mode 100644
index 2719c7292..000000000
--- a/jjb/joid/joid-daily-jobs.yaml
+++ /dev/null
@@ -1,384 +0,0 @@
----
-########################
-# Job configuration for joid
-########################
-- project:
-
- name: 'joid'
-
- project: '{name}'
-
- installer: '{name}'
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- fraser: &fraser
- stream: fraser
- branch: 'stable/{stream}'
- disabled: false
- gs-pathname: '/{stream}'
- master: &master
- stream: master
- branch: '{stream}'
- disabled: false
- gs-pathname: ''
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # CI PODs
- # -------------------------------
- pod:
- - baremetal:
- slave-label: joid-baremetal
- <<: *fraser
- - baremetal:
- slave-label: joid-baremetal
- <<: *master
- # -------------------------------
- # scenarios
- # -------------------------------
- scenario:
- - 'os-nosdn-nofeature-noha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-nofeature-ha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-lxd-ha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-lxd-noha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- - 'os-odl_l2-nofeature-ha':
- auto-trigger-name: 'daily-trigger-disabled'
- - 'os-odl_l2-nofeature-noha':
- auto-trigger-name: 'daily-trigger-disabled'
- - 'os-ocl-nofeature-ha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- - 'os-ocl-nofeature-noha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- - 'k8-nosdn-nofeature-noha':
- auto-trigger-name: 'daily-trigger-disabled'
- - 'k8-nosdn-lb-noha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- - 'k8-ovn-lb-noha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-openbaton-ha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- - 'k8-canal-lb-noha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- - 'k8-nosdn-lb_ceph-noha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- - 'k8-multus-lb-noha':
- auto-trigger-name: 'daily-trigger-disabled'
- - 'os-ovn-nofeature-ha':
- auto-trigger-name: 'daily-trigger-disabled'
-
- jobs:
- - 'joid-{scenario}-{pod}-daily-{stream}'
- - 'joid-deploy-{pod}-daily-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
- name: 'joid-{scenario}-{pod}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'joid-(os|k8)-.*?-{pod}-daily-.*'
- block-level: 'NODE'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
- triggers:
- - '{auto-trigger-name}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - trigger-builds:
- - project: 'joid-deploy-{pod}-daily-{stream}'
- current-parameters: true
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- same-node: true
- block: true
- - trigger-builds:
- - project: 'functest-joid-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-joid-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={scenario}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
-# Notes on the dovetail trigger below (currently commented out):
-# 1. dovetail only supports euphrates for now and is not synced with the
-#    A/B/C branches
-# 2. here the stream refers to the SUT stream; the dovetail stream is
-#    defined in its own job
-# 3. only the debug testsuite is run here (it includes basic test cases,
-#    i.e. one tempest smoke ipv6 test and two vping tests from functest)
-# 4. not used for release criteria or compliance, only to debug
-#    dovetail tool bugs with joid
-# - trigger-builds:
-# - project: 'dovetail-joid-{pod}-proposed_tests-{stream}'
-# current-parameters: false
-# predefined-parameters:
-# DEPLOY_SCENARIO={scenario}
-# block: true
-# same-node: true
-# block-thresholds:
-# build-step-failure-threshold: 'never'
-# failure-threshold: 'never'
-# unstable-threshold: 'FAILURE'
-
-- job-template:
- name: 'joid-deploy-{pod}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'joid-deploy-{pod}-daily-.*'
- block-level: 'NODE'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: 180
- fail: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl_l2-nofeature-ha'
-
- scm:
- - git-scm
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - 'builder-macro'
-########################
-# builder macros
-########################
-- builder:
- name: 'builder-macro'
- builders:
- - shell: |
- #!/bin/bash
- echo "Running $INSTALLER_TYPE with controller $SDN_CONTROLLER"
- echo
- echo "------ First Executing clean.sh ------"
- cd $WORKSPACE/ci
- ./clean.sh
- - shell:
- !include-raw: ./joid-deploy.sh
-
-########################
-# trigger macros
-########################
-# os-nosdn-nofeature-ha trigger - branch: fraser
-- trigger:
- name: 'joid-os-nosdn-nofeature-ha-baremetal-fraser-trigger'
- triggers:
- - timed: '5 2 * * *'
-# os-nosdn-nofeature-ha trigger - branch: master
-- trigger:
- name: 'joid-os-nosdn-nofeature-ha-baremetal-master-trigger'
- triggers:
- - timed: '5 2 * * *'
-# os-odl_l2-nofeature-ha trigger - branch: fraser
-- trigger:
- name: 'joid-os-odl_l2-nofeature-ha-baremetal-fraser-trigger'
- triggers:
- - timed: '5 7 * * *'
-# os-odl_l2-nofeature-ha trigger - branch: master
-- trigger:
- name: 'joid-os-odl_l2-nofeature-ha-baremetal-master-trigger'
- triggers:
- - timed: '5 7 * * *'
-# os-nosdn-lxd-noha trigger - branch: fraser
-- trigger:
- name: 'joid-os-nosdn-lxd-noha-baremetal-fraser-trigger'
- triggers:
- - timed: '5 22 * * *'
-# os-nosdn-lxd-noha trigger - branch: master
-- trigger:
- name: 'joid-os-nosdn-lxd-noha-baremetal-master-trigger'
- triggers:
- - timed: '5 22 * * *'
-# os-nosdn-lxd-ha trigger - branch: fraser
-- trigger:
- name: 'joid-os-nosdn-lxd-ha-baremetal-fraser-trigger'
- triggers:
- - timed: '5 10 * * *'
-# os-nosdn-lxd-ha trigger - branch: master
-- trigger:
- name: 'joid-os-nosdn-lxd-ha-baremetal-master-trigger'
- triggers:
- - timed: '5 10 * * *'
-# os-nosdn-nofeature-noha trigger - branch: fraser
-- trigger:
- name: 'joid-os-nosdn-nofeature-noha-baremetal-fraser-trigger'
- triggers:
- - timed: '5 4 * * *'
-# os-nosdn-nofeature-noha trigger - branch: master
-- trigger:
- name: 'joid-os-nosdn-nofeature-noha-baremetal-master-trigger'
- triggers:
- - timed: '5 4 * * *'
-# k8-nosdn-nofeature-noha trigger - branch: fraser
-- trigger:
- name: 'joid-k8-nosdn-nofeature-noha-baremetal-fraser-trigger'
- triggers:
- - timed: '5 15 * * *'
-# k8-nosdn-nofeature-noha trigger - branch: master
-- trigger:
- name: 'joid-k8-nosdn-nofeature-noha-baremetal-master-trigger'
- triggers:
- - timed: '5 15 * * *'
-# k8-nosdn-lb-noha trigger - branch: fraser
-- trigger:
- name: 'joid-k8-nosdn-lb-noha-baremetal-fraser-trigger'
- triggers:
- - timed: '5 20 * * *'
-# k8-nosdn-lb-noha trigger - branch: master
-- trigger:
- name: 'joid-k8-nosdn-lb-noha-baremetal-master-trigger'
- triggers:
- - timed: '5 20 * * *'
-# k8-ovn-lb-noha trigger - branch: fraser
-- trigger:
- name: 'joid-k8-ovn-lb-noha-baremetal-fraser-trigger'
- triggers:
- - timed: '5 17 * * *'
-# k8-ovn-lb-noha trigger - branch: master
-- trigger:
- name: 'joid-k8-ovn-lb-noha-baremetal-master-trigger'
- triggers:
- - timed: '5 17 * * *'
-# os-nosdn-openbaton-ha trigger - branch: fraser
-- trigger:
- name: 'joid-os-nosdn-openbaton-ha-baremetal-fraser-trigger'
- triggers:
- - timed: '' # Disabled as there is no branch for orchestra
-# os-nosdn-openbaton-ha trigger - branch: master
-- trigger:
- name: 'joid-os-nosdn-openbaton-ha-baremetal-master-trigger'
- triggers:
- - timed: '5 23 * * *'
-# os-ocl-nofeature-ha trigger - branch: fraser
-- trigger:
- name: 'joid-os-ocl-nofeature-ha-baremetal-fraser-trigger'
- triggers:
- - timed: '5 13 * * *'
-# os-ocl-nofeature-ha trigger - branch: master
-- trigger:
- name: 'joid-os-ocl-nofeature-ha-baremetal-master-trigger'
- triggers:
- - timed: '5 13 * * *'
-# os-ocl-nofeature-noha trigger - branch: fraser
-- trigger:
- name: 'joid-os-ocl-nofeature-noha-baremetal-fraser-trigger'
- triggers:
- - timed: '5 16 * * *'
-# os-ocl-nofeature-noha trigger - branch: master
-- trigger:
- name: 'joid-os-ocl-nofeature-noha-baremetal-master-trigger'
- triggers:
- - timed: '5 16 * * *'
-# k8-canal-lb-noha trigger - branch: fraser
-- trigger:
- name: 'joid-k8-canal-lb-noha-baremetal-fraser-trigger'
- triggers:
- - timed: '5 14 * * *'
-# k8-canal-lb-noha trigger - branch: master
-- trigger:
- name: 'joid-k8-canal-lb-noha-baremetal-master-trigger'
- triggers:
- - timed: '5 14 * * *'
-# k8-multus-lb-noha trigger - branch: fraser
-- trigger:
- name: 'joid-k8-multus-lb-noha-baremetal-fraser-trigger'
- triggers:
- - timed: '5 18 * * *'
-# k8-multus-lb-noha trigger - branch: master
-- trigger:
- name: 'joid-k8-multus-lb-noha-baremetal-master-trigger'
- triggers:
- - timed: '5 18 * * *'
-# k8-nosdn-lb_ceph-noha trigger - branch: fraser
-- trigger:
- name: 'joid-k8-nosdn-lb_ceph-noha-baremetal-fraser-trigger'
- triggers:
- - timed: '5 18 * * *'
-# k8-nosdn-lb_ceph-noha trigger - branch: master
-- trigger:
- name: 'joid-k8-nosdn-lb_ceph-noha-baremetal-master-trigger'
- triggers:
- - timed: '5 18 * * *'
-# os-ovn-nofeature-ha trigger - branch: fraser
-- trigger:
- name: 'joid-os-ovn-nofeature-ha-baremetal-fraser-trigger'
- triggers:
- - timed: '5 19 * * *'
-# os-ovn-nofeature-ha trigger - branch: master
-- trigger:
- name: 'joid-os-ovn-nofeature-ha-baremetal-master-trigger'
- triggers:
- - timed: '5 19 * * *'
diff --git a/jjb/joid/joid-deploy.sh b/jjb/joid/joid-deploy.sh
deleted file mode 100644
index 9740d38bf..000000000
--- a/jjb/joid/joid-deploy.sh
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Orange and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set +e
-set -o nounset
-
-##
-## Functions
-##
-function exit_on_error {
- RES=$1
- MSG=$2
- if [ $RES != 0 ]; then
- echo "FAILED - $MSG"
- exit $RES
- fi
-}
-
-##
-## Create LAB_CONFIG folder if not exists
-##
-
-mkdir -p $LAB_CONFIG
-
-##
-## Set Joid pod config name
-##
-
-case $NODE_NAME in
- *virtual*)
- POD=default ;;
- *)
- POD=$NODE_NAME ;;
-esac
-export POD_NAME=${POD/-}
-
-##
-## Redeploy MAAS or recover the previous config
-##
-
-cd $WORKSPACE/ci
-
-if [ -e "$LAB_CONFIG/deployconfig.yaml" ] && [ "$MAAS_REINSTALL" == "false" ]; then
- echo "------ Recover Juju environment to use MAAS ------"
- if [ ! -e deployconfig.yaml ]; then
- cp $LAB_CONFIG/deployconfig.yaml .
- cp $LAB_CONFIG/deployment.yaml .
- cp $LAB_CONFIG/labconfig.yaml .
- fi
-else
- if ["$NODE_NAME" == "default" ]; then
- echo "------ Redeploy MAAS ------"
- ./03-maasdeploy.sh default
- exit_on_error $? "MAAS Deploy FAILED"
- else
- echo "------ Redeploy MAAS ------"
- ./03-maasdeploy.sh custom $LAB_CONFIG/labconfig.yaml
- exit_on_error $? "MAAS Deploy FAILED"
- fi
-fi
-
-##
-## Configure Joid deployment
-##
-
-# Based on scenario naming we can get joid options
-# naming convention:
-# <model>-<controller>-<nfvfeature>-<mode>[-<extrastuff>]
-# With parameters:
-# model=(os|k8)
-# controller=(nosdn|odl_l3|odl_l2|onos|ocl)
-# No odl_l3 today
-# nfvfeature=(kvm|ovs|dpdk|nofeature)
-# '_' list separated.
-# mode=(ha|noha)
-# extrastuff=(none)
-# Optional field - Not used today
-
-IFS='-' read -r -a DEPLOY_OPTIONS <<< "${DEPLOY_SCENARIO}--"
-# the trailing "--" avoids a nounset error when optional fields are missing
-
-JOID_MODEL=${DEPLOY_OPTIONS[0]}
-SDN_CONTROLLER=${DEPLOY_OPTIONS[1]}
-NFV_FEATURES=${DEPLOY_OPTIONS[2]}
-HA_MODE=${DEPLOY_OPTIONS[3]}
-EXTRA=${DEPLOY_OPTIONS[4]}
-
-if [ "$SDN_CONTROLLER" == 'odl_l2' ] || [ "$SDN_CONTROLLER" == 'odl_l3' ]; then
- SDN_CONTROLLER='odl'
-fi
-
-# Add extra to features
-if [ "$EXTRA" != "" ];then
- NFV_FEATURES="${NFV_FEATURES}_${EXTRA}"
-fi
-
-# temporary: the sfc feature is available only on onos and trusty
-if [ "$NFV_FEATURES" == 'sfc' ] && [ "$SDN_CONTROLLER" == 'onos' ];then
- UBUNTU_DISTRO=trusty
-fi
-
-##
-## Configure Joid deployment
-##
-
-if [ "$JOID_MODEL" == 'k8' ]; then
- echo "------ Deploy with juju ------"
- echo "Execute: ./deploy.sh -m $JOID_MODEL -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO -f $NFV_FEATURES"
-
- ./deploy.sh -m kubernetes -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO -f $NFV_FEATURES
- exit_on_error $? "Main deploy FAILED"
-fi
-
-##
-## Set Admin RC
-##
-if [ "$JOID_MODEL" == 'os' ]; then
- echo "------ Deploy with juju ------"
- echo "Execute: ./deploy.sh -m $JOID_MODEL -t $HA_MODE -o $OS_RELEASE -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO -f $NFV_FEATURES"
-
- ./deploy.sh -m openstack -t $HA_MODE -o $OS_RELEASE -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO -f $NFV_FEATURES
- exit_on_error $? "Main deploy FAILED"
-
- JOID_ADMIN_OPENRC=$LAB_CONFIG/admin-openrc
- echo "------ Create OpenRC file [$JOID_ADMIN_OPENRC] ------"
-
- # get controller IP
- case "$SDN_CONTROLLER" in
- "odl")
- SDN_CONTROLLER_IP=$(juju status odl-controller/0 |grep public-address|sed -- 's/.*\: //')
- ;;
- "onos")
- SDN_CONTROLLER_IP=$(juju status onos-controller/0 |grep public-address|sed -- 's/.*\: //')
- ;;
- *)
- SDN_CONTROLLER_IP='none'
- ;;
- esac
- SDN_PASSWORD='admin'
-
- # export the openrc file by getting the one generated by joid and add SDN
- # controller for Functest
- # cp ./cloud/admin-openrc $JOID_ADMIN_OPENRC
- echo export SDN_CONTROLLER=$SDN_CONTROLLER_IP >> $JOID_ADMIN_OPENRC
- echo export SDN_PASSWORD=$SDN_PASSWORD >> $JOID_ADMIN_OPENRC
-
-fi
-
-##
-## Exit success
-##
-
-echo "Deploy success"
-exit 0
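The scenario string handling in joid-deploy.sh above follows the <model>-<controller>-<nfvfeature>-<mode>[-<extrastuff>] convention documented in its comments. The stand-alone sketch below shows how that split behaves for a sample DEPLOY_SCENARIO value; the value itself is illustrative, not taken from a job.

#!/bin/bash
# Worked example of the scenario split used in joid-deploy.sh.
# DEPLOY_SCENARIO below is a sample value.
set -o nounset

DEPLOY_SCENARIO="os-odl_l2-nofeature-ha"

# The trailing "--" pads the field list so the optional 5th field
# always exists and nounset does not abort the script.
IFS='-' read -r -a OPTS <<< "${DEPLOY_SCENARIO}--"

echo "model:      ${OPTS[0]}"  # os
echo "controller: ${OPTS[1]}"  # odl_l2 (normalised to 'odl' by joid-deploy.sh)
echo "feature:    ${OPTS[2]}"  # nofeature
echo "mode:       ${OPTS[3]}"  # ha
echo "extra:      ${OPTS[4]}"  # empty for this scenario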
diff --git a/jjb/joid/joid-verify-jobs.yaml b/jjb/joid/joid-verify-jobs.yaml
deleted file mode 100644
index 2d1c7c9f3..000000000
--- a/jjb/joid/joid-verify-jobs.yaml
+++ /dev/null
@@ -1,213 +0,0 @@
----
-- project:
- name: 'joid-verify-jobs'
-
- project: 'joid'
-
- installer: 'joid'
- #####################################
- # branch definitions
- #####################################
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- #####################################
- # patch verification phases
- #####################################
- phase:
- - 'basic':
- slave-label: 'opnfv-build-ubuntu'
- - 'deploy-virtual':
- slave-label: 'joid-virtual'
- - 'smoke-test':
- slave-label: 'joid-virtual'
- #####################################
- # jobs
- #####################################
- jobs:
- - 'joid-verify-{stream}'
- - 'joid-verify-{phase}-{stream}'
-#####################################
-# job templates
-#####################################
-- job-template:
- name: 'joid-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- project-type: multijob
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'joid-verify-master'
- - 'joid-verify-danube'
- block-level: 'NODE'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
- readable-message: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'joid-virtual-defaults'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: basic
- condition: SUCCESSFUL
- projects:
- - name: 'joid-verify-basic-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'joid-verify-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: smoke-test
- condition: SUCCESSFUL
- projects:
- - name: 'joid-verify-smoke-test-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
-
-- job-template:
- name: 'joid-verify-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'joid-verify-deploy-.*'
- - 'joid-verify-test-.*'
- block-level: 'NODE'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-verify-{phase}-macro'
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'joid-verify-basic-macro'
- builders:
- - shell: |
- #!/bin/bash
-
- echo "Not activated!"
-
-- builder:
- name: 'joid-verify-deploy-virtual-macro'
- builders:
- - shell: |
- #!/bin/bash
-
- echo "Not activated!"
-
-- builder:
- name: 'joid-verify-smoke-test-macro'
- builders:
- - shell: |
- #!/bin/bash
-
- echo "Not activated!"
diff --git a/jjb/kvmfornfv/kvmfornfv-download-artifact.sh b/jjb/kvmfornfv/kvmfornfv-download-artifact.sh
deleted file mode 100755
index ea37eb29c..000000000
--- a/jjb/kvmfornfv/kvmfornfv-download-artifact.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
- JOB_TYPE=${BASH_REMATCH[0]}
-else
- echo "Unable to determine job type!"
- exit 1
-fi
-
-# do stuff differently based on the job type
-case "$JOB_TYPE" in
- verify)
- echo "Downloading artifacts for the change $GERRIT_CHANGE_NUMBER. This could take some time..."
- GS_UPLOAD_LOCATION="gs://artifacts.opnfv.org/$PROJECT/review/$GERRIT_CHANGE_NUMBER"
- ;;
- daily)
- gsutil cp gs://$GS_URL/latest.properties $WORKSPACE/latest.properties
- source $WORKSPACE/latest.properties
- GS_UPLOAD_LOCATION=$OPNFV_ARTIFACT_URL
- echo "Downloading artifacts from $GS_UPLOAD_LOCATION for daily run. This could take some time..."
- ;;
- *)
- echo "Artifact download is not enabled for $JOB_TYPE jobs"
- exit 1
-esac
-
-GS_GUESTIMAGE_LOCATION="gs://artifacts.opnfv.org/$PROJECT/guest-image"
-/bin/mkdir -p $WORKSPACE/build_output
-gsutil cp -r $GS_UPLOAD_LOCATION/* $WORKSPACE/build_output > $WORKSPACE/gsutil.log 2>&1
-gsutil cp $GS_GUESTIMAGE_LOCATION/guest1.sha512 $WORKSPACE/build_output > $WORKSPACE/gsutil.log 2>&1
-
-echo "--------------------------------------------------------"
-ls -al $WORKSPACE/build_output
-echo "--------------------------------------------------------"
-echo
-echo "Downloaded artifacts!"
diff --git a/jjb/kvmfornfv/kvmfornfv-test.sh b/jjb/kvmfornfv/kvmfornfv-test.sh
deleted file mode 100755
index b31d61cce..000000000
--- a/jjb/kvmfornfv/kvmfornfv-test.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-##########################################################
-## This script executes the kvmfornfv test scripts, e.g. cyclictest.
-##########################################################
-#The latest build packages are stored in build_output
-
-ls -al $WORKSPACE/build_output
-
-if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
- JOB_TYPE=${BASH_REMATCH[0]}
-else
- echo "Unable to determine job type!"
- exit 1
-fi
-
-echo $TEST_NAME
-
-# do stuff differently based on the job type
-case "$JOB_TYPE" in
- verify)
- #start the test
- cd $WORKSPACE
- ./ci/test_kvmfornfv.sh $JOB_TYPE
- ;;
- daily)
- #start the test
- cd $WORKSPACE
- ./ci/test_kvmfornfv.sh $JOB_TYPE $TEST_NAME
- ;;
- *)
- echo "Test is not enabled for $JOB_TYPE jobs"
- exit 1
-esac
diff --git a/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh b/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
deleted file mode 100755
index 91b6f4481..000000000
--- a/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/bin/bash
-set -o nounset
-
-if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
- JOB_TYPE=${BASH_REMATCH[0]}
-else
- echo "Unable to determine job type!"
- exit 1
-fi
-
-case "$JOB_TYPE" in
- verify)
- OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
- GS_UPLOAD_LOCATION="gs://artifacts.opnfv.org/$PROJECT/review/$GERRIT_CHANGE_NUMBER"
- echo "Removing outdated artifacts produced for the previous patch for the change $GERRIT_CHANGE_NUMBER"
- gsutil ls $GS_UPLOAD_LOCATION > /dev/null 2>&1 && gsutil rm -r $GS_UPLOAD_LOCATION
- echo "Uploading artifacts for the change $GERRIT_CHANGE_NUMBER. This could take some time..."
- ;;
- daily)
- echo "Uploading daily artifacts This could take some time..."
- OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
- GS_UPLOAD_LOCATION="gs://$GS_URL/$OPNFV_ARTIFACT_VERSION"
- GS_LOG_LOCATION="gs://$GS_URL/logs-$(date -u +"%Y-%m-%d")"/
- ;;
- *)
- echo "Artifact upload is not enabled for $JOB_TYPE jobs"
- exit 1
-esac
-
-# save information regarding artifacts into file
-(
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_ARTIFACT_URL=$GS_UPLOAD_LOCATION"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
-) > $WORKSPACE/opnfv.properties
-source $WORKSPACE/opnfv.properties
-
-# upload artifacts
-if [[ "$PHASE" == "build" ]]; then
- gsutil cp -r $WORKSPACE/build_output/* $GS_UPLOAD_LOCATION > $WORKSPACE/gsutil.log 2>&1
- gsutil -m setmeta -r \
- -h "Cache-Control:private, max-age=0, no-transform" \
- $GS_UPLOAD_LOCATION > /dev/null 2>&1
-else
- if [[ "$JOB_TYPE" == "daily" ]]; then
- log_dir=$WORKSPACE/build_output/log
- if [[ -d "$log_dir" ]]; then
- #Uploading logs to artifacts
- echo "Uploading artifacts for future debugging needs...."
- gsutil cp -r $WORKSPACE/build_output/log-*.tar.gz $GS_LOG_LOCATION > $WORKSPACE/gsutil.log 2>&1
- # verifying the logs uploaded by cyclictest daily test job
- gsutil ls $GS_LOG_LOCATION > /dev/null 2>&1
- if [[ $? -ne 0 ]]; then
- echo "Problem while uploading logs to artifacts!"
- echo "Check log $WORKSPACE/gsutil.log on $NODE_NAME"
- exit 1
- fi
- else
- echo "No test logs/artifacts available for uploading"
- fi
- fi
-fi
-
-# upload metadata file for the artifacts built by daily job
-if [[ "$JOB_TYPE" == "daily" && "$PHASE" == "build" ]]; then
- gsutil cp $WORKSPACE/opnfv.properties $GS_UPLOAD_LOCATION/opnfv.properties > $WORKSPACE/gsutil.log 2>&1
- gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > $WORKSPACE/gsutil.log 2>&1
- gsutil -m setmeta -r \
- -h "Cache-Control:private, max-age=0, no-transform" \
- $GS_UPLOAD_LOCATION/opnfv.properties \
- gs://$GS_URL/latest.properties > /dev/null 2>&1
-fi
-
-# verifying the artifacts uploaded by the verify/daily build job
-if [[ "$PHASE" == "build" ]]; then
- gsutil ls $GS_UPLOAD_LOCATION > /dev/null 2>&1
- if [[ $? -ne 0 ]]; then
- echo "Problem while uploading artifacts!"
- echo "Check log $WORKSPACE/gsutil.log on $NODE_NAME"
- exit 1
- fi
-fi
-echo "Uploaded artifacts!"
diff --git a/jjb/kvmfornfv/kvmfornfv.yaml b/jjb/kvmfornfv/kvmfornfv.yaml
deleted file mode 100644
index ad497e97d..000000000
--- a/jjb/kvmfornfv/kvmfornfv.yaml
+++ /dev/null
@@ -1,386 +0,0 @@
----
-- project:
- name: kvmfornfv
-
- project: '{name}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - euphrates:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: true
- #####################################
- # patch verification phases
- #####################################
- phase:
- - 'build':
- slave-label: 'opnfv-build-ubuntu'
- - 'test':
- slave-label: 'intel-pod10'
- #####################################
- # patch verification phases
- #####################################
- testname:
- - 'cyclictest'
- - 'packet_forward'
- - 'livemigration'
- #####################################
- # patch verification phases
- #####################################
- jobs:
- - 'kvmfornfv-verify-{stream}'
- - 'kvmfornfv-verify-{phase}-{stream}'
- - 'kvmfornfv-merge-{stream}'
- - 'kvmfornfv-daily-{stream}'
- - 'kvmfornfv-daily-build-{stream}'
- - 'kvmfornfv-{testname}-daily-test-{stream}'
-
-#####################################
-# job templates
-#####################################
-- job-template:
- name: 'kvmfornfv-verify-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 3
- option: 'project'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'kvmfornfv-verify-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: test
- condition: SUCCESSFUL
- projects:
- - name: 'kvmfornfv-verify-test-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
-
-- job-template:
- name: 'kvmfornfv-verify-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-defaults'
- - 'kvmfornfv-defaults':
- gs-pathname: '{gs-pathname}'
- - string:
- name: PHASE
- default: '{phase}'
- description: "Execution of kvmfornfv daily '{phase}' job ."
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-verify-{phase}-macro'
-
-- job-template:
- name: 'kvmfornfv-merge-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - 'kvmfornfv-defaults':
- gs-pathname: '{gs-pathname}'
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- builders:
- - shell:
- !include-raw: ./kvmfornfv-build.sh
-
-- job-template:
- name: 'kvmfornfv-daily-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - 'kvmfornfv-defaults':
- gs-pathname: '{gs-pathname}'
-
- scm:
- - git-scm
-
- triggers:
- - timed: '@midnight'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: cyclictest-build
- condition: SUCCESSFUL
- projects:
- - name: 'kvmfornfv-daily-build-{stream}'
- current-parameters: false
- node-parameters: false
- git-revision: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: cyclictest-test
- condition: SUCCESSFUL
- projects:
- - name: 'kvmfornfv-cyclictest-daily-test-{stream}'
- current-parameters: false
- node-parameters: false
- git-revision: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: packetforward-test
- condition: SUCCESSFUL
- projects:
- - name: 'kvmfornfv-packet_forward-daily-test-{stream}'
- current-parameters: false
- node-parameters: false
- git-revision: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: livemigration-test
- condition: SUCCESSFUL
- projects:
- - name: 'kvmfornfv-livemigration-daily-test-{stream}'
- current-parameters: false
- node-parameters: false
- git-revision: true
- kill-phase-on: FAILURE
- abort-all-job: true
-
-- job-template:
- name: 'kvmfornfv-daily-build-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - 'kvmfornfv-defaults':
- gs-pathname: '{gs-pathname}'
- - string:
- name: PHASE
- default: 'build'
- description: "Execution of kvmfornfv daily 'build' job ."
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-daily-build-macro'
-
-- job-template:
- name: 'kvmfornfv-{testname}-daily-test-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'intel-pod10-defaults'
- - 'kvmfornfv-defaults':
- gs-pathname: '{gs-pathname}'
- - string:
- name: TEST_NAME
- default: '{testname}'
- description: "Daily job to execute kvmfornfv '{testname}' testcase."
- - string:
- name: PHASE
- default: 'test'
- description: "Execution of kvmfornfv daily 'test' job ."
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-{testname}-daily-test-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'kvmfornfv-verify-build-macro'
- builders:
- - shell:
- !include-raw: ./kvmfornfv-build.sh
- - shell:
- !include-raw: ./kvmfornfv-upload-artifact.sh
-
-- builder:
- name: 'kvmfornfv-verify-test-macro'
- builders:
- - shell:
- !include-raw: ./kvmfornfv-download-artifact.sh
- - shell:
- !include-raw: ./kvmfornfv-test.sh
-
-- builder:
- name: 'kvmfornfv-daily-build-macro'
- builders:
- - shell:
- !include-raw: ./kvmfornfv-build.sh
- - shell:
- !include-raw: ./kvmfornfv-upload-artifact.sh
-
-- builder:
- name: 'kvmfornfv-cyclictest-daily-test-macro'
- builders:
- - shell:
- !include-raw: ./kvmfornfv-download-artifact.sh
- - shell:
- !include-raw: ./kvmfornfv-test.sh
- - shell:
- !include-raw: ./kvmfornfv-upload-artifact.sh
-
-- builder:
- name: 'kvmfornfv-packet_forward-daily-test-macro'
- builders:
- - shell:
- !include-raw: ./kvmfornfv-download-artifact.sh
- - shell:
- !include-raw: ./kvmfornfv-test.sh
-
-- builder:
- name: 'kvmfornfv-livemigration-daily-test-macro'
- builders:
- - shell:
- !include-raw: ./kvmfornfv-download-artifact.sh
- - shell:
- !include-raw: ./kvmfornfv-test.sh
-
-#####################################
-# parameter macros
-#####################################
-- parameter:
- name: 'kvmfornfv-defaults'
- parameters:
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/laas/laas.yml b/jjb/laas/laas.yml
new file mode 100644
index 000000000..197495475
--- /dev/null
+++ b/jjb/laas/laas.yml
@@ -0,0 +1,72 @@
+---
+- parameter:
+ name: 'laas-dashboard-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'laas-dashboard'
+ description: 'Slave label on Jenkins'
+ all-nodes: false
+ node-eligibility: 'ignore-offline'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
+
+- project:
+ name: laas-dashboard-jobs
+
+ project: 'laas'
+ project-name: 'laas'
+
+ stream:
+ - master
+
+ disabled: false
+
+ jobs:
+ - 'laas-dashboard-backup'
+ - 'laas-dashboard-deploy'
+
+- job-template:
+ name: 'laas-dashboard-backup'
+
+ disabled: '{obj:disabled}'
+
+ parameters:
+ - 'laas-dashboard-defaults'
+
+ triggers:
+ - timed: '@daily'
+
+ builders:
+ - shell:
+ !include-raw: shell/backup-dashboard.sh
+
+- job-template:
+ name: 'laas-dashboard-deploy'
+
+ disabled: '{obj:disabled}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: 'master'
+ - 'laas-dashboard-defaults'
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit-trigger-change-merged:
+ branch: 'master'
+ project: '{project}'
+ files: 'dashboard/docker-compose.yml'
+
+ builders:
+ - shell:
+ !include-raw: shell/deploy-dashboard.sh
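Editor's note: the two templates above expand against the single laas-dashboard-jobs project entry, yielding the laas-dashboard-backup and laas-dashboard-deploy jobs. A quick local sanity check of the expansion (not part of this change; assumes Jenkins Job Builder is installed and the jjb/ tree is checked out) could look like:

# Render the laas jobs to XML without contacting a Jenkins server
jenkins-jobs test jjb/laas/ -o /tmp/laas-xml
ls /tmp/laas-xml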
diff --git a/jjb/laas/shell/backup-dashboard.sh b/jjb/laas/shell/backup-dashboard.sh
new file mode 100644
index 000000000..39f5be4e4
--- /dev/null
+++ b/jjb/laas/shell/backup-dashboard.sh
@@ -0,0 +1,28 @@
+#!/bin/bash -eux
+##############################################################################
+# Copyright (c) 2018 Linux Foundation and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+BACKUP_DIR=$HOME/backups
+DATE=$(date +%Y%m%d)
+TAR_FILE=laas-dashboard-db-$DATE.tar.gz
+
+mkdir -p $BACKUP_DIR
+echo "-- $DATE --"
+echo "--> Backing up Lab as a Service Dashboard"
+
+docker run --rm \
+ -v laas-data:/laas-data:ro \
+ -v $BACKUP_DIR:/backup \
+ alpine \
+ tar -czf /backup/$TAR_FILE -C /laas-data ./
+
+/usr/local/bin/gsutil cp $BACKUP_DIR/$TAR_FILE \
+ gs://opnfv-backups/laas-dashboard/ && rm $BACKUP_DIR/$TAR_FILE
+
+echo "--> LAAS dashboard backup complete"
diff --git a/jjb/laas/shell/deploy-dashboard.sh b/jjb/laas/shell/deploy-dashboard.sh
new file mode 100644
index 000000000..d3ad29b31
--- /dev/null
+++ b/jjb/laas/shell/deploy-dashboard.sh
@@ -0,0 +1,19 @@
+#!/bin/bash -eux
+##############################################################################
+# Copyright (c) 2018 Linux Foundation and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+cp $HOME/config.env $WORKSPACE/dashboard
+cd $WORKSPACE/dashboard
+
+docker-compose pull
+docker-compose up -d
+
+# Copy JIRA keys into web container
+WEB_CONTAINER="$(docker ps --filter 'name=dg01' -q)"
+docker cp $HOME/rsa.pub $WEB_CONTAINER:/laas_dashboard/account/
+docker cp $HOME/rsa.pem $WEB_CONTAINER:/laas_dashboard/account/
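Editor's note: the docker cp calls above assume a running container whose name matches dg01; if the compose service is named differently, the filter returns nothing and the copy fails with an empty target. A guard along these lines (illustrative only, not part of the change) would make the failure explicit:

WEB_CONTAINER="$(docker ps --filter 'name=dg01' -q)"
if [ -z "$WEB_CONTAINER" ]; then
    echo "No running container matching 'dg01' found; cannot install JIRA keys" >&2
    exit 1
fi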
diff --git a/jjb/models/models.yaml b/jjb/models/models.yaml
deleted file mode 100644
index 40fcf68de..000000000
--- a/jjb/models/models.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- project:
- name: models
-
- project: '{name}'
-
- jobs:
- - '{project}-verify-basic'
diff --git a/jjb/moon/moon.yaml b/jjb/moon/moon.yaml
deleted file mode 100644
index 73b0fc731..000000000
--- a/jjb/moon/moon.yaml
+++ /dev/null
@@ -1,60 +0,0 @@
----
-- project:
- name: moon
-
- project: '{name}'
-
- jobs:
- - 'moon-verify-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: true
- - euphrates:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: true
-
-- job-template:
- name: 'moon-verify-{stream}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
- builders:
- - shell: |
- #!/bin/bash
- echo "launch Moon unit tests"
- #nosetest $WORKSPACE/keystone-moon/keystone/tests/moon/unit
diff --git a/jjb/netready/netready-gluon-build.sh b/jjb/netready/netready-gluon-build.sh
deleted file mode 100755
index 141e84cbd..000000000
--- a/jjb/netready/netready-gluon-build.sh
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-echo "Building Gluon packages."
-echo "------------------------"
-echo
-
-OPNFV_ARTIFACT_VERSION=$(echo $(date -u +"%Y%m%d"))
-
-# build all packages
-cd $WORKSPACE/ci
-./build-gluon-packages.sh
-
-# list the contents of BUILD_OUTPUT directory
-echo "Build Directory is ${BUILD_DIRECTORY}"
-echo "Build Directory Contents:"
-echo "---------------------------------------"
-ls -alR $BUILD_DIRECTORY
-
-# get version infos from Gluon from spec
-GLUON_VERSION=$(grep Version: $BUILD_DIRECTORY/rpm_specs/gluon.spec | awk '{ print $2 }')
-GLUON_RELEASE=$(grep 'define release' $BUILD_DIRECTORY/rpm_specs/gluon.spec | awk '{ print $3 }')_$OPNFV_ARTIFACT_VERSION
-
-ARTIFACT_NAME=gluon-$GLUON_VERSION-$GLUON_RELEASE.noarch.rpm
-ARTIFACT_PATH=$BUILD_DIRECTORY/noarch/$ARTIFACT_NAME
-
-echo "Writing opnfv.properties file"
-# save information regarding artifact into file
-(
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_ARTIFACT_URL=$GS_URL/$ARTIFACT_NAME"
- echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $ARTIFACT_PATH | cut -d' ' -f1)"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
- echo "ARTIFACT_LIST=$ARTIFACT_PATH"
-) > $WORKSPACE/opnfv.properties
-
-echo "---------------------------------------"
-echo "Done!"
diff --git a/jjb/netready/netready-upload-gluon-packages.sh b/jjb/netready/netready-upload-gluon-packages.sh
deleted file mode 100755
index 7c1e33727..000000000
--- a/jjb/netready/netready-upload-gluon-packages.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-echo "Uploading Gluon packages"
-echo "--------------------------------------------------------"
-echo
-
-source $WORKSPACE/opnfv.properties
-
-for artifact in $ARTIFACT_LIST; do
- echo "Uploading artifact: ${artifact}"
- gsutil cp $artifact gs://$GS_URL/$(basename $artifact) > gsutil.$(basename $artifact).log
- echo "Upload complete for ${artifact}"
-done
-
-gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log
-gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.properties.log
-
-echo "--------------------------------------------------------"
-echo "Upload done!"
-
-echo "Artifacts are not available as:"
-for artifact in $ARTIFACT_LIST; do
- echo "http://$GS_URL/$(basename $artifact)"
-done
diff --git a/jjb/netready/netready.yaml b/jjb/netready/netready.yaml
deleted file mode 100644
index 798029373..000000000
--- a/jjb/netready/netready.yaml
+++ /dev/null
@@ -1,69 +0,0 @@
----
-- project:
- name: netready
-
- project: '{name}'
-
- jobs:
- - '{project}-verify-{stream}'
- - 'netready-build-gluon-packages-daily-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
-- job-template:
- name: 'netready-build-gluon-packages-daily-{stream}'
-
- disabled: true
-
- concurrent: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - 'netready-parameter':
- gs-pathname: '{gs-pathname}'
-
- scm:
- - git-scm
-
- builders:
- - 'netready-gluon-build'
-
- triggers:
- - timed: '@midnight'
-
-
-########################
-# builder macros
-########################
-
-- builder:
- name: 'netready-gluon-build'
- builders:
- - shell:
- !include-raw: ./netready-gluon-build.sh
- - shell:
- !include-raw: ./netready-upload-gluon-packages.sh
-
-
-########################
-# parameter macros
-########################
-
-- parameter:
- name: netready-parameter
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/nfvbench/nfvbench-views.yaml b/jjb/nfvbench/nfvbench-views.yaml
new file mode 100644
index 000000000..4884adb78
--- /dev/null
+++ b/jjb/nfvbench/nfvbench-views.yaml
@@ -0,0 +1,12 @@
+---
+- view:
+ name: nfvbench
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^nfvbench-.*
diff --git a/jjb/nfvbench/nfvbench.yaml b/jjb/nfvbench/nfvbench.yaml
index 44e85d2c8..bb5083b4d 100644
--- a/jjb/nfvbench/nfvbench.yaml
+++ b/jjb/nfvbench/nfvbench.yaml
@@ -14,11 +14,13 @@
gs-pathname: ''
docker-tag: 'latest'
disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- docker-tag: 'stable'
- disabled: false
+
+- parameter:
+ name: nfvbench-parameter
+ parameters:
+ - label:
+ name: node
+ default: 'opnfv-build'
- job-template:
name: 'nfvbench-build-{stream}'
@@ -35,7 +37,7 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: "Used for overriding the GIT URL coming from Global Jenkins configuration in case if the stuff is done on none-LF HW."
# yamllint enable rule:line-length
- - 'opnfv-build-ubuntu-defaults'
+ - nfvbench-parameter
scm:
- git-scm-gerrit
@@ -44,8 +46,9 @@
- gerrit:
server-name: 'gerrit.opnfv.org'
trigger-on:
+ - change-merged-event
- comment-added-contains-event:
- comment-contains-value: 'buildvm'
+ comment-contains-value: 'remerge'
projects:
- project-compare-type: 'ANT'
project-pattern: '{project}'
@@ -73,7 +76,7 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: "Used for overriding the GIT URL coming from Global Jenkins configuration in case if the stuff is done on none-LF HW."
# yamllint enable rule:line-length
- - 'opnfv-build-ubuntu-defaults'
+ - nfvbench-parameter
scm:
- git-scm-gerrit
@@ -86,6 +89,10 @@
exclude-drafts: 'false'
exclude-trivial-rebase: 'false'
exclude-no-code-change: 'false'
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
projects:
- project-compare-type: 'ANT'
project-pattern: '{project}'
@@ -95,4 +102,25 @@
builders:
- shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install software-properties-common gpg -y
+ sudo add-apt-repository -y ppa:deadsnakes/ppa
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install \
+ libxml2-dev libxslt-dev libffi-dev libjpeg-dev \
+ python3.8 python3.8-dev python3.8-distutils \
+ python3-pip -y
cd $WORKSPACE && tox
+ curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add -
+ echo "deb https://packages.cloud.google.com/apt cloud-sdk main" | sudo tee \
+ /etc/apt/sources.list.d/google-cloud-sdk.list
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install \
+ google-cloud-sdk python3-venv -y
+ cd $WORKSPACE/nfvbenchvm/dib
+ bash verify-image.sh -v
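Editor's note: the builder above amounts to dependency setup, a tox run, and the DIB image check. Reproducing the functional part outside Jenkins (a sketch, assuming the listed packages are already installed and $WORKSPACE points at an nfvbench checkout) is roughly:

cd $WORKSPACE && tox
cd $WORKSPACE/nfvbenchvm/dib
bash verify-image.sh -v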
diff --git a/jjb/onosfw/onosfw.yaml b/jjb/onosfw/onosfw.yaml
deleted file mode 100644
index 58a50bd57..000000000
--- a/jjb/onosfw/onosfw.yaml
+++ /dev/null
@@ -1,192 +0,0 @@
----
-- project:
-
- name: onosfw
-
- jobs:
- - 'onosfw-verify-{stream}'
- - 'onosfw-daily-{stream}'
- - 'onosfw-build-{stream}'
-
- # only master branch is enabled at the moment to keep no of jobs sane
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - euphrates:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
- project: 'onosfw'
-
-########################
-# job templates
-########################
-- job-template:
- name: 'onosfw-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
- builders:
- - 'builder-onosfw-helloworld'
-
-- job-template:
- name: 'onosfw-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - timed: '@midnight'
-
- builders:
- - trigger-builds:
- - project: 'onosfw-build-{stream}'
- git-revision: true
- block: true
-
-- job-template:
- name: 'onosfw-build-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "Directory where the build artifact will be located upon the completion of the build."
-
- scm:
- - git-scm
-
- builders:
- - 'builder-onosfw-helloworld'
-
-########################
-# builder macros
-########################
-- builder:
- name: 'builder-onosfw-build'
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o nounset
- set -o pipefail
-
- # log info to console
- echo "Starting the build of $PROJECT. This could take some time..."
- echo "--------------------------------------------------------"
- echo
-
- # create the cache directory if it doesn't exist
- [[ -d $CACHE_DIRECTORY ]] || mkdir -p $CACHE_DIRECTORY
- [[ -d $BUILD_DIRECTORY ]] || mkdir -p $BUILD_DIRECTORY
-
- # set OPNFV_ARTIFACT_VERSION
- export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-
- # start the build
- cd $WORKSPACE/
- ./ci/build.sh $BUILD_DIRECTORY/
-
- # list the build artifacts
- ls -al $BUILD_DIRECTORY
-
- # save information regarding artifact into file
- (
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
- echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/onosfw.iso | cut -d' ' -f1)"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
- ) > $BUILD_DIRECTORY/opnfv.properties
- echo
- echo "--------------------------------------------------------"
- echo "Done!"
-
-
-# yamllint disable rule:line-length
-- builder:
- name: 'builder-onosfw-upload-artifact'
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o nounset
- set -o pipefail
-
- # log info to console
- echo "Uploading the $INSTALLER artifact. This could take some time..."
- echo "--------------------------------------------------------"
- echo
-
- # source the opnfv.properties to get ARTIFACT_VERSION
- source $BUILD_DIRECTORY/opnfv.properties
-
- # upload artifact and additional files to google storage
- gsutil cp $BUILD_DIRECTORY/onosfw.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1
- gsutil cp $BUILD_DIRECTORY/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
- gsutil cp $BUILD_DIRECTORY/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
-
- echo
- echo "--------------------------------------------------------"
- echo "Done!"
- echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
-# yamllint enable rule:line-length
-
-
-- builder:
- name: 'builder-onosfw-helloworld'
- builders:
- - shell: |
- #!/bin/bash
- echo "Hello world!"
diff --git a/jjb/openci/create-ane.sh b/jjb/openci/create-ane.sh
deleted file mode 100755
index 8a4da8f52..000000000
--- a/jjb/openci/create-ane.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# This script creates ArtifactPublishedEvent
-# The JMS Messaging Plugin doesn't handle the newlines well so the eventBody is
-# constructed on a single line. This is something that needs to be fixed properly
-
-cat << EOF > $WORKSPACE/event.properties
-type=$PUBLISH_EVENT_TYPE
-origin=$PUBLISH_EVENT_ORIGIN
-eventBody="{ 'type': '$PUBLISH_EVENT_TYPE', 'id': '$(uuidgen)', 'time': '$(date -u +%Y-%m-%d_%H:%M:%SUTC)', 'origin': '$PUBLISH_EVENT_ORIGIN', 'buildUrl': '$BUILD_URL', 'branch': 'master', 'artifactLocation': '$ARTIFACT_LOCATION', 'confidenceLevel': { $CONFIDENCE_LEVEL } }"
-EOF
-echo "Constructed $PUBLISH_EVENT_TYPE"
-echo "--------------------------------------------"
-cat $WORKSPACE/event.properties
-echo "--------------------------------------------"
diff --git a/jjb/openci/create-cde.sh b/jjb/openci/create-cde.sh
deleted file mode 100755
index 410db50e6..000000000
--- a/jjb/openci/create-cde.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# This script creates CompositionDefinedEvent
-# The JMS Messaging Plugin doesn't handle the newlines well so the eventBody is
-# constructed on a single line. This is something that needs to be fixed properly
-
-cat << EOF > $WORKSPACE/event.properties
-type=$PUBLISH_EVENT_TYPE
-origin=$PUBLISH_EVENT_ORIGIN
-scenario=$DEPLOY_SCENARIO
-eventBody="{ 'type': '$PUBLISH_EVENT_TYPE', 'id': '$(uuidgen)', 'time': '$(date -u +%Y-%m-%d_%H:%M:%SUTC)', 'origin': '$PUBLISH_EVENT_ORIGIN', 'buildUrl': '$BUILD_URL', 'branch': 'master', 'compositionName': '$DEPLOY_SCENARIO', 'compositionMetadataUrl': '$SCENARIO_METADATA_LOCATION' }"
-EOF
-echo "Constructed $PUBLISH_EVENT_TYPE"
-echo "--------------------------------------------"
-cat $WORKSPACE/event.properties
-echo "--------------------------------------------"
diff --git a/jjb/openci/create-clme.sh b/jjb/openci/create-clme.sh
deleted file mode 100755
index 5e8ee10c9..000000000
--- a/jjb/openci/create-clme.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# This script creates ConfidenceLevelModifiedEvent
-# The JMS Messaging Plugin doesn't handle the newlines well so the eventBody is
-# constructed on a single line. This is something that needs to be fixed properly
-
-cat << EOF > $WORKSPACE/event.properties
-type=$PUBLISH_EVENT_TYPE
-origin=$PUBLISH_EVENT_ORIGIN
-scenario=$DEPLOY_SCENARIO
-eventBody="{ 'type': '$PUBLISH_EVENT_TYPE', 'id': '$(uuidgen)', 'time': '$(date -u +%Y-%m-%d_%H:%M:%SUTC)', 'origin': '$PUBLISH_EVENT_ORIGIN', 'buildUrl': '$BUILD_URL', 'branch': 'master', 'compositionName': '$DEPLOY_SCENARIO', 'compositionMetadataUrl': '$SCENARIO_METADATA_LOCATION', 'confidenceLevel': { $CONFIDENCE_LEVEL } }"
-EOF
-echo "Constructed $PUBLISH_EVENT_TYPE"
-echo "--------------------------------------------"
-cat $WORKSPACE/event.properties
-echo "--------------------------------------------"
diff --git a/jjb/openci/openci-odl-daily-jobs.yaml b/jjb/openci/openci-odl-daily-jobs.yaml
deleted file mode 100644
index d80feadf6..000000000
--- a/jjb/openci/openci-odl-daily-jobs.yaml
+++ /dev/null
@@ -1,99 +0,0 @@
----
-- project:
- name: openci-odl
-
- project: '{name}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
- jobs:
- - 'openci-odl-autorelease-daily-{stream}'
- - 'openci-odl-promote-daily-{stream}'
-
-# This job gets triggered manually for the demo purposes.
-#
-# In prototype, either what this job does needs to be integrated to
-# ODL autorelease job or triggered by the upstream autorelease job.
-- job-template:
- name: 'openci-odl-autorelease-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - string:
- name: PUBLISH_EVENT_TYPE
- default: ArtifactPublishedEvent
- description: 'The type of the event this job publishes'
- - string:
- name: PUBLISH_EVENT_ORIGIN
- default: ODL
- description: 'Originating community'
- - string:
- name: ARTIFACT_LOCATION
- default: https://url/to/artifact/on/odl/nexus/$BUILD_NUMBER
- description: 'The location of the artifact on ODL Nexus'
- - string:
- name: CONFIDENCE_LEVEL
- default: "'autorelease': 'SUCCESS'"
- description: 'The confidence level the published artifact gained'
- - 'opnfv-build-defaults'
-
- builders:
- - shell:
- !include-raw-escape: ./create-ane.sh
- - inject:
- properties-file: "$WORKSPACE/event.properties"
-
- publishers:
- - jms-messaging:
- provider-name: openci.activemq
- msg-type: Custom
- msg-props: |
- type=$type
- origin=$origin
- msg-content:
- $eventBody
-
-# This job gets triggered by a ConfidenceLevelModifiedEvent published
-# by OPNFV jobs so ODL can promote the autorelease artifact even further.
-#
-# This job is created for the demo purposes and might not be there for
-# the prototype.
-- job-template:
- name: 'openci-odl-promote-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - 'opnfv-build-defaults'
-
- triggers:
- - jms-messaging:
- provider-name: openci.activemq
- selector: CI_TYPE = 'custom'
- checks:
- - field: origin
- expected-value: 'OPNFV'
- - field: type
- expected-value: 'ConfidenceLevelModifiedEvent'
- - field: scenario
- expected-value: 'os-odl-nofeature'
-
- builders:
- - shell: |
- #!/bin/bash
-
- echo
- echo "Triggered by $type"
- echo "----------------------------------"
- echo $CI_MESSAGE
- echo "----------------------------------"
- echo " Promoted ODL Autorelease artifact as release candidate!"
diff --git a/jjb/openci/openci-onap-daily-jobs.yaml b/jjb/openci/openci-onap-daily-jobs.yaml
deleted file mode 100644
index 28c3e6948..000000000
--- a/jjb/openci/openci-onap-daily-jobs.yaml
+++ /dev/null
@@ -1,99 +0,0 @@
----
-- project:
- name: openci-onap
-
- project: '{name}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
- jobs:
- - 'openci-onap-autorelease-daily-{stream}'
- - 'openci-onap-promote-daily-{stream}'
-
-# This job gets triggered manually for the demo purposes.
-#
-# In prototype, either what this job does needs to be integrated to
-# ONAP autorelease job or triggered by the upstream autorelease job.
-- job-template:
- name: 'openci-onap-autorelease-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - string:
- name: PUBLISH_EVENT_TYPE
- default: ArtifactPublishedEvent
- description: 'The type of the event this job publishes'
- - string:
- name: PUBLISH_EVENT_ORIGIN
- default: ONAP
- description: 'Originating community'
- - string:
- name: ARTIFACT_LOCATION
- default: https://url/to/artifact/on/onap/nexus/$BUILD_NUMBER
- description: 'The location of the artifact on ONAP Nexus'
- - string:
- name: CONFIDENCE_LEVEL
- default: "'autorelease': 'SUCCESS'"
- description: 'The confidence level the published artifact gained'
- - 'opnfv-build-defaults'
-
- builders:
- - shell:
- !include-raw-escape: ./create-ane.sh
- - inject:
- properties-file: "$WORKSPACE/event.properties"
-
- publishers:
- - jms-messaging:
- provider-name: openci.activemq
- msg-type: Custom
- msg-props: |
- type=$type
- origin=$origin
- msg-content:
- $eventBody
-
-# This job gets triggered by a ConfidenceLevelModifiedEvent published
-# by OPNFV jobs so ONAP can promote the autorelease artifact even further.
-#
-# This job is created for the demo purposes and might not be there for
-# the prototype.
-- job-template:
- name: 'openci-onap-promote-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - 'opnfv-build-defaults'
-
- triggers:
- - jms-messaging:
- provider-name: openci.activemq
- selector: CI_TYPE = 'custom'
- checks:
- - field: origin
- expected-value: 'OPNFV'
- - field: type
- expected-value: 'ConfidenceLevelModifiedEvent'
- - field: scenario
- expected-value: 'k8-nosdn-onap'
-
- builders:
- - shell: |
- #!/bin/bash
-
- echo
- echo "Triggered by $type"
- echo "----------------------------------"
- echo $CI_MESSAGE
- echo "----------------------------------"
- echo " Promoted ONAP Autorelease artifact as release candidate!"
diff --git a/jjb/openci/openci-opnfv-daily-jobs.yaml b/jjb/openci/openci-opnfv-daily-jobs.yaml
deleted file mode 100644
index cb150148b..000000000
--- a/jjb/openci/openci-opnfv-daily-jobs.yaml
+++ /dev/null
@@ -1,149 +0,0 @@
----
-- project:
- name: openci-opnfv
-
- project: '{name}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
- scenario:
- - 'os-odl-nofeature-ha':
- origin: ODL
- - 'k8-nosdn-onap-ha':
- origin: ONAP
-
- jobs:
- - 'openci-opnfv-{scenario}-compose-daily-{stream}'
- - 'openci-opnfv-{scenario}-test-daily-{stream}'
-
-- job-template:
- name: 'openci-opnfv-{scenario}-compose-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - string:
- name: PUBLISH_EVENT_TYPE
- default: CompositionDefinedEvent
- description: 'The type of the event this job publishes'
- - string:
- name: PUBLISH_EVENT_ORIGIN
- default: OPNFV
- description: 'Originating community'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
- description: 'The scenario that is composed'
- - string:
- name: SCENARIO_METADATA_LOCATION
- default: https://url/to/scenario/metadata/on/opnfv/artifact/repo/$BUILD_NUMBER
- description: 'The location of the scenario metadata'
- - 'opnfv-build-defaults'
-
- triggers:
- - jms-messaging:
- provider-name: openci.activemq
- selector: CI_TYPE = 'custom'
- checks:
- - field: origin
- expected-value: '{origin}'
- - field: type
- expected-value: 'ArtifactPublishedEvent'
-
- builders:
- - shell: |
- #!/bin/bash
-
- echo
- echo "Triggered by $type"
- echo "----------------------------------"
- echo $CI_MESSAGE
- echo "----------------------------------"
- - shell:
- !include-raw-escape: ./create-cde.sh
- - inject:
- properties-file: "$WORKSPACE/event.properties"
-
- publishers:
- - jms-messaging:
- provider-name: openci.activemq
- msg-type: Custom
- msg-props: |
- type=$type
- origin=$origin
- scenario=$scenario
- msg-content:
- $eventBody
-
-- job-template:
- name: 'openci-opnfv-{scenario}-test-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - string:
- name: PUBLISH_EVENT_TYPE
- default: ConfidenceLevelModifiedEvent
- description: 'The type of the event this job publishes'
- - string:
- name: PUBLISH_EVENT_ORIGIN
- default: OPNFV
- description: 'Originating community'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
- description: 'The scenario that is composed'
- - string:
- name: SCENARIO_METADATA_LOCATION
- default: https://url/to/scenario/metadata/on/opnfv/artifact/repo/$BUILD_NUMBER
- description: 'The location of the scenario metadata'
- - string:
- name: CONFIDENCE_LEVEL
- default: "'opnfvdaily': 'SUCCESS'"
- description: 'The confidence level the published artifact gained'
- - 'opnfv-build-defaults'
-
- triggers:
- - jms-messaging:
- provider-name: openci.activemq
- selector: CI_TYPE = 'custom'
- checks:
- - field: origin
- expected-value: 'OPNFV'
- - field: type
- expected-value: 'CompositionDefinedEvent'
- - field: scenario
- expected-value: '{scenario}'
-
- builders:
- - shell: |
- #!/bin/bash
-
- echo
- echo "Triggered by $type"
- echo "----------------------------------"
- echo $CI_MESSAGE
- echo "----------------------------------"
- - shell:
- !include-raw-escape: ./create-clme.sh
- - inject:
- properties-file: "$WORKSPACE/event.properties"
-
- publishers:
- - jms-messaging:
- provider-name: openci.activemq
- msg-type: Custom
- msg-props: |
- type=$type
- origin=$origin
- scenario=$scenario
- msg-content:
- $eventBody
diff --git a/jjb/opera/opera-daily-jobs.yaml b/jjb/opera/opera-daily-jobs.yaml
deleted file mode 100644
index f0841a34b..000000000
--- a/jjb/opera/opera-daily-jobs.yaml
+++ /dev/null
@@ -1,99 +0,0 @@
----
-- project:
- name: 'opera-daily-jobs'
-
- project: 'opera'
-
- #####################################
- # branch definitions
- #####################################
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- disabled: true
-
- #####################################
- # pod definitions
- #####################################
- pod:
- - virtual:
- slave-label: 'huawei-virtual7'
- os-version: 'xenial'
- <<: *master
-
- #####################################
- # jobs
- #####################################
- jobs:
- - 'opera-{pod}-daily-{stream}'
-
-#####################################
-# job templates
-#####################################
-- job-template:
- name: 'opera-{pod}-daily-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 240
- fail: true
-
- triggers:
- - timed: '@midnight'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: DEPLOY_SCENARIO
- default: os-nosdn-openo-ha
- - '{slave-label}-defaults'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: deploy
- condition: SUCCESSFUL
- projects:
- - name: 'compass-deploy-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-openo-ha
- COMPASS_OS_VERSION=xenial
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: functest
- condition: SUCCESSFUL
- projects:
- - name: 'functest-compass-{pod}-suite-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-openo-ha
- FUNCTEST_MODE=testcase
- FUNCTEST_SUITE_NAME=opera_vims
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
diff --git a/jjb/opera/opera-project-jobs.yaml b/jjb/opera/opera-project-jobs.yaml
deleted file mode 100644
index f866342a4..000000000
--- a/jjb/opera/opera-project-jobs.yaml
+++ /dev/null
@@ -1,56 +0,0 @@
----
-- project:
-
- name: opera-project
-
- project: 'opera'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
-
- jobs:
- - 'opera-build-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
- name: 'opera-build-{stream}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - timed: 'H 23 * * *'
-
- builders:
- - 'opera-build-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'opera-build-macro'
- builders:
- - shell: |
- #!/bin/bash
-
- echo "Hello world!"
diff --git a/jjb/opera/opera-verify-jobs.yaml b/jjb/opera/opera-verify-jobs.yaml
deleted file mode 100644
index 97c9a43ec..000000000
--- a/jjb/opera/opera-verify-jobs.yaml
+++ /dev/null
@@ -1,157 +0,0 @@
----
-- project:
- name: 'opera-verify-jobs'
-
- project: 'opera'
-
- #####################################
- # branch definitions
- #####################################
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: true
-
- #####################################
- # patch verification phases
- #####################################
- phase:
- - 'basic'
- - 'deploy'
-
- #####################################
- # jobs
- #####################################
- jobs:
- - 'opera-verify-{stream}'
- - 'opera-verify-{phase}-{stream}'
-#####################################
-# job templates
-#####################################
-- job-template:
- name: 'opera-verify-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 120
- fail: true
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '**/*'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
- readable-message: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'huawei-pod7-defaults'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: basic
- condition: SUCCESSFUL
- projects:
- - name: 'opera-verify-basic-{stream}'
- current-parameters: true
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: deploy
- condition: SUCCESSFUL
- projects:
- - name: 'opera-verify-deploy-{stream}'
- current-parameters: true
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
-
-- job-template:
- name: 'opera-verify-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 120
- fail: true
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-verify-{phase}-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'opera-verify-basic-macro'
- builders:
- - shell: |
- #!/bin/bash
- echo "Hello world!"
-
-- builder:
- name: 'opera-verify-deploy-macro'
- builders:
- - shell: |
- #!/bin/bash
- echo "Hello world!"
diff --git a/jjb/opnfvdocs/docs-rtd.yaml b/jjb/opnfvdocs/docs-rtd.yaml
deleted file mode 100644
index ece856903..000000000
--- a/jjb/opnfvdocs/docs-rtd.yaml
+++ /dev/null
@@ -1,99 +0,0 @@
----
-- project:
- name: docs-rtd
- jobs:
- - 'docs-merge-rtd-{stream}'
- - 'docs-verify-rtd-{stream}'
-
- stream:
- - master:
- branch: 'master'
- - fraser:
- branch: 'stable/{stream}'
- - danube:
- branch: 'stable/{stream}'
- - euphrates:
- branch: 'stable/{stream}'
-
- project: 'opnfvdocs'
- rtdproject: 'opnfv'
- # TODO: Archive Artifacts
-
-- job-template:
- name: 'docs-merge-rtd-{stream}'
-
- project-type: freestyle
-
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'lf-build1'
- description: 'Slave label on Jenkins'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
-
- triggers:
- - gerrit-trigger-change-merged:
- project: '**'
- branch: '{branch}'
- files: 'docs/**/*.*'
-
- builders:
- - 'remove-old-docs-from-opnfv-artifacts'
- - shell: |
- if [ $GERRIT_BRANCH == "master" ]; then
- RTD_BUILD_VERSION=latest
- else
- RTD_BUILD_VERSION=${{GERRIT_BRANCH/\//-}}
- fi
- curl -X POST --data "version_slug=$RTD_BUILD_VERSION" https://readthedocs.org/build/opnfvdocsdemo
-
-
-- job-template:
- name: 'docs-verify-rtd-{stream}'
-
- project-type: freestyle
-
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'lf-build2'
- description: 'Slave label on Jenkins'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/opnfvdocs
- description: 'Git URL to use on this Jenkins Slave'
-
- scm:
- - git-scm-with-submodules:
- branch: '{branch}'
-
- triggers:
- - gerrit-trigger-patchset-created:
- server: 'gerrit.opnfv.org'
- project: '**'
- branch: '{branch}'
- files: 'docs/**/*.*'
-
- builders:
- - shell: |
- if [ "$GERRIT_PROJECT" != "opnfvdocs" ]; then
- cd docs/submodules/$GERRIT_PROJECT
- git fetch origin $GERRIT_REFSPEC && git checkout FETCH_HEAD
- else
- git fetch origin $GERRIT_REFSPEC && git checkout FETCH_HEAD
- fi
- - shell: |
- sudo pip install virtualenv
- virtualenv $WORKSPACE/venv
- . $WORKSPACE/venv/bin/activate
- pip install --upgrade pip
- pip freeze
- pip install tox
- sed -i s,\-b\ html,\-b\ singlehtml,g tox.ini
- tox -edocs
- - 'upload-review-docs'
diff --git a/jjb/opnfvdocs/opnfvdocs-views.yaml b/jjb/opnfvdocs/opnfvdocs-views.yaml
new file mode 100644
index 000000000..f33c728ed
--- /dev/null
+++ b/jjb/opnfvdocs/opnfvdocs-views.yaml
@@ -0,0 +1,12 @@
+---
+- view:
+ name: opnfvdocs
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^opnfvdocs-.*
diff --git a/jjb/opnfvdocs/opnfvdocs.yaml b/jjb/opnfvdocs/opnfvdocs.yaml
index 5f08f69a4..3047a740e 100644
--- a/jjb/opnfvdocs/opnfvdocs.yaml
+++ b/jjb/opnfvdocs/opnfvdocs.yaml
@@ -1,115 +1,62 @@
---
-########################
-# Job configuration for opnfvdocs
-########################
-- project:
-
- name: opnfvdocs
-
- project: '{name}'
-
- jobs:
- - 'opnfvdocs-verify-shellcheck-{stream}'
- - 'opnfvdocs-merge-shellcheck-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- - euphrates:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-########################
-# job templates
-########################
-
-- job-template:
- name: 'opnfvdocs-verify-shellcheck-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - string:
- name: GIT_CLONE_BASE
- default: ssh://gerrit.opnfv.org:29418
- description: "Used for overriding the GIT URL coming from parameters macro."
-
+- scm:
+ name: opnfvdocs-scm
scm:
- - git-scm-gerrit
-
+ - git:
+ url: 'https://gerrit.opnfv.org/gerrit/opnfvdocs'
+ refspec: '+refs/heads/*:refs/remotes/origin/* +refs/changes/*:refs/changes/*'
+ branches:
+ - '{ref}'
+
+- builder:
+ name: opnfvdocs-lint-bash-code
+ builders:
+ - shell: |
+ #!/bin/bash
+ echo "Checking bash code..."
+ for f in $(egrep '\.sh$' modified_files)
+ do
+ bash -n "$f" 2>> bash-violation.log
+ done
+ if [[ -s bash-violation.log ]]; then
+ echo -e "Bash syntax error(s)\n---" >> violation.log
+ sed -e 's/^/ /g' bash-violation.log >> violation.log
+ fi
+
+- trigger:
+ name: opnfvdocs-patchset-created
triggers:
- gerrit:
- server-name: 'gerrit.opnfv.org'
trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
+ - patchset-created-event
- comment-added-contains-event:
comment-contains-value: 'recheck'
- comment-added-contains-event:
comment-contains-value: 'reverify'
projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: '{project}'
+ - project-compare-type: 'ANT'
+ project-pattern: 'opnfvdocs'
branches:
- branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- skip-vote:
- successful: true
- failed: true
- unstable: true
- notbuilt: true
-
- builders:
- - lint-bash-code
+ branch-pattern: master
+ - branch-compare-type: 'ANT'
+ branch-pattern: stable/nile
- job-template:
- name: 'opnfvdocs-merge-shellcheck-{stream}'
-
- disabled: '{obj:disabled}'
-
+ name: opnfvdocs-verify-shellcheck
parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - string:
- name: GIT_CLONE_BASE
- default: ssh://gerrit.opnfv.org:29418
- description: "Used for overriding the GIT URL coming from parameters macro."
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "Directory where the build artifact will be located upon\
- \ the completion of the build."
-
+ - label:
+ name: node
+ default: opnfv-build
scm:
- - git-scm
-
+ - opnfvdocs-scm:
+ ref: $GERRIT_REFSPEC
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
+ - opnfvdocs-patchset-created
builders:
- - lint-bash-code
+ - opnfvdocs-lint-bash-code
+
+- project:
+ name: opnfvdocs
+ jobs:
+ - opnfvdocs-verify-shellcheck
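Editor's note: the rewritten opnfvdocs-lint-bash-code builder reads a modified_files list that the job is expected to provide; how that list is produced is not shown in this change. A local approximation (an assumption, using git to build the list) would be:

# Build the file list the lint step expects, then run the same syntax checks
git diff --name-only HEAD~1 > modified_files
for f in $(grep -E '\.sh$' modified_files); do
    bash -n "$f" 2>> bash-violation.log
done
[ -s bash-violation.log ] && cat bash-violation.log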
diff --git a/jjb/opnfvdocs/project.cfg b/jjb/opnfvdocs/project.cfg
deleted file mode 100644
index 598269737..000000000
--- a/jjb/opnfvdocs/project.cfg
+++ /dev/null
@@ -1,42 +0,0 @@
-apex
-armband
-availability
-bottlenecks
-clover
-compass4nfv
-copper
-conductor
-container4nfv
-daisy
-doctor
-domino
-dovetail
-dpacc
-escalator
-fastpathmetrics
-fuel
-functest
-genesisreq
-ipv6
-joid
-kvmfornfv
-lsoapi
-moon
-movie
-multisite
-octopus
-onosfw
-ovno
-ovsnfv
-parser
-pharos
-policytest
-prediction
-promise
-qtip
-sdnvpn
-sfc
-storperf
-vnf_forwarding_graph
-vswitchperf
-yardstick
diff --git a/jjb/orchestra/orchestra-daily-jobs.yaml b/jjb/orchestra/orchestra-daily-jobs.yaml
deleted file mode 100644
index 7c2deaec4..000000000
--- a/jjb/orchestra/orchestra-daily-jobs.yaml
+++ /dev/null
@@ -1,100 +0,0 @@
----
-###################################
-# job configuration for orchestra
-###################################
-- project:
- name: 'orchestra-daily-jobs'
-
- project: 'orchestra'
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
- # ------------------------------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # ------------------------------------------------------
- pod:
- - virtual:
- slave-label: 'joid-virtual'
- os-version: 'xenial'
- <<: *master
-
- jobs:
- - 'orchestra-{pod}-daily-{stream}'
-
-################################
-# job template
-################################
-- job-template:
- name: 'orchestra-{pod}-daily-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
-
- - timeout:
- timeout: 240
- fail: true
-
- triggers:
- - timed: '@daily'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: DEPLOY_SCENARIO
- default: os-nosdn-openbaton-ha
- - '{slave-label}-defaults'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: deploy
- condition: SUCCESSFUL
- projects:
- - name: 'joid-deploy-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-openbaton-ha
- COMPASS_OS_VERSION=xenial
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: functest
- condition: SUCCESSFUL
- projects:
- - name: 'functest-joid-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-openbaton-ha
- FUNCTEST_MODE=testcase
- FUNCTEST_SUITE_NAME=orchestra_ims
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
diff --git a/jjb/orchestra/orchestra-project-jobs.yaml b/jjb/orchestra/orchestra-project-jobs.yaml
deleted file mode 100644
index 60732ce03..000000000
--- a/jjb/orchestra/orchestra-project-jobs.yaml
+++ /dev/null
@@ -1,49 +0,0 @@
----
-- project:
-
- name: orchestra-project
-
- project: 'orchestra'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
-
- jobs:
- - 'orchestra-build-{stream}'
-
-- job-template:
- name: 'orchestra-build-{stream}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
-
- scm:
- - git-scm
-
- triggers:
- - timed: 'H 23 * * *'
-
- builders:
- - 'orchestra-build-macro'
-
-- builder:
- name: 'orchestra-build-macro'
- builders:
- - shell: |
- #!/bin/bash
-
- echo "Hello world!"
diff --git a/jjb/ovn4nfv/ovn4nfv-daily-jobs.yaml b/jjb/ovn4nfv/ovn4nfv-daily-jobs.yaml
deleted file mode 100644
index ca4cfeede..000000000
--- a/jjb/ovn4nfv/ovn4nfv-daily-jobs.yaml
+++ /dev/null
@@ -1,88 +0,0 @@
----
-- project:
- name: 'ovn4nfv-daily-jobs'
-
- project: 'ovn4nfv'
-
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
- pod:
- - virtual:
- slave-label: 'joid-virtual'
- os-version: 'xenial'
- <<: *master
-
- jobs:
- - 'ovn4nfv-{pod}-daily-{stream}'
-
-- job-template:
- name: 'ovn4nfv-{pod}-daily-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
-
- - timeout:
- timeout: 240
- fail: true
-
- triggers:
- - timed: '@daily'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: DEPLOY_SCENARIO
- default: os-ovn-nofeature-noha
- - '{slave-label}-defaults'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: deploy
- condition: SUCCESSFUL
- projects:
- - name: 'joid-deploy-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=os-ovn-nofeature-noha
- COMPASS_OS_VERSION=xenial
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: functest
- condition: SUCCESSFUL
- projects:
- - name: 'functest-joid-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=os-ovn-nofeature-ha
- FUNCTEST_MODE=testcase
- FUNCTEST_SUITE_NAME=ovn4nfv_test_suite
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
diff --git a/jjb/ovn4nfv/ovn4nfv-project-jobs.yaml b/jjb/ovn4nfv/ovn4nfv-project-jobs.yaml
deleted file mode 100644
index 2ce4b6df9..000000000
--- a/jjb/ovn4nfv/ovn4nfv-project-jobs.yaml
+++ /dev/null
@@ -1,56 +0,0 @@
----
-- project:
- name: ovn4nfv
-
- project: '{name}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
- jobs:
- - 'ovn4nfv-build-{stream}'
-
-- job-template:
- name: 'ovn4nfv-build-{stream}'
-
- concurrent: true
-
- disabled: '{obj:disabled}'
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - opnfv-build-defaults
-
- scm:
- - git-scm
-
- triggers:
- - timed: 'H 23 * * *'
-
- builders:
- - 'ovn4nfv-build-macro'
-
-- builder:
- name: 'ovn4nfv-build-macro'
- builders:
- - shell: |
- #!/bin/bash
-
- echo "hello world"
diff --git a/jjb/ovsnfv/ovsnfv.yaml b/jjb/ovsnfv/ovsnfv.yaml
deleted file mode 100644
index 87c6ca14f..000000000
--- a/jjb/ovsnfv/ovsnfv.yaml
+++ /dev/null
@@ -1,157 +0,0 @@
----
-- project:
- name: ovsnfv
-
- project: '{name}'
-
- jobs:
- - 'ovsnfv-verify-{stream}'
- - 'ovsnfv-merge-{stream}'
- - 'ovsnfv-daily-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - euphrates:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-- job-template:
- name: 'ovsnfv-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-centos-defaults'
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "Directory where the build artifact will be located upon the completion of the build."
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'build/**'
- - compare-type: ANT
- pattern: 'ci/**'
-
- builders:
- - build-rpms
-
-- job-template:
- name: 'ovsnfv-merge-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-centos-defaults'
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "Directory where the build artifact will be located upon the completion of the build."
-
- scm:
- - git-scm
-
- wrappers:
- - timeout:
- timeout: 24
- fail: true
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'build/**'
- - compare-type: ANT
- pattern: 'ci/**'
-
- builders:
- - build-rpms
-
-- job-template:
- name: 'ovsnfv-daily-{stream}'
-
- disabled: 'true'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-centos-defaults'
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "Directory where the build artifact will be located upon the completion of the build."
-
- scm:
- - git-scm
-
- wrappers:
- - timeout:
- timeout: 24
- fail: true
-
- triggers:
- - timed: '@midnight'
-
- builders:
- - build-rpms
-
- publishers:
- - email:
- recipients: therbert@redhat.com mark.d.gray@intel.com billy.o.mahony@intel.com
- - email-jenkins-admins-on-failure
-
-- builder:
- name: build-rpms
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o nounset
- set -o pipefail
-
- cd $WORKSPACE/ci
- ./build.sh
diff --git a/jjb/parser/parser.yaml b/jjb/parser/parser.yaml
deleted file mode 100644
index 574138f6d..000000000
--- a/jjb/parser/parser.yaml
+++ /dev/null
@@ -1,78 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: parser
-
- project: '{name}'
-
- jobs:
- - 'parser-verify-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-- job-template:
- name: 'parser-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
- - compare-type: ANT
- pattern: 'governance/**'
- - compare-type: ANT
- pattern: '*.txt|.gitignore|.gitreview|INFO|LICENSE'
-
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
- set -o xtrace
- export PATH=$PATH:/usr/local/bin/
-
- # run unit tests and pep8 checks on parser/tosca2heat
- echo "Running tox on tosca2heat/tosca-parser ..."
- cd $WORKSPACE/tosca2heat/tosca-parser && tox
- echo "Running tox on tosca2heat/heat-translator ..."
- cd $WORKSPACE/tosca2heat/heat-translator && tox
diff --git a/jjb/pharos/check-jinja2.yaml b/jjb/pharos/check-jinja2.yaml
deleted file mode 100644
index e3f0df78a..000000000
--- a/jjb/pharos/check-jinja2.yaml
+++ /dev/null
@@ -1,98 +0,0 @@
----
-########################
-# Job configuration to validate jinja2 files
-########################
-- project:
-
- name: validate-pdf-templates
-
- project: 'pharos'
-
- jobs:
- - 'validate-pdf-jinja2-templates-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- disabled: false
- - euphrates:
- branch: 'stable/{stream}'
- disabled: false
-
-########################
-# job templates
-########################
-
-- job-template:
- name: 'validate-pdf-jinja2-templates-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - node:
- name: SLAVE_NAME
- description: Slave to execute jnija template test
- default-slaves:
- - lf-build1
- allowed-multiselect: true
- ignore-offline-nodes: true
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '**/*.yaml'
- - compare-type: ANT
- pattern: 'config/utils/*.py'
- - compare-type: ANT
- pattern: 'config/installers/**/*.j2'
- - compare-type: ANT
- pattern: 'check-*.sh'
- skip-vote:
- successful: false
- failed: false
- unstable: false
- notbuilt: false
-
- builders:
- - check-pdf-jinja
- - check-pdf-schema
-
-- builder:
- name: check-pdf-jinja
- builders:
- - shell: |
- $WORKSPACE/config/utils/check-jinja2.sh
-- builder:
- name: check-pdf-schema
- builders:
- - shell: |
- $WORKSPACE/config/utils/check-schema.sh
diff --git a/jjb/pharos/pharos.yaml b/jjb/pharos/pharos.yaml
deleted file mode 100644
index c1bb1ba0e..000000000
--- a/jjb/pharos/pharos.yaml
+++ /dev/null
@@ -1,88 +0,0 @@
----
-- project:
- name: pharos
-
- project:
- - '{name}'
- - '{name}-tools'
-
- disabled: false
-
- jobs:
- - '{project}-verify-basic'
- - 'backup-pharos-dashboard'
- - 'deploy-pharos-dashboard':
- disabled: true
-
-- job-template:
- name: 'backup-pharos-dashboard'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - 'pharos-dashboard-defaults'
-
- triggers:
- - timed: '@daily'
-
- builders:
- - shell: |
- BACKUP_DIR=$HOME/backups/
- TMP_DIR=$HOME/tmp/
- mkdir -p $BACKUP_DIR
- echo "-- $(date +%Y%m%d) --"
- echo "Backing up Pharos Dashboard data..."
- sudo docker run --rm \
- -v pharos-data:/pharos-data:ro \
- -v $TMP_DIR:/backup \
- alpine \
- tar -czf /backup/pharos-dashboard-db-$(date +%Y%m%d).tar.gz -C /pharos-data ./
- sudo mv $TMP_DIR/pharos-dashboard-db-$(date +%Y%m%d).tar.gz $BACKUP_DIR
- sudo chown $USER:$USER $BACKUP_DIR/pharos-dashboard-db-$(date +%Y%m%d).tar.gz
- echo "...complete"
-
-- job-template:
- name: 'deploy-pharos-dashboard'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: 'pharos-tools'
- branch: 'master'
- - 'pharos-dashboard-defaults'
-
- scm:
- - git:
- choosing-strategy: 'gerrit'
- refspec: '$GERRIT_REFSPEC'
- branches:
- - 'origin/$BRANCH'
- timeout: 15
- credentials-id: '$SSH_CREDENTIAL_ID'
- url: '$GIT_BASE'
- skip-tag: true
- wipe-workspace: false
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - ref-updated-event
- - comment-added-contains-event:
- comment-contains-value: '^redeploy$'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'pharos-tools'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/master'
-
- builders:
- - shell: |
- cp $HOME/config.env $WORKSPACE/dashboard
- cp $HOME/rsa.pub $WORKSPACE/dashboard
- cp $HOME/rsa.pem $WORKSPACE/dashboard
- cd $WORKSPACE/dashboard
- sudo docker-compose build
- sudo docker-compose up -d
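The backup builder above only archives the pharos-data volume; no job in this file restores it. A minimal sketch of the inverse operation, reusing the same alpine/tar pattern (the backup filename and date are placeholders, and the restore itself is not part of any job here):

    #!/bin/bash
    # Restore a Pharos Dashboard backup into the pharos-data docker volume.
    BACKUP_FILE=$HOME/backups/pharos-dashboard-db-20180101.tar.gz   # placeholder date
    sudo docker run --rm \
        -v pharos-data:/pharos-data \
        -v "$(dirname "$BACKUP_FILE")":/backup:ro \
        alpine \
        tar -xzf "/backup/$(basename "$BACKUP_FILE")" -C /pharos-data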
diff --git a/jjb/promise/promise.yaml b/jjb/promise/promise.yaml
deleted file mode 100644
index 6e6fb54a8..000000000
--- a/jjb/promise/promise.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- project:
- name: promise
-
- project: '{name}'
-
- jobs:
- - '{project}-verify-basic'
diff --git a/jjb/qtip/qtip-experimental-jobs.yaml b/jjb/qtip/qtip-experimental-jobs.yaml
deleted file mode 100644
index 4e79c0553..000000000
--- a/jjb/qtip/qtip-experimental-jobs.yaml
+++ /dev/null
@@ -1,49 +0,0 @@
----
-###########################################
-# Experimental jobs for development purpose
-###########################################
-
-- project:
- name: qtip-experimental-jobs
- project: qtip
- jobs:
- - 'qtip-experimental-{pod}-{stream}'
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
- pod:
- - zte-virtual5:
- installer: apex
- pod: zte-virtual5
-
-################################
-## job templates
-#################################
-
-- job-template:
- name: 'qtip-experimental-{pod}-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{pod}-defaults'
- scm:
- - git-scm-gerrit
-
- triggers:
- - experimental:
- project: '{project}'
- branch: '{branch}'
- files: '**'
-
- builders:
- - shell: |
- #!/bin/bash
- source tests/ci/experimental.sh
diff --git a/jjb/qtip/qtip-validate-jobs.yaml b/jjb/qtip/qtip-validate-jobs.yaml
deleted file mode 100644
index 0fc623c74..000000000
--- a/jjb/qtip/qtip-validate-jobs.yaml
+++ /dev/null
@@ -1,159 +0,0 @@
----
-#######################
-# validate after MERGE
-#######################
-- project:
- name: qtip
- project: qtip
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- docker-tag: latest
- fraser: &fraser
- stream: fraser
- branch: 'stable/{stream}'
- gs-pathname: '{stream}'
- docker-tag: 'stable'
-
- # -------------------------------
- # JOB VARIABLES
- # -------------------------------
- qpi:
- - compute:
- installer: apex
- pod: zte-virtual5
- sut: node
- <<: *master
- - storage:
- installer: apex
- pod: zte-virtual5
- sut: ''
- <<: *master
- - network:
- installer: apex
- pod: zte-virtual5
- sut: ''
- <<: *master
- - compute:
- installer: fuel
- pod: zte-virtual6
- sut: vnf
- <<: *master
- - compute:
- installer: apex
- pod: zte-virtual5
- sut: node
- <<: *fraser
- - storage:
- installer: apex
- pod: zte-virtual5
- sut: ''
- <<: *fraser
- - compute:
- installer: fuel
- pod: zte-virtual6
- sut: vnf
- <<: *fraser
-
-
- # -------------------------------
- # JOB LIST
- # -------------------------------
- jobs:
- - 'qtip-{qpi}-{installer}-{pod}-{stream}'
-
-################################
-# job templates
-################################
-- job-template:
- name: 'qtip-{qpi}-{installer}-{pod}-{stream}'
- disabled: false
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{pod}-defaults'
- - string:
- name: DEPLOY_SCENARIO
- default: generic
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: 'Tag to pull docker image'
- - string:
- name: CI_DEBUG
- default: 'false'
- description: "Show debug output information"
- - string:
- name: TEST_SUITE
- default: '{qpi}'
- - string:
- name: TESTAPI_URL
- default: 'http://testresults.opnfv.org/test/api/v1'
- - string:
- name: OPNFV_RELEASE
- default: '{stream}'
- - string:
- name: SUT
- description: "System Under Test"
- default: '{sut}'
- scm:
- - git-scm
-
- wrappers:
- - timeout:
- timeout: 120
- abort: true
- - timestamps
-
- triggers:
- - 'qtip-daily'
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - shell: |
- #!/bin/bash
- source tests/ci/periodic.sh
-
- publishers:
- - qtip-common-publishers
- - email-jenkins-admins-on-failure
-
-################
-# MACROS
-################
-
-# --------
-# builder
-# --------
-
-# ----------
-# parameter
-# ----------
-
-# ----------
-# publisher
-# ----------
-
-- publisher:
- name: qtip-common-publishers
- publishers:
- - email:
- recipients: wu.zhihui1@zte.com.cn, zhang.yujunz@zte.com.cn
-
-# --------
-# trigger
-# --------
-
-- trigger:
- name: 'qtip-daily'
- triggers:
- - timed: '0 15 * * *'
diff --git a/jjb/qtip/qtip-verify-jobs.yaml b/jjb/qtip/qtip-verify-jobs.yaml
deleted file mode 100644
index 12cfa95b7..000000000
--- a/jjb/qtip/qtip-verify-jobs.yaml
+++ /dev/null
@@ -1,195 +0,0 @@
----
-######################
-# verify before MERGE
-######################
-
-- project:
- name: qtip-verify-jobs
- project: qtip
- jobs:
- - 'qtip-verify-{stream}'
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-################################
-## job templates
-#################################
-- job-template:
- name: 'qtip-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
- builders:
- - qtip-unit-tests-and-docs-build
- publishers:
- - publish-coverage
- - email-jenkins-admins-on-failure
-
-# Upload Jupyter notebooks to artifacts for review
-# TODO(yujunz): deal with *.ipynb deletion
-- job-template:
- name: 'qtip-verify-notebook-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- file-paths:
- - compare-type: ANT
- pattern: '**/*.ipynb'
-
- builders:
- - upload-under-review-notebooks-to-opnfv-artifacts
- - report-build-result-to-gerrit
-
-- job-template:
- name: 'qtip-merged-notebook-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "Directory where the build artifact will be located upon the completion of the build."
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '*'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '**/*.ipynb'
-
- builders:
- - remove-old-docs-from-opnfv-artifacts
-
-################################
-## job builders
-#################################
-- builder:
- name: qtip-unit-tests-and-docs-build
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
- set -o xtrace
-
- tox
-
-# modified from upload-under-review-docs-to-opnfv-artifacts
-# in global/releng-macro.yml
-- builder:
- name: upload-under-review-notebooks-to-opnfv-artifacts
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
- set -o xtrace
- export PATH=$PATH:/usr/local/bin/
-
- [[ $GERRIT_CHANGE_NUMBER =~ .+ ]]
-
- echo
- echo "###########################"
- echo "UPLOADING DOCS UNDER REVIEW"
- echo "###########################"
- echo
-
- gs_base="artifacts.opnfv.org/$PROJECT/review"
- gs_path="$gs_base/$GERRIT_CHANGE_NUMBER"
- local_path="upload/$GERRIT_CHANGE_NUMBER"
-
- mkdir -p $local_path
-
- git diff HEAD~1 --name-status | grep -E "[AM]\t.+\.ipynb$" | awk '{print $2}' \
- | xargs -I '{}' cp '{}' $local_path
- gsutil -m cp -r "$local_path" "gs://$gs_base/"
-
- echo "Document link(s):" >> gerrit_comment.txt
- find "$local_path" | grep -e 'ipynb$' | \
- sed -e "s|^$local_path| https://nbviewer.jupyter.org/url/$gs_path|" >> gerrit_comment.txt
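For reference, the path rewrite at the end of the builder above turns each uploaded notebook path into an nbviewer link for the Gerrit comment. Worked through with invented values (change 12345 in the qtip project, notebook examples/compute.ipynb):

    gs_path="artifacts.opnfv.org/qtip/review/12345"
    local_path="upload/12345"
    echo "upload/12345/examples/compute.ipynb" \
        | sed -e "s|^$local_path| https://nbviewer.jupyter.org/url/$gs_path|"
    # prints: " https://nbviewer.jupyter.org/url/artifacts.opnfv.org/qtip/review/12345/examples/compute.ipynb"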
diff --git a/jjb/releng/artifact-cleanup.yaml b/jjb/releng/artifact-cleanup.yaml
deleted file mode 100644
index 2a250618c..000000000
--- a/jjb/releng/artifact-cleanup.yaml
+++ /dev/null
@@ -1,41 +0,0 @@
----
-- project:
- name: releng-artifact-cleanup
-
- project: 'releng'
-
- jobs:
- - 'releng-artifact-cleanup-daily-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
-
-
-- job-template:
- name: 'releng-artifact-cleanup-daily-{stream}'
-
- # Job template for daily builders
- #
- # Required Variables:
- # stream: branch with - in place of / (e.g. stable)
- # branch: branch (e.g. stable)
- node: master
-
- disabled: false
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
-
- scm:
- - git-scm
-
- triggers:
- - timed: 'H H * * *'
-
- builders:
- - shell: |
- $WORKSPACE/utils/retention_script.sh
diff --git a/jjb/releng/branch-or-tag.sh b/jjb/releng/branch-or-tag.sh
new file mode 100755
index 000000000..f9767eca1
--- /dev/null
+++ b/jjb/releng/branch-or-tag.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+set -e -o pipefail
+set -x
+
+GIT_URL=${GIT_URL:-https://gerrit.opnfv.org/gerrit}
+STREAM=${STREAM:-'nostream'}
+RELEASE_FILES=$(git diff HEAD^1 --name-only -- "releases/$STREAM")
+
+echo "--> Verifying $RELEASE_FILES."
+for release_file in $RELEASE_FILES; do
+ # Verify the release file schema
+ python3 releases/scripts/verify_schema.py \
+ -s releases/schema.yaml \
+ -y $release_file
+done
+
+for release_file in $RELEASE_FILES; do
+ while read -r repo branch ref; do
+ echo "$repo" "$branch" "$ref"
+ unset branch_actual
+ branch_actual="$(git ls-remote "https://gerrit.opnfv.org/gerrit/$repo.git" "refs/heads/$branch" | awk '{print $1}')"
+
+ if [ -n "$branch_actual" ]; then
+ echo "$repo refs/heads/$branch already exists at $branch_actual"
+ source jjb/releng/releng-release-tagging.sh
+ else
+ echo "This is a branching job"
+ source jjb/releng/releng-release-create-branch.sh
+ fi
+
+ done < <(python3 releases/scripts/repos.py -b -f "$release_file")
+done
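The while-read loop in the new script expects releases/scripts/repos.py -b to emit one "repo branch ref" triple per line for every repository listed in the release file. A sketch of how a single triple flows through the branch-or-tag decision (the repo name, branch and SHA below are invented for illustration):

    # Example line from: python3 releases/scripts/repos.py -b -f "$release_file"
    #   sandbox stable/example 1a2b3c4d5e6f7a8b9c0d1e2f3a4b5c6d7e8f9a0b
    repo=sandbox
    branch=stable/example
    branch_actual="$(git ls-remote "https://gerrit.opnfv.org/gerrit/$repo.git" "refs/heads/$branch" | awk '{print $1}')"
    if [ -n "$branch_actual" ]; then
        echo "branch exists at $branch_actual -> tagging path (releng-release-tagging.sh)"
    else
        echo "branch missing -> branch creation path (releng-release-create-branch.sh)"
    fi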
diff --git a/jjb/releng/compass4nfv-docker.yaml b/jjb/releng/compass4nfv-docker.yaml
deleted file mode 100644
index 2a5453658..000000000
--- a/jjb/releng/compass4nfv-docker.yaml
+++ /dev/null
@@ -1,264 +0,0 @@
----
-##############################################
-# job configuration for docker build and push
-##############################################
-- project:
-
- name: compass-docker
-
- project: compass-containers
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - euphrates:
- branch: 'stable/{stream}'
- disabled: true
-
- arch_tag:
- - 'amd64':
- slave_label: 'opnfv-build-ubuntu'
- docker_file: 'Dockerfile'
- - 'arm64':
- slave_label: 'opnfv-build-ubuntu-arm'
- docker_file: 'Dockerfile-arm64'
-
- # yamllint disable rule:key-duplicates
- image:
- - 'tasks'
- - 'cobbler'
- - 'db'
- - 'deck'
- - 'tasks-base'
- - 'tasks-k8s'
- - 'tasks-osa'
-
- # settings for jobs run in multijob phases
- build-job-settings: &build-job-settings
- current-parameters: false
- git-revision: true
- node-parameters: false
- predefined-parameters: |
- PUSH_IMAGE=$PUSH_IMAGE
- COMMIT_ID=$COMMIT_ID
- GERRIT_REFNAME=$GERRIT_REFNAME
- kill-phase-on: FAILURE
- abort-all-jobs: false
-
- manifest-job-settings: &manifest-job-settings
- current-parameters: false
- git-revision: true
- node-parameters: false
- predefined-parameters:
- GERRIT_REFNAME=$GERRIT_REFNAME
- kill-phase-on: FAILURE
- abort-all-jobs: false
-
- # yamllint enable rule:key-duplicates
- jobs:
- - "compass-docker-{stream}"
- - "compass-{image}-build-{arch_tag}-{stream}"
- - "compass-{image}-manifest-{stream}"
-
-########################
-# job templates
-########################
-- job-template:
- name: 'compass-docker-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- parameters:
- - compass-job-parameters:
- project: '{project}'
- branch: '{branch}'
- slave_label: 'opnfv-build-ubuntu'
- docker_file: 'Dockerfile'
- arch_tag: 'amd64'
-
- properties:
- - throttle:
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm
-
- triggers:
- - pollscm:
- cron: "*/30 * * * *"
-
- builders:
- - multijob:
- name: 'build compass-tasks-base images'
- execution-type: PARALLEL
- projects:
- - name: 'compass-tasks-base-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'compass-tasks-base-build-arm64-{stream}'
- <<: *build-job-settings
- - multijob:
- name: 'publish compass-tasks-base manifests'
- execution-type: PARALLEL
- projects:
- - name: 'compass-tasks-base-manifest-{stream}'
- <<: *manifest-job-settings
- - multijob:
- name: 'build all compass images'
- condition: SUCCESSFUL
- execution-type: PARALLEL
- projects:
- - name: 'compass-cobbler-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'compass-db-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'compass-deck-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'compass-tasks-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'compass-tasks-k8s-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'compass-tasks-osa-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'compass-cobbler-build-arm64-{stream}'
- <<: *build-job-settings
- - name: 'compass-db-build-arm64-{stream}'
- <<: *build-job-settings
- - name: 'compass-deck-build-arm64-{stream}'
- <<: *build-job-settings
- - name: 'compass-tasks-build-arm64-{stream}'
- <<: *build-job-settings
- - name: 'compass-tasks-k8s-build-arm64-{stream}'
- <<: *build-job-settings
- - name: 'compass-tasks-osa-build-arm64-{stream}'
- <<: *build-job-settings
- - multijob:
- name: 'publish all manifests'
- execution-type: PARALLEL
- projects:
- - name: 'compass-cobbler-manifest-{stream}'
- <<: *manifest-job-settings
- - name: 'compass-db-manifest-{stream}'
- <<: *manifest-job-settings
- - name: 'compass-deck-manifest-{stream}'
- <<: *manifest-job-settings
- - name: 'compass-tasks-manifest-{stream}'
- <<: *manifest-job-settings
- - name: 'compass-tasks-k8s-manifest-{stream}'
- <<: *manifest-job-settings
- - name: 'compass-tasks-osa-manifest-{stream}'
- <<: *manifest-job-settings
-
- publishers:
- - 'compass-amd64-recipients'
- - 'compass-arm64-recipients'
-
-- job-template:
- name: 'compass-{image}-build-{arch_tag}-{stream}'
- disabled: '{obj:disabled}'
- parameters:
- - compass-job-parameters:
- project: '{project}'
- branch: '{branch}'
- slave_label: '{slave_label}'
- docker_file: '{docker_file}'
- arch_tag: '{arch_tag}'
- - string:
- name: DOCKER_REPO_NAME
- default: "opnfv/compass-{image}"
- description: "Dockerhub repo to be pushed to."
- - string:
- name: DOCKER_DIR
- default: "compass-{image}"
- description: "Directory containing files needed by the Dockerfile"
- scm:
- - git-scm
- builders:
- - shell:
- !include-raw-escape: ./opnfv-docker.sh
-
-- job-template:
- name: 'compass-{image}-manifest-{stream}'
- disabled: '{obj:disabled}'
- parameters:
- - compass-job-parameters:
- project: '{project}'
- branch: '{branch}'
- slave_label: 'opnfv-build-ubuntu'
- docker_file: 'Dockerfile'
- arch_tag: 'amd64'
- builders:
- - shell: |
- #!/bin/bash -ex
- case "{stream}" in
- "master")
- tag="latest" ;;
- *)
- tag="{stream}" ;;
- esac
- sudo manifest-tool push from-args \
- --platforms linux/amd64,linux/arm64 \
- --template opnfv/compass-{image}:ARCH-$tag \
- --target opnfv/compass-{image}:$tag
- exit $?
-
-# parameter macro
-- parameter:
- name: compass-job-parameters
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: '{slave_label}'
- description: 'Slave label on Jenkins'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: PUSH_IMAGE
- default: "true"
- description: "To enable/disable pushing the image to Dockerhub."
- - string:
- name: COMMIT_ID
- default: ""
- description: "commit id to make a snapshot docker image"
- - string:
- name: GERRIT_REFNAME
- default: ""
- description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
- - string:
- name: DOCKERFILE
- default: '{docker_file}'
- description: "Dockerfile to use for creating the image."
- - string:
- name: ARCH_TAG
- default: "{arch_tag}"
- description: "If set, this value will be added to the docker image tag as a prefix"
- - string:
- name: PROJECT
- default: "{project}"
- description: "Project name used to enable job conditions"
-
-# publisher macros
-- publisher:
- name: 'compass-amd64-recipients'
- publishers:
- - email:
- recipients: >
- chigang@huawei.com
- huangxiangyui5@huawei.com
- xueyifei@huawei.com
- wutianwei1@huawei.com
-
-- publisher:
- name: 'compass-arm64-recipients'
- publishers:
- - email:
- recipients: >
- yibo.cai@arm.com
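Context for the manifest jobs above: the per-arch build jobs push images whose tags carry the architecture as a prefix (amd64-latest, arm64-latest), produced by the ARCH_TAG handling in opnfv-docker.sh. For the master stream the manifest-tool call expands roughly as below, with compass-tasks picked as an example (manifest-tool substitutes the ARCH placeholder per platform):

    # ARCH-latest resolves to opnfv/compass-tasks:amd64-latest and
    # opnfv/compass-tasks:arm64-latest; both are combined into :latest.
    sudo manifest-tool push from-args \
        --platforms linux/amd64,linux/arm64 \
        --template opnfv/compass-tasks:ARCH-latest \
        --target opnfv/compass-tasks:latest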
diff --git a/jjb/releng/opnfv-docker-arm.yaml b/jjb/releng/opnfv-docker-arm.yaml
deleted file mode 100644
index 9066b419e..000000000
--- a/jjb/releng/opnfv-docker-arm.yaml
+++ /dev/null
@@ -1,198 +0,0 @@
----
-##############################################
-# job configuration for docker build and push
-##############################################
-
-- project:
-
- name: opnfv-docker-arm
-
- master: &master
- stream: master
- branch: '{stream}'
- disabled: false
- euphrates: &euphrates
- stream: euphrates
- branch: 'stable/{stream}'
- disabled: false
- fraser: &fraser
- stream: fraser
- branch: 'stable/{stream}'
- disabled: false
- dovetail-arm-receivers: &dovetail-arm-receivers
- receivers: >
- cristina.pauna@enea.com
- alexandru.avadanii@enea.com
- storperf-arm-receivers: &storperf-arm-receivers
- receivers: >
- cristina.pauna@enea.com
- alexandru.avadanii@enea.com
- mark.beierl@emc.com
- yardstick-arm-receivers: &yardstick-arm-receivers
- receivers: >
- cristina.pauna@enea.com
- alexandru.avadanii@enea.com
- delia.popescu@enea.com
- other-receivers: &other-receivers
- receivers: ''
-
- dockerfile: "Dockerfile"
- dockerdir: "docker"
- docker_repo_name: "opnfv/{project}_aarch64"
- arch_tag: ""
-
- # yamllint disable rule:key-duplicates
- dockerrepo:
- # projects with jobs for master
- - 'dovetail':
- project: 'dovetail'
- <<: *master
- <<: *dovetail-arm-receivers
- - 'storperf-master':
- project: 'storperf'
- dockerdir: 'docker/storperf-master'
- docker_repo_name: 'opnfv/storperf-master'
- arch_tag: 'aarch64'
- <<: *master
- <<: *storperf-arm-receivers
- - 'storperf-graphite':
- project: 'storperf'
- dockerdir: 'docker/storperf-graphite'
- docker_repo_name: 'opnfv/storperf-graphite'
- arch_tag: 'aarch64'
- <<: *master
- <<: *storperf-arm-receivers
- - 'storperf-httpfrontend':
- project: 'storperf'
- dockerdir: 'docker/storperf-httpfrontend'
- docker_repo_name: 'opnfv/storperf-httpfrontend'
- arch_tag: 'aarch64'
- <<: *master
- <<: *storperf-arm-receivers
- - 'storperf-reporting':
- project: 'storperf'
- dockerdir: 'docker/storperf-reporting'
- docker_repo_name: 'opnfv/storperf-reporting'
- arch_tag: 'aarch64'
- <<: *master
- <<: *storperf-arm-receivers
- - 'storperf-swaggerui':
- project: 'storperf'
- dockerdir: 'docker/storperf-swaggerui'
- docker_repo_name: 'opnfv/storperf-swaggerui'
- arch_tag: 'aarch64'
- <<: *master
- <<: *storperf-arm-receivers
- - 'yardstick':
- project: 'yardstick'
- <<: *master
- <<: *yardstick-arm-receivers
- # projects with jobs for stable/euphrates
- - 'dovetail':
- project: 'dovetail'
- <<: *euphrates
- <<: *dovetail-arm-receivers
- - 'storperf-master':
- project: 'storperf'
- dockerdir: 'docker/storperf-master'
- docker_repo_name: 'opnfv/storperf-master'
- arch_tag: 'aarch64'
- <<: *fraser
- <<: *storperf-arm-receivers
- - 'storperf-graphite':
- project: 'storperf'
- dockerdir: 'docker/storperf-graphite'
- docker_repo_name: 'opnfv/storperf-graphite'
- arch_tag: 'aarch64'
- <<: *fraser
- <<: *storperf-arm-receivers
- - 'storperf-httpfrontend':
- project: 'storperf'
- dockerdir: 'docker/storperf-httpfrontend'
- docker_repo_name: 'opnfv/storperf-httpfrontend'
- arch_tag: 'aarch64'
- <<: *fraser
- <<: *storperf-arm-receivers
- - 'storperf-reporting':
- project: 'storperf'
- dockerdir: 'docker/storperf-reporting'
- docker_repo_name: 'opnfv/storperf-reporting'
- arch_tag: 'aarch64'
- <<: *fraser
- <<: *storperf-arm-receivers
- - 'storperf-swaggerui':
- project: 'storperf'
- dockerdir: 'docker/storperf-swaggerui'
- docker_repo_name: 'opnfv/storperf-swaggerui'
- arch_tag: 'aarch64'
- <<: *fraser
- <<: *storperf-arm-receivers
- - 'yardstick':
- project: 'yardstick'
- <<: *fraser
- <<: *yardstick-arm-receivers
-
- # yamllint enable rule:key-duplicates
- jobs:
- - '{dockerrepo}-docker-build-arm-push-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
- name: '{dockerrepo}-docker-build-arm-push-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters: &parameters
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-arm-defaults'
- - string:
- name: PUSH_IMAGE
- default: "true"
- description: "To enable/disable pushing the image to Dockerhub."
- - string:
- name: DOCKER_REPO_NAME
- default: "{docker_repo_name}"
- description: "Dockerhub repo to be pushed to."
- - string:
- name: GERRIT_REFNAME
- default: ""
- description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
- - string:
- name: DOCKER_DIR
- default: "{dockerdir}"
- description: "Directory containing files needed by the Dockerfile"
- - string:
- name: DOCKERFILE
- default: "{dockerfile}"
- description: "Dockerfile to use for creating the image."
- - string:
- name: ARCH_TAG
- default: "{arch_tag}"
- description: "If set, this value will be added to the docker image tag"
-
- properties:
- - throttle:
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm
-
- builders: &builders
- - shell:
- !include-raw-escape: ./opnfv-docker.sh
-
- triggers:
- - pollscm:
- cron: "*/30 * * * *"
- - gerrit-trigger-tag-created:
- project: '{project}'
-
- publishers:
- - email:
- recipients: '{receivers}'
- - email-jenkins-admins-on-failure
diff --git a/jjb/releng/opnfv-docker-custom.yaml b/jjb/releng/opnfv-docker-custom.yaml
deleted file mode 100644
index e564ab5ef..000000000
--- a/jjb/releng/opnfv-docker-custom.yaml
+++ /dev/null
@@ -1,111 +0,0 @@
----
-########################
-# Job configuration for opnfv-docker-custom
-########################
-- project:
-
- name: opnfv-docker-custom
-
- project: '{name}'
-
- jobs:
- - 'opnfv-docker-custom-verify-{stream}'
- - 'opnfv-docker-custom-merge-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - danube:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-########################
-# job templates
-########################
-
-- job-template:
- name: 'opnfv-docker-custom-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: 'disabled'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'docker/**'
-
- builders:
- - verify-docker
-
-- job-template:
- name: 'opnfv-docker-custom-merge-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: 'disabled'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'docker/**'
-
- builders:
- - merge-docker
-
-- builder:
- name: verify-docker
- builders:
- - shell: |
- /bin/bash $WORKSPACE/verify-docker.sh
-
-- builder:
- name: merge-docker
- builders:
- - shell: |
- /bin/bash $WORKSPACE/merge-docker.sh
diff --git a/jjb/releng/opnfv-docker.sh b/jjb/releng/opnfv-docker.sh
deleted file mode 100644
index e64764128..000000000
--- a/jjb/releng/opnfv-docker.sh
+++ /dev/null
@@ -1,158 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-
-echo "Using Docker $(docker --version) on $NODE_NAME"
-echo "Starting Docker build for $DOCKER_REPO_NAME ..."
-echo "--------------------------------------------------------"
-echo
-
-function remove_containers_images()
-{
- # Remove previously running containers if they exist
- #
- # $ docker ps -a
- # CONTAINER ID IMAGE COMMAND ...
- # 6a796ed40b8e opnfv/compass-tasks:latest "/bin/bash" ...
- # 99fcb59f4787 opnfv/compass-tasks-base:latest "/bin/bash" ...
- # cc5eee16b995 opnfv/compass-tasks-k8s "/bin/bash" ...
- #
- # Match the image name bounded by a leading space and a trailing space or colon (tag)
- if [[ -n "$(docker ps -a | grep " $DOCKER_REPO_NAME[ :]")" ]]; then
- echo "Removing existing $DOCKER_REPO_NAME containers..."
- docker ps -a | grep " $DOCKER_REPO_NAME[ :]" | awk '{print $1}' | xargs docker rm -f
- t=60
- # Wait max 60 sec for containers to be removed
- while [[ $t -gt 0 ]] && [[ -n "$(docker ps| grep " $DOCKER_REPO_NAME[ :]")" ]]; do
- sleep 1
- let t=t-1
- done
- fi
-
-
- # Remove existing images if they exist
- #
- # $ docker images
- # REPOSITORY TAG IMAGE ID ...
- # opnfv/compass-tasks latest 6501569fd328 ...
- # opnfv/compass-tasks-base latest 8764fe29c434 ...
- # opnfv/compass-tasks-k8s latest 61094cac9e65 ...
- #
- # Match the image name anchored at the start of the line and ended by a space
- if [[ -n "$(docker images | grep "^$DOCKER_REPO_NAME ")" ]]; then
- echo "Docker images to remove:"
- docker images | head -1 && docker images | grep "^$DOCKER_REPO_NAME "
- image_ids=($(docker images | grep "^$DOCKER_REPO_NAME " | awk '{print $3}'))
- for id in "${image_ids[@]}"; do
- if [[ -n "$(docker images|grep "^$DOCKER_REPO_NAME "|grep $id)" ]]; then
- echo "Removing docker image $DOCKER_REPO_NAME:$id..."
- docker rmi -f $id
- fi
- done
- fi
-}
-
-
-count=30 # docker build jobs might take up to ~30 min
-while [[ -n `ps -ef| grep 'docker build' | grep $DOCKER_REPO_NAME | grep -v grep` ]]; do
- echo "Build or cleanup of $DOCKER_REPO_NAME in progress. Waiting..."
- sleep 60
- count=$(( $count - 1 ))
- if [ $count -eq 0 ]; then
- echo "Timeout. Aborting..."
- exit 1
- fi
-done
-
-# Remove the existing containers and images before building
-remove_containers_images
-
-DOCKER_PATH=$WORKSPACE/$DOCKER_DIR
-
-cd $DOCKER_PATH || exit 1
-HOST_ARCH="$(uname -m)"
-# If there is a patch for an arch other than x86, apply the patch to
-# the Dockerfile in place
-dockerfile_patch="Dockerfile.${HOST_ARCH}.patch"
-if [[ -f "${dockerfile_patch}" ]]; then
- patch -f Dockerfile -p1 < "${dockerfile_patch}"
-fi
-
-# Get tag version
-echo "Current branch: $BRANCH"
-
-BUILD_BRANCH=$BRANCH
-
-GERRIT_REFNAME=${GERRIT_REFNAME:-''}
-RELEASE_VERSION=${GERRIT_REFNAME/refs\/tags\/}
-
-# If we're being triggered by a comment-added job, then extract the tag
-# from the comment and use that as the release version.
-# Expected comment format: retag opnfv-x.y.z
-if [[ "${GERRIT_EVENT_TYPE:-}" == "comment-added" ]]; then
- RELEASE_VERSION=$(echo "$GERRIT_EVENT_COMMENT_TEXT" | grep 'retag' | awk '{print $2}')
-fi
-
-if [[ "$BRANCH" == "master" ]]; then
- DOCKER_TAG="latest"
-elif [[ -n "${RELEASE_VERSION-}" ]]; then
- DOCKER_TAG=${RELEASE_VERSION}
- if git checkout ${RELEASE_VERSION}; then
- echo "Successfully checked out the git tag ${RELEASE_VERSION}"
- else
- echo "The tag ${RELEASE_VERSION} doesn't exist in the repository. Existing tags are:"
- git tag
- exit 1
- fi
-else
- DOCKER_TAG="stable"
-fi
-
-if [[ -n "${COMMIT_ID-}" && -n "${RELEASE_VERSION-}" ]]; then
- DOCKER_TAG=$RELEASE_VERSION
- BUILD_BRANCH=$COMMIT_ID
-fi
-
-ARCH_BUILD_ARG=""
-ARCH_TAG=${ARCH_TAG:-}
-if [[ -n "${ARCH_TAG}" ]]; then
- DOCKER_TAG=${ARCH_TAG}-${DOCKER_TAG}
- ARCH_BUILD_ARG="--build-arg ARCH=${ARCH_TAG}"
-fi
-
-# Start the build
-echo "Building docker image: $DOCKER_REPO_NAME:$DOCKER_TAG"
-echo "--------------------------------------------------------"
-echo
-cmd="docker build --pull=true --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG --build-arg BRANCH=$BUILD_BRANCH
- $ARCH_BUILD_ARG
- -f $DOCKERFILE $DOCKER_PATH"
-
-echo ${cmd}
-${cmd}
-
-
-# list the images
-echo "Available images are:"
-docker images
-
-# Push image to Dockerhub
-if [[ "$PUSH_IMAGE" == "true" ]]; then
- echo "Pushing $DOCKER_REPO_NAME:$DOCKER_TAG to the docker registry..."
- echo "--------------------------------------------------------"
- echo
- docker push $DOCKER_REPO_NAME:$DOCKER_TAG
-fi
-
-# Remove the existing containers and images after building
-remove_containers_images
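The tag selection in the script above mixes several inputs (branch, GERRIT_REFNAME or a "retag" comment, COMMIT_ID, ARCH_TAG). A condensed restatement of just that decision, with the same precedence, may be easier to follow when reviewing; the function and variable names below are illustrative, not part of the script:

    resolve_docker_tag() {
        # args: <branch> <release_version> <commit_id> <arch_tag>
        local branch=$1 release=$2 commit=$3 arch=$4 tag
        if [ "$branch" = "master" ]; then
            tag="latest"
        elif [ -n "$release" ]; then
            tag="$release"          # building a tag (refs/tags/...) or a 'retag x.y.z' comment
        else
            tag="stable"            # stable/* branch without a release version
        fi
        # A snapshot rebuild (COMMIT_ID plus RELEASE_VERSION) always uses the
        # release version, even on master.
        if [ -n "$commit" ] && [ -n "$release" ]; then
            tag="$release"
        fi
        if [ -n "$arch" ]; then
            tag="${arch}-${tag}"    # e.g. x86_64-latest, aarch64-6.2.0
        fi
        echo "$tag"
    }
    resolve_docker_tag master '' '' ''                  # -> latest
    resolve_docker_tag stable/fraser '' '' x86_64       # -> x86_64-stable
    resolve_docker_tag stable/fraser 6.2.0 '' aarch64   # -> aarch64-6.2.0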
diff --git a/jjb/releng/opnfv-docker.yaml b/jjb/releng/opnfv-docker.yaml
deleted file mode 100644
index 0d0659807..000000000
--- a/jjb/releng/opnfv-docker.yaml
+++ /dev/null
@@ -1,405 +0,0 @@
----
-##############################################
-# job configuration for docker build and push
-##############################################
-
-- project:
-
- name: opnfv-docker
-
- master: &master
- stream: master
- branch: '{stream}'
- disabled: false
- danube: &danube
- stream: danube
- branch: 'stable/{stream}'
- disabled: false
- euphrates: &euphrates
- stream: euphrates
- branch: 'stable/{stream}'
- disabled: false
- fraser: &fraser
- stream: fraser
- branch: 'stable/{stream}'
- disabled: false
- storperf-receivers: &storperf-receivers
- receivers: >
- mark.beierl@emc.com
- other-receivers: &other-receivers
- receivers: ''
-
- dockerfile: "Dockerfile"
- dockerdir: "docker"
- arch_tag: ""
-
- # This is the dockerhub repo the image will be pushed to as
- # 'opnfv/{dockerrepo}'. See the DOCKER_REPO_NAME parameter.
- # 'project' is the OPNFV repo we expect to contain the Dockerfile
- # yamllint disable rule:key-duplicates
- dockerrepo:
- # projects with jobs for master
- - 'releng-anteater':
- project: 'releng-anteater'
- <<: *master
- <<: *other-receivers
- - 'barometer-collectd':
- project: 'barometer'
- dockerdir: 'docker/barometer-collectd'
- <<: *master
- <<: *other-receivers
- - 'barometer-grafana':
- project: 'barometer'
- dockerdir: 'docker/barometer-grafana'
- <<: *master
- <<: *other-receivers
- - 'barometer-influxdb':
- project: 'barometer'
- dockerdir: 'docker/barometer-influxdb'
- <<: *master
- <<: *other-receivers
- - 'barometer-kafka':
- project: 'barometer'
- dockerdir: 'docker/barometer-kafka'
- <<: *master
- <<: *other-receivers
- - 'barometer-ves':
- project: 'barometer'
- dockerdir: 'docker/barometer-ves'
- <<: *master
- <<: *other-receivers
- - 'barometer-snmp':
- project: 'barometer'
- dockerdir: 'docker/barometer-snmp'
- <<: *master
- <<: *other-receivers
- - 'bottlenecks':
- project: 'bottlenecks'
- <<: *master
- <<: *other-receivers
- - 'clover':
- project: 'clover'
- dockerdir: '.'
- dockerfile: 'docker/Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'clover-ns-nginx-lb':
- project: 'clover'
- dockerdir: 'samples/services/nginx/docker'
- dockerfile: 'subservices/lb/Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'clover-ns-nginx-proxy':
- project: 'clover'
- dockerdir: 'samples/services/nginx/docker'
- dockerfile: 'subservices/proxy/Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'clover-ns-nginx-server':
- project: 'clover'
- dockerdir: 'samples/services/nginx/docker'
- dockerfile: 'subservices/server/Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'clover-ns-snort-ids':
- project: 'clover'
- dockerdir: 'samples/services/snort_ids/docker'
- dockerfile: 'Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'cperf':
- project: 'cperf'
- <<: *master
- <<: *other-receivers
- - 'dovetail':
- project: 'dovetail'
- <<: *master
- <<: *other-receivers
- - 'dovetail-webportal-api':
- project: 'dovetail-webportal'
- dockerfile: 'Dockerfile.api'
- <<: *master
- <<: *other-receivers
- - 'dovetail-webportal-web':
- project: 'dovetail-webportal'
- dockerfile: 'Dockerfile.web'
- <<: *master
- <<: *other-receivers
- - 'nfvbench':
- project: 'nfvbench'
- <<: *master
- <<: *other-receivers
- - 'qtip':
- project: 'qtip'
- dockerdir: '.'
- dockerfile: 'docker/Dockerfile.local'
- <<: *master
- <<: *other-receivers
- - 'qtip-nettest':
- project: 'qtip'
- dockerdir: 'contrib/nettest'
- <<: *master
- <<: *other-receivers
- - 'storperf-master':
- project: 'storperf'
- dockerdir: 'docker/storperf-master'
- arch_tag: 'x86_64'
- <<: *master
- <<: *storperf-receivers
- - 'storperf-graphite':
- project: 'storperf'
- dockerdir: 'docker/storperf-graphite'
- arch_tag: 'x86_64'
- <<: *master
- <<: *storperf-receivers
- - 'storperf-httpfrontend':
- project: 'storperf'
- dockerdir: 'docker/storperf-httpfrontend'
- arch_tag: 'x86_64'
- <<: *master
- <<: *storperf-receivers
- - 'storperf-reporting':
- project: 'storperf'
- dockerdir: 'docker/storperf-reporting'
- arch_tag: 'x86_64'
- <<: *master
- <<: *storperf-receivers
- - 'storperf-swaggerui':
- project: 'storperf'
- dockerdir: 'docker/storperf-swaggerui'
- arch_tag: 'x86_64'
- <<: *master
- <<: *storperf-receivers
- - 'yardstick':
- project: 'yardstick'
- <<: *master
- <<: *other-receivers
- # projects with jobs for euphrates
- - 'barometer-collectd':
- project: 'barometer'
- dockerdir: 'docker/barometer-collectd'
- <<: *fraser
- <<: *other-receivers
- - 'barometer-grafana':
- project: 'barometer'
- dockerdir: 'docker/barometer-grafana'
- <<: *fraser
- <<: *other-receivers
- - 'barometer-influxdb':
- project: 'barometer'
- dockerdir: 'docker/barometer-influxdb'
- <<: *fraser
- <<: *other-receivers
- - 'barometer-kafka':
- project: 'barometer'
- dockerdir: 'docker/barometer-kafka'
- <<: *fraser
- <<: *other-receivers
- - 'barometer-ves':
- project: 'barometer'
- dockerdir: 'docker/barometer-ves'
- <<: *fraser
- <<: *other-receivers
- - 'barometer-snmp':
- project: 'barometer'
- dockerdir: 'docker/barometer-snmp'
- <<: *fraser
- <<: *other-receivers
- - 'bottlenecks':
- project: 'bottlenecks'
- <<: *euphrates
- <<: *other-receivers
- - 'nfvbench':
- project: 'nfvbench'
- <<: *fraser
- <<: *other-receivers
- - 'qtip':
- project: 'qtip'
- dockerdir: '.'
- dockerfile: 'docker/Dockerfile.local'
- <<: *fraser
- <<: *other-receivers
- - 'storperf-master':
- project: 'storperf'
- dockerdir: 'docker/storperf-master'
- arch_tag: 'x86_64'
- <<: *fraser
- <<: *storperf-receivers
- - 'storperf-graphite':
- project: 'storperf'
- dockerdir: 'docker/storperf-graphite'
- arch_tag: 'x86_64'
- <<: *fraser
- <<: *storperf-receivers
- - 'storperf-httpfrontend':
- project: 'storperf'
- dockerdir: 'docker/storperf-httpfrontend'
- arch_tag: 'x86_64'
- <<: *fraser
- <<: *storperf-receivers
- - 'storperf-reporting':
- project: 'storperf'
- dockerdir: 'docker/storperf-reporting'
- arch_tag: 'x86_64'
- <<: *fraser
- <<: *storperf-receivers
- - 'storperf-swaggerui':
- project: 'storperf'
- dockerdir: 'docker/storperf-swaggerui'
- arch_tag: 'x86_64'
- <<: *fraser
- <<: *storperf-receivers
- - 'yardstick':
- project: 'yardstick'
- <<: *fraser
- <<: *other-receivers
- # projects with jobs for fraser
- - 'bottlenecks':
- project: 'bottlenecks'
- <<: *fraser
- <<: *other-receivers
- - 'clover':
- project: 'clover'
- dockerdir: '.'
- dockerfile: 'docker/Dockerfile'
- <<: *fraser
- <<: *other-receivers
- - 'clover-ns-nginx-lb':
- project: 'clover'
- dockerdir: 'samples/services/nginx/docker'
- dockerfile: 'subservices/lb/Dockerfile'
- <<: *fraser
- <<: *other-receivers
- - 'clover-ns-nginx-proxy':
- project: 'clover'
- dockerdir: 'samples/services/nginx/docker'
- dockerfile: 'subservices/proxy/Dockerfile'
- <<: *fraser
- <<: *other-receivers
- - 'clover-ns-nginx-server':
- project: 'clover'
- dockerdir: 'samples/services/nginx/docker'
- dockerfile: 'subservices/server/Dockerfile'
- <<: *fraser
- <<: *other-receivers
- - 'clover-ns-snort-ids':
- project: 'clover'
- dockerdir: 'samples/services/snort_ids/docker'
- dockerfile: 'Dockerfile'
- <<: *fraser
- <<: *other-receivers
- # yamllint enable rule:key-duplicates
- jobs:
- - "{dockerrepo}-docker-build-push-{stream}"
-
-- project:
-
- name: opnfv-monitor-docker # projects which only monitor a dedicated file or path
-
- dockerfile: "Dockerfile"
- dockerdir: "docker"
- arch_tag: ""
-
- project:
- # projects with jobs for master
- - 'daisy':
- dockerrepo: 'daisy'
- <<: *master
- - 'escalator':
- dockerrepo: 'escalator'
- <<: *master
-
- jobs:
- - '{project}-docker-build-push-monitor-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
- name: '{dockerrepo}-docker-build-push-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters: &parameters
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - string:
- name: PUSH_IMAGE
- default: "true"
- description: "To enable/disable pushing the image to Dockerhub."
- - string:
- name: DOCKER_REPO_NAME
- default: "opnfv/{dockerrepo}"
- description: "Dockerhub repo to be pushed to."
- - string:
- name: DOCKER_DIR
- default: "{dockerdir}"
- description: "Directory containing files needed by the Dockerfile"
- - string:
- name: COMMIT_ID
- default: ""
- description: "commit id to make a snapshot docker image"
- - string:
- name: GERRIT_REFNAME
- default: ""
- description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
- - string:
- name: DOCKERFILE
- default: "{dockerfile}"
- description: "Dockerfile to use for creating the image."
- - string:
- name: ARCH_TAG
- default: "{arch_tag}"
- description: "If set, this value will be added to the docker image tag as a prefix"
-
- properties:
- - throttle:
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm
-
- builders: &builders
- - shell:
- !include-raw-escape: ./opnfv-docker.sh
-
- triggers:
- - pollscm:
- cron: "*/30 * * * *"
- - gerrit-trigger-tag-created:
- project: '{project}'
-
- publishers:
- - email:
- recipients: '{receivers}'
- - email-jenkins-admins-on-failure
-
-- job-template:
- name: '{project}-docker-build-push-monitor-{stream}'
- disabled: '{obj:disabled}'
- parameters: *parameters
-
- scm:
- - git-scm
-
- builders: *builders
-
- # Trigger only on changes matching the file paths below
- triggers:
- - gerrit:
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'docker/**'
diff --git a/jjb/releng/opnfv-lint.yaml b/jjb/releng/opnfv-lint.yaml
deleted file mode 100644
index 6483e3262..000000000
--- a/jjb/releng/opnfv-lint.yaml
+++ /dev/null
@@ -1,186 +0,0 @@
----
-########################
-# Job configuration for opnfv-lint
-########################
-- project:
-
- name: opnfv-lint
-
- project: opnfv-lint
-
- jobs:
- - 'opnfv-lint-verify-{stream}'
- - 'opnfv-yamllint-verify-{stream}'
- - 'opnfv-pylint-verify-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- - danube:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-########################
-# job templates
-########################
-
-- job-template:
- name: 'opnfv-lint-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - node:
- name: SLAVE_NAME
- description: Slaves to execute the lint checks
- default-slaves:
- - lf-build1
- allowed-multiselect: true
- ignore-offline-nodes: true
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: 'releng|doctor|pharos'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '**/*.py'
- - compare-type: ANT
- pattern: '**/*.sh'
- - compare-type: ANT
- pattern: '**/*.yml'
- - compare-type: ANT
- pattern: '**/*.yaml'
-
- builders:
- - lint-all-code
-
-- job-template:
- name: 'opnfv-pylint-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - node:
- name: SLAVE_NAME
- description: Slaves to execute pylint
- default-slaves:
- - lf-build1
- allowed-multiselect: true
- ignore-offline-nodes: true
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: 'sdnvpn|qtip|daisy|sfc|escalator'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '**/*.py'
-
- builders:
- - lint-init
- - lint-python-code
- - lint-report
-
-- job-template:
- name: 'opnfv-yamllint-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - node:
- name: SLAVE_NAME
- description: Slaves to execute yamllint
- default-slaves:
- - lf-build1
- allowed-multiselect: true
- ignore-offline-nodes: true
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: 'armband|fuel|releng-anteater'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '**/*.yml'
- - compare-type: ANT
- pattern: '**/*.yaml'
-
- builders:
- - lint-init
- - lint-yaml-code
- - lint-report
diff --git a/jjb/releng/opnfv-repo-archiver.sh b/jjb/releng/opnfv-repo-archiver.sh
deleted file mode 100644
index b7ff3baa0..000000000
--- a/jjb/releng/opnfv-repo-archiver.sh
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Linux Foundation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o pipefail
-export PATH=$PATH:/usr/local/bin/
-
-DATE="$(date +%Y%m%d)"
-
-declare -a PROJECT_LIST
-EXCLUDE_PROJECTS="All-Projects|All-Users|securedlab"
-CLONE_PATH="$WORKSPACE/opnfv-repos"
-
-# Generate project list from gerrit
-PROJECT_LIST=($(ssh -p 29418 jenkins-ci@gerrit.opnfv.org gerrit ls-projects | egrep -v $EXCLUDE_PROJECTS))
-
-echo "Cloning all OPNFV repositories"
-echo "------------------------------"
-
-for PROJECT in "${PROJECT_LIST[@]}"; do
- echo "> Cloning $PROJECT"
- if [ ! -d "$CLONE_PATH/$PROJECT" ]; then
- git clone "https://gerrit.opnfv.org/gerrit/$PROJECT.git" $CLONE_PATH/$PROJECT
- else
- pushd "$CLONE_PATH/$PROJECT" &>/dev/null
- git pull -f origin master
- popd &> /dev/null
- fi
-
- # Don't license scan kernel or qemu in kvmfornfv
- if [ "$PROJECT" == "kvmfornfv" ]; then
- rm -rf "$CLONE_PATH/$PROJECT/"{kernel,qemu}
- fi
-done
-
-echo "Finished cloning OPNFV repositories"
-echo "-----------------------------------"
-
-# Copy repos and clear git data
-echo "Copying repos to $WORKSPACE/opnfv-archive and removing .git files"
-cp -R $CLONE_PATH $WORKSPACE/opnfv-archive
-find $WORKSPACE/opnfv-archive -type d -iname '.git' -exec rm -rf {} +
-find $WORKSPACE/opnfv-archive -type f -iname '.git*' -exec rm -rf {} +
-
-# Create archive
-echo "Creating archive: opnfv-archive-$DATE.tar.gz"
-echo "--------------------------------------"
-cd $WORKSPACE
-tar -czf "opnfv-archive-$DATE.tar.gz" opnfv-archive && rm -rf opnfv-archive
-echo "Archiving Complete."
-
-echo "Uploading artifacts"
-echo "--------------------------------------"
-
-gsutil cp "$WORKSPACE/opnfv-archive-$DATE.tar.gz" \
- "gs://opnfv-archive/opnfv-archive-$DATE.tar.gz" 2>&1
-
-echo "https://storage.googleapis.com/opnfv-archive/opnfv-archive-$DATE.tar.gz" > archive-link.txt
-
-rm -f opnfv-archive-$DATE.tar.gz
-
-echo "Finished"
diff --git a/jjb/releng/opnfv-utils.yaml b/jjb/releng/opnfv-utils.yaml
deleted file mode 100644
index 19fb4b5af..000000000
--- a/jjb/releng/opnfv-utils.yaml
+++ /dev/null
@@ -1,210 +0,0 @@
----
-- project:
-
- name: opnfv-utils
-
- jobs:
- - 'prune-docker-images'
- - 'archive-repositories'
- - 'check-status-of-slaves'
- - 'ansible-build-server'
- - 'generate-artifacts-index-pages'
-
-########################
-# job templates
-########################
-- job-template:
- name: 'prune-docker-images'
-
- disabled: false
-
- concurrent: true
-
- parameters:
- - node:
- name: SLAVE_NAME
- description: Slaves to prune docker images
- default-slaves:
- - arm-build3
- - arm-build4
- - arm-build5
- - arm-build6
- - ericsson-build3
- - ericsson-build4
- - lf-build2
- allowed-multiselect: true
- ignore-offline-nodes: true
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- # yamllint disable rule:line-length
- - shell: |
- #!/bin/bash
- (docker ps -q; docker ps -aq) | sort | uniq -u | xargs --no-run-if-empty docker rm
- docker images -f dangling=true -q | xargs --no-run-if-empty docker rmi
-
-
- # yamllint enable rule:line-length
- triggers:
- - timed: '@midnight'
-
-- job-template:
- name: 'archive-repositories'
-
- disabled: false
-
- concurrent: true
-
- parameters:
- - node:
- name: SLAVE_NAME
- description: Where to create the archive
- default-slaves:
- - master
- allowed-multiselect: false
- ignore-offline-nodes: true
-
- triggers:
- - timed: '@monthly'
-
- builders:
- - shell:
- !include-raw-escape: opnfv-repo-archiver.sh
-
- publishers:
- - email-ext:
- content-type: 'text'
- failure: false
- always: true
- body:
- ${{FILE,path="archive-link.txt"}}
- reply-to: >
- helpdesk@opnfv.org
- recipients: >
- swinslow@linuxfoundation.org
-
-
-- job-template:
- name: 'check-status-of-slaves'
-
- disabled: false
- concurrent: true
-
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'script lives on master node'
- default-slaves:
- - lf-build1
- allowed-multiselect: false
- ignore-offline-nodes: true
- - project-parameter:
- project: releng
- branch: master
-
- scm:
- - git-scm
-
- triggers:
- - timed: '@midnight'
-
- builders:
- - shell: |
- cd $WORKSPACE/utils/
- bash slave-monitor-0.1.sh
-
-- job-template:
- name: 'ansible-build-server'
-
- project-type: freestyle
-
- disabled: false
- concurrent: true
-
- parameters:
- - node:
- name: SLAVE_NAME
- description: Build Servers
- default-slaves:
- - lf-build1
- - lf-build2
- - ericsson-build4
- allowed-multiselect: true
- ignore-offline-nodes: true
- - project-parameter:
- project: releng
- branch: master
-
- scm:
- - git-scm
-
- triggers:
- - timed: '@midnight'
-
- builders:
- - install-ansible
- - run-ansible-build-server-playbook
-
-
-- builder:
- name: install-ansible
- builders:
- - shell: |
- # Install ansible here
- if [ -f /etc/centos-release ] \
- || [ -f /etc/redhat-release ] \
- || [ -f /etc/system-release ]; then
- sudo yum -y install ansible
- fi
- if [ -f /etc/debian_version ] \
- || grep -qi ubuntu /etc/lsb-release \
- || grep -qi ubuntu /etc/os-release; then
- sudo apt-get -y install ansible
- fi
-
-- builder:
- name: run-ansible-build-server-playbook
- builders:
- - shell: |
- # run playbook
- sudo ansible-playbook -i \
- $WORKSPACE/utils/build-server-ansible/inventory.ini \
- $WORKSPACE/utils/build-server-ansible/main.yml
-
-
-- job-template:
- name: 'generate-artifacts-index-pages'
-
- project-type: freestyle
-
- disabled: false
-
- concurrent: false
-
- parameters:
- - node:
- name: SLAVE_NAME
- description: Build Servers
- default-slaves:
- - lf-build2
- - project-parameter:
- project: releng
- branch: master
-
- scm:
- - git-scm
-
- triggers:
- - timed: '@hourly'
-
- builders:
- - generate-artifact-html
-
-
-- builder:
- name: generate-artifact-html
- builders:
- - shell: |
- cd $WORKSPACE/utils/
- ./artifacts.opnfv.org.sh
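One line in the prune-docker-images builder above is terse enough to be worth unpacking: "docker ps -q" lists running containers and "docker ps -aq" lists all containers, so IDs that appear only once in the combined, sorted output (uniq -u) are exactly the containers that are not running. An equivalent, more explicit form (illustrative only):

    running=$(docker ps -q)
    all=$(docker ps -aq)
    # IDs present in "all" but not in "running" are stopped or exited containers.
    stopped=$(comm -13 <(sort <<< "$running") <(sort <<< "$all"))
    if [ -n "$stopped" ]; then
        echo "$stopped" | xargs docker rm
    fi
    # Dangling (untagged) images are pruned the same way as in the job itself:
    docker images -f dangling=true -q | xargs --no-run-if-empty docker rmi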
diff --git a/jjb/releng/releng-ci-jobs.yaml b/jjb/releng/releng-ci-jobs.yaml
deleted file mode 100644
index 49abd7dd4..000000000
--- a/jjb/releng/releng-ci-jobs.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-- project:
- name: releng-builder-jobs
- project: 'releng'
- project-name: 'releng'
-
- build-timeout: 60
- build-node: 'lf-build1'
- gerrit-server-name: 'gerrit.opnfv.org'
- jenkins-ssh-credential: 'd42411ac011ad6f3dd2e1fa34eaa5d87f910eb2e'
- jjb-version: '2.0.3'
- lftools-version: '<1.0.0'
-
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-releng/38594/'
- rtd-token: '291c6a0109493b4457e566d06141212452c65784'
- project-pattern: 'releng'
-
- jobs:
- - '{project-name}-ci-jobs'
- - '{project-name}-rtd-jobs'
diff --git a/jjb/releng/releng-jobs.yaml b/jjb/releng/releng-jobs.yaml
new file mode 100644
index 000000000..309b7ce68
--- /dev/null
+++ b/jjb/releng/releng-jobs.yaml
@@ -0,0 +1,496 @@
+---
+- scm:
+ name: releng-scm
+ scm:
+ - git:
+ url: 'ssh://fbot@gerrit.opnfv.org:29418/releng'
+ refspec: '+refs/heads/*:refs/remotes/origin/* +refs/changes/*:refs/changes/*'
+ submodule:
+ recursive: true
+ branches:
+ - '{ref}'
+
+- trigger:
+ name: releng-patchset-created
+ triggers:
+ - gerrit:
+ trigger-on:
+ - patchset-created-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'releng'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: 'master'
+
+- trigger:
+ name: releng-patchset-merged
+ triggers:
+ - gerrit:
+ trigger-on:
+ - change-merged-event
+ - comment-added-contains-event:
+ comment-contains-value: 'remerge'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'releng'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: 'master'
+
+- builder:
+ name: releng-jjb-verify
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install jenkins-job-builder -y
+ jenkins-jobs test --recursive -o tmp jjb
+ rm -rf tmp
+
+- builder:
+ name: releng-jjb-merge
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install jenkins-job-builder -y
+ jenkins-jobs update --recursive --delete-old jjb
+
+- parameter:
+ name: releng-jjb-node
+ parameters:
+ - label:
+ name: node
+ default: '{node}'
+
+- job-template:
+ name: releng-jjb-verify
+ triggers:
+ - releng-patchset-created
+ scm:
+ - releng-scm:
+ ref: $GERRIT_REFSPEC
+ parameters:
+ - releng-jjb-node:
+ node: opnfv-build
+ builders:
+ - releng-jjb-verify
+
+- project:
+ name: releng-jjb-verify
+ jobs:
+ - releng-jjb-verify
+
+- job-template:
+ name: releng-jjb-merge
+ triggers:
+ - releng-patchset-merged
+ scm:
+ - releng-scm:
+ ref: master
+ parameters:
+ - releng-jjb-node:
+ node: opnfv-build
+ builders:
+ - releng-jjb-merge
+
+- project:
+ name: releng-jjb-merge
+ jobs:
+ - releng-jjb-merge
+
+- scm:
+ name: opnfv-scm
+ scm:
+ - git:
+ url: 'https://gerrit.opnfv.org/gerrit/{project}'
+ refspec: '+refs/heads/*:refs/remotes/origin/* +refs/changes/*:refs/changes/*'
+ submodule:
+ recursive: true
+ branches:
+ - '{ref}'
+
+- trigger:
+ name: releng-tox-patchset-created
+ triggers:
+ - gerrit:
+ trigger-on:
+ - patchset-created-event
+ - comment-added-contains-event:
+ comment-contains-value: recheck
+ - comment-added-contains-event:
+ comment-contains-value: reverify
+ server-name: gerrit.opnfv.org
+ projects:
+ - project-compare-type: 'REG_EXP'
+ project-pattern: '^(?!functest).*'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: master
+ - branch-compare-type: 'ANT'
+ branch-pattern: stable/nile
+ - branch-compare-type: 'ANT'
+ branch-pattern: stable/moselle
+
+- builder:
+ name: releng-tox
+ builders:
+ - shell: |
+ [ -f tox.ini ] || exit 0
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install tox -y
+ tox --recreate
+
+- parameter:
+ name: releng-tox-node
+ parameters:
+ - label:
+ name: node
+ default: '{node}'
+
+- job-template:
+ name: releng-tox
+ triggers:
+ - releng-tox-patchset-created
+ scm:
+ - opnfv-scm:
+ ref: $GERRIT_REFSPEC
+ project: $GERRIT_PROJECT
+ parameters:
+ - releng-tox-node:
+ node: opnfv-build
+ builders:
+ - releng-tox
+
+- project:
+ name: releng-tox
+ jobs:
+ - releng-tox
+
+- project:
+ name: releng-release-jobs
+ stream:
+ - nile
+ - orinoco
+ - v1.22
+ - v1.23
+ - v1.24
+ - v1.25
+ - v1.26
+ - v1.27
+ - v1.28
+ - v1.29
+ - xena
+ - wallaby
+ - yoga
+ - zed
+ - '2023.1'
+ - '2023.2'
+ jobs:
+ - 'releng-release-{stream}-verify'
+ - 'releng-release-{stream}-merge'
+
+- parameter:
+ name: stream-parameter
+ parameters:
+ - string:
+ name: STREAM
+ default: '{stream}'
+
+- job-template:
+ name: 'releng-release-{stream}-verify'
+ scm:
+ - releng-scm:
+ ref: $GERRIT_REFSPEC
+ parameters:
+ - releng-jjb-node:
+ node: opnfv-build
+ - stream-parameter:
+ stream: '{stream}'
+ - string:
+ name: GIT_URL
+ default: ssh://fbot@gerrit.opnfv.org:29418/
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'releng'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/master'
+ file-paths:
+ - compare-type: ANT
+ pattern: 'releases/{stream}/**'
+ - compare-type: ANT
+ pattern: 'releases/schema.yaml'
+ - compare-type: ANT
+ pattern: 'releases/scripts/verify_schema.py'
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install python3-pygerrit2 \
+ python3-ruamel.yaml -y
+ - shell: !include-raw-escape:
+ - branch-or-tag.sh
+
+- job-template:
+ name: 'releng-release-{stream}-merge'
+ scm:
+ - releng-scm:
+ ref: $GERRIT_REFSPEC
+ parameters:
+ - releng-jjb-node:
+ node: opnfv-build
+ - stream-parameter:
+ stream: '{stream}'
+ - string:
+ name: GIT_URL
+ default: ssh://fbot@gerrit.opnfv.org:29418/
+ triggers:
+ - gerrit:
+ trigger-on:
+ - change-merged-event
+ - comment-added-contains-event:
+ comment-contains-value: 'remerge'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'releng'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: 'master'
+ file-paths:
+ - compare-type: ANT
+ pattern: 'releases/{stream}/**'
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install python3-pygerrit2 \
+ python3-ruamel.yaml -y
+ - shell: !include-raw-escape:
+ - branch-or-tag.sh
+
+- builder:
+ name: lint-init
+ builders:
+ - shell: |
+ #!/bin/bash
+ # Ensure we start with a clean environment
+ rm -f bash-violation.log python-violation.log yaml-violation.log violation.log
+ # Collect the files added/modified/renamed/copied by this patchset
+ git --no-pager diff --diff-filter=MCRAT --name-only HEAD^1 > modified_files
+
+- builder:
+ name: lint-report
+ builders:
+ - shell: |
+ #!/bin/bash
+ if [[ -s violation.log ]]; then
+ cat violation.log
+ echo "Reporting lint result...."
+ set -x
+ msg="Found syntax error and/or coding style violation(s) in the files modified by your patchset."
+ sed -i -e "1s#^#${msg}\n\n#" violation.log
+ cmd="gerrit review -p $GERRIT_PROJECT -m \"$(cat violation.log)\" $GERRIT_PATCHSET_REVISION --notify NONE"
+ ssh -o 'PubkeyAcceptedKeyTypes +ssh-rsa' -p 29418 fbot@gerrit.opnfv.org "$cmd"
+
+ # Make sure the caller job failed
+ exit 1
+ fi
+
+- builder:
+ name: lint-bash-code
+ builders:
+ - shell: |
+ #!/bin/bash
+ echo "Checking bash code..."
+ for f in $(egrep '\.sh$' modified_files)
+ do
+ bash -n "$f" 2>> bash-violation.log
+ done
+ if [[ -s bash-violation.log ]]; then
+ echo -e "Bash syntax error(s)\n---" >> violation.log
+ sed -e 's/^/ /g' bash-violation.log >> violation.log
+ fi
+
+- builder:
+ name: lint-python-code
+ builders:
+ - shell: |
+ #!/bin/bash
+
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install flake8 -y
+
+ echo "Checking python code..."
+ for f in $(egrep '\.py$' modified_files)
+ do
+ flake8 "$f" >> python-violation.log
+ done
+ if [[ -s python-violation.log ]]; then
+ echo -e "Python violation(s)\n---" >> violation.log
+ sed -e 's/^/ /g' python-violation.log >> violation.log
+ fi
+
+- builder:
+ name: lint-yaml-code
+ builders:
+ - shell: |
+ #!/bin/bash
+
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install yamllint -y
+
+ echo "Checking yaml file..."
+ for f in $(egrep '\.ya?ml$' modified_files)
+ do
+ yamllint "$f" >> yaml-violation.log
+ done
+ if [[ -s yaml-violation.log ]]; then
+ echo -e "YAML violation(s)\n---" >> violation.log
+ sed -e 's/^/ /g' yaml-violation.log >> violation.log
+ fi
+
+- builder:
+ name: lint-all-code
+ builders:
+ - lint-init
+ - lint-bash-code
+ - lint-python-code
+ - lint-yaml-code
+ - lint-report
+
+- project:
+ name: releng-lint
+ jobs:
+ - 'releng-lint-verify'
+
+- job-template:
+ name: 'releng-lint-verify'
+ parameters:
+ - releng-jjb-node:
+ node: opnfv-build
+ scm:
+ - releng-scm:
+ ref: $GERRIT_REFSPEC
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event
+ - comment-added-contains-event:
+ comment-contains-value: recheck
+ - comment-added-contains-event:
+ comment-contains-value: reverify
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'releng'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: 'master'
+ file-paths:
+ - compare-type: ANT
+ pattern: '**/*.py'
+ - compare-type: ANT
+ pattern: '**/*.sh'
+ - compare-type: ANT
+ pattern: '**/*.yml'
+ - compare-type: ANT
+ pattern: '**/*.yaml'
+ builders:
+ - lint-all-code
+
+- project:
+ name: releng-generate-artifacts-index-pages
+ jobs:
+ - releng-generate-artifacts-index-pages
+
+- job-template:
+ name: releng-generate-artifacts-index-pages
+ scm:
+ - releng-scm:
+ ref: master
+ parameters:
+ - releng-jjb-node:
+ node: opnfv-build
+ triggers:
+ - timed: '@daily'
+ builders:
+ - generate-artifact-html
+
+- builder:
+ name: generate-artifact-html
+ builders:
+ - shell: |
+ curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add -
+ echo "deb https://packages.cloud.google.com/apt cloud-sdk main" | sudo tee \
+ /etc/apt/sources.list.d/google-cloud-sdk.list
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install \
+ google-cloud-sdk -y
+ cd $WORKSPACE/utils/
+ ./artifacts.opnfv.org.sh
+
+- project:
+ name: releng-artifact-cleanup
+ jobs:
+ - releng-artifact-cleanup
+
+- job-template:
+ name: releng-artifact-cleanup
+ scm:
+ - releng-scm:
+ ref: master
+ parameters:
+ - releng-jjb-node:
+ node: opnfv-build
+ triggers:
+ - timed: '@daily'
+ builders:
+ - releng-artifact-cleanup
+
+- builder:
+ name: releng-artifact-cleanup
+ builders:
+ - shell: |
+ curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add -
+ echo "deb https://packages.cloud.google.com/apt cloud-sdk main" | sudo tee \
+ /etc/apt/sources.list.d/google-cloud-sdk.list
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install \
+ google-cloud-sdk -y
+ $WORKSPACE/utils/retention_script.sh
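
Note on the file above: each job-template expands once per entry in the owning project's lists, so 'releng-release-{stream}-verify'/'-merge' become concrete jobs such as releng-release-nile-verify and releng-release-2023.1-merge. A quick local sanity check of the expansion, mirroring what the releng-jjb-verify builder runs (assumed to be executed from the releng repository root; the output directory is arbitrary):

    jenkins-jobs test --recursive -o /tmp/jjb-out jjb
    # optional: list the generated release jobs
    ls /tmp/jjb-out | grep '^releng-release-'
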
diff --git a/jjb/releng/releng-release-create-branch.sh b/jjb/releng/releng-release-create-branch.sh
index 92be0e637..beb493b1a 100644
--- a/jjb/releng/releng-release-create-branch.sh
+++ b/jjb/releng/releng-release-create-branch.sh
@@ -9,29 +9,65 @@
##############################################################################
set -xe
+GIT_URL=${GIT_URL:-https://gerrit.opnfv.org/gerrit}
+STREAM=${STREAM:-'nostream'}
+RELEASE_FILES=$(git diff HEAD^1 --name-only -- "releases/$STREAM")
+
# Configure the git user/email as we'll be pushing up changes
-git config --global user.name "jenkins-ci"
+git config --global user.name "fbot"
git config --global user.email "jenkins-opnfv-ci@opnfv.org"
# Ensure we are able to generate Commit-IDs for new patchsets
curl -kLo .git/hooks/commit-msg https://gerrit.opnfv.org/gerrit/tools/hooks/commit-msg
chmod +x .git/hooks/commit-msg
-# Activate virtualenv, supressing shellcheck warning
-# shellcheck source=/dev/null
-. $WORKSPACE/venv/bin/activate
-pip install -r releases/scripts/requirements.txt
+clone_repo(){
+echo "--> Cloning $repo"
+if [ ! -d $repo ]; then
+ git clone $GIT_URL/$repo.git $repo
+fi
+}
-STREAM=${STREAM:-'nostream'}
-RELEASE_FILES=$(git diff HEAD^1 --name-only -- "releases/$STREAM")
+check_if_ref_exists(){
+clone_repo
+cd "$repo"
+if git rev-list refs/heads/master | grep "$ref"; then
+ echo "$ref exists"
+ REF_EXISTS=true
+ cd -
+else
+ echo "$ref Does not exist please submit a valid ref for branching"
+ exit 1
+fi
+}
+
+run_merge(){
+unset NEW_FILES
+if [[ $REF_EXISTS = true && "$JOB_NAME" =~ "merge" ]]; then
+ ssh -o 'PubkeyAcceptedKeyTypes +ssh-rsa' -n -f -p 29418 fbot@gerrit.opnfv.org gerrit create-branch "$repo" "$branch" "$ref"
+ python3 releases/scripts/create_jobs.py -f $release_file
+ NEW_FILES=$(git status --porcelain --untracked=no | cut -c4-)
+fi
+if [ -n "$NEW_FILES" ]; then
+ git add $NEW_FILES
+ git commit -sm "Create Stable Branch Jobs for $(basename $release_file .yaml)"
+ git push origin HEAD:refs/for/master
+fi
+}
+
+main(){
for release_file in $RELEASE_FILES; do
- python releases/scripts/create_branch.py -f $release_file
- python releases/scripts/create_jobs.py -f $release_file
- NEW_FILES=$(git status --porcelain --untracked=no | cut -c4-)
- if [ -n "$NEW_FILES" ]; then
- git add $NEW_FILES
- git commit -sm "Create Stable Branch Jobs for $(basename $release_file .yaml)"
- git push origin HEAD:refs/for/master
- fi
+ while read -r repo branch ref; do
+ echo "$repo" "$branch" "$ref"
+ branches="$(git ls-remote "https://gerrit.opnfv.org/gerrit/$repo.git" "refs/heads/$branch")"
+ if ! [ -z "$branches" ]; then
+ echo "refs/heads/$branch already exists at $ref ($branches)"
+ else
+ # Validate the requested ref exists in the repo before branching from it
+ check_if_ref_exists
+ run_merge
+ fi
+ done < <(python3 releases/scripts/repos.py -b -f "$release_file")
 done
+}
+
+main
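
For context, main() reads one line per repository from releases/scripts/repos.py -b -f <release_file>, in the three whitespace-separated fields the read loop expects: repository, stable branch to create, and ref to branch from. A purely illustrative line, not taken from a real release file:

    # repo      branch          ref
    sandbox     stable/nile     0123456789abcdef0123456789abcdef01234567
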
diff --git a/jjb/releng/releng-release-jobs.yaml b/jjb/releng/releng-release-jobs.yaml
deleted file mode 100644
index c24cdb91d..000000000
--- a/jjb/releng/releng-release-jobs.yaml
+++ /dev/null
@@ -1,119 +0,0 @@
-# SPDX-License-Identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 The Linux Foundation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
----
-- project:
- name: releng-release-jobs
-
- build-node: 'opnfv-build'
-
- stream:
- - fraser
-
- jobs:
- - 'releng-release-{stream}-verify'
- - 'releng-release-{stream}-merge'
-
- project: 'releng'
-
-- job-template:
- name: 'releng-release-{stream}-verify'
-
- parameters:
- - '{build-node}-defaults'
- - stream-parameter:
- stream: '{stream}'
- - project-parameter:
- project: '{project}'
- branch: 'master'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'releng'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/master'
- file-paths:
- - compare-type: ANT
- pattern: 'releases/{stream}/**'
- - compare-type: ANT
- pattern: 'releases/schema.yaml'
- - compare-type: ANT
- pattern: 'releases/scripts/verify_schema.py'
-
- builders:
- - shell: !include-raw-escape:
- - releng-release-create-venv.sh
- - releng-release-tagging.sh
-
- publishers:
- - email-jenkins-admins-on-failure
-
-- job-template:
- name: 'releng-release-{stream}-merge'
-
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Only run merge job on build1'
- default-slaves:
- - lf-build1
- allowed-multiselect: false
- ignore-offline-nodes: true
- - stream-parameter:
- stream: '{stream}'
- - project-parameter:
- project: '{project}'
- branch: 'master'
- # Override GIT_URL so we can send patches back to Gerrit and
- # modify repos
- - string:
- name: GIT_URL
- default: ssh://$USER@gerrit.opnfv.org:29418/
- description: 'Git URL to use on this Jenkins Slave'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit-trigger-change-merged:
- project: '{project}'
- branch: 'master'
- files: 'releases/{stream}/*'
-
- builders:
- - shell: !include-raw-escape:
- - releng-release-create-venv.sh
- - releng-release-tagging.sh
- # - releng-release-create-branch.sh
-
- publishers:
- - email-jenkins-admins-on-failure
-
-- parameter:
- name: stream-parameter
- parameters:
- - string:
- name: STREAM
- default: '{stream}'
- description: "OPNFV Stable Stream"
diff --git a/jjb/releng/releng-release-tagging.sh b/jjb/releng/releng-release-tagging.sh
index f8cf9c8ea..1fce35f6c 100644
--- a/jjb/releng/releng-release-tagging.sh
+++ b/jjb/releng/releng-release-tagging.sh
@@ -8,6 +8,7 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
set -e -o pipefail
+set -x
GIT_URL=${GIT_URL:-https://gerrit.opnfv.org/gerrit}
STREAM=${STREAM:-'nostream'}
@@ -15,10 +16,6 @@ RELEASE_FILES=$(git diff HEAD^1 --name-only -- "releases/$STREAM")
echo "--> Verifying $RELEASE_FILES."
for release_file in $RELEASE_FILES; do
- # Verify the release file schema
- python releases/scripts/verify_schema.py \
- -s releases/schema.yaml \
- -y $release_file
# Verify tag for each repo exist and are attached to commits on stable-branch
while read -r repo tag ref
@@ -52,7 +49,7 @@ for release_file in $RELEASE_FILES; do
# If the tag doesn't exist and we're in a merge job,
# everything has been verified up to this point and we
# are ready to create the tag.
- git config --global user.name "jenkins-ci"
+ git config --global user.name "fbot"
git config --global user.email "jenkins-opnfv-ci@opnfv.org"
echo "--> Creating $tag tag for $repo at $ref"
git tag -am "$tag" $tag $ref
@@ -66,5 +63,5 @@ for release_file in $RELEASE_FILES; do
popd &> /dev/null
echo "--> Done verifing $repo"
- done < <(python releases/scripts/repos.py -f $release_file)
+ done < <(python3 releases/scripts/repos.py -f $release_file)
done
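
The tagging loop above consumes the same script's output without the -b flag, so each line is expected to carry a repository, the tag to create, and the ref to tag; only the interpreter changes here (python to python3). An illustrative, hypothetical line:

    sandbox     opnfv-7.0.0     0123456789abcdef0123456789abcdef01234567
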
diff --git a/jjb/releng/releng-views.yaml b/jjb/releng/releng-views.yaml
new file mode 100644
index 000000000..fbd69982d
--- /dev/null
+++ b/jjb/releng/releng-views.yaml
@@ -0,0 +1,12 @@
+---
+- view:
+ name: releng
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^releng-.*
diff --git a/jjb/releng/testresults-automate.yaml b/jjb/releng/testresults-automate.yaml
index 01fbe0c6d..3a9beadfe 100644
--- a/jjb/releng/testresults-automate.yaml
+++ b/jjb/releng/testresults-automate.yaml
@@ -28,6 +28,8 @@
name: SLAVE_LABEL
default: 'testresults'
description: 'Slave label on Jenkins'
+ all-nodes: false
+ node-eligibility: 'ignore-offline'
- project-parameter:
project: 'releng-testresults'
branch: 'master'
diff --git a/jjb/releng/testresults-verify.yaml b/jjb/releng/testresults-verify.yaml
index 958833c0f..c4f4c0f0b 100644
--- a/jjb/releng/testresults-verify.yaml
+++ b/jjb/releng/testresults-verify.yaml
@@ -12,6 +12,7 @@
jobs:
- '{module}-verify-{stream}'
- '{module}-client-verify-{stream}'
+ - '{module}-ui-verify-{stream}'
project: 'releng-testresults'
@@ -30,7 +31,7 @@
triggers:
- gerrit-trigger-patchset-created:
server: 'gerrit.opnfv.org'
- project: '**'
+ project: '{project}'
branch: '{branch}'
files: '{module}/**'
@@ -61,7 +62,7 @@
triggers:
- gerrit-trigger-patchset-created:
server: 'gerrit.opnfv.org'
- project: '**'
+ project: '{project}'
branch: '{branch}'
files: '{module}/{module}-client/**'
@@ -76,3 +77,61 @@
publishers:
- publish-coverage
- email-jenkins-admins-on-failure
+
+- job-template:
+ name: '{module}-ui-verify-{stream}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
+
+ scm:
+ - git-scm-gerrit
+
+ wrappers:
+ - nodejs-installator:
+ name: "testapi node"
+
+ triggers:
+ - gerrit-trigger-patchset-created:
+ server: 'gerrit.opnfv.org'
+ project: '{project}'
+ branch: '{branch}'
+ files: '{module}/opnfv_testapi/ui/**'
+
+ builders:
+ - shell: |
+ set -e
+
+ if ! which google-chrome > /dev/null; then
+ #Install Google Chrome
+ wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | sudo apt-key add -
+ echo 'deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main' | sudo tee \
+ /etc/apt/sources.list.d/google-chrome.list
+ sudo apt-get update
+ sudo apt-get install -y google-chrome-stable
+ fi
+
+ npm -v
+ node -v
+ cd ./{module}/opnfv_testapi/ui/
+ npm install -g grunt-cli
+ npm install
+ grunt e2e
+ # Move the coverage XML to the workspace root, as required by the
+ # publish-coverage publisher.
+ mv ../tests/UI/coverage/cobertura-coverage.xml ../../../coverage.xml
+
+ publishers:
+ - text-finder-failures
+ - publish-coverage
+ - email-jenkins-admins-on-failure
+
+- publisher:
+ name: 'text-finder-failures'
+ publishers:
+ - text-finder:
+ regexp: " 0 failures"
+ also-check-console-output: true
+ succeed-if-found: true
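
The text-finder publisher added above marks the build successful when ' 0 failures' appears in the console log, i.e. when the grunt e2e run ends with a clean jasmine-style summary. A hypothetical example of the line it is meant to match (actual output depends on the test runner):

    25 specs, 0 failures
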
diff --git a/jjb/securedlab/check-jinja2.yaml b/jjb/securedlab/check-jinja2.yaml
deleted file mode 100644
index f7a7941dc..000000000
--- a/jjb/securedlab/check-jinja2.yaml
+++ /dev/null
@@ -1,85 +0,0 @@
----
-########################
-# Job configuration to validate jinja2 files
-########################
-- project:
-
- name: validate-templates
-
- project: 'securedlab'
-
- jobs:
- - 'validate-jinja2-templates-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - fraser:
- branch: 'stable/{stream}'
- disabled: false
-
-########################
-# job templates
-########################
-
-- job-template:
- name: 'validate-jinja2-templates-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - node:
- name: SLAVE_NAME
- description: Slave to execute jnija template test
- default-slaves:
- - lf-build1
- allowed-multiselect: true
- ignore-offline-nodes: true
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '**/*.yaml'
- - compare-type: ANT
- pattern: 'check-jinja2.sh'
- skip-vote:
- successful: true
- failed: true
- unstable: true
- notbuilt: true
-
- builders:
- - check-jinja
-
-- builder:
- name: check-jinja
- builders:
- - shell: |
- $WORKSPACE/check-jinja2.sh
diff --git a/jjb/sfc/sfc-project-jobs.yaml b/jjb/sfc/sfc-project-jobs.yaml
deleted file mode 100644
index f858d1f5f..000000000
--- a/jjb/sfc/sfc-project-jobs.yaml
+++ /dev/null
@@ -1,102 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: sfc-project-jobs
-
- project: 'sfc'
-
- jobs:
- - 'sfc-verify-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: true
-
-- job-template:
- name: 'sfc-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
- builders:
- - sfc-unit-tests-and-docs
-
- publishers:
- - sfc-unit-tests-and-docs-publisher
-
-################################
-# job builders
-################################
-
-- builder:
- name: sfc-unit-tests-and-docs
- builders:
- - shell: |
- cd $WORKSPACE && tox
-
-################################
-# job publishers
-################################
-- publisher:
- name: 'sfc-unit-tests-and-docs-publisher'
- publishers:
- - junit:
- results: nosetests.xml
- - cobertura:
- report-file: "coverage.xml"
- only-stable: "true"
- fail-no-reports: "true"
- health-auto-update: "true"
- stability-auto-update: "true"
- zoom-coverage-chart: "true"
- targets:
- - files:
- healthy: 0
- unhealthy: 0
- failing: 0
- - method:
- healthy: 0
- unhealthy: 0
- failing: 0
- - email-jenkins-admins-on-failure
diff --git a/jjb/snaps/snaps-verify-jobs.yaml b/jjb/snaps/snaps-verify-jobs.yaml
deleted file mode 100644
index 10514c3b6..000000000
--- a/jjb/snaps/snaps-verify-jobs.yaml
+++ /dev/null
@@ -1,77 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: snaps
-
- project: '{name}'
-
- jobs:
- - 'snaps-verify-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: true
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: true
-
-- job-template:
- name: 'snaps-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: DEPLOYMENT_HOST_IP
- default: 192.168.122.2
- description: 'IP of the deployment node'
- - string:
- name: CONTROLLER_IP
- default: 192.168.122.3
- description: 'IP of the controller node'
- - 'intel-virtual10-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
- builders:
- - shell: |
- #!/bin/bash
-
- cd $WORKSPACE/ci
- ./run_tests.sh $DEPLOYMENT_HOST_IP $CONTROLLER_IP
diff --git a/jjb/stor4nfv/stor4nfv-project.yaml b/jjb/stor4nfv/stor4nfv-project.yaml
deleted file mode 100644
index 6e796cfd4..000000000
--- a/jjb/stor4nfv/stor4nfv-project.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- project:
- name: stor4nfv
-
- project: '{name}'
-
- jobs:
- - '{project}-verify-basic'
diff --git a/jjb/storperf/storperf-daily-jobs.yaml b/jjb/storperf/storperf-daily-jobs.yaml
deleted file mode 100644
index fe6aebb11..000000000
--- a/jjb/storperf/storperf-daily-jobs.yaml
+++ /dev/null
@@ -1,187 +0,0 @@
----
-###################################
-# job configuration for storperf
-###################################
-- project:
- name: storperf-daily
-
- project: storperf
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- docker-tag: 'latest'
- fraser: &fraser
- stream: fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- docker-tag: 'stable'
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # Installers using labels
- # CI PODs
- # This section should only contain the installers
- # that have been switched using labels for slaves
- # -------------------------------
- pod:
- # # fuel CI PODs
- # - baremetal:
- # slave-label: fuel-baremetal
- # installer: fuel
- # <<: *master
- # - virtual:
- # slave-label: fuel-virtual
- # installer: fuel
- # <<: *master
- # # joid CI PODs
- # - baremetal:
- # slave-label: joid-baremetal
- # installer: joid
- # <<: *master
- # - virtual:
- # slave-label: joid-virtual
- # installer: joid
- # <<: *master
- # # compass CI PODs
- # - baremetal:
- # slave-label: compass-baremetal
- # installer: compass
- # <<: *master
- # - virtual:
- # slave-label: compass-virtual
- # installer: compass
- # <<: *master
- # # apex CI PODs
- # - virtual:
- # slave-label: apex-virtual-master
- # installer: apex
- # <<: *master
- - baremetal:
- slave-label: apex-baremetal-master
- installer: apex
- <<: *master
- - baremetal:
- slave-label: apex-baremetal-master
- installer: apex
- <<: *fraser
- ## armband CI PODs
- # - armband-baremetal:
- # slave-label: armband-baremetal
- # installer: fuel
- # <<: *master
- # - armband-virtual:
- # slave-label: armband-virtual
- # installer: fuel
- # <<: *master
- ## daisy CI PODs
- # - baremetal:
- # slave-label: daisy-baremetal
- # installer: daisy
- # <<: *master
- # - virtual:
- # slave-label: daisy-virtual
- # installer: daisy
- # <<: *master
-
- jobs:
- - 'storperf-{installer}-{pod}-daily-{stream}'
-
-################################
-# job template
-################################
-- job-template:
- name: 'storperf-{installer}-{pod}-daily-{stream}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: '45'
- abort: true
- - fix-workspace-permissions
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl_l2-nofeature-ha'
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: 'Tag to pull docker image'
- - string:
- name: CLEAN_DOCKER_IMAGES
- default: 'false'
- description: 'Remove downloaded docker images (opnfv/storperf*:*)'
- - string:
- name: GS_PATHNAME
- default: '{gs-pathname}'
- description: "Version directory where the opnfv documents will be stored in gs repository"
- - string:
- name: DISK_TYPE
- default: 'HDD'
- description: 'The type of hard disk that Cinder uses'
- - string:
- name: VOLUME_SIZE
- default: '2'
- description: 'Size of Cinder volume (in GB)'
- - string:
- name: WORKLOADS
- default: 'rw'
- description: 'Workloads to run'
- - string:
- name: BLOCK_SIZES
- default: '16384'
- description: 'Block sizes for VM I/O operations'
- - string:
- name: QUEUE_DEPTHS
- default: '4'
- description: 'Number of simultaneous I/O operations to keep active'
- - string:
- name: STEADY_STATE_SAMPLES
- default: '10'
- description: 'Number of samples to use (1 per minute) to measure steady state'
- - string:
- name: TEST_CASE
- choices:
- - 'snia_steady_state'
- description: 'The test case to run'
-
- scm:
- - git-scm
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - 'storperf-daily-builder'
-
-########################
-# builder macros
-########################
-- builder:
- name: storperf-daily-builder
- builders:
- - shell: |
- #!/bin/bash
-
- cd $WORKSPACE
- ./ci/daily.sh
diff --git a/jjb/storperf/storperf-verify-jobs.yaml b/jjb/storperf/storperf-verify-jobs.yaml
deleted file mode 100644
index 4d6b1740a..000000000
--- a/jjb/storperf/storperf-verify-jobs.yaml
+++ /dev/null
@@ -1,200 +0,0 @@
----
-- project:
- name: storperf-verify
-
- project: 'storperf'
-
- # -------------------------------
- # branches
- # -------------------------------
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- docker-tag: 'latest'
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- docker-tag: 'stable'
- # -------------------------------
- # patch verification phases
- # -------------------------------
- phase:
- - 'unit-test':
- slave-label: 'opnfv-build-ubuntu'
- - 'build-x86_64':
- slave-label: 'opnfv-build-ubuntu'
- - 'build-aarch64':
- slave-label: 'opnfv-build-ubuntu-arm'
- # -------------------------------
- # jobs
- # -------------------------------
- jobs:
- - 'storperf-verify-{stream}'
- - 'storperf-verify-{phase}-{stream}'
-# -------------------------------
-# job templates
-# -------------------------------
-- job-template:
- name: 'storperf-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- project-type: 'multijob'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- builders:
- - shell: |
- #!/bin/bash
-
- # we do nothing here as the main stuff will be done
- # in phase jobs
- echo "Triggering phase jobs!"
- - multijob:
- name: 'storperf-build-and-unittest'
- execution-type: PARALLEL
- projects:
- - name: 'storperf-verify-unit-test-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- git-revision: true
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: false
- - name: 'storperf-verify-build-x86_64-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- ARCH=x86_64
- git-revision: true
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: false
- - name: 'storperf-verify-build-aarch64-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- ARCH=aarch64
- git-revision: true
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: false
-
-- job-template:
- name: 'storperf-verify-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 60
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-defaults'
-
- scm:
- - git-scm-gerrit
-
- builders:
- - 'storperf-verify-{phase}-builders-macro'
-
- publishers:
- - 'storperf-verify-{phase}-publishers-macro'
-# -------------------------------
-# builder macros
-# -------------------------------
-- builder:
- name: 'storperf-verify-unit-test-builders-macro'
- builders:
- - shell: |
- $WORKSPACE/ci/verify.sh
-
-- builder:
- name: 'storperf-verify-build-x86_64-builders-macro'
- builders:
- - shell: |
- $WORKSPACE/ci/verify-build.sh
-
-- builder:
- name: 'storperf-verify-build-aarch64-builders-macro'
- builders:
- - shell: |
- $WORKSPACE/ci/verify-build.sh
-# -------------------------------
-# publisher macros
-# -------------------------------
-- publisher:
- name: 'storperf-verify-unit-test-publishers-macro'
- publishers:
- - junit:
- results: nosetests.xml
- - cobertura:
- report-file: "coverage.xml"
- only-stable: "true"
- health-auto-update: "true"
- stability-auto-update: "true"
- zoom-coverage-chart: "true"
- targets:
- - files:
- healthy: 60
- unhealthy: 50
- failing: 40
- - method:
- healthy: 60
- unhealthy: 50
- failing: 40
- - email-jenkins-admins-on-failure
-
-- publisher:
- name: 'storperf-verify-build-x86_64-publishers-macro'
- publishers:
- - email-jenkins-admins-on-failure
-
-- publisher:
- name: 'storperf-verify-build-aarch64-publishers-macro'
- publishers:
- - email-jenkins-admins-on-failure
diff --git a/jjb/storperf/storperf.yaml b/jjb/storperf/storperf.yaml
deleted file mode 100644
index fe5784777..000000000
--- a/jjb/storperf/storperf.yaml
+++ /dev/null
@@ -1,78 +0,0 @@
----
-- project:
- name: storperf
-
- project: '{name}'
-
- jobs:
- - 'storperf-merge-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- docker-tag: 'latest'
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- docker-tag: 'stable'
-
-- job-template:
- name: 'storperf-merge-{stream}'
-
- node: opnfv-build-ubuntu
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- # yamllint disable rule:line-length
- description: "Used for overriding the GIT URL coming from Global Jenkins\
- \ configuration in case if the stuff is done on none-LF HW."
- # yamllint enable rule:line-length
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- builders:
- - shell: |
- $WORKSPACE/ci/merge.sh
-
- publishers:
- - junit:
- results: nosetests.xml
- - cobertura:
- report-file: "coverage.xml"
- only-stable: "true"
- health-auto-update: "true"
- stability-auto-update: "true"
- zoom-coverage-chart: "true"
- targets:
- - files:
- healthy: 10
- unhealthy: 20
- failing: 30
- - method:
- healthy: 50
- unhealthy: 40
- failing: 30
- - email-jenkins-admins-on-failure
diff --git a/jjb/ves/ves.yaml b/jjb/ves/ves.yaml
deleted file mode 100644
index f8c5da2b4..000000000
--- a/jjb/ves/ves.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- project:
- name: ves
-
- project: '{name}'
-
- jobs:
- - '{project}-verify-basic'
diff --git a/jjb/vnf_forwarding_graph/vnf_forwarding_graph.yaml b/jjb/vnf_forwarding_graph/vnf_forwarding_graph.yaml
deleted file mode 100644
index b11c20f62..000000000
--- a/jjb/vnf_forwarding_graph/vnf_forwarding_graph.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- project:
- name: vnf_forwarding_graph
-
- project: '{name}'
-
- jobs:
- - '{project}-verify-basic'
diff --git a/jjb/vswitchperf/vswitchperf.yaml b/jjb/vswitchperf/vswitchperf.yaml
deleted file mode 100644
index ba0742f3b..000000000
--- a/jjb/vswitchperf/vswitchperf.yaml
+++ /dev/null
@@ -1,199 +0,0 @@
----
-- project:
-
- name: vswitchperf
-
- project: '{name}'
-
- jobs:
- - 'vswitchperf-verify-{stream}'
- - 'vswitchperf-merge-{stream}'
- - 'vswitchperf-daily-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-- job-template:
-
- name: 'vswitchperf-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'vswitchperf-verify-.*'
- - 'vswitchperf-merge-.*'
- - 'vswitchperf-daily-.*'
- block-level: 'NODE'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'intel-pod12-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - pollscm:
- cron: '@midnight'
-
- builders:
- - shell: |
- pwd
- cd src
- make clobber
- make MORE_MAKE_FLAGS="-j 10"
- cd ../ci
- scl enable rh-python34 "source ~/vsperfenv/bin/activate ; ./build-vsperf.sh daily"
-
-- job-template:
- name: 'vswitchperf-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- project-type: freestyle
-
- concurrent: true
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'vswitchperf-verify-.*'
- - 'vswitchperf-merge-.*'
- - 'vswitchperf-daily-.*'
- block-level: 'NODE'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod12
- - ericsson-build4
- default-slaves:
- - intel-pod12
- - ericsson-build4
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
-
- builders:
- - shell: |
- pwd
- cd src
- make clobber
- make MORE_MAKE_FLAGS="-j 5"
- cd ../ci
- ./build-vsperf.sh verify
-
-- job-template:
- name: 'vswitchperf-merge-{stream}'
-
- disabled: '{obj:disabled}'
-
- project-type: freestyle
-
- concurrent: true
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'vswitchperf-verify-.*'
- - 'vswitchperf-merge-.*'
- - 'vswitchperf-daily-.*'
- block-level: 'NODE'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod12
- - ericsson-build4
- default-slaves:
- - intel-pod12
- - ericsson-build4
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
-
- builders:
- - shell: |
- pwd
- cd src
- make clobber
- make MORE_MAKE_FLAGS="-j 5"
- cd ../ci
- ./build-vsperf.sh merge
diff --git a/jjb/xci/bifrost-cleanup-job.yaml b/jjb/xci/bifrost-cleanup-job.yaml
deleted file mode 100644
index d51776173..000000000
--- a/jjb/xci/bifrost-cleanup-job.yaml
+++ /dev/null
@@ -1,146 +0,0 @@
----
-- project:
- name: 'openstack-bifrost-cleanup'
- # -------------------------------
- # branches
- # -------------------------------
- stream:
- - master:
- branch: '{stream}'
-
- # -------------------------------
- # projects
- # -------------------------------
- project:
- - 'openstack':
- project-repo: 'https://git.openstack.org/openstack/bifrost'
- clone-location: '/opt/bifrost'
- - 'opnfv':
- project-repo: 'https://gerrit.opnfv.org/gerrit/releng-xci'
- clone-location: '/opt/releng-xci'
-
- # -------------------------------
- # jobs
- # -------------------------------
- jobs:
- - '{project}-bifrost-cleanup-{stream}'
-
-- job-template:
- name: '{project}-bifrost-cleanup-{stream}'
-
- disabled: true
- concurrent: false
-
- node: bifrost-verify-virtual
-
- # Make sure no verify job is running on any of the slaves since that would
- # produce build logs after we wipe the destination directory.
- properties:
- - build-blocker:
- blocking-jobs:
- - '{project}-bifrost-verify-*'
-
- parameters:
- - string:
- name: PROJECT
- default: '{project}'
-
- builders:
- - shell: |
- #!/bin/bash
-
- set -eu
-
- # DO NOT change this unless you know what you are doing.
- BIFROST_GS_URL="gs://artifacts.opnfv.org/cross-community-ci/openstack/bifrost/$GERRIT_NAME/$GERRIT_CHANGE_NUMBER/"
-
- # This should never happen... even 'recheck' uses the last jobs'
- # gerrit information. Better exit with error so we can investigate
- [[ ! -n $GERRIT_NAME ]] || [[ ! -n $GERRIT_CHANGE_NUMBER ]] && exit 1
-
- echo "Removing build artifacts for $GERRIT_NAME/$GERRIT_CHANGE_NUMBER"
-
- if ! [[ "$BIFROST_GS_URL" =~ "/cross-community-ci/openstack/bifrost/" ]]; then
- echo "Oops! BIFROST_GS_URL=$BIFROST_GS_URL does not seem like a valid"
- echo "bifrost location on the Google storage server. Please double-check"
- echo "that it's set properly or fix this line if necessary."
- echo "gsutil will not be executed until this is fixed!"
- exit 1
- fi
- try_to_rm=1
- while [[ $try_to_rm -lt 6 ]]; do
- gsutil -m rm -r $BIFROST_GS_URL && _exitcode=$? && break
- _exitcode=$?
- echo "gsutil rm failed! Trying again... (attempt #$try_to_rm)"
- let try_to_rm += 1
- # Give it some time...
- sleep 10
- done
- exit $_exitcode
-
- triggers:
- - '{project}-gerrit-trigger-cleanup':
- branch: '{branch}'
-
- publishers:
- # yamllint disable rule:line-length
- - email:
- recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com zhang.jun3g@zte.com.cn
- # yamllint enable rule:line-length
- - email-jenkins-admins-on-failure
-# -------------------------------
-# trigger macros
-# -------------------------------
-- trigger:
- name: 'openstack-gerrit-trigger-cleanup'
- triggers:
- - gerrit:
- server-name: 'review.openstack.org'
- escape-quotes: true
- trigger-on:
- # We only run this when the change is merged or
- # abandoned since we don't need the logs anymore
- - change-merged-event
- - change-abandoned-event
- # This is an OPNFV maintenance job. We don't want to provide
- # feedback on Gerrit
- silent: true
- silent-start: true
- projects:
- - project-compare-type: 'PLAIN'
- project-pattern: 'openstack/bifrost'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'doc/**'
- - compare-type: ANT
- pattern: 'releasenotes/**'
- readable-message: true
-
-- trigger:
- name: 'opnfv-gerrit-trigger-cleanup'
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- # We only run this when the change is merged or
- # abandoned since we don't need the logs anymore
- - change-merged-event
- - change-abandoned-event
- # This is an OPNFV maintenance job. We don't want to provide
- # feedback on Gerrit
- silent: true
- silent-start: true
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'releng-xci'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'bifrost/**'
- readable-message: true
diff --git a/jjb/xci/bifrost-periodic-jobs.yaml b/jjb/xci/bifrost-periodic-jobs.yaml
deleted file mode 100644
index fbe2e205a..000000000
--- a/jjb/xci/bifrost-periodic-jobs.yaml
+++ /dev/null
@@ -1,152 +0,0 @@
----
-- project:
- project: 'releng-xci'
-
- name: 'bifrost-periodic'
- # -------------------------------
- # Branch Anchors
- # -------------------------------
- # the versions stated here default to branches which then later
- # on used for checking out the branches, pulling in head of the branch.
- master: &master
- stream: master
- openstack-bifrost-version: '{stream}'
- opnfv-releng-version: 'master'
- gs-pathname: ''
- ocata: &ocata
- stream: ocata
- openstack-bifrost-version: 'stable/{stream}'
- opnfv-releng-version: 'master'
- gs-pathname: '/{stream}'
- # -------------------------------
- # XCI PODs
- # -------------------------------
- pod:
- - virtual:
- <<: *master
- - virtual:
- <<: *ocata
- # -------------------------------
- # XCI PODs
- # -------------------------------
- # -------------------------------
- # Supported Distros
- # -------------------------------
- distro:
- - 'xenial':
- disabled: false
- slave-label: xci-xenial-virtual
- dib-os-release: 'xenial'
- dib-os-element: 'ubuntu-minimal'
- # yamllint disable rule:line-length
- dib-os-packages: 'vlan,vim,less,bridge-utils,sudo,language-pack-en,iputils-ping,rsyslog,curl,python,debootstrap,ifenslave,ifenslave-2.6,lsof,lvm2,tcpdump,nfs-kernel-server,chrony,iptables'
- # yamllint enable rule:line-length
- extra-dib-elements: 'openssh-server'
- - 'centos7':
- disabled: true
- slave-label: xci-centos7-virtual
- dib-os-release: '7'
- dib-os-element: 'centos7'
- dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
- extra-dib-elements: 'openssh-server'
- - 'suse':
- disabled: true
- slave-label: xci-suse-virtual
- dib-os-release: '42.3'
- dib-os-element: 'opensuse-minimal'
- dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
- extra-dib-elements: 'openssh-server'
-
- # -------------------------------
- # jobs
- # -------------------------------
- jobs:
- - 'bifrost-provision-{pod}-{distro}-periodic-{stream}'
-
-# -------------------------------
-# job templates
-# -------------------------------
-- job-template:
- name: 'bifrost-provision-{pod}-{distro}-periodic-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '^xci-os.*'
- - '^xci-deploy.*'
- - '^xci-functest.*'
- - '^bifrost-.*periodic.*'
- - '^osa-.*periodic.*'
- block-level: 'NODE'
- - logrotate-default
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{opnfv-releng-version}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- - string:
- name: XCI_FLAVOR
- default: 'ha'
- - string:
- name: OPENSTACK_BIFROST_VERSION
- default: '{openstack-bifrost-version}'
- - string:
- name: OPNFV_RELENG_VERSION
- default: '{opnfv-releng-version}'
- - string:
- name: DISTRO
- default: '{distro}'
- - string:
- name: DIB_OS_RELEASE
- default: '{dib-os-release}'
- - string:
- name: DIB_OS_ELEMENT
- default: '{dib-os-element}'
- - string:
- name: DIB_OS_PACKAGES
- default: '{dib-os-packages}'
- - string:
- name: EXTRA_DIB_ELEMENTS
- default: '{extra-dib-elements}'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - label:
- name: SLAVE_LABEL
- default: '{slave-label}'
- - string:
- name: CI_LOOP
- default: 'periodic'
-
- wrappers:
- - fix-workspace-permissions
-
- scm:
- - git-scm
-
- # trigger is disabled until we know which jobs we will have
- # and adjust stuff accordingly
- triggers:
- - timed: '' # '@midnight'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME - Flavor: $XCI_FLAVOR"
- - 'bifrost-provision-builder'
-
-# --------------------------
-# builder macros
-# --------------------------
-- builder:
- name: bifrost-provision-builder
- builders:
- - shell:
- !include-raw: ./bifrost-provision.sh
diff --git a/jjb/xci/bifrost-provision.sh b/jjb/xci/bifrost-provision.sh
deleted file mode 100755
index 4d646a676..000000000
--- a/jjb/xci/bifrost-provision.sh
+++ /dev/null
@@ -1,109 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-trap cleanup_and_upload EXIT
-
-function fix_ownership() {
- if [ -z "${JOB_URL+x}" ]; then
- echo "Not running as part of Jenkins. Handle the logs manually."
- else
- # Make sure cache exists
- [[ ! -d ${HOME}/.cache ]] && mkdir ${HOME}/.cache
-
- sudo chown -R jenkins:jenkins $WORKSPACE
- sudo chown -R jenkins:jenkins ${HOME}/.cache
- fi
-}
-
-function cleanup_and_upload() {
- original_exit=$?
- fix_ownership
- exit $original_exit
-}
-
-# check distro to see if we support it
-if [[ ! "$DISTRO" =~ (xenial|centos7|suse) ]]; then
- echo "Distro $DISTRO is not supported!"
- exit 1
-fi
-
-# remove previously cloned repos
-sudo /bin/rm -rf /opt/bifrost /opt/openstack-ansible /opt/releng-xci /opt/functest
-
-# Fix up permissions
-fix_ownership
-
-# ensure the versions to checkout are set
-export OPENSTACK_BIFROST_VERSION=${OPENSTACK_BIFROST_VERSION:-master}
-export OPNFV_RELENG_VERSION=${OPNFV_RELENG_VERSION:-master}
-
-# log some info
-echo -e "\n"
-echo "***********************************************************************"
-echo "* *"
-echo "* Provision OpenStack Nodes *"
-echo "* *"
-echo " bifrost version: $OPENSTACK_BIFROST_VERSION"
-echo " releng version: $OPNFV_RELENG_VERSION"
-echo "* *"
-echo "***********************************************************************"
-echo -e "\n"
-
-# clone the repos and checkout the versions
-sudo git clone --quiet https://git.openstack.org/openstack/bifrost /opt/bifrost
-cd /opt/bifrost && sudo git checkout --quiet $OPENSTACK_BIFROST_VERSION
-echo "xci: using bifrost commit"
-git show --oneline -s --pretty=format:'%h - %s (%cr) <%an>'
-
-sudo git clone --quiet https://gerrit.opnfv.org/gerrit/releng-xci /opt/releng-xci
-cd /opt/releng-xci && sudo git checkout --quiet $OPNFV_RELENG_VERSION
-echo "xci: using releng commit"
-git show --oneline -s --pretty=format:'%h - %s (%cr) <%an>'
-
-# source flavor vars
-source "$WORKSPACE/xci/config/${XCI_FLAVOR}-vars"
-
-# combine opnfv and upstream scripts/playbooks
-sudo /bin/cp -rf /opt/releng-xci/bifrost/* /opt/bifrost/
-
-# cleanup remnants of previous deployment
-cd /opt/bifrost
-sudo -E ./scripts/destroy-env.sh
-
-# provision VMs for the flavor
-cd /opt/bifrost
-./scripts/bifrost-provision.sh
-
-# list the provisioned VMs
-cd /opt/bifrost
-source env-vars
-ironic node-list
-sudo -H -E virsh list
-
-echo "OpenStack nodes are provisioned!"
-# here we have to do something in order to capture what was the working sha1
-# hardcoding stuff for the timebeing
-
-cd /opt/bifrost
-BIFROST_GIT_SHA1=$(git rev-parse HEAD)
-
-# log some info
-echo -e "\n"
-echo "***********************************************************************"
-echo "* BIFROST SHA1 TO PIN *"
-echo "* *"
-echo " $BIFROST_GIT_SHA1"
-echo "* *"
-echo "***********************************************************************"
-
-echo -e "\n"
diff --git a/jjb/xci/bifrost-verify-jobs.yaml b/jjb/xci/bifrost-verify-jobs.yaml
deleted file mode 100644
index 4ca783524..000000000
--- a/jjb/xci/bifrost-verify-jobs.yaml
+++ /dev/null
@@ -1,223 +0,0 @@
----
-- project:
- name: 'openstack-bifrost-verify'
- # -------------------------------
- # branches
- # -------------------------------
- stream:
- - master:
- branch: '{stream}'
- # -------------------------------
- # distros
- # -------------------------------
- distro:
- - 'xenial':
- disabled: true
- dib-os-release: 'xenial'
- dib-os-element: 'ubuntu-minimal'
- dib-os-packages: 'vlan,vim,less,bridge-utils,language-pack-en,iputils-ping,rsyslog,curl'
- extra-dib-elements: 'openssh-server'
- - 'centos7':
- disabled: true
- dib-os-release: '7'
- dib-os-element: 'centos-minimal'
- dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
- extra-dib-elements: 'openssh-server'
- - 'opensuse423':
- disabled: true
- dib-os-release: '42.3'
- dib-os-element: 'opensuse-minimal'
- dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
- extra-dib-elements: 'openssh-server'
- # -------------------------------
- # type
- # -------------------------------
- type:
- - virtual
- # -------------------------------
- # jobs
- # -------------------------------
- jobs:
- - 'openstack-bifrost-verify-{distro}-{type}-{stream}'
-
-# -------------------------------
-# VM defaults
-# -------------------------------
-- defaults:
- name: verify_vm_defaults
- test-vm-num-nodes: '3'
- test-vm-node-names: 'opnfv controller00 compute00'
- vm-domain-type: 'kvm'
- vm-cpu: '2'
- vm-disk: '30'
- vm-memory-size: '4096'
- vm-disk-cache: 'unsafe'
-
-# -------------------------------
-# job templates
-# -------------------------------
-- job-template:
- name: 'openstack-bifrost-verify-{distro}-{type}-{stream}'
-
- disabled: '{obj:disabled}'
-
- defaults: verify_vm_defaults
-
- concurrent: false
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'xci-verify-{distro}-.*'
- - 'xci-.*-{distro}-merge-.*'
- - '.*-bifrost-verify.*-{type}'
- - throttle:
- max-per-node: 2
- max-total: 10
- categories:
- - xci-verify-virtual
- option: category
-
- block-level: 'NODE'
-
- parameters:
- - string:
- name: PROJECT_REPO
- default: 'https://git.openstack.org/openstack/bifrost'
- - string:
- name: DISTRO
- default: '{distro}'
- - string:
- name: DIB_OS_RELEASE
- default: '{dib-os-release}'
- - string:
- name: DIB_OS_ELEMENT
- default: '{dib-os-element}'
- - string:
- name: EXTRA_DIB_ELEMENTS
- default: '{extra-dib-elements}'
- - string:
- name: DIB_OS_PACKAGES
- default: '{dib-os-packages}'
- - string:
- name: TEST_VM_NUM_NODES
- default: '{test-vm-num-nodes}'
- - string:
- name: TEST_VM_NODE_NAMES
- default: '{test-vm-node-names}'
- - string:
- name: VM_DOMAIN_TYPE
- default: '{vm-domain-type}'
- - string:
- name: VM_CPU
- default: '{vm-cpu}'
- - string:
- name: VM_DISK
- default: '{vm-disk}'
- - string:
- name: VM_MEMORY_SIZE
- default: '{vm-memory-size}'
- - string:
- name: VM_DISK_CACHE
- default: '{vm-disk-cache}'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - label:
- name: SLAVE_LABEL
- default: 'xci-virtual'
- - string:
- name: CI_LOOP
- default: 'verify'
-
- scm:
- - git:
- url: '$PROJECT_REPO'
- refspec: '$GERRIT_REFSPEC'
- branches:
- - 'origin/$BRANCH'
- skip-tag: true
- choosing-strategy: 'gerrit'
- timeout: 10
- wipe-workspace: true
-
- triggers:
- - 'openstack-gerrit-trigger':
- branch: '{branch}'
-
- builders:
- - bifrost-set-name
- - bifrost-build
-
- wrappers:
- - fix-workspace-permissions
- - build-timeout:
- timeout: 180
-
- publishers:
- # yamllint disable rule:line-length
- - email:
- recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com julienjut@gmail.com
- # yamllint enable rule:line-length
- - email-jenkins-admins-on-failure
- - postbuildscript:
- builders:
- - role: BOTH
- build-on:
- - ABORTED
- - FAILURE
- - NOT_BUILT
- - SUCCESS
- - UNSTABLE
- build-steps:
- - shell: !include-raw: ./xci-cleanup.sh
- mark-unstable-if-failed: true
-
-# -------------------------------
-# trigger macros
-# -------------------------------
-- trigger:
- name: 'openstack-gerrit-trigger'
- triggers:
- - gerrit:
- server-name: 'review.openstack.org'
- escape-quotes: true
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- silent-start: true
- custom-url: '* $JOB_NAME $BUILD_URL'
- projects:
- - project-compare-type: 'PLAIN'
- project-pattern: 'openstack/bifrost'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'doc/**'
- - compare-type: ANT
- pattern: 'releasenotes/**'
- readable-message: true
-
-# --------------------------
-# builder macros
-# --------------------------
-- builder:
- name: bifrost-set-name
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
-
-- builder:
- name: bifrost-build
- builders:
- - shell:
- !include-raw: ./bifrost-verify.sh
diff --git a/jjb/xci/bifrost-verify.sh b/jjb/xci/bifrost-verify.sh
deleted file mode 100755
index c810212e3..000000000
--- a/jjb/xci/bifrost-verify.sh
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-git clone https://gerrit.opnfv.org/gerrit/releng-xci $WORKSPACE/releng-xci
-
-cd $WORKSPACE
-git fetch $PROJECT_REPO $GERRIT_REFSPEC && sudo git checkout FETCH_HEAD
-
-# combine opnfv and upstream scripts/playbooks
-/bin/cp -rf $WORKSPACE/releng-xci/xci/infra/bifrost/* $WORKSPACE/
-
-cd $WORKSPACE/releng-xci
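-# generate the test script that will later be copied into the clean VM and executed there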
-cat > bifrost_test.sh<<EOF
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-cd ~/bifrost
-# set path for XCI repository
-export XCI_PATH=~/bifrost/releng-xci
-
-# provision 3 VMs: xcimaster, controller, and compute
-./scripts/bifrost-provision.sh | ts
-
-sudo -H -E virsh list
-EOF
-chmod a+x bifrost_test.sh
-
-# Fix up distros
-case ${DISTRO} in
- xenial) VM_DISTRO=ubuntu ;;
- centos7) VM_DISTRO=centos ;;
- *suse*) VM_DISTRO=opensuse ;;
-esac
-
-export XCI_BUILD_CLEAN_VM_OS=false
-export XCI_UPDATE_CLEAN_VM_OS=true
-
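-# boot a clean VM for the target distro, then sync the workspace into it and run the test script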
-./xci/scripts/vm/start-new-vm.sh $VM_DISTRO
-
-rsync -a -e "ssh -F $HOME/.ssh/${VM_DISTRO}-xci-vm-config" $WORKSPACE/ ${VM_DISTRO}_xci_vm:~/bifrost
-
-ssh -F $HOME/.ssh/${VM_DISTRO}-xci-vm-config ${VM_DISTRO}_xci_vm "cd ~/bifrost/releng-xci && ./bifrost_test.sh"
diff --git a/jjb/xci/osa-periodic-jobs.yaml b/jjb/xci/osa-periodic-jobs.yaml
deleted file mode 100644
index 6c2d165a4..000000000
--- a/jjb/xci/osa-periodic-jobs.yaml
+++ /dev/null
@@ -1,261 +0,0 @@
----
-- project:
- name: 'opnfv-osa-periodic'
-
- project: openstack-ansible
- # -------------------------------
- # branches
- # -------------------------------
- stream:
- - master:
- branch: '{stream}'
- # -------------------------------
- # distros
- # -------------------------------
- distro:
- - ubuntu:
- disabled: false
- - centos:
- disabled: false
- - opensuse:
- disabled: false
- # -------------------------------
- # type
- # -------------------------------
- type:
- - virtual
- # -------------------------------
- # periodic deploy & test phases
- # -------------------------------
- phase:
- - 'deploy'
- - 'healthcheck'
- # -------------------------------
- # jobs
- # -------------------------------
- jobs:
- - 'xci-osa-periodic-{distro}-{type}-{stream}'
- - 'xci-osa-periodic-{distro}-{phase}-{type}-{stream}'
-# -------------------------------
-# job templates
-# -------------------------------
-- job-template:
- name: 'xci-osa-periodic-{distro}-{type}-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'xci-verify-{distro}-.*'
- - 'bifrost-verify-{distro}-.*'
- - 'bifrost-periodic-{distro}-.*'
- - 'xci-osa-verify-{distro}-.*'
- - 'xci-osa-periodic-{distro}-.*'
- block-level: 'NODE'
- - throttle:
- max-per-node: 2
- max-total: 10
- categories:
- - xci-verify-virtual
- option: category
-
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- scm:
- - git-scm-openstack
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: 'xci-virtual'
- - string:
- name: OPENSTACK_OSA_VERSION
- default: 'master'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - string:
- name: GIT_BASE
- default: 'https://git.openstack.org/openstack/$PROJECT'
- description: 'Git URL to use on this Jenkins Slave'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: deploy
- condition: SUCCESSFUL
- projects:
- - name: 'xci-osa-periodic-{distro}-deploy-{type}-{stream}'
- current-parameters: true
- predefined-parameters: |
- DISTRO={distro}
- DEPLOY_SCENARIO=os-nosdn-nofeature-noha
- OPENSTACK_OSA_VERSION=$OPENSTACK_OSA_VERSION
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- BRANCH=$BRANCH
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: healthcheck
- condition: SUCCESSFUL
- projects:
- - name: 'xci-osa-periodic-{distro}-healthcheck-{type}-{stream}'
- current-parameters: true
- predefined-parameters: |
- DISTRO={distro}
- DEPLOY_SCENARIO=os-nosdn-nofeature-noha
- OPENSTACK_OSA_VERSION=$OPENSTACK_OSA_VERSION
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- FUNCTEST_MODE=tier
- FUNCTEST_TIER=healthcheck
- BRANCH=$BRANCH
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
-
-- job-template:
- name: 'xci-osa-periodic-{distro}-{phase}-{type}-{stream}'
-
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '.*-bifrost-verify-.*'
- - '.*-bifrost-periodic-.*'
- block-level: 'NODE'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: DISTRO
- default: 'ubuntu'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-noha'
- - string:
- name: OPENSTACK_OSA_VERSION
- default: 'master'
- - string:
- name: FUNCTEST_MODE
- default: 'tier'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'healthcheck'
- - string:
- name: XCI_FLAVOR
- default: 'mini'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - string:
- name: INSTALLER_TYPE
- default: 'osa'
- - string:
- name: GIT_BASE
- default: 'https://git.openstack.org/openstack/$PROJECT'
- description: 'Git URL to use on this Jenkins Slave'
-
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- scm:
- - git-scm-openstack
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - 'xci-osa-periodic-{phase}-macro'
-
-# -------------------------------
-# builder macros
-# -------------------------------
-- builder:
- name: 'xci-osa-periodic-deploy-macro'
- builders:
- - shell: |
- #!/bin/bash
-
- cd $WORKSPACE
-
- # The start-new-vm.sh script will copy the entire releng-xci directory,
- # so let's prepare the test script now so it gets copied along with it.
- # Please do not move it elsewhere or you will have to copy it to the
- # VM yourself.
- cat > xci_test.sh<<EOF
- #!/bin/bash
- export DISTRO=$DISTRO
- export DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- export OPENSTACK_OSA_VERSION=$OPENSTACK_OSA_VERSION
- export FUNCTEST_MODE=$FUNCTEST_MODE
- export FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- export XCI_FLAVOR=$XCI_FLAVOR
- export CORE_OPENSTACK_INSTALL=true
- export CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- export OPNFV_RELENG_DEV_PATH=/home/devuser/releng-xci/
- export INSTALLER_TYPE=$INSTALLER_TYPE
- export GIT_BASE=$GIT_BASE
- export JENKINS_HOME=$JENKINS_HOME
-
- cd xci
- ./xci-deploy.sh
- EOF
- chmod a+x xci_test.sh
-
- export XCI_BUILD_CLEAN_VM_OS=false
- export XCI_UPDATE_CLEAN_VM_OS=true
-
- ./xci/scripts/vm/start-new-vm.sh $DISTRO
- - shell: |
- #!/bin/bash
-
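- # run the previously generated xci_test.sh inside the clean VM over ssh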
- ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm "cd releng-xci && ./xci_test.sh"
-
-
-- builder:
- name: 'xci-osa-periodic-healthcheck-macro'
- builders:
- - shell: |
- #!/bin/bash
-
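- # placeholder step; the real healthcheck macro further below is still commented out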
- echo "Hello World!"
- - shell: |
- #!/bin/bash
-
- sudo virsh destroy ${DISTRO}_xci_vm || true
- sudo virsh undefine ${DISTRO}_xci_vm || true
-
-# this will be enabled once the xci is prepared
-# - builder:
-# name: 'xci-verify-healthcheck-macro'
-# builders:
-# - shell:
-# !include-raw: ../../utils/fetch_os_creds.sh
-# - shell:
-# !include-raw: ../functest/functest-alpine.sh
diff --git a/jjb/xci/xci-cleanup.sh b/jjb/xci/xci-cleanup.sh
deleted file mode 100755
index 51a863da2..000000000
--- a/jjb/xci/xci-cleanup.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 SUSE and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-#----------------------------------------------------------------------
-# This script is used by CI and executed by Jenkins jobs.
-# You are not supposed to use this script manually if you don't know
-# what you are doing.
-#----------------------------------------------------------------------
-
-# Need to cover macros with and without parameters
-VM_NAME=$DISTRO
-VM_NAME+=_xci_vm
-
-# skip the cleanup if the patch didn't impact the deployment
-if [[ "$GERRIT_TOPIC" =~ 'skip-verify' ]]; then
- echo "Skipping the cleanup!"
- exit 0
-fi
-
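-# tear down the test VM; ignore errors in case it was never created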
-sudo virsh destroy $VM_NAME || true
-sudo virsh undefine $VM_NAME || true
diff --git a/jjb/xci/xci-daily-jobs.yaml b/jjb/xci/xci-daily-jobs.yaml
deleted file mode 100644
index c1132ab85..000000000
--- a/jjb/xci/xci-daily-jobs.yaml
+++ /dev/null
@@ -1,271 +0,0 @@
----
-# -------------------------------
-# These jobs run on a daily basis and deploy OpenStack
-# using the pinned versions of opnfv/releng, openstack/bifrost
-# and openstack/openstack-ansible. Due to this, no
-# version/branch is set or passed to the jobs; instead, the versions
-# are checked out based on what is configured.
-# -------------------------------
-- project:
- project: 'releng-xci'
-
- name: 'xci-daily'
- # -------------------------------
- # Branch Anchors
- # -------------------------------
- master: &master
- stream: master
- opnfv-releng-version: master
- gs-pathname: ''
- # -------------------------------
- # Scenarios
- # -------------------------------
- scenario:
- - 'os-nosdn-nofeature-ha':
- auto-trigger-name: 'daily-trigger-disabled'
- xci-flavor: 'ha'
- - 'os-nosdn-nofeature-noha':
- auto-trigger-name: 'daily-trigger-disabled'
- xci-flavor: 'noha'
- - 'os-odl-sfc-ha':
- auto-trigger-name: 'daily-trigger-disabled'
- xci-flavor: 'ha'
- - 'os-odl-sfc-noha':
- auto-trigger-name: 'daily-trigger-disabled'
- xci-flavor: 'noha'
- # -------------------------------
- # XCI PODs
- # -------------------------------
- pod:
- - virtual:
- <<: *master
- # -------------------------------
- # Supported Distros
- # -------------------------------
- distro:
- - 'xenial':
- disabled: false
- slave-label: xci-xenial-virtual
- dib-os-release: 'xenial'
- dib-os-element: 'ubuntu-minimal'
- # yamllint disable rule:line-length
- dib-os-packages: 'vlan,vim,less,bridge-utils,sudo,language-pack-en,iputils-ping,rsyslog,curl,python,debootstrap,ifenslave,ifenslave-2.6,lsof,lvm2,tcpdump,nfs-kernel-server,chrony,iptables'
- # yamllint enable rule:line-length
- extra-dib-elements: 'openssh-server'
- - 'centos7':
- disabled: true
- slave-label: xci-centos7-virtual
- dib-os-release: '7'
- dib-os-element: 'centos7'
- dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
- extra-dib-elements: 'openssh-server'
- - 'suse':
- disabled: true
- slave-label: xci-suse-virtual
- dib-os-release: '42.3'
- dib-os-element: 'opensuse-minimal'
- dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
- extra-dib-elements: 'openssh-server'
-
- # -------------------------------
- # Phases
- # -------------------------------
- phase:
- - 'deploy'
- - 'functest'
- # -------------------------------
- # jobs
- # -------------------------------
- jobs:
- - 'xci-{scenario}-{pod}-{distro}-daily-{stream}'
- - 'xci-{phase}-{pod}-{distro}-daily-{stream}'
-
-# -------------------------------
-# job templates
-# -------------------------------
-- job-template:
- name: 'xci-{scenario}-{pod}-{distro}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '^xci-os.*'
- - '^xci-deploy.*'
- - '^xci-functest.*'
- - '^bifrost-.*periodic.*'
- - '^osa-.*periodic.*'
- block-level: 'NODE'
- - logrotate-default
-
- parameters:
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
- - string:
- name: XCI_FLAVOR
- default: '{xci-flavor}'
- - label:
- name: SLAVE_LABEL
- default: '{slave-label}'
- - string:
- name: CI_LOOP
- default: 'daily'
-
- triggers:
- - '{auto-trigger-name}'
-
- wrappers:
- - fix-workspace-permissions
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - trigger-builds:
- - project: 'xci-deploy-{pod}-{distro}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- XCI_FLAVOR=$XCI_FLAVOR
- CI_LOOP=$CI_LOOP
- same-node: true
- block: true
- - trigger-builds:
- - project: 'xci-functest-{pod}-{distro}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- XCI_FLAVOR=$XCI_FLAVOR
- CI_LOOP=$CI_LOOP
- same-node: true
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
-
- publishers:
- # yamllint disable rule:line-length
- - email:
- recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com julienjut@gmail.com
- # yamllint enable rule:line-length
- - email-jenkins-admins-on-failure
- - postbuildscript:
- builders:
- - role: BOTH
- build-on:
- - ABORTED
- - FAILURE
- - NOT_BUILT
- - SUCCESS
- - UNSTABLE
- build-steps:
- - shell: !include-raw: ./xci-cleanup.sh
- mark-unstable-if-failed: true
-
-- job-template:
- name: 'xci-{phase}-{pod}-{distro}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '^xci-deploy.*'
- - '^xci-functest.*'
- - '^bifrost-.*periodic.*'
- - '^osa-.*periodic.*'
- block-level: 'NODE'
- - logrotate-default
-
- wrappers:
- - fix-workspace-permissions
-
- scm:
- - git-scm
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{opnfv-releng-version}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
- - string:
- name: XCI_FLAVOR
- default: 'ha'
- - string:
- name: DISTRO
- default: '{distro}'
- - string:
- name: DIB_OS_RELEASE
- default: '{dib-os-release}'
- - string:
- name: DIB_OS_ELEMENT
- default: '{dib-os-element}'
- - string:
- name: DIB_OS_PACKAGES
- default: '{dib-os-packages}'
- - string:
- name: EXTRA_DIB_ELEMENTS
- default: '{extra-dib-elements}'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - label:
- name: SLAVE_LABEL
- default: '{slave-label}'
- - string:
- name: CI_LOOP
- default: 'daily'
- - string:
- name: INSTALLER_TYPE
- default: 'osa'
- - string:
- name: FUNCTEST_MODE
- default: 'daily'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'daily'
- description: "Daily suite name to run"
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - 'xci-{phase}-builder'
-
-# --------------------------
-# builder macros
-# --------------------------
-- builder:
- name: xci-deploy-builder
- builders:
- - shell:
- !include-raw: ./xci-deploy.sh
-
-- builder:
- name: xci-functest-builder
- builders:
- - shell: |
- #!/bin/bash
-
- echo "Hello World!"
-
-# this will be enabled once the xci is prepared
-# - builder:
-# name: xci-functest-builder
-# builders:
-# - shell:
-# !include-raw:
-# - ../../utils/fetch_os_creds.sh
-# - ../functest/functest-alpine.sh
diff --git a/jjb/xci/xci-deploy.sh b/jjb/xci/xci-deploy.sh
deleted file mode 100755
index 71cf96d8d..000000000
--- a/jjb/xci/xci-deploy.sh
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-cd $WORKSPACE/xci
-
-# for daily jobs, we want to use working versions
-# for periodic jobs, we will use whatever is set in the job, probably master
-if [[ "$CI_LOOP" == "daily" ]]; then
- # source pinned-vars to get releng version
- source ./config/pinned-versions
-
- # checkout the version
- git checkout -q $OPNFV_RELENG_VERSION
- echo "Info: Using $OPNFV_RELENG_VERSION"
-elif [[ "$CI_LOOP" == "periodic" ]]; then
- echo "Info: Using $OPNFV_RELENG_VERSION"
-fi
-
-# this is just an example to give an idea of what we need to do,
-# so ignore this part for the time being; xci-deploy.sh still needs to be
-# adjusted to take this into account while deploying
-# clone openstack-ansible
-# stable/ocata already use pinned versions so this is only valid for master
-if [[ "$CI_LOOP" == "periodic" && "$OPENSTACK_OSA_VERSION" == "master" ]]; then
- cd $WORKSPACE
- # get the url to openstack-ansible git
- source ./config/env-vars
- echo "Info: Capture the ansible role requirement versions before doing anything"
- git clone -q $OPENSTACK_OSA_GIT_URL
- cd openstack-ansible
- while IFS= read -r line
- do
- if [[ $line =~ "src:" ]]; then
- repo_url=$(echo $line | awk '{print $2}')
- repo_sha1=$(git ls-remote $repo_url $OPENSTACK_OSA_VERSION | awk '{print $1}')
- fi
- echo "$line" | sed -e "s|master|$repo_sha1|" >> opnfv-ansible-role-requirements.yml
- done < ansible-role-requirements.yml
- echo "Info: SHA1s of ansible role requirements"
- echo "-------------------------------------------------------------------------"
- cat opnfv-ansible-role-requirements.yml
- echo "-------------------------------------------------------------------------"
-fi
-
-# proceed with the deployment
-cd $WORKSPACE/xci
-./xci-deploy.sh
-
-if [[ "$JOB_NAME" =~ "periodic" && "$OPENSTACK_OSA_VERSION" == "master" ]]; then
- # if we arrived here without failing, it means we have something we can pin
- # this is again here to show the intention
- cd $WORKSPACE/openstack-ansible
- OSA_GIT_SHA1=$(git rev-parse HEAD)
-
- # log some info
- echo -e "\n"
- echo "***********************************************************************"
- echo "* OSA SHA1 TO PIN *"
- echo "* *"
- echo " $OSA_GIT_SHA1"
- echo "* *"
- echo "***********************************************************************"
-fi
-
-echo -e "\n"
diff --git a/jjb/xci/xci-merge-jobs.yaml b/jjb/xci/xci-merge-jobs.yaml
deleted file mode 100644
index cb438ad5d..000000000
--- a/jjb/xci/xci-merge-jobs.yaml
+++ /dev/null
@@ -1,476 +0,0 @@
----
-- project:
- name: 'opnfv-xci-merge'
-
- project: releng-xci
- # -------------------------------
- # branches
- # -------------------------------
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- # -------------------------------
- # distros
- # -------------------------------
- distro:
- - ubuntu:
- disabled: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - centos:
- disabled: true
- kill-phase-on: NEVER
- abort-all-job: false
- - opensuse:
- disabled: false
- kill-phase-on: FAILURE
- abort-all-job: true
- # -------------------------------
- # postmerge promotion phases
- # -------------------------------
- phase:
- - 'deploy'
- - 'healthcheck'
- # -------------------------------
- # jobs
- # -------------------------------
- jobs:
- - 'xci-merge-virtual-{stream}'
- - 'xci-merge-promote-virtual-{stream}'
- - 'xci-merge-{distro}-virtual-{stream}'
- - 'xci-merge-{distro}-{phase}-virtual-{stream}'
-# -------------------------------
-# job templates
-# -------------------------------
-- job-template:
- name: 'xci-merge-virtual-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
-
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'releng-xci'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'false'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'xci/scripts/vm/**'
- - compare-type: ANT
- pattern: 'docs/**'
- - compare-type: ANT
- pattern: 'prototypes/**'
- - compare-type: ANT
- pattern: 'upstream/**'
- - compare-type: ANT
- pattern: 'INFO.yaml'
- - project-compare-type: 'REG_EXP'
- project-pattern: 'sfc|sdnvpn|releng-xci-scenarios'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- file-paths:
- - compare-type: ANT
- pattern: 'scenarios/**'
- readable-message: true
- custom-url: '* $JOB_NAME $BUILD_URL'
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: 'xci-virtual'
- - string:
- name: CI_LOOP
- default: 'merge'
- - string:
- name: FUNCTEST_MODE
- default: 'tier'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'healthcheck'
- - string:
- name: DISTRO
- default: 'all'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- builders:
- - 'xci-merge-set-scenario-macro'
- - inject:
- properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
- - description-setter:
- description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
- - multijob:
- name: deploy and test
- condition: SUCCESSFUL
- projects:
- - name: 'xci-merge-opensuse-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- DISTRO=opensuse
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- CI_LOOP=$CI_LOOP
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - name: 'xci-merge-ubuntu-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- DISTRO=ubuntu
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- CI_LOOP=$CI_LOOP
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - name: 'xci-merge-centos-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- DISTRO=centos
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- CI_LOOP=$CI_LOOP
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- node-parameters: false
- kill-phase-on: NEVER
- abort-all-job: false
- - inject:
- properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
- - multijob:
- name: promote
- condition: SUCCESSFUL
- projects:
- - name: 'xci-merge-promote-virtual-{stream}'
- current-parameters: true
- predefined-parameters: |
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- CI_LOOP=$CI_LOOP
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
-
-- job-template:
- name: 'xci-merge-{distro}-virtual-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'xci-verify-{distro}-.*'
- - 'xci-merge-{distro}-.*'
- - 'openstack-bifrost-verify-{distro}-.*'
- - 'xci-osa-verify-{distro}-.*'
- - 'xci-osa-periodic-{distro}-.*'
- block-level: 'NODE'
- - throttle:
- max-per-node: 1
- max-total: 3
- categories:
- - xci-verify-virtual
- option: category
-
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- scm:
- - git-scm-gerrit
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: 'xci-virtual'
- - string:
- name: CI_LOOP
- default: 'merge'
- - string:
- name: FUNCTEST_MODE
- default: 'tier'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'healthcheck'
- - string:
- name: DISTRO
- default: '{distro}'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- builders:
- - 'xci-merge-set-scenario-macro'
- - inject:
- properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
- - description-setter:
- description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
- - multijob:
- name: deploy
- condition: SUCCESSFUL
- projects:
- - name: 'xci-merge-{distro}-deploy-virtual-{stream}'
- current-parameters: true
- predefined-parameters: |
- DISTRO={distro}
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: '{kill-phase-on}'
- abort-all-job: '{abort-all-job}'
- - multijob:
- name: healthcheck
- condition: SUCCESSFUL
- projects:
- - name: 'xci-merge-{distro}-healthcheck-virtual-{stream}'
- current-parameters: true
- predefined-parameters: |
- DISTRO={distro}
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- CI_LOOP=$CI_LOOP
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- node-parameters: true
- kill-phase-on: '{kill-phase-on}'
- abort-all-job: '{abort-all-job}'
-
- publishers:
- - postbuildscript:
- builders:
- - role: BOTH
- build-on:
- - ABORTED
- - FAILURE
- - NOT_BUILT
- - SUCCESS
- - UNSTABLE
- build-steps:
- - shell: !include-raw: ./xci-cleanup.sh
- mark-unstable-if-failed: true
-
-- job-template:
- name: 'xci-merge-{distro}-{phase}-virtual-{stream}'
-
- disabled: false
-
- concurrent: false
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '.*-bifrost-verify-.*'
- - '.*-bifrost-periodic-.*'
- - 'osa-verify-.*'
- - 'osa-periodic-.*'
- block-level: 'NODE'
-
- parameters:
- - string:
- name: PROJECT
- default: $GERRIT_PROJECT
- - string:
- name: DISTRO
- default: 'ubuntu'
- - string:
- name: CI_LOOP
- default: 'merge'
- - string:
- name: FUNCTEST_MODE
- default: 'tier'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'healthcheck'
- - string:
- name: XCI_FLAVOR
- default: 'mini'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - string:
- name: OPNFV_RELENG_DEV_PATH
- default: $WORKSPACE/
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- wrappers:
- - inject:
- properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- scm:
- - git-scm-gerrit
-
- builders:
- - description-setter:
- description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
- - 'xci-merge-{phase}-macro'
-
-- job-template:
- name: 'xci-merge-promote-virtual-{stream}'
-
- disabled: false
-
- concurrent: false
-
- properties:
- - logrotate-default
-
- parameters:
- - string:
- name: PROJECT
- default: $GERRIT_PROJECT
- - string:
- name: CI_LOOP
- default: 'merge'
- - string:
- name: FUNCTEST_MODE
- default: 'tier'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'healthcheck'
- - string:
- name: DISTRO
- default: 'all'
- - string:
- name: OPNFV_RELENG_DEV_PATH
- default: $WORKSPACE/
- - string:
- name: LOCAL_PROMOTION_METADATA_FILE
- default: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
- - string:
- name: REMOTE_PROMOTION_METADATA_FILE
- default: "gs://artifacts.opnfv.org/xci/pipeline/merge/$DEPLOY_SCENARIO.properties"
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- wrappers:
- - inject:
- properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- builders:
- - description-setter:
- description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
- - 'xci-merge-promote-macro'
-
-# -------------------------------
-# builder macros
-# -------------------------------
-- builder:
- name: 'xci-merge-set-scenario-macro'
- builders:
- - shell:
- !include-raw: ./xci-set-scenario.sh
-
-- builder:
- name: 'xci-merge-deploy-macro'
- builders:
- - shell:
- !include-raw: ./xci-start-new-vm.sh
-
- - shell:
- !include-raw: ./xci-start-deployment.sh
-
-- builder:
- name: 'xci-merge-healthcheck-macro'
- builders:
- - shell:
- !include-raw: ./xci-run-functest.sh
-
-- builder:
- name: 'xci-merge-promote-macro'
- builders:
- - shell:
- !include-raw: ./xci-promote.sh
diff --git a/jjb/xci/xci-promote.sh b/jjb/xci/xci-promote.sh
deleted file mode 100755
index d8d61bee2..000000000
--- a/jjb/xci/xci-promote.sh
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 Ericsson and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-#----------------------------------------------------------------------
-# This script is used by CI and executed by Jenkins jobs.
-# You are not supposed to use this script manually if you don't know
-# what you are doing.
-#----------------------------------------------------------------------
-
-# ensure GERRIT_TOPIC is set
-GERRIT_TOPIC="${GERRIT_TOPIC:-''}"
-
-# skip the promotion if the patch doesn't impact the deployment
-if [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment ]]; then
- echo "Skipping the promotion!"
- exit 0
-fi
-
-# fail if promotion metadata file doesn't exist
-if [ ! -f $LOCAL_PROMOTION_METADATA_FILE ]; then
- echo "Unable to find promotion metadata file $LOCAL_PROMOTION_METADATA_FILE"
- echo "Skipping promotion!"
- exit 1
-fi
-
-# put additional info into the metadata file so we can display it later
-echo "PROMOTED_BY=$BUILD_URL" >> $LOCAL_PROMOTION_METADATA_FILE
-echo "PROMOTED_ON=$(date -u '+%F_%H:%M'UTC)" >> $LOCAL_PROMOTION_METADATA_FILE
-
-# upload promotion metadata file to OPNFV artifact repo
-echo "Storing promotion metadata as $REMOTE_PROMOTION_METADATA_FILE"
-gsutil cp $LOCAL_PROMOTION_METADATA_FILE $REMOTE_PROMOTION_METADATA_FILE > /dev/null 2>&1
-
-# update the file metadata on gs to prevent the use of cached version of the file
-gsutil -m setmeta -r -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- $REMOTE_PROMOTION_METADATA_FILE > /dev/null 2>&1
-
-# log the metadata to console
-echo "Stored the metadata for $DEPLOY_SCENARIO"
-echo "---------------------------------------------------------------------------------"
-gsutil cat $REMOTE_PROMOTION_METADATA_FILE
-echo "---------------------------------------------------------------------------------"
-echo "Scenario $DEPLOY_SCENARIO has successfully been promoted!"
diff --git a/jjb/xci/xci-run-functest.sh b/jjb/xci/xci-run-functest.sh
deleted file mode 100755
index f46abe043..000000000
--- a/jjb/xci/xci-run-functest.sh
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 SUSE and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o nounset
-
-#----------------------------------------------------------------------
-# This script is used by CI and executed by Jenkins jobs.
-# You are not supposed to use this script manually if you don't know
-# what you are doing.
-#----------------------------------------------------------------------
-
-# ensure GERRIT_TOPIC is set
-GERRIT_TOPIC="${GERRIT_TOPIC:-''}"
-
-# skip the healthcheck if the patch doesn't impact the deployment
-if [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment ]]; then
- echo "Skipping the healthcheck!"
- exit 0
-fi
-
-# If the scenario comes from an external project, we need to wipe WORKSPACE and place releng-xci
-# there, since the scenario project is cloned and the patch checked out under
-# xci/scenarios/$DEPLOY_SCENARIO so it can be synced onto the clean VM.
-# Apart from that, we need the releng-xci bits in WORKSPACE for things to function correctly on Jenkins.
-# If the change targets releng-xci itself, we don't need to do anything since the patch is already
-# checked out into WORKSPACE.
-if [[ $GERRIT_PROJECT != "releng-xci" ]]; then
- cd $HOME && /bin/rm -rf $WORKSPACE
- git clone https://gerrit.opnfv.org/gerrit/releng-xci $WORKSPACE && cd $WORKSPACE
- chmod -R go-rwx $WORKSPACE/xci/scripts/vm
-fi
-
-# skip the deployment if the scenario is not supported on this distro
-OPNFV_SCENARIO_REQUIREMENTS=$WORKSPACE/xci/opnfv-scenario-requirements.yml
-if ! sed -n "/^- scenario: $DEPLOY_SCENARIO$/,/^$/p" $OPNFV_SCENARIO_REQUIREMENTS | grep -q $DISTRO; then
- echo "# SKIPPED: Scenario $DEPLOY_SCENARIO is NOT supported on $DISTRO"
- exit 0
-fi
-
-# set XCI_VENV for ansible
-export XCI_PATH=/home/devuser/releng-xci
-export XCI_VENV=${XCI_PATH}/venv
-
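-# prepare functest inside the deployment VM using the variables recorded in .cache/xci.env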
-ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm "source $XCI_VENV/bin/activate; while read var; do declare -x \"\${var}\" 2>/dev/null; done < ${XCI_PATH}/.cache/xci.env && cd releng-xci/xci && ansible-playbook -i playbooks/dynamic_inventory.py playbooks/prepare-functest.yml"
-echo "Running functest"
-ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm_opnfv "/root/run-functest.sh"
-# Record exit code
-functest_exit=$?
-
-case ${DEPLOY_SCENARIO[0]} in
- os-*)
- FUNCTEST_LOG=/root/results/functest.log
- ;;
- k8-*)
- FUNCTEST_LOG=/root/results/functest-kubernetes.log
- ;;
- *)
- echo "Unable to determine the installer. Exiting!"
- exit 1
- ;;
-esac
-
-echo "Functest log"
-echo "---------------------------------------------------------------------------------"
-ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm_opnfv "cat $FUNCTEST_LOG"
-echo "---------------------------------------------------------------------------------"
-exit ${functest_exit}
diff --git a/jjb/xci/xci-set-scenario.sh b/jjb/xci/xci-set-scenario.sh
deleted file mode 100755
index 7bc45f1c2..000000000
--- a/jjb/xci/xci-set-scenario.sh
+++ /dev/null
@@ -1,214 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 SUSE and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o pipefail
-
-#----------------------------------------------------------------------
-# This script is used by CI and executed by Jenkins jobs.
-# You are not supposed to use this script manually if you don't know
-# what you are doing.
-#----------------------------------------------------------------------
-
-# This function allows developers to specify the impacted scenario by adding
-# the installer and scenario info to the commit message or by using
-# the topic branch names. This results in either skipping the real verification
-# entirely or skipping the programmatic determination of the installer and scenario.
-# It is important to note that this feature is only available to generic scenarios
-# and only a single installer/scenario pair is allowed.
-# The input in the commit message should be placed at the end of the commit message body,
-# before the Signed-off-by and Change-Id lines.
-#
-# Pattern to be searched in Commit Message
-# deploy-scenario:<scenario-name>
-# installer-type:<installer-type>
-# Examples:
-# deploy-scenario:os-odl-nofeature
-# installer-type:osa
-#
-# deploy-scenario:k8-nosdn-nofeature
-# installer-type:kubespray
-#
-# Patterns to be searched in topic branch name
-# skip-verify
-# skip-deployment
-# force-verify
-function override_scenario() {
- echo "Processing $GERRIT_PROJECT patchset $GERRIT_REFSPEC"
-
- # ensure the metadata we record is consistent for all types of patches including skipped ones
- # extract releng-xci sha
- XCI_SHA=$(cd $WORKSPACE && git rev-parse HEAD)
-
- # extract scenario sha which is same as releng-xci sha for generic scenarios
- SCENARIO_SHA=$XCI_SHA
-
- # process topic branch names
- if [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment|force-verify ]]; then
- [[ "$GERRIT_TOPIC" =~ force-verify ]] && echo "Forcing CI verification using default scenario and installer!"
- [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment ]] && echo "Skipping verification!"
- echo "INSTALLER_TYPE=osa" > $WORK_DIRECTORY/scenario.properties
- echo "DEPLOY_SCENARIO=os-nosdn-nofeature" >> $WORK_DIRECTORY/scenario.properties
- echo "XCI_SHA=$XCI_SHA" >> $WORK_DIRECTORY/scenario.properties
- echo "SCENARIO_SHA=$SCENARIO_SHA" >> $WORK_DIRECTORY/scenario.properties
- echo "PROJECT_NAME=$GERRIT_PROJECT" >> $WORK_DIRECTORY/scenario.properties
- exit 0
- fi
-
- # process commit message
- if [[ "$GERRIT_CHANGE_COMMIT_MESSAGE" =~ "installer-type:" && "$GERRIT_CHANGE_COMMIT_MESSAGE" =~ "deploy-scenario:" ]]; then
- INSTALLER_TYPE=$(echo $GERRIT_CHANGE_COMMIT_MESSAGE | awk '/installer-type:/' RS=" " | cut -d":" -f2)
- DEPLOY_SCENARIO=$(echo $GERRIT_CHANGE_COMMIT_MESSAGE | awk '/deploy-scenario:/' RS=" " | cut -d":" -f2)
-
- if [[ -z "$INSTALLER_TYPE" || -z "$DEPLOY_SCENARIO" ]]; then
- echo "Installer type or deploy scenario is not specified. Falling back to programmatically determining them."
- else
- echo "Recording the installer '$INSTALLER_TYPE' and scenario '$DEPLOY_SCENARIO' for downstream jobs"
- echo "INSTALLER_TYPE=$INSTALLER_TYPE" > $WORK_DIRECTORY/scenario.properties
- echo "DEPLOY_SCENARIO=$DEPLOY_SCENARIO" >> $WORK_DIRECTORY/scenario.properties
- echo "XCI_SHA=$XCI_SHA" >> $WORK_DIRECTORY/scenario.properties
- echo "SCENARIO_SHA=$SCENARIO_SHA" >> $WORK_DIRECTORY/scenario.properties
- echo "PROJECT_NAME=$GERRIT_PROJECT" >> $WORK_DIRECTORY/scenario.properties
- exit 0
- fi
- else
- echo "Installer type or deploy scenario is not specified. Falling back to programmatically determining them."
- fi
-}
-
-# This function determines the default scenario for changes coming to releng-xci
-# by processing the Gerrit change and using diff to see what changed.
-#
-# The files in releng-xci cover the installers and other common pieces, so the
-# determination is based on those.
-#
-# Pattern
-# releng-xci/installer/<installer_type>/<impacted files>: <scenario>
-function determine_default_scenario() {
- echo "Processing $GERRIT_PROJECT patchset $GERRIT_REFSPEC"
-
- # get the changeset
- cd $WORKSPACE
- # We need to set a default scenario for changes that touch the installers
- INSTALLERS=$(git diff HEAD^..HEAD --name-only -- 'xci/installer' | cut -d "/" -f 3 | uniq)
- for CHANGED_INSTALLER in $INSTALLERS; do
- case $CHANGED_INSTALLER in
- kubespray)
- DEPLOY_SCENARIO[${#DEPLOY_SCENARIO[@]}]='k8-nosdn-nofeature'
- ;;
- # Default case (including OSA changes)
- *)
- DEPLOY_SCENARIO[${#DEPLOY_SCENARIO[@]}]='os-nosdn-nofeature'
- ;;
- esac
- done
- # For all other changes, we only need to set a default scenario if it's not set already
- if git diff HEAD^..HEAD --name-only | grep -q -v 'xci/installer'; then
- [[ ${#DEPLOY_SCENARIO[@]} -eq 0 ]] && DEPLOY_SCENARIO[${#DEPLOY_SCENARIO[@]}]='os-nosdn-nofeature'
- fi
-
- # extract releng-xci sha
- XCI_SHA=$(cd $WORKSPACE && git rev-parse HEAD)
-
- # TODO: we need to fix this so we actually extract the scenario sha by cloning releng-xci-scenarios
- # for the determined scenario. it is crucial for promotion...
- SCENARIO_SHA=$XCI_SHA
-}
-
-# This function determines the impacted scenario by processing the Gerrit
-# change and using diff to see what changed. If changed files belong to a scenario
-# its name gets recorded for deploying and testing the right scenario.
-#
-# Pattern
-# <project-repo>/scenarios/<scenario>/<impacted files>: <scenario>
-function determine_scenario() {
- echo "Processing $GERRIT_PROJECT patchset $GERRIT_REFSPEC"
-
- # remove the clone done by Jenkins and place releng-xci there so that
- # things continue functioning properly
- cd $HOME && /bin/rm -rf $WORKSPACE
- git clone -q https://gerrit.opnfv.org/gerrit/releng-xci $WORKSPACE && cd $WORKSPACE
-
- # fix the permissions so ssh doesn't complain due to having world-readable keyfiles
- chmod -R go-rwx $WORKSPACE/xci/scripts/vm
-
- # clone the project repo and fetch the patchset to process for further processing
- git clone -q https://gerrit.opnfv.org/gerrit/$GERRIT_PROJECT $WORK_DIRECTORY/$GERRIT_PROJECT
- cd $WORK_DIRECTORY/$GERRIT_PROJECT
- git fetch -q https://gerrit.opnfv.org/gerrit/$GERRIT_PROJECT $GERRIT_REFSPEC && git checkout -q FETCH_HEAD
-
- # process the diff to find out what scenario(s) are impacted - there should only be 1
- DEPLOY_SCENARIO+=$(git diff HEAD^..HEAD --name-only | grep scenarios | awk -F '[/|/]' '{print $2}' | uniq)
-
- # extract releng-xci sha
- XCI_SHA=$(cd $WORKSPACE && git rev-parse HEAD)
-
- # extract scenario sha
- SCENARIO_SHA=$(cd $WORK_DIRECTORY/$GERRIT_PROJECT && git rev-parse HEAD)
-}
-
-echo "Determining the impacted scenario"
-
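-# DEPLOY_SCENARIO is an array so we can detect changes that impact more than one scenario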
-declare -a DEPLOY_SCENARIO
-
-# ensure GERRIT_TOPIC is set
-GERRIT_TOPIC="${GERRIT_TOPIC:-''}"
-
-# this directory is where the temporary clones and files are created
-# while extracting the impacted scenario
-WORK_DIRECTORY=/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO
-/bin/rm -rf $WORK_DIRECTORY && mkdir -p $WORK_DIRECTORY
-
-if [[ $GERRIT_PROJECT == "releng-xci" ]]; then
- determine_default_scenario
-fi
-override_scenario
-determine_scenario
-
-# ensure single scenario is impacted
- if [[ $(IFS=$'\n' echo ${DEPLOY_SCENARIO[@]} | wc -w) != 1 ]]; then
- echo "Change impacts multiple scenarios!"
- echo "XCI doesn't support testing of changes that impact multiple scenarios currently."
- echo "Please split your change into multiple different/dependent changes, each modifying single scenario."
- exit 1
-fi
-
-# set the installer
-case ${DEPLOY_SCENARIO[0]} in
- os-*)
- INSTALLER_TYPE=osa
- ;;
- k8-*)
- INSTALLER_TYPE=kubespray
- ;;
- *)
- echo "Unable to determine the installer. Exiting!"
- exit 1
- ;;
-esac
-
-# save the installer and scenario names into java properties file
-# so they can be injected to downstream jobs via envInject
-echo "Recording the installer '$INSTALLER_TYPE' and scenario '${DEPLOY_SCENARIO[0]}' and SHAs for downstream jobs"
-echo "INSTALLER_TYPE=$INSTALLER_TYPE" > $WORK_DIRECTORY/scenario.properties
-echo "DEPLOY_SCENARIO=$DEPLOY_SCENARIO" >> $WORK_DIRECTORY/scenario.properties
-echo "XCI_SHA=$XCI_SHA" >> $WORK_DIRECTORY/scenario.properties
-echo "SCENARIO_SHA=$SCENARIO_SHA" >> $WORK_DIRECTORY/scenario.properties
-echo "PROJECT_NAME=$GERRIT_PROJECT" >> $WORK_DIRECTORY/scenario.properties
-
-# skip the scenario support check if the job is a promotion job
-if [[ "$JOB_NAME" =~ (os|k8) ]]; then
- exit 0
-fi
-
-# skip the deployment if the scenario is not supported on this distro
-OPNFV_SCENARIO_REQUIREMENTS=$WORKSPACE/xci/opnfv-scenario-requirements.yml
-if ! sed -n "/^- scenario: ${DEPLOY_SCENARIO[0]}$/,/^$/p" $OPNFV_SCENARIO_REQUIREMENTS | grep -q $DISTRO; then
- echo "# SKIPPED: Scenario ${DEPLOY_SCENARIO[0]} is NOT supported on $DISTRO"
- exit 0
-fi
diff --git a/jjb/xci/xci-start-deployment.sh b/jjb/xci/xci-start-deployment.sh
deleted file mode 100755
index 102ca41c3..000000000
--- a/jjb/xci/xci-start-deployment.sh
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 SUSE and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-#----------------------------------------------------------------------
-# This script is used by CI and executed by Jenkins jobs.
-# You are not supposed to use this script manually if you don't know
-# what you are doing.
-#----------------------------------------------------------------------
-
-# ensure GERRIT_TOPIC is set
-GERRIT_TOPIC="${GERRIT_TOPIC:-''}"
-
-# skip the deployment if the patch doesn't impact the deployment
-if [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment ]]; then
- echo "Skipping the deployment!"
- exit 0
-fi
-
-# If the scenario comes from an external project, we need to wipe WORKSPACE and place releng-xci
-# there, since the scenario project is cloned and the patch checked out under
-# xci/scenarios/$DEPLOY_SCENARIO so it can be synced onto the clean VM.
-# Apart from that, we need the releng-xci bits in WORKSPACE for things to function correctly on Jenkins.
-# If the change targets releng-xci itself, we don't need to do anything since the patch is already
-# checked out into WORKSPACE.
-if [[ $GERRIT_PROJECT != "releng-xci" ]]; then
- cd $HOME && /bin/rm -rf $WORKSPACE
- git clone https://gerrit.opnfv.org/gerrit/releng-xci $WORKSPACE && cd $WORKSPACE
- chmod -R go-rwx $WORKSPACE/xci/scripts/vm
-fi
-
-# skip the deployment if the scenario is not supported on this distro
-OPNFV_SCENARIO_REQUIREMENTS=$WORKSPACE/xci/opnfv-scenario-requirements.yml
-if ! sed -n "/^- scenario: $DEPLOY_SCENARIO$/,/^$/p" $OPNFV_SCENARIO_REQUIREMENTS | grep -q $DISTRO; then
- echo "# SKIPPED: Scenario $DEPLOY_SCENARIO is NOT supported on $DISTRO"
- exit 0
-fi
-
-ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm "cd releng-xci && ./xci_test.sh"
diff --git a/jjb/xci/xci-start-new-vm.sh b/jjb/xci/xci-start-new-vm.sh
deleted file mode 100755
index 79b6a785a..000000000
--- a/jjb/xci/xci-start-new-vm.sh
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 SUSE and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-#----------------------------------------------------------------------
-# This script is used by CI and executed by Jenkins jobs.
-# You are not supposed to use this script manually if you don't know
-# what you are doing.
-#----------------------------------------------------------------------
-
-# ensure GERRIT_TOPIC is set
-GERRIT_TOPIC="${GERRIT_TOPIC:-''}"
-
-# skip the deployment if the patch doesn't impact the deployment
-if [[ "$GERRIT_TOPIC" =~ 'skip-verify' ]]; then
- echo "Skipping the deployment!"
- exit 0
-fi
-
-# If the scenario comes from an external project, we need to wipe WORKSPACE and place releng-xci
-# there, since the scenario project is cloned and the patch checked out under
-# xci/scenarios/$DEPLOY_SCENARIO so it can be synced onto the clean VM.
-# Apart from that, we need the releng-xci bits in WORKSPACE for things to function correctly on Jenkins.
-# If the change targets releng-xci itself, we don't need to do anything since the patch is already
-# checked out into WORKSPACE.
-if [[ $GERRIT_PROJECT != "releng-xci" ]]; then
- cd $HOME && /bin/rm -rf $WORKSPACE
- git clone https://gerrit.opnfv.org/gerrit/releng-xci $WORKSPACE && cd $WORKSPACE
- chmod -R go-rwx $WORKSPACE/xci/scripts/vm
-fi
-
-# skip the deployment if the scenario is not supported on this distro
-OPNFV_SCENARIO_REQUIREMENTS=$WORKSPACE/xci/opnfv-scenario-requirements.yml
-if ! sed -n "/^- scenario: $DEPLOY_SCENARIO$/,/^$/p" $OPNFV_SCENARIO_REQUIREMENTS | grep -q $DISTRO; then
- echo "# SKIPPED: Scenario $DEPLOY_SCENARIO is NOT supported on $DISTRO"
- exit 0
-fi
-
-cd $WORKSPACE
-
-# The start-new-vm.sh script will copy the entire releng-xci directory,
-# so let's prepare the test script now so it gets copied along with it.
-# Please do not move it elsewhere or you will have to copy it to the
-# VM yourself.
-cat > xci_test.sh<<EOF
-#!/bin/bash
-set -o pipefail
-export DISTRO=$DISTRO
-export DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-export FUNCTEST_MODE=$FUNCTEST_MODE
-export FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
-export XCI_FLAVOR=$XCI_FLAVOR
-export CORE_OPENSTACK_INSTALL=true
-export BIFROST_USE_PREBUILT_IMAGES=true
-export CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
-export OPNFV_RELENG_DEV_PATH=/home/devuser/releng-xci/
-export INSTALLER_TYPE=$INSTALLER_TYPE
-export GIT_BASE=$GIT_BASE
-export JENKINS_HOME=$JENKINS_HOME
-export CI_LOOP=$CI_LOOP
-export BUILD_TAG=$BUILD_TAG
-export NODE_NAME=$NODE_NAME
-
-if [[ $GERRIT_PROJECT != "releng-xci" ]]; then
- export XCI_ANSIBLE_PARAMS="-e @/home/devuser/releng-xci/scenario_overrides.yml"
-fi
-
-cd xci
-./xci-deploy.sh | ts
-EOF
-
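-# for changes coming from scenario repos, record which scenario patchset to check out during deployment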
-if [[ $GERRIT_PROJECT != "releng-xci" ]]; then
- cat > scenario_overrides.yml <<-EOF
----
-xci_scenarios_overrides:
- - scenario: $DEPLOY_SCENARIO
- version: $GERRIT_PATCHSET_REVISION
- refspec: $GERRIT_REFSPEC
-EOF
-fi
-
-chmod a+x xci_test.sh
-
-export XCI_BUILD_CLEAN_VM_OS=false
-export XCI_UPDATE_CLEAN_VM_OS=true
-
-./xci/scripts/vm/start-new-vm.sh $DISTRO
diff --git a/jjb/xci/xci-verify-jobs.yaml b/jjb/xci/xci-verify-jobs.yaml
deleted file mode 100644
index baff0c99e..000000000
--- a/jjb/xci/xci-verify-jobs.yaml
+++ /dev/null
@@ -1,307 +0,0 @@
----
-- project:
- name: 'opnfv-xci-verify'
-
- project: releng-xci
- # -------------------------------
- # branches
- # -------------------------------
- stream:
- - master:
- branch: '{stream}'
- # -------------------------------
- # distros
- # -------------------------------
- distro:
- - ubuntu:
- disabled: false
- successful: false
- failed: false
- unstable: false
- notbuilt: false
- - centos:
- disabled: true
- successful: true
- failed: true
- unstable: true
- notbuilt: true
- - opensuse:
- disabled: false
- successful: false
- failed: false
- unstable: false
- notbuilt: false
- # -------------------------------
- # type
- # -------------------------------
- type:
- - virtual
- # -------------------------------
- # patch verification phases
- # -------------------------------
- phase:
- - 'deploy'
- - 'healthcheck'
- # -------------------------------
- # jobs
- # -------------------------------
- jobs:
- - 'xci-verify-{distro}-{type}-{stream}'
- - 'xci-verify-{distro}-{phase}-{type}-{stream}'
-# -------------------------------
-# job templates
-# -------------------------------
-- job-template:
- name: 'xci-verify-{distro}-{type}-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'xci-verify-{distro}-.*'
- - 'xci-merge-{distro}-.*'
- - 'openstack-bifrost-verify-{distro}-.*'
- - 'xci-osa-verify-{distro}-.*'
- - 'xci-osa-periodic-{distro}-.*'
- block-level: 'NODE'
- - throttle:
- max-per-node: 2
- max-total: 10
- categories:
- - xci-verify-virtual
- option: category
-
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'true'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'releng-xci'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'false'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'xci/scripts/vm/**'
- - compare-type: ANT
- pattern: 'docs/**'
- - compare-type: ANT
- pattern: 'upstream/**'
- - project-compare-type: 'REG_EXP'
- project-pattern: 'sfc|sdnvpn|releng-xci-scenarios'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- file-paths:
- - compare-type: ANT
- pattern: 'scenarios/**'
- readable-message: true
- custom-url: '* $JOB_NAME $BUILD_URL'
- skip-vote:
- successful: '{obj:successful}'
- failed: '{obj:failed}'
- unstable: '{obj:unstable}'
- notbuilt: '{obj:notbuilt}'
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: 'xci-virtual'
- - string:
- name: DISTRO
- default: '{distro}'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - string:
- name: CI_LOOP
- default: 'verify'
- - string:
- name: FUNCTEST_MODE
- default: 'tier'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'healthcheck'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- builders:
- - 'xci-verify-set-scenario-macro'
- - inject:
- properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
- - description-setter:
- description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
- - multijob:
- name: deploy
- condition: SUCCESSFUL
- projects:
- - name: 'xci-verify-{distro}-deploy-{type}-{stream}'
- current-parameters: true
- predefined-parameters: |
- DISTRO={distro}
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- CI_LOOP=$CI_LOOP
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: healthcheck
- condition: SUCCESSFUL
- projects:
- - name: 'xci-verify-{distro}-healthcheck-{type}-{stream}'
- current-parameters: true
- predefined-parameters: |
- DISTRO={distro}
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- CI_LOOP=$CI_LOOP
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
-
- publishers:
- - postbuildscript:
- builders:
- - role: BOTH
- build-on:
- - ABORTED
- - FAILURE
- - NOT_BUILT
- - SUCCESS
- - UNSTABLE
- build-steps:
- - shell: !include-raw: ./xci-cleanup.sh
- mark-unstable-if-failed: true
-
-
-- job-template:
- name: 'xci-verify-{distro}-{phase}-{type}-{stream}'
-
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '.*-bifrost-verify-.*'
- - '.*-bifrost-periodic-.*'
- - 'osa-verify-.*'
- - 'osa-periodic-.*'
- block-level: 'NODE'
-
- parameters:
- - string:
- name: PROJECT
- default: $GERRIT_PROJECT
- - string:
- name: DISTRO
- default: 'ubuntu'
- - string:
- name: CI_LOOP
- default: 'verify'
- - string:
- name: FUNCTEST_MODE
- default: 'tier'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'healthcheck'
- - string:
- name: XCI_FLAVOR
- default: 'mini'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - string:
- name: OPNFV_RELENG_DEV_PATH
- default: $WORKSPACE/
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- wrappers:
- - inject:
- properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- scm:
- - git-scm-gerrit
-
- builders:
- - description-setter:
- description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
- - 'xci-verify-{phase}-macro'
-
-# -------------------------------
-# builder macros
-# -------------------------------
-- builder:
- name: 'xci-verify-set-scenario-macro'
- builders:
- - shell:
- !include-raw: ./xci-set-scenario.sh
-
-- builder:
- name: 'xci-verify-deploy-macro'
- builders:
- - shell:
- !include-raw: ./xci-start-new-vm.sh
-
- - shell:
- !include-raw: ./xci-start-deployment.sh
-
-- builder:
- name: 'xci-verify-healthcheck-macro'
- builders:
- - shell:
- !include-raw: ./xci-run-functest.sh
diff --git a/jjb/yardstick/yardstick-cleanup.sh b/jjb/yardstick/yardstick-cleanup.sh
deleted file mode 100755
index 47bf9bd10..000000000
--- a/jjb/yardstick/yardstick-cleanup.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-[[ ${CI_DEBUG} == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
-# Remove dangling opnfv/yardstick*:<none> images along with their containers
-dangling_images=($(docker images -f "dangling=true" | awk '/opnfv[/]yardstick/ {print $3}'))
-if [[ ${#dangling_images[@]} -gt 0 ]] ; then
- echo "Removing opnfv/yardstick:<none> images and their containers..."
- for image_id in "${dangling_images[@]}"; do
- echo " Removing image_id: $image_id and its containers"
- containers=$(docker ps -a | awk "/${image_id}/ {print \$1}")
- if [[ -n "$containers" ]];then
- docker rm -f "${containers}" >${redirect}
- fi
- docker rmi "${image_id}" >${redirect}
- done
-fi
-
-echo "Cleaning up docker containers/images..."
-# Remove previous running containers if exist
-if docker ps -a | grep -q opnfv/yardstick; then
- echo "Removing existing opnfv/yardstick containers..."
-    docker ps -a | awk '/opnfv[/]yardstick/ {print $1}' | xargs docker rm -f >${redirect}
-
-fi
-
-# Remove existing images if exist
-if docker images | grep -q opnfv/yardstick; then
- echo "Docker images to remove:"
- docker images | head -1 && docker images | grep opnfv/yardstick
- image_ids=($(docker images | awk '/opnfv[/]yardstick/ {print $3}'))
- for id in "${image_ids[@]}"; do
- echo "Removing docker image id $id..."
- docker rmi "${id}" >${redirect}
- done
-fi
-
diff --git a/jjb/yardstick/yardstick-daily-jobs.yaml b/jjb/yardstick/yardstick-daily-jobs.yaml
deleted file mode 100644
index c5a35086f..000000000
--- a/jjb/yardstick/yardstick-daily-jobs.yaml
+++ /dev/null
@@ -1,548 +0,0 @@
----
-###################################
-# job configuration for yardstick
-###################################
-- project:
- name: yardstick
-
- project: '{name}'
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- docker-tag: 'latest'
- fraser: &fraser
- stream: fraser
- branch: 'stable/{stream}'
- gs-pathname: '{stream}'
- docker-tag: 'stable'
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # Installers using labels
- # CI PODs
- # This section should only contain the installers
- # that have been switched using labels for slaves
- # -------------------------------
- pod:
- # apex CI PODs
- - virtual:
- slave-label: apex-virtual-master
- installer: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - baremetal:
- slave-label: apex-baremetal-master
- installer: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - virtual:
- slave-label: apex-virtual-fraser
- installer: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- - baremetal:
- slave-label: apex-baremetal-fraser
- installer: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- # fuel CI PODs
- - baremetal:
- slave-label: fuel-baremetal
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - virtual:
- slave-label: fuel-virtual
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - baremetal:
- slave-label: fuel-baremetal
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- - virtual:
- slave-label: fuel-virtual
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- # armband CI PODs
- - armband-baremetal:
- slave-label: armband-baremetal
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - armband-virtual:
- slave-label: armband-virtual
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - armband-baremetal:
- slave-label: armband-baremetal
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- - armband-virtual:
- slave-label: armband-virtual
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- # joid CI PODs
- - baremetal:
- slave-label: joid-baremetal
- installer: joid
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - virtual:
- slave-label: joid-virtual
- installer: joid
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - baremetal:
- slave-label: joid-baremetal
- installer: joid
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- - virtual:
- slave-label: joid-virtual
- installer: joid
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- # compass CI PODs
- - baremetal:
- slave-label: compass-baremetal
- installer: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - virtual:
- slave-label: compass-virtual
- installer: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - baremetal:
- slave-label: compass-baremetal
- installer: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- - virtual:
- slave-label: compass-virtual
- installer: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- # daisy CI PODs
- - baremetal:
- slave-label: daisy-baremetal
- installer: daisy
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - baremetal:
- slave-label: daisy-baremetal
- installer: daisy
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- - virtual:
- slave-label: daisy-virtual
- installer: daisy
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- # -------------------------------
-      # Non-CI PODs
- # -------------------------------
- - orange-pod1:
- slave-label: '{pod}'
- installer: joid
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - zte-pod1:
- slave-label: '{pod}'
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - zte-pod1:
- slave-label: '{pod}'
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- - zte-pod2:
- slave-label: '{pod}'
- installer: daisy
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - zte-pod3:
- slave-label: '{pod}'
- installer: daisy
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - zte-pod3:
- slave-label: '{pod}'
- installer: daisy
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- - zte-pod9:
- slave-label: '{pod}'
- installer: daisy
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - zte-pod9:
- slave-label: '{pod}'
- installer: daisy
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *fraser
- - orange-pod2:
- slave-label: '{pod}'
- installer: joid
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - huawei-pod3:
- slave-label: '{pod}'
- installer: compass
- auto-trigger-name: 'yardstick-daily-huawei-pod3-trigger'
- <<: *master
- - huawei-pod4:
- slave-label: '{pod}'
- installer: compass
- auto-trigger-name: 'yardstick-daily-huawei-pod4-trigger'
- <<: *master
- - baremetal-centos:
- slave-label: 'intel-pod8'
- installer: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - flex-pod1:
- slave-label: '{pod}'
- installer: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- # -------------------------------
- testsuite:
- - 'daily'
-
- jobs:
- - 'yardstick-{installer}-{pod}-{testsuite}-{stream}'
-
-################################
-# job templates
-################################
-- job-template:
- name: 'yardstick-{installer}-{pod}-{testsuite}-{stream}'
-
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: 180
- abort: true
-
- triggers:
- - '{auto-trigger-name}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults'
- - 'yardstick-params-{slave-label}'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl_l2-nofeature-ha'
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: 'Tag to pull docker image'
- - string:
- name: YARDSTICK_SCENARIO_SUITE_NAME
- default: opnfv_${{DEPLOY_SCENARIO}}_{testsuite}.yaml
- description: 'Path to test scenario suite'
- - string:
- name: CI_DEBUG
- default: 'false'
-          description: "Show debug output information"
-
- scm:
- - git-scm
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - 'yardstick-cleanup'
- - 'yardstick-fetch-creds'
- - 'yardstick-{testsuite}'
- - 'yardstick-store-results'
-
- publishers:
- - email:
- recipients: jean.gaoliang@huawei.com limingjiang@huawei.com
- - email-jenkins-admins-on-failure
-
-########################
-# builder macros
-########################
-- builder:
- name: yardstick-daily
- builders:
- - shell:
- !include-raw: ./yardstick-daily.sh
-
-- builder:
- name: yardstick-fetch-creds
- builders:
- # yamllint disable rule:indentation
- - conditional-step:
- condition-kind: regex-match
- regex: "os-.*"
- label: '$DEPLOY_SCENARIO'
- steps:
- - shell:
- !include-raw: ../../utils/fetch_os_creds.sh
- - conditional-step:
- condition-kind: regex-match
- regex: "k8-.*"
- label: '$DEPLOY_SCENARIO'
- steps:
- - shell:
- !include-raw: ./yardstick-get-k8s-conf.sh
-
-- builder:
- name: yardstick-store-results
- builders:
- - shell:
- !include-raw: ../../utils/push-test-logs.sh
-
-- builder:
- name: yardstick-cleanup
- builders:
- - shell:
- !include-raw: ./yardstick-cleanup.sh
-########################
-# parameter macros
-########################
-- parameter:
- name: 'yardstick-params-apex-virtual-master'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-apex-baremetal-master'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-apex-virtual-fraser'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-apex-baremetal-fraser'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-fuel-baremetal'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-fuel-virtual'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-armband-baremetal'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-armband-virtual'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-joid-baremetal'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-joid-virtual'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-intel-pod8'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-compass-baremetal'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-daisy-baremetal'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-daisy-virtual'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-zte-pod1'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-zte-pod2'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-zte-pod3'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-zte-pod9'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-orange-pod1'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-orange-pod2'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-virtual'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-compass-virtual'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-huawei-pod3'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-huawei-pod4'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-flex-pod1'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-########################
-# trigger macros
-########################
-# trigger for PODs to only run yardstick test suites
-- trigger:
- name: 'yardstick-daily-huawei-pod3-trigger'
- triggers:
- - timed: '0 1 * * *'
-
-- trigger:
- name: 'yardstick-daily-huawei-pod4-trigger'
- triggers:
- - timed: ''
diff --git a/jjb/yardstick/yardstick-daily.sh b/jjb/yardstick/yardstick-daily.sh
deleted file mode 100755
index 783c64ee1..000000000
--- a/jjb/yardstick/yardstick-daily.sh
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/bin/bash
-set -e
-[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
-rc_file_vol=""
-cacert_file_vol=""
-sshkey=""
-
-rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/etc/yardstick/openstack.creds"
-
-if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
- instack_mac=$(sudo virsh domiflist undercloud | grep default | \
- grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
-    INSTALLER_IP=$(/usr/sbin/arp -e | grep ${instack_mac} | awk '{print $1}')
- sshkey="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
- if [[ -n $(sudo iptables -L FORWARD |grep "REJECT"|grep "reject-with icmp-port-unreachable") ]]; then
- #note: this happens only in opnfv-lf-pod1
- sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
- sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
- fi
-fi
-
-if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
- if [[ "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then
- rc_file_vol="-v ${HOME}/admin.conf:/etc/yardstick/admin.conf"
- else
- # If production lab then creds may be retrieved dynamically
- # creds are on the jumphost, always in the same folder
- rc_file_vol="-v $LAB_CONFIG/admin-openrc:/etc/yardstick/openstack.creds"
-        # If dev lab, credentials may not be in the default location; provide a path so they can be
-        # mounted into the container, replacing the default file with the one set in the Jenkins config
- fi
-elif [[ ${INSTALLER_TYPE} == 'compass' ]]; then
- if [[ "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then
- rc_file_vol="-v ${HOME}/admin.conf:/etc/yardstick/admin.conf"
- else
- cacert_file_vol="-v ${HOME}/os_cacert:/etc/yardstick/os_cacert"
- echo "export OS_CACERT=/etc/yardstick/os_cacert" >> ${HOME}/opnfv-openrc.sh
- fi
-elif [[ ${INSTALLER_TYPE} == 'fuel' ]]; then
- cacert_file_vol="-v ${HOME}/os_cacert:/etc/ssl/certs/mcp_os_cacert"
- sshkey="-v ${SSH_KEY}:/root/.ssh/mcp.rsa"
-fi
-# Set iptables rule to allow forwarding return traffic for container
-if ! sudo iptables -C FORWARD -j RETURN 2> ${redirect} || ! sudo iptables -L FORWARD | awk 'NR==3' | grep RETURN 2> ${redirect}; then
- sudo iptables -I FORWARD -j RETURN
-fi
-
-opts="--privileged=true --rm"
-envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
- -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NETWORK=${EXTERNAL_NETWORK} \
- -e YARDSTICK_BRANCH=${BRANCH} -e BRANCH=${BRANCH} \
- -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} -e CI_DEBUG=true"
-
-if [[ "${INSTALLER_TYPE}" == 'fuel' ]]; then
- envs+=" -e SSH_KEY=/root/.ssh/mcp.rsa"
-fi
-
-# Pull the image with correct tag
-DOCKER_REPO='opnfv/yardstick'
-if [ "$(uname -m)" = 'aarch64' ]; then
- DOCKER_REPO="${DOCKER_REPO}_$(uname -m)"
-fi
-echo "Yardstick: Pulling image ${DOCKER_REPO}:${DOCKER_TAG}"
-docker pull ${DOCKER_REPO}:$DOCKER_TAG >$redirect
-docker images
-
-# map log directory
-branch=${BRANCH##*/}
-dir_result="${HOME}/opnfv/yardstick/results/${branch}"
-mkdir -p ${dir_result}
-sudo rm -rf ${dir_result}/*
-map_log_dir="-v ${dir_result}:/tmp/yardstick"
-
-# Run docker
-cmd="sudo docker run ${opts} ${envs} ${rc_file_vol} ${cacert_file_vol} ${map_log_dir} ${sshkey} ${DOCKER_REPO}:${DOCKER_TAG} \
-exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
-
-echo "Yardstick: Running docker cmd: ${cmd}"
-${cmd}
-
-echo "Yardstick: done!"
diff --git a/jjb/yardstick/yardstick-get-k8s-conf.sh b/jjb/yardstick/yardstick-get-k8s-conf.sh
deleted file mode 100755
index 116d2f0d0..000000000
--- a/jjb/yardstick/yardstick-get-k8s-conf.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-set -e
-
-dest_path="$HOME/admin.conf"
-
-if [[ "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then
- if [[ ${INSTALLER_TYPE} == 'joid' ]];then
- juju scp kubernetes-master/0:config "${dest_path}"
- elif [[ ${INSTALLER_TYPE} == 'compass' ]];then
- echo "Copy admin.conf to ${dest_path}"
- docker cp compass-tasks:/opt/admin.conf "${dest_path}"
- fi
-fi
diff --git a/jjb/yardstick/yardstick-project-jobs.yaml b/jjb/yardstick/yardstick-project-jobs.yaml
deleted file mode 100644
index 56e825e19..000000000
--- a/jjb/yardstick/yardstick-project-jobs.yaml
+++ /dev/null
@@ -1,238 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: yardstick-project-jobs
-
- project: 'yardstick'
-
- jobs:
- - 'yardstick-verify-{stream}'
- - 'yardstick-merge-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-################################
-# job templates
-################################
-
-- job-template:
- name: 'yardstick-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 30
- fail: true
-
- builders:
- - yardstick-unit-tests-python-27
- - yardstick-unit-tests-python-3
- - yardstick-functional-tests-python-27
- - yardstick-functional-tests-python-3
- - yardstick-coverage-tests
- - yardstick-pep8-tests
-
-- job-template:
- name: 'yardstick-merge-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "Directory where the build artifact will be located upon\
- \ the completion of the build."
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 30
- fail: true
-
- builders:
- - yardstick-unit-tests-python-27
- - yardstick-unit-tests-python-3
- - yardstick-functional-tests-python-27
- - yardstick-functional-tests-python-3
- - yardstick-coverage-tests
- - yardstick-pep8-tests
-
-################################
-# job builders
-################################
-
-- builder:
- name: yardstick-unit-tests-python-27
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
-
- sudo apt-get -y autoremove
- sudo apt-get install -y build-essential python-dev python3-dev
- sudo apt-get -y clean && sudo apt-get -y autoremove
-
- echo "Running unit tests in Python 2.7 ..."
- cd $WORKSPACE
- tox -epy27
-
-- builder:
- name: yardstick-unit-tests-python-3
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
-
- sudo apt-get -y autoremove
- sudo apt-get install -y build-essential python-dev python3-dev
- sudo apt-get -y clean && sudo apt-get -y autoremove
-
- echo "Running unit tests in Python 3 ..."
- cd $WORKSPACE
- tox -epy3
-
-- builder:
- name: yardstick-functional-tests-python-27
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
-
- sudo apt-get -y autoremove
- sudo apt-get install -y build-essential python-dev python3-dev rabbitmq-server
- sudo apt-get -y clean && sudo apt-get -y autoremove
-
- echo "Configure RabbitMQ service"
- sudo service rabbitmq-server restart
- sudo rabbitmqctl start_app
- if [ -z "$(sudo rabbitmqctl list_users | grep yardstick)" ]; then
- sudo rabbitmqctl add_user yardstick yardstick
- sudo rabbitmqctl set_permissions yardstick ".*" ".*" ".*"
- fi
-
- echo "Running functional tests in Python 2.7 ..."
- cd $WORKSPACE
- tox -efunctional
-
-- builder:
- name: yardstick-functional-tests-python-3
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
-
- sudo apt-get -y autoremove
- sudo apt-get install -y build-essential python-dev python3-dev rabbitmq-server
- sudo apt-get -y clean && sudo apt-get -y autoremove
-
- echo "Configure RabbitMQ service"
- sudo service rabbitmq-server restart
- sudo rabbitmqctl start_app
- if [ -z "$(sudo rabbitmqctl list_users | grep yardstick)" ]; then
- sudo rabbitmqctl add_user yardstick yardstick
- sudo rabbitmqctl set_permissions yardstick ".*" ".*" ".*"
- fi
-
- echo "Running functional tests in Python 3 ..."
- cd $WORKSPACE
- tox -efunctional-py3
-
-- builder:
- name: yardstick-coverage-tests
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
-
- sudo apt-get -y autoremove
- sudo apt-get install -y build-essential python-dev python3-dev
- sudo apt-get -y clean && sudo apt-get -y autoremove
-
- echo "Running coverage tests ..."
- cd $WORKSPACE
- tox -ecoverage
-
-- builder:
- name: yardstick-pep8-tests
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
-
- sudo apt-get -y autoremove
- sudo apt-get install -y build-essential python-dev python3-dev
- sudo apt-get -y clean && sudo apt-get -y autoremove
-
- echo "Running style guidelines (PEP8) tests ..."
- cd $WORKSPACE
- tox -epep8
diff --git a/modules/opnfv/deployment/example.py b/modules/opnfv/deployment/example.py
index 52d9b5630..1e54321c7 100644
--- a/modules/opnfv/deployment/example.py
+++ b/modules/opnfv/deployment/example.py
@@ -23,15 +23,17 @@ print(handler.get_deployment_info())
print("########## FUEL ##########")
+# NOTE: If you get traces containing <paramiko.ecdsakey.ECDSAKey object [...]>
+# make sure 10.20.0.2 is not already in ~/.ssh/known_hosts with a different host key
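+# (a stale entry can usually be cleared with: ssh-keygen -R 10.20.0.2)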
handler = factory.Factory.get_handler('fuel',
'10.20.0.2',
- 'root',
- installer_pwd='r00tme')
+ 'ubuntu',
+ pkey_file='/var/lib/opnfv/mcp.rsa')
print(handler.get_deployment_info())
-print("List of nodes in cluster 4:")
-nodes = handler.get_nodes({'cluster': '4'})
+print("List of nodes in cluster")
+nodes = handler.get_nodes()
for node in nodes:
print(node)
diff --git a/modules/opnfv/deployment/factory.py b/modules/opnfv/deployment/factory.py
index 1fd8d447b..cd2fc3645 100644
--- a/modules/opnfv/deployment/factory.py
+++ b/modules/opnfv/deployment/factory.py
@@ -42,7 +42,7 @@ class Factory(object):
elif installer.lower() == "fuel":
return fuel_adapter.FuelAdapter(installer_ip=installer_ip,
installer_user=installer_user,
- installer_pwd=installer_pwd)
+ pkey_file=pkey_file)
elif installer.lower() == "compass":
return compass_adapter.ContainerizedCompassAdapter(
installer_ip=installer_ip,
diff --git a/modules/opnfv/deployment/fuel/adapter.py b/modules/opnfv/deployment/fuel/adapter.py
index a217767ba..a57168d0e 100644
--- a/modules/opnfv/deployment/fuel/adapter.py
+++ b/modules/opnfv/deployment/fuel/adapter.py
@@ -1,5 +1,5 @@
##############################################################################
-# Copyright (c) 2017 Ericsson AB and others.
+# Copyright (c) 2018 Ericsson AB and others.
# Author: Jose Lausuch (jose.lausuch@ericsson.com)
# George Paraskevopoulos (geopar@intracom-telecom.com)
# All rights reserved. This program and the accompanying materials
@@ -7,193 +7,129 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
+'''
+ This module implements the Fuel@OPNFV adapter
+ - host executing this module needs network connectivity to a cluster via:
+ * mcpcontrol network (usually 10.20.0.0/24, created by installer);
+ * PXE/admin network;
+ The above are always true for an OPNFV Pharos jumpserver.
+ - key-based SSH auth is used throughout the cluster, without proxying
+   cluster node access via the Salt master (as old Fuel@OPNFV did);
+'''
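+
+# Minimal usage sketch (illustrative only; mirrors modules/opnfv/deployment/
+# example.py and assumes the default Fuel@OPNFV key location on the jumpserver):
+#
+#   from opnfv.deployment import factory
+#   handler = factory.Factory.get_handler('fuel', '10.20.0.2', 'ubuntu',
+#                                          pkey_file='/var/lib/opnfv/mcp.rsa')
+#   for node in handler.get_nodes():
+#       print(node)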
+
+from yaml import safe_load, YAMLError
from opnfv.deployment import manager
from opnfv.utils import opnfv_logger as logger
from opnfv.utils import ssh_utils
-logger = logger.Logger(__name__).getLogger()
+LOGGER = logger.Logger(__name__).getLogger()
class FuelAdapter(manager.DeploymentHandler):
+ '''
+ This class extends the generic handler with Fuel@OPNFV specifics
+ '''
- def __init__(self, installer_ip, installer_user, installer_pwd):
+ def __init__(self, installer_ip, installer_user, pkey_file):
super(FuelAdapter, self).__init__(installer='fuel',
installer_ip=installer_ip,
installer_user=installer_user,
- installer_pwd=installer_pwd,
- pkey_file=None)
-
- def _get_clusters(self):
- environments = []
- output = self.runcmd_fuel_env()
- lines = output.rsplit('\n')
- if len(lines) < 2:
- logger.info("No environments found in the deployment.")
- return None
- else:
- fields = lines[0].rsplit(' | ')
-
- index_id = -1
- index_status = -1
- index_name = -1
- index_release_id = -1
-
- for i in range(len(fields)):
- if "id" in fields[i]:
- index_id = i
- elif "status" in fields[i]:
- index_status = i
- elif "name" in fields[i]:
- index_name = i
- elif "release_id" in fields[i]:
- index_release_id = i
-
- # order env info
- for i in range(2, len(lines)):
- fields = lines[i].rsplit(' | ')
- dict = {"id": fields[index_id].strip(),
- "status": fields[index_status].strip(),
- "name": fields[index_name].strip(),
- "release_id": fields[index_release_id].strip()}
- environments.append(dict)
-
- return environments
+ installer_pwd=None,
+ pkey_file=pkey_file)
def get_nodes(self, options=None):
-
- if options and options['cluster'] and len(self.nodes) > 0:
- n = []
- for node in self.nodes:
- if str(node.info['cluster']) == str(options['cluster']):
- n.append(node)
- return n
-
+ '''
+ Generates a list of all the nodes in the deployment
+ '''
+        # Unlike old Fuel@OPNFV, we don't keep track of different clusters
+ # explicitly, but through domain names.
+ # For simplicity, we will assume a single cluster per Salt master node.
try:
# if we have retrieved previously all the nodes, don't do it again
# This fails the first time when the constructor calls this method
# therefore the try/except
if len(self.nodes) > 0:
return self.nodes
+ # pylint: disable=bare-except
except:
pass
+ # Manager roles to reclass properties mapping
+ _map = {
+ 'salt:master:enabled': manager.Role.INSTALLER,
+ 'maas:region:enabled': manager.Role.INSTALLER,
+ 'nova:controller:enabled': manager.Role.CONTROLLER,
+ 'nova:compute:enabled': manager.Role.COMPUTE,
+ 'opendaylight:server:enabled': manager.Role.ODL,
+ }
nodes = []
- cmd = 'fuel node'
+ cmd = ("sudo salt '*' pillar.item {} --out yaml --static 2>/dev/null"
+               .format(' '.join(list(_map.keys()) + ['_param:pxe_admin_address'])))
+ # Sample output (for one node):
+ # cmp001.mcp-ovs-noha.local:
+ # _param:pxe_admin_address: 192.168.11.34
+ # maas:region:enabled: ''
+ # nova:compute:enabled: true
+ # nova:controller:enabled: ''
+ # opendaylight:server:enabled: ''
+ # retcode: 0
+ # salt:master:enabled: ''
output = self.installer_node.run_cmd(cmd)
- lines = output.rsplit('\n')
- if len(lines) < 2:
- logger.info("No nodes found in the deployment.")
+ if output.startswith('No minions matched the target'):
+ LOGGER.info('No nodes found in the deployment.')
return nodes
- # get fields indexes
- fields = lines[0].rsplit(' | ')
-
- index_id = -1
- index_status = -1
- index_name = -1
- index_cluster = -1
- index_ip = -1
- index_mac = -1
- index_roles = -1
- index_online = -1
-
- for i in range(len(fields)):
- if "group_id" in fields[i]:
- break
- elif "id" in fields[i]:
- index_id = i
- elif "status" in fields[i]:
- index_status = i
- elif "name" in fields[i]:
- index_name = i
- elif "cluster" in fields[i]:
- index_cluster = i
- elif "ip" in fields[i]:
- index_ip = i
- elif "mac" in fields[i]:
- index_mac = i
- elif "roles " in fields[i] and "pending_roles" not in fields[i]:
- index_roles = i
- elif "online" in fields[i]:
- index_online = i
-
- # order nodes info
- for i in range(2, len(lines)):
- fields = lines[i].rsplit(' | ')
- id = fields[index_id].strip().encode()
- ip = fields[index_ip].strip().encode()
- status_node = fields[index_status].strip().encode().lower()
- name = fields[index_name].strip().encode()
- roles_all = fields[index_roles].strip().encode().lower()
-
- roles = [x for x in [manager.Role.CONTROLLER,
- manager.Role.COMPUTE,
- manager.Role.ODL] if x in roles_all]
-
- dict = {"cluster": fields[index_cluster].strip().encode(),
- "mac": fields[index_mac].strip().encode(),
- "status_node": status_node,
- "online": fields[index_online].strip().encode()}
-
- ssh_client = None
- if status_node == 'ready':
- status = manager.NodeStatus.STATUS_OK
- proxy = {'ip': self.installer_ip,
- 'username': self.installer_user,
- 'password': self.installer_pwd}
- ssh_client = ssh_utils.get_ssh_client(hostname=ip,
- username='root',
- proxy=proxy)
- elif 'error' in status_node:
- status = manager.NodeStatus.STATUS_ERROR
- elif 'off' in status_node:
- status = manager.NodeStatus.STATUS_OFFLINE
- elif 'discover' in status_node:
- status = manager.NodeStatus.STATUS_UNUSED
- else:
- status = manager.NodeStatus.STATUS_INACTIVE
-
+ try:
+ yaml_output = safe_load(output)
+        except YAMLError as exc:
+            LOGGER.error(exc)
+            return nodes
+ for node_name in yaml_output.keys():
+ ip_addr = yaml_output[node_name]['_param:pxe_admin_address']
+ ssh_client = ssh_utils.get_ssh_client(hostname=ip_addr,
+ username='ubuntu',
+ pkey_file=self.pkey_file)
node = manager.Node(
- id, ip, name, status, roles, ssh_client, dict)
- if options and options['cluster']:
- if fields[index_cluster].strip() == options['cluster']:
- nodes.append(node)
- else:
- nodes.append(node)
+ id=node_name,
+ ip=ip_addr,
+ name=node_name,
+ status=manager.NodeStatus.STATUS_OK,
+ roles=[_map[x] for x in _map if yaml_output[node_name][x]],
+ ssh_client=ssh_client)
+ nodes.append(node)
- self.get_nodes_called = True
return nodes
def get_openstack_version(self):
- cmd = 'source openrc;nova-manage version 2>/dev/null'
- version = None
- for node in self.nodes:
- if node.is_controller() and node.is_active():
- version = node.run_cmd(cmd)
- break
- return version
+ '''
+        Returns a string with the OpenStack version (as reported by nova-manage)
+ '''
+ cmd = ("sudo salt -C 'I@nova:controller and *01*' "
+ "cmd.run 'nova-manage version 2>/dev/null' --out yaml --static")
+ nova_version = self.installer_node.run_cmd(cmd)
+ if nova_version:
+ return nova_version.split(' ')[-1]
+ return None
def get_sdn_version(self):
- cmd = "apt-cache policy opendaylight|grep Installed"
+ '''
+        Returns a string with the SDN controller name and version, if one exists
+ '''
+        cmd = ("sudo salt -C 'I@opendaylight:server and *01*' "
+ "pkg.version opendaylight --out yaml --static")
version = None
for node in self.nodes:
if manager.Role.ODL in node.roles and node.is_active():
- odl_version = node.run_cmd(cmd)
+ odl_version = self.installer_node.run_cmd(cmd)
if odl_version:
version = 'OpenDaylight ' + odl_version.split(' ')[-1]
break
return version
def get_deployment_status(self):
- cmd = "fuel env|tail -1|awk '{print $3}'"
- result = self.installer_node.run_cmd(cmd)
- if result is None or len(result) == 0:
- return 'unknown'
- elif 'operational' in result:
- return 'active'
- elif 'deploy' in result:
- return 'deploying'
- else:
- return 'active'
+ '''
+ Returns a string of the status of the deployment
+ '''
+        # NOTE: Requires Fuel-side signaling of deployment status; stubbed for now
+ return 'active'
diff --git a/modules/opnfv/deployment/manager.py b/modules/opnfv/deployment/manager.py
index 694df7755..2b5aedbc7 100644
--- a/modules/opnfv/deployment/manager.py
+++ b/modules/opnfv/deployment/manager.py
@@ -241,13 +241,13 @@ class Node(object):
Returns the ovs version installed
'''
if self.is_active():
- cmd = "ovs-vsctl --version|head -1| sed 's/^.*) //'"
- return self.run_cmd(cmd)
+ cmd = "ovs-vsctl --version 2>/dev/null|head -1| sed 's/^.*) //'"
+ return self.run_cmd(cmd) or None
return None
def get_system_info(self):
'''
- Returns the ovs version installed
+ Returns system information
'''
cmd = 'grep MemTotal /proc/meminfo'
memory = self.run_cmd(cmd).partition('MemTotal:')[-1].strip().encode()
diff --git a/modules/opnfv/utils/Credentials.py b/modules/opnfv/utils/Credentials.py
index 141ecbd93..193a10aac 100644
--- a/modules/opnfv/utils/Credentials.py
+++ b/modules/opnfv/utils/Credentials.py
@@ -7,9 +7,9 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
#
-# Usage example:
+# Usage example (note: Fuel actually uses key-based auth, not user/pass):
# from opnfv.utils.Credentials import Credentials as credentials
-# credentials("fuel", "10.20.0.2", "root", "r00tme").fetch('./openrc')
+# credentials("fuel", "10.20.0.2", "user", "password").fetch('./openrc')
#
import os
@@ -74,12 +74,7 @@ class Credentials(object):
pass
def __fetch_creds_fuel(self, target_path):
- creds_file = '/root/openrc'
- try:
- self.handler.get_file_from_controller(creds_file, target_path)
- except Exception as e:
- self.logger.error(
- "Cannot get %s from controller. %e" % (creds_file, e))
+ # TODO
pass
def __fetch_creds_joid(self, target_path):
diff --git a/modules/opnfv/utils/ssh_utils.py b/modules/opnfv/utils/ssh_utils.py
index 175a38078..22727170b 100644
--- a/modules/opnfv/utils/ssh_utils.py
+++ b/modules/opnfv/utils/ssh_utils.py
@@ -49,10 +49,11 @@ def get_ssh_client(hostname,
client = paramiko.SSHClient()
else:
client = ProxyHopClient()
+ proxy_password = proxy.get('password', None)
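+            # Illustrative shape of the expected 'proxy' dict (example values only,
+            # borrowed from modules/opnfv/deployment/example.py):
+            #   proxy = {'ip': '10.20.0.2', 'username': 'ubuntu',
+            #            'pkey_file': '/var/lib/opnfv/mcp.rsa'}
+            # 'password' is optional and defaults to None.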
proxy_pkey_file = proxy.get('pkey_file', '/root/.ssh/id_rsa')
client.configure_jump_host(proxy['ip'],
proxy['username'],
- proxy['password'],
+ proxy_password,
proxy_pkey_file)
if client is None:
raise Exception('Could not connect to client')
diff --git a/modules/requirements.txt b/modules/requirements.txt
index 0718fa3b0..2c51daaea 100644
--- a/modules/requirements.txt
+++ b/modules/requirements.txt
@@ -1,3 +1,4 @@
paramiko>=2.0 # LGPLv2.1+
mock>=2.0 # BSD
requests>=2.14.2 # Apache-2.0
+pyyaml>=3.11 # MIT
diff --git a/releases/2023.1/functest.yaml b/releases/2023.1/functest.yaml
new file mode 100644
index 000000000..b48123f96
--- /dev/null
+++ b/releases/2023.1/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/2023.1
+ location:
+ functest-xtesting: 781fc313c1c59018edb892ff0423963191781592
diff --git a/releases/2023.2/functest.yaml b/releases/2023.2/functest.yaml
new file mode 100644
index 000000000..372326435
--- /dev/null
+++ b/releases/2023.2/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/2023.2
+ location:
+ functest-xtesting: 0d6bb30968d1092caeffee4159feccf6319ff565
diff --git a/releases/gambia/apex.yaml b/releases/gambia/apex.yaml
new file mode 100644
index 000000000..3fc8f3124
--- /dev/null
+++ b/releases/gambia/apex.yaml
@@ -0,0 +1,29 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: apex
+project-type: installer
+release-model: stable
+upstream: https://wiki.openstack.org/wiki/TripleO
+
+releases:
+ - version: opnfv-7.0.0
+ location:
+ apex: de387870dfc45fa9a9b066bbfe1c96c731cb086b
+ - version: opnfv-7.1.0
+ location:
+ apex: 06da44b76d254987d41b083960b2f3637175a791
+ - version: opnfv-7.2.0
+ location:
+ apex: 16e7adc723a960f9a1f91d70c90fd9be364f9fe9
+
+branches:
+ - name: stable/gambia
+ location:
+ apex: 82bf9da27ea0c973068720d440e3391084e8a9d9
diff --git a/releases/gambia/armband.yaml b/releases/gambia/armband.yaml
new file mode 100644
index 000000000..b0f11db02
--- /dev/null
+++ b/releases/gambia/armband.yaml
@@ -0,0 +1,30 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: armband
+project-type: installer
+release-model: stable
+
+releases:
+ - version: opnfv-7.0.0
+ location:
+ armband: 30027365166c204bc03043234bbc6b0dfc2506fb
+ - version: opnfv-7.1.0
+ location:
+ armband: 5d8dd94fd0a50d9899010eb764e94abcb05b38c7
+ - version: opnfv-7.2.0
+ location:
+ armband: 5d8dd94fd0a50d9899010eb764e94abcb05b38c7
+
+branches:
+ - name: stable/gambia
+ location:
+ armband: a880b5c0fe55397b73f0fcf8f8c87d523327099d
+
+release-notes: https://opnfv-armband.readthedocs.io/en/latest/release/release-notes/release-notes.html
diff --git a/releases/gambia/auto.yaml b/releases/gambia/auto.yaml
new file mode 100644
index 000000000..5254827b0
--- /dev/null
+++ b/releases/gambia/auto.yaml
@@ -0,0 +1,25 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation, Tieto and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: auto
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/gambia
+ location:
+ auto: 1a2260efe5d15f95b8fa778a9ee8023121facd7e
+
+releases:
+ - version: opnfv-7.0.0
+ location:
+ auto: 1a2260efe5d15f95b8fa778a9ee8023121facd7e
+ - version: opnfv-7.1.0
+ location:
+ auto: 1a2260efe5d15f95b8fa778a9ee8023121facd7e
diff --git a/releases/gambia/availability.yaml b/releases/gambia/availability.yaml
new file mode 100644
index 000000000..ee28fec30
--- /dev/null
+++ b/releases/gambia/availability.yaml
@@ -0,0 +1,9 @@
+---
+project: availability
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/gambia
+ location:
+ availability: 053a75d02fe1842ba3e2bc0d6b019e1cdc6a6123
diff --git a/releases/gambia/barometer.yaml b/releases/gambia/barometer.yaml
new file mode 100644
index 000000000..be492b138
--- /dev/null
+++ b/releases/gambia/barometer.yaml
@@ -0,0 +1,14 @@
+---
+project: barometer
+project-type: feature
+release-model: stable
+
+releases:
+ - version: opnfv-7.1.0
+ location:
+ barometer: 7cb6bd3a71c17a158f3057bfd4f1dabb0b28db35
+
+branches:
+ - name: stable/gambia
+ location:
+ barometer: 9ac248a08de1f197bafbabbf43175b30ab19ec32
diff --git a/releases/gambia/bottlenecks.yaml b/releases/gambia/bottlenecks.yaml
new file mode 100644
index 000000000..564bf7d35
--- /dev/null
+++ b/releases/gambia/bottlenecks.yaml
@@ -0,0 +1,14 @@
+---
+project: bottlenecks
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/gambia
+ location:
+ bottlenecks: 399ec2f328f56c1f81c454ecedfb6b99eaf93c42
+
+releases:
+ - version: opnfv-7.1.0
+ location:
+ bottlenecks: b38c653c910ed4cf83d6e8d0df1b88b1bb109f4b
diff --git a/releases/gambia/clover.yaml b/releases/gambia/clover.yaml
new file mode 100644
index 000000000..0614d9f32
--- /dev/null
+++ b/releases/gambia/clover.yaml
@@ -0,0 +1,22 @@
+---
+project: clover
+project-type: feature
+release-model: stable
+
+releases:
+ - version: opnfv-7.0.0
+ location:
+ clover: b0231105e304d21f454bec58c4c2905f8d8b8e5f
+ - version: opnfv-7.1.0
+ location:
+ clover: fbc39f64e0f7c09558a5ce421719b63217c77d7b
+ - version: opnfv-7.2.0
+ location:
+ clover: fbc39f64e0f7c09558a5ce421719b63217c77d7b
+
+branches:
+ - name: stable/gambia
+ location:
+ clover: ee2169ee4b8fb3539ad173fbc1557b54b2f2216f
+
+release-notes: https://opnfv-clover.readthedocs.io/en/stable-gambia/release/release-notes/release-notes.html
diff --git a/releases/gambia/compass4nfv.yaml b/releases/gambia/compass4nfv.yaml
new file mode 100644
index 000000000..baf1fdea4
--- /dev/null
+++ b/releases/gambia/compass4nfv.yaml
@@ -0,0 +1,26 @@
+---
+project: compass4nfv
+project-type: installer
+release-model: stable
+
+branches:
+ - name: stable/gambia
+ location:
+ compass4nfv: 33f94b43639dbe37b7e8e2b5eeb4c65064207c6b
+ - name: stable/gambia
+ location:
+ compass-containers: 20e229822b31b03e1120c3e5efd4ba131261617e
+
+releases:
+ - version: opnfv-7.0.0
+ location:
+ compass4nfv: 33f94b43639dbe37b7e8e2b5eeb4c65064207c6b
+ - version: opnfv-7.0.0
+ location:
+ compass-containers: 20e229822b31b03e1120c3e5efd4ba131261617e
+ - version: opnfv-7.1.0
+ location:
+ compass4nfv: 5838841f09950160f907e15fc14282449f6652af
+ - version: opnfv-7.1.0
+ location:
+ compass-containers: c8fd057f2752713a21d093f5255348e2b183871e
diff --git a/releases/gambia/container4nfv.yaml b/releases/gambia/container4nfv.yaml
new file mode 100644
index 000000000..6522cfa7a
--- /dev/null
+++ b/releases/gambia/container4nfv.yaml
@@ -0,0 +1,9 @@
+---
+project: container4nfv
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/gambia
+ location:
+ container4nfv: 1fa07ef952a6b0e12487901919cec52b9d9b9739
diff --git a/releases/gambia/doctor.yaml b/releases/gambia/doctor.yaml
new file mode 100644
index 000000000..73707a63a
--- /dev/null
+++ b/releases/gambia/doctor.yaml
@@ -0,0 +1,27 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 Nokia Corporation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: doctor
+project-type: feature
+release-model: stable
+
+releases:
+ - version: opnfv-7.0.0
+ location:
+ doctor: 3ddc2392b0ed364eede49ff006d64df3ea456350
+ - version: opnfv-7.0.1
+ location:
+ doctor: 9250be9b9f3e3fff0c5aa827daf89f0212ce964f
+ - version: opnfv-7.1.0
+ location:
+ doctor: e4ad3e7cf33e7696a08a52c4e167e240f56c7428
+branches:
+ - name: stable/gambia
+ location:
+ doctor: 3ddc2392b0ed364eede49ff006d64df3ea456350
diff --git a/releases/gambia/edgecloud.yaml b/releases/gambia/edgecloud.yaml
new file mode 100644
index 000000000..db8038340
--- /dev/null
+++ b/releases/gambia/edgecloud.yaml
@@ -0,0 +1,9 @@
+---
+project: edgecloud
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/gambia
+ location:
+ edgecloud: 3708debe3f7ff9edd0f0a52998ebf7a799bb712f
diff --git a/releases/gambia/fuel.yaml b/releases/gambia/fuel.yaml
new file mode 100644
index 000000000..f74f25df8
--- /dev/null
+++ b/releases/gambia/fuel.yaml
@@ -0,0 +1,30 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: fuel
+project-type: installer
+release-model: stable
+
+branches:
+ - name: stable/gambia
+ location:
+ fuel: 90a442136b0aff8380388ac0a94831d0904e3cb8
+
+releases:
+ - version: opnfv-7.0.0
+ location:
+ fuel: 326c26b9b89c23e9775ba287d9da3bea35cfc437
+ - version: opnfv-7.1.0
+ location:
+ fuel: 79c56150533ec77e73bede445e74960affbbef67
+ - version: opnfv-7.2.0
+ location:
+ fuel: 0c05f52aaf024db6443336f69ebc7c6e037d06b9
+
+release-notes: https://opnfv-fuel.readthedocs.io/en/latest/release/release-notes/release-notes.html
diff --git a/releases/gambia/functest.yaml b/releases/gambia/functest.yaml
new file mode 100644
index 000000000..a04412ca2
--- /dev/null
+++ b/releases/gambia/functest.yaml
@@ -0,0 +1,35 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+releases:
+ - version: opnfv-7.0.0
+ location:
+ functest: b7f9b53a34cd8ffcc9df4d2f286e55e13adb4a58
+ - version: opnfv-7.1.0
+ location:
+ functest: b21dede23cb7577497fb7adf2775a77c45a37e36
+ - version: opnfv-7.2.0
+ location:
+ functest: 312f1481503e44fdb29857d528d8d641e408f929
+ - version: opnfv-7.0.0
+ location:
+ functest-kubernetes: fd5c939094244458aea31ead8c6519da95266c2a
+ - version: opnfv-7.1.0
+ location:
+ functest-kubernetes: fd5c939094244458aea31ead8c6519da95266c2a
+ - version: opnfv-7.2.0
+ location:
+ functest-kubernetes: 262e3a4e51183388091481a7593b11bc37f295fb
+
+branches:
+ - name: stable/gambia
+ location:
+ functest: d5fce8c12bc73c1b1547df2750563c271a3ab3f6
+ - name: stable/gambia
+ location:
+ functest-kubernetes: ca3c9cf6f57c87aee8c6f0b93f70d84e3df736de
+ - name: stable/gambia
+ location:
+ functest-xtesting: a09ece9bbe9cda52487cf5b443619fec1b3e09ca
diff --git a/releases/gambia/ipv6.yaml b/releases/gambia/ipv6.yaml
new file mode 100644
index 000000000..ed4600f84
--- /dev/null
+++ b/releases/gambia/ipv6.yaml
@@ -0,0 +1,30 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: ipv6
+project-type: feature
+release-model: stable
+
+releases:
+ - version: opnfv-7.0.0
+ location:
+ ipv6: c0e6ae3d1d443f96f0ab50e417f9ad740669b890
+ - version: opnfv-7.1.0
+ location:
+ ipv6: 7978c6172432174ea6027922524b476783516f56
+ - version: opnfv-7.2.0
+ location:
+ ipv6: a0e58ed0081af8ca95b35a5967769c1a1dce6689
+
+branches:
+ - name: stable/gambia
+ location:
+ ipv6: e36f753cf6e9ab0c02a400b6bac5c79b48268d44
+
+release-notes: https://opnfv-ipv6.readthedocs.io/en/stable-gambia/release/release-notes/
diff --git a/releases/gambia/nfvbench.yaml b/releases/gambia/nfvbench.yaml
new file mode 100644
index 000000000..796ec4de4
--- /dev/null
+++ b/releases/gambia/nfvbench.yaml
@@ -0,0 +1,9 @@
+---
+project: nfvbench
+project-type: tools
+release-model: stable
+
+branches:
+ - name: stable/gambia
+ location:
+ nfvbench: 3dfb5268ebfada08c1a2316708564e627f86da42
diff --git a/releases/gambia/opnfvdocs.yaml b/releases/gambia/opnfvdocs.yaml
new file mode 100644
index 000000000..b9cd04f4b
--- /dev/null
+++ b/releases/gambia/opnfvdocs.yaml
@@ -0,0 +1,9 @@
+---
+project: opnfvdocs
+project-type: infra
+release-model: stable
+
+branches:
+ - name: stable/gambia
+ location:
+ opnfvdocs: c89a5b35e81cf085bba892b230ca4516a92ffd57
diff --git a/releases/gambia/ovn4nfv.yaml b/releases/gambia/ovn4nfv.yaml
new file mode 100644
index 000000000..b5c8ad3c2
--- /dev/null
+++ b/releases/gambia/ovn4nfv.yaml
@@ -0,0 +1,9 @@
+---
+project: ovn4nfv
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/gambia
+ location:
+ ovn4nfv: 9301afc8b3d9314ba19b464b8a7eb3cb7fa3bd7e
diff --git a/releases/gambia/samplevnf.yaml b/releases/gambia/samplevnf.yaml
new file mode 100644
index 000000000..3e1cc5d44
--- /dev/null
+++ b/releases/gambia/samplevnf.yaml
@@ -0,0 +1,25 @@
+---
+project: samplevnf
+project-type: feature
+release-model: stable
+
+releases:
+ - version: opnfv-7.0.0
+ location:
+ samplevnf: 66ee98f3427439a4cdd56c9f67a25aec140a5c9c
+ - version: opnfv-7.0.1
+ location:
+ samplevnf: 9c29c90931b7fbc1690508769f1f7a7898dbf015
+ - version: opnfv-7.1.0
+ location:
+ samplevnf: 9c29c90931b7fbc1690508769f1f7a7898dbf015
+ - version: opnfv-7.2.0
+ location:
+ samplevnf: ef3eab1c61b59bf4f808f7900ad5db29a41f0d9a
+
+branches:
+ - name: stable/gambia
+ location:
+ samplevnf: 4d59d3530d1c41734f15423142e64eb9c929c717
+
+release-notes: https://opnfv-samplevnf.readthedocs.io/en/latest/release/release-notes/release-notes.html
diff --git a/releases/gambia/sandbox.yaml b/releases/gambia/sandbox.yaml
new file mode 100644
index 000000000..d08bc3994
--- /dev/null
+++ b/releases/gambia/sandbox.yaml
@@ -0,0 +1,17 @@
+---
+project: sandbox
+project-type: infra
+release-model: stable
+
+branches:
+ - name: stable/gambia
+ location:
+ sandbox: c2012f5b642f17e6024db631b833414114a329d5
+
+releases:
+ - version: opnfv-7.0.0
+ location:
+ sandbox: c2012f5b642f17e6024db631b833414114a329d5
+ - version: opnfv-7.0.1
+ location:
+ sandbox: c2012f5b642f17e6024db631b833414114a329d5
diff --git a/releases/gambia/sdnvpn.yaml b/releases/gambia/sdnvpn.yaml
new file mode 100644
index 000000000..309523058
--- /dev/null
+++ b/releases/gambia/sdnvpn.yaml
@@ -0,0 +1,27 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: sdnvpn
+project-type: feature
+release-model: stable
+
+releases:
+ - version: opnfv-7.0.0
+ location:
+ sdnvpn: 6c2ec2eeedfc342e815b3dff7c190d8909e22e48
+ - version: opnfv-7.1.0
+ location:
+ sdnvpn: 3c74ef730c31343837b01c0e92e643d387ba8202
+
+branches:
+ - name: stable/gambia
+ location:
+ sdnvpn: c05105a4f9f51f7bb31cad791e65d664e5a3bc4b
+
+release-notes: https://opnfv-sdnvpn.readthedocs.io/en/stable-gambia/release/release-notes
diff --git a/releases/gambia/sfc.yaml b/releases/gambia/sfc.yaml
new file mode 100644
index 000000000..c32b9c8c5
--- /dev/null
+++ b/releases/gambia/sfc.yaml
@@ -0,0 +1,11 @@
+---
+project: sfc
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/gambia
+ location:
+ sfc: 77bdb6a79a506f91959070dc8ff28949f2dff825
+
+release-notes: https://opnfv-sfc.readthedocs.io/en/stable-gambia/release/release-notes/
diff --git a/releases/gambia/stor4nfv.yaml b/releases/gambia/stor4nfv.yaml
new file mode 100644
index 000000000..65c327fe1
--- /dev/null
+++ b/releases/gambia/stor4nfv.yaml
@@ -0,0 +1,9 @@
+---
+project: stor4nfv
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/gambia
+ location:
+ stor4nfv: cefd1d0854ee948acc7147834146914983a11556
diff --git a/releases/gambia/storperf.yaml b/releases/gambia/storperf.yaml
new file mode 100644
index 000000000..088f73821
--- /dev/null
+++ b/releases/gambia/storperf.yaml
@@ -0,0 +1,9 @@
+---
+project: storperf
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/gambia
+ location:
+ storperf: 4aba838a07f5cd7dbd6d606c34f688e647a5d890
diff --git a/releases/gambia/vswitchperf.yaml b/releases/gambia/vswitchperf.yaml
new file mode 100644
index 000000000..dd13b790f
--- /dev/null
+++ b/releases/gambia/vswitchperf.yaml
@@ -0,0 +1,14 @@
+---
+project: vswitchperf
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/gambia
+ location:
+ vswitchperf: 96de8654952a606d28d56c057ba871b5553e4176
+
+releases:
+ - version: opnfv-7.0.0
+ location:
+ vswitchperf: ad9507ef023f36494024d805cd2d4640536bd7a4
diff --git a/releases/gambia/yardstick.yaml b/releases/gambia/yardstick.yaml
new file mode 100644
index 000000000..527bb24d2
--- /dev/null
+++ b/releases/gambia/yardstick.yaml
@@ -0,0 +1,24 @@
+---
+project: yardstick
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/gambia
+ location:
+ yardstick: 497292013ef0d7e1e014d76803d4f284618b4986
+
+releases:
+ - version: opnfv-7.0.0
+ location:
+ yardstick: b9fa3eac6caf65a371f339fcbc3abc7cbe5b41cc
+
+ - version: opnfv-7.1.0
+ location:
+ yardstick: a1e5634d35ba5f0095fdd2777efa4042b1872127
+
+ - version: opnfv-7.2.0
+ location:
+ yardstick: eb6d97898b6f69f7ced0fdb0acba15a0a1df0d09
+
+release-notes: https://opnfv-yardstick.readthedocs.io/en/stable-gambia/release/release-notes/
diff --git a/releases/hunter/apex.yaml b/releases/hunter/apex.yaml
new file mode 100644
index 000000000..397f3da84
--- /dev/null
+++ b/releases/hunter/apex.yaml
@@ -0,0 +1,23 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: apex
+project-type: installer
+release-model: stable
+upstream: https://wiki.openstack.org/wiki/TripleO
+
+releases:
+ - version: opnfv-8.0.0
+ location:
+ apex: 5a60fc71848a60ee51837b7dc7a49dda741876cd
+
+branches:
+ - name: stable/hunter
+ location:
+ apex: 8f294ab694a95e60abb29af6f1ef560ae365d436
diff --git a/releases/hunter/availability.yaml b/releases/hunter/availability.yaml
new file mode 100644
index 000000000..64654002e
--- /dev/null
+++ b/releases/hunter/availability.yaml
@@ -0,0 +1,9 @@
+---
+project: availability
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/hunter
+ location:
+ availability: 3cbea96d97e2449ed152c38114ec58aa1c876a96
diff --git a/jjb/releng/releng-release-create-venv.sh b/releases/hunter/barometer.yaml
index 0d5635b59..71d787b59 100644
--- a/jjb/releng/releng-release-create-venv.sh
+++ b/releases/hunter/barometer.yaml
@@ -1,4 +1,3 @@
-#!/bin/bash
# SPDX-License-Identifier: Apache-2.0
##############################################################################
# Copyright (c) 2018 The Linux Foundation and others.
@@ -7,12 +6,17 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-set -e -o pipefail
-echo "---> Create virtualenv"
+---
+project: barometer
+project-type: feature
+release-model: stable
 
-sudo pip install virtualenv
-virtualenv $WORKSPACE/venv
-# shellcheck source=$WORKSPACE/venv/bin/activate disable=SC1091
-source $WORKSPACE/venv/bin/activate
-pip install --upgrade pip
-pip install -r releases/scripts/requirements.txt
+releases:
+ - version: opnfv-8.1.0
+ location:
+ barometer: 6fe9f36a63e71bd4614260cca0664b02b95c8b9c
+
+branches:
+ - name: stable/hunter
+ location:
+ barometer: 6fe9f36a63e71bd4614260cca0664b02b95c8b9c
diff --git a/releases/hunter/bottlenecks.yaml b/releases/hunter/bottlenecks.yaml
new file mode 100644
index 000000000..43c055352
--- /dev/null
+++ b/releases/hunter/bottlenecks.yaml
@@ -0,0 +1,14 @@
+---
+project: bottlenecks
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/hunter
+ location:
+ bottlenecks: 80edaf3c201bec85fe8a403e03b68899c08df3bb
+
+releases:
+ - version: opnfv-8.0.0
+ location:
+ bottlenecks: 80edaf3c201bec85fe8a403e03b68899c08df3bb
diff --git a/releases/hunter/clover.yaml b/releases/hunter/clover.yaml
new file mode 100644
index 000000000..f35dde881
--- /dev/null
+++ b/releases/hunter/clover.yaml
@@ -0,0 +1,16 @@
+---
+project: clover
+project-type: feature
+release-model: stable
+
+releases:
+ - version: opnfv-8.0.0
+ location:
+ clover: 1301c1842b5b678a1c91ce6d81b1df85135186b6
+
+branches:
+ - name: stable/hunter
+ location:
+ clover: 3f86a3d611f4d2a4f21d9be2a2284ccf120db36e
+
+release-notes: https://opnfv-clover.readthedocs.io/en/stable-hunter/release/release-notes/release-notes.html
diff --git a/releases/hunter/container4nfv.yaml b/releases/hunter/container4nfv.yaml
new file mode 100644
index 000000000..dd119a374
--- /dev/null
+++ b/releases/hunter/container4nfv.yaml
@@ -0,0 +1,9 @@
+---
+project: container4nfv
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/hunter
+ location:
+ container4nfv: 7a5ffe11c53e77e16383e712aad823e333b61aff
diff --git a/releases/hunter/cran.yaml b/releases/hunter/cran.yaml
new file mode 100644
index 000000000..3a11a4ece
--- /dev/null
+++ b/releases/hunter/cran.yaml
@@ -0,0 +1,9 @@
+---
+project: cran
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/hunter
+ location:
+ cran: 60a2982c7b9de09880005b23da88bff0a8d59467
diff --git a/releases/hunter/doctor.yaml b/releases/hunter/doctor.yaml
new file mode 100644
index 000000000..1678d8c17
--- /dev/null
+++ b/releases/hunter/doctor.yaml
@@ -0,0 +1,24 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2019 Nokia Corporation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: doctor
+project-type: feature
+release-model: stable
+
+releases:
+ - version: opnfv-8.0.0
+ location:
+ doctor: 3eb8e9800215f25712d62160042e472f63cec1e1
+
+branches:
+ - name: stable/hunter
+ location:
+ doctor: 73605c5c34b97ab56306bfa9af0f5888f3c7e46d
+
+release-notes: https://opnfv-doctor.readthedocs.io/en/stable-hunter/release/release-notes/index.html#doctor-releasenotes
diff --git a/releases/hunter/dovetail.yaml b/releases/hunter/dovetail.yaml
new file mode 100644
index 000000000..55dafd1af
--- /dev/null
+++ b/releases/hunter/dovetail.yaml
@@ -0,0 +1,9 @@
+---
+project: dovetail
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/hunter
+ location:
+ dovetail: d2291d12050c22cea7a92f68eaa75e2c9c50cd2f
diff --git a/releases/hunter/edgecloud.yaml b/releases/hunter/edgecloud.yaml
new file mode 100644
index 000000000..54f98f0c5
--- /dev/null
+++ b/releases/hunter/edgecloud.yaml
@@ -0,0 +1,9 @@
+---
+project: edgecloud
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/hunter
+ location:
+ edgecloud: 0aa38f991cc8defd2cf46bea93f16938a3d66927
diff --git a/releases/hunter/fds.yaml b/releases/hunter/fds.yaml
new file mode 100644
index 000000000..8c33218ac
--- /dev/null
+++ b/releases/hunter/fds.yaml
@@ -0,0 +1,9 @@
+---
+project: fds
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/hunter
+ location:
+ fds: 20cef81fd594f10949e151fd5a0a439af0a844e4
diff --git a/releases/hunter/fuel.yaml b/releases/hunter/fuel.yaml
new file mode 100644
index 000000000..2ec25976c
--- /dev/null
+++ b/releases/hunter/fuel.yaml
@@ -0,0 +1,27 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2019 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: fuel
+project-type: installer
+release-model: stable
+
+branches:
+ - name: stable/hunter
+ location:
+ fuel: f021e19b4797eb4f6830d86056fce919f1e1fe01
+
+releases:
+ - version: opnfv-8.0.0
+ location:
+ fuel: 4e64c8495d8cddab653e7b6f8db1d4335755d4c9
+ - version: opnfv-8.1.0
+ location:
+ fuel: 09ec1e9cc02268c45d5cfd5a289b53a52d9c6e94
+
+release-notes: https://opnfv-fuel.readthedocs.io/en/latest/release/release-notes/release-notes.html
diff --git a/releases/hunter/functest.yaml b/releases/hunter/functest.yaml
new file mode 100644
index 000000000..19f4431c3
--- /dev/null
+++ b/releases/hunter/functest.yaml
@@ -0,0 +1,29 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+releases:
+ - version: opnfv-8.0.0
+ location:
+ functest: a3355e22585c66823c430d6adcdef8dddacb33a1
+ - version: opnfv-8.1.0
+ location:
+ functest: 924f4a31ef22675630fa6009610074600b4f0e68
+ - version: opnfv-8.0.0
+ location:
+ functest-kubernetes: ec2bf0e8bd1b4d3b2c4e2894820d2cbb454e36e7
+ - version: opnfv-8.1.0
+ location:
+ functest-kubernetes: d02aa9919eb04974d096fcd54891723482f8d056
+
+branches:
+ - name: stable/hunter
+ location:
+ functest: d5fce8c12bc73c1b1547df2750563c271a3ab3f6
+ - name: stable/hunter
+ location:
+ functest-kubernetes: ca3c9cf6f57c87aee8c6f0b93f70d84e3df736de
+ - name: stable/hunter
+ location:
+ functest-xtesting: a09ece9bbe9cda52487cf5b443619fec1b3e09ca
diff --git a/releases/hunter/ipv6.yaml b/releases/hunter/ipv6.yaml
new file mode 100644
index 000000000..aa5e932a0
--- /dev/null
+++ b/releases/hunter/ipv6.yaml
@@ -0,0 +1,27 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: ipv6
+project-type: feature
+release-model: stable
+
+releases:
+ - version: opnfv-8.0.0
+ location:
+ ipv6: b113174fa52a1ac4bd20bdbdaea86b07897e7091
+ - version: opnfv-8.1.0
+ location:
+ ipv6: 9d3f926814387835029d80e49b8dfc9102c4ece7
+
+branches:
+ - name: stable/hunter
+ location:
+ ipv6: 3084e2b829dc88a129ee9cdb65afa81986a6bfed
+
+release-notes: https://opnfv-ipv6.readthedocs.io/en/stable-hunter/release/release-notes/
diff --git a/releases/hunter/opnfvdocs.yaml b/releases/hunter/opnfvdocs.yaml
new file mode 100644
index 000000000..7853e028c
--- /dev/null
+++ b/releases/hunter/opnfvdocs.yaml
@@ -0,0 +1,9 @@
+---
+project: opnfvdocs
+project-type: infra
+release-model: stable
+
+branches:
+ - name: stable/hunter
+ location:
+ opnfvdocs: e834a1712a717d6ab5f66d3c2b56e39f07f55f68
diff --git a/releases/hunter/samplevnf.yaml b/releases/hunter/samplevnf.yaml
new file mode 100644
index 000000000..66d08404c
--- /dev/null
+++ b/releases/hunter/samplevnf.yaml
@@ -0,0 +1,11 @@
+---
+project: samplevnf
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/hunter
+ location:
+ samplevnf: 8310d0fedb5fc13610b95e100c8639c97892a2b5
+
+release-notes: https://opnfv-samplevnf.readthedocs.io/en/latest/release/release-notes/release-notes.html
diff --git a/releases/hunter/sfc.yaml b/releases/hunter/sfc.yaml
new file mode 100644
index 000000000..6324b500b
--- /dev/null
+++ b/releases/hunter/sfc.yaml
@@ -0,0 +1,9 @@
+---
+project: sfc
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/hunter
+ location:
+ sfc: 50ad0d757b2015067c2b13adbbe59b746477b207
diff --git a/releases/hunter/snaps.yaml b/releases/hunter/snaps.yaml
new file mode 100644
index 000000000..3558ec707
--- /dev/null
+++ b/releases/hunter/snaps.yaml
@@ -0,0 +1,9 @@
+---
+project: snaps
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/hunter
+ location:
+ snaps: 567cdf25e1319fbc8a1a874dd18d21f88948e67e
diff --git a/releases/hunter/stor4nfv.yaml b/releases/hunter/stor4nfv.yaml
new file mode 100644
index 000000000..22bff3b21
--- /dev/null
+++ b/releases/hunter/stor4nfv.yaml
@@ -0,0 +1,11 @@
+---
+project: stor4nfv
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/hunter
+ location:
+ stor4nfv: fe427fc3f69ecc5e104a6fdb2c9f5d82f0254fdb
+
+release-notes: https://opnfv-stor4nfv.readthedocs.io/en/latest/release/release-notes/release-notes.html
diff --git a/releases/hunter/storperf.yaml b/releases/hunter/storperf.yaml
new file mode 100644
index 000000000..6abe0f241
--- /dev/null
+++ b/releases/hunter/storperf.yaml
@@ -0,0 +1,9 @@
+---
+project: storperf
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/hunter
+ location:
+ storperf: cc15a771fa60ddd63ac0a7eee3b43eb9f8cf2334
diff --git a/releases/hunter/vswitchperf.yaml b/releases/hunter/vswitchperf.yaml
new file mode 100644
index 000000000..b5053a5c2
--- /dev/null
+++ b/releases/hunter/vswitchperf.yaml
@@ -0,0 +1,14 @@
+---
+project: vswitchperf
+project-type: testing
+release-model: stable
+
+releases:
+ - version: opnfv-8.0.0
+ location:
+ vswitchperf: 589639d313c24104b2a27ef16baf716d1d274108
+
+branches:
+ - name: stable/hunter
+ location:
+ vswitchperf: 589639d313c24104b2a27ef16baf716d1d274108
diff --git a/releases/hunter/yardstick.yaml b/releases/hunter/yardstick.yaml
new file mode 100644
index 000000000..f19ec3a55
--- /dev/null
+++ b/releases/hunter/yardstick.yaml
@@ -0,0 +1,16 @@
+---
+project: yardstick
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/hunter
+ location:
+ yardstick: 85899baa90714ae15add21a330316793071168ae
+
+releases:
+ - version: opnfv-8.0.0
+ location:
+ yardstick: 9bd54d7914f4c025d4a50611f726d6154f914c7b
+
+release-notes: https://opnfv-yardstick.readthedocs.io/en/latest/release/release-notes/release-notes.html
diff --git a/releases/iruya/airship.yaml b/releases/iruya/airship.yaml
new file mode 100644
index 000000000..8c5555883
--- /dev/null
+++ b/releases/iruya/airship.yaml
@@ -0,0 +1,27 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: airship
+project-type: installer
+release-model: stable
+
+releases:
+ - version: opnfv-9.0.0
+ location:
+ airship: 36fc72a853884ddbb56b296db3b233e9aa96d671
+ - version: opnfv-9.0.1
+ location:
+ airship: e08cedd88c945449c6b7b57c4d353b7f00546e98
+
+branches:
+ - name: stable/iruya
+ location:
+ airship: 425c6b76dbb4bbf172f866b914009a547511615b
+
+release-notes: https://opnfv-airship.readthedocs.io/en/stable-iruya/release/release-notes/
diff --git a/releases/iruya/barometer.yaml b/releases/iruya/barometer.yaml
new file mode 100644
index 000000000..d0d83ed13
--- /dev/null
+++ b/releases/iruya/barometer.yaml
@@ -0,0 +1,25 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2018 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: barometer
+project-type: feature
+release-model: stable
+
+releases:
+ - version: opnfv-9.0.0
+ location:
+ barometer: 0353bf9ad8ef5674b864b67ea6c22ff0609dfcfb
+
+branches:
+ - name: stable/iruya
+ location:
+ barometer: ed42941aa613ba9612ce53c0e812309d14e35955
+
+# yamllint disable-line rule:line-length
+release-notes: https://opnfv-barometer.readthedocs.io/en/stable-iruya/release/release-notes/release-notes.html#barometer-release-notes
diff --git a/releases/iruya/bottlenecks.yaml b/releases/iruya/bottlenecks.yaml
new file mode 100644
index 000000000..f2fbb7133
--- /dev/null
+++ b/releases/iruya/bottlenecks.yaml
@@ -0,0 +1,9 @@
+---
+project: bottlenecks
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/iruya
+ location:
+ bottlenecks: b6c7bba0c071b42172283e4d97a3641f6464857a
diff --git a/releases/iruya/doctor.yaml b/releases/iruya/doctor.yaml
new file mode 100644
index 000000000..7a7a919fd
--- /dev/null
+++ b/releases/iruya/doctor.yaml
@@ -0,0 +1,24 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 Nokia Corporation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: doctor
+project-type: feature
+release-model: stable
+
+releases:
+ - version: opnfv-9.0.0
+ location:
+ doctor: b530bac56e3d35bf69e23595aa5bb9c3ceea61b6
+
+branches:
+ - name: stable/iruya
+ location:
+ doctor: b530bac56e3d35bf69e23595aa5bb9c3ceea61b6
+
+release-notes: https://opnfv-doctor.readthedocs.io/en/stable-iruya/release/release-notes/index.html#doctor-releasenotes
diff --git a/releases/iruya/edgecloud.yaml b/releases/iruya/edgecloud.yaml
new file mode 100644
index 000000000..123af8ac2
--- /dev/null
+++ b/releases/iruya/edgecloud.yaml
@@ -0,0 +1,9 @@
+---
+project: edgecloud
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/iruya
+ location:
+ edgecloud: 58c068f9455441e25f41de3224f20d83f84bcb46
diff --git a/releases/iruya/fds.yaml b/releases/iruya/fds.yaml
new file mode 100644
index 000000000..d09a93fe7
--- /dev/null
+++ b/releases/iruya/fds.yaml
@@ -0,0 +1,9 @@
+---
+project: fds
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/iruya
+ location:
+ fds: 20cef81fd594f10949e151fd5a0a439af0a844e4
diff --git a/releases/iruya/fuel.yaml b/releases/iruya/fuel.yaml
new file mode 100644
index 000000000..02f74b735
--- /dev/null
+++ b/releases/iruya/fuel.yaml
@@ -0,0 +1,24 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2019 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: fuel
+project-type: installer
+release-model: stable
+
+branches:
+ - name: stable/iruya
+ location:
+ fuel: d8f0a6b4fd9c3cb10780abcbd9b446c48b08cced
+
+releases:
+ - version: opnfv-9.0.0
+ location:
+ fuel: e6f1c557adf37baabf0afee6384ee99849cd25e8
+
+release-notes: https://opnfv-fuel.readthedocs.io/en/stable-iruya/release/release-notes/release-notes.html
diff --git a/releases/iruya/functest.yaml b/releases/iruya/functest.yaml
new file mode 100644
index 000000000..e44d81992
--- /dev/null
+++ b/releases/iruya/functest.yaml
@@ -0,0 +1,23 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+releases:
+ - version: opnfv-9.0.0
+ location:
+ functest: 95aad4cafe073e62b8624dbf7a5d973feee8d988
+ - version: opnfv-9.0.0
+ location:
+ functest-kubernetes: 049c730a5cf97eff34c82dc0968d709d4cbec1db
+
+branches:
+ - name: stable/iruya
+ location:
+ functest: daf01598d6030a29ccddc3023ee94fe9b0e3b320
+ - name: stable/iruya
+ location:
+ functest-kubernetes: fd130c880d4d185153efe3e3e0aa24c95052c320
+ - name: stable/iruya
+ location:
+ functest-xtesting: 838fc2ffe26f06cb11455145b7e73c452e85c15d
diff --git a/releases/iruya/opnfvdocs.yaml b/releases/iruya/opnfvdocs.yaml
new file mode 100644
index 000000000..0829b8a21
--- /dev/null
+++ b/releases/iruya/opnfvdocs.yaml
@@ -0,0 +1,9 @@
+---
+project: opnfvdocs
+project-type: infra
+release-model: stable
+
+branches:
+ - name: stable/iruya
+ location:
+ opnfvdocs: dc8743fed0cbb123a22053a1f057648755b86612
diff --git a/releases/iruya/rocket.yaml b/releases/iruya/rocket.yaml
new file mode 100644
index 000000000..2e078a080
--- /dev/null
+++ b/releases/iruya/rocket.yaml
@@ -0,0 +1,9 @@
+---
+project: rocket
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/iruya
+ location:
+ rocket: 14d9cc0e2e5f4e9d136ff3f9e8a29f0bb71eee5d
diff --git a/releases/iruya/stor4nfv.yaml b/releases/iruya/stor4nfv.yaml
new file mode 100644
index 000000000..0c2f1287a
--- /dev/null
+++ b/releases/iruya/stor4nfv.yaml
@@ -0,0 +1,11 @@
+---
+project: stor4nfv
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/iruya
+ location:
+ stor4nfv: 223c598c19bcd5190454369f16ed9ca25575be5e
+
+release-notes: https://opnfv-stor4nfv.readthedocs.io/en/latest/release/release-notes/release-notes.html
diff --git a/releases/iruya/storperf.yaml b/releases/iruya/storperf.yaml
new file mode 100644
index 000000000..9e8b3066f
--- /dev/null
+++ b/releases/iruya/storperf.yaml
@@ -0,0 +1,14 @@
+---
+project: storperf
+project-type: testing
+release-model: stable
+
+releases:
+ - version: opnfv-9.0.0
+ location:
+ storperf: 91ff597feecf4a4bac927d2961a25717cf04575b
+
+branches:
+ - name: stable/iruya
+ location:
+ storperf: 8d97778672f843cd79ac9862b5fe8c13ef31091f
diff --git a/releases/iruya/vswitchperf.yaml b/releases/iruya/vswitchperf.yaml
new file mode 100644
index 000000000..23e0d66c2
--- /dev/null
+++ b/releases/iruya/vswitchperf.yaml
@@ -0,0 +1,14 @@
+---
+project: vswitchperf
+project-type: testing
+release-model: stable
+
+releases:
+ - version: opnfv-9.0.0
+ location:
+ vswitchperf: a94395daf8d3312659b56a306ea64960a2cdd64a
+
+branches:
+ - name: stable/iruya
+ location:
+ vswitchperf: a94395daf8d3312659b56a306ea64960a2cdd64a
diff --git a/releases/iruya/yardstick.yaml b/releases/iruya/yardstick.yaml
new file mode 100644
index 000000000..428d231bd
--- /dev/null
+++ b/releases/iruya/yardstick.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2019 Huawei Technologies Co., Ltd. and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: yardstick
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/iruya
+ location:
+ yardstick: 13967af6479058515fea6c94c4d5b8dc9da536cc
diff --git a/releases/jerma/airship.yaml b/releases/jerma/airship.yaml
new file mode 100644
index 000000000..22bb2fdca
--- /dev/null
+++ b/releases/jerma/airship.yaml
@@ -0,0 +1,24 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: airship
+project-type: installer
+release-model: stable
+
+releases:
+ - version: opnfv-10.0.0
+ location:
+ airship: a9ef2c8eb110daa3a395f7cb118bc5d26c5fca69
+
+branches:
+ - name: stable/jerma
+ location:
+ airship: 4b2c638c2495be08953c1687ddbe689927d6cd8f
+
+release-notes: http://docs.opnfv.org/en/stable-jerma/submodules/airship/docs/release/release-notes/release-notes.html
diff --git a/releases/jerma/barometer.yaml b/releases/jerma/barometer.yaml
new file mode 100644
index 000000000..742d0a2fb
--- /dev/null
+++ b/releases/jerma/barometer.yaml
@@ -0,0 +1,21 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: barometer
+project-type: feature
+release-model: stable
+
+releases:
+ - version: opnfv-10.0.0
+ location:
+ barometer: 968f66fc2837744dfe8db69d2314cef3f0cb0800
+branches:
+ - name: stable/jerma
+ location:
+ barometer: f521375d6e898bbdef7abcd9cae25677af96bd82
diff --git a/releases/jerma/cirv.yaml b/releases/jerma/cirv.yaml
new file mode 100644
index 000000000..57cb08d87
--- /dev/null
+++ b/releases/jerma/cirv.yaml
@@ -0,0 +1,34 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: cirv
+project-type: testing
+release-model: stable
+
+releases:
+ - version: opnfv-10.0.0
+ location:
+ cirv: f38765a83c1ba03b5619c1c66ed7bf1a2b6b00be
+ - version: opnfv-10.0.0
+ location:
+ cirv-hdv: 2d145d4f1fd231def2c9d52a71267031b938c0ac
+ - version: opnfv-10.0.0
+ location:
+ cirv-sdv: d1cc451f841f995028d57677e897a39229a065a2
+
+branches:
+ - name: stable/jerma
+ location:
+ cirv: f38765a83c1ba03b5619c1c66ed7bf1a2b6b00be
+ - name: stable/jerma
+ location:
+ cirv-hdv: 2d145d4f1fd231def2c9d52a71267031b938c0ac
+ - name: stable/jerma
+ location:
+ cirv-sdv: d1cc451f841f995028d57677e897a39229a065a2
diff --git a/releases/jerma/doctor.yaml b/releases/jerma/doctor.yaml
new file mode 100644
index 000000000..d28b4e780
--- /dev/null
+++ b/releases/jerma/doctor.yaml
@@ -0,0 +1,24 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 Nokia Corporation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: doctor
+project-type: feature
+release-model: stable
+
+releases:
+ - version: opnfv-10.0.0
+ location:
+ doctor: 72a1f8c92f1692f1ea8dcb5bc706ec9939c30e0a
+
+branches:
+ - name: stable/jerma
+ location:
+ doctor: 72a1f8c92f1692f1ea8dcb5bc706ec9939c30e0a
+
+release-notes: https://opnfv-doctor.readthedocs.io/en/stable-jerma/release/release-notes/index.html
diff --git a/releases/jerma/dovetail.yaml b/releases/jerma/dovetail.yaml
new file mode 100644
index 000000000..7fd61ecd8
--- /dev/null
+++ b/releases/jerma/dovetail.yaml
@@ -0,0 +1,29 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright 2020 Huawei Technologies Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##############################################################################
+---
+project: dovetail
+project-type: testing
+release-model: stable
+
+releases:
+ - version: opnfv-10.0.0
+ location:
+ dovetail-webportal: 7a13dabbedff1184c12e3e4f5cf56545f1d01397
+
+branches:
+ - name: stable/jerma
+ location:
+ dovetail-webportal: 7a13dabbedff1184c12e3e4f5cf56545f1d01397
diff --git a/releases/jerma/functest.yaml b/releases/jerma/functest.yaml
new file mode 100644
index 000000000..c635b1d82
--- /dev/null
+++ b/releases/jerma/functest.yaml
@@ -0,0 +1,15 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/jerma
+ location:
+ functest: 574243656df6acae28094c62f33410ef1fc3e7b5
+ - name: stable/jerma
+ location:
+ functest-kubernetes: bd28b49aa1ba7d8127144c2a3eef0536d9bdc606
+ - name: stable/jerma
+ location:
+ functest-xtesting: 4bef764e2b3976f73e91fed7bca97b40d4123522
diff --git a/releases/jerma/kuberef.yaml b/releases/jerma/kuberef.yaml
new file mode 100644
index 000000000..1eaa395f3
--- /dev/null
+++ b/releases/jerma/kuberef.yaml
@@ -0,0 +1,22 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: kuberef
+project-type: installer
+release-model: stable
+
+releases:
+ - version: opnfv-10.0.0
+ location:
+ kuberef: d3916f20f0b10da360cc6c38b61e1ee04d6278c5
+
+branches:
+ - name: stable/jerma
+ location:
+ kuberef: d3916f20f0b10da360cc6c38b61e1ee04d6278c5
diff --git a/releases/jerma/moon.yaml b/releases/jerma/moon.yaml
new file mode 100644
index 000000000..742bc2165
--- /dev/null
+++ b/releases/jerma/moon.yaml
@@ -0,0 +1,9 @@
+---
+project: moon
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/jerma
+ location:
+ moon: 7bb53c64da2dcf88894bfd31503accdd81498f3d
diff --git a/releases/jerma/opnfvdocs.yaml b/releases/jerma/opnfvdocs.yaml
new file mode 100644
index 000000000..a5d0c1447
--- /dev/null
+++ b/releases/jerma/opnfvdocs.yaml
@@ -0,0 +1,14 @@
+---
+project: opnfvdocs
+project-type: infra
+release-model: stable
+
+releases:
+ - version: opnfv-10.0.0
+ location:
+ opnfvdocs: 8ae4bb381bef8ad9f71bd0f46323799a90bb7deb
+
+branches:
+ - name: stable/jerma
+ location:
+ opnfvdocs: 1d0a0c19a3ca2d8d161a37232e7bf2366223bd49
diff --git a/releases/jerma/samplevnf.yaml b/releases/jerma/samplevnf.yaml
new file mode 100644
index 000000000..27aebee0e
--- /dev/null
+++ b/releases/jerma/samplevnf.yaml
@@ -0,0 +1,24 @@
+# SPDX-License-Identifier: Apache-2.0
+# ##############################################################################
+# # Copyright (c) 2020 The Linux Foundation and others.
+# # All rights reserved. This program and the accompanying materials
+# # are made available under the terms of the Apache License, Version 2.0
+# # which accompanies this distribution, and is available at
+# # http://www.apache.org/licenses/LICENSE-2.0
+# ##############################################################################
+---
+project: samplevnf
+project-type: testing
+release-model: stable
+
+releases:
+ - version: opnfv-10.0.0
+ location:
+ samplevnf: d5801e449ca4c7115c2c11bf164bcce7b149917c
+
+branches:
+ - name: stable/jerma
+ location:
+ samplevnf: d5801e449ca4c7115c2c11bf164bcce7b149917c
+
+release-notes: https://docs.opnfv.org/projects/samplevnf/en/stable-jerma/release/release-notes/index.html
diff --git a/releases/jerma/vswitchperf.yaml b/releases/jerma/vswitchperf.yaml
new file mode 100644
index 000000000..ddb6c4050
--- /dev/null
+++ b/releases/jerma/vswitchperf.yaml
@@ -0,0 +1,14 @@
+---
+project: vswitchperf
+project-type: testing
+release-model: stable
+
+releases:
+ - version: opnfv-10.0.0
+ location:
+ vswitchperf: 90ef48d4342a983c7733b8c21bb902d1dab2685a
+
+branches:
+ - name: stable/jerma
+ location:
+ vswitchperf: 90ef48d4342a983c7733b8c21bb902d1dab2685a
diff --git a/releases/kali/airship.yaml b/releases/kali/airship.yaml
new file mode 100644
index 000000000..6eb5f6ffd
--- /dev/null
+++ b/releases/kali/airship.yaml
@@ -0,0 +1,19 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: airship
+project-type: installer
+release-model: stable
+
+branches:
+ - name: stable/kali
+ location:
+ airship: 88723e1bb827b0efa57f47cb12d0ff1c1fe7802c
+
+release-notes: http://docs.opnfv.org/en/stable-kali/submodules/airship/docs/release/release-notes/release-notes.html
diff --git a/releases/kali/barometer.yaml b/releases/kali/barometer.yaml
new file mode 100644
index 000000000..a916ce16a
--- /dev/null
+++ b/releases/kali/barometer.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2021 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: barometer
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/kali
+ location:
+ barometer: 08fd79b551dde788da6974705df9c9d3e8de01d5
diff --git a/utils/build-server-ansible/vars/docker-compose-CentOS.yml b/releases/kali/cirv.yaml
index fc4bcba7e..3a5fe6240 100644
--- a/utils/build-server-ansible/vars/docker-compose-CentOS.yml
+++ b/releases/kali/cirv.yaml
@@ -1,12 +1,17 @@
-#############################################################################
-# Copyright (c) 2016 The Linux Foundation and others.
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
---
-- name: Ensure docker compose is installed.
- yum:
- name: 'docker-compose'
- state: present
+project: cirv
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/kali
+ location:
+ cirv-sdv: 3daf232ec65cb42c9c4a05bed1f185fe1aa4bd0b
diff --git a/releases/kali/functest.yaml b/releases/kali/functest.yaml
new file mode 100644
index 000000000..efe050046
--- /dev/null
+++ b/releases/kali/functest.yaml
@@ -0,0 +1,15 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/kali
+ location:
+ functest: f74088bb1ae93feaf56c7ec7d1f1e78c97d8de9a
+ - name: stable/kali
+ location:
+ functest-kubernetes: 83d3161c52c8cc8d9e4f52d7a693bbcc32508379
+ - name: stable/kali
+ location:
+ functest-xtesting: 0997d4c739baf30d13529d4408b3761e5c5e8919
diff --git a/releases/kali/kuberef.yaml b/releases/kali/kuberef.yaml
new file mode 100644
index 000000000..e4c826e85
--- /dev/null
+++ b/releases/kali/kuberef.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: kuberef
+project-type: installer
+release-model: stable
+
+branches:
+ - name: stable/kali
+ location:
+ kuberef: e917625c424d33e892fa253cdd02da926b843325
diff --git a/releases/kali/vineperf.yaml b/releases/kali/vineperf.yaml
new file mode 100644
index 000000000..2dcfe5cc9
--- /dev/null
+++ b/releases/kali/vineperf.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: vineperf
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/kali
+ location:
+ vineperf: 53bfad9e49e8d0ea585e92d6efda6c7a2fd779c1
diff --git a/releases/lakelse/barometer.yaml b/releases/lakelse/barometer.yaml
new file mode 100644
index 000000000..f85809a21
--- /dev/null
+++ b/releases/lakelse/barometer.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2021 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: barometer
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/lakelse
+ location:
+ barometer: 52e3a8bc19d86205e81442acfa9512006207c023
diff --git a/releases/lakelse/cirv.yaml b/releases/lakelse/cirv.yaml
new file mode 100644
index 000000000..f91d6591f
--- /dev/null
+++ b/releases/lakelse/cirv.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: cirv
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/lakelse
+ location:
+ cirv-sdv: 31fb310d3fd1c9c1f12cfe0c654870e24f5efab6
diff --git a/releases/lakelse/kuberef.yaml b/releases/lakelse/kuberef.yaml
new file mode 100644
index 000000000..f5075d1ac
--- /dev/null
+++ b/releases/lakelse/kuberef.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: kuberef
+project-type: installer
+release-model: stable
+
+branches:
+ - name: stable/lakelse
+ location:
+ kuberef: f773a3c4c6f0f3f749c417eb909b0d153a613b17
diff --git a/releases/lakelse/opnfvdocs.yaml b/releases/lakelse/opnfvdocs.yaml
new file mode 100644
index 000000000..519c31dc2
--- /dev/null
+++ b/releases/lakelse/opnfvdocs.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: opnfvdocs
+project-type: infra
+release-model: stable
+
+branches:
+ - name: stable/lakelse
+ location:
+ opnfvdocs: 5af53a3b05353539a072a41a3493c579c63f7b3d
diff --git a/releases/lakelse/vineperf.yaml b/releases/lakelse/vineperf.yaml
new file mode 100644
index 000000000..199fe02ae
--- /dev/null
+++ b/releases/lakelse/vineperf.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: vineperf
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/lakelse
+ location:
+ vineperf: 2fcc501a8cf9f5a7dbcedf48946e225dfc485ff8
diff --git a/releases/leguer/functest.yaml b/releases/leguer/functest.yaml
new file mode 100644
index 000000000..8ffd79be0
--- /dev/null
+++ b/releases/leguer/functest.yaml
@@ -0,0 +1,15 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/leguer
+ location:
+ functest: a2af4693b352eaf6a84885eacb120cf4e260e5c4
+ - name: stable/leguer
+ location:
+ functest-kubernetes: 687723872c04c4b889cf4b529f6534776b16908d
+ - name: stable/leguer
+ location:
+ functest-xtesting: cd0f0900cb9290f2e84ad184842d8d8ce543a2e7
diff --git a/releases/moselle/barometer.yaml b/releases/moselle/barometer.yaml
new file mode 100644
index 000000000..c636486fc
--- /dev/null
+++ b/releases/moselle/barometer.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2021 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: barometer
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/moselle
+ location:
+ barometer: 31d3f2708010dc36377958b6fefb0459301e50af
diff --git a/releases/moselle/kuberef.yaml b/releases/moselle/kuberef.yaml
new file mode 100644
index 000000000..f483bc90f
--- /dev/null
+++ b/releases/moselle/kuberef.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: kuberef
+project-type: installer
+release-model: stable
+
+branches:
+ - name: stable/moselle
+ location:
+ kuberef: f845220cd17f89c8e1c68aecb3f0c7e82a56aa03
diff --git a/releases/moselle/opnfvdocs.yaml b/releases/moselle/opnfvdocs.yaml
new file mode 100644
index 000000000..f9be3d357
--- /dev/null
+++ b/releases/moselle/opnfvdocs.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: opnfvdocs
+project-type: infra
+release-model: stable
+
+branches:
+ - name: stable/moselle
+ location:
+ opnfvdocs: 79b31e661c5676f5f21acb7b2870bb0ed8d0e301
diff --git a/utils/build-server-ansible/inventory.ini b/releases/moselle/thoth.yaml
index 115b1306e..23a3de11a 100644
--- a/utils/build-server-ansible/inventory.ini
+++ b/releases/moselle/thoth.yaml
@@ -1,8 +1,17 @@
-#############################################################################
-# Copyright (c) 2016 The Linux Foundation and others.
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2021 The Linux Foundation and others.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-localhost ansible_connection=local
+---
+project: thoth
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/moselle
+ location:
+ thoth: 9e9df400ba7f9259a38484d232fe11e08edb4da4
diff --git a/releases/moselle/vineperf.yaml b/releases/moselle/vineperf.yaml
new file mode 100644
index 000000000..709352926
--- /dev/null
+++ b/releases/moselle/vineperf.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: vineperf
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/moselle
+ location:
+ vineperf: aa75d7b0b5041aafa9a34bc95600db5b01a5791e
diff --git a/releases/nile/barometer.yaml b/releases/nile/barometer.yaml
new file mode 100644
index 000000000..b607a49ec
--- /dev/null
+++ b/releases/nile/barometer.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2021 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: barometer
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/nile
+ location:
+ barometer: 41195d4c2c3418e8c2daa44c792406be2fc54964
diff --git a/releases/nile/opnfvdocs.yaml b/releases/nile/opnfvdocs.yaml
new file mode 100644
index 000000000..0b427ec32
--- /dev/null
+++ b/releases/nile/opnfvdocs.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2022 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: opnfvdocs
+project-type: infra
+release-model: stable
+
+branches:
+ - name: stable/nile
+ location:
+ opnfvdocs: 397bbce7edc03354802dacd1524f5b306979ebd3
diff --git a/jjb/kvmfornfv/kvmfornfv-build.sh b/releases/nile/thoth.yaml
index ff918492c..59d8c9e77 100755..100644
--- a/jjb/kvmfornfv/kvmfornfv-build.sh
+++ b/releases/nile/thoth.yaml
@@ -1,16 +1,17 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
+# SPDX-License-Identifier: Apache-2.0
##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
+# Copyright (c) 2021 The Linux Foundation and others.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-# build output directory
-OUTPUT_DIR=$WORKSPACE/build_output
-mkdir -p $OUTPUT_DIR
+---
+project: thoth
+project-type: feature
+release-model: stable
 
-# start the build
-cd $WORKSPACE
-./ci/build.sh $OUTPUT_DIR
+branches:
+ - name: stable/nile
+ location:
+ thoth: 3b4fa67222bd3e33cb921869395b46193d353bb1
diff --git a/releases/nile/vineperf.yaml b/releases/nile/vineperf.yaml
new file mode 100644
index 000000000..88252799c
--- /dev/null
+++ b/releases/nile/vineperf.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: vineperf
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/nile
+ location:
+ vineperf: e0b5af5ab0a36bb63c880b9137dd02e26e4c94cc
diff --git a/releases/orinoco/barometer.yaml b/releases/orinoco/barometer.yaml
new file mode 100644
index 000000000..897e8dba0
--- /dev/null
+++ b/releases/orinoco/barometer.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2021 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: barometer
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/orinoco
+ location:
+ barometer: 41195d4c2c3418e8c2daa44c792406be2fc54964
diff --git a/releases/orinoco/kuberef.yaml b/releases/orinoco/kuberef.yaml
new file mode 100644
index 000000000..dfbffa0ce
--- /dev/null
+++ b/releases/orinoco/kuberef.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: kuberef
+project-type: installer
+release-model: stable
+
+branches:
+ - name: stable/orinoco
+ location:
+ kuberef: 486015325b01283f31595abd8fbdbf93530f6a5a
diff --git a/releases/orinoco/opnfvdocs.yaml b/releases/orinoco/opnfvdocs.yaml
new file mode 100644
index 000000000..e05c3492b
--- /dev/null
+++ b/releases/orinoco/opnfvdocs.yaml
@@ -0,0 +1,9 @@
+---
+project: opnfvdocs
+project-type: infra
+release-model: stable
+
+branches:
+ - name: stable/orinoco
+ location:
+ opnfvdocs: fc7cdf4af32a4ea6ffb0e1719650d8bb4e460185
diff --git a/utils/build-server-ansible/vars/docker-compose-Ubuntu.yml b/releases/orinoco/thoth.yaml
index f985b6a4d..f19c443c1 100644
--- a/utils/build-server-ansible/vars/docker-compose-Ubuntu.yml
+++ b/releases/orinoco/thoth.yaml
@@ -1,12 +1,17 @@
-#############################################################################
-# Copyright (c) 2016 The Linux Foundation and others.
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2021 The Linux Foundation and others.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
-#############################################################################
+##############################################################################
---
-- name: Ensure docker compose is installed
- apt:
- name: 'docker-compose'
- state: present
+project: thoth
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/orinoco
+ location:
+ thoth: 77a6ce27efa02f72568950e37aed3853f05ecd4b
diff --git a/releases/schema.yaml b/releases/schema.yaml
index c3838760a..f84c87093 100644
--- a/releases/schema.yaml
+++ b/releases/schema.yaml
@@ -46,7 +46,7 @@ properties:
properties:
name:
type: 'string'
- pattern: '^stable/[a-z]+$'
+ pattern: '^stable/[a-z0-9.]+$'
location:
type: 'object'
required: ['name', 'location']
diff --git a/releases/scripts/create_branch.py b/releases/scripts/create_branch.py
deleted file mode 100644
index fa3c92def..000000000
--- a/releases/scripts/create_branch.py
+++ /dev/null
@@ -1,143 +0,0 @@
-#!/usr/bin/env python2
-# SPDX-License-Identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 The Linux Foundation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-"""
-Create Gerrit Branchs
-"""
-
-import argparse
-
-try:
- import ConfigParser
-except ImportError:
- import configparser as ConfigParser
-
-import logging
-import os
-import yaml
-
-from requests.compat import quote
-from requests.exceptions import RequestException
-
-from pygerrit2.rest import GerritRestAPI
-from pygerrit2.rest.auth import HTTPDigestAuthFromNetrc, HTTPBasicAuthFromNetrc
-
-
-logging.basicConfig(level=logging.INFO)
-
-
-def quote_branch(arguments):
- """
- Quote is used here to escape the '/' in branch name. By
- default '/' is listed in 'safe' characters which aren't escaped.
- quote is not used in the data of the PUT request, as quoting for
- arguments is handled by the request library
- """
- new_args = arguments.copy()
- new_args['branch'] = quote(new_args['branch'], '')
- return new_args
-
-
-def create_branch(api, arguments):
- """
- Create a branch using the Gerrit REST API
- """
- logger = logging.getLogger(__file__)
-
- branch_data = """
- {
- "ref": "%(branch)s"
- "revision": "%(commit)s"
- }""" % arguments
-
- # First verify the commit exists, otherwise the branch will be
- # created at HEAD
- try:
- request = api.get("/projects/%(project)s/commits/%(commit)s" %
- arguments)
- logger.debug(request)
- logger.debug("Commit exists: %(commit)s", arguments)
- except RequestException as err:
- if hasattr(err, 'response') and err.response.status_code in [404]:
- logger.warn("Commit %(commit)s for %(project)s does"
- " not exist. Not creating branch.", arguments)
- logger.warn(err)
- else:
- logger.error("Error: %s", str(err))
- # Skip trying to create the branch
- return
-
- # Try to create the branch and let us know if it already exist.
- try:
- request = api.put("/projects/%(project)s/branches/%(branch)s" %
- quote_branch(arguments), branch_data)
- logger.info("Branch %(branch)s for %(project)s successfully created",
- arguments)
- except RequestException as err:
- if hasattr(err, 'response') and err.response.status_code in [412, 409]:
- logger.info("Branch %(branch)s already created for %(project)s",
- arguments)
- logger.info(err)
- else:
- logger.error("Error: %s", str(err))
-
-
-def main():
- """Given a yamlfile that follows the release syntax, create branches
- in Gerrit listed under branches"""
-
- config = ConfigParser.ConfigParser()
- config.read(os.path.join(os.path.abspath(os.path.dirname(__file__)),
- 'defaults.cfg'))
- config.read([os.path.expanduser('~/releases.cfg'), 'releases.cfg'])
-
- gerrit_url = config.get('gerrit', 'url')
-
- parser = argparse.ArgumentParser()
- parser.add_argument('--file', '-f',
- type=argparse.FileType('r'),
- required=True)
- parser.add_argument('--basicauth', '-b', action='store_true')
- args = parser.parse_args()
-
- GerritAuth = HTTPDigestAuthFromNetrc
- if args.basicauth:
- GerritAuth = HTTPBasicAuthFromNetrc
-
- try:
- auth = GerritAuth(url=gerrit_url)
- except ValueError as err:
- logging.error("%s for %s", err, gerrit_url)
- quit(1)
- restapi = GerritRestAPI(url=gerrit_url, auth=auth)
-
- project = yaml.safe_load(args.file)
-
- create_branches(restapi, project)
-
-
-def create_branches(restapi, project):
- """Create branches for a specific project defined in the release
- file"""
-
- branches = []
- for branch in project['branches']:
- repo, ref = next(iter(branch['location'].items()))
- branches.append({
- 'project': repo,
- 'branch': branch['name'],
- 'commit': ref
- })
-
- for branch in branches:
- create_branch(restapi, branch)
-
-
-if __name__ == "__main__":
- main()
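
The removed script drove a single Gerrit REST endpoint (PUT /projects/{project}/branches/{branch}, as its api.put call shows); its hand-built JSON payload also appears to be missing a comma between the "ref" and "revision" fields. An equivalent call can be sketched with curl, sending only "revision" in the body and assuming .netrc credentials plus the usual OPNFV Gerrit base URL (gerrit.opnfv.org/gerrit is an assumption here):

    # Sketch only: create stable/orinoco on the kuberef repo at the pinned commit.
    # As quote_branch() noted, the '/' in the branch name must be URL-encoded.
    curl --netrc -X PUT \
        -H "Content-Type: application/json" \
        -d '{"revision": "486015325b01283f31595abd8fbdbf93530f6a5a"}' \
        "https://gerrit.opnfv.org/gerrit/a/projects/kuberef/branches/stable%2Forinoco"
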
diff --git a/releases/scripts/release-status.sh b/releases/scripts/release-status.sh
index 6790100a7..1fb4f70e2 100755
--- a/releases/scripts/release-status.sh
+++ b/releases/scripts/release-status.sh
@@ -15,7 +15,7 @@ RELEASE="${RELEASE:-fraser}"
[ -a repos.txt ] && rm repos.txt
for project in releases/$RELEASE/*; do
- python releases/scripts/repos.py -n -f $project >> repos.txt
+ python3 releases/scripts/repos.py -n -f $project >> repos.txt
done
while read -r repo
diff --git a/releases/scripts/repos.py b/releases/scripts/repos.py
index 47ce42d88..91c4e9300 100644
--- a/releases/scripts/repos.py
+++ b/releases/scripts/repos.py
@@ -63,20 +63,28 @@ def main():
type=str,
help="Only print"
"SHAs for the specified release")
+ parser.add_argument('--branches', '-b',
+ action='store_true',
+ default=False,
+ help="Print Branch info")
+
args = parser.parse_args()
project = yaml.safe_load(args.file)
- list_repos(project, args)
+ if args.branches:
+ list_branches(project, args)
+ else:
+ list_repos(project, args)
def list_repos(project, args):
"""List repositories in the project file"""
lookup = project.get('releases', [])
+
if 'releases' not in project:
exit(0)
-
repos = set()
for item in lookup:
repo, ref = next(iter(item['location'].items()))
@@ -90,5 +98,24 @@ def list_repos(project, args):
print(repo)
+def list_branches(project, args):
+ """List branches in the project file"""
+
+ lookup = project.get('branches', [])
+
+ if 'branches' not in project:
+ exit(0)
+ repos = set()
+ for item in lookup:
+ repo, ref = next(iter(item['location'].items()))
+ if args.names:
+ repos.add(Repo(repo))
+ elif args.release and item['name'] == args.release:
+ repos.add(Repo(repo, ref))
+ elif not args.release:
+ repos.add(Repo(repo, item['name'], ref))
+ for repo in repos:
+ print(repo)
+
if __name__ == "__main__":
main()
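
With the new flag the script can report branch/commit information as well as repository names. A short usage sketch against one of the release files added in this change:

    # Sketch only: print branch/ref information for one of the new release files.
    python3 releases/scripts/repos.py -f releases/orinoco/opnfvdocs.yaml --branches
    # Restrict the output to repository names only:
    python3 releases/scripts/repos.py -f releases/orinoco/opnfvdocs.yaml --branches -n
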
diff --git a/releases/scripts/requirements.txt b/releases/scripts/requirements.txt
deleted file mode 100644
index 5a7d216e9..000000000
--- a/releases/scripts/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-pygerrit2 < 2.1.0
-PyYAML < 4.0
-jsonschema < 2.7.0
-rfc3987
-ruamel.yaml
diff --git a/releases/v1.21/functest.yaml b/releases/v1.21/functest.yaml
new file mode 100644
index 000000000..44b630cdc
--- /dev/null
+++ b/releases/v1.21/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.21
+ location:
+ functest-kubernetes: 3a58b1019c0f0a3e4ec3ce8be16ffd9a15edf518
diff --git a/releases/v1.22/functest.yaml b/releases/v1.22/functest.yaml
new file mode 100644
index 000000000..c2683e0e5
--- /dev/null
+++ b/releases/v1.22/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.22
+ location:
+ functest-kubernetes: 56d89152af91b73ee32d74062e7c366ca7d72e47
diff --git a/releases/v1.23/functest.yaml b/releases/v1.23/functest.yaml
new file mode 100644
index 000000000..9acccb215
--- /dev/null
+++ b/releases/v1.23/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.23
+ location:
+ functest-kubernetes: e3c09959d8639b6e798759bb74e98a752b98b7f4
diff --git a/releases/v1.24/functest.yaml b/releases/v1.24/functest.yaml
new file mode 100644
index 000000000..5e35ec315
--- /dev/null
+++ b/releases/v1.24/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.24
+ location:
+ functest-kubernetes: 57d0741942c23ab66d38b7269e68694b92de9646
diff --git a/releases/v1.25/functest.yaml b/releases/v1.25/functest.yaml
new file mode 100644
index 000000000..f746d968e
--- /dev/null
+++ b/releases/v1.25/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.25
+ location:
+ functest-kubernetes: 7d1b01dafdfcd65bea716b61a6036a972dcf395b
diff --git a/releases/v1.26/functest.yaml b/releases/v1.26/functest.yaml
new file mode 100644
index 000000000..37ae6037c
--- /dev/null
+++ b/releases/v1.26/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.26
+ location:
+ functest-kubernetes: f6eba4b5b93b994a6773867fdb123c2c34a56415
diff --git a/releases/v1.27/functest.yaml b/releases/v1.27/functest.yaml
new file mode 100644
index 000000000..ae709534a
--- /dev/null
+++ b/releases/v1.27/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.27
+ location:
+ functest-kubernetes: e95b4b94abc1ad55144b07b137e2c53d045bd649
diff --git a/releases/v1.28/functest.yaml b/releases/v1.28/functest.yaml
new file mode 100644
index 000000000..a4cc15ccf
--- /dev/null
+++ b/releases/v1.28/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.28
+ location:
+ functest-kubernetes: e95b4b94abc1ad55144b07b137e2c53d045bd649
diff --git a/releases/v1.29/functest.yaml b/releases/v1.29/functest.yaml
new file mode 100644
index 000000000..dcc78ed85
--- /dev/null
+++ b/releases/v1.29/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.29
+ location:
+ functest-kubernetes: 9cb990f8f874f395fe6aa7f045fa9dcc3fdb35c4
diff --git a/releases/wallaby/functest.yaml b/releases/wallaby/functest.yaml
new file mode 100644
index 000000000..19f760798
--- /dev/null
+++ b/releases/wallaby/functest.yaml
@@ -0,0 +1,12 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/wallaby
+ location:
+ functest: be8185f6345d499c80c71bf0dbbd1402069e1c81
+ - name: stable/wallaby
+ location:
+ functest-xtesting: ecedaa857ff821cad5dc9fa423d1a596ea255efe
diff --git a/releases/xena/functest.yaml b/releases/xena/functest.yaml
new file mode 100644
index 000000000..cdd9904d3
--- /dev/null
+++ b/releases/xena/functest.yaml
@@ -0,0 +1,12 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/xena
+ location:
+ functest: 1d976335e832e4b48fb7bd2cda3665c28dfabe86
+ - name: stable/xena
+ location:
+ functest-xtesting: f32ab02c42a775e57ba34153d2d401bee34e88b8
diff --git a/releases/yoga/functest.yaml b/releases/yoga/functest.yaml
new file mode 100644
index 000000000..406ecaaf7
--- /dev/null
+++ b/releases/yoga/functest.yaml
@@ -0,0 +1,12 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/yoga
+ location:
+ functest: 95206bab447d0d707edc1623eca2e14ef3ce1b8a
+ - name: stable/yoga
+ location:
+ functest-xtesting: 33a884eae2153a2ea91b4bce79b06028e783e8bf
diff --git a/releases/zed/functest.yaml b/releases/zed/functest.yaml
new file mode 100644
index 000000000..0efa75ffd
--- /dev/null
+++ b/releases/zed/functest.yaml
@@ -0,0 +1,12 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/zed
+ location:
+ functest: 55879c22390bd24e696761d4c04fea34ed5a1e87
+ - name: stable/zed
+ location:
+ functest-xtesting: 25c7404b8d2a8f36170f6067c97915ddb6128bfd
diff --git a/test-requirements.txt b/test-requirements.txt
new file mode 100644
index 000000000..ea58b2475
--- /dev/null
+++ b/test-requirements.txt
@@ -0,0 +1,3 @@
+sphinxcontrib-httpdomain
+jenkins-job-builder
+piccolo-theme
diff --git a/tox.ini b/tox.ini
index d2f416884..23f7c1a84 100644
--- a/tox.ini
+++ b/tox.ini
@@ -4,28 +4,25 @@
# and then run "tox" from this directory.
[tox]
-envlist = jjb,docs,docs-linkcheck
+envlist = jjb,docs
skipsdist = True
[testenv]
+basepython = python3.10
usedevelop = False
-setenv=
- HOME = {envtmpdir}
- PYTHONPATH = {toxinidir}
+deps =
+ -chttps://opendev.org/openstack/requirements/raw/branch/stable/zed/upper-constraints.txt
+ -cupper-constraints.txt
+ -r{toxinidir}/test-requirements.txt
[testenv:jjb]
-deps =
- jenkins-job-builder==2.0.3
commands=
- jenkins-jobs test -o job_output -r {posargs:"jjb/"}
+ jenkins-jobs test -o {envtmpdir}/job_output -r {toxinidir}/jjb
[testenv:docs]
-deps = -r{toxinidir}/docs/requirements.txt
commands =
- sphinx-build -b html -n -d {envtmpdir}/doctrees ./docs {toxinidir}/docs/_build/html
- echo "Generated docs available in {toxinidir}/docs/_build/html"
-whitelist_externals = echo
+ sphinx-build -b html -n -d {envtmpdir}/doctrees ./docs {toxinidir}/docs/_build/html
[testenv:docs-linkcheck]
-deps = -r{toxinidir}/docs/requirements.txt
-commands = sphinx-build -b linkcheck -d {envtmpdir}/doctrees ./docs {toxinidir}/docs/_build/linkcheck
+commands =
+ sphinx-build -b linkcheck -d {envtmpdir}/doctrees ./docs {toxinidir}/docs/_build/linkcheck
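
With this rewrite the environments share one dependency set (test-requirements.txt under the two constraint files) instead of per-environment deps lists, and docs-linkcheck drops out of the default envlist while remaining callable. Typical local runs, assuming Python 3.10 and tox are installed:

    # Sketch only: run the JJB syntax check and the documentation builds locally.
    tox -e jjb
    tox -e docs
    tox -e docs-linkcheck   # no longer in the default envlist, but still defined
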
diff --git a/upper-constraints.txt b/upper-constraints.txt
new file mode 100644
index 000000000..b6d67807d
--- /dev/null
+++ b/upper-constraints.txt
@@ -0,0 +1,7 @@
+sphinxcontrib-needs===0.7.9
+sphinxcontrib.plantuml===0.24
+sphinx_toolbox==3.2.0
+sphinxcontrib.nwdiag===2.0.0
+sphinxcontrib-swaggerdoc===0.1.7
+sphinx-rtd-theme===1.0.0
+jenkins-job-builder===4.3.0
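
These === pins are consumed through the -c options in tox.ini, layered on top of the OpenStack stable/zed upper-constraints file. The same environment can be approximated outside tox; a minimal sketch, assuming Python 3.10 and network access to opendev.org:

    # Sketch only: reproduce the tox dependency set in a plain virtualenv.
    python3.10 -m venv .venv && . .venv/bin/activate
    pip install \
        -c https://opendev.org/openstack/requirements/raw/branch/stable/zed/upper-constraints.txt \
        -c upper-constraints.txt \
        -r test-requirements.txt
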
diff --git a/utils/artifacts.opnfv.org.sh b/utils/artifacts.opnfv.org.sh
index 1984b4992..2b35006d4 100755
--- a/utils/artifacts.opnfv.org.sh
+++ b/utils/artifacts.opnfv.org.sh
@@ -31,7 +31,7 @@ do
for index in $(gsutil ls -l gs://artifacts.opnfv.org/logs/"$project"/ |awk 'NF==1'| sed s,gs://artifacts.opnfv.org/,, )
do
index="$(echo ${index%/*} | sed s,/,_,g)"
- echo "<LI><a href=\"http://artifacts.opnfv.org/${index%/*}.html\">"$index"</a></LI>" >> $OUTPUT
+ echo "<LI><a href=\"https://artifacts.opnfv.org/${index%/*}.html\">"$index"</a></LI>" >> $OUTPUT
done
done
@@ -55,7 +55,7 @@ rm -f $OUTPUT
echo "<LI>$path</LI>" >> $OUTPUT
echo "</UL>" >> $OUTPUT
else
- echo "<LI><a href=\"http://artifacts.opnfv.org/$filepath\">"$filepath"</a></LI>" >> $OUTPUT
+ echo "<LI><a href=\"https://artifacts.opnfv.org/$filepath\">"$filepath"</a></LI>" >> $OUTPUT
fi
done
@@ -85,7 +85,7 @@ rm -f $OUTPUT
echo "<LI>$path</LI>" >> $OUTPUT
echo "</UL>" >> $OUTPUT
else
- echo "<LI><a href=\"http://artifacts.opnfv.org/$filepath\">"$filepath"</a></LI>" >> $OUTPUT
+ echo "<LI><a href=\"https://artifacts.opnfv.org/$filepath\">"$filepath"</a></LI>" >> $OUTPUT
fi
done
@@ -119,7 +119,7 @@ do
echo "<LI>$path</LI>" >> $OUTPUT
echo "</UL>" >> $OUTPUT
else
- echo "<LI><a href=\"http://artifacts.opnfv.org/$filepath\">"$filepath"</a></LI>" >> $OUTPUT
+ echo "<LI><a href=\"https://artifacts.opnfv.org/$filepath\">"$filepath"</a></LI>" >> $OUTPUT
fi
diff --git a/utils/build-server-ansible/main.yml b/utils/build-server-ansible/main.yml
deleted file mode 100644
index c9f244bee..000000000
--- a/utils/build-server-ansible/main.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-############################################################################
-# Copyright (c) 2016 The Linux Foundation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-#############################################################################
----
-- hosts: "localhost"
- become: "True"
- tasks:
- - debug:
- msg: "{{ inventory_hostname }} is {{ ansible_distribution }}"
- - include_vars: vars/defaults.yml
- - include: vars/CentOS.yml
- when: ansible_distribution == "CentOS"
- - include: vars/Ubuntu.yml
- when: ansible_distribution == "Ubuntu"
- - name: Install Docker.
- package: name={{ docker_package }} state={{ docker_package_state }}
- - name: Ensure Docker is started and enabled at boot.
- service:
- name: docker
- state: started
- enabled: "yes"
- - name: install gsutil
- pip:
- name: gsutil
- state: present
- - name: install tox
- pip:
- name: tox
- state: present
- - name: install yamllint
- pip:
- name: yamllint
- state: present
- - include: vars/docker-compose-CentOS.yml
- when: ansible_distribution == "CentOS"
- - include: vars/docker-compose-Ubuntu.yml
- when: ansible_distribution == "Ubuntu"
diff --git a/utils/build-server-ansible/vars/CentOS.yml b/utils/build-server-ansible/vars/CentOS.yml
deleted file mode 100644
index 7a2c1d1d5..000000000
--- a/utils/build-server-ansible/vars/CentOS.yml
+++ /dev/null
@@ -1,76 +0,0 @@
-############################################################################
-# Copyright (c) 2016 The Linux Foundation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-############################################################################
----
-- name: Ensure old versions of Docker are not installed.
- package:
- name: '{{ item }}'
- state: absent
- with_items:
- - docker
- - docker-common
- - docker-engine
-
-- name: Add Docker GPG key.
- rpm_key:
- key: https://download.docker.com/linux/centos/gpg
- state: present
-
-- name: Ensure epel is installed.
- yum:
- name: epel-release
- state: present
-- name: Ensure depdencies are installed.
- yum:
- name: "{{ item }}"
- state: present
- with_items:
- - python-pip
- - rpm-build
- - kernel-headers
- - libpcap-devel
- - zlib-devel
- - numactl-devel
- - doxygen
- - python-sphinx
- - libvirt-devel
- - python-devel
- - openssl-devel
- - python-six
- - net-tools
- - bc
- - sysstat
- - xmlstarlet
- - facter
- - jq
-
-- name: install the 'Development tools' package group
- yum:
- name: "@Development tools"
- state: present
-
-- name: Add Docker repository.
- get_url:
- url: "{{ docker_yum_repo_url }}"
- dest: '/etc/yum.repos.d/docker-ce.repo'
- owner: root
- group: root
- mode: 0644
-
-- name: Configure Docker Edge repo.
- ini_file:
- dest: '/etc/yum.repos.d/docker-ce.repo'
- section: 'docker-ce-edge'
- option: enabled
- value: '{{ docker_yum_repo_enable_edge }}'
-
-- name: Configure Docker Test repo.
- ini_file:
- dest: '/etc/yum.repos.d/docker-ce.repo'
- section: 'docker-ce-test'
- option: enabled
- value: '{{ docker_yum_repo_enable_test }}'
diff --git a/utils/build-server-ansible/vars/Ubuntu.yml b/utils/build-server-ansible/vars/Ubuntu.yml
deleted file mode 100644
index f50255868..000000000
--- a/utils/build-server-ansible/vars/Ubuntu.yml
+++ /dev/null
@@ -1,88 +0,0 @@
-#############################################################################
-# Copyright (c) 2016 The Linux Foundation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-#############################################################################
----
-- name: Ensure old versions of Docker are not installed.
- package:
- name: '{{ item }}'
- state: absent
- with_items:
- - docker
- - docker-engine
-
-- name: Ensure depdencies are installed.
- apt:
- name: "{{ item }}"
- state: present
- with_items:
- - apt-transport-https
- - ca-certificates
- - git
- - build-essential
- - curl
- - wget
- - rpm
- - fuseiso
- - createrepo
- - genisoimage
- - libfuse-dev
- - dh-autoreconf
- - pkg-config
- - zlib1g-dev
- - libglib2.0-dev
- - libpixman-1-dev
- - python-virtualenv
- - python-dev
- - libffi-dev
- - libssl-dev
- - libxml2-dev
- - libxslt1-dev
- - bc
- - qemu-kvm
- - libvirt-bin
- - ubuntu-vm-builder
- - bridge-utils
- - monit
- - openjdk-8-jre-headless
- - python-nose
- - dirmngr
- - collectd
- - flex
- - bison
- - libnuma-dev
- - shellcheck
- - python-pip
- - sysstat
- - xmlstarlet
- - facter
- - jq
-
-- name: Add Docker apt key.
- apt_key:
- url: https://download.docker.com/linux/ubuntu/gpg
- id: 9DC858229FC7DD38854AE2D88D81803C0EBFCD88
- state: present
- register: add_repository_key
- ignore_errors: true
-
-- name: Ensure curl is present (on older systems without SNI).
- package: name=curl state=present
- when: add_repository_key|failed
-
-- name: Add Docker apt key (alternative for older systems without SNI).
- # yamllint disable rule:line-length
- shell: "curl -sSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -"
- # yamllint enable rule:line-length
- args:
- warn: "no"
- when: add_repository_key|failed
-
-- name: Add Docker repository.
- apt_repository:
- repo: "{{ docker_apt_repository }}"
- state: present
- update_cache: "yes"
diff --git a/utils/build-server-ansible/vars/defaults.yml b/utils/build-server-ansible/vars/defaults.yml
deleted file mode 100644
index 8d83380dd..000000000
--- a/utils/build-server-ansible/vars/defaults.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-#############################################################################
-# Copyright (c) 2016 The Linux Foundation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-#############################################################################
----
-docker_package: "docker-ce"
-docker_package_state: present
-
-# Used only for Debian/Ubuntu. Switch 'stable' to 'edge' if needed.
-docker_apt_release_channel: stable
-# yamllint disable rule:line-length
-docker_apt_repository: "deb https://download.docker.com/linux/{{ ansible_distribution|lower }} {{ ansible_distribution_release }} {{ docker_apt_release_channel }}"
-# yamllint enable rule:line-length
-
-# Used only for RedHat/CentOS.
-# yamllint disable rule:line-length
-docker_yum_repo_url: https://download.docker.com/linux/centos/docker-ce.repo
-# yamllint enable rule:line-length
-docker_yum_repo_enable_edge: 0
-docker_yum_repo_enable_test: 0
diff --git a/utils/fetch_k8_conf.sh b/utils/fetch_k8_conf.sh
index 32c07f8b6..e5e237d03 100755
--- a/utils/fetch_k8_conf.sh
+++ b/utils/fetch_k8_conf.sh
@@ -59,6 +59,9 @@ if [ "$installer_type" == "compass" ]; then
info "Fetch admin.conf successfully"
elif [ "$installer_type" == "joid" ]; then
info "Do nothing, config file has been provided in $HOME/joid_config/config for joid"
+elif [ "$installer_type" == "fuel" ]; then
+ info "Getting kubernetes config ..."
+ docker cp -L fuel:/opt/kubernetes.config $dest_path
else
error "Installer $installer_type is not supported by this script"
fi
diff --git a/utils/fetch_os_creds.sh b/utils/fetch_os_creds.sh
index 501095036..4a66187e5 100755
--- a/utils/fetch_os_creds.sh
+++ b/utils/fetch_os_creds.sh
@@ -113,7 +113,7 @@ if [ "$installer_type" == "fuel" ]; then
ssh ${ssh_options} "${ssh_user}@${controller_ip}" \
"sudo cat /root/keystonercv3" > "${dest_path}"
- if [[ ! "${BUILD_TAG}" =~ 'virtual' ]]; then
+ if [[ ! "${DEPLOY_SCENARIO}" =~ -noha$ ]]; then
ssh ${ssh_options} "${ssh_user}@${installer_ip}" \
"cat /etc/ssl/certs/os_cacert" > "${os_cacert}"
fi
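
The CA certificate fetch is now skipped for no-HA scenarios rather than for virtual deployments, keyed off the DEPLOY_SCENARIO suffix instead of BUILD_TAG. A minimal sketch of the new test, using a hypothetical scenario name:

    # Sketch only: the -noha suffix check introduced above.
    DEPLOY_SCENARIO="os-nosdn-nofeature-noha"
    if [[ ! "${DEPLOY_SCENARIO}" =~ -noha$ ]]; then
        echo "HA scenario: fetching os_cacert"
    else
        echo "no-HA scenario: skipping os_cacert"
    fi
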
diff --git a/utils/jenkins-jnlp-connect.sh b/utils/jenkins-jnlp-connect.sh
index 0647d3210..4716af9a2 100755
--- a/utils/jenkins-jnlp-connect.sh
+++ b/utils/jenkins-jnlp-connect.sh
@@ -8,7 +8,7 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-#Monit setup script for opnfv jnlp slave connections
+#Monit setup script for opnfv jnlp agent connections
test_firewall() {
jenkins_hostname="${jenkins_hostname:-build.opnfv.org}"
@@ -25,7 +25,7 @@ LF firewall not open, please send a report to helpdesk with your gpg key attache
opnfv-helpdesk@rt.linuxfoundation.org
Jenkins Home: $jenkinshome
Jenkins User: $jenkinsuser
-Slave Name: $slave_name
+Slave Name: $agent_name
IP Address: $(curl -s http://icanhazip.com)
EOF
exit 1
@@ -44,8 +44,8 @@ main () {
exit 1
fi
- if [[ -z $slave_name || -z $slave_secret ]]; then
- echo "slave name or secret not defined, please edit this file to define it"
+ if [[ -z $agent_name || -z $agent_secret ]]; then
+ echo "agent name or secret not defined, please edit this file to define it"
exit 1
fi
@@ -141,8 +141,8 @@ depends on jenkins_piddir\
fi
if [[ $started_monit == "true" ]]; then
- wget --timestamping https://"$jenkins_hostname"/jnlpJars/slave.jar && true
- chown $jenkinsuser:$jenkinsuser slave.jar
+ wget --timestamping https://"$jenkins_hostname"/jnlpJars/agent.jar && true
+ chown $jenkinsuser:$jenkinsuser agent.jar
if [[ -f /var/run/$jenkinsuser/jenkins_jnlp_pid ]]; then
echo "pid file found"
@@ -173,8 +173,8 @@ usage() {
cat << EOF
**this file must be copied to the jenkins home directory to work**
-jenkins-jnlp-connect.sh configures monit to keep slave connection up
-Checks for new versions of slave.jar
+jenkins-jnlp-connect.sh configures monit to keep agent connection up
+Checks for new versions of agent.jar
run as root to create pid directory and create monit config.
can be run as root additional times if you change variables and need to update monit config.
after running as root you should see "you are ready to start monit"
@@ -183,7 +183,7 @@ usage: $0 [OPTIONS]
-h show this message
-j set jenkins home
-u set jenkins user
- -n set slave name
+ -n set agent name
-s set secret key
-l set host, default is build.opnfv.org/ci
-t test the connection string by connecting without monit
@@ -205,8 +205,8 @@ do
case $OPTION in
j ) jenkinshome="$OPTARG" ;;
u ) jenkinsuser="$OPTARG" ;;
- n ) slave_name="$OPTARG" ;;
- s ) slave_secret="$OPTARG";;
+ n ) agent_name="$OPTARG" ;;
+ s ) agent_secret="$OPTARG";;
l ) jenkins_hostname="$OPTARG" ;;
h ) usage ;;
t ) started_monit=true
@@ -218,5 +218,5 @@ do
done
jenkins_hostname="${jenkins_hostname:-build.opnfv.org/ci}"
-connectionstring="java -jar slave.jar -jnlpUrl https://"$jenkins_hostname"/computer/"$slave_name"/slave-agent.jnlp -secret "$slave_secret" -noCertificateCheck "
+connectionstring="java -jar agent.jar -jnlpUrl https://"$jenkins_hostname"/computer/"$agent_name"/slave-agent.jnlp -secret "$agent_secret" -noCertificateCheck "
main "$@"
diff --git a/utils/push-test-logs.sh b/utils/push-test-logs.sh
index 9debb45b7..62c964da7 100644
--- a/utils/push-test-logs.sh
+++ b/utils/push-test-logs.sh
@@ -24,12 +24,13 @@ node_list=(\
'lf-pod1' 'lf-pod2' \
'lf-virtual2' 'lf-virtual3' \
'intel-pod12' 'intel-pod18' \
+'itri-pod1' \
'ericsson-pod1' 'ericsson-pod2' \
'ericsson-virtual1' 'ericsson-virtual2' 'ericsson-virtual3' \
'ericsson-virtual4' 'ericsson-virtual5' \
-'arm-pod5' 'arm-pod6' \
+'arm-pod9' 'arm-pod10' \
'huawei-pod1' 'huawei-pod2' 'huawei-pod3' 'huawei-pod4' \
-'huawei-pod6' 'huawei-pod7' 'huawei-pod12' \
+'huawei-pod6' 'huawei-pod7' 'huawei-pod12' 'huawei-pod8' \
'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4' \
'huawei-virtual5' 'huawei-virtual8' 'huawei-virtual9' \
'zte-pod2' 'zte-pod3' 'zte-pod9' \
@@ -64,11 +65,11 @@ if [ -d "$dir_result" ]; then
else
echo "Uploading logs to artifact $project_artifact"
gsutil -m cp -r "$dir_result"/* gs://artifacts.opnfv.org/"$project_artifact"/ >/dev/null 2>&1
- echo "Logs can be found in http://artifacts.opnfv.org/logs_${project}_${testbed}.html"
+ echo "Logs can be found in https://artifacts.opnfv.org/logs_${project}_${testbed}.html"
cd $dir_result
files=($(find . -name \* -print|sed 's/^\.//'|sed '/^\s*$/d'))
for f in ${files[@]}; do
- echo "http://artifacts.opnfv.org/${project_artifact}${f}"
+ echo "https://artifacts.opnfv.org/${project_artifact}${f}"
done
fi
fi
diff --git a/utils/retention_script.sh b/utils/retention_script.sh
index b2177930b..3d0673ad0 100755
--- a/utils/retention_script.sh
+++ b/utils/retention_script.sh
@@ -15,10 +15,7 @@
# limitations under the License.
##############################################################################
-PATH=$PATH:/usr/local/bin/
-
-#These are the only projects that generate artifacts
-for x in armband ovsnfv fuel apex compass4nfv
+for x in airship apex armband compass4nfv fuel ovsnfv
do
echo "Looking at artifacts for project $x"