-rw-r--r--.gitmodules3
-rw-r--r--docs/ci/index.rst9
-rw-r--r--docs/ci/tables/ci-build-servers.rst17
-rw-r--r--docs/ci/user-guide.rst100
-rw-r--r--docs/conf.py11
-rw-r--r--docs/conf.yaml3
-rw-r--r--docs/requirements.txt3
-rw-r--r--gitlab-templates/Docker.gitlab-ci.yml70
-rw-r--r--gitlab-templates/GoogleStorage.gitlab-ci.yml35
-rw-r--r--gitlab-templates/RTD.gitlab-ci.yml104
m---------global-jjb0
-rwxr-xr-xjjb/3rd_party_ci/create-apex-vms.sh12
-rwxr-xr-xjjb/3rd_party_ci/detect-snapshot.sh30
-rwxr-xr-xjjb/3rd_party_ci/download-netvirt-artifact.sh38
-rwxr-xr-xjjb/3rd_party_ci/install-netvirt.sh32
-rw-r--r--jjb/3rd_party_ci/odl-netvirt.yaml287
-rwxr-xr-xjjb/3rd_party_ci/postprocess-netvirt.sh11
-rw-r--r--jjb/airship/airship-rtd-jobs.yaml21
-rw-r--r--jjb/airship/airship.yaml655
-rw-r--r--jjb/airship/cntt.yaml714
-rwxr-xr-xjjb/apex/apex-build.sh98
-rwxr-xr-xjjb/apex/apex-deploy.sh213
-rwxr-xr-xjjb/apex/apex-download-artifact.sh89
-rwxr-xr-xjjb/apex/apex-fetch-logs.sh25
-rwxr-xr-xjjb/apex/apex-fetch-snap-info.sh46
-rw-r--r--jjb/apex/apex-functest-scenario.sh18
-rwxr-xr-xjjb/apex/apex-iso-verify.sh75
-rw-r--r--jjb/apex/apex-jjb-renderer.py51
-rw-r--r--jjb/apex/apex-project-jobs.yaml146
-rw-r--r--jjb/apex/apex-rtd-jobs.yaml20
-rw-r--r--jjb/apex/apex-snapshot-create.sh105
-rw-r--r--jjb/apex/apex-snapshot-deploy.sh184
-rwxr-xr-xjjb/apex/apex-unit-test.sh33
-rwxr-xr-xjjb/apex/apex-upload-artifact.sh163
-rw-r--r--jjb/apex/apex-verify-jobs.yaml409
-rw-r--r--jjb/apex/apex.yaml2008
-rw-r--r--jjb/apex/apex.yaml.j21361
-rw-r--r--jjb/apex/scenarios.yaml.hidden68
-rw-r--r--jjb/apex/update-build-result.groovy5
-rw-r--r--jjb/armband/armband-ci-jobs.yaml128
-rw-r--r--jjb/armband/armband-rtd-jobs.yaml20
-rw-r--r--jjb/armband/armband-verify-jobs.yaml100
-rw-r--r--jjb/availability/availability-rtd-jobs.yaml25
-rw-r--r--jjb/availability/availability.yaml8
-rw-r--r--jjb/barometer/barometer-build.sh22
-rw-r--r--jjb/barometer/barometer-rtd-jobs.yaml28
-rw-r--r--jjb/barometer/barometer-upload-artifact.sh74
-rw-r--r--jjb/barometer/barometer.yaml161
-rw-r--r--jjb/bottlenecks/bottlenecks-cleanup.sh17
-rw-r--r--jjb/bottlenecks/bottlenecks-project-jobs.yaml222
-rw-r--r--jjb/bottlenecks/bottlenecks-rtd-jobs.yaml21
-rw-r--r--jjb/bottlenecks/bottlenecks-run-suite.sh125
-rw-r--r--jjb/calipso/calipso-rtd-jobs.yaml13
-rw-r--r--jjb/calipso/calipso.yaml65
-rw-r--r--jjb/ci_gate_security/anteater-report-to-gerrit.sh28
-rw-r--r--jjb/ci_gate_security/anteater-security-audit-weekly.sh50
-rw-r--r--jjb/ci_gate_security/anteater-security-audit.sh32
-rw-r--r--jjb/ci_gate_security/opnfv-ci-gate-security.yaml190
-rw-r--r--jjb/clover/clover-project.yaml172
-rw-r--r--jjb/clover/clover-rtd-jobs.yaml21
-rw-r--r--jjb/clover/clover-views.yaml6
-rw-r--r--jjb/cntt/cntt.yaml97
-rwxr-xr-xjjb/container4nfv/arm64/deploy-cni.sh17
-rwxr-xr-xjjb/container4nfv/arm64/yardstick-arm64.sh93
-rw-r--r--jjb/container4nfv/container4nfv-arm64.yaml35
-rw-r--r--jjb/container4nfv/container4nfv-project.yaml169
-rw-r--r--jjb/container4nfv/container4nfv-rtd-jobs.yaml21
-rw-r--r--jjb/container4nfv/container4nfv-views.yaml7
-rw-r--r--jjb/cperf/cirros-upload.yaml.ansible39
-rw-r--r--jjb/cperf/cperf-ci-jobs.yaml210
-rwxr-xr-xjjb/cperf/cperf-prepare-robot.sh32
-rwxr-xr-xjjb/cperf/cperf-robot-netvirt-csit.sh186
-rw-r--r--jjb/cperf/cperf-upload-logs-csit.sh12
-rw-r--r--jjb/cperf/cperf-views.yaml6
-rw-r--r--jjb/cperf/csit-clean.yaml.ansible11
-rw-r--r--jjb/cperf/parse-node-yaml.py71
-rw-r--r--jjb/daisy4nfv/daisy-daily-jobs.yaml423
-rwxr-xr-xjjb/daisy4nfv/daisy-deploy.sh75
-rw-r--r--jjb/daisy4nfv/daisy-project-jobs.yaml312
-rw-r--r--jjb/daisy4nfv/daisy-rtd-jobs.yaml12
-rwxr-xr-xjjb/daisy4nfv/daisy4nfv-basic.sh6
-rwxr-xr-xjjb/daisy4nfv/daisy4nfv-build-kolla-image.sh68
-rwxr-xr-xjjb/daisy4nfv/daisy4nfv-build.sh41
-rwxr-xr-xjjb/daisy4nfv/daisy4nfv-download-artifact.sh86
-rw-r--r--jjb/daisy4nfv/daisy4nfv-merge-jobs.yaml226
-rwxr-xr-xjjb/daisy4nfv/daisy4nfv-smoke-test.sh6
-rwxr-xr-xjjb/daisy4nfv/daisy4nfv-upload-artifact.sh96
-rw-r--r--jjb/daisy4nfv/daisy4nfv-verify-jobs.yaml225
-rwxr-xr-xjjb/doctor/doctor-env-presetup.sh69
-rw-r--r--jjb/doctor/doctor-rtd-jobs.yaml21
-rw-r--r--jjb/doctor/doctor.yaml260
-rw-r--r--jjb/dovetail-webportal/dovetail-webportal-project-jobs.yaml102
-rwxr-xr-xjjb/dovetail/dovetail-artifacts-upload.sh94
-rw-r--r--jjb/dovetail/dovetail-artifacts-upload.yaml115
-rw-r--r--jjb/dovetail/dovetail-ci-jobs.yaml300
-rwxr-xr-xjjb/dovetail/dovetail-cleanup.sh49
-rw-r--r--jjb/dovetail/dovetail-project-jobs.yaml116
-rw-r--r--jjb/dovetail/dovetail-rtd-jobs.yaml20
-rwxr-xr-xjjb/dovetail/dovetail-run.sh451
-rw-r--r--jjb/edgecloud/edgecloud-rtd-jobs.yaml24
-rw-r--r--jjb/edgecloud/edgecloud-views.yaml6
-rw-r--r--jjb/fds/fds-rtd-jobs.yaml12
-rw-r--r--jjb/fds/fds-views.yaml6
-rw-r--r--jjb/fuel/fuel-daily-jobs.yaml705
-rwxr-xr-xjjb/fuel/fuel-deploy.sh89
-rw-r--r--jjb/fuel/fuel-docker-jobs.yaml224
-rwxr-xr-xjjb/fuel/fuel-logs.sh36
-rw-r--r--jjb/fuel/fuel-rtd-jobs.yaml20
-rwxr-xr-xjjb/fuel/fuel-set-scenario.sh54
-rw-r--r--jjb/fuel/fuel-verify-jobs.yaml271
-rwxr-xr-xjjb/functest/functest-alpine.sh270
-rwxr-xr-xjjb/functest/functest-cleanup.sh40
-rw-r--r--jjb/functest/functest-daily-jobs.yaml304
-rw-r--r--jjb/functest/functest-docker.yaml340
-rwxr-xr-xjjb/functest/functest-env-presetup.sh58
-rw-r--r--jjb/functest/functest-exit.sh11
-rwxr-xr-xjjb/functest/functest-k8.sh46
-rw-r--r--jjb/functest/functest-kubernetes-docker.yaml276
-rw-r--r--jjb/functest/functest-kubernetes-pi.yaml891
-rw-r--r--jjb/functest/functest-kubernetes-project-jobs.yaml80
-rw-r--r--jjb/functest/functest-kubernetes.yaml2099
-rw-r--r--jjb/functest/functest-pi.yaml1239
-rw-r--r--jjb/functest/functest-project-jobs.yaml80
-rwxr-xr-xjjb/functest/functest-suite.sh21
-rw-r--r--jjb/functest/functest.yaml2552
-rw-r--r--jjb/functest/xtesting-ci-docker.yaml189
-rw-r--r--jjb/functest/xtesting-ci-vm.yaml233
-rw-r--r--jjb/functest/xtesting-ci.yaml182
-rw-r--r--jjb/functest/xtesting-docker.yaml234
-rw-r--r--jjb/functest/xtesting-pi.yaml425
-rw-r--r--jjb/functest/xtesting-project-jobs.yaml80
-rw-r--r--jjb/functest/xtesting.yaml932
l---------jjb/global-jjb/jjb1
l---------jjb/global-jjb/shell1
-rw-r--r--jjb/global/basic-jobs.yaml46
-rw-r--r--jjb/global/installer-params.yaml140
-rwxr-xr-xjjb/global/installer-report.sh46
-rw-r--r--jjb/global/releng-macros.yaml547
-rw-r--r--jjb/global/slave-params.yaml1273
-rw-r--r--jjb/ipv6/ipv6-rtd-jobs.yaml24
-rw-r--r--jjb/ipv6/ipv6-views.yaml6
-rw-r--r--jjb/ipv6/ipv6.yaml8
-rwxr-xr-xjjb/kvmfornfv/kvmfornfv-download-artifact.sh39
-rw-r--r--jjb/kvmfornfv/kvmfornfv-rtd-jobs.yaml13
-rwxr-xr-xjjb/kvmfornfv/kvmfornfv-test.sh33
-rwxr-xr-xjjb/kvmfornfv/kvmfornfv-upload-artifact.sh85
-rw-r--r--jjb/kvmfornfv/kvmfornfv-views.yaml6
-rw-r--r--jjb/kvmfornfv/kvmfornfv.yaml386
-rw-r--r--jjb/laas/laas.yml34
-rw-r--r--jjb/nfvbench/nfvbench-rtd-jobs.yaml20
-rw-r--r--jjb/nfvbench/nfvbench-views.yaml16
-rw-r--r--jjb/nfvbench/nfvbench.yaml41
-rw-r--r--jjb/onosfw/onosfw-rtd-jobs.yaml13
-rw-r--r--jjb/onosfw/onosfw-views.yaml6
-rw-r--r--jjb/onosfw/onosfw.yaml192
-rwxr-xr-xjjb/openci/create-ape.sh47
-rwxr-xr-xjjb/openci/create-cde.sh47
-rwxr-xr-xjjb/openci/create-clme.sh48
-rw-r--r--jjb/openci/openci-odl-daily-jobs.yaml81
-rw-r--r--jjb/openci/openci-onap-daily-jobs.yaml81
-rw-r--r--jjb/openci/openci-opnfv-daily-jobs.yaml135
-rw-r--r--jjb/openci/openci-views.yaml6
-rw-r--r--jjb/opnfvdocs/docs-rtd.yaml86
-rw-r--r--jjb/opnfvdocs/opnfvdocs-views.yaml16
-rw-r--r--jjb/opnfvdocs/opnfvdocs.yaml137
-rw-r--r--jjb/opnfvtsc/opnfvtsc-rtd-jobs.yaml13
-rw-r--r--jjb/opnfvtsc/opnfvtsc-views.yaml6
-rw-r--r--jjb/ovn4nfv/golang-make-test.sh25
-rw-r--r--jjb/ovn4nfv/ovn4nfv-daily-jobs.yaml88
-rw-r--r--jjb/ovn4nfv/ovn4nfv-k8s-plugins-project-jobs.yaml109
-rw-r--r--jjb/ovn4nfv/ovn4nfv-project-jobs.yaml60
-rw-r--r--jjb/ovn4nfv/ovn4nfv-rtd-jobs.yaml20
-rw-r--r--jjb/ovno/ovno-rtd-jobs.yaml13
-rw-r--r--jjb/ovno/ovno-views.yaml6
-rw-r--r--jjb/pharos/check-jinja2.yaml98
-rw-r--r--jjb/pharos/pharos-rtd-jobs.yaml12
-rw-r--r--jjb/pharos/pharos-views.yaml6
-rw-r--r--jjb/pharos/pharos.yaml11
-rw-r--r--jjb/releng/artifact-cleanup.yaml41
-rwxr-xr-xjjb/releng/branch-or-tag.sh6
-rw-r--r--jjb/releng/opnfv-docker-arm.yaml230
-rw-r--r--jjb/releng/opnfv-docker-custom.yaml111
-rw-r--r--jjb/releng/opnfv-docker.sh164
-rw-r--r--jjb/releng/opnfv-docker.yaml506
-rw-r--r--jjb/releng/opnfv-lint.yaml186
-rw-r--r--jjb/releng/opnfv-utils.yaml178
-rw-r--r--jjb/releng/releng-info-vote.yaml6
-rw-r--r--jjb/releng/releng-jobs.yaml496
-rw-r--r--jjb/releng/releng-release-create-branch.sh8
-rw-r--r--jjb/releng/releng-release-jobs.yaml122
-rw-r--r--jjb/releng/releng-release-tagging.sh5
-rw-r--r--jjb/releng/releng-rtd-jobs.yaml16
-rw-r--r--jjb/releng/releng-views.yaml17
-rw-r--r--jjb/samplevnf/samplevnf-rtd-jobs.yaml13
-rw-r--r--jjb/samplevnf/samplevnf-views.yaml6
-rw-r--r--jjb/sdnvpn/sdnvpn-rtd-jobs.yaml20
-rw-r--r--jjb/sdnvpn/sdnvpn-views.yaml6
-rw-r--r--jjb/sfc/sfc-project-jobs.yaml110
-rw-r--r--jjb/sfc/sfc-rtd-jobs.yaml20
-rw-r--r--jjb/sfc/sfc-views.yaml7
-rw-r--r--jjb/snaps/snaps-rtd-jobs.yaml12
-rw-r--r--jjb/snaps/snaps-verify-jobs.yaml81
-rw-r--r--jjb/snaps/snaps-views.yaml6
-rw-r--r--jjb/stor4nfv/stor4nfv-project.yaml8
-rw-r--r--jjb/stor4nfv/stor4nfv-rtd-jobs.yaml24
-rw-r--r--jjb/stor4nfv/stor4nfv-views.yaml6
-rw-r--r--jjb/storperf/storperf-daily-jobs.yaml178
-rw-r--r--jjb/storperf/storperf-rtd-jobs.yaml12
-rw-r--r--jjb/storperf/storperf-verify-jobs.yaml200
-rw-r--r--jjb/storperf/storperf-views.yaml6
-rw-r--r--jjb/storperf/storperf.yaml78
-rw-r--r--jjb/ves/ves-rtd-jobs.yaml13
-rw-r--r--jjb/ves/ves-views.yaml6
-rw-r--r--jjb/ves/ves.yaml8
-rw-r--r--jjb/vswitchperf/vswitchperf-rtd-jobs.yaml25
-rw-r--r--jjb/vswitchperf/vswitchperf-views.yaml6
-rw-r--r--jjb/vswitchperf/vswitchperf.yaml207
-rw-r--r--jjb/xci/bifrost-cleanup-job.yaml146
-rw-r--r--jjb/xci/bifrost-periodic-jobs.yaml154
-rwxr-xr-xjjb/xci/bifrost-provision.sh109
-rw-r--r--jjb/xci/bifrost-verify-jobs.yaml225
-rwxr-xr-xjjb/xci/bifrost-verify.sh54
-rw-r--r--jjb/xci/osa-periodic-jobs.yaml263
-rwxr-xr-xjjb/xci/xci-cleanup.sh28
-rw-r--r--jjb/xci/xci-daily-jobs.yaml356
-rwxr-xr-xjjb/xci/xci-deploy.sh75
-rw-r--r--jjb/xci/xci-merge-jobs.yaml492
-rwxr-xr-xjjb/xci/xci-promote.sh51
-rw-r--r--jjb/xci/xci-rtd-jobs.yaml13
-rwxr-xr-xjjb/xci/xci-run-functest.sh77
-rwxr-xr-xjjb/xci/xci-set-scenario.sh265
-rwxr-xr-xjjb/xci/xci-start-deployment.sh45
-rwxr-xr-xjjb/xci/xci-start-new-vm.sh93
-rw-r--r--jjb/xci/xci-verify-jobs.yaml327
-rw-r--r--jjb/xci/xci-views.yaml6
-rwxr-xr-xjjb/yardstick/yardstick-cleanup.sh36
-rw-r--r--jjb/yardstick/yardstick-daily-jobs.yaml509
-rwxr-xr-xjjb/yardstick/yardstick-daily.sh75
-rwxr-xr-xjjb/yardstick/yardstick-get-k8s-conf.sh13
-rw-r--r--jjb/yardstick/yardstick-project-jobs.yaml194
-rw-r--r--jjb/yardstick/yardstick-rtd-jobs.yaml20
-rw-r--r--releases/2023.1/functest.yaml9
-rw-r--r--releases/2023.2/functest.yaml9
-rw-r--r--releases/hunter/vswitchperf.yaml5
-rw-r--r--releases/iruya/vswitchperf.yaml5
-rw-r--r--releases/jerma/airship.yaml24
-rw-r--r--releases/jerma/barometer.yaml21
-rw-r--r--releases/jerma/cirv.yaml34
-rw-r--r--releases/jerma/doctor.yaml24
-rw-r--r--releases/jerma/dovetail.yaml29
-rw-r--r--releases/jerma/kuberef.yaml (renamed from jjb/releng/releng-release-create-venv.sh)23
-rw-r--r--releases/jerma/moon.yaml9
-rw-r--r--releases/jerma/opnfvdocs.yaml14
-rw-r--r--releases/jerma/samplevnf.yaml24
-rw-r--r--releases/jerma/vswitchperf.yaml14
-rw-r--r--releases/kali/airship.yaml19
-rw-r--r--releases/kali/barometer.yaml17
-rw-r--r--releases/kali/cirv.yaml (renamed from utils/build-server-ansible/vars/docker-compose-CentOS.yml)17
-rw-r--r--releases/kali/functest.yaml15
-rw-r--r--releases/kali/kuberef.yaml17
-rw-r--r--releases/kali/vineperf.yaml17
-rw-r--r--releases/lakelse/barometer.yaml17
-rw-r--r--releases/lakelse/cirv.yaml17
-rw-r--r--releases/lakelse/kuberef.yaml17
-rw-r--r--releases/lakelse/opnfvdocs.yaml17
-rw-r--r--releases/lakelse/vineperf.yaml17
-rw-r--r--releases/leguer/functest.yaml15
-rw-r--r--releases/moselle/barometer.yaml17
-rw-r--r--releases/moselle/kuberef.yaml17
-rw-r--r--releases/moselle/opnfvdocs.yaml17
-rw-r--r--releases/moselle/thoth.yaml (renamed from utils/build-server-ansible/inventory.ini)15
-rw-r--r--releases/moselle/vineperf.yaml17
-rw-r--r--releases/nile/barometer.yaml17
-rw-r--r--releases/nile/opnfvdocs.yaml17
-rw-r--r--[-rwxr-xr-x]releases/nile/thoth.yaml (renamed from jjb/kvmfornfv/kvmfornfv-build.sh)19
-rw-r--r--releases/nile/vineperf.yaml17
-rw-r--r--releases/orinoco/barometer.yaml17
-rw-r--r--releases/orinoco/kuberef.yaml17
-rw-r--r--releases/orinoco/opnfvdocs.yaml9
-rw-r--r--releases/orinoco/thoth.yaml (renamed from utils/build-server-ansible/vars/docker-compose-Ubuntu.yml)19
-rw-r--r--releases/schema.yaml2
-rwxr-xr-xreleases/scripts/release-status.sh2
-rw-r--r--releases/scripts/requirements.txt5
-rw-r--r--releases/v1.21/functest.yaml9
-rw-r--r--releases/v1.22/functest.yaml9
-rw-r--r--releases/v1.23/functest.yaml9
-rw-r--r--releases/v1.24/functest.yaml9
-rw-r--r--releases/v1.25/functest.yaml9
-rw-r--r--releases/v1.26/functest.yaml9
-rw-r--r--releases/v1.27/functest.yaml9
-rw-r--r--releases/v1.28/functest.yaml9
-rw-r--r--releases/v1.29/functest.yaml9
-rw-r--r--releases/wallaby/functest.yaml12
-rw-r--r--releases/xena/functest.yaml12
-rw-r--r--releases/yoga/functest.yaml12
-rw-r--r--releases/zed/functest.yaml12
-rw-r--r--test-requirements.txt3
-rw-r--r--tox.ini26
-rw-r--r--upper-constraints.txt7
-rw-r--r--utils/build-server-ansible/main.yml46
-rw-r--r--utils/build-server-ansible/vars/CentOS.yml77
-rw-r--r--utils/build-server-ansible/vars/Ubuntu.yml158
-rw-r--r--utils/build-server-ansible/vars/defaults.yml27
-rwxr-xr-xutils/retention_script.sh5
304 files changed, 9597 insertions, 27462 deletions
diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index 9f7b77825..000000000
--- a/.gitmodules
+++ /dev/null
@@ -1,3 +0,0 @@
-[submodule "jjb/global-jjb"]
- path = global-jjb
- url = https://github.com/lfit/releng-global-jjb
diff --git a/docs/ci/index.rst b/docs/ci/index.rst
index 08f23ac2f..445552a82 100644
--- a/docs/ci/index.rst
+++ b/docs/ci/index.rst
@@ -8,10 +8,17 @@
OPNFV CI
========
-TBD
+OPNFV continuous integration (CI) is run on a variety of :doc:`hardware <resources>`
+connected to Jenkins and managed through YAML files in the `Releng`_
+repository. These YAML files are read by `Jenkins Job Builder`_ to
+generate and upload Jenkins jobs to the server. See the :doc:`User Guide
+<user-guide>` for resources on getting started with CI for your project.
.. toctree::
:maxdepth: 2
user-guide
resources
+
+.. _Releng: https://gerrit.opnfv.org/gerrit/admin/repos/releng
+.. _Jenkins Job Builder: https://docs.openstack.org/infra/jenkins-job-builder/
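Editor's note on the workflow described above: Jenkins Job Builder expands high-level
project definitions into concrete Jenkins jobs. A minimal, hypothetical sketch of such
a definition (not part of this change; all names are placeholders)::

    ---
    - job-template:
        name: '{project}-hello-{stream}'
        builders:
          - shell: 'echo "Hello from {project} on branch {branch}"'

    - project:
        name: example
        project: example
        stream:
          - master:
              branch: '{stream}'
        jobs:
          - '{project}-hello-{stream}'

Jenkins Job Builder expands this into a job named example-hello-master and uploads it
to the Jenkins server.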
diff --git a/docs/ci/tables/ci-build-servers.rst b/docs/ci/tables/ci-build-servers.rst
index ad168653e..b2f4893e3 100644
--- a/docs/ci/tables/ci-build-servers.rst
+++ b/docs/ci/tables/ci-build-servers.rst
@@ -18,23 +18,14 @@
- aarch64
- Ubuntu 16.04
- `Armband ENEA Team`_
- * - `ericsson-build3 <https://build.opnfv.org/ci/computer/ericsson-build3>`_
+ * - `lf-build5 <https://build.opnfv.org/ci/computer/lf-build5>`_
- x86_64
- - Ubuntu 16.04
- - `Dianfeng Du`_
- * - `ericsson-build4 <https://build.opnfv.org/ci/computer/ericsson-build4>`_
- - x86_64
- - Ubuntu 16.04
- - `Dianfeng Du`_
- * - `lf-build1 <https://build.opnfv.org/ci/computer/lf-build1>`_
- - x86_64
- - CentOS 7.4
+ - Ubuntu 18.04
- `Linux Foundation`_
- * - `lf-build2 <https://build.opnfv.org/ci/computer/lf-build2>`_
+ * - `lf-build6 <https://build.opnfv.org/ci/computer/lf-build6>`_
- x86_64
- - Ubuntu 16.10
+ - CentOS 8
- `Linux Foundation`_
.. _Linux Foundation: helpdesk@opnfv.org
-.. _Dianfeng Du: dianfeng.du@ericsson.com
.. _Armband ENEA Team: armband@enea.com
diff --git a/docs/ci/user-guide.rst b/docs/ci/user-guide.rst
index 9c9b74a41..a56a3a1b0 100644
--- a/docs/ci/user-guide.rst
+++ b/docs/ci/user-guide.rst
@@ -8,4 +8,102 @@
CI User Guide
=============
-TBD
+Structure of the Releng Repository
+----------------------------------
+
+jjb/<projects>
+ Individual project CI configurations.
+
+jjb/global
+ Collection of JJB defaults and templates shared by all projects.
+
+global-jjb/
+ Git submodule pointing to `Global-JJB`_, which provides a variety of
+ common `CI jobs`_ such as ReadTheDocs
+ (RTD) builds.
+
+docs/
+ This documentation.
+
+releases/
+ Release configuration files for creating stable branches and tagging
+ repositories and related automation scripts.
+
+utils/
+ Collection of common utilities used by projects.
+
+utils/build-server-ansible
+ Ansible configuration for managing build servers. This is where
+ projects can add packages they need for their CI to the servers.
+
+
+CI Setup
+--------
+
+Basic Setup
+~~~~~~~~~~~
+
+All projects are required to have a **+1 Verified** vote in Gerrit in
+order to merge their code. Since a new project may not yet know how it
+wants to set up CI, it can pass this validation by configuring a
+'no-op' job to run against its changesets.
+
+1. Clone the `Releng`_ repository, using the *Clone with commit-msg
+ hook* command under the *SSH* tab (displayed after logging in and
+ uploading an SSH key):
+
+ .. note::
+ <gerrit username> in the command below is your Gerrit username; when
+ you view the command on the Gerrit website it is already filled in.
+
+ For example::
+
+ git clone "ssh://<gerrit username>@gerrit.opnfv.org:29418/releng" && \
+ scp -p -P 29418 <gerrit username>@gerrit.opnfv.org:hooks/commit-msg "releng/.git/hooks/"
+
+
+2. Create a project directory under the *jjb/* directory, and an initial
+ project YAML file::
+
+ mkdir jjb/myproject
+ touch jjb/myproject/myproject-ci-jobs.yaml
+
+3. Modify the project YAML file to add the basic validation job::
+
+ $EDITOR jjb/myproject/myproject-ci-jobs.yaml
+
+ ::
+
+ ---
+ - project:
+ name: myproject
+ project:
+ - '{name}'
+ jobs:
+ - '{project}-verify-basic'
+
+Docker Builds
+~~~~~~~~~~~~~
+
+Docker builds are managed through the **jjb/releng/opnfv-docker.yaml**
+file. Modify this file with your project details to enable Docker builds
+on merges and tags to your project repository::
+
+ ---
+ - project:
+ name: 'opnfv-docker'
+
+ [...]
+
+ dockerrepo:
+ [...]
+ - 'myproject':
+ project: 'myproject'
+ <<: *master
+
+.. _Jenkins Job Builder: https://docs.openstack.org/infra/jenkins-job-builder/
+.. _Releng: https://gerrit.opnfv.org/gerrit/admin/repos/releng
+.. _Global-JJB: https://docs.releng.linuxfoundation.org/projects/global-jjb/en/latest/index.html
+.. _CI jobs: https://docs.releng.linuxfoundation.org/projects/global-jjb/en/latest/index.html#global-jjb-templates
+.. _opnfvdocs: https://docs.opnfv.org/en/latest/how-to-use-docs/index.html
+.. _support.linuxfoundation.org: https://jira.linuxfoundation.org/plugins/servlet/theme/portal/2/create/145
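The ``<<: *master`` line in the Docker Builds snippet above is a standard YAML merge
key: it copies the keys of the anchor named ``master`` into the image entry, so each
image inherits the branch settings of that stream. A minimal, hypothetical sketch of
the pattern (the real anchor lives in jjb/releng/opnfv-docker.yaml and may carry
different keys)::

    ---
    - project:
        name: opnfv-docker

        master: &master            # anchor holding the stream settings
          stream: master
          branch: '{stream}'

        dockerrepo:
          - 'myproject':
              project: 'myproject'
              <<: *master          # merge the anchored keys into this entry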
diff --git a/docs/conf.py b/docs/conf.py
index 86ab8c577..6cfaf6985 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1 +1,10 @@
-from docs_conf.conf import * # flake8: noqa
+project = 'Releng'
+extensions = [
+ 'sphinx.ext.autosectionlabel'
+]
+html_theme = "piccolo_theme"
+autosectionlabel_prefix_document = True
+autosectionlabel_maxdepth = 4
+numfig = True
+numfig_format = {'figure': 'Figure %s', 'table': 'Table %s',
+ 'code-block': 'Listing %s', 'section': 'Section %s'}
diff --git a/docs/conf.yaml b/docs/conf.yaml
deleted file mode 100644
index 749a4b1cf..000000000
--- a/docs/conf.yaml
+++ /dev/null
@@ -1,3 +0,0 @@
----
-project_cfg: opnfv
-project: releng
diff --git a/docs/requirements.txt b/docs/requirements.txt
deleted file mode 100644
index f26b04141..000000000
--- a/docs/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-lfdocs-conf
-sphinxcontrib-httpdomain
-sphinx-opnfv-theme
diff --git a/gitlab-templates/Docker.gitlab-ci.yml b/gitlab-templates/Docker.gitlab-ci.yml
new file mode 100644
index 000000000..8acb5a00e
--- /dev/null
+++ b/gitlab-templates/Docker.gitlab-ci.yml
@@ -0,0 +1,70 @@
+# Build and push a Docker image with CI/CD.
+# Docker-in-Docker documentation: https://docs.gitlab.com/ee/ci/docker/using_docker_build.html
+#
+# By default builds are tagged with their branch name and pushed to the
+# Gitlab Docker Registry. If DOCKER_LATEST_TAG is set to true, builds on
+# the $DOCKER_LATEST_BRANCH are also tagged and pushed as ":latest"
+#
+# Scheduled builds can be enabled on a Gitlab schedule by specifying
+# DOCKER_SCHEDULE = "true" in variables
+---
+variables:
+ # Docker registry where images will be pushed
+ DOCKER_REGISTRY: "$CI_REGISTRY"
+ DOCKER_USERNAME: "$CI_REGISTRY_USER"
+ DOCKER_TOKEN: "$CI_REGISTRY_PASSWORD"
+ # Whether or not to push images after they're built
+ DOCKER_PUSH: "true"
+ # TODO: Conditionally include '--file' to docker build to reduce need
+ # to always define FILEPATH when BUILDCONTEXT is set
+ DOCKER_FILEPATH: "Dockerfile"
+ DOCKER_BUILDCONTEXT: "."
+ DOCKER_IMAGE: "$CI_REGISTRY_IMAGE"
+ # If DOCKER_LATEST_TAG is set to true, builds on the $DOCKER_LATEST_BRANCH
+ # will be tagged and pushed with ":latest"
+ DOCKER_LATEST_TAG: "true"
+ DOCKER_LATEST_BRANCH: "$CI_DEFAULT_BRANCH"
+
+.docker-build-and-push: &docker-build-and-push
+ image: docker:latest
+ stage: deploy
+ interruptible: true
+ services:
+ - docker:dind
+ before_script:
+ - docker login -u "$DOCKER_USERNAME" -p "$DOCKER_TOKEN" $DOCKER_REGISTRY
+ script:
+ # Warm the cache by fetching the latest image. There's no guarantee
+ # the image will already exist on the runner.
+ - docker pull "$DOCKER_IMAGE:${CI_COMMIT_REF_SLUG}" || true
+ - >
+ docker build
+ --pull
+ --cache-from "$DOCKER_IMAGE:${CI_COMMIT_REF_SLUG}"
+ --file "$DOCKER_FILEPATH"
+ --tag "$DOCKER_IMAGE:${CI_COMMIT_REF_SLUG}"
+ $DOCKER_BUILDCONTEXT
+ - |
+ if [[ "$CI_COMMIT_BRANCH" == "$DOCKER_LATEST_BRANCH" && "$DOCKER_LATEST_TAG" == "true" ]]; then
+ docker tag "$DOCKER_IMAGE:${CI_COMMIT_REF_SLUG}" "$DOCKER_IMAGE"
+ fi
+ - |
+ # Push docker images if DOCKER_PUSH is set
+ if [[ "$DOCKER_PUSH" == "true" ]]; then
+ docker push "$DOCKER_IMAGE:${CI_COMMIT_REF_SLUG}"
+ # Push ':latest' if DOCKER_LATEST_TAG is true
+ if [[ "$CI_COMMIT_BRANCH" == "$DOCKER_LATEST_BRANCH" && "$DOCKER_LATEST_TAG" == "true" ]]; then
+ docker push "$DOCKER_IMAGE"
+ fi
+ fi
+ rules:
+ - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+ when: never
+ # Gitlab does not have a way of specifying which jobs are scheduled,
+ # so an extra variable is needed to signal that the docker build
+ # should be picked up by the scheduled run.
+ - if: $CI_PIPELINE_SOURCE == "schedule" && $DOCKER_SCHEDULE != "true"
+ when: never
+ - if: '$CI_COMMIT_BRANCH == $DOCKER_LATEST_BRANCH'
+ - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+ - if: $CI_COMMIT_TAG
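For illustration, a project could consume this template from its own .gitlab-ci.yml
roughly as follows; the include path mirrors the one documented in the GoogleStorage
template below, and the variable overrides are hypothetical::

    ---
    include:
      - project: anuket/releng
        file: '/gitlab-templates/Docker.gitlab-ci.yml'

    variables:
      DOCKER_FILEPATH: "docker/Dockerfile"   # hypothetical Dockerfile location
      DOCKER_SCHEDULE: "true"                # opt in to scheduled rebuilds

    docker-build:
      extends: .docker-build-and-push

Because .docker-build-and-push is a hidden (dot-prefixed) job, nothing runs until a
project extends it as shown.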
diff --git a/gitlab-templates/GoogleStorage.gitlab-ci.yml b/gitlab-templates/GoogleStorage.gitlab-ci.yml
new file mode 100644
index 000000000..4ddf313d6
--- /dev/null
+++ b/gitlab-templates/GoogleStorage.gitlab-ci.yml
@@ -0,0 +1,35 @@
+# Template for uploading artifacts to Google Storage
+#
+# To upload artifacts to Google Storage, include this file in your
+# .gitlab-ci.yml file with the following stanza:
+#
+# include:
+# - project: anuket/releng
+# file: '/gitlab-templates/GoogleStorage.gitlab-ci.yml'
+#
+# And append the following "- !reference.." line to the script portion
+# of a job where artifacts should be uploaded:
+#
+# script:
+# ...
+# - !reference [.gsutil-install, script]
+# ...
+#
+# After the script has been included `gsutil` will have access to the
+# necessary Google Storage bucket.
+---
+variables:
+ GS_URL: "artifacts.opnfv.org/$CI_PROJECT_NAME"
+ WORKSPACE: $CI_PROJECT_DIR
+
+.gsutil-install: &gsutil-install
+ script:
+ - |
+ if command -v dnf &> /dev/null; then
+ dnf -y install python3-pip
+ else
+ yum -y install python3-pip
+ fi
+ - python3 -m pip install -U pip
+ - python3 -m pip install gsutil
+ - echo "$GSUTIL_CONFIG" > ~/.boto
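A hypothetical consumer of this template might look like the following; the image,
artifact directory, and destination path are placeholders, and the template expects a
GSUTIL_CONFIG CI variable holding the boto credentials written to ~/.boto above::

    ---
    include:
      - project: anuket/releng
        file: '/gitlab-templates/GoogleStorage.gitlab-ci.yml'

    upload-artifacts:
      stage: deploy
      image: centos:8                        # any dnf/yum based image works
      script:
        - !reference [.gsutil-install, script]
        - gsutil cp -r build/ "gs://$GS_URL/$CI_COMMIT_SHORT_SHA/"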
diff --git a/gitlab-templates/RTD.gitlab-ci.yml b/gitlab-templates/RTD.gitlab-ci.yml
new file mode 100644
index 000000000..59b455d69
--- /dev/null
+++ b/gitlab-templates/RTD.gitlab-ci.yml
@@ -0,0 +1,104 @@
+# ReadTheDocs Workflow
+#
+# This workflow adds these builds to projects:
+#
+# docs-build:
+# Generate a html sphinx-build from the $DOCS_DIRECTORY
+#
+# docs-link-check:
+# Run a non-blocking sphinx-build linkcheck against
+# the $DOCS_DIRECTORY
+#
+# pages:
+# Serve the built documentation as the Gitlab Pages site for
+# the project
+#
+# Both docs-build and docs-link-check run on merge requests and merges
+# to the default branch that modify files under the $DOCS_DIRECTORY,
+# while the pages job runs only on merges.
+#
+# Scheduled builds can be enabled when creating a schedule job and
+# specifying DOCS_SCHEDULE = "true" in build variables
+#
+# If extra dependencies are needed for builds they will be installed
+# from the $DOCS_REQUIREMENTS location.
+---
+variables:
+ PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
+ DOCS_DIRECTORY: "docs"
+ DOCS_REQUIREMENTS: "$DOCS_DIRECTORY/requirements.txt"
+ STABLE_BRANCH: "stable/*"
+
+.docs-cache: &docs-cache
+ paths:
+ - .cache/pip
+ - venv/
+
+.docs-before-script: &docs-before-script
+ - python -V
+ - pip install virtualenv
+ - virtualenv venv
+ - source venv/bin/activate
+ - pip install Sphinx
+ - |
+ if [ -f "$DOCS_REQUIREMENTS" ]; then
+ pip install -r "$DOCS_REQUIREMENTS"
+ fi
+
+docs-build:
+ stage: build
+ image: python:3
+ before_script:
+ - *docs-before-script
+ script: |
+ sphinx-build -T -b html -D language=en $DOCS_DIRECTORY _build/html
+ cache: *docs-cache
+ artifacts:
+ paths:
+ - _build/html
+ rules:
+ - if: $CI_PIPELINE_SOURCE == "schedule" && $DOCS_SCHEDULE != "true"
+ when: never
+ - if: $CI_PIPELINE_SOURCE == "merge_request_event"
+ changes:
+ - $DOCS_DIRECTORY/**/*
+ - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+ - if: $CI_COMMIT_BRANCH == $STABLE_BRANCH
+
+docs-link-check:
+ stage: test
+ allow_failure: true
+ needs: []
+ image: python:3
+ before_script:
+ - *docs-before-script
+ script: |
+ sphinx-build -T -b linkcheck $DOCS_DIRECTORY _build/linkcheck
+ cache: *docs-cache
+ artifacts:
+ paths:
+ - _build/linkcheck
+ rules:
+ - if: $CI_PIPELINE_SOURCE == "schedule" && $DOCS_SCHEDULE != "true"
+ when: never
+ - if: $CI_PIPELINE_SOURCE == "merge_request_event"
+ changes:
+ - $DOCS_DIRECTORY/**/*
+ - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+ - if: $CI_COMMIT_BRANCH == $STABLE_BRANCH
+
+pages:
+ stage: deploy
+ image: python:3
+ script: |
+ mkdir public
+ mv _build/html/* public/
+ artifacts:
+ paths:
+ - public
+ rules:
+ - if: $CI_PIPELINE_SOURCE == "schedule"
+ when: never
+ - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
+ changes:
+ - $DOCS_DIRECTORY/**/*
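To adopt this workflow, a project would include the template from its .gitlab-ci.yml
and override the documented variables only where needed; a minimal, hypothetical
example::

    ---
    include:
      - project: anuket/releng
        file: '/gitlab-templates/RTD.gitlab-ci.yml'

    variables:
      DOCS_DIRECTORY: "docs"                      # default, shown for clarity
      DOCS_REQUIREMENTS: "docs/requirements.txt"  # extra Sphinx dependencies

With that in place, docs-build and docs-link-check run on merge requests that touch
the docs directory, and pages publishes the built HTML on merges to the default branch.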
diff --git a/global-jjb b/global-jjb
deleted file mode 160000
-Subproject 623134d6449371e8bc28095e53abc051bbb6b4a
diff --git a/jjb/3rd_party_ci/create-apex-vms.sh b/jjb/3rd_party_ci/create-apex-vms.sh
deleted file mode 100755
index 0744ac89a..000000000
--- a/jjb/3rd_party_ci/create-apex-vms.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# clone opnfv sdnvpn repo
-git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
-
-. $WORKSPACE/sdnvpn/odl-pipeline/odl-pipeline-common.sh
-pushd $LIB
-./test_environment.sh --env-number $APEX_ENV_NUMBER --cloner-info $CLONER_INFO --snapshot-disks $SNAPSHOT_DISKS --vjump-hosts $VIRTUAL_JUMPHOSTS
-popd
diff --git a/jjb/3rd_party_ci/detect-snapshot.sh b/jjb/3rd_party_ci/detect-snapshot.sh
deleted file mode 100755
index 77788aa2c..000000000
--- a/jjb/3rd_party_ci/detect-snapshot.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-echo "Detecting requested OpenStack branch and topology type in gerrit comment"
-parsed_comment=$(echo $GERRIT_EVENT_COMMENT_TEXT | sed -n 's/^.*check-opnfv\s*//p')
-parsed_comment=$(echo $parsed_comment | sed -n 's/\s*$//p')
-if [ ! -z "$parsed_comment" ]; then
- if echo $parsed_comment | grep -E '^[a-z]+-(no)?ha'; then
- os_version=${parsed_comment%%"-"*}
- topo=${parsed_comment#*"-"}
- echo "OS version detected in gerrit comment: ${os_version}"
- echo "Topology type detected in gerrit comment: ${topo}"
- else
- echo "Invalid format given for scenario in gerrit comment: ${parsed_comment}...aborting"
- exit 1
- fi
-else
- echo "No scenario given in gerrit comment, will use default (master OpenStack, noha)"
- os_version='master'
- topo='noha'
-fi
-
-echo "Writing variables to file"
-cat > detected_snapshot << EOI
-OS_VERSION=$os_version
-TOPOLOGY=$topo
-SNAP_CACHE=$HOME/snap_cache/$os_version/$topo
-EOI
diff --git a/jjb/3rd_party_ci/download-netvirt-artifact.sh b/jjb/3rd_party_ci/download-netvirt-artifact.sh
deleted file mode 100755
index ac7f76ccd..000000000
--- a/jjb/3rd_party_ci/download-netvirt-artifact.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-echo "Attempting to fetch the artifact location from ODL Jenkins"
-if [ "$ODL_BRANCH" != 'master' ]; then
- DIST=$(echo ${ODL_BRANCH} | sed -rn 's#([a-zA-Z]+)/([a-zA-Z]+)#\2#p')
- ODL_BRANCH=$(echo ${ODL_BRANCH} | sed -rn 's#([a-zA-Z]+)/([a-zA-Z]+)#\1%2F\2#p')
-else
- DIST='neon'
-fi
-
-echo "ODL Distribution is ${DIST}"
-ODL_ZIP="karaf-SNAPSHOT.zip"
-CHANGE_DETAILS_URL="https://git.opendaylight.org/gerrit/changes/${GERRIT_PROJECT}~${ODL_BRANCH}~${GERRIT_CHANGE_ID}/detail"
-# due to limitation with the Jenkins Gerrit Trigger, we need to use Gerrit REST API to get the change details
-ODL_BUILD_JOB_NUM=$(curl --fail ${CHANGE_DETAILS_URL} | grep -Eo "${GERRIT_PROJECT}-distribution-check-${DIST}/[0-9]+" | tail -1 | grep -Eo [0-9]+)
-DISTRO_CHECK_CONSOLE_LOG="https://logs.opendaylight.org/releng/vex-yul-odl-jenkins-1/${GERRIT_PROJECT}-distribution-check-${DIST}/${ODL_BUILD_JOB_NUM}/console.log.gz"
-NETVIRT_ARTIFACT_URL=$(curl --fail --compressed ${DISTRO_CHECK_CONSOLE_LOG} | grep 'BUNDLE_URL' | cut -d = -f 2)
-
-echo -e "URL to artifact is\n\t$NETVIRT_ARTIFACT_URL"
-
-echo "Downloading the artifact. This could take time..."
-if ! wget -q -O $ODL_ZIP $NETVIRT_ARTIFACT_URL; then
- echo "The artifact does not exist! Probably removed due to ODL Jenkins artifact retention policy."
- echo "Use 'recheck' on the gerrit to get artifact rebuilt."
- exit 1
-fi
-
-#TODO(trozet) remove this once odl-pipeline accepts zip files
-echo "Converting artifact zip to tar.gz"
-UNZIPPED_DIR=`dirname $(unzip -qql ${ODL_ZIP} | head -n1 | tr -s ' ' | cut -d' ' -f5-)`
-unzip ${ODL_ZIP}
-tar czf /tmp/${NETVIRT_ARTIFACT} ${UNZIPPED_DIR}
-
-echo "Download complete"
-ls -al /tmp/${NETVIRT_ARTIFACT}
diff --git a/jjb/3rd_party_ci/install-netvirt.sh b/jjb/3rd_party_ci/install-netvirt.sh
deleted file mode 100755
index 232d60e5c..000000000
--- a/jjb/3rd_party_ci/install-netvirt.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-SNAP_CACHE=$HOME/snap_cache/$OS_VERSION/$TOPOLOGY
-# clone opnfv sdnvpn repo
-git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
-
-if [ ! -f "/tmp/${NETVIRT_ARTIFACT}" ]; then
- echo "ERROR: /tmp/${NETVIRT_ARTIFACT} specified as NetVirt Artifact, but file does not exist"
- exit 1
-fi
-
-if [ ! -f "${SNAP_CACHE}/node.yaml" ]; then
- echo "ERROR: node.yaml pod config missing in ${SNAP_CACHE}"
- exit 1
-fi
-
-if [ ! -f "${SNAP_CACHE}/id_rsa" ]; then
- echo "ERROR: id_rsa ssh creds missing in ${SNAP_CACHE}"
- exit 1
-fi
-
-# TODO (trozet) snapshot should have already been unpacked into cache folder
-# but we really should check the cache here, and not use a single cache folder
-# for when we support multiple jobs on a single slave
-pushd sdnvpn/odl-pipeline/lib > /dev/null
-git fetch https://gerrit.opnfv.org/gerrit/sdnvpn refs/changes/17/59017/5 && git checkout FETCH_HEAD
-./odl_reinstaller.sh --pod-config ${SNAP_CACHE}/node.yaml \
- --odl-artifact /tmp/${NETVIRT_ARTIFACT} --ssh-key-file ${SNAP_CACHE}/id_rsa
-popd > /dev/null
diff --git a/jjb/3rd_party_ci/odl-netvirt.yaml b/jjb/3rd_party_ci/odl-netvirt.yaml
deleted file mode 100644
index 15d28486f..000000000
--- a/jjb/3rd_party_ci/odl-netvirt.yaml
+++ /dev/null
@@ -1,287 +0,0 @@
----
-- project:
- name: 'netvirt'
-
- project: 'netvirt'
-
- installer: 'netvirt'
- #####################################
- # branch definitions
- #####################################
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - oxygen:
- branch: 'stable/oxygen'
- gs-pathname: ''
- disabled: false
- #####################################
- # patch verification phases
- #####################################
- phase:
- - 'create-apex-vms':
- slave-label: 'apex-virtual-master'
- - 'install-netvirt':
- slave-label: 'apex-virtual-master'
- - 'postprocess':
- slave-label: 'apex-virtual-master'
- #####################################
- # jobs
- #####################################
- jobs:
- - 'odl-netvirt-verify-virtual-{stream}'
- - 'odl-netvirt-verify-virtual-{phase}-{stream}'
-
-#####################################
-# job templates
-#####################################
-- job-template:
- name: 'odl-netvirt-verify-virtual-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 5
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-.*-promote.*'
- - 'apex-virtual.*'
- - 'odl-netvirt-verify-virtual-create-apex-vms-.*'
- - 'odl-netvirt-verify-virtual-install-netvirt-.*'
- - 'functest-netvirt-virtual-suite-.*'
- - 'odl-netvirt-verify-virtual-postprocess-.*'
- blocking-level: 'NODE'
- scm:
- - git:
- url: https://gerrit.opnfv.org/gerrit/apex
- branches:
- - 'origin/master'
- timeout: 15
- wipe-workspace: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: NETVIRT_ARTIFACT
- default: distribution-karaf.tar.gz
- - 'apex-virtual-master-defaults'
-
- triggers:
- - gerrit:
- server-name: 'git.opendaylight.org'
- trigger-on:
- # yamllint disable rule:line-length
- # - comment-added-contains-event:
- # comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : SUCCESS'
- # - comment-added-contains-event:
- # comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : UNSTABLE'
- # yamllint enable rule:line-length
- - comment-added-contains-event:
- comment-contains-value: 'check-opnfv'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '*'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- readable-message: true
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - detect-opnfv-snapshot
- - inject:
- properties-file: detected_snapshot
- - multijob:
- name: create-apex-vms
- condition: SUCCESSFUL
- projects:
- - name: 'odl-netvirt-verify-virtual-create-apex-vms-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
- GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER
- GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
- NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
- APEX_ENV_NUMBER=$APEX_ENV_NUMBER
- GERRIT_EVENT_COMMENT_TEXT=$GERRIT_EVENT_COMMENT_TEXT
- TOPOLOGY=$TOPOLOGY
- OS_VERSION=$OS_VERSION
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: install-netvirt
- condition: SUCCESSFUL
- projects:
- - name: 'odl-netvirt-verify-virtual-install-netvirt-{stream}'
- current-parameters: false
- predefined-parameters: |
- ODL_BRANCH=$BRANCH
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
- GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER
- GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
- GERRIT_PROJECT=$GERRIT_PROJECT
- NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
- TOPOLOGY=$TOPOLOGY
- OS_VERSION=$OS_VERSION
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: csit
- condition: ALWAYS
- projects:
- - name: cperf-apex-csit-master
- predefined-parameters: |
- ODL_BRANCH=$BRANCH
- RC_FILE_PATH=$SNAP_CACHE/overcloudrc
- NODE_FILE_PATH=$SNAP_CACHE/node.yaml
- SSH_KEY_PATH=$SNAP_CACHE/id_rsa
- ODL_CONTAINERIZED=false
- OS_VERSION=$OS_VERSION
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: false
- - multijob:
- name: csit-collect-logs
- condition: ALWAYS
- projects:
- - name: cperf-upload-logs-csit
- predefined-parameters: |
- ODL_BRANCH=$BRANCH
- OS_VERSION=$OS_VERSION
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: false
- - multijob:
- name: apex-fetch-logs
- condition: ALWAYS
- projects:
- - name: 'apex-fetch-logs-{stream}'
- current-parameters: false
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - multijob:
- name: postprocess
- condition: ALWAYS
- projects:
- - name: 'odl-netvirt-verify-virtual-postprocess-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
- GERRIT_PATCHSET_NUMBER=$GERRIT_PATCHSET_NUMBER
- GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
- NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: false
-
-- job-template:
- name: 'odl-netvirt-verify-virtual-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 5
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'odl-netvirt-verify-virtual-create-apex-vms-.*'
- - 'odl-netvirt-verify-virtual-install-netvirt-.*'
- - 'functest-netvirt-virtual-suite-.*'
- - 'odl-netvirt-verify-virtual-postprocess-.*'
- blocking-level: 'NODE'
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- scm:
- - git:
- url: https://gerrit.opnfv.org/gerrit/apex
- branches:
- - 'origin/master'
- timeout: 15
- wipe-workspace: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-defaults'
- - '{installer}-defaults'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl-nofeature-noha'
- description: 'Scenario to deploy and test'
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/apex
- description: "URL to Google Storage with snapshot artifacts."
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-verify-{phase}-builder'
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'netvirt-verify-create-apex-vms-builder'
- builders:
- - shell:
- !include-raw: ../apex/apex-snapshot-deploy.sh
-- builder:
- name: 'netvirt-verify-install-netvirt-builder'
- builders:
- - shell:
- !include-raw: ./download-netvirt-artifact.sh
- - shell:
- !include-raw: ./install-netvirt.sh
-- builder:
- name: 'netvirt-verify-postprocess-builder'
- builders:
- - shell:
- !include-raw: ./postprocess-netvirt.sh
-
-- builder:
- name: 'detect-opnfv-snapshot'
- builders:
- - shell:
- !include-raw: ./detect-snapshot.sh
diff --git a/jjb/3rd_party_ci/postprocess-netvirt.sh b/jjb/3rd_party_ci/postprocess-netvirt.sh
deleted file mode 100755
index 796514259..000000000
--- a/jjb/3rd_party_ci/postprocess-netvirt.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# clone opnfv sdnvpn repo
-git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
-. $WORKSPACE/sdnvpn/odl-pipeline/odl-pipeline-common.sh
-pushd $LIB
-./post_process.sh
-popd
diff --git a/jjb/airship/airship-rtd-jobs.yaml b/jjb/airship/airship-rtd-jobs.yaml
deleted file mode 100644
index f3af2adfd..000000000
--- a/jjb/airship/airship-rtd-jobs.yaml
+++ /dev/null
@@ -1,21 +0,0 @@
----
-- project:
- name: airship-rtd
- project: airship
- project-name: airship
- project-pattern: 'airship'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - iruya:
- branch: 'stable/{stream}'
- disabled: false
-
-
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-airship/111032/'
- rtd-token: 'b4e9b47eee6e1311e82e462f06cc4fb44a7534db'
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/airship/airship.yaml b/jjb/airship/airship.yaml
deleted file mode 100644
index 076715958..000000000
--- a/jjb/airship/airship.yaml
+++ /dev/null
@@ -1,655 +0,0 @@
----
-- airship-jobs: &airship-jobs
- name: 'airship-jobs'
- current-parameters: true
-
-- airship-params: &airship-params
- name: 'airship-params'
- repo: 'opnfv'
- port:
- tag:
- - latest:
- branch: master
- slave: intel-pod17
- functest_tag: hunter
-
-- parameter:
- name: airship-slave
- parameters:
- - label:
- name: slave
- default: '{slave}'
-
-- parameter:
- name: airship-build_tag
- parameters:
- - random-string:
- name: build_tag
-
-- parameter:
- name: airship-DEBUG
- parameters:
- - string:
- name: DEBUG
- default: 'true'
-
-- parameter:
- name: airship-EXTERNAL_NETWORK
- parameters:
- - string:
- name: EXTERNAL_NETWORK
- default: public
-
-- airship-containers: &airship-containers
- name: 'airship-containers'
- repo: '{repo}'
- port: '{port}'
- container: '{container}'
- functest_tag: '{functest_tag}'
-
-- airship-run-containers: &airship-run-containers
- name: 'airship-run-containers'
- <<: *airship-containers
- test: '{test}'
-
-- builder:
- name: airship-pull-containers
- builders:
- - shell: |
- set +x
- if [ "{repo}" = "_" ]; then
- image={container}:{functest_tag}
- elif [ "{port}" = "None" ]; then
- image={repo}/{container}:{functest_tag}
- else
- image={repo}:{port}/{container}:{functest_tag}
- fi
- sudo docker pull $image
-
-- builder:
- name: airship-run-containers
- builders:
- - shell: |
- set +x
- [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/* || true
- if [ "{repo}" = "_" ]; then
- image={container}:{functest_tag}
- elif [ "{port}" = "None" ]; then
- image={repo}/{container}:{functest_tag}
- else
- image={repo}:{port}/{container}:{functest_tag}
- fi
- sudo docker run --rm \
- -e S3_ENDPOINT_URL=https://storage.googleapis.com \
- -e S3_DST_URL=s3://artifacts.opnfv.org/airship/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
- -e HTTP_DST_URL=http://artifacts.opnfv.org/airship/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
- -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
- -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
- -e NODE_NAME=$slave \
- -e INSTALLER_TYPE=airship \
- -e BUILD_TAG=$BUILD_TAG \
- -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
- -v /home/opnfv/functest/.boto:/root/.boto \
- -e DEBUG=$DEBUG \
- -e EXTERNAL_NETWORK=$EXTERNAL_NETWORK \
- -e DEPLOY_SCENARIO=os-nosdn-nofeature-ha \
- -v /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file \
- -v /home/opnfv/functest/images:/home/opnfv/functest/images \
- -v /home/opnfv/functest/tempest_conf.yaml:/usr/lib/python2.7/\
- site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml \
- -v /home/opnfv/functest/blacklist.yaml:/usr/lib/python2.7/\
- site-packages/functest/opnfv_tests/openstack/rally/blacklist.yaml \
- $image run_tests -t {test} -r -p
-
-- builder:
- name: airship-remove-images
- builders:
- - shell: |
- set +x
- if [ "{repo}" = "_" ]; then
- image={container}:{functest_tag}
- elif [ "{port}" = "None" ]; then
- image={repo}/{container}:{functest_tag}
- else
- image={repo}:{port}/{container}:{functest_tag}
- fi
- sudo docker rmi $image || true
-
-- builder:
- name: airship-deploy
- builders:
- - shell: |
- set +x
- export TERM_OPTS=" "
- sudo -H -E -u ubuntu bash -c 'tools/deploy.sh $slave deploy_site'
-
-- scm:
- name: airship-scm
- scm:
- - git:
- url: https://gerrit.opnfv.org/gerrit/airship
- refspec: '+refs/changes/*:refs/changes/*'
- branches:
- - '{ref}'
-
-- parameter:
- name: airship-branch
- parameters:
- - string:
- name: branch
- default: '{branch}'
-
-- job-template:
- name: 'airship-deploy-daily-{tag}'
- parameters:
- - airship-slave:
- slave: '{slave}'
- - airship-branch:
- branch: '{branch}'
- scm:
- - airship-scm:
- ref: $branch
- builders:
- - airship-deploy
-
-- job-template:
- name: 'airship-deploy-gate-{tag}'
- parameters:
- - airship-slave:
- slave: '{slave}'
- scm:
- - airship-scm:
- ref: $GERRIT_REFSPEC
- builders:
- - airship-deploy
-
-- project:
- name: 'airship-deploy'
- <<: *airship-params
- jobs:
- - 'airship-deploy-daily-{tag}'
- - 'airship-deploy-gate-{tag}'
-
-- job-template:
- name: 'airship-{repo}-{container}-{tag}-pull'
- parameters:
- - airship-slave:
- slave: '{slave}'
- builders:
- - airship-pull-containers:
- <<: *airship-containers
-
-- project:
- name: 'airship-opnfv-functest-healthcheck-pull'
- <<: *airship-params
- container: 'functest-healthcheck'
- jobs:
- - 'airship-{repo}-{container}-{tag}-pull'
-
-- project:
- name: 'airship-opnfv-functest-smoke-pull'
- <<: *airship-params
- container: 'functest-smoke'
- jobs:
- - 'airship-{repo}-{container}-{tag}-pull'
-
-- project:
- name: 'airship-opnfv-functest-benchmarking-pull'
- <<: *airship-params
- container: 'functest-benchmarking'
- jobs:
- - 'airship-{repo}-{container}-{tag}-pull'
-
-- project:
- name: 'airship-opnfv-functest-vnf-pull'
- <<: *airship-params
- container: 'functest-vnf'
- jobs:
- - 'airship-{repo}-{container}-{tag}-pull'
-
-- job-template:
- name: 'airship-{repo}-{container}-{tag}-rmi'
- parameters:
- - airship-slave:
- slave: '{slave}'
- builders:
- - airship-remove-images:
- <<: *airship-containers
-
-- project:
- name: 'airship-opnfv-functest-healthcheck-rmi'
- <<: *airship-params
- container: 'functest-healthcheck'
- jobs:
- - 'airship-{repo}-{container}-{tag}-rmi'
-
-- project:
- name: 'airship-opnfv-functest-smoke-rmi'
- <<: *airship-params
- container: 'functest-smoke'
- jobs:
- - 'airship-{repo}-{container}-{tag}-rmi'
-
-- project:
- name: 'airship-opnfv-functest-benchmarking-rmi'
- <<: *airship-params
- container: 'functest-benchmarking'
- jobs:
- - 'airship-{repo}-{container}-{tag}-rmi'
-
-- project:
- name: 'airship-opnfv-functest-vnf-rmi'
- <<: *airship-params
- container: 'functest-vnf'
- jobs:
- - 'airship-{repo}-{container}-{tag}-rmi'
-
-- job-template:
- name: 'airship-{repo}-{container}-{tag}-{test}-run'
- parameters:
- - airship-slave:
- slave: '{slave}'
- - airship-build_tag:
- build_tag: ''
- - airship-DEBUG:
- DEBUG: 'true'
- - airship-EXTERNAL_NETWORK:
- EXTERNAL_NETWORK: public
- builders:
- - airship-run-containers:
- <<: *airship-run-containers
-
-- project:
- name: 'airship-opnfv-functest-healthcheck'
- <<: *airship-params
- container: 'functest-healthcheck'
- test:
- - connection_check
- - tenantnetwork1
- - tenantnetwork2
- - vmready1
- - vmready2
- - singlevm1
- - singlevm2
- - vping_ssh
- - vping_userdata
- - cinder_test
- - odl
- - tempest_smoke
- jobs:
- - 'airship-{repo}-{container}-{tag}-{test}-run'
-
-- project:
- name: 'airship-opnfv-functest-smoke'
- <<: *airship-params
- container: 'functest-smoke'
- test:
- - neutron-tempest-plugin-api
- - tempest_cinder
- - tempest_keystone
- - rally_sanity
- - refstack_defcore
- - tempest_full
- - tempest_scenario
- - tempest_slow
- - patrole
- - neutron_trunk
- - networking-bgpvpn
- - networking-sfc
- - barbican
- jobs:
- - 'airship-{repo}-{container}-{tag}-{test}-run'
-
-- project:
- name: 'airship-opnfv-functest-benchmarking'
- <<: *airship-params
- container: 'functest-benchmarking'
- test:
- - rally_full
- - rally_jobs
- - vmtp
- - shaker
- jobs:
- - 'airship-{repo}-{container}-{tag}-{test}-run'
-
-- project:
- name: 'airship-opnfv-functest-vnf'
- <<: *airship-params
- container: 'functest-vnf'
- test:
- - cloudify
- - cloudify_ims
- - heat_ims
- - vyos_vrouter
- - juju_epc
- jobs:
- - 'airship-{repo}-{container}-{tag}-{test}-run'
-
-- builder:
- name: airship-zip
- builders:
- - shell: |
- set +x
- [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/* || true
- if [ "{repo}" = "_" ]; then
- image={container}:{functest_tag}
- elif [ "{port}" = "None" ]; then
- image={repo}/{container}:{functest_tag}
- else
- image={repo}:{port}/{container}:{functest_tag}
- fi
- sudo docker run --rm \
- -e S3_ENDPOINT_URL=https://storage.googleapis.com \
- -e S3_DST_URL=s3://artifacts.opnfv.org/airship \
- -e HTTP_DST_URL=http://artifacts.opnfv.org/airship/ \
- -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
- -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
- -e BUILD_TAG=$BUILD_TAG \
- -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
- -v /home/opnfv/functest/.boto:/root/.boto \
- $image zip_campaign
-
-- job-template:
- name: 'airship-{tag}-zip'
- parameters:
- - airship-build_tag:
- build_tag: ''
- builders:
- - airship-zip:
- <<: *airship-containers
-
-- project:
- name: 'airship-{tag}-zip'
- <<: *airship-params
- container: 'functest-healthcheck'
- jobs:
- - 'airship-{tag}-zip'
-
-- job-template:
- name: 'airship-{tag}-daily'
- project-type: multijob
- triggers:
- - timed: '@daily'
- parameters:
- - airship-slave:
- slave: '{slave}'
- - airship-build_tag:
- build_tag: ''
- - airship-branch:
- branch: 'master'
- - airship-DEBUG:
- DEBUG: 'true'
- - airship-EXTERNAL_NETWORK:
- EXTERNAL_NETWORK: public
- properties:
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - '^airship-{tag}-(daily|gate)$'
- builders:
- - multijob:
- name: deploy
- projects:
- - name: 'airship-deploy-daily-{tag}'
- <<: *airship-jobs
- - multijob:
- name: remove former images
- projects:
- - name: 'airship-opnfv-functest-healthcheck-{tag}-rmi'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-rmi'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-benchmarking-{tag}-rmi'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-vnf-{tag}-rmi'
- <<: *airship-jobs
- - multijob:
- name: pull containers
- projects:
- - name: 'airship-opnfv-functest-healthcheck-{tag}-pull'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-pull'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-benchmarking-{tag}-pull'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-vnf-{tag}-pull'
- <<: *airship-jobs
- - multijob:
- name: opnfv/functest-healthcheck:{functest_tag}
- condition: ALWAYS
- projects:
- - name: 'airship-opnfv-functest-healthcheck-{tag}-connection_check-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-tenantnetwork1-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-tenantnetwork2-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-vmready1-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-vmready2-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-singlevm1-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-singlevm2-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-vping_ssh-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-vping_userdata-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-cinder_test-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-odl-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-tempest_smoke-run'
- <<: *airship-jobs
- - multijob:
- name: opnfv/functest-smoke:{functest_tag}
- condition: ALWAYS
- projects:
- - name: 'airship-opnfv-functest-smoke-{tag}-neutron-tempest-plugin-api-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-neutron-tempest_cinder-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-neutron-tempest_keystone-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-rally_sanity-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-refstack_defcore-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-tempest_full-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-tempest_scenario-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-tempest_slow-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-patrole-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-neutron_trunk-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-networking-bgpvpn-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-networking-sfc-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-barbican-run'
- <<: *airship-jobs
- - multijob:
- name: opnfv/functest-benchmarking:{functest_tag}
- condition: ALWAYS
- projects:
- - name: 'airship-opnfv-functest-benchmarking-{tag}-rally_full-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-benchmarking-{tag}-rally_jobs-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-benchmarking-{tag}-vmtp-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-benchmarking-{tag}-shaker-run'
- <<: *airship-jobs
- - multijob:
- name: opnfv/functest-vnf:{functest_tag}
- condition: ALWAYS
- execution-type: SEQUENTIALLY
- projects:
- - name: 'airship-opnfv-functest-vnf-{tag}-cloudify-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-vnf-{tag}-cloudify_ims-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-vnf-{tag}-heat_ims-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-vnf-{tag}-vyos_vrouter-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-vnf-{tag}-juju_epc-run'
- <<: *airship-jobs
- - multijob:
- name: dump all campaign data
- projects:
- - name: 'airship-{tag}-zip'
- <<: *airship-jobs
-
-- trigger:
- name: airship-patchset-created
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'airship'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- skip-vote:
- successful: false
- failed: false
- unstable: false
- notbuilt: false
-
-- job-template:
- name: 'airship-{tag}-gate'
- project-type: multijob
- triggers:
- - airship-patchset-created:
- branch: '{branch}'
- parameters:
- - airship-slave:
- slave: '{slave}'
- - airship-build_tag:
- build_tag: ''
- - airship-DEBUG:
- DEBUG: 'true'
- - airship-EXTERNAL_NETWORK:
- EXTERNAL_NETWORK: public
- properties:
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - '^airship-{tag}-(daily|gate)$'
- builders:
- - multijob:
- name: deploy
- projects:
- - name: 'airship-deploy-gate-{tag}'
- <<: *airship-jobs
- - multijob:
- name: remove former images
- projects:
- - name: 'airship-opnfv-functest-healthcheck-{tag}-rmi'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-rmi'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-benchmarking-{tag}-rmi'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-vnf-{tag}-rmi'
- <<: *airship-jobs
- - multijob:
- name: pull containers
- projects:
- - name: 'airship-opnfv-functest-healthcheck-{tag}-pull'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-pull'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-benchmarking-{tag}-pull'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-vnf-{tag}-pull'
- <<: *airship-jobs
- - multijob:
- name: opnfv/functest-healthcheck:{functest_tag}
- projects:
- - name: 'airship-opnfv-functest-healthcheck-{tag}-connection_check-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-tenantnetwork1-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-tenantnetwork2-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-vmready1-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-vmready2-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-singlevm1-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-singlevm2-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-vping_ssh-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-vping_userdata-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-cinder_test-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-healthcheck-{tag}-odl-run'
- <<: *airship-jobs
- - multijob:
- name: opnfv/functest-smoke:{functest_tag}
- projects:
- - name: 'airship-opnfv-functest-smoke-{tag}-tempest_scenario-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-neutron_trunk-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-networking-bgpvpn-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-networking-sfc-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-smoke-{tag}-barbican-run'
- <<: *airship-jobs
- - multijob:
- name: opnfv/functest-benchmarking:{functest_tag}
- projects:
- - name: 'airship-opnfv-functest-benchmarking-{tag}-vmtp-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-benchmarking-{tag}-shaker-run'
- <<: *airship-jobs
- - multijob:
- name: opnfv/functest-vnf:{functest_tag}
- condition: ALWAYS
- execution-type: SEQUENTIALLY
- projects:
- - name: 'airship-opnfv-functest-vnf-{tag}-cloudify-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-vnf-{tag}-vyos_vrouter-run'
- <<: *airship-jobs
- - name: 'airship-opnfv-functest-vnf-{tag}-juju_epc-run'
- <<: *airship-jobs
-
-- project:
- name: 'airship'
- <<: *airship-params
- jobs:
- - 'airship-{tag}-daily'
- - 'airship-{tag}-gate'
-
-- view:
- name: airship
- view-type: list
- columns:
- - status
- - weather
- - job
- - last-success
- - last-failure
- - last-duration
- regex: ^airship-[a-z]+-(daily|check|gate)$
diff --git a/jjb/airship/cntt.yaml b/jjb/airship/cntt.yaml
deleted file mode 100644
index ea8e51ded..000000000
--- a/jjb/airship/cntt.yaml
+++ /dev/null
@@ -1,714 +0,0 @@
----
-- cntt-jobs: &cntt-jobs
- name: 'cntt-jobs'
- current-parameters: true
-
-- cntt-params: &cntt-params
- name: 'cntt-params'
- repo: 'opnfv'
- port:
- tag:
- - latest:
- branch: master
- slave: intel-pod15
- functest_tag: hunter
-
-- parameter:
- name: cntt-slave
- parameters:
- - label:
- name: slave
- default: '{slave}'
-
-- parameter:
- name: cntt-build_tag
- parameters:
- - random-string:
- name: build_tag
-
-- parameter:
- name: cntt-DEBUG
- parameters:
- - string:
- name: DEBUG
- default: 'true'
-
-- parameter:
- name: cntt-EXTERNAL_NETWORK
- parameters:
- - string:
- name: EXTERNAL_NETWORK
- default: public
-
-- cntt-containers: &cntt-containers
- name: 'cntt-containers'
- repo: '{repo}'
- port: '{port}'
- container: '{container}'
- functest_tag: '{functest_tag}'
-
-- cntt-run-containers: &cntt-run-containers
- name: 'cntt-run-containers'
- <<: *cntt-containers
- test: '{test}'
-
-- builder:
- name: cntt-pull-containers
- builders:
- - shell: |
- set +x
- if [ "{repo}" = "_" ]; then
- image={container}:{functest_tag}
- elif [ "{port}" = "None" ]; then
- image={repo}/{container}:{functest_tag}
- else
- image={repo}:{port}/{container}:{functest_tag}
- fi
- sudo docker pull $image
-
-- builder:
- name: cntt-run-containers
- builders:
- - shell: |
- set +x
- [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/* || true
- if [ "{repo}" = "_" ]; then
- image={container}:{functest_tag}
- elif [ "{port}" = "None" ]; then
- image={repo}/{container}:{functest_tag}
- else
- image={repo}:{port}/{container}:{functest_tag}
- fi
- sudo docker run --rm \
- -e S3_ENDPOINT_URL=https://storage.googleapis.com \
- -e S3_DST_URL=s3://artifacts.opnfv.org/cntt/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
- -e HTTP_DST_URL=http://artifacts.opnfv.org/cntt/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
- -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
- -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
- -e NODE_NAME=$slave \
- -e INSTALLER_TYPE=cntt \
- -e BUILD_TAG=$BUILD_TAG \
- -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
- -v /home/opnfv/functest/.boto:/root/.boto \
- -e DEBUG=$DEBUG \
- -e EXTERNAL_NETWORK=$EXTERNAL_NETWORK \
- -e DEPLOY_SCENARIO=os-nosdn-nofeature-ha \
- -v /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file \
- -v /home/opnfv/functest/images:/home/opnfv/functest/images \
- -v /home/opnfv/functest/tempest_conf.yaml:/usr/lib/python2.7/\
- site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml \
- $image run_tests -t {test} -r -p
-
-- builder:
- name: cntt-remove-images
- builders:
- - shell: |
- set +x
- if [ "{repo}" = "_" ]; then
- image={container}:{functest_tag}
- elif [ "{port}" = "None" ]; then
- image={repo}/{container}:{functest_tag}
- else
- image={repo}:{port}/{container}:{functest_tag}
- fi
- sudo docker rmi $image || true
-
-- builder:
- name: cntt-deploy
- builders:
- - shell: |
- set +x
- export TERM_OPTS=" "
- sudo -H -E -u opnfv bash -c 'tools/deploy.sh $slave deploy_site'
-
-- scm:
- name: cntt-scm
- scm:
- - git:
- url: https://gerrit.opnfv.org/gerrit/airship
- refspec: '+refs/changes/*:refs/changes/*'
- branches:
- - '{ref}'
-
-- parameter:
- name: cntt-branch
- parameters:
- - string:
- name: branch
- default: '{branch}'
-
-- job-template:
- name: 'cntt-deploy-daily-{tag}'
- parameters:
- - cntt-slave:
- slave: '{slave}'
- - cntt-branch:
- branch: '{branch}'
- scm:
- - cntt-scm:
- ref: $branch
- builders:
- - cntt-deploy
-
-- job-template:
- name: 'cntt-deploy-gate-{tag}'
- parameters:
- - cntt-slave:
- slave: '{slave}'
- scm:
- - cntt-scm:
- ref: $GERRIT_REFSPEC
- builders:
- - cntt-deploy
-
-- project:
- name: 'cntt-deploy'
- <<: *cntt-params
- jobs:
- - 'cntt-deploy-daily-{tag}'
- - 'cntt-deploy-gate-{tag}'
-
-- job-template:
- name: 'cntt-{repo}-{container}-{tag}-pull'
- parameters:
- - cntt-slave:
- slave: '{slave}'
- builders:
- - cntt-pull-containers:
- <<: *cntt-containers
-
-- project:
- name: 'cntt-opnfv-functest-healthcheck-pull'
- <<: *cntt-params
- container: 'functest-healthcheck'
- jobs:
- - 'cntt-{repo}-{container}-{tag}-pull'
-
-- project:
- name: 'cntt-opnfv-functest-smoke-pull'
- <<: *cntt-params
- container: 'functest-smoke'
- jobs:
- - 'cntt-{repo}-{container}-{tag}-pull'
-
-- project:
- name: 'cntt-opnfv-functest-smoke-cntt-pull'
- <<: *cntt-params
- container: 'functest-smoke-cntt'
- jobs:
- - 'cntt-{repo}-{container}-{tag}-pull'
-
-- project:
- name: 'cntt-opnfv-functest-benchmarking-pull'
- <<: *cntt-params
- container: 'functest-benchmarking'
- jobs:
- - 'cntt-{repo}-{container}-{tag}-pull'
-
-- project:
- name: 'cntt-opnfv-functest-vnf-pull'
- <<: *cntt-params
- container: 'functest-vnf'
- jobs:
- - 'cntt-{repo}-{container}-{tag}-pull'
-
-- job-template:
- name: 'cntt-{repo}-{container}-{tag}-rmi'
- parameters:
- - cntt-slave:
- slave: '{slave}'
- builders:
- - cntt-remove-images:
- <<: *cntt-containers
-
-- project:
- name: 'cntt-opnfv-functest-healthcheck-rmi'
- <<: *cntt-params
- container: 'functest-healthcheck'
- jobs:
- - 'cntt-{repo}-{container}-{tag}-rmi'
-
-- project:
- name: 'cntt-opnfv-functest-smoke-rmi'
- <<: *cntt-params
- container: 'functest-smoke'
- jobs:
- - 'cntt-{repo}-{container}-{tag}-rmi'
-
-- project:
- name: 'cntt-opnfv-functest-smoke-cntt-rmi'
- <<: *cntt-params
- container: 'functest-smoke-cntt'
- jobs:
- - 'cntt-{repo}-{container}-{tag}-rmi'
-
-- project:
- name: 'cntt-opnfv-functest-benchmarking-rmi'
- <<: *cntt-params
- container: 'functest-benchmarking'
- jobs:
- - 'cntt-{repo}-{container}-{tag}-rmi'
-
-- project:
- name: 'cntt-opnfv-functest-vnf-rmi'
- <<: *cntt-params
- container: 'functest-vnf'
- jobs:
- - 'cntt-{repo}-{container}-{tag}-rmi'
-
-- job-template:
- name: 'cntt-{repo}-{container}-{tag}-{test}-run'
- parameters:
- - cntt-slave:
- slave: '{slave}'
- - cntt-build_tag:
- build_tag: ''
- - cntt-DEBUG:
- DEBUG: 'true'
- - cntt-EXTERNAL_NETWORK:
- EXTERNAL_NETWORK: public
- builders:
- - cntt-run-containers:
- <<: *cntt-run-containers
-
-- project:
- name: 'cntt-opnfv-functest-healthcheck'
- <<: *cntt-params
- container: 'functest-healthcheck'
- test:
- - connection_check
- - tenantnetwork1
- - tenantnetwork2
- - vmready1
- - vmready2
- - singlevm1
- - singlevm2
- - vping_ssh
- - vping_userdata
- - cinder_test
- - odl
- - tempest_smoke
- jobs:
- - 'cntt-{repo}-{container}-{tag}-{test}-run'
-
-- project:
- name: 'cntt-opnfv-functest-smoke'
- <<: *cntt-params
- container: 'functest-smoke'
- test:
- - neutron-tempest-plugin-api
- - tempest_cinder
- - tempest_keystone
- - rally_sanity
- - refstack_defcore
- - tempest_full
- - tempest_scenario
- - tempest_slow
- - patrole
- - neutron_trunk
- - networking-bgpvpn
- - networking-sfc
- - barbican
- jobs:
- - 'cntt-{repo}-{container}-{tag}-{test}-run'
-
-- project:
- name: 'cntt-opnfv-functest-smoke-cntt'
- <<: *cntt-params
- container: 'functest-smoke-cntt'
- test:
- - neutron-tempest-plugin-api
- - tempest_cinder
- - tempest_keystone
- - rally_sanity
- - tempest_full
- - tempest_scenario
- - tempest_slow
- jobs:
- - 'cntt-{repo}-{container}-{tag}-{test}-run'
-
-- project:
- name: 'cntt-opnfv-functest-benchmarking'
- <<: *cntt-params
- container: 'functest-benchmarking'
- test:
- - rally_full
- - rally_jobs
- - vmtp
- - shaker
- jobs:
- - 'cntt-{repo}-{container}-{tag}-{test}-run'
-
-- project:
- name: 'cntt-opnfv-functest-vnf'
- <<: *cntt-params
- container: 'functest-vnf'
- test:
- - cloudify
- - cloudify_ims
- - heat_ims
- - vyos_vrouter
- - juju_epc
- jobs:
- - 'cntt-{repo}-{container}-{tag}-{test}-run'
-
-- builder:
- name: cntt-zip
- builders:
- - shell: |
- set +x
- [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/* || true
- if [ "{repo}" = "_" ]; then
- image={container}:{functest_tag}
- elif [ "{port}" = "None" ]; then
- image={repo}/{container}:{functest_tag}
- else
- image={repo}:{port}/{container}:{functest_tag}
- fi
- sudo docker run --rm \
- -e S3_ENDPOINT_URL=https://storage.googleapis.com \
- -e S3_DST_URL=s3://artifacts.opnfv.org/airship \
- -e HTTP_DST_URL=http://artifacts.opnfv.org/airship/ \
- -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
- -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
- -e BUILD_TAG=$BUILD_TAG \
- -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
- -v /home/opnfv/functest/.boto:/root/.boto \
- $image zip_campaign
-
-- job-template:
- name: 'cntt-{tag}-zip'
- parameters:
- - cntt-build_tag:
- build_tag: ''
- builders:
- - cntt-zip:
- <<: *cntt-containers
-
-- project:
- name: 'cntt-{tag}-zip'
- <<: *cntt-params
- container: 'functest-healthcheck'
- jobs:
- - 'cntt-{tag}-zip'
-
-- job-template:
- name: 'cntt-{tag}-daily'
- disabled: true
- project-type: multijob
- triggers:
- - timed: '@daily'
- parameters:
- - cntt-slave:
- slave: '{slave}'
- - cntt-build_tag:
- build_tag: ''
- - cntt-branch:
- branch: 'master'
- - cntt-DEBUG:
- DEBUG: 'true'
- - cntt-EXTERNAL_NETWORK:
- EXTERNAL_NETWORK: public
- properties:
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - '^cntt-{tag}-(daily|gate)$'
- builders:
- - multijob:
- name: deploy
- projects:
- - name: 'cntt-deploy-daily-{tag}'
- <<: *cntt-jobs
- - multijob:
- name: remove former images
- projects:
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-rmi'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-rmi'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-rmi'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-benchmarking-{tag}-rmi'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-vnf-{tag}-rmi'
- <<: *cntt-jobs
- - multijob:
- name: pull containers
- projects:
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-pull'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-pull'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-pull'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-benchmarking-{tag}-pull'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-vnf-{tag}-pull'
- <<: *cntt-jobs
- - multijob:
- name: opnfv/functest-healthcheck:{functest_tag}
- condition: ALWAYS
- projects:
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-connection_check-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-tenantnetwork1-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-tenantnetwork2-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-vmready1-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-vmready2-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-singlevm1-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-singlevm2-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-vping_ssh-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-vping_userdata-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-cinder_test-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-odl-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-tempest_smoke-run'
- <<: *cntt-jobs
- - multijob:
- name: opnfv/functest-smoke:{functest_tag}
- condition: ALWAYS
- projects:
- - name: 'cntt-opnfv-functest-smoke-{tag}-neutron-tempest-plugin-api-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-tempest_cinder-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-tempest_keystone-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-rally_sanity-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-refstack_defcore-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-tempest_full-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-tempest_scenario-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-tempest_slow-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-patrole-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-neutron_trunk-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-networking-bgpvpn-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-networking-sfc-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-barbican-run'
- <<: *cntt-jobs
- - multijob:
- name: opnfv/functest-smoke-cntt:{tag}
- condition: ALWAYS
- projects:
- - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-neutron-tempest-plugin-api-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-tempest_cinder-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-tempest_keystone-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-rally_sanity-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-tempest_full-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-tempest_scenario-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-tempest_slow-run'
- <<: *cntt-jobs
- - multijob:
- name: opnfv/functest-benchmarking:{functest_tag}
- condition: ALWAYS
- projects:
- - name: 'cntt-opnfv-functest-benchmarking-{tag}-rally_full-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-benchmarking-{tag}-rally_jobs-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-benchmarking-{tag}-vmtp-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-benchmarking-{tag}-shaker-run'
- <<: *cntt-jobs
- - multijob:
- name: opnfv/functest-vnf:{functest_tag}
- condition: ALWAYS
- execution-type: SEQUENTIALLY
- projects:
- - name: 'cntt-opnfv-functest-vnf-{tag}-cloudify-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-vnf-{tag}-cloudify_ims-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-vnf-{tag}-heat_ims-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-vnf-{tag}-vyos_vrouter-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-vnf-{tag}-juju_epc-run'
- <<: *cntt-jobs
- - multijob:
- name: dump all campaign data
- projects:
- - name: 'cntt-{tag}-zip'
- <<: *cntt-jobs
-
-- trigger:
- name: cntt-patchset-created
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'airship'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- skip-vote:
- successful: false
- failed: false
- unstable: false
- notbuilt: false
-
-- job-template:
- name: 'cntt-{tag}-gate'
- project-type: multijob
- disabled: true
- triggers:
- - cntt-patchset-created:
- branch: '{branch}'
- parameters:
- - cntt-slave:
- slave: '{slave}'
- - cntt-build_tag:
- build_tag: ''
- - cntt-DEBUG:
- DEBUG: 'true'
- - cntt-EXTERNAL_NETWORK:
- EXTERNAL_NETWORK: public
- properties:
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - '^cntt-{tag}-(daily|gate)$'
- builders:
- - multijob:
- name: deploy
- projects:
- - name: 'cntt-deploy-gate-{tag}'
- <<: *cntt-jobs
- - multijob:
- name: remove former images
- projects:
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-rmi'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-rmi'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-rmi'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-benchmarking-{tag}-rmi'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-vnf-{tag}-rmi'
- <<: *cntt-jobs
- - multijob:
- name: pull containers
- projects:
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-pull'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-pull'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-cntt-{tag}-pull'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-benchmarking-{tag}-pull'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-vnf-{tag}-pull'
- <<: *cntt-jobs
- - multijob:
- name: opnfv/functest-healthcheck:{functest_tag}
- projects:
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-connection_check-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-tenantnetwork1-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-tenantnetwork2-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-vmready1-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-vmready2-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-singlevm1-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-singlevm2-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-vping_ssh-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-vping_userdata-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-cinder_test-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-healthcheck-{tag}-odl-run'
- <<: *cntt-jobs
- - multijob:
- name: opnfv/functest-smoke:{functest_tag}
- projects:
- - name: 'cntt-opnfv-functest-smoke-{tag}-tempest_scenario-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-neutron_trunk-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-networking-bgpvpn-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-networking-sfc-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-smoke-{tag}-barbican-run'
- <<: *cntt-jobs
- - multijob:
- name: opnfv/functest-benchmarking:{functest_tag}
- projects:
- - name: 'cntt-opnfv-functest-benchmarking-{tag}-vmtp-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-benchmarking-{tag}-shaker-run'
- <<: *cntt-jobs
- - multijob:
- name: opnfv/functest-vnf:{functest_tag}
- condition: ALWAYS
- execution-type: SEQUENTIALLY
- projects:
- - name: 'cntt-opnfv-functest-vnf-{tag}-cloudify-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-vnf-{tag}-cloudify_ims-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-vnf-{tag}-heat_ims-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-vnf-{tag}-vyos_vrouter-run'
- <<: *cntt-jobs
- - name: 'cntt-opnfv-functest-vnf-{tag}-juju_epc-run'
- <<: *cntt-jobs
-
-- project:
- name: 'cntt'
- <<: *cntt-params
- jobs:
- - 'cntt-{tag}-daily'
- - 'cntt-{tag}-gate'
-
-- view:
- name: cntt
- view-type: list
- columns:
- - status
- - weather
- - job
- - last-success
- - last-failure
- - last-duration
- regex: ^cntt-[a-z]+-(daily|check|gate)$
diff --git a/jjb/apex/apex-build.sh b/jjb/apex/apex-build.sh
deleted file mode 100755
index 618d18110..000000000
--- a/jjb/apex/apex-build.sh
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-# log info to console
-echo "Starting the build of Apex using OpenStack Master packages. This will take some time..."
-echo "---------------------------------------------------------------------------------------"
-echo
-# create the cache directory if it doesn't exist
-[[ -d $CACHE_DIRECTORY ]] || mkdir -p $CACHE_DIRECTORY
-# set OPNFV_ARTIFACT_VERSION
-if echo $ARTIFACT_VERSION | grep "dev" 1> /dev/null; then
- GERRIT_PATCHSET_NUMBER=$(echo $GERRIT_REFSPEC | grep -Eo '[0-9]+$')
- export OPNFV_ARTIFACT_VERSION="dev${GERRIT_CHANGE_NUMBER}_${GERRIT_PATCHSET_NUMBER}"
- if [[ "$BRANCH" != 'stable/fraser' ]]; then
- # build rpm
- export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --rpms"
- else
- export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY"
- fi
-elif echo $BUILD_TAG | grep "csit" 1> /dev/null; then
- export OPNFV_ARTIFACT_VERSION=csit${BUILD_NUMBER}
- export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY"
-elif [ "$ARTIFACT_VERSION" == "daily" ]; then
- export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d")
- if [[ "$BRANCH" != 'stable/fraser' ]]; then
- export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --rpms"
- else
- export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --iso"
- fi
-else
- export OPNFV_ARTIFACT_VERSION=${ARTIFACT_VERSION}
- if [[ "$BRANCH" != 'stable/fraser' ]]; then
- export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --rpms"
- else
- export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --iso"
- fi
-fi
-
-# Temporary hack until we fix apex build script
-BUILD_DIRECTORY=${WORKSPACE}/build
-
-# start the build
-pushd ${BUILD_DIRECTORY}
-make clean
-popd
-export PYTHONPATH=${WORKSPACE}
-python3 apex/build.py $BUILD_ARGS
-RPM_VERSION=$(grep Version: $WORKSPACE/build/rpm_specs/opnfv-apex.spec | awk '{ print $2 }')-$(echo $OPNFV_ARTIFACT_VERSION | tr -d '_-')
-# list the contents of the build output directory (.build)
-echo "Build Directory is ${BUILD_DIRECTORY}/../.build"
-echo "Build Directory Contents:"
-echo "-------------------------"
-ls -al ${BUILD_DIRECTORY}/../.build
-
-# list the contents of CACHE directory
-echo "Cache Directory is ${CACHE_DIRECTORY}"
-echo "Cache Directory Contents:"
-echo "-------------------------"
-ls -al $CACHE_DIRECTORY
-
-if [[ "$BUILD_ARGS" =~ '--iso' && "$BRANCH" == 'stable/fraser' ]]; then
- mkdir -p /tmp/apex-iso/
- rm -f /tmp/apex-iso/*.iso
- cp -f $BUILD_DIRECTORY/../.build/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso /tmp/apex-iso/
-fi
-
-if ! echo $ARTIFACT_VERSION | grep "dev" 1> /dev/null; then
- echo "Writing opnfv.properties file"
- if [ "$BRANCH" == 'stable/fraser' ]; then
- # save information regarding artifact into file
- (
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
- echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/../.build/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
- echo "OPNFV_SRPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.src.rpm"
- echo "OPNFV_RPM_URL=$GS_URL/opnfv-apex-$RPM_VERSION.noarch.rpm"
- echo "OPNFV_RPM_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/../.build/noarch/opnfv-apex-$RPM_VERSION.noarch.rpm | cut -d' ' -f1)"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
- ) > $WORKSPACE/opnfv.properties
- else
- # save information regarding artifact into file
- # we only generate the python package for master
- (
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_SRPM_URL=$GS_URL/python34-opnfv-apex-$RPM_VERSION.src.rpm"
- echo "OPNFV_RPM_URL=$GS_URL/python34-opnfv-apex-$RPM_VERSION.noarch.rpm"
- echo "OPNFV_RPM_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/../.build/noarch/python34-opnfv-apex-$RPM_VERSION.noarch.rpm | cut -d' ' -f1)"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
- ) > $WORKSPACE/opnfv.properties
- fi
-fi
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh
deleted file mode 100755
index 09d6ca630..000000000
--- a/jjb/apex/apex-deploy.sh
+++ /dev/null
@@ -1,213 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-IPV6_FLAG=False
-ALLINONE_FLAG=False
-CSIT_ENV_FLAG=False
-FUNCTEST_ENV_FLAG=False
-
-# log info to console
-echo "Starting the Apex deployment."
-echo "--------------------------------------------------------"
-echo
-
-if [ -z ${DEPLOY_SCENARIO+x} ]; then
- echo "Deploy scenario not set!"
- exit 1
-else
- echo "Deploy scenario: ${DEPLOY_SCENARIO}"
-fi
-
-# Dev or RPM/ISO build
-if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
- # Settings for deploying from git workspace
- DEPLOY_SETTINGS_DIR="${WORKSPACE}/config/deploy"
- NETWORK_SETTINGS_DIR="${WORKSPACE}/config/network"
- CLEAN_CMD="opnfv-clean"
- # if we are using master, then we are downloading/caching upstream images
- # we want to use that built-in mechanism to avoid re-downloading on every job,
- # so we use a dedicated folder to hold the upstream cache
- UPSTREAM_CACHE=$HOME/upstream_cache
- if [[ "$BRANCH" != 'stable/fraser' ]]; then
- mkdir -p ${UPSTREAM_CACHE}
- RESOURCES=$UPSTREAM_CACHE
- else
- RESOURCES="${WORKSPACE}/.build/"
- fi
- CONFIG="${WORKSPACE}/build"
- BASE=$CONFIG
- IMAGES=$RESOURCES
- LIB="${WORKSPACE}/lib"
- DEPLOY_CMD="opnfv-deploy --image-dir ${RESOURCES}"
- # Ensure artifacts were downloaded and extracted correctly
- # TODO(trozet) add verification here
-
- # Install dev build
- sudo rm -rf /tmp/.build
- mv -f .build /tmp/
- sudo pip3 install --upgrade --force-reinstall .
- mv -f /tmp/.build ${WORKSPACE}/
-else
- DEPLOY_SETTINGS_DIR="/etc/opnfv-apex/"
- NETWORK_SETTINGS_DIR="/etc/opnfv-apex/"
- CLEAN_CMD="opnfv-clean"
- # use a different directory here because upon RPM removal this
- # directory will be wiped in the daily job
- UPSTREAM_CACHE=$HOME/upstream_cache
- if [[ "$BRANCH" != 'stable/fraser' ]]; then
- mkdir -p ${UPSTREAM_CACHE}
- RESOURCES=$UPSTREAM_CACHE
- else
- RESOURCES="/var/opt/opnfv/images"
- fi
- DEPLOY_CMD="opnfv-deploy --image-dir ${RESOURCES}"
- CONFIG="/var/opt/opnfv"
- BASE=$CONFIG
- IMAGES=$RESOURCES
- LIB="/var/opt/opnfv/lib"
- sudo mkdir -p /var/log/apex
- sudo chmod 777 /var/log/apex
- cd /var/log/apex
-fi
-
-# Install Dependencies
-# Make sure python34 dependencies are installed
-dependencies="epel-release python34 python34-devel libvirt-devel python34-pip \
-ansible python34-PyYAML python34-jinja2 python34-setuptools python-tox ansible"
-
-for dep_pkg in $dependencies; do
- if ! rpm -q ${dep_pkg} > /dev/null; then
- if ! sudo yum install -y ${dep_pkg}; then
- echo "Failed to install ${dep_pkg}"
- exit 1
- fi
- fi
-done
-
-if [[ "$JOB_NAME" =~ "virtual" ]]; then
- # Make sure ipxe-roms-qemu package is updated to latest.
- # This package is needed for multi virtio NIC PXE boot in a virtual environment.
- sudo yum update -y ipxe-roms-qemu
-fi
-
-if [ "$OPNFV_CLEAN" == 'yes' ]; then
- if sudo test -e '/root/inventory/pod_settings.yaml'; then
- clean_opts='-i /root/inventory/pod_settings.yaml'
- else
- clean_opts=''
- fi
-
- sudo ${CLEAN_CMD} ${clean_opts}
-fi
-
-# These are add-ons to regular scenarios, selected by appending a suffix such as
-# os-nosdn-nofeature-noha-ipv6 or os-nosdn-nofeature-noha-allinone
-if echo ${DEPLOY_SCENARIO} | grep ipv6; then
- IPV6_FLAG=True
- DEPLOY_SCENARIO=$(echo ${DEPLOY_SCENARIO} | sed 's/-ipv6//')
- echo "INFO: IPV6 Enabled"
-fi
-
-if echo ${DEPLOY_SCENARIO} | grep allinone; then
- ALLINONE_FLAG=True
- DEPLOY_SCENARIO=$(echo ${DEPLOY_SCENARIO} | sed 's/-allinone//')
- echo "INFO: All in one deployment detected"
-fi
-
-if echo ${DEPLOY_SCENARIO} | grep csit; then
- CSIT_ENV_FLAG=True
- DEPLOY_SCENARIO=$(echo ${DEPLOY_SCENARIO} | sed 's/-csit//')
- echo "INFO: CSIT env requested in deploy scenario"
-elif echo ${DEPLOY_SCENARIO} | grep functest; then
- FUNCTEST_ENV_FLAG=True
- DEPLOY_SCENARIO=$(echo ${DEPLOY_SCENARIO} | sed 's/-functest//')
- echo "INFO: Functest env requested in deploy scenario"
-fi
-
-echo "Deploy Scenario set to ${DEPLOY_SCENARIO}"
-DEPLOY_FILE="${DEPLOY_SETTINGS_DIR}/${DEPLOY_SCENARIO}.yaml"
-
-if [ ! -e "$DEPLOY_FILE" ]; then
- echo "ERROR: Required settings file missing: Deploy settings file ${DEPLOY_FILE}"
-fi
-
-if [[ "$JOB_NAME" =~ "virtual" ]]; then
- # settings for virtual deployment
- DEPLOY_CMD="${DEPLOY_CMD} -v"
- if [[ "${DEPLOY_SCENARIO}" =~ fdio|ovs ]]; then
- DEPLOY_CMD="${DEPLOY_CMD} --virtual-default-ram 12 --virtual-compute-ram 7"
- fi
- if [[ "$ALLINONE_FLAG" == "True" ]]; then
- DEPLOY_CMD="${DEPLOY_CMD} --virtual-computes 0"
- elif [[ "$PROMOTE" == "True" ]]; then
- DEPLOY_CMD="${DEPLOY_CMD} --virtual-computes 2"
- fi
-
- if [[ "$FUNCTEST_ENV_FLAG" == "True" || "$CSIT_ENV_FLAG" == "True" ]]; then
- if [[ "$CSIT_ENV_FLAG" == "True" ]]; then
- ENV_TYPE="csit"
- else
- ENV_TYPE="functest"
- fi
- if [ -z ${OS_VERSION+x} ]; then
- echo "INFO: OS_VERSION not passed to deploy, detecting based on branch and scenario"
- case $BRANCH in
- master)
- if [[ "$DEPLOY_SCENARIO" =~ "rocky" ]]; then
- OS_VERSION=rocky
- else
- OS_VERSION=master
- fi
- ;;
- *gambia)
- OS_VERSION=queens
- ;;
- *)
- echo "Unable to detection OS_VERSION, aborting"
- exit 1
- ;;
- esac
- fi
- if [[ "$OS_VERSION" != "master" ]]; then
- SNAP_ENV="${ENV_TYPE}-${OS_VERSION}-environment.yaml"
- else
- SNAP_ENV="${ENV_TYPE}-environment.yaml"
- fi
- DEPLOY_CMD="${DEPLOY_CMD} -e ${SNAP_ENV}"
- fi
-else
- # settings for bare metal deployment
- NETWORK_SETTINGS_DIR="/root/network"
- INVENTORY_FILE="/root/inventory/pod_settings.yaml"
-
- if ! sudo test -e "$INVENTORY_FILE"; then
- echo "ERROR: Required settings file missing: Inventory settings file ${INVENTORY_FILE}"
- exit 1
- fi
- # include inventory file for bare metal deployment
- DEPLOY_CMD="${DEPLOY_CMD} -i ${INVENTORY_FILE}"
-fi
-
-if [ "$IPV6_FLAG" == "True" ]; then
- NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_v6.yaml"
-elif [[ "$CSIT_ENV_FLAG" == "True" || "$FUNCTEST_ENV_FLAG" == "True" ]]; then
- # We use the csit network settings, which use a single network, for snapshots
- NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_csit.yaml"
-else
- NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings.yaml"
-fi
-
-# Check that network settings file exists
-if ! sudo test -e "$NETWORK_FILE"; then
- echo "ERROR: Required settings file missing: Network Settings file ${NETWORK_FILE}"
- exit 1
-fi
-
-# start deployment
-sudo ${DEPLOY_CMD} -d ${DEPLOY_FILE} -n ${NETWORK_FILE} --debug
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/apex/apex-download-artifact.sh b/jjb/apex/apex-download-artifact.sh
deleted file mode 100755
index bc3311d68..000000000
--- a/jjb/apex/apex-download-artifact.sh
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# log info to console
-echo "Downloading the Apex artifact. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-[[ -d $BUILD_DIRECTORY ]] || mkdir -p $BUILD_DIRECTORY
-
-if [ -z "$DEPLOY_SCENARIO" ]; then
- echo "Deploy scenario not set!"
- exit 1
-else
- echo "Deploy scenario: ${DEPLOY_SCENARIO}"
-fi
-
-if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
- if [[ "$BRANCH" != 'stable/fraser' ]]; then
- echo "Skipping download of artifacts for master/gambia branch"
- else
- # dev build
- GERRIT_PATCHSET_NUMBER=$(echo $GERRIT_REFSPEC | grep -Eo '[0-9]+$')
- export OPNFV_ARTIFACT_VERSION="dev${GERRIT_CHANGE_NUMBER}_${GERRIT_PATCHSET_NUMBER}"
- # get build artifact
- pushd ${BUILD_DIRECTORY} > /dev/null
- echo "Downloading packaged dev build: apex-${OPNFV_ARTIFACT_VERSION}.tar.gz"
- curl --fail -s -o $BUILD_DIRECTORY/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz http://$GS_URL/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz
- tar -xvf apex-${OPNFV_ARTIFACT_VERSION}.tar.gz
- popd > /dev/null
- fi
-else
- echo "Will use RPMs..."
-
- # Must be RPMs/ISO
- echo "Downloading latest properties file"
-
- # get the properties file in order to get info regarding artifacts
- curl --fail -s -o $BUILD_DIRECTORY/opnfv.properties http://$GS_URL/latest.properties
-
- # source the file so we get OPNFV vars
- source $BUILD_DIRECTORY/opnfv.properties
-
- RPM_INSTALL_PATH=$(echo "http://"$OPNFV_RPM_URL | sed 's/\/'"$(basename $OPNFV_RPM_URL)"'//')
- RPM_LIST=$(basename $OPNFV_RPM_URL)
- # find version of RPM
- VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-([0-9]{8}|[a-z]+-[0-9]\.[0-9]+)')
- if [ "$BRANCH" == 'stable/fraser' ]; then
- # build RPM List which already includes base Apex RPM
- RPM_LIST+=" opnfv-apex-undercloud-${VERSION_EXTENSION}.noarch.rpm"
- RPM_LIST+=" python34-opnfv-apex-${VERSION_EXTENSION}.noarch.rpm"
- fi
-
- # remove old / install new RPMs
- if rpm -q python34-opnfv-apex > /dev/null; then
- INSTALLED_RPMS=$(rpm -qa | grep apex)
- if [ -n "$INSTALLED_RPMS" ]; then
- sudo yum remove -y ${INSTALLED_RPMS}
- fi
- fi
- # Create an rpms dir on slave
- mkdir -p ~/apex_rpms
- pushd ~/apex_rpms
- # Remove older rpms which do not match this version
- find . ! -name "*${VERSION_EXTENSION}.noarch.rpm" -type f -exec rm -f {} +
- # Download RPM only if changed on server
- for rpm in $RPM_LIST; do
- wget -N ${RPM_INSTALL_PATH}/${rpm}
- done
- if ! sudo yum install -y $RPM_LIST; then
- echo "Unable to install new RPMs: $RPM_LIST"
- exit 1
- fi
- popd
-fi
-
-# TODO: Uncomment these lines to verify SHA512SUMs once the sums are
-# fixed.
-# echo "$OPNFV_ARTIFACT_SHA512SUM $BUILD_DIRECTORY/apex.iso" | sha512sum -c
-# echo "$OPNFV_RPM_SHA512SUM $BUILD_DIRECTORY/$(basename $OPNFV_RPM_URL)" | sha512sum -c
-
-# list the files
-ls -al $BUILD_DIRECTORY
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/apex/apex-fetch-logs.sh b/jjb/apex/apex-fetch-logs.sh
deleted file mode 100755
index bdb2252b3..000000000
--- a/jjb/apex/apex-fetch-logs.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# log info to console
-echo "Fetching logs from overcloud. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-if sudo opnfv-pyutil --fetch-logs; then
- LOG_LOCATION=$(cat apex_util.log | grep 'Log retrieval complete' | grep -Eo '/tmp/.+$')
- if [ -z "$LOG_LOCATION" ]; then
- echo "WARNING: Unable to determine log location. Logs will not be uploaded"
- exit 0
- else
- sudo chmod 777 ${LOG_LOCATION}
- UPLOAD_LOCATION="${GS_URL}/logs/${JOB_NAME}/${BUILD_NUMBER}/"
- gsutil -m cp -r ${LOG_LOCATION} gs://${UPLOAD_LOCATION} > gsutil.latest_logs.log
- echo -e "Logs available at: \n$(find ${LOG_LOCATION} -type f | sed -n 's#^/tmp/#http://'$UPLOAD_LOCATION'#p')"
- fi
-else
- echo "WARNING: Log retrieval failed. No logs will be uploaded"
- exit 0
-fi
diff --git a/jjb/apex/apex-fetch-snap-info.sh b/jjb/apex/apex-fetch-snap-info.sh
deleted file mode 100755
index 3324aca4f..000000000
--- a/jjb/apex/apex-fetch-snap-info.sh
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env bash
-
-##############################################################################
-# Copyright (c) 2018 Tim Rozet (Red Hat) and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-echo "Fetching overcloudrc, ssh key, and node.yaml from deployment..."
-
-SSH_OPTIONS=(-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o LogLevel=error)
-
-tmp_dir=/tmp/snap
-rm -rf ${tmp_dir}
-mkdir -p ${tmp_dir}
-
-# TODO(trozet) remove this after fix goes in for tripleo_inspector to copy these
-pushd ${tmp_dir} > /dev/null
-echo "Copying overcloudrc and ssh key from Undercloud..."
-# Store overcloudrc
-UNDERCLOUD=$(sudo virsh domifaddr undercloud | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+')
-sudo scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:overcloudrc ./
-# Copy out ssh key of stack from undercloud
-sudo scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:.ssh/id_rsa ./
-sudo chmod 0600 id_rsa
-popd > /dev/null
-
-echo "Gathering introspection information"
-git clone https://gerrit.opnfv.org/gerrit/sdnvpn.git
-pushd sdnvpn/odl-pipeline/lib > /dev/null
-sudo ./tripleo_introspector.sh --out-file ${tmp_dir}/node.yaml
-popd > /dev/null
-sudo rm -rf sdnvpn
-
-sudo chown jenkins-ci:jenkins-ci ${tmp_dir}/*
-
-ls -lrt ${tmp_dir}
-
-echo "Fetch complete"
diff --git a/jjb/apex/apex-functest-scenario.sh b/jjb/apex/apex-functest-scenario.sh
deleted file mode 100644
index f1cb74e24..000000000
--- a/jjb/apex/apex-functest-scenario.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-features=$(echo $DEPLOY_SCENARIO | sed -r -n 's/os-.+-(.+)-(noha|ha)/\1/p')
-if [ "$features" == 'rocky' ]; then
- functest_scenario=$(echo $DEPLOY_SCENARIO | sed -r -n 's/(os-.+?)-rocky-(noha|ha)/\1-nofeature-\2/p')
- echo "DOCKER_TAG=hunter" > functest_scenario
-elif [[ "$features" =~ 'rocky' ]]; then
- functest_scenario=$(echo $DEPLOY_SCENARIO | sed -r -n 's/(os-.+?)-(.+)_rocky-(noha|ha)/\1-\2-\3/p')
- echo "DOCKER_TAG=hunter" > functest_scenario
-else
- functest_scenario=$(echo $DEPLOY_SCENARIO | sed -r -n 's/-(noha|ha).*/-\1/p')
- echo "DOCKER_TAG=$([[ ${BRANCH##*/} == "master" ]] && \
- echo "latest" || echo ${BRANCH##*/})" > functest_scenario
-fi
-echo "DEPLOY_SCENARIO=$functest_scenario" >> functest_scenario
diff --git a/jjb/apex/apex-iso-verify.sh b/jjb/apex/apex-iso-verify.sh
deleted file mode 100755
index c29d7cb32..000000000
--- a/jjb/apex/apex-iso-verify.sh
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# log info to console
-echo "Starting the Apex iso verify."
-echo "--------------------------------------------------------"
-echo
-
-if [ "$BRANCH" != 'stable/fraser' ]; then
- echo "Skipping Apex iso verify for ${BRANCH} branch"
- exit 0
-fi
-
-# Must be RPMs/ISO
-echo "Downloading latest properties file"
-
-# get the properties file in order to get info regarding artifacts
-curl --fail -s -o opnfv.properties http://$GS_URL/latest.properties
-
-# source the file so we get OPNFV vars
-source opnfv.properties
-
-if ! rpm -q virt-install > /dev/null; then
- sudo yum -y install virt-install
-fi
-
-# define a clean function
-rm_apex_iso_verify () {
-if sudo virsh list --all | grep apex-iso-verify | grep running; then
- sudo virsh destroy apex-iso-verify
-fi
-if sudo virsh list --all | grep apex-iso-verify; then
- sudo virsh undefine apex-iso-verify
-fi
-}
-
-# Make sure a pre-existing iso-verify isn't there
-rm_apex_iso_verify
-
-# make sure there is no existing console log file for the VM
-sudo rm -f /var/log/libvirt/qemu/apex-iso-verify-console.log
-
-# run an install from the iso
-# The serial console is logged to /var/log/libvirt/qemu/apex-iso-verify-console.log
-sudo virt-install -n apex-iso-verify -r 4096 --vcpus 4 --os-variant=rhel7 \
- --accelerate -v --noautoconsole \
- --disk path=/var/lib/libvirt/images/apex-iso-verify.qcow2,size=30,format=qcow2 \
- -l /tmp/apex-iso/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso \
- --extra-args 'console=ttyS0 console=ttyS0,115200n8 serial inst.ks=file:/iso-verify.ks inst.stage2=hd:LABEL=OPNFV\x20CentOS\x207\x20x86_64:/' \
- --initrd-inject ci/iso-verify.ks \
- --serial file,path=/var/log/libvirt/qemu/apex-iso-verify-console.log
-
-echo "Waiting for install to finish..."
-sleep 10
-end_time=$(($SECONDS+1500))
-while ! [[ `sudo tail -n1 /var/log/libvirt/qemu/apex-iso-verify-console.log` =~ 'Power down' ]]; do
- if [ $SECONDS -gt $end_time ] || ! sudo virsh list --all | grep apex-iso-verify | grep running > /dev/null; then
- sudo cat /var/log/libvirt/qemu/apex-iso-verify-console.log
- sudo virsh list --all
- echo "Error: Failed to find power down message after install"
- exit 1
- fi
- sleep 10
-done
-
-sudo cat /var/log/libvirt/qemu/apex-iso-verify-console.log
-
-# clean up
-rm_apex_iso_verify
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/apex/apex-jjb-renderer.py b/jjb/apex/apex-jjb-renderer.py
deleted file mode 100644
index 58dc4fff9..000000000
--- a/jjb/apex/apex-jjb-renderer.py
+++ /dev/null
@@ -1,51 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Tim Rozet (trozet@redhat.com) and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import pprint
-import yaml
-from jinja2 import Environment
-from jinja2 import FileSystemLoader
-
-
-def render_jjb():
- """Render JJB output from scenarios.yaml.hidden file and jinja
- template"""
-
- gspathname = dict()
- branch = dict()
- build_slave = dict()
- env = Environment(loader=FileSystemLoader('./'), autoescape=True,
- keep_trailing_newline=True)
-
- with open('scenarios.yaml.hidden') as _:
- scenarios = yaml.safe_load(_)
-
- template = env.get_template('apex.yaml.j2')
-
- print("Scenarios are: ")
- pprint.pprint(scenarios)
-
- for stream in scenarios:
- if stream == 'master':
- gspathname['master'] = ''
- branch[stream] = stream
- else:
- gspathname[stream] = '/' + stream
- branch[stream] = 'stable/' + stream
- build_slave[stream] = 'apex-baremetal-{}'.format(stream)
-
- output = template.render(scenarios=scenarios, gspathname=gspathname,
- branch=branch, build_slave=build_slave)
-
- with open('./apex.yaml', 'w') as fh:
- fh.write(output)
-
-
-if __name__ == "__main__":
- render_jjb()
diff --git a/jjb/apex/apex-project-jobs.yaml b/jjb/apex/apex-project-jobs.yaml
deleted file mode 100644
index c581c59ab..000000000
--- a/jjb/apex/apex-project-jobs.yaml
+++ /dev/null
@@ -1,146 +0,0 @@
----
-- project:
- name: 'apex-project-jobs'
- project: 'apex'
-
- stream:
- - master: &master
- branch: 'master'
- gs-pathname: ''
- concurrent-builds: 3
- disabled: false
- - hunter: &hunter
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- concurrent-builds: 3
- disabled: false
- - gambia: &gambia
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- concurrent-builds: 3
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- concurrent-builds: 3
- disabled: false
- - euphrates: &euphrates
- branch: 'stable/euphrates'
- gs-pathname: '/euphrates'
- concurrent-builds: 3
- disabled: true
- - danube: &danube
- branch: 'stable/danube'
- gs-pathname: '/danube'
- concurrent-builds: 1
- disabled: true
-
- jobs:
- - 'apex-build-{stream}'
- - 'apex-verify-iso-{stream}'
-
-# Build phase
-- job-template:
- name: 'apex-build-{stream}'
-
- # Job template for builds
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: 'apex-build-master'
-
- disabled: false
-
- concurrent: true
-
- parameters:
- - '{project}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - timeout:
- timeout: 150
- fail: true
-
- properties:
- - logrotate-default
- - throttle:
- max-per-node: '{concurrent-builds}'
- max-total: 10
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-verify-iso-{stream}'
-
- builders:
- - 'apex-build'
- - inject:
- properties-content: ARTIFACT_TYPE=rpm
- - 'apex-upload-artifact'
-
-# ISO verify job
-- job-template:
- name: 'apex-verify-iso-{stream}'
-
- # Job template for ISO verification
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: 'apex-virtual-master'
-
- disabled: false
-
- concurrent: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from parameters macro."
-
- scm:
- - git-scm
-
- properties:
- - logrotate-default
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - 'apex-iso-verify'
- - inject:
- properties-content: ARTIFACT_TYPE=iso
- - 'apex-upload-artifact'
-
-########################
-# builder macros
-########################
-- builder:
- name: 'apex-build'
- builders:
- - shell:
- !include-raw: ./apex-build.sh
-
-- builder:
- name: 'apex-iso-verify'
- builders:
- - shell:
- !include-raw: ./apex-iso-verify.sh
diff --git a/jjb/apex/apex-rtd-jobs.yaml b/jjb/apex/apex-rtd-jobs.yaml
deleted file mode 100644
index 48e4949f7..000000000
--- a/jjb/apex/apex-rtd-jobs.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-- project:
- name: apex-rtd
- project: apex
- project-name: apex
-
- project-pattern: 'apex'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-apex/47262/'
- rtd-token: '134db049c774ab06c41db432e3a042a982f50edf'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - gambia:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/apex/apex-snapshot-create.sh b/jjb/apex/apex-snapshot-create.sh
deleted file mode 100644
index e8bf60bd2..000000000
--- a/jjb/apex/apex-snapshot-create.sh
+++ /dev/null
@@ -1,105 +0,0 @@
-#!/usr/bin/env bash
-##############################################################################
-# Copyright (c) 2016 Tim Rozet (Red Hat) and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-SSH_OPTIONS=(-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o LogLevel=error)
-
-if [ -z "$SNAP_TYPE" ]; then
- echo "ERROR: SNAP_TYPE not provided...exiting"
- exit 1
-fi
-
-echo "Creating Apex snapshot..."
-echo "-------------------------"
-echo
-
-# create tmp directory
-tmp_dir=$(pwd)/.tmp
-mkdir -p ${tmp_dir}
-
-# info should have already been collected in apex-fetch-snap-info so copy it
-cp -r /tmp/snap/* ${tmp_dir}/
-
-echo "Shutting down nodes"
-# Shut down nodes
-nodes=$(sudo virsh list | grep -Eo "baremetal[0-9]")
-for node in $nodes; do
- sudo virsh shutdown ${node} --mode acpi
-done
-
-for node in $nodes; do
- count=0
- while [ "$count" -lt 10 ]; do
- sleep 10
- if sudo virsh list | grep ${node}; then
- echo "Waiting for $node to shutdown, try $count"
- else
- break
- fi
- count=$((count+1))
- done
-
- if [ "$count" -ge 10 ]; then
- echo "Node $node failed to shutdown"
- exit 1
- fi
-done
-
-pushd ${tmp_dir} > /dev/null
-echo "Gathering virsh definitions"
-# copy qcow2s, virsh definitions
-for node in $nodes; do
- sudo cp -f /var/lib/libvirt/images/${node}.qcow2 ./
- sudo virsh dumpxml ${node} > ${node}.xml
-done
-
-# copy virsh net definitions
-sudo virsh net-dumpxml admin > admin.xml
-
-sudo chown jenkins-ci:jenkins-ci *
-
-# tar up artifacts
-DATE=`date +%Y-%m-%d`
-tar czf ../apex-${SNAP_TYPE}-snap-${DATE}.tar.gz .
-popd > /dev/null
-sudo rm -rf ${tmp_dir}
-echo "Snapshot saved as apex-${SNAP_TYPE}-snap-${DATE}.tar.gz"
-
-# update opnfv properties file
-snap_sha=$(sha512sum apex-${SNAP_TYPE}-snap-${DATE}.tar.gz | cut -d' ' -f1)
-
-if curl --fail -O -L http://$GS_URL/snapshot.properties; then
- # TODO(trozet): deprecate OPNFV_SNAP_URL for CSIT_SNAP_URL
- if [ "$SNAP_TYPE" == 'csit' ]; then
- sed -i '/^OPNFV_SNAP_URL=/{h;s#=.*#='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#};${x;/^$/{s##OPNFV_SNAP_URL='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#;H};x}' snapshot.properties
- sed -i '/^OPNFV_SNAP_SHA512SUM=/{h;s/=.*/='${snap_sha}'/};${x;/^$/{s//OPNFV_SNAP_SHA512SUM='${snap_sha}'/;H};x}' snapshot.properties
- fi
- sed -i '/^'${SNAP_TYPE}'_SNAP_URL=/{h;s#=.*#='${GS_URL}'/apex-'${SNAP_TYPE}'-snap-'${DATE}'.tar.gz#};${x;/^$/{s##'${SNAP_TYPE}'_SNAP_URL='${GS_URL}'/apex-'${SNAP_TYPE}'-snap-'${DATE}'.tar.gz#;H};x}' snapshot.properties
- sed -i '/^'${SNAP_TYPE}'_SNAP_SHA512SUM=/{h;s/=.*/='${snap_sha}'/};${x;/^$/{s//'${SNAP_TYPE}'_SNAP_SHA512SUM='${snap_sha}'/;H};x}' snapshot.properties
-else
- cat << EOF > snapshot.properties
-${SNAP_TYPE}_SNAP_URL=${GS_URL}/apex-${SNAP_TYPE}-snap-${DATE}.tar.gz
-${SNAP_TYPE}_SNAP_SHA512SUM=${snap_sha}
-EOF
- # TODO(trozet): deprecate OPNFV_SNAP_URL for CSIT_SNAP_URL
- if [ "$SNAP_TYPE" == 'csit' ]; then
- cat << EOF >> snapshot.properties
-OPNFV_SNAP_URL=${GS_URL}/apex-csit-snap-${DATE}.tar.gz
-OPNFV_SNAP_SHA512SUM=${snap_sha}
-EOF
- fi
-fi
-echo "${SNAP_TYPE}_SNAP_URL=$GS_URL/apex-${SNAP_TYPE}-snap-${DATE}.tar.gz"
-echo "${SNAP_TYPE}_SNAP_SHA512SUM=$(sha512sum apex-${SNAP_TYPE}-snap-${DATE}.tar.gz | cut -d' ' -f1)"
-echo "Updated properties file: "
-cat snapshot.properties
diff --git a/jjb/apex/apex-snapshot-deploy.sh b/jjb/apex/apex-snapshot-deploy.sh
deleted file mode 100644
index dd69df3fc..000000000
--- a/jjb/apex/apex-snapshot-deploy.sh
+++ /dev/null
@@ -1,184 +0,0 @@
-#!/usr/bin/env bash
-##############################################################################
-# Copyright (c) 2016 Tim Rozet (Red Hat) and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-SSH_OPTIONS=(-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o LogLevel=error)
-SNAP_CACHE=$HOME/snap_cache
-
-
-echo "Deploying Apex snapshot..."
-echo "--------------------------"
-echo
-
-if [ -z "$SNAP_TYPE" ]; then
- echo "ERROR: SNAP_TYPE not provided...exiting"
- exit 1
-fi
-
-echo "Cleaning server"
-pushd ci > /dev/null
-sudo opnfv-clean
-popd > /dev/null
-
-full_snap_url="gs://${GS_URL}/${OS_VERSION}/${TOPOLOGY}"
-
-echo "Downloading latest snapshot properties file"
-if ! gsutil cp ${full_snap_url}/snapshot.properties $WORKSPACE/opnfv.properties; then
- echo "ERROR: Unable to find snapshot.properties at ${full_snap_url}...exiting"
- exit 1
-fi
-
-echo "Properties contents:"
-cat ${WORKSPACE}/opnfv.properties
-
-# find the latest checksum
-latest_snap_checksum=$(cat ${WORKSPACE}/opnfv.properties | grep ${SNAP_TYPE}_SNAP_SHA512SUM | awk -F "=" '{print $2}')
-if [ -z "$latest_snap_checksum" ]; then
- echo "ERROR: checksum of latest snapshot from snapshot.properties is null!"
- exit 1
-fi
-
-local_snap_checksum=""
-SNAP_CACHE=${SNAP_CACHE}/${OS_VERSION}/${TOPOLOGY}
-
-# check snap cache directory exists
-# if snapshot cache exists, find the checksum
-if [ -d "$SNAP_CACHE" ]; then
- latest_snap=$(ls ${SNAP_CACHE} | grep tar.gz | grep $SNAP_TYPE | tail -n 1)
- if [ -n "$latest_snap" ]; then
- local_snap_checksum=$(sha512sum ${SNAP_CACHE}/${latest_snap} | cut -d' ' -f1)
- echo "Local snap checksum is: ${local_snap_checksum}"
- fi
-else
- mkdir -p ${SNAP_CACHE}
-fi
-
-# compare checksums and download the latest snap if not up to date
-if [ "$local_snap_checksum" != "$latest_snap_checksum" ]; then
- snap_url=$(cat opnfv.properties | grep ${SNAP_TYPE}_SNAP_URL | awk -F "=" '{print $2}')
- # TODO(trozet): Remove this once OPNFV url is deprecated
- if [[ -z "$snap_url" && "$SNAP_TYPE" == 'csit' ]]; then
- echo "WARN: Unable to find snap url for ${SNAP_TYPE}, attempting to use OPNFV"
- snap_url=$(cat opnfv.properties | grep OPNFV_SNAP_URL | awk -F "=" '{print $2}')
- fi
- if [ -z "$snap_url" ]; then
- echo "ERROR: Snap URL from snapshot.properties is null!"
- exit 1
- fi
- echo "INFO: SHA mismatch, will download latest snapshot"
- # wipe cache
- rm -rf ${SNAP_CACHE}/*
- gsutil cp "gs://${snap_url}" ${SNAP_CACHE}/
- snap_tar=$(basename ${snap_url})
-else
- snap_tar=${latest_snap}
-fi
-
-echo "INFO: Snapshot to be used is ${snap_tar}"
-
-# move to snap cache dir and unpack
-pushd ${SNAP_CACHE} > /dev/null
-tar xvf ${snap_tar}
-
-# create each network
-virsh_networks=$(ls *.xml | grep -v baremetal)
-
-if [ -z "$virsh_networks" ]; then
- echo "ERROR: no virsh networks found in snapshot unpack"
- exit 1
-fi
-
-echo "Checking overcloudrc"
-if ! stat overcloudrc; then
- echo "ERROR: overcloudrc does not exist in snap unpack"
- exit 1
-fi
-
-for network_def in ${virsh_networks}; do
- sudo virsh net-create ${network_def}
- network=$(echo ${network_def} | awk -F '.' '{print $1}')
- if ! sudo virsh net-list | grep ${network}; then
- sudo virsh net-start ${network}
- fi
- echo "Checking if OVS bridge is missing for network: ${network}"
- if ! sudo ovs-vsctl show | grep "br-${network}"; then
- sudo ovs-vsctl add-br br-${network}
- echo "OVS Bridge created: br-${network}"
- if [ "br-${network}" == 'br-admin' ]; then
- echo "Configuring IP 192.0.2.99 on br-admin"
- sudo ip addr add 192.0.2.99/24 dev br-admin
- sudo ip link set up dev br-admin
- elif [ "br-${network}" == 'br-external' ]; then
- echo "Configuring IP 192.168.37.1 on br-external"
- sudo ip addr add 192.168.37.1/24 dev br-external
- sudo ip link set up dev br-external
- # Routes for admin network
- # The overcloud controller is multi-homed and will fail to respond
- # to traffic from the functest container due to reverse-path filtering.
- # This route allows reverse traffic by forcing admin-network-destined
- # traffic through the external network for controller IPs only.
- # Compute nodes have no IP on external interfaces.
- controller_ips=$(cat overcloudrc | grep -Eo "192.0.2.[0-9]+")
- for ip in $controller_ips; do
- sudo ip route add ${ip}/32 dev br-external
- done
- fi
- fi
-done
-
-echo "Virsh networks up: $(sudo virsh net-list)"
-echo "Bringing up Overcloud VMs..."
-virsh_vm_defs=$(ls baremetal*.xml)
-
-if [ -z "$virsh_vm_defs" ]; then
- echo "ERROR: no virsh VMs found in snapshot unpack"
- exit 1
-fi
-
-for node_def in ${virsh_vm_defs}; do
- sed -ri "s/machine='[^\s]+'/machine='pc'/" ${node_def}
- sudo virsh define ${node_def}
- node=$(echo ${node_def} | awk -F '.' '{print $1}')
- sudo cp -f ${node}.qcow2 /var/lib/libvirt/images/
- sudo virsh start ${node}
- echo "Node: ${node} started"
-done
-
-# copy overcloudrc for functest
-mkdir -p $HOME/cloner-info
-cp -f overcloudrc $HOME/cloner-info/
-
-admin_controller_ip=$(cat overcloudrc | grep -Eo -m 1 "192.0.2.[0-9]+" | head -1)
-netvirt_url="http://${admin_controller_ip}:8081/restconf/operational/network-topology:network-topology/topology/netvirt:1"
-
-source overcloudrc
-counter=1
-while [ "$counter" -le 10 ]; do
- echo "Checking if OpenStack is up"
- if nc -z ${admin_controller_ip} 9696 > /dev/null; then
- echo "Overcloud Neutron is up...Checking if OpenDaylight NetVirt is up..."
- if curl --fail --silent -u admin:${SDN_CONTROLLER_PASSWORD} ${netvirt_url} > /dev/null; then
- echo "OpenDaylight is up. Overcloud deployment complete"
- exit 0
- else
- echo "OpenDaylight not yet up, try ${counter}"
- fi
- else
- echo "Neutron not yet up, try ${counter}"
- fi
- counter=$((counter+1))
- sleep 60
-done
-
-echo "ERROR: Deployment not up after 10 minutes...exiting."
-exit 1
diff --git a/jjb/apex/apex-unit-test.sh b/jjb/apex/apex-unit-test.sh
deleted file mode 100755
index 3f15847f2..000000000
--- a/jjb/apex/apex-unit-test.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-# log info to console
-echo "Starting unit tests for Apex..."
-echo "---------------------------------------------------------------------------------------"
-echo
-
-PATH=$PATH:/usr/sbin
-
-
-pushd build/ > /dev/null
-for pkg in yamllint rpmlint iproute epel-release python34-devel python34-nose python34-PyYAML python-pep8 python34-mock python34-pip; do
- if ! rpm -q ${pkg} > /dev/null; then
- if ! sudo yum install -y ${pkg}; then
- echo "Failed to install ${pkg} package..."
- exit 1
- fi
- fi
-done
-
-# Make sure coverage is installed
-if ! python3 -c "import coverage" &> /dev/null; then sudo pip3 install coverage; fi
-
-make rpmlint
-make python-pep8-check
-make yamllint
-make python-tests
-popd > /dev/null
-
-echo "--------------------------------------------------------"
-echo "Unit Tests Done!"
diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh
deleted file mode 100755
index 00a0a1c78..000000000
--- a/jjb/apex/apex-upload-artifact.sh
+++ /dev/null
@@ -1,163 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-if [ -z "$ARTIFACT_TYPE" ]; then
- echo "ERROR: ARTIFACT_TYPE not provided...exiting"
- exit 1
-fi
-
-# log info to console
-echo "Uploading the Apex ${ARTIFACT_TYPE} artifact. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-if [[ ! "$ARTIFACT_VERSION" =~ dev ]]; then
- source $BUILD_DIRECTORY/../opnfv.properties
-fi
-
-importkey () {
- # clone releng repository
- echo "Cloning releng repository..."
- [ -d releng ] && rm -rf releng
- git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
-  # this is where we import the signing key
- if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
- if ! $WORKSPACE/releng/utils/gpg_import_key.sh; then
- echo "WARNING: Failed to run gpg key import"
- fi
- fi
-}
-
-signrpm () {
- for artifact in $RPM_LIST $SRPM_LIST; do
- echo "Signing artifact: ${artifact}"
- gpg2 -vvv --batch --yes --no-tty \
- --default-key opnfv-helpdesk@rt.linuxfoundation.org \
- --passphrase besteffort \
- --detach-sig $artifact
- gsutil cp "$artifact".sig gs://$GS_URL/$(basename "$artifact".sig)
- echo "Upload complete for ${artifact} signature"
- done
-}
-
-signiso () {
- gpg2 -vvv --batch --yes --no-tty \
- --default-key opnfv-helpdesk@rt.linuxfoundation.org \
- --passphrase besteffort \
- --detach-sig $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso
-
- gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig
- echo "ISO signature Upload Complete!"
-}
-
-uploadiso () {
- gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log
- echo "ISO Upload Complete!"
-}
-
-uploadrpm () {
- for artifact in $RPM_LIST $SRPM_LIST; do
- echo "Uploading artifact: ${artifact}"
- gsutil cp $artifact gs://$GS_URL/$(basename $artifact) > gsutil.iso.log
- echo "Upload complete for ${artifact}"
- done
- gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log
- gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log
-
- # Make the property files viewable on the artifact site
- gsutil -m setmeta \
- -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/latest.properties \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1
-}
-
-uploadsnap () {
- # Uploads snapshot artifact and updated properties file
- echo "Uploading snapshot artifacts"
-  # snapshot dir is in the create job's workspace on the same node
- # only 1 promotion job can run at a time on a slave
- snapshot_dir="${WORKSPACE}/../apex-create-snapshot"
- if [ -z "$SNAP_TYPE" ]; then
- echo "ERROR: SNAP_TYPE not provided...exiting"
- exit 1
- fi
- gsutil cp ${snapshot_dir}/apex-${SNAP_TYPE}-snap-`date +%Y-%m-%d`.tar.gz gs://$GS_URL/ > gsutil.iso.log
- gsutil cp ${snapshot_dir}/snapshot.properties gs://$GS_URL/snapshot.properties > gsutil.latest.log
- echo "Upload complete for Snapshot"
-}
-
-uploadimages () {
- # Uploads dev tarball
- GERRIT_PATCHSET_NUMBER=$(echo $GERRIT_REFSPEC | grep -Eo '[0-9]+$')
- export OPNFV_ARTIFACT_VERSION="dev${GERRIT_CHANGE_NUMBER}_${GERRIT_PATCHSET_NUMBER}"
- echo "Uploading development build tarball"
- pushd $BUILD_DIRECTORY > /dev/null
- tar czf apex-${OPNFV_ARTIFACT_VERSION}.tar.gz *.qcow2 *.vmlinuz *.initrd
- gsutil cp apex-${OPNFV_ARTIFACT_VERSION}.tar.gz gs://$GS_URL/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz > gsutil.latest.log
- popd > /dev/null
-}
-
-# Always import the signing key; if it's available, the artifacts will be
-# signed before being uploaded
-importkey
-
-if gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then
-  echo "Signing Key available"
- SIGN_ARTIFACT="true"
-fi
-
-if [ "$ARTIFACT_TYPE" == 'snapshot' ]; then
- uploadsnap
-elif [ "$ARTIFACT_TYPE" == 'iso' ]; then
- if [[ "$ARTIFACT_VERSION" =~ dev || "$BRANCH" != 'stable/fraser' ]]; then
- echo "Skipping ISO artifact upload for ${ARTIFACT_TYPE} due to dev/${BRANCH} build"
- exit 0
- fi
- if [[ -n "$SIGN_ARTIFACT" && "$SIGN_ARTIFACT" == "true" ]]; then
- signiso
- fi
- uploadiso
-elif [ "$ARTIFACT_TYPE" == 'rpm' ]; then
- if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
- if [[ "$BRANCH" != 'stable/fraser' ]]; then
- echo "will not upload artifacts, ${BRANCH} uses upstream"
- ARTIFACT_TYPE=none
- else
- echo "dev build detected, will upload image tarball"
- ARTIFACT_TYPE=tarball
- uploadimages
- fi
- else
- RPM_INSTALL_PATH=$BUILD_DIRECTORY/noarch
-    # RPM URL should be the python package for master, and is the only package we need
- RPM_LIST=$RPM_INSTALL_PATH/$(basename $OPNFV_RPM_URL)
- SRPM_INSTALL_PATH=$BUILD_DIRECTORY
- SRPM_LIST=$SRPM_INSTALL_PATH/$(basename $OPNFV_SRPM_URL)
- if [[ "$BRANCH" == 'stable/fraser' ]]; then
- VERSION_EXTENSION=$(echo $(basename $OPNFV_RPM_URL) | sed 's/opnfv-apex-//')
- RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}"
- RPM_LIST+=" ${RPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}"
- VERSION_EXTENSION=$(echo $(basename $OPNFV_SRPM_URL) | sed 's/opnfv-apex-//')
- SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}"
- SRPM_LIST+=" ${SRPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}"
- fi
-
- if [[ -n "$SIGN_ARTIFACT" && "$SIGN_ARTIFACT" == "true" ]]; then
- signrpm
- fi
- uploadrpm
- fi
-else
- echo "ERROR: Unknown artifact type ${ARTIFACT_TYPE} to upload...exiting"
- exit 1
-fi
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
-if [ "$ARTIFACT_TYPE" == 'iso' ]; then echo "ISO Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"; fi
-if [ "$ARTIFACT_TYPE" == 'rpm' ]; then echo "RPM Artifact is available as http://$GS_URL/$(basename $OPNFV_RPM_URL)"; fi
-if [ "$ARTIFACT_TYPE" == 'tarball' ]; then echo "Dev tarball Artifact is available as http://$GS_URL/apex-${OPNFV_ARTIFACT_VERSION}.tar.gz"; fi
diff --git a/jjb/apex/apex-verify-jobs.yaml b/jjb/apex/apex-verify-jobs.yaml
deleted file mode 100644
index e8115ebf6..000000000
--- a/jjb/apex/apex-verify-jobs.yaml
+++ /dev/null
@@ -1,409 +0,0 @@
----
-- project:
- name: 'apex-verify-jobs'
- project: 'apex'
- jobs:
- - 'apex-verify-{stream}'
- - 'apex-verify-gate-{stream}'
- - 'apex-verify-unit-tests-{stream}'
- stream:
- - master: &master
- branch: '{stream}'
- gs-pathname: ''
- verify-scenario: 'os-nosdn-nofeature-noha'
- disabled: false
- - hunter: &hunter
- branch: '{stream}'
- gs-pathname: ''
- verify-scenario: 'os-nosdn-nofeature-noha'
- disabled: false
- - gambia: &gambia
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- verify-scenario: 'os-nosdn-nofeature-ha'
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- verify-scenario: 'os-nosdn-nofeature-ha'
- disabled: false
- - danube: &danube
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- verify-scenario: 'os-odl_l3-nofeature-ha'
- disabled: true
- - euphrates: &euphrates
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- verify-scenario: 'os-odl-nofeature-ha'
- disabled: false
-
-# Unit Test
-- job-template:
- name: 'apex-verify-unit-tests-{stream}'
-
- node: 'apex-build-master'
-
- concurrent: true
-
- disabled: '{obj:disabled}'
-
- parameters:
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'true'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'apex'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'false'
- file-paths:
- - compare-type: ANT
- pattern: 'apex/tests/**'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: '*'
- - compare-type: ANT
- pattern: 'apex/*'
- - compare-type: ANT
- pattern: 'build/**'
- - compare-type: ANT
- pattern: 'lib/**'
- - compare-type: ANT
- pattern: 'config/**'
- - compare-type: ANT
- pattern: 'apex/build/**'
- - compare-type: ANT
- pattern: 'apex/common/**'
- - compare-type: ANT
- pattern: 'apex/inventory/**'
- - compare-type: ANT
- pattern: 'apex/network/**'
- - compare-type: ANT
- pattern: 'apex/overcloud/**'
- - compare-type: ANT
- pattern: 'apex/settings/**'
- - compare-type: ANT
- pattern: 'apex/undercloud/**'
- - compare-type: ANT
- pattern: 'apex/virtual/**'
-
- properties:
- - logrotate-default
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - apex-unit-test
-
-# Verify
-- job-template:
- name: 'apex-verify-{stream}'
-
- concurrent: true
-
- disabled: '{obj:disabled}'
-
- project-type: 'multijob'
-
- parameters:
- - '{project}-virtual-{stream}-defaults'
- - apex-parameter:
- gs-pathname: '{gs-pathname}/dev'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: ARTIFACT_VERSION
- default: dev
- description: "Used for overriding the ARTIFACT_VERSION"
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'true'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'apex'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- file-paths:
- - compare-type: ANT
- pattern: '*'
- - compare-type: ANT
- pattern: 'apex/*'
- - compare-type: ANT
- pattern: 'build/**'
- - compare-type: ANT
- pattern: 'ci/**'
- - compare-type: ANT
- pattern: 'lib/**'
- - compare-type: ANT
- pattern: 'config/**'
- - compare-type: ANT
- pattern: 'apex/build/**'
- - compare-type: ANT
- pattern: 'apex/builders/**'
- - compare-type: ANT
- pattern: 'apex/common/**'
- - compare-type: ANT
- pattern: 'apex/inventory/**'
- - compare-type: ANT
- pattern: 'apex/network/**'
- - compare-type: ANT
- pattern: 'apex/overcloud/**'
- - compare-type: ANT
- pattern: 'apex/settings/**'
- - compare-type: ANT
- pattern: 'apex/undercloud/**'
- - compare-type: ANT
- pattern: 'apex/virtual/**'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'apex/tests/**'
- - compare-type: ANT
- pattern: 'docs/**'
-
- properties:
- - logrotate-default
- - throttle:
- max-per-node: 3
- max-total: 10
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-.*-promote.*'
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: basic
- condition: SUCCESSFUL
- projects:
- - name: 'apex-verify-unit-tests-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'apex-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- ARTIFACT_VERSION=$ARTIFACT_VERSION
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'apex-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- ARTIFACT_VERSION=$ARTIFACT_VERSION
- DEPLOY_SCENARIO={verify-scenario}
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- FUNCTEST_SUITE_NAME=tempest_smoke
- FUNCTEST_MODE=testcase
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
-
-# Verify Scenario Gate
-- job-template:
- name: 'apex-verify-gate-{stream}'
-
- concurrent: true
-
- disabled: '{obj:disabled}'
-
- project-type: 'multijob'
-
- parameters:
- - '{project}-virtual-{stream}-defaults'
- - apex-parameter:
- gs-pathname: '{gs-pathname}/dev'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-        description: "Used for overriding the GIT URL coming from the parameters macro."
- - string:
- name: ARTIFACT_VERSION
- default: dev
- description: "Used for overriding the ARTIFACT_VERSION"
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - comment-added-contains-event:
- comment-contains-value: '^Patch Set [0-9]+: Code-Review\+2.*start-gate-scenario:.*'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'apex'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'ci/**'
- - compare-type: ANT
- pattern: 'build/**'
- - compare-type: ANT
- pattern: 'lib/**'
- - compare-type: ANT
- pattern: 'config/**'
- - compare-type: ANT
- pattern: 'apex/**'
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-virtual.*'
- - 'apex-.*-promote.*'
- - 'odl-netvirt.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- # yamllint disable rule:line-length
- - shell: |
- echo DEPLOY_SCENARIO=$(echo $GERRIT_EVENT_COMMENT_TEXT | grep start-gate-scenario | grep -Eo '(os|k8s)-.*$') > detected_scenario
- # yamllint enable rule:line-length
- - inject:
- properties-file: detected_scenario
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- ARTIFACT_VERSION=$ARTIFACT_VERSION
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- GERRIT_EVENT_COMMENT_TEXT=$GERRIT_EVENT_COMMENT_TEXT
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - shell:
- !include-raw-escape: ./apex-functest-scenario.sh
- - inject:
- properties-file: functest_scenario
- override-build-parameters: true
- - multijob:
- name: functest-smoke
- condition: ALWAYS
- projects:
- - name: 'functest-apex-virtual-suite-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- DOCKER_TAG=$DOCKER_TAG
- FUNCTEST_SUITE_NAME=tempest_smoke
- FUNCTEST_MODE=testcase
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- enable-condition: "def m = '$DEPLOY_SCENARIO' != 'k8s-nosdn-nofeature-noha'"
- abort-all-job: true
- git-revision: false
- - multijob:
- name: apex-fetch-logs
- projects:
- - name: 'apex-fetch-logs-{stream}'
- current-parameters: true
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-
-########################
-# builder macros
-########################
-- builder:
- name: apex-unit-test
- builders:
- - shell:
- !include-raw: ./apex-unit-test.sh
diff --git a/jjb/apex/apex.yaml b/jjb/apex/apex.yaml
deleted file mode 100644
index fb2eaa2db..000000000
--- a/jjb/apex/apex.yaml
+++ /dev/null
@@ -1,2008 +0,0 @@
----
-- project:
- name: 'apex'
- project: 'apex'
- jobs:
- - 'apex-fetch-logs-{stream}'
- - 'apex-runner-cperf-{stream}'
- - 'apex-virtual-{stream}'
- - 'apex-deploy-{platform}-{stream}'
- - 'apex-daily-{stream}'
- - 'apex-{snap_type}-promote-daily-{stream}-os-{os_version}-{topology}'
- - 'apex-fdio-promote-daily-{stream}'
- - 'apex-{scenario}-baremetal-{scenario_stream}'
- - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
- - 'apex-upload-snapshot'
- - 'apex-create-snapshot'
- - 'apex-fetch-snap-info'
- - 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
- # stream: branch with - in place of / (eg. stable-arno)
- # branch: branch (eg. stable/arno)
- stream:
- - master: &master
- branch: 'master'
- gs-pathname: ''
- build-slave: 'apex-build-master'
- virtual-slave: 'apex-virtual-master'
- baremetal-slave: 'apex-baremetal-master'
- verify-scenario: 'os-nosdn-nofeature-noha'
- scenario_stream: 'master'
- disable_daily: true
- disable_promote: true
- - hunter: &hunter
- branch: 'stable/hunter'
- gs-pathname: '/hunter'
- build-slave: 'apex-build-master'
- virtual-slave: 'apex-virtual-master'
- baremetal-slave: 'apex-baremetal-master'
- verify-scenario: 'os-nosdn-nofeature-ha'
- scenario_stream: 'hunter'
- disable_daily: true
- disable_promote: true
- - gambia: &gambia
- branch: 'stable/gambia'
- gs-pathname: '/gambia'
- build-slave: 'apex-build-master'
- virtual-slave: 'apex-virtual-master'
- baremetal-slave: 'apex-baremetal-master'
- verify-scenario: 'os-nosdn-nofeature-ha'
- scenario_stream: 'gambia'
- disable_daily: true
- disable_promote: true
- - fraser: &fraser
- branch: 'stable/fraser'
- gs-pathname: '/fraser'
- build-slave: 'apex-build-master'
- virtual-slave: 'apex-virtual-master'
- baremetal-slave: 'apex-baremetal-master'
- verify-scenario: 'os-nosdn-nofeature-ha'
- scenario_stream: 'fraser'
- disable_daily: true
- disable_promote: true
- - euphrates: &euphrates
- branch: 'stable/euphrates'
- gs-pathname: '/euphrates'
- build-slave: 'apex-build-master'
- virtual-slave: 'apex-virtual-master'
- baremetal-slave: 'apex-baremetal-master'
- verify-scenario: 'os-odl-nofeature-ha'
- scenario_stream: 'euphrates'
- disable_daily: true
- disable_promote: true
- - danube: &danube
- branch: 'stable/danube'
- gs-pathname: '/danube'
- build-slave: 'apex-build-danube'
- virtual-slave: 'apex-virtual-danube'
- baremetal-slave: 'apex-baremetal-danube'
- verify-scenario: 'os-odl_l3-nofeature-ha'
- scenario_stream: 'danube'
- disabled: true
- disable_daily: true
- disable_promote: true
-
- scenario:
- - 'os-nosdn-nofeature-noha':
- <<: *danube
- - 'os-nosdn-nofeature-ha':
- <<: *danube
- - 'os-nosdn-nofeature-ha-ipv6':
- <<: *danube
- - 'os-nosdn-ovs-noha':
- <<: *danube
- - 'os-nosdn-ovs-ha':
- <<: *danube
- - 'os-nosdn-fdio-noha':
- <<: *danube
- - 'os-nosdn-fdio-ha':
- <<: *danube
- - 'os-nosdn-kvm-ha':
- <<: *danube
- - 'os-nosdn-kvm-noha':
- <<: *danube
- - 'os-odl_l2-fdio-noha':
- <<: *danube
- - 'os-odl_l2-fdio-ha':
- <<: *danube
- - 'os-odl_netvirt-fdio-noha':
- <<: *danube
- - 'os-odl_l2-sfc-noha':
- <<: *danube
- - 'os-odl_l3-nofeature-noha':
- <<: *danube
- - 'os-odl_l3-nofeature-ha':
- <<: *danube
- - 'os-odl_l3-ovs-noha':
- <<: *danube
- - 'os-odl_l3-ovs-ha':
- <<: *danube
- - 'os-odl-bgpvpn-ha':
- <<: *danube
- - 'os-odl-gluon-noha':
- <<: *danube
- - 'os-odl_l3-fdio-noha':
- <<: *danube
- - 'os-odl_l3-fdio-ha':
- <<: *danube
- - 'os-odl_l3-fdio_dvr-noha':
- <<: *danube
- - 'os-odl_l3-fdio_dvr-ha':
- <<: *danube
- - 'os-odl_l3-csit-noha':
- <<: *danube
- - 'os-onos-nofeature-ha':
- <<: *danube
- - 'os-ovn-nofeature-noha':
- <<: *danube
- - 'os-nosdn-nofeature-noha':
- <<: *master
- - 'os-nosdn-nofeature-ha':
- <<: *master
- - 'os-nosdn-nofeature-noha':
- <<: *gambia
- - 'os-nosdn-nofeature-ha':
- <<: *gambia
- - 'os-nosdn-nofeature-ha-ipv6':
- <<: *gambia
- - 'os-odl-nofeature-noha':
- <<: *gambia
- - 'os-odl-nofeature-ha':
- <<: *gambia
- - 'k8s-nosdn-nofeature-noha':
- <<: *gambia
- - 'os-odl-bgpvpn-ha':
- <<: *gambia
- - 'os-odl-bgpvpn-noha':
- <<: *gambia
- - 'os-odl-sfc-ha':
- <<: *gambia
- - 'os-odl-sfc-noha':
- <<: *gambia
- - 'os-nosdn-calipso-noha':
- <<: *gambia
- - 'os-ovn-nofeature-ha':
- <<: *gambia
- - 'os-nosdn-nofeature-ha':
- <<: *fraser
- - 'os-odl-bgpvpn-ha':
- <<: *fraser
- - 'os-nosdn-nofeature-noha':
- <<: *hunter
- - 'os-nosdn-nofeature-ha':
- <<: *hunter
- - 'os-ovn-nofeature-ha':
- <<: *hunter
- - 'os-nosdn-nofeature-noha':
- <<: *euphrates
- - 'os-nosdn-nofeature-ha':
- <<: *euphrates
- - 'os-odl-nofeature-ha':
- <<: *euphrates
- - 'os-odl-nofeature-noha':
- <<: *euphrates
- - 'os-odl-bgpvpn-ha':
- <<: *euphrates
- - 'os-ovn-nofeature-noha':
- <<: *euphrates
- - 'os-nosdn-fdio-noha':
- <<: *euphrates
- - 'os-nosdn-fdio-ha':
- <<: *euphrates
- - 'os-nosdn-bar-ha':
- <<: *euphrates
- - 'os-nosdn-bar-noha':
- <<: *euphrates
- - 'os-nosdn-nofeature-ha-ipv6':
- <<: *euphrates
- - 'os-nosdn-ovs_dpdk-noha':
- <<: *euphrates
- - 'os-nosdn-ovs_dpdk-ha':
- <<: *euphrates
- - 'os-nosdn-kvm_ovs_dpdk-noha':
- <<: *euphrates
- - 'os-nosdn-kvm_ovs_dpdk-ha':
- <<: *euphrates
- - 'os-odl-sfc-noha':
- <<: *euphrates
- - 'os-odl-sfc-ha':
- <<: *euphrates
-
- platform:
- - 'baremetal'
- - 'virtual'
-
- os_version:
- - 'queens':
- os_scenario: 'nofeature'
- odl_branch: 'stable/oxygen'
- - 'rocky':
- os_scenario: 'rocky'
- odl_branch: 'stable/oxygen'
- - 'master':
- os_scenario: 'nofeature'
- odl_branch: 'stable/fluorine'
-
- topology:
- - 'noha'
- - 'ha'
- - 'noha-allinone'
-
- snap_type:
- - csit:
- sdn: 'odl'
- - functest:
- sdn: 'nosdn'
-# Fetch Logs Job
-- job-template:
- name: 'apex-fetch-logs-{stream}'
-
- concurrent: true
-
- disabled: false
- scm:
- - git-scm-gerrit
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- # yamllint enable rule:line-length
- properties:
- - logrotate-default
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - 'apex-fetch-logs'
-
-- job-template:
- name: 'apex-runner-cperf-{stream}'
-
- # runner cperf job
- project-type: 'multijob'
- node: 'intel-pod2'
-
- disabled: false
-
- parameters:
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-        description: "Used for overriding the GIT URL coming from the parameters macro."
-
- scm:
- - git-scm
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: false
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-deploy.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME"
- - multijob:
- name: 'Baremetal Deploy'
- condition: ALWAYS
- projects:
- - name: 'apex-deploy-baremetal-{stream}'
- node-parameters: false
- current-parameters: true
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- OPNFV_CLEAN=yes
- DEPLOY_SCENARIO={verify-scenario}
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: CPERF
- condition: SUCCESSFUL
- projects:
- - name: 'cperf-apex-intel-pod2-daily-master'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={verify-scenario}
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
-
-# Deploy job
-- job-template:
- name: 'apex-deploy-{platform}-{stream}'
-
- concurrent: true
-
- disabled: false
- quiet-period: 30
- scm:
- - git-scm-gerrit
-
- wrappers:
- - timeout:
- timeout: 140
- fail: true
-
- parameters:
- - '{project}-{platform}-{stream}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{verify-scenario}'
- description: "Scenario to deploy with."
- # yamllint disable rule:line-length
- - string:
- name: OPNFV_CLEAN
- default: 'no'
-        description: "Use 'yes' (lower case) to invoke clean. Indicates whether the deploy environment should be cleaned before deployment."
-
- # yamllint enable rule:line-length
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-deploy.*'
- - 'functest.*'
- - 'yardstick.*'
- - 'dovetail.*'
- - 'storperf.*'
- - 'odl-netvirt.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - 'apex-download-artifact'
- - 'apex-deploy'
- - 'clean-workspace'
-
-
-# Virtual Deploy and Test
-- job-template:
- name: 'apex-virtual-{stream}'
-
- project-type: 'multijob'
-
- concurrent: true
-
- disabled: false
-
- scm:
- - git-scm-gerrit
-
- parameters:
- - '{project}-defaults'
- - '{project}-virtual-{stream}-defaults'
- - 'functest-suite-parameter'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{verify-scenario}'
- description: "Scenario to deploy with."
- - string:
- name: ARTIFACT_VERSION
- default: dev
- description: "Used for overriding the ARTIFACT_VERSION"
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-runner.*'
- - 'apex-run.*'
- - 'apex-virtual-.*'
- - 'apex-verify-gate-.*'
- - 'odl-netvirt.*'
- - 'apex-.*-promote.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- ARTIFACT_VERSION=$ARTIFACT_VERSION
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- PROMOTE=$PROMOTE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: functest-smoke
- condition: ALWAYS
- projects:
- - name: 'functest-apex-virtual-suite-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - multijob:
- name: apex-fetch-logs
- projects:
- - name: 'apex-fetch-logs-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-
-# Baremetal Deploy and Test
-- job-template:
- name: 'apex-{scenario}-baremetal-{scenario_stream}'
-
- project-type: 'multijob'
-
- disabled: false
-
- scm:
- - git-scm
-
- parameters:
- - '{project}-defaults'
- - '{project}-baremetal-{scenario_stream}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
- description: "Scenario to deploy with."
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-.*-promote.*'
- - 'apex-run.*'
- - 'apex-.+-baremetal-.+'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: 'Baremetal Deploy'
- condition: SUCCESSFUL
- execution-type: SEQUENTIALLY
- projects:
- - name: 'apex-deploy-baremetal-{scenario_stream}'
- node-parameters: true
- current-parameters: true
- predefined-parameters: |
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-fetch-logs-{scenario_stream}'
- current-parameters: true
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - shell:
- !include-raw-escape: ./apex-functest-scenario.sh
- - inject:
- properties-file: functest_scenario
- override-build-parameters: true
- - multijob:
- name: 'OPNFV Test Suite'
- condition: ALWAYS
- projects:
- - name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- DOCKER_TAG=$DOCKER_TAG
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - multijob:
- name: apex-fetch-logs
- projects:
- - name: 'apex-fetch-logs-{scenario_stream}'
- current-parameters: true
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- publishers:
- - groovy-postbuild:
- script:
- !include-raw-escape: ./update-build-result.groovy
-
-# Baremetal test job
-- job-template:
- name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
-
- project-type: 'multijob'
-
- disabled: false
-
- parameters:
- - '{project}-defaults'
- - '{project}-baremetal-{scenario_stream}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
- description: "Scenario to deploy with."
- - string:
- name: DOCKER_TAG
- default: ''
- description: Default docker tag to pass to functest
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-run.*'
- - 'apex-testsuite-.+-baremetal-.+'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Testing on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: Functest
- condition: ALWAYS
- projects:
- - name: 'functest-apex-baremetal-daily-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- DOCKER_TAG=$DOCKER_TAG
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
- - multijob:
- name: Yardstick
- condition: ALWAYS
- projects:
- - name: 'yardstick-apex-baremetal-daily-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
- - multijob:
- name: Dovetail-default-mandatory
- condition: ALWAYS
- projects:
- - name: 'dovetail-apex-baremetal-default-mandatory-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
- abort-all-job: false
- git-revision: false
- - multijob:
- name: Dovetail-default-optional
- condition: ALWAYS
- projects:
- - name: 'dovetail-apex-baremetal-default-optional-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
- abort-all-job: false
- git-revision: false
- - multijob:
- name: Dovetail-proposed_tests
- condition: ALWAYS
- projects:
- - name: 'dovetail-apex-baremetal-proposed_tests-optional-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
- abort-all-job: false
- git-revision: false
- - multijob:
- name: StorPerf
- condition: ALWAYS
- projects:
- - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-nosdn-nofeature-ha/"
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
-# Build status is always success because the conditional plugin prefetches the
-# build status before the multijob phases execute
-# - conditional-step:
-# condition-kind: current-status
-# condition-worst: SUCCESS
-#     condition-best: SUCCESS
-# on-evaluation-failure: mark-unstable
-# steps:
-# - shell: 'echo "Tests Passed"'
-
-- job-template:
- name: 'apex-daily-{stream}'
-
- # Job template for daily build
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- project-type: 'multijob'
-
- disabled: '{obj:disable_daily}'
-
- scm:
- - git-scm
-
- parameters:
- - '{project}-defaults'
- - '{project}-baremetal-{stream}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-daily.*'
-
- triggers:
- - 'apex-{stream}'
-
- builders:
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'apex-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: 'Verify and upload ISO'
- condition: SUCCESSFUL
- projects:
- - name: 'apex-verify-iso-{stream}'
- current-parameters: false
- predefined-parameters: |
- BUILD_DIRECTORY=$WORKSPACE/../apex-build-{stream}/.build
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - apex-builder-{stream}
-
-# snapshot info fetch
-- job-template:
- name: 'apex-fetch-snap-info'
-
- disabled: false
-
- parameters:
- - '{project}-defaults'
-
- builders:
- - shell:
- !include-raw-escape: ./apex-fetch-snap-info.sh
-
-# snapshot create
-- job-template:
- name: 'apex-create-snapshot'
-
- disabled: false
-
- parameters:
- - '{project}-defaults'
-
- builders:
- - shell:
- !include-raw-escape: ./apex-snapshot-create.sh
-
-# snapshot upload
-- job-template:
- name: 'apex-upload-snapshot'
-
- disabled: false
-
- parameters:
- - '{project}-defaults'
-
- builders:
- - inject:
- properties-content: ARTIFACT_TYPE=snapshot
- - 'apex-upload-artifact'
-
-# CSIT promote
-- job-template:
- name: 'apex-{snap_type}-promote-daily-{stream}-os-{os_version}-{topology}'
-
- # Job template for promoting CSIT Snapshots
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: '{virtual-slave}'
- project-type: 'multijob'
- disabled: '{disable_promote}'
-
- scm:
- - git-scm
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: ARTIFACT_VERSION
- default: dev
- description: "Used for overriding the ARTIFACT_VERSION"
- - string:
- name: PROMOTE
- default: 'True'
- description: "Used for overriding the PROMOTE"
- - string:
- name: GS_URL
- default: 'artifacts.opnfv.org/apex/{os_version}/{topology}'
-        description: "Used for overriding GS_URL from apex params"
- - string:
- name: OS_VERSION
- default: '{os_version}'
- description: OpenStack version short name
- - string:
- name: ODL_BRANCH
- default: '{odl_branch}'
- description: ODL branch being used
- - string:
- name: FORCE_PROMOTE
- default: 'False'
- description: "Used to force promotion and skip CSIT"
- - string:
- name: SNAP_TYPE
- default: '{snap_type}'
- description: Type of snapshot to promote
- properties:
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-daily.*'
- - 'apex-.*-promote.*'
- - 'odl-netvirt.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- triggers:
- - '{stream}-{snap_type}-{os_version}'
-
- builders:
- - multijob:
- name: apex-virtual-deploy
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-virtual-{stream}'
- current-parameters: true
- predefined-parameters: |
- DEPLOY_SCENARIO=os-{sdn}-{os_scenario}-{topology}-{snap_type}
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: fetch snapshot info
- condition: SUCCESSFUL
- projects:
- - name: 'apex-fetch-snap-info'
- current-parameters: true
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: test phase
- condition: SUCCESSFUL
- execution-type: SEQUENTIALLY
- projects:
- - name: cperf-apex-csit-master
- predefined-parameters: |
- ODL_BRANCH=$ODL_BRANCH
- RC_FILE_PATH=/tmp/snap/overcloudrc
- NODE_FILE_PATH=/tmp/snap/node.yaml
- SSH_KEY_PATH=/tmp/snap/id_rsa
- ODL_CONTAINERIZED=true
- OS_VERSION=$OS_VERSION
- SKIP_CSIT=$FORCE_PROMOTE
- SNAP_TYPE=$SNAP_TYPE
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: false
- enable-condition: "def m = '$SNAP_TYPE' ==~ /csit/"
- - name: cperf-upload-logs-csit
- predefined-parameters: |
- ODL_BRANCH=$ODL_BRANCH
- OS_VERSION=$OS_VERSION
- SNAP_TYPE=$SNAP_TYPE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: false
- enable-condition: "def m = '$SNAP_TYPE' ==~ /csit/"
- - name: 'functest-apex-virtual-suite-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- DOCKER_TAG=$DOCKER_TAG
- FUNCTEST_SUITE_NAME=tempest_smoke
- FUNCTEST_MODE=testcase
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- enable-condition: "def m = '$SNAP_TYPE' ==~ /functest/"
- abort-all-job: true
- git-revision: false
- - name: 'apex-fetch-logs-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - multijob:
- name: create snapshot
- condition: SUCCESSFUL
- projects:
- - name: 'apex-create-snapshot'
- current-parameters: true
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: upload snapshot
- condition: SUCCESSFUL
- projects:
- - name: 'apex-upload-snapshot'
- current-parameters: true
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
-
-# FDIO promote
-- job-template:
- name: 'apex-fdio-promote-daily-{stream}'
-
-  # Job template for promoting FDIO Snapshots
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: '{virtual-slave}'
- project-type: 'multijob'
- disabled: false
-
- scm:
- - git-scm
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
-
- properties:
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-deploy.*'
- - 'apex-runner.*'
- - 'apex-daily.*'
-
- builders:
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'apex-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_netvirt-fdio-noha
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- PROMOTE=True
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: create snapshot
- condition: SUCCESSFUL
- projects:
- - name: 'apex-create-snapshot'
- current-parameters: false
- predefined-parameters: |
- SNAP_TYPE=fdio
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: upload snapshot
- condition: SUCCESSFUL
- projects:
- - name: 'apex-upload-snapshot'
- current-parameters: false
- predefined-parameters: |
- SNAP_TYPE=fdio
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
-
-# Flex job
-- job-template:
- name: 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
-
- project-type: 'multijob'
-
- disabled: true
-
- node: 'flex-pod2'
-
- scm:
- - git-scm
-
- triggers:
- - 'apex-{stream}'
-
- parameters:
- - '{project}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
- description: "Scenario to deploy with."
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-.*-promote.*'
- - 'apex-run.*'
- - 'apex-.+-baremetal-.+'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: 'Baremetal Deploy'
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-baremetal-{stream}'
- node-parameters: true
- current-parameters: true
- predefined-parameters: |
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: Yardstick
- condition: ALWAYS
- projects:
- - name: 'yardstick-apex-baremetal-daily-{stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
-
-
-########################
-# parameter macros
-########################
-- parameter:
- name: apex-parameter
- parameters:
- - string:
- name: ARTIFACT_NAME
- default: 'latest'
- description: "RPM Artifact name that will be appended to GS_URL to deploy a specific artifact"
- - string:
- name: ARTIFACT_VERSION
- default: 'daily'
- description: "Artifact version type"
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/.build
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/apex-cache{gs-pathname}
- description: "Directory where the cache to be used during the build is located."
- # yamllint disable rule:line-length
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-        description: "Used for overriding the GIT URL coming from the Global Jenkins configuration in case the job runs on non-LF hardware."
- # yamllint enable rule:line-length
- - string:
- name: GS_PATHNAME
- default: '{gs-pathname}'
- description: "Version directory where opnfv artifacts are stored in gs repository"
- - string:
- name: GS_URL
- default: $GS_BASE{gs-pathname}
- description: "URL to Google Storage."
- - string:
- name: PROMOTE
- default: 'False'
- description: "Flag to know if we should promote/upload snapshot artifacts."
-
-########################
-# builder macros
-########################
-
-# danube Builder
-- builder:
- name: apex-builder-danube
- builders:
- - multijob:
- name: Baremetal Deploy and Test Phase
- condition: SUCCESSFUL
- projects:
- - name: 'apex-os-nosdn-nofeature-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-nofeature-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-nofeature-ha-ipv6-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-ovs-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-ovs-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-fdio-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-fdio-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-kvm-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-kvm-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l2-fdio-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l2-fdio-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_netvirt-fdio-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l2-sfc-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-nofeature-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-nofeature-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-ovs-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-ovs-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-bgpvpn-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-gluon-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-fdio-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-fdio-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-fdio_dvr-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-fdio_dvr-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl_l3-csit-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-onos-nofeature-ha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-ovn-nofeature-noha-baremetal-danube'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-
-# master Builder
-- builder:
- name: apex-builder-master
- builders:
- - multijob:
- name: Baremetal Deploy and Test Phase
- condition: SUCCESSFUL
- projects:
- - name: 'apex-os-nosdn-nofeature-noha-baremetal-master'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-nofeature-ha-baremetal-master'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-
-# gambia Builder
-- builder:
- name: apex-builder-gambia
- builders:
- - multijob:
- name: Baremetal Deploy and Test Phase
- condition: SUCCESSFUL
- projects:
- - name: 'apex-os-nosdn-nofeature-noha-baremetal-gambia'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-nofeature-ha-baremetal-gambia'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-nofeature-ha-ipv6-baremetal-gambia'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-nofeature-noha-baremetal-gambia'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-nofeature-ha-baremetal-gambia'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-k8s-nosdn-nofeature-noha-baremetal-gambia'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-bgpvpn-ha-baremetal-gambia'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-bgpvpn-noha-baremetal-gambia'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-sfc-ha-baremetal-gambia'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-sfc-noha-baremetal-gambia'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-calipso-noha-baremetal-gambia'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-ovn-nofeature-ha-baremetal-gambia'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-
-# fraser Builder
-- builder:
- name: apex-builder-fraser
- builders:
- - multijob:
- name: Baremetal Deploy and Test Phase
- condition: SUCCESSFUL
- projects:
- - name: 'apex-os-nosdn-nofeature-ha-baremetal-fraser'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-bgpvpn-ha-baremetal-fraser'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-
-# hunter Builder
-- builder:
- name: apex-builder-hunter
- builders:
- - multijob:
- name: Baremetal Deploy and Test Phase
- condition: SUCCESSFUL
- projects:
- - name: 'apex-os-nosdn-nofeature-noha-baremetal-hunter'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-nofeature-ha-baremetal-hunter'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-ovn-nofeature-ha-baremetal-hunter'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-
-# euphrates Builder
-- builder:
- name: apex-builder-euphrates
- builders:
- - multijob:
- name: Baremetal Deploy and Test Phase
- condition: SUCCESSFUL
- projects:
- - name: 'apex-os-nosdn-nofeature-noha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-nofeature-ha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-nofeature-ha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-nofeature-noha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-bgpvpn-ha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-ovn-nofeature-noha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-fdio-noha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-fdio-ha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-bar-ha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-bar-noha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-nofeature-ha-ipv6-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-ovs_dpdk-noha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-ovs_dpdk-ha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-kvm_ovs_dpdk-noha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-nosdn-kvm_ovs_dpdk-ha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-sfc-noha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-os-odl-sfc-ha-baremetal-euphrates'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-- builder:
- name: 'apex-upload-artifact'
- builders:
- - shell:
- !include-raw: ./apex-upload-artifact.sh
-
-- builder:
- name: 'apex-download-artifact'
- builders:
- - shell:
- !include-raw: ./apex-download-artifact.sh
-
-- builder:
- name: 'apex-deploy'
- builders:
- - shell:
- !include-raw: ./apex-deploy.sh
-
-- builder:
- name: 'apex-fetch-logs'
- builders:
- - shell:
- !include-raw: ./apex-fetch-logs.sh
-
-#######################
-# trigger macros
-# timed uses cron syntax: 'minute hour day-of-month month day-of-week'
-########################
-- trigger:
- name: 'apex-master'
- triggers:
- - timed: '0 0 1-31/2 * *'
-
-- trigger:
- name: 'apex-hunter'
- triggers:
- - timed: '0 4 2-30/2 * *'
-
-- trigger:
- name: 'apex-gambia'
- triggers:
- - timed: '0 4 2-30/2 * *'
-
-- trigger:
- name: 'apex-fraser'
- triggers:
- - timed: '0 0 2-30/2 * *'
-
-- trigger:
- name: 'apex-euphrates'
- triggers:
- - timed: '0 0 2-30/2 * *'
-
-- trigger:
- name: 'apex-danube'
- triggers:
- - timed: '0 3 1 1 7'
-
-- trigger:
- name: 'master-csit-master'
- triggers:
- - timed: '0 5 * * *'
-
-- trigger:
- name: 'master-csit-rocky'
- triggers:
- - timed: '0 5 * * *'
-
-- trigger:
- name: 'master-csit-queens'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'hunter-csit-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'hunter-csit-rocky'
- triggers:
- - timed: '0 5 * * *'
-
-- trigger:
- name: 'hunter-csit-queens'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'gambia-csit-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'gambia-csit-rocky'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'gambia-csit-queens'
- triggers:
- - timed: '0 5 * * *'
-
-- trigger:
- name: 'fraser-csit-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'fraser-csit-rocky'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'fraser-csit-queens'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'euphrates-csit-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'euphrates-csit-rocky'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'euphrates-csit-queens'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'danube-csit-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'danube-csit-rocky'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'danube-csit-queens'
- triggers:
- - timed: ''
-- trigger:
- name: 'master-functest-master'
- triggers:
- - timed: '0 3 * * *'
-
-- trigger:
- name: 'master-functest-rocky'
- triggers:
- - timed: '0 3 * * *'
-
-- trigger:
- name: 'master-functest-queens'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'hunter-functest-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'hunter-functest-rocky'
- triggers:
- - timed: '0 3 * * *'
-
-- trigger:
- name: 'hunter-functest-queens'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'gambia-functest-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'gambia-functest-rocky'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'gambia-functest-queens'
- triggers:
- - timed: '0 3 * * *'
-
-- trigger:
- name: 'fraser-functest-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'fraser-functest-rocky'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'fraser-functest-queens'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'euphrates-functest-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'euphrates-functest-rocky'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'euphrates-functest-queens'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'danube-functest-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'danube-functest-rocky'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'danube-functest-queens'
- triggers:
- - timed: ''
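
Note: the 'timed' values in the trigger macros above use the standard Jenkins cron field order, so '0 0 1-31/2 * *' fires at 00:00 on odd days of the month, '0 4 2-30/2 * *' at 04:00 on even days, and an empty string leaves the trigger disabled. A minimal Python sketch, not part of the repository, that simply labels the five fields of such an expression:

    # Label the five fields of a Jenkins/JJB 'timed' cron expression.
    FIELDS = ('minute', 'hour', 'day-of-month', 'month', 'day-of-week')

    def describe_timed(spec: str) -> dict:
        parts = spec.split()
        if len(parts) != len(FIELDS):
            raise ValueError(f"expected 5 fields, got {len(parts)}: {spec!r}")
        return dict(zip(FIELDS, parts))

    # The apex-master daily trigger defined above
    print(describe_timed('0 0 1-31/2 * *'))
    # {'minute': '0', 'hour': '0', 'day-of-month': '1-31/2', 'month': '*', 'day-of-week': '*'}
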
diff --git a/jjb/apex/apex.yaml.j2 b/jjb/apex/apex.yaml.j2
deleted file mode 100644
index 293aacecb..000000000
--- a/jjb/apex/apex.yaml.j2
+++ /dev/null
@@ -1,1361 +0,0 @@
----
-- project:
- name: 'apex'
- project: 'apex'
- jobs:
- - 'apex-fetch-logs-{stream}'
- - 'apex-runner-cperf-{stream}'
- - 'apex-virtual-{stream}'
- - 'apex-deploy-{platform}-{stream}'
- - 'apex-daily-{stream}'
- - 'apex-{snap_type}-promote-daily-{stream}-os-{os_version}-{topology}'
- - 'apex-fdio-promote-daily-{stream}'
- - 'apex-{scenario}-baremetal-{scenario_stream}'
- - 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
- - 'apex-upload-snapshot'
- - 'apex-create-snapshot'
- - 'apex-fetch-snap-info'
- - 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
- # stream: branch with - in place of / (eg. stable-arno)
- # branch: branch (eg. stable/arno)
- stream:
- - master: &master
- branch: 'master'
- gs-pathname: ''
- build-slave: 'apex-build-master'
- virtual-slave: 'apex-virtual-master'
- baremetal-slave: 'apex-baremetal-master'
- verify-scenario: 'os-nosdn-nofeature-noha'
- scenario_stream: 'master'
- disable_daily: true
- disable_promote: true
- - hunter: &hunter
- branch: 'stable/hunter'
- gs-pathname: '/hunter'
- build-slave: 'apex-build-master'
- virtual-slave: 'apex-virtual-master'
- baremetal-slave: 'apex-baremetal-master'
- verify-scenario: 'os-nosdn-nofeature-ha'
- scenario_stream: 'hunter'
- disable_daily: true
- disable_promote: true
- - gambia: &gambia
- branch: 'stable/gambia'
- gs-pathname: '/gambia'
- build-slave: 'apex-build-master'
- virtual-slave: 'apex-virtual-master'
- baremetal-slave: 'apex-baremetal-master'
- verify-scenario: 'os-nosdn-nofeature-ha'
- scenario_stream: 'gambia'
- disable_daily: true
- disable_promote: true
- - fraser: &fraser
- branch: 'stable/fraser'
- gs-pathname: '/fraser'
- build-slave: 'apex-build-master'
- virtual-slave: 'apex-virtual-master'
- baremetal-slave: 'apex-baremetal-master'
- verify-scenario: 'os-nosdn-nofeature-ha'
- scenario_stream: 'fraser'
- disable_daily: true
- disable_promote: true
- - euphrates: &euphrates
- branch: 'stable/euphrates'
- gs-pathname: '/euphrates'
- build-slave: 'apex-build-master'
- virtual-slave: 'apex-virtual-master'
- baremetal-slave: 'apex-baremetal-master'
- verify-scenario: 'os-odl-nofeature-ha'
- scenario_stream: 'euphrates'
- disable_daily: true
- disable_promote: true
- - danube: &danube
- branch: 'stable/danube'
- gs-pathname: '/danube'
- build-slave: 'apex-build-danube'
- virtual-slave: 'apex-virtual-danube'
- baremetal-slave: 'apex-baremetal-danube'
- verify-scenario: 'os-odl_l3-nofeature-ha'
- scenario_stream: 'danube'
- disabled: true
- disable_daily: true
- disable_promote: true
-
- scenario:
- {%- for stream in scenarios %}
- {%- for scenario in scenarios[stream] %}
- - '{{scenario}}':
- <<: *{{stream}}
- {%- endfor %}
- {%- endfor %}
-
- platform:
- - 'baremetal'
- - 'virtual'
-
- os_version:
- - 'queens':
- os_scenario: 'nofeature'
- odl_branch: 'stable/oxygen'
- - 'rocky':
- os_scenario: 'rocky'
- odl_branch: 'stable/oxygen'
- - 'master':
- os_scenario: 'nofeature'
- odl_branch: 'stable/fluorine'
-
- topology:
- - 'noha'
- - 'ha'
- - 'noha-allinone'
-
- snap_type:
- - csit:
- sdn: 'odl'
- - functest:
- sdn: 'nosdn'
-# Fetch Logs Job
-- job-template:
- name: 'apex-fetch-logs-{stream}'
-
- concurrent: true
-
- disabled: false
- scm:
- - git-scm-gerrit
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- # yamllint enable rule:line-length
- properties:
- - logrotate-default
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - 'apex-fetch-logs'
-
-- job-template:
- name: 'apex-runner-cperf-{stream}'
-
- # runner cperf job
- project-type: 'multijob'
- node: 'intel-pod2'
-
- disabled: false
-
- parameters:
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from parameters macro."
-
- scm:
- - git-scm
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: false
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-deploy.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME"
- - multijob:
- name: 'Baremetal Deploy'
- condition: ALWAYS
- projects:
- - name: 'apex-deploy-baremetal-{stream}'
- node-parameters: false
- current-parameters: true
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- OPNFV_CLEAN=yes
- DEPLOY_SCENARIO={verify-scenario}
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: CPERF
- condition: SUCCESSFUL
- projects:
- - name: 'cperf-apex-intel-pod2-daily-master'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO={verify-scenario}
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
-
-# Deploy job
-- job-template:
- name: 'apex-deploy-{platform}-{stream}'
-
- concurrent: true
-
- disabled: false
- quiet-period: 30
- scm:
- - git-scm-gerrit
-
- wrappers:
- - timeout:
- timeout: 140
- fail: true
-
- parameters:
- - '{project}-{platform}-{stream}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{verify-scenario}'
- description: "Scenario to deploy with."
- # yamllint disable rule:line-length
- - string:
- name: OPNFV_CLEAN
- default: 'no'
- description: "Use yes in lower case to invoke clean. Indicates if the deploy environment should be cleaned before deployment"
-
- # yamllint enable rule:line-length
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-deploy.*'
- - 'functest.*'
- - 'yardstick.*'
- - 'dovetail.*'
- - 'storperf.*'
- - 'odl-netvirt.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - 'apex-download-artifact'
- - 'apex-deploy'
- - 'clean-workspace'
-
-
-# Virtual Deploy and Test
-- job-template:
- name: 'apex-virtual-{stream}'
-
- project-type: 'multijob'
-
- concurrent: true
-
- disabled: false
-
- scm:
- - git-scm-gerrit
-
- parameters:
- - '{project}-defaults'
- - '{project}-virtual-{stream}-defaults'
- - 'functest-suite-parameter'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{verify-scenario}'
- description: "Scenario to deploy with."
- - string:
- name: ARTIFACT_VERSION
- default: dev
- description: "Used for overriding the ARTIFACT_VERSION"
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-runner.*'
- - 'apex-run.*'
- - 'apex-virtual-.*'
- - 'apex-verify-gate-.*'
- - 'odl-netvirt.*'
- - 'apex-.*-promote.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- ARTIFACT_VERSION=$ARTIFACT_VERSION
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- PROMOTE=$PROMOTE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: functest-smoke
- condition: ALWAYS
- projects:
- - name: 'functest-apex-virtual-suite-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - multijob:
- name: apex-fetch-logs
- projects:
- - name: 'apex-fetch-logs-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-
-# Baremetal Deploy and Test
-- job-template:
- name: 'apex-{scenario}-baremetal-{scenario_stream}'
-
- project-type: 'multijob'
-
- disabled: false
-
- scm:
- - git-scm
-
- parameters:
- - '{project}-defaults'
- - '{project}-baremetal-{scenario_stream}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
- description: "Scenario to deploy with."
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-.*-promote.*'
- - 'apex-run.*'
- - 'apex-.+-baremetal-.+'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: 'Baremetal Deploy'
- condition: SUCCESSFUL
- execution-type: SEQUENTIALLY
- projects:
- - name: 'apex-deploy-baremetal-{scenario_stream}'
- node-parameters: true
- current-parameters: true
- predefined-parameters: |
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - name: 'apex-fetch-logs-{scenario_stream}'
- current-parameters: true
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - shell:
- !include-raw-escape: ./apex-functest-scenario.sh
- - inject:
- properties-file: functest_scenario
- override-build-parameters: true
- - multijob:
- name: 'OPNFV Test Suite'
- condition: ALWAYS
- projects:
- - name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- DOCKER_TAG=$DOCKER_TAG
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - multijob:
- name: apex-fetch-logs
- projects:
- - name: 'apex-fetch-logs-{scenario_stream}'
- current-parameters: true
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- publishers:
- - groovy-postbuild:
- script:
- !include-raw-escape: ./update-build-result.groovy
-
-# Baremetal test job
-- job-template:
- name: 'apex-testsuite-{scenario}-baremetal-{scenario_stream}'
-
- project-type: 'multijob'
-
- disabled: false
-
- parameters:
- - '{project}-defaults'
- - '{project}-baremetal-{scenario_stream}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
- description: "Scenario to deploy with."
- - string:
- name: DOCKER_TAG
- default: ''
- description: Default docker tag to pass to functest
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-run.*'
- - 'apex-testsuite-.+-baremetal-.+'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Testing on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: Functest
- condition: ALWAYS
- projects:
- - name: 'functest-apex-baremetal-daily-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- DOCKER_TAG=$DOCKER_TAG
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
- - multijob:
- name: Yardstick
- condition: ALWAYS
- projects:
- - name: 'yardstick-apex-baremetal-daily-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
- - multijob:
- name: Dovetail-default-mandatory
- condition: ALWAYS
- projects:
- - name: 'dovetail-apex-baremetal-default-mandatory-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
- abort-all-job: false
- git-revision: false
- - multijob:
- name: Dovetail-default-optional
- condition: ALWAYS
- projects:
- - name: 'dovetail-apex-baremetal-default-optional-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
- abort-all-job: false
- git-revision: false
- - multijob:
- name: Dovetail-proposed_tests
- condition: ALWAYS
- projects:
- - name: 'dovetail-apex-baremetal-proposed_tests-optional-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|odl-bgpvpn)-ha/"
- abort-all-job: false
- git-revision: false
- - multijob:
- name: StorPerf
- condition: ALWAYS
- projects:
- - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-nosdn-nofeature-ha/"
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
-# Build status is always success because the conditional plugin prefetches the
-# build status before the multijob phases execute
-# - conditional-step:
-# condition-kind: current-status
-# condition-worst: SUCCESS
-# condtion-best: SUCCESS
-# on-evaluation-failure: mark-unstable
-# steps:
-# - shell: 'echo "Tests Passed"'
-
-- job-template:
- name: 'apex-daily-{stream}'
-
- # Job template for daily build
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- project-type: 'multijob'
-
- disabled: '{obj:disable_daily}'
-
- scm:
- - git-scm
-
- parameters:
- - '{project}-defaults'
- - '{project}-baremetal-{stream}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-daily.*'
-
- triggers:
- - 'apex-{stream}'
-
- builders:
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'apex-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: 'Verify and upload ISO'
- condition: SUCCESSFUL
- projects:
- - name: 'apex-verify-iso-{stream}'
- current-parameters: false
- predefined-parameters: |
- BUILD_DIRECTORY=$WORKSPACE/../apex-build-{stream}/.build
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - apex-builder-{stream}
-
-# snapshot info fetch
-- job-template:
- name: 'apex-fetch-snap-info'
-
- disabled: false
-
- parameters:
- - '{project}-defaults'
-
- builders:
- - shell:
- !include-raw-escape: ./apex-fetch-snap-info.sh
-
-# snapshot create
-- job-template:
- name: 'apex-create-snapshot'
-
- disabled: false
-
- parameters:
- - '{project}-defaults'
-
- builders:
- - shell:
- !include-raw-escape: ./apex-snapshot-create.sh
-
-# snapshot upload
-- job-template:
- name: 'apex-upload-snapshot'
-
- disabled: false
-
- parameters:
- - '{project}-defaults'
-
- builders:
- - inject:
- properties-content: ARTIFACT_TYPE=snapshot
- - 'apex-upload-artifact'
-
-# CSIT promote
-- job-template:
- name: 'apex-{snap_type}-promote-daily-{stream}-os-{os_version}-{topology}'
-
- # Job template for promoting CSIT Snapshots
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: '{virtual-slave}'
- project-type: 'multijob'
- disabled: '{disable_promote}'
-
- scm:
- - git-scm
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: ARTIFACT_VERSION
- default: dev
- description: "Used for overriding the ARTIFACT_VERSION"
- - string:
- name: PROMOTE
- default: 'True'
- description: "Used for overriding the PROMOTE"
- - string:
- name: GS_URL
- default: 'artifacts.opnfv.org/apex/{os_version}/{topology}'
- description: "User for overriding GS_URL from apex params"
- - string:
- name: OS_VERSION
- default: '{os_version}'
- description: OpenStack version short name
- - string:
- name: ODL_BRANCH
- default: '{odl_branch}'
- description: ODL branch being used
- - string:
- name: FORCE_PROMOTE
- default: 'False'
- description: "Used to force promotion and skip CSIT"
- - string:
- name: SNAP_TYPE
- default: '{snap_type}'
- description: Type of snapshot to promote
- properties:
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-daily.*'
- - 'apex-.*-promote.*'
- - 'odl-netvirt.*'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- triggers:
- - '{stream}-{snap_type}-{os_version}'
-
- builders:
- - multijob:
- name: apex-virtual-deploy
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-virtual-{stream}'
- current-parameters: true
- predefined-parameters: |
- DEPLOY_SCENARIO=os-{sdn}-{os_scenario}-{topology}-{snap_type}
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: fetch snapshot info
- condition: SUCCESSFUL
- projects:
- - name: 'apex-fetch-snap-info'
- current-parameters: true
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: test phase
- condition: SUCCESSFUL
- execution-type: SEQUENTIALLY
- projects:
- - name: cperf-apex-csit-master
- predefined-parameters: |
- ODL_BRANCH=$ODL_BRANCH
- RC_FILE_PATH=/tmp/snap/overcloudrc
- NODE_FILE_PATH=/tmp/snap/node.yaml
- SSH_KEY_PATH=/tmp/snap/id_rsa
- ODL_CONTAINERIZED=true
- OS_VERSION=$OS_VERSION
- SKIP_CSIT=$FORCE_PROMOTE
- SNAP_TYPE=$SNAP_TYPE
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: false
- enable-condition: "def m = '$SNAP_TYPE' ==~ /csit/"
- - name: cperf-upload-logs-csit
- predefined-parameters: |
- ODL_BRANCH=$ODL_BRANCH
- OS_VERSION=$OS_VERSION
- SNAP_TYPE=$SNAP_TYPE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: false
- enable-condition: "def m = '$SNAP_TYPE' ==~ /csit/"
- - name: 'functest-apex-virtual-suite-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- DOCKER_TAG=$DOCKER_TAG
- FUNCTEST_SUITE_NAME=tempest_smoke
- FUNCTEST_MODE=testcase
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- enable-condition: "def m = '$SNAP_TYPE' ==~ /functest/"
- abort-all-job: true
- git-revision: false
- - name: 'apex-fetch-logs-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
- - multijob:
- name: create snapshot
- condition: SUCCESSFUL
- projects:
- - name: 'apex-create-snapshot'
- current-parameters: true
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: upload snapshot
- condition: SUCCESSFUL
- projects:
- - name: 'apex-upload-snapshot'
- current-parameters: true
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
-
-# FDIO promote
-- job-template:
- name: 'apex-fdio-promote-daily-{stream}'
-
- # Job template for promoting FDIO Snapshots
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: '{virtual-slave}'
- project-type: 'multijob'
- disabled: false
-
- scm:
- - git-scm
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
-
- properties:
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-deploy.*'
- - 'apex-runner.*'
- - 'apex-daily.*'
-
- builders:
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'apex-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_netvirt-fdio-noha
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- PROMOTE=True
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: create snapshot
- condition: SUCCESSFUL
- projects:
- - name: 'apex-create-snapshot'
- current-parameters: false
- predefined-parameters: |
- SNAP_TYPE=fdio
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: upload snapshot
- condition: SUCCESSFUL
- projects:
- - name: 'apex-upload-snapshot'
- current-parameters: false
- predefined-parameters: |
- SNAP_TYPE=fdio
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
-
-# Flex job
-- job-template:
- name: 'apex-flex-daily-os-nosdn-nofeature-ha-{stream}'
-
- project-type: 'multijob'
-
- disabled: true
-
- node: 'flex-pod2'
-
- scm:
- - git-scm
-
- triggers:
- - 'apex-{stream}'
-
- parameters:
- - '{project}-defaults'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - apex-parameter:
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
- description: "Scenario to deploy with."
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
- blocking-jobs:
- - 'apex-verify.*'
- - 'apex-runner.*'
- - 'apex-.*-promote.*'
- - 'apex-run.*'
- - 'apex-.+-baremetal-.+'
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - description-setter:
- description: "Deployed on $NODE_NAME - Scenario: $DEPLOY_SCENARIO"
- - multijob:
- name: 'Baremetal Deploy'
- condition: SUCCESSFUL
- projects:
- - name: 'apex-deploy-baremetal-{stream}'
- node-parameters: true
- current-parameters: true
- predefined-parameters: |
- OPNFV_CLEAN=yes
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: false
- - multijob:
- name: Yardstick
- condition: ALWAYS
- projects:
- - name: 'yardstick-apex-baremetal-daily-{stream}'
- node-parameters: true
- current-parameters: false
- predefined-parameters:
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- kill-phase-on: NEVER
- abort-all-job: false
- git-revision: false
-
-
-########################
-# parameter macros
-########################
-- parameter:
- name: apex-parameter
- parameters:
- - string:
- name: ARTIFACT_NAME
- default: 'latest'
- description: "RPM Artifact name that will be appended to GS_URL to deploy a specific artifact"
- - string:
- name: ARTIFACT_VERSION
- default: 'daily'
- description: "Artifact version type"
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/.build
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/apex-cache{gs-pathname}
- description: "Directory where the cache to be used during the build is located."
- # yamllint disable rule:line-length
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: "Used for overriding the GIT URL coming from Global Jenkins configuration in case if the stuff is done on none-LF HW."
- # yamllint enable rule:line-length
- - string:
- name: GS_PATHNAME
- default: '{gs-pathname}'
- description: "Version directory where opnfv artifacts are stored in gs repository"
- - string:
- name: GS_URL
- default: $GS_BASE{gs-pathname}
- description: "URL to Google Storage."
- - string:
- name: PROMOTE
- default: 'False'
- description: "Flag to know if we should promote/upload snapshot artifacts."
-
-########################
-# builder macros
-########################
-{% for stream in scenarios %}
-# {{ stream }} Builder
-- builder:
- name: apex-builder-{{ stream }}
- builders:
- - multijob:
- name: Baremetal Deploy and Test Phase
- condition: SUCCESSFUL
- projects:
-{%- for scenario in scenarios[stream] %}
- - name: 'apex-{{ scenario }}-baremetal-{{ stream }}'
- node-parameters: false
- current-parameters: false
- predefined-parameters: |
- OPNFV_CLEAN=yes
- kill-phase-on: NEVER
- abort-all-job: true
- git-revision: false
-{%- endfor %}
-{% endfor -%}
-
-- builder:
- name: 'apex-upload-artifact'
- builders:
- - shell:
- !include-raw: ./apex-upload-artifact.sh
-
-- builder:
- name: 'apex-download-artifact'
- builders:
- - shell:
- !include-raw: ./apex-download-artifact.sh
-
-- builder:
- name: 'apex-deploy'
- builders:
- - shell:
- !include-raw: ./apex-deploy.sh
-
-- builder:
- name: 'apex-fetch-logs'
- builders:
- - shell:
- !include-raw: ./apex-fetch-logs.sh
-
-#######################
-# trigger macros
-# timed uses cron syntax: 'minute hour day-of-month month day-of-week'
-########################
-- trigger:
- name: 'apex-master'
- triggers:
- - timed: '0 0 1-31/2 * *'
-
-- trigger:
- name: 'apex-hunter'
- triggers:
- - timed: '0 4 2-30/2 * *'
-
-- trigger:
- name: 'apex-gambia'
- triggers:
- - timed: '0 4 2-30/2 * *'
-
-- trigger:
- name: 'apex-fraser'
- triggers:
- - timed: '0 0 2-30/2 * *'
-
-- trigger:
- name: 'apex-euphrates'
- triggers:
- - timed: '0 0 2-30/2 * *'
-
-- trigger:
- name: 'apex-danube'
- triggers:
- - timed: '0 3 1 1 7'
-
-- trigger:
- name: 'master-csit-master'
- triggers:
- - timed: '0 5 * * *'
-
-- trigger:
- name: 'master-csit-rocky'
- triggers:
- - timed: '0 5 * * *'
-
-- trigger:
- name: 'master-csit-queens'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'hunter-csit-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'hunter-csit-rocky'
- triggers:
- - timed: '0 5 * * *'
-
-- trigger:
- name: 'hunter-csit-queens'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'gambia-csit-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'gambia-csit-rocky'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'gambia-csit-queens'
- triggers:
- - timed: '0 5 * * *'
-
-- trigger:
- name: 'fraser-csit-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'fraser-csit-rocky'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'fraser-csit-queens'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'euphrates-csit-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'euphrates-csit-rocky'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'euphrates-csit-queens'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'danube-csit-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'danube-csit-rocky'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'danube-csit-queens'
- triggers:
- - timed: ''
-- trigger:
- name: 'master-functest-master'
- triggers:
- - timed: '0 3 * * *'
-
-- trigger:
- name: 'master-functest-rocky'
- triggers:
- - timed: '0 3 * * *'
-
-- trigger:
- name: 'master-functest-queens'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'hunter-functest-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'hunter-functest-rocky'
- triggers:
- - timed: '0 3 * * *'
-
-- trigger:
- name: 'hunter-functest-queens'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'gambia-functest-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'gambia-functest-rocky'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'gambia-functest-queens'
- triggers:
- - timed: '0 3 * * *'
-
-- trigger:
- name: 'fraser-functest-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'fraser-functest-rocky'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'fraser-functest-queens'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'euphrates-functest-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'euphrates-functest-rocky'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'euphrates-functest-queens'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'danube-functest-master'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'danube-functest-rocky'
- triggers:
- - timed: ''
-
-- trigger:
- name: 'danube-functest-queens'
- triggers:
- - timed: ''
diff --git a/jjb/apex/scenarios.yaml.hidden b/jjb/apex/scenarios.yaml.hidden
deleted file mode 100644
index d9d32b473..000000000
--- a/jjb/apex/scenarios.yaml.hidden
+++ /dev/null
@@ -1,68 +0,0 @@
-master:
- - 'os-nosdn-nofeature-noha'
- - 'os-nosdn-nofeature-ha'
-hunter:
- - 'os-nosdn-nofeature-noha'
- - 'os-nosdn-nofeature-ha'
- - 'os-ovn-nofeature-ha'
-gambia:
- - 'os-nosdn-nofeature-noha'
- - 'os-nosdn-nofeature-ha'
- - 'os-nosdn-nofeature-ha-ipv6'
- - 'os-odl-nofeature-noha'
- - 'os-odl-nofeature-ha'
- - 'k8s-nosdn-nofeature-noha'
- - 'os-odl-bgpvpn-ha'
- - 'os-odl-bgpvpn-noha'
- - 'os-odl-sfc-ha'
- - 'os-odl-sfc-noha'
- - 'os-nosdn-calipso-noha'
- - 'os-ovn-nofeature-ha'
-fraser:
- - 'os-nosdn-nofeature-ha'
- - 'os-odl-bgpvpn-ha'
-euphrates:
- - 'os-nosdn-nofeature-noha'
- - 'os-nosdn-nofeature-ha'
- - 'os-odl-nofeature-ha'
- - 'os-odl-nofeature-noha'
- - 'os-odl-bgpvpn-ha'
- - 'os-ovn-nofeature-noha'
- - 'os-nosdn-fdio-noha'
- - 'os-nosdn-fdio-ha'
- - 'os-nosdn-bar-ha'
- - 'os-nosdn-bar-noha'
- - 'os-nosdn-nofeature-ha-ipv6'
- - 'os-nosdn-ovs_dpdk-noha'
- - 'os-nosdn-ovs_dpdk-ha'
- - 'os-nosdn-kvm_ovs_dpdk-noha'
- - 'os-nosdn-kvm_ovs_dpdk-ha'
- - 'os-odl-sfc-noha'
- - 'os-odl-sfc-ha'
-danube:
- - 'os-nosdn-nofeature-noha'
- - 'os-nosdn-nofeature-ha'
- - 'os-nosdn-nofeature-ha-ipv6'
- - 'os-nosdn-ovs-noha'
- - 'os-nosdn-ovs-ha'
- - 'os-nosdn-fdio-noha'
- - 'os-nosdn-fdio-ha'
- - 'os-nosdn-kvm-ha'
- - 'os-nosdn-kvm-noha'
- - 'os-odl_l2-fdio-noha'
- - 'os-odl_l2-fdio-ha'
- - 'os-odl_netvirt-fdio-noha'
- - 'os-odl_l2-sfc-noha'
- - 'os-odl_l3-nofeature-noha'
- - 'os-odl_l3-nofeature-ha'
- - 'os-odl_l3-ovs-noha'
- - 'os-odl_l3-ovs-ha'
- - 'os-odl-bgpvpn-ha'
- - 'os-odl-gluon-noha'
- - 'os-odl_l3-fdio-noha'
- - 'os-odl_l3-fdio-ha'
- - 'os-odl_l3-fdio_dvr-noha'
- - 'os-odl_l3-fdio_dvr-ha'
- - 'os-odl_l3-csit-noha'
- - 'os-onos-nofeature-ha'
- - 'os-ovn-nofeature-noha'
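
Taken together, apex.yaml.j2 and scenarios.yaml.hidden are the inputs from which the per-scenario entries in the generated apex.yaml were produced: the {% for stream in scenarios %} loops expand each stream's scenario list into 'apex-{scenario}-baremetal-{stream}' jobs and builder phases. A minimal rendering sketch in Python, assuming jinja2 and PyYAML are installed (the repository's actual render step may differ):

    import yaml
    from jinja2 import Template

    # scenarios.yaml.hidden maps each stream to its list of deploy scenarios
    with open('jjb/apex/scenarios.yaml.hidden') as f:
        scenarios = yaml.safe_load(f)

    # Expand the Jinja2 loops in the job template with that mapping
    with open('jjb/apex/apex.yaml.j2') as f:
        template = Template(f.read())

    with open('jjb/apex/apex.yaml', 'w') as f:
        f.write(template.render(scenarios=scenarios))
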
diff --git a/jjb/apex/update-build-result.groovy b/jjb/apex/update-build-result.groovy
deleted file mode 100644
index 9edca6b6b..000000000
--- a/jjb/apex/update-build-result.groovy
+++ /dev/null
@@ -1,5 +0,0 @@
-import hudson.model.*
-if (manager.logContains('^.*apex-deploy-baremetal.*SUCCESS$')
- && manager.build.@result == hudson.model.Result.FAILURE) {
- manager.build.@result = hudson.model.Result.UNSTABLE
-}
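
The Groovy postbuild script above downgrades an apex baremetal job from FAILURE to UNSTABLE when the console log shows that the deploy phase itself succeeded, presumably because only later test phases failed. A rough Python illustration of the same log check (Jenkins evaluates the real script through the groovy-postbuild publisher):

    import re

    # Same pattern the Groovy script passes to manager.logContains()
    DEPLOY_OK = re.compile(r'^.*apex-deploy-baremetal.*SUCCESS$', re.MULTILINE)

    def adjust_result(console_log: str, result: str) -> str:
        # Only a failed build whose deploy phase succeeded is downgraded
        if result == 'FAILURE' and DEPLOY_OK.search(console_log):
            return 'UNSTABLE'
        return result

    print(adjust_result('apex-deploy-baremetal-master : SUCCESS', 'FAILURE'))  # UNSTABLE
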
diff --git a/jjb/armband/armband-ci-jobs.yaml b/jjb/armband/armband-ci-jobs.yaml
deleted file mode 100644
index 4f2dc5eb3..000000000
--- a/jjb/armband/armband-ci-jobs.yaml
+++ /dev/null
@@ -1,128 +0,0 @@
----
-# jenkins job templates for Armband
-- project:
- name: 'armband-ci'
- project: 'armband'
- installer: 'fuel'
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- disabled: true
- functest_docker_tag: hunter
- hunter: &hunter
- stream: hunter
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: true
- functest_docker_tag: '{stream}'
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # CI POD's
- # -------------------------------
- pod:
- # -------------------------------
- # hunter
- # -------------------------------
- - armband-baremetal:
- deploy-type: 'baremetal'
- slave-label: 'armband-{deploy-type}'
- <<: *hunter
- # -------------------------------
- # master
- # -------------------------------
- - armband-baremetal:
- deploy-type: 'baremetal'
- slave-label: 'armband-{deploy-type}'
- <<: *master
- # -------------------------------
- # scenarios
- # -------------------------------
- scenario:
- # HA scenarios
- - 'os-nosdn-nofeature-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger'
- - 'os-odl-nofeature-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger'
- - 'os-ovn-nofeature-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-ovs-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger'
-
- # NOHA scenarios
- - 'os-nosdn-nofeature-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger'
- - 'os-nosdn-fdio-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-{stream}-trigger'
-
- jobs:
- - 'fuel-{scenario}-{pod}-daily-{stream}'
- - 'fuel-deploy-{pod}-daily-{stream}'
-
-########################
-# job templates: Armband uses Fuel's job templates, no need to duplicate
-########################
-
-########################
-# trigger macros
-########################
-# CI PODs
-# ----------------------------------------------------------------
-# Enea Armband CI Baremetal Triggers running against master branch
-# ----------------------------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-master-trigger'
- triggers:
- - timed: '0 1 * * 7'
-- trigger:
- name: 'fuel-os-odl-nofeature-ha-armband-baremetal-master-trigger'
- triggers:
- - timed: '0 1 * * 4,6'
-- trigger:
- name: 'fuel-os-ovn-nofeature-ha-armband-baremetal-master-trigger'
- triggers:
- - timed: '0 1 * * 2'
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-armband-baremetal-master-trigger'
- triggers:
- - timed: '0 1 * * 1'
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-armband-baremetal-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-fdio-noha-armband-baremetal-master-trigger'
- triggers:
- - timed: '0 1 * * 3,5'
-# ---------------------------------------------------------------------
-# Enea Armband CI Baremetal Triggers running against hunter branch
-# ---------------------------------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-hunter-trigger'
- triggers:
- - timed: '10 1 * * 1'
-- trigger:
- name: 'fuel-os-odl-nofeature-ha-armband-baremetal-hunter-trigger'
- triggers:
- - timed: '10 1 * * 7'
-- trigger:
- name: 'fuel-os-ovn-nofeature-ha-armband-baremetal-hunter-trigger'
- triggers:
- - timed: '10 1 * * 2'
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-armband-baremetal-hunter-trigger'
- triggers:
- - timed: '10 1 * * 4,6'
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-armband-baremetal-hunter-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-fdio-noha-armband-baremetal-hunter-trigger'
- triggers:
- - timed: '10 1 * * 3,5'
diff --git a/jjb/armband/armband-rtd-jobs.yaml b/jjb/armband/armband-rtd-jobs.yaml
deleted file mode 100644
index 70d2f1171..000000000
--- a/jjb/armband/armband-rtd-jobs.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-- project:
- name: armband-rtd
- project: armband
- project-name: armband
-
- project-pattern: 'armband'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-armband/47349/'
- rtd-token: 'b8f7de9a1f2baf063ccc6afb52dbc8e6308b6ab5'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - gambia:
- branch: 'stable/{stream}'
- disabled: true
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/armband/armband-verify-jobs.yaml b/jjb/armband/armband-verify-jobs.yaml
deleted file mode 100644
index d7a646ad0..000000000
--- a/jjb/armband/armband-verify-jobs.yaml
+++ /dev/null
@@ -1,100 +0,0 @@
----
-- project:
- name: 'armband-verify-jobs'
-
- project: 'armband'
-
- #####################################
- # branch definitions
- #####################################
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - gambia:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: true
- #####################################
- # jobs
- #####################################
- jobs:
- - 'armband-verify-{stream}'
-
-#####################################
-# job templates
-#####################################
-- job-template:
- name: 'armband-verify-{stream}'
-
- disabled: true
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 2
- option: 'project'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'ci/**'
- - compare-type: ANT
- pattern: 'patches/**'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
- readable-message: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-arm-defaults'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - 'armband-verify-builder-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'armband-verify-builder-macro'
- builders:
- - shell: |
- #!/bin/bash
- make fuel-patches-import
diff --git a/jjb/availability/availability-rtd-jobs.yaml b/jjb/availability/availability-rtd-jobs.yaml
deleted file mode 100644
index 8488ea781..000000000
--- a/jjb/availability/availability-rtd-jobs.yaml
+++ /dev/null
@@ -1,25 +0,0 @@
----
-- project:
- name: availability-rtd
- project: availability
- project-name: availability
-
- gerrit-skip-vote: true
- project-pattern: 'availability'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-availability/47351/'
- rtd-token: '3ae514b14073e1eacb697d3eddee62a26c8c891c'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - hunter: &hunter
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- - gambia:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/availability/availability.yaml b/jjb/availability/availability.yaml
deleted file mode 100644
index 2d3473499..000000000
--- a/jjb/availability/availability.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- project:
- name: availability
-
- project: '{name}'
-
- jobs:
- - '{project}-verify-basic'
diff --git a/jjb/barometer/barometer-build.sh b/jjb/barometer/barometer-build.sh
deleted file mode 100644
index 5f78aae7a..000000000
--- a/jjb/barometer/barometer-build.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-set -x
-
-OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-OPNFV_ARTIFACT_URL="$GS_URL/$OPNFV_ARTIFACT_VERSION/"
-
-# log info to console
-echo "Starting the build of Barometer RPMs"
-echo "------------------------------------"
-echo
-
-cd ci
-./install_dependencies.sh
-./build_rpm.sh
-cp utility/rpms_list $WORKSPACE
-cd $WORKSPACE
-
-# save information regarding artifact into file
-(
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_ARTIFACT_URL=$OPNFV_ARTIFACT_URL"
-) > $WORKSPACE/opnfv.properties
-
diff --git a/jjb/barometer/barometer-rtd-jobs.yaml b/jjb/barometer/barometer-rtd-jobs.yaml
deleted file mode 100644
index a8ec686a4..000000000
--- a/jjb/barometer/barometer-rtd-jobs.yaml
+++ /dev/null
@@ -1,28 +0,0 @@
----
-- project:
- name: barometer-rtd
- project: barometer
- project-name: barometer
-
- project-pattern: 'barometer'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-barometer/47353/'
- rtd-token: 'aef70b8a0148b295e25dd92474110bcd622bacb0'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - iruya: &iruya
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- - hunter:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- - gambia:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/barometer/barometer-upload-artifact.sh b/jjb/barometer/barometer-upload-artifact.sh
deleted file mode 100644
index f05dc2af8..000000000
--- a/jjb/barometer/barometer-upload-artifact.sh
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/bin/bash
-set -o nounset
-set -o pipefail
-
-RPM_LIST=$WORKSPACE/rpms_list
-RPM_WORKDIR=$WORKSPACE/rpmbuild
-RPM_DIR=$RPM_WORKDIR/RPMS/x86_64/
-cd $WORKSPACE/
-
-# source the opnfv.properties to get ARTIFACT_VERSION
-source $WORKSPACE/opnfv.properties
-
-# Check if all the appropriate RPMs were generated
-echo "Checking if all the Barometer RPMs were created"
-echo "-----------------------------------------------"
-echo
-
-if [ -d $RPM_DIR ]
-then
- ls $RPM_DIR > list_of_gen_pack
-else
- echo "Can't access folder $RPM_DIR with rpm packages"
- echo "Barometer nightly build FAILED"
- exit 1
-fi
-
-for PACKAGENAME in `cat $RPM_LIST`
-do
- if ! grep -q $PACKAGENAME list_of_gen_pack
- then
- echo "$PACKAGENAME is missing"
- echo "Barometer nightly build FAILED"
- exit 2
- fi
-done
-
-#remove the file you no longer need.
-rm list_of_gen_pack
-
-echo "Uploading the barometer RPMs to artifacts.opnfv.org"
-echo "---------------------------------------------------"
-echo
-
-gsutil -m cp -r $RPM_DIR/* gs://$OPNFV_ARTIFACT_URL > $WORKSPACE/gsutil.log 2>&1
-
-# Check if the RPMs were pushed
-gsutil ls gs://$OPNFV_ARTIFACT_URL > /dev/null 2>&1
-if [[ $? -ne 0 ]]; then
- echo "Problem while uploading barometer RPMs to gs://$OPNFV_ARTIFACT_URL!"
- echo "Check log $WORKSPACE/gsutil.log on the appropriate build server"
- exit 1
-else
- # upload property files only if build is successful
- gsutil cp $WORKSPACE/opnfv.properties gs://$OPNFV_ARTIFACT_URL/opnfv.properties > gsutil.properties.log 2>&1
- gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
-fi
-
-gsutil -m setmeta \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$OPNFV_ARTIFACT_URL/*.rpm > /dev/null 2>&1
-
-gsutil -m setmeta \
- -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/latest.properties \
- gs://$OPNFV_ARTIFACT_URL/opnfv.properties > /dev/null 2>&1
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
-echo "Artifact is available at $OPNFV_ARTIFACT_URL"
-
-#cleanup the RPM repo from the build machine.
-rm -rf $RPM_WORKDIR
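
Before uploading, the script above verifies that every package named in rpms_list has a corresponding RPM under rpmbuild/RPMS/x86_64 and fails the nightly build otherwise. The same completeness check, sketched in Python with paths assumed relative to the workspace:

    from pathlib import Path

    expected = Path('rpms_list').read_text().split()
    produced = [p.name for p in Path('rpmbuild/RPMS/x86_64').glob('*.rpm')]

    # Mirror the script's grep: a package counts as present when any built
    # RPM filename contains its name as a substring.
    missing = [name for name in expected
               if not any(name in rpm for rpm in produced)]
    if missing:
        raise SystemExit(f"Barometer nightly build FAILED, missing: {missing}")
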
diff --git a/jjb/barometer/barometer.yaml b/jjb/barometer/barometer.yaml
deleted file mode 100644
index 5ebab5a0c..000000000
--- a/jjb/barometer/barometer.yaml
+++ /dev/null
@@ -1,161 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: barometer
-
- project: '{name}'
-
- jobs:
- - 'barometer-verify-{stream}'
- - 'barometer-merge-{stream}'
- - 'barometer-daily-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - iruya: &iruya
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-- job-template:
- name: 'barometer-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'lf-build2-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
- builders:
- - shell: |
- pwd
- cd src
- make clobber
- make
-
-- job-template:
- name: 'barometer-merge-{stream}'
-
- project-type: freestyle
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 3
- max-per-node: 2
- option: 'project'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'lf-build2-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
-
- builders:
- - shell: |
- pwd
- cd src
- make clobber
- make
-
-- job-template:
- name: 'barometer-daily-{stream}'
-
- project-type: freestyle
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - barometer-project-parameter:
- gs-pathname: '{gs-pathname}'
- - 'opnfv-build-centos-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - timed: '@midnight'
-
- builders:
- - shell: !include-raw-escape: ./barometer-build.sh
- - shell: !include-raw-escape: ./barometer-upload-artifact.sh
-
-########################
-# parameter macros
-########################
-- parameter:
- name: barometer-project-parameter
- parameters:
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "URL to Google Storage."
diff --git a/jjb/bottlenecks/bottlenecks-cleanup.sh b/jjb/bottlenecks/bottlenecks-cleanup.sh
deleted file mode 100644
index d0e2088c7..000000000
--- a/jjb/bottlenecks/bottlenecks-cleanup.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-# Clean up the related Docker containers and their images
-bash $WORKSPACE/docker/docker_cleanup.sh -d bottlenecks --debug
-bash $WORKSPACE/docker/docker_cleanup.sh -d Bottlenecks --debug
-bash $WORKSPACE/docker/docker_cleanup.sh -d yardstick --debug
-bash $WORKSPACE/docker/docker_cleanup.sh -d kibana --debug
-bash $WORKSPACE/docker/docker_cleanup.sh -d elasticsearch --debug
-bash $WORKSPACE/docker/docker_cleanup.sh -d influxdb --debug
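
docker_cleanup.sh itself is not part of this change; judging from the -d <name> arguments, each call tears down the containers associated with the named tool. A hypothetical sketch of that kind of keyword-based cleanup with the Docker CLI (the real helper may also remove images and do more):

    import subprocess

    KEYWORDS = ('bottlenecks', 'yardstick', 'kibana', 'elasticsearch', 'influxdb')

    for keyword in KEYWORDS:
        # Find containers whose name contains the keyword, then force-remove them
        ids = subprocess.run(
            ['docker', 'ps', '-aq', '--filter', f'name={keyword}'],
            capture_output=True, text=True, check=False,
        ).stdout.split()
        for cid in ids:
            subprocess.run(['docker', 'rm', '-f', cid], check=False)
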
diff --git a/jjb/bottlenecks/bottlenecks-project-jobs.yaml b/jjb/bottlenecks/bottlenecks-project-jobs.yaml
deleted file mode 100644
index 456181083..000000000
--- a/jjb/bottlenecks/bottlenecks-project-jobs.yaml
+++ /dev/null
@@ -1,222 +0,0 @@
----
-###################################################
-# Non-CI jobs for the Bottlenecks project
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: bottlenecks-project-jobs
-
- project: 'bottlenecks'
-
- jobs:
- - 'bottlenecks-verify-{stream}'
- - 'bottlenecks-merge-{stream}'
- - 'bottlenecks-{suite}-upload-artifacts-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- # This is used for common project file storage
- gs-pathname: ''
-      # This is used for storing test-suite-dependent packages
- gs-packagepath: '/{suite}'
- disabled: false
- - hunter: &hunter
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- gs-packagepath: '/{stream}/{suite}'
- disabled: false
-
- suite:
- - 'posca_stress_traffic'
- - 'posca_stress_ping'
-
-################################
-# job templates
-################################
-
-- job-template:
- name: 'bottlenecks-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- builders:
- - bottlenecks-unit-tests
-
-- job-template:
- name: 'bottlenecks-merge-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- builders:
- - bottlenecks-hello
-
-- job-template:
- name: 'bottlenecks-{suite}-upload-artifacts-{stream}'
-
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - bottlenecks-parameter:
- gs-packagepath: '{gs-packagepath}'
-
- scm:
- - git-scm
-
- builders:
- - 'bottlenecks-builder-upload-artifact'
- - 'bottlenecks-artifact-workspace-cleanup'
-
-####################
-# parameter macros
-####################
-- parameter:
- name: bottlenecks-parameter
- parameters:
- - string:
- name: CACHE_DIR
- default: $WORKSPACE/cache{gs-packagepath}
-        description: "the cache used to store packages downloaded from the public IP"
- - string:
- name: SUITE_URL
- default: gs://artifacts.opnfv.org/bottlenecks{gs-packagepath}
-        description: "LF artifacts URL for storing Bottlenecks packages"
- - string:
- name: PACKAGE_URL
- default: http://205.177.226.237:9999/bottlenecks{gs-packagepath}/
-        description: "the URL where the packages used for Bottlenecks\
-          \ rubbos are stored"
-
-####################################
-# builders for bottlenecks project
-####################################
-- builder:
- name: bottlenecks-builder-upload-artifact
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
-
-      echo "Bottlenecks: uploading packages from the public IP to artifacts"
-
- [[ -d $CACHE_DIR ]] || mkdir -p $CACHE_DIR
-
- for file in $(curl -s $PACKAGE_URL |
- grep href |
- sed 's/.*href="//' |
- sed 's/".*//' |
- grep '^[a-zA-Z].*'); do
- curl --connect-timeout 10 -o $CACHE_DIR/$file $PACKAGE_URL$file -v
- echo "bottlenecks: copy file $CACHE_DIR/$file to $SUITE_URL"
- gsutil cp $CACHE_DIR/$file $SUITE_URL
- done
-
-- builder:
- name: bottlenecks-artifact-workspace-cleanup
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
-
-      echo "Bottlenecks: cleaning up the cache used for storing downloaded packages"
-
- /bin/rm -rf $CACHE_DIR
-
-- builder:
- name: bottlenecks-unit-tests
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
-
- echo "Running unit tests..."
- cd $WORKSPACE
- virtualenv $WORKSPACE/bottlenecks_venv
- source $WORKSPACE/bottlenecks_venv/bin/activate
-
- # install python packages
- easy_install -U setuptools
- easy_install -U pip
- pip install -r $WORKSPACE/requirements/verify.txt
-
- # unit tests
- /bin/bash $WORKSPACE/verify.sh
-
- deactivate
-
-- builder:
- name: bottlenecks-hello
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
-
-      echo -e "Welcome to Bottlenecks! \nThe merge event is planned to support more functions! "
diff --git a/jjb/bottlenecks/bottlenecks-rtd-jobs.yaml b/jjb/bottlenecks/bottlenecks-rtd-jobs.yaml
deleted file mode 100644
index 57ecd802c..000000000
--- a/jjb/bottlenecks/bottlenecks-rtd-jobs.yaml
+++ /dev/null
@@ -1,21 +0,0 @@
----
-- project:
- name: bottlenecks-rtd
- project: bottlenecks
- project-name: bottlenecks
-
- gerrit-skip-vote: true
- project-pattern: 'bottlenecks'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-bottlenecks/47355/'
- rtd-token: '95dd0dbdde4a219b5196ffb86e15401b7b927885'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - hunter:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/bottlenecks/bottlenecks-run-suite.sh b/jjb/bottlenecks/bottlenecks-run-suite.sh
deleted file mode 100644
index 2c044eaa7..000000000
--- a/jjb/bottlenecks/bottlenecks-run-suite.sh
+++ /dev/null
@@ -1,125 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-#set -e
-[[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-BOTTLENECKS_IMAGE=opnfv/bottlenecks
-REPORT="True"
-
-RELENG_REPO=${WORKSPACE}/releng
-[ -d ${RELENG_REPO} ] && rm -rf ${RELENG_REPO}
-git clone https://gerrit.opnfv.org/gerrit/releng ${RELENG_REPO} >${redirect}
-
-YARDSTICK_REPO=${WORKSPACE}/yardstick
-[ -d ${YARDSTICK_REPO} ] && rm -rf ${YARDSTICK_REPO}
-git clone https://gerrit.opnfv.org/gerrit/yardstick ${YARDSTICK_REPO} >${redirect}
-
-OPENRC=/tmp/admin_rc.sh
-OS_CACERT=/tmp/os_cacert
-
-BOTTLENECKS_CONFIG=/tmp
-KUBESTONE_TEST_DIR=/home/opnfv/bottlenecks/testsuites/kubestone/testcases
-
-# Pull the Bottlenecks Docker image and pass environment variables
-echo "INFO: pulling Bottlenecks Docker image ${DOCKER_TAG}"
-docker pull opnfv/bottlenecks:${DOCKER_TAG} >$redirect
-
-opts="--privileged=true -id"
-envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
- -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NET=${EXTERNAL_NETWORK} \
- -e BRANCH=${BRANCH} -e GERRIT_REFSPEC_DEBUG=${GERRIT_REFSPEC_DEBUG} \
- -e BOTTLENECKS_DB_TARGET=${BOTTLENECKS_DB_TARGET} -e PACKAGE_URL=${PACKAGE_URL} \
- -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} -e BUILD_TAG=${BUILD_TAG}"
-docker_volume="-v /var/run/docker.sock:/var/run/docker.sock -v /tmp:/tmp"
-
-cmd="docker run ${opts} ${envs} --name bottlenecks-load-master ${docker_volume} opnfv/bottlenecks:${DOCKER_TAG} /bin/bash"
-echo "BOTTLENECKS INFO: running docker run command: ${cmd}"
-${cmd} >$redirect
-sleep 5
-
-# Run test suite
-if [[ $SUITE_NAME == *posca* ]]; then
- POSCA_SCRIPT=/home/opnfv/bottlenecks/testsuites/posca
- sudo rm -f ${OPENRC}
-
- if [[ -f ${OPENRC} ]]; then
- echo "BOTTLENECKS INFO: openstack credentials path is ${OPENRC}"
- cat ${OPENRC}
- else
-        echo "BOTTLENECKS ERROR: couldn't find openstack rc file: ${OPENRC}, please check that it has been properly provided."
- exit 1
- fi
-
-    # Finding and creating POD description files for different deployments
- ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
-
- if [ "$INSTALLER_TYPE" == "fuel" ]; then
- echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
- sshpass -p r00tme sudo scp $ssh_options root@${INSTALLER_IP}:~/.ssh/id_rsa ${BOTTLENECKS_CONFIG}/id_rsa
- fi
-
- if [ "$INSTALLER_TYPE" == "apex" ]; then
- echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
- sudo scp $ssh_options stack@${INSTALLER_IP}:~/.ssh/id_rsa ${BOTTLENECKS_CONFIG}/id_rsa
- fi
-
- set +e
-
- sudo -H pip install virtualenv
-
- cd ${RELENG_REPO}/modules
- sudo virtualenv venv
- source venv/bin/activate
- sudo -H pip install -e ./ >/dev/null
- sudo -H pip install netaddr
-
- if [[ ${INSTALLER_TYPE} == fuel ]]; then
- options="-u root -p r00tme"
- elif [[ ${INSTALLER_TYPE} == apex ]]; then
- options="-u stack -k /root/.ssh/id_rsa"
- else
-        echo "Generating pod.yaml on ${INSTALLER_TYPE} is currently unsupported."
- fi
-
- deactivate
-
- sudo rm -rf ${RELENG_REPO}/modules/venv
- sudo rm -rf ${RELENG_REPO}/modules/opnfv.egg-info
-
- set -e
-
- cd ${WORKSPACE}
-
- if [ -f ${BOTTLENECKS_CONFIG}/pod.yaml ]; then
- echo "FILE: ${BOTTLENECKS_CONFIG}/pod.yaml:"
- cat ${BOTTLENECKS_CONFIG}/pod.yaml
- else
-        echo "ERROR: cannot find file ${BOTTLENECKS_CONFIG}/pod.yaml. Please check whether it exists."
- sudo ls -al ${BOTTLENECKS_CONFIG}
- fi
-
- # Running test cases through Bottlenecks docker
- if [[ $SUITE_NAME == posca_stress_traffic ]]; then
- TEST_CASE=posca_factor_system_bandwidth
- elif [[ $SUITE_NAME == posca_stress_ping ]]; then
- TEST_CASE=posca_factor_ping
- else
- TEST_CASE=$SUITE_NAME
- fi
- testcase_cmd="docker exec bottlenecks-load-master python ${POSCA_SCRIPT}/../run_testsuite.py testcase $TEST_CASE $REPORT"
- echo "BOTTLENECKS INFO: running test case ${TEST_CASE} with report indicator: ${testcase_cmd}"
- ${testcase_cmd} >$redirect
-elif [[ $SUITE_NAME == *kubestone* ]]; then
- if [[ $SUITE_NAME == kubestone_deployment_capacity ]]; then
- TEST_CASE=${KUBESTONE_TEST_DIR}/deployment_capacity.yaml
- fi
- testcase_cmd="docker exec bottlenecks-load-master python ${KUBESTONE_TEST_DIR}/../stress_test.py -c $TEST_CASE"
- echo "BOTTLENECKS INFO: running test case ${TEST_CASE} with report indicator: ${testcase_cmd}"
- ${testcase_cmd} >$redirect
-fi
diff --git a/jjb/calipso/calipso-rtd-jobs.yaml b/jjb/calipso/calipso-rtd-jobs.yaml
deleted file mode 100644
index e6b61d768..000000000
--- a/jjb/calipso/calipso-rtd-jobs.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
----
-- project:
- name: calipso-rtd
- project: calipso
- project-name: calipso
-
- gerrit-skip-vote: true
- project-pattern: 'calipso'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-calipso/47356/'
- rtd-token: '9b88e25a769998fc316b25efe15eca2b7c1474f4'
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/calipso/calipso.yaml b/jjb/calipso/calipso.yaml
deleted file mode 100644
index 7ecabdf29..000000000
--- a/jjb/calipso/calipso.yaml
+++ /dev/null
@@ -1,65 +0,0 @@
----
-- project:
- name: calipso
-
- project: '{name}'
-
- jobs:
- - 'calipso-verify-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-- job-template:
- name: 'calipso-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-centos-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- builders:
- - verify-unit-tests
-
-- builder:
- name: verify-unit-tests
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o nounset
- set -o pipefail
- cd $WORKSPACE
- PYTHONPATH=$PWD api/test/verify.sh
diff --git a/jjb/ci_gate_security/anteater-report-to-gerrit.sh b/jjb/ci_gate_security/anteater-report-to-gerrit.sh
deleted file mode 100644
index 51a8ac47b..000000000
--- a/jjb/ci_gate_security/anteater-report-to-gerrit.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-set -o pipefail
-export PATH=$PATH:/usr/local/bin/
-EXITSTATUS=0
-
-# This log should always exist
-if [[ -e securityaudit.log ]] ; then
-
-    # check if the log has errors
- if grep ERROR securityaudit.log; then
- EXITSTATUS=1
- fi
-
- grep 'ERROR' securityaudit.log | awk -F"ERROR - " '{ print $2 }' | tr -d "\'\"" > shortlog
-
- # Only report to Gerrit when there are errors to report.
- if [[ -s shortlog ]]; then
- echo -e "\nposting security audit report to gerrit...\n"
- ssh -p 29418 gerrit.opnfv.org \
- "gerrit review -p $GERRIT_PROJECT \
- -m \"$(cat shortlog)\" \
- $GERRIT_PATCHSET_REVISION \
- --notify NONE"
- fi
-
- exit $EXITSTATUS
-fi
diff --git a/jjb/ci_gate_security/anteater-security-audit-weekly.sh b/jjb/ci_gate_security/anteater-security-audit-weekly.sh
deleted file mode 100644
index 6caa13117..000000000
--- a/jjb/ci_gate_security/anteater-security-audit-weekly.sh
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2017 The Linux Foundation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-ANTEATER_SCAN_PATCHSET="${ANTEATER_SCAN_PATCHSET:-true}"
-
-cd $WORKSPACE
-REPORTDIR='.reports'
-mkdir -p $REPORTDIR
-# Ensure any user can read the reports directory
-chmod 777 $REPORTDIR
-
-ANTEATER_FILES="--patchset /home/opnfv/anteater/$PROJECT/patchset"
-
-if [[ "$ANTEATER_SCAN_PATCHSET" == "true" ]]; then
- echo "Generating patchset file to list changed files"
- git diff HEAD^1 --name-only | sed "s#^#/home/opnfv/anteater/$PROJECT/#" > $WORKSPACE/patchset
- echo "Changed files are"
- echo "--------------------------------------------------------"
- cat $WORKSPACE/patchset
- echo "--------------------------------------------------------"
-else
- echo "Checking full project $PROJECT"
- ANTEATER_FILES="--path /home/opnfv/anteater/$PROJECT"
-fi
-
-vols="-v $WORKSPACE:/home/opnfv/anteater/$PROJECT -v $WORKSPACE/$REPORTDIR:/home/opnfv/anteater/$REPORTDIR"
-envs="-e PROJECT=$PROJECT"
-
-echo "Pulling releng-anteater docker image"
-echo "--------------------------------------------------------"
-docker pull opnfv/releng-anteater
-echo "--------------------------------------------------------"
-
-cmd="docker run -i $envs $vols --rm opnfv/releng-anteater \
-/home/opnfv/venv/bin/anteater --project $PROJECT $ANTEATER_FILES"
-echo "Running docker container"
-echo "$cmd"
-$cmd > $WORKSPACE/securityaudit.log 2>&1
-exit_code=$?
-echo "--------------------------------------------------------"
-echo "Docker container exited with code: $exit_code"
-echo "--------------------------------------------------------"
-exit 0
diff --git a/jjb/ci_gate_security/anteater-security-audit.sh b/jjb/ci_gate_security/anteater-security-audit.sh
deleted file mode 100644
index 8a170b044..000000000
--- a/jjb/ci_gate_security/anteater-security-audit.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/bash
-cd $WORKSPACE
-REPORTDIR='.reports'
-mkdir -p $REPORTDIR
-# Ensure any user can read the reports directory
-chmod 777 $REPORTDIR
-echo "Generating patchset file to list changed files"
-git diff HEAD^1 --name-only | sed "s#^#/home/opnfv/anteater/$PROJECT/#" > $WORKSPACE/patchset
-echo "Changed files are"
-echo "--------------------------------------------------------"
-cat $WORKSPACE/patchset
-echo "--------------------------------------------------------"
-
-vols="-v $WORKSPACE:/home/opnfv/anteater/$PROJECT -v $WORKSPACE/$REPORTDIR:/home/opnfv/anteater/$REPORTDIR"
-envs="-e PROJECT=$PROJECT"
-
-echo "Pulling releng-anteater docker image"
-echo "--------------------------------------------------------"
-docker pull opnfv/releng-anteater
-echo "--------------------------------------------------------"
-
-cmd="docker run -i $envs $vols --rm opnfv/releng-anteater \
-/home/opnfv/venv/bin/anteater --project $PROJECT --patchset /home/opnfv/anteater/$PROJECT/patchset"
-echo "Running docker container"
-echo "$cmd"
-$cmd > $WORKSPACE/securityaudit.log 2>&1
-exit_code=$?
-echo "--------------------------------------------------------"
-echo "Docker container exited with code: $exit_code"
-echo "--------------------------------------------------------"
-cat securityaudit.log
-exit 0
diff --git a/jjb/ci_gate_security/opnfv-ci-gate-security.yaml b/jjb/ci_gate_security/opnfv-ci-gate-security.yaml
deleted file mode 100644
index 1f75829a7..000000000
--- a/jjb/ci_gate_security/opnfv-ci-gate-security.yaml
+++ /dev/null
@@ -1,190 +0,0 @@
----
-# SPDX-license-identifier: Apache-2.0
-########################
-# Job configuration for opnfv-anteater (security audit)
-########################
-- project:
-
- name: anteaterfw
-
- project: anteaterfw
-
- repo:
- - apex
- - apex-os-net-config
- - apex-puppet-tripleo
- - apex-tripleo-heat-templates
- - armband
- - auto
- - availability
- - bamboo
- - barometer
- - bottlenecks
- - calipso
- - clover
- - container4nfv
- - cperf
- - daisy
- - doctor
- - domino
- - dovetail
- - dpacc
- - enfv
- - fds
- - fuel
- - ipv6
- - joid
- - kvmfornfv
- - models
- - moon
- - nfvbench
- - onosfw
- - opera
- - opnfvdocs
- - orchestra
- - ovn4nfv
- - ovno
- - ovsnfv
- - parser
- - pharos
- - qtip
- - releng
- - releng-anteater
- - releng-testresults
- - releng-utils
- - releng-xci
- - samplevnf
- - sdnvpn
- - securityscanning
- - sfc
- - snaps
- - stor4nfv
- - storperf
- - ves
- - vswitchperf
- - yardstick
-
- jobs:
- - 'opnfv-security-audit-verify-{stream}'
- - 'opnfv-security-audit-{repo}-weekly-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
-########################
-# job templates
-########################
-- job-template:
- name: 'opnfv-security-audit-{repo}-weekly-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - opnfv-build-defaults
- - string:
- name: ANTEATER_SCAN_PATCHSET
- default: "false"
-        description: "Have anteater scan patchsets (true) or the full project (false)"
- - project-parameter:
- project: '{repo}'
- branch: '{branch}'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - timed: '@weekly'
-
- builders:
- - anteater-security-audit-weekly
-
- publishers:
- # defined in jjb/global/releng-macros.yml
- - 'email-{repo}-ptl':
- subject: 'OPNFV Security Scan Result: {repo}'
- - workspace-cleanup:
- fail-build: false
-
-- job-template:
- name: 'opnfv-security-audit-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- # yamllint disable rule:line-length
-        description: "Used for overriding the GIT URL coming from the Global Jenkins configuration in case the job is run on non-LF hardware."
- # yamllint enable rule:line-length
-
- scm:
- - git-scm-gerrit
-
- # yamllint disable rule:line-length
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: 'apex|armband|bamboo|barometer|bottlenecks|calipso|cperf|daisy|doctor|dovetail|dpacc|enfv|fds|fuel|pharos|releng|sandbox|yardstick|infra|ipv6|kvmfornfv|models|moon'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '**'
- skip-vote:
- successful: true
- failed: true
- unstable: true
- notbuilt: true
- # yamllint enable rule:line-length
-
- builders:
- - anteater-security-audit
- - report-security-audit-result-to-gerrit
- publishers:
- - archive-artifacts:
- artifacts: ".reports/*"
-
-########################
-# builder macros
-########################
-- builder:
- name: anteater-security-audit
- builders:
- - shell:
- !include-raw: ./anteater-security-audit.sh
-
-- builder:
- name: report-security-audit-result-to-gerrit
- builders:
- - shell:
- !include-raw: ./anteater-report-to-gerrit.sh
-
-- builder:
- name: anteater-security-audit-weekly
- builders:
- - shell:
- !include-raw: ./anteater-security-audit-weekly.sh
diff --git a/jjb/clover/clover-project.yaml b/jjb/clover/clover-project.yaml
deleted file mode 100644
index f75a59e80..000000000
--- a/jjb/clover/clover-project.yaml
+++ /dev/null
@@ -1,172 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: clover
-
- project: '{name}'
-
- jobs:
- - 'clover-verify-{stream}'
- - 'clover-daily-upload-{stream}'
- - 'clover-daily-deploy-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - hunter:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-- job-template:
- name: 'clover-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- # yamllint disable rule:line-length
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-        description: "Used for overriding the GIT URL coming from the Global\
-          \ Jenkins configuration in case the job is run on non-LF hardware."
- # yamllint enable rule:line-length
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
- builders:
- - shell: |
- cd $WORKSPACE/ci
- ./verify.sh
-
-- job-template:
- name: 'clover-daily-upload-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- scm:
- - git-scm
-
- triggers:
- - timed: '0 8 * * *'
-
- wrappers:
- - fix-workspace-permissions
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - 'clover-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - shell: |
- cd $WORKSPACE/ci
- ./upload.sh
-
-- job-template:
- name: 'clover-daily-deploy-{stream}'
-
- project-type: freestyle
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'clover-daily-deploy-.*?'
- - 'container4nfv-daily-deploy-.*?'
- blocking-level: 'NODE'
-
- wrappers:
- - timeout:
- timeout: 180
- abort: true
- - fix-workspace-permissions
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- # yamllint disable rule:line-length
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-        description: "Used for overriding the GIT URL coming from the Global\
-          \ Jenkins configuration in case the job is run on non-LF hardware."
- # yamllint enable rule:line-length
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-virtual6
- default-slaves:
- - huawei-virtual6
-
- scm:
- - git-scm
-
- triggers:
- - timed: '0 9 * * *'
-
- builders:
- - shell: |
- cd $WORKSPACE/ci
- ./deploy.sh
-
-###################
-# parameter macros
-###################
-- parameter:
- name: 'clover-defaults'
- parameters:
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/clover/clover-rtd-jobs.yaml b/jjb/clover/clover-rtd-jobs.yaml
deleted file mode 100644
index d768c7cd7..000000000
--- a/jjb/clover/clover-rtd-jobs.yaml
+++ /dev/null
@@ -1,21 +0,0 @@
----
-- project:
- name: clover-rtd
- project: clover
- project-name: clover
-
- gerrit-skip-vote: true
- project-pattern: 'clover'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-clover/47357/'
- rtd-token: '8b47c0a3c1cfe7de885bf217628b58dd91f14f2e'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - hunter:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/clover/clover-views.yaml b/jjb/clover/clover-views.yaml
deleted file mode 100644
index 15a05e60a..000000000
--- a/jjb/clover/clover-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: clover-view
- views:
- - project-view
- project-name: clover
diff --git a/jjb/cntt/cntt.yaml b/jjb/cntt/cntt.yaml
new file mode 100644
index 000000000..9a60cf1f3
--- /dev/null
+++ b/jjb/cntt/cntt.yaml
@@ -0,0 +1,97 @@
+---
+- builder:
+ name: cntt-tox
+ builders:
+ - shell: |
+ set +x
+ if [ -d {dir} ] && [ -f {dir}/tox.ini ]; then
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install tox texlive \
+ texlive-latex-extra latexmk pandoc -y
+ (cd {dir} && tox
+ wget -q -N https://dl.min.io/client/mc/release/linux-amd64/mc && chmod a+x mc
+ ./mc cp -r -q build/ opnfv/artifacts.opnfv.org/{stream}/$BUILD_TAG
+ echo "\nhttps://artifacts.opnfv.org/{stream}/$BUILD_TAG/index.html\n")
+ fi
+ if [ -d {dir}/build ] && [ -f {dir}/build/Makefile ]; then
+ (cd {dir}/build && make
+ wget -q -N https://dl.min.io/client/mc/release/linux-amd64/mc && chmod a+x mc
+ for i in *.pdf; do
+ ./mc cp -r -q $i opnfv/artifacts.opnfv.org/{stream}/$BUILD_TAG
+ echo "\nhttps://artifacts.opnfv.org/{stream}/$BUILD_TAG/$i\n"
+ done)
+ fi
+ if [ -d {dir}/gsma ]; then
+ (cd {dir}/gsma
+ wget -q -N https://dl.min.io/client/mc/release/linux-amd64/mc && chmod a+x mc
+ for i in *.docx *.rst; do
+ ./mc cp -r -q $i opnfv/artifacts.opnfv.org/{stream}/$BUILD_TAG/gsma
+ echo "\nhttps://artifacts.opnfv.org/{stream}/$BUILD_TAG/gsma/$i\n"
+ done)
+ fi
+
+- scm:
+ name: cntt-scm
+ scm:
+ - git:
+ url: https://github.com/cntt-n/CNTT
+ refspec: '+refs/heads/*:refs/remotes/origin/* +refs/pull/*/head:refs/remotes/origin/pr/*'
+
+- parameter:
+ name: cntt-parameter
+ parameters:
+ - label:
+ name: node
+ default: 'opnfv-build'
+
+- job-template:
+ name: cntt-tox-{stream}
+ scm:
+ - cntt-scm
+ triggers:
+ - github
+ parameters:
+ - cntt-parameter
+ builders:
+ - cntt-tox:
+ stream: '{stream}'
+ dir: '{dir}'
+ publishers:
+ - github-notifier
+
+- project:
+ name: cntt
+ stream:
+ - rm:
+ dir: doc/ref_model
+ - ra1:
+ dir: doc/ref_arch/openstack
+ - ra2:
+ dir: doc/ref_arch/kubernetes
+ - rc:
+ dir: doc/ref_cert
+ - rc1:
+ dir: doc/ref_cert/RC1
+ - rc2:
+ dir: doc/ref_cert/RC2
+ - ri1:
+ dir: doc/ref_impl/cntt-ri
+ - ri2:
+ dir: doc/ref_impl/cntt-ri2
+ jobs:
+ - cntt-tox-{stream}
+
+- view:
+ name: cntt
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^cntt.*$
diff --git a/jjb/container4nfv/arm64/deploy-cni.sh b/jjb/container4nfv/arm64/deploy-cni.sh
deleted file mode 100755
index 9afb98048..000000000
--- a/jjb/container4nfv/arm64/deploy-cni.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash -e
-
-cd container4nfv/src/arm/cni-deploy
-
-DEPLOY_SCENARIO={scenario}
-
-virtualenv .venv
-source .venv/bin/activate
-pip install ansible==2.6.1
-
-ansible-playbook -i inventory/inventory.cfg deploy.yml --tags flannel,multus
-
-if [ "$DEPLOY_SCENARIO" == "k8-sriov-nofeature-noha" ]; then
- ansible-playbook -i inventory/inventory.cfg deploy.yml --tags sriov
-elif [ "$DEPLOY_SCENARIO" == "k8-vpp-nofeature-noha" ]; then
- ansible-playbook -i inventory/inventory.cfg deploy.yml --tags vhost-vpp
-fi
diff --git a/jjb/container4nfv/arm64/yardstick-arm64.sh b/jjb/container4nfv/arm64/yardstick-arm64.sh
deleted file mode 100755
index 26c6fdcfe..000000000
--- a/jjb/container4nfv/arm64/yardstick-arm64.sh
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/bin/bash
-set -e
-
-sshpass -p root ssh root@10.1.0.50 \
- "mkdir -p /etc/yardstick; rm -rf /etc/yardstick/admin.conf"
-
-
-sshpass -p root ssh root@10.1.0.50 \
- kubectl config set-cluster yardstick --server=127.0.0.1:8080 --insecure-skip-tls-verify=true --kubeconfig=/etc/yardstick/admin.conf
-sshpass -p root ssh root@10.1.0.50 \
- kubectl config set-context yardstick --cluster=yardstick --kubeconfig=/etc/yardstick/admin.conf
-sshpass -p root ssh root@10.1.0.50 \
- kubectl config use-context yardstick --kubeconfig=/etc/yardstick/admin.conf
-
-
-
-if [ ! -n "$redirect" ]; then
- redirect="/dev/stdout"
-fi
-
-if [ ! -n "$DOCKER_TAG" ]; then
- DOCKER_TAG='latest'
-fi
-
-if [ ! -n "$NODE_NAME" ]; then
- NODE_NAME='arm-virutal03'
-fi
-
-if [ ! -n "$DEPLOY_SCENARIO" ]; then
- DEPLOY_SCENARIO='k8-nosdn-lb-noha_daily'
-fi
-
-if [ ! -n "$YARDSTICK_DB_BACKEND" ]; then
- YARDSTICK_DB_BACKEND='-i 104.197.68.199:8086'
-fi
-
-# Pull the image with correct tag
-DOCKER_REPO='opnfv/yardstick'
-if [ "$(uname -m)" = 'aarch64' ]; then
- DOCKER_REPO="${DOCKER_REPO}_$(uname -m)"
-fi
-echo "Yardstick: Pulling image ${DOCKER_REPO}:${DOCKER_TAG}"
-sshpass -p root ssh root@10.1.0.50 \
- docker pull ${DOCKER_REPO}:$DOCKER_TAG >$redirect
-
-if [ ! -n "$BRANCH" ]; then
- BRANCH=master
-fi
-
-opts="--name=yardstick --privileged=true --net=host -d -it "
-envs="-e YARDSTICK_BRANCH=${BRANCH} -e BRANCH=${BRANCH} \
- -e NODE_NAME=${NODE_NAME} \
- -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO}"
-rc_file_vol="-v /etc/yardstick/admin.conf:/etc/yardstick/admin.conf"
-cacert_file_vol=""
-map_log_dir=""
-sshkey=""
-YARDSTICK_SCENARIO_SUITE_NAME="opnfv_k8-nosdn-lb-noha_daily.yaml"
-
-# map log directory
-branch=${BRANCH##*/}
-#branch="master"
-dir_result="${HOME}/opnfv/yardstick/results/${branch}"
-mkdir -p ${dir_result}
-sudo rm -rf ${dir_result}/*
-map_log_dir="-v ${dir_result}:/tmp/yardstick"
-
-# Run docker
-cmd="docker rm -f yardstick || true"
-sshpass -p root ssh root@10.1.0.50 \
- ${cmd}
-
-cmd="sudo docker run ${opts} ${envs} ${rc_file_vol} ${cacert_file_vol} ${map_log_dir} ${sshkey} ${DOCKER_REPO}:${DOCKER_TAG} /bin/bash"
-echo "Yardstick: Running docker cmd: ${cmd}"
-sshpass -p root ssh root@10.1.0.50 \
- ${cmd}
-
-
-cmd='sudo docker exec yardstick sed -i.bak "/# execute tests/i\sed -i.bak \"s/openretriever\\\/yardstick/openretriever\\\/yardstick_aarch64/g\" \
- $\{YARDSTICK_REPO_DIR\}/tests/opnfv/test_cases/opnfv_yardstick_tc080.yaml" /usr/local/bin/exec_tests.sh'
-sshpass -p root ssh root@10.1.0.50 \
- ${cmd}
-
-echo "Yardstick: run tests: ${YARDSTICK_SCENARIO_SUITE_NAME}"
-cmd="sudo docker exec yardstick exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
-sshpass -p root ssh root@10.1.0.50 \
- ${cmd}
-
-cmd="docker rm -f yardstick"
-sshpass -p root ssh root@10.1.0.50 \
- ${cmd}
-
-echo "Yardstick: done!"
diff --git a/jjb/container4nfv/container4nfv-arm64.yaml b/jjb/container4nfv/container4nfv-arm64.yaml
deleted file mode 100644
index b72c09547..000000000
--- a/jjb/container4nfv/container4nfv-arm64.yaml
+++ /dev/null
@@ -1,35 +0,0 @@
----
-
-- job-template:
- name: 'container4nfv-{scenario}-{pod}-daily-{stream}'
- disabled: '{obj:disabled}'
- node: '{slave-label}'
-
- scm:
- - git:
- url: https://gerrit.opnfv.org/gerrit/container4nfv
- branches:
- - master
- basedir: container4nfv
- wipe-workspace: true
-
- builders:
- - shell: !include-raw: arm64/deploy-cni.sh
-
-
-- trigger:
- name: 'trigger-deploy-virtual-master'
- triggers:
- - timed: '0 12 * * *'
-- trigger:
- name: 'trigger-deploy-virtual-hunter'
- triggers:
- - timed: '0 18 * * *'
-- trigger:
- name: 'trigger-deploy-baremetal-master'
- triggers:
- - timed: '0 12 * * *'
-- trigger:
- name: 'trigger-deploy-baremetal-hunter'
- triggers:
- - timed: '0 18 * * *'
diff --git a/jjb/container4nfv/container4nfv-project.yaml b/jjb/container4nfv/container4nfv-project.yaml
deleted file mode 100644
index ca286bff7..000000000
--- a/jjb/container4nfv/container4nfv-project.yaml
+++ /dev/null
@@ -1,169 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: container4nfv
-
- project: '{name}'
-
- jobs:
- - 'container4nfv-verify-{stream}'
- - 'container4nfv-daily-upload-{stream}'
- - 'container4nfv-daily-deploy-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - hunter:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-- job-template:
- name: 'container4nfv-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- # yamllint disable rule:line-length
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-        description: "Used for overriding the GIT URL coming from the Global\
-          \ Jenkins configuration in case the job is run on non-LF hardware."
- # yamllint enable rule:line-length
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
- builders:
- - shell: |
- cd $WORKSPACE/ci
- ./build.sh
-
-- job-template:
- name: 'container4nfv-daily-upload-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- scm:
- - git-scm
-
- wrappers:
- - fix-workspace-permissions
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - 'container4nfv-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - shell: |
- cd $WORKSPACE/ci
- ./upload.sh
-
-- job-template:
- name: 'container4nfv-daily-deploy-{stream}'
-
- project-type: freestyle
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'clover-daily-deploy-.*?'
- - 'container4nfv-daily-deploy-.*?'
- blocking-level: 'NODE'
-
- wrappers:
- - timeout:
- timeout: 240
- abort: true
- - fix-workspace-permissions
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- # yamllint disable rule:line-length
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-        description: "Used for overriding the GIT URL coming from the Global\
-          \ Jenkins configuration in case the job is run on non-LF hardware."
- # yamllint enable rule:line-length
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-virtual6
- default-slaves:
- - huawei-virtual6
-
- scm:
- - git-scm
-
- triggers:
- - timed: '@midnight'
-
- builders:
- - shell: |
- cd $WORKSPACE/ci
- ./deploy.sh
-
-###################
-# parameter macros
-###################
-- parameter:
- name: 'container4nfv-defaults'
- parameters:
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/container4nfv/container4nfv-rtd-jobs.yaml b/jjb/container4nfv/container4nfv-rtd-jobs.yaml
deleted file mode 100644
index 70512a985..000000000
--- a/jjb/container4nfv/container4nfv-rtd-jobs.yaml
+++ /dev/null
@@ -1,21 +0,0 @@
----
-- project:
- name: container4nfv-rtd
- project: container4nfv
- project-name: container4nfv
-
- gerrit-skip-vote: true
- project-pattern: 'container4nfv'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-container4nfv/47359/'
- rtd-token: '82f917a788d006dc15df14ecd3c991115490bf8a'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - hunter:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/container4nfv/container4nfv-views.yaml b/jjb/container4nfv/container4nfv-views.yaml
deleted file mode 100644
index f57e7f0da..000000000
--- a/jjb/container4nfv/container4nfv-views.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-- project:
- name: container4nfv-view
- views:
- - common-view
- view-name: container4nfv
- view-regex: (^container.*|^yardstick-arm64.*)
diff --git a/jjb/cperf/cirros-upload.yaml.ansible b/jjb/cperf/cirros-upload.yaml.ansible
deleted file mode 100644
index 855bb1f3d..000000000
--- a/jjb/cperf/cirros-upload.yaml.ansible
+++ /dev/null
@@ -1,39 +0,0 @@
----
-- hosts: all
- tasks:
- - copy:
- src: "{{ lookup('env', 'WORKSPACE') }}/{{ item }}"
- dest: "/home/heat-admin/{{ item }}"
- owner: heat-admin
- group: heat-admin
- mode: 0775
- with_items:
- - cirros-0.3.5-x86_64-disk.img
- - overcloudrc
- - name: Upload cirros glance image
- shell: >
- source /home/heat-admin/overcloudrc && openstack image create
- cirros-0.3.5-x86_64-disk --public
- --file /home/heat-admin/cirros-0.3.5-x86_64-disk.img
- --disk-format qcow2 --container-format bare
- - name: Create nano flavor
- shell: >
- source /home/heat-admin/overcloudrc && openstack flavor create
- --id 42 --ram 64 --disk 0 --vcpus 1 m1.nano
- - name: Open CSIT TCP port for netcat
- iptables:
- chain: INPUT
- action: insert
- protocol: tcp
- destination_port: 12345
- jump: ACCEPT
- become: yes
- - name: Open CSIT UDP port for netcat
- iptables:
- chain: INPUT
- action: insert
- protocol: udp
- destination_port: 12345
- jump: ACCEPT
- become: yes
-
diff --git a/jjb/cperf/cperf-ci-jobs.yaml b/jjb/cperf/cperf-ci-jobs.yaml
deleted file mode 100644
index 61bdebd34..000000000
--- a/jjb/cperf/cperf-ci-jobs.yaml
+++ /dev/null
@@ -1,210 +0,0 @@
----
-###################################
-# job configuration for cperf
-###################################
-- project:
- name: cperf-ci-jobs
- project: cperf
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- docker-tag: 'latest'
-
- installer: apex
-
- testsuite:
- - csit
- - cbench
-
- jobs:
- - 'cperf-{installer}-{testsuite}-{stream}'
- - 'cperf-upload-logs-csit'
-
-################################
-# job template
-################################
-- job-template:
- name: 'cperf-{installer}-{testsuite}-{stream}'
-
- concurrent: true
-
- properties:
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER Suite: $CPERF_SUITE_NAME ODL BRANCH: $ODL_BRANCH'
- - timeout:
- timeout: 400
- abort: true
-
- parameters:
- - cperf-parameter:
- testsuite: '{testsuite}'
- gs-pathname: '{gs-pathname}'
- docker-tag: '{docker-tag}'
- stream: '{stream}'
-
- builders:
- - 'cperf-{testsuite}-builder'
-
-- job-template:
- name: 'cperf-upload-logs-csit'
-
- concurrent: true
-
- disabled: false
-
- parameters:
- - cperf-parameter:
- testsuite: 'csit'
- gs-pathname: '{gs-pathname}'
- docker-tag: '{docker-tag}'
- stream: '{stream}'
-
- # yamllint enable rule:line-length
- properties:
- - logrotate-default
- - throttle:
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- builders:
- - 'cperf-upload-logs-csit'
-
-########################
-# parameter macros
-########################
-- parameter:
- name: cperf-parameter
- parameters:
- - string:
- name: CPERF_SUITE_NAME
- default: '{testsuite}'
- description: "Suite name to run"
- - string:
- name: ODL_BRANCH
- default: 'master'
- description: "Branch that OpenDaylight is running"
- - string:
- name: OS_VERSION
- default: 'master'
- description: "OpenStack version (short name, no stable/ prefix)"
- - string:
- name: GS_PATHNAME
- default: '{gs-pathname}'
-        description: "Version directory where the OPNFV documents will be stored in the GS repository"
- - string:
- name: CI_DEBUG
- default: 'false'
- description: "Show debug output information"
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: 'Tag to pull docker image'
- - string:
- name: RC_FILE_PATH
- default: ''
- description: "Path to the OS credentials file if given"
- - string:
- name: SSH_KEY_PATH
- default: ''
- description: "Path to the private SSH key to access OPNFV nodes"
- - string:
- name: NODE_FILE_PATH
- default: ''
- description: "Path to the yaml file describing overcloud nodes"
- - string:
- name: ODL_CONTAINERIZED
- default: 'true'
-        description: "Boolean; set to true if ODL on the overcloud runs as a container"
-
-########################
-# trigger macros
-########################
-
-########################
-# builder macros
-########################
-- builder:
- name: cperf-csit-builder
- builders:
- - 'cperf-cleanup'
- - 'cperf-prepare-robot'
- - 'cperf-robot-netvirt-csit'
-
-- builder:
- name: cperf-cbench-builder
- builders:
- - 'cperf-cleanup'
- - 'cperf-prepare-robot'
- - 'cperf-robot-cbench'
-
-- builder:
- name: cperf-prepare-robot
- builders:
- - shell:
- !include-raw: ./cperf-prepare-robot.sh
-
-- builder:
- name: cperf-robot-cbench
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o nounset
- set -o pipefail
-
- # cbench requires the openflow drop test feature to be installed.
- sshpass -p karaf ssh -o StrictHostKeyChecking=no \
- -o UserKnownHostsFile=/dev/null \
- -o LogLevel=error \
- -p 8101 karaf@$SDN_CONTROLLER_IP \
- feature:install odl-openflowplugin-flow-services-ui odl-openflowplugin-drop-test
-
- robot_cmd="pybot -e exclude -L TRACE -d /tmp \
- -v ODL_SYSTEM_1_IP:${SDN_CONTROLLER_IP} \
- -v ODL_SYSTEM_IP:${SDN_CONTROLLER_IP} \
- -v BUNDLEFOLDER:/opt/opendaylight \
- -v RESTCONFPORT:8081 \
- -v USER_HOME:/tmp \
- -v USER:heat-admin \
- -v ODL_SYSTEM_USER:heat-admin \
- -v TOOLS_SYSTEM_IP:localhost \
- -v of_port:6653"
- robot_suite="/home/opnfv/repos/odl_test/csit/suites/openflowplugin/Performance/010_Cbench.robot"
-
- docker run -i -v /tmp:/tmp opnfv/cperf:$DOCKER_TAG ${robot_cmd} ${robot_suite}
-
-- builder:
- name: cperf-robot-netvirt-csit
- builders:
- - shell:
- !include-raw: ./cperf-robot-netvirt-csit.sh
-
-- builder:
- name: cperf-cleanup
- builders:
- - shell: |
- #!/bin/bash
- [[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
- echo "Cleaning up docker containers/images..."
-      # Remove previously running containers if they exist
- if [[ ! -z $(docker ps -a | grep opnfv/cperf) ]]; then
- echo "Removing existing opnfv/cperf containers..."
- docker ps -a | grep opnfv/cperf | awk '{print $1}' | xargs docker rm -f >${redirect}
- fi
-
-- builder:
- name: cperf-upload-logs-csit
- builders:
- - shell: !include-raw: ./cperf-upload-logs-csit.sh
diff --git a/jjb/cperf/cperf-prepare-robot.sh b/jjb/cperf/cperf-prepare-robot.sh
deleted file mode 100755
index d88c6d510..000000000
--- a/jjb/cperf/cperf-prepare-robot.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-if [ -z ${RC_FILE_PATH+x} ]; then
- undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
- grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
- INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
- sudo scp -o StrictHostKeyChecking=no root@$INSTALLER_IP:/home/stack/overcloudrc /tmp/overcloudrc
-else
- cp -f $RC_FILE_PATH ${WORKSPACE}/overcloudrc
-fi
-
-sudo chmod 755 ${WORKSPACE}/overcloudrc
-source ${WORKSPACE}/overcloudrc
-
-# copy ssh key for robot
-
-if [ -z ${SSH_KEY_PATH+x} ]; then
- sudo scp -o StrictHostKeyChecking=no root@$INSTALLER_IP:/home/stack/.ssh/id_rsa ${WORKSPACE}/
- sudo chown -R jenkins-ci:jenkins-ci ${WORKSPACE}/
- # done with sudo. jenkins-ci is the user from this point
- chmod 0600 ${WORKSPACE}/id_rsa
-else
- cp -f ${SSH_KEY_PATH} ${WORKSPACE}/
-fi
-
-docker pull opnfv/cperf:$DOCKER_TAG
-
-sudo mkdir -p /tmp/robot_results
diff --git a/jjb/cperf/cperf-robot-netvirt-csit.sh b/jjb/cperf/cperf-robot-netvirt-csit.sh
deleted file mode 100755
index eac1316a5..000000000
--- a/jjb/cperf/cperf-robot-netvirt-csit.sh
+++ /dev/null
@@ -1,186 +0,0 @@
-#!/usr/bin/env bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-if [[ ! -z ${SKIP_CSIT+x} && "$SKIP_CSIT" == "True" ]]; then
- echo "Skipping csit run"
- exit 0
-fi
-
-if [ "$OS_VERSION" == 'master' ]; then
- FULL_OS_VER='master'
-else
- FULL_OS_VER="stable/${OS_VERSION}"
-fi
-
-if [ "$ODL_BRANCH" == 'master' ]; then
- ODL_STREAM='neon'
-else
- ODL_STREAM=${ODL_BRANCH#"stable/"}
-fi
-
-echo "ODL Stream set: ${ODL_STREAM} and OS Version is ${FULL_OS_VER}"
-
-sudo rm -rf releng
-git clone https://gerrit.opnfv.org/gerrit/releng.git
-REL_PATH='releng/jjb/cperf'
-
-# NOTE: sourcing overcloudrc unsets any variable with OS_ prefix
-source ${WORKSPACE}/overcloudrc
-# note SDN_CONTROLLER_IP is set in overcloudrc, which is the VIP
-# for the admin/public network (since we are running a single-network deployment)
-
-NUM_CONTROL_NODES=$(python ${REL_PATH}/parse-node-yaml.py num_nodes --file $NODE_FILE_PATH)
-NUM_COMPUTE_NODES=$(python ${REL_PATH}/parse-node-yaml.py num_nodes --node-type compute --file $NODE_FILE_PATH)
-
-echo "Number of Control nodes found: ${NUM_CONTROL_NODES}"
-echo "Number of Compute nodes found: ${NUM_COMPUTE_NODES}"
-
-# Only 1 combo or ctrl node is specified, even for OS HA deployments
-# Currently supported combinations are:
-# 0cmb-1ctl-2cmp
-# 1cmb-0ctl-0cmp
-# 1cmb-0ctl-1cmp
-if [ "$NUM_COMPUTE_NODES" -eq 0 ]; then
- OPENSTACK_TOPO="1cmb-0ctl-0cmp"
-else
- OPENSTACK_TOPO="0cmb-1ctl-2cmp"
-fi
-
-idx=1
-EXTRA_ROBOT_ARGS=""
-for idx in `seq 1 $NUM_CONTROL_NODES`; do
- CONTROLLER_IP=$(python ${REL_PATH}/parse-node-yaml.py get_value -k address --node-number ${idx} --file $NODE_FILE_PATH)
- EXTRA_ROBOT_ARGS+=" -v ODL_SYSTEM_${idx}_IP:${CONTROLLER_IP} \
- -v OS_CONTROL_NODE_${idx}_IP:${CONTROLLER_IP} \
- -v ODL_SYSTEM_${idx}_IP:${CONTROLLER_IP} \
- -v HA_PROXY_${idx}_IP:${SDN_CONTROLLER_IP}"
-done
-
-# In all-in-one these Compute IPs still need to be passed to robot
-if [ "$NUM_COMPUTE_NODES" -eq 0 ]; then
- EXTRA_ROBOT_ARGS+=" -v OS_COMPUTE_1_IP:'' -v OS_COMPUTE_2_IP:''"
-else
- idx=1
- for idx in `seq 1 $NUM_COMPUTE_NODES`; do
- COMPUTE_IP=$(python ${REL_PATH}/parse-node-yaml.py get_value -k address --node-type compute --node-number ${idx} --file $NODE_FILE_PATH)
- EXTRA_ROBOT_ARGS+=" -v OS_COMPUTE_${idx}_IP:${COMPUTE_IP}"
- done
-fi
-
-CONTROLLER_1_IP=$(python ${REL_PATH}/parse-node-yaml.py get_value -k address --node-number 1 --file $NODE_FILE_PATH)
-
-if [ "$ODL_CONTAINERIZED" == 'false' ]; then
- EXTRA_ROBOT_ARGS+=" -v NODE_KARAF_COUNT_COMMAND:'ps axf | grep org.apache.karaf | grep -v grep | wc -l || echo 0' \
- -v NODE_START_COMMAND:'sudo systemctl start opendaylight_api' \
- -v NODE_KILL_COMMAND:'sudo systemctl stop opendaylight_api' \
- -v NODE_STOP_COMMAND:'sudo systemctl stop opendaylight_api' \
- -v NODE_FREEZE_COMMAND:'sudo systemctl stop opendaylight_api' "
-else
- EXTRA_ROBOT_ARGS+=" -v NODE_KARAF_COUNT_COMMAND:'sudo docker ps | grep opendaylight_api | wc -l || echo 0' \
- -v NODE_START_COMMAND:'sudo docker start opendaylight_api' \
- -v NODE_KILL_COMMAND:'sudo docker stop opendaylight_api' \
- -v NODE_STOP_COMMAND:'sudo docker stop opendaylight_api' \
- -v NODE_FREEZE_COMMAND:'sudo docker stop opendaylight_api' "
-fi
-
-# FIXME(trozet) remove this once it is fixed in csit
-# Upload glance image into openstack
-wget -O ${WORKSPACE}/cirros-0.3.5-x86_64-disk.img http://download.cirros-cloud.net/0.3.5/cirros-0.3.5-x86_64-disk.img
-export ANSIBLE_HOST_KEY_CHECKING=False
-ansible-playbook -i ${CONTROLLER_1_IP}, -u heat-admin --key-file ${WORKSPACE}/id_rsa ${REL_PATH}/cirros-upload.yaml.ansible --ssh-extra-args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null' -vvv
-
-LOGS_LOCATION=/tmp/robot_results
-
-robot_cmd="pybot \
- --removekeywords wuks \
- --xunit robotxunit.xml \
- --name 'CSIT' \
- -e exclude \
- -d $LOGS_LOCATION \
- -v BUNDLEFOLDER:/opt/opendaylight \
- -v CONTROLLER_USER:heat-admin \
- -v CMP_INSTANCES_SHARED_PATH:/var/lib/nova/instances/ \
- -v DEFAULT_LINUX_PROMPT:\$ \
- -v DEFAULT_LINUX_PROMPT_STRICT:]\$ \
- -v DEFAULT_USER:heat-admin \
- -v DEVSTACK_DEPLOY_PATH:/tmp \
- -v EXTERNAL_GATEWAY:$CONTROLLER_1_IP \
- -v EXTERNAL_PNF:$CONTROLLER_1_IP \
- -v EXTERNAL_SUBNET:192.0.2.0/24 \
- -v EXTERNAL_SUBNET_ALLOCATION_POOL:start=192.0.2.100,end=192.0.2.200 \
- -v EXTERNAL_INTERNET_ADDR:$CONTROLLER_1_IP \
- -v HA_PROXY_IP:$SDN_CONTROLLER_IP \
- -v NUM_ODL_SYSTEM:$NUM_CONTROL_NODES \
- -v NUM_OS_SYSTEM:$(($NUM_CONTROL_NODES + $NUM_COMPUTE_NODES)) \
- -v NUM_TOOLS_SYSTEM:0 \
- -v ODL_SNAT_MODE:conntrack \
- -v ODL_STREAM:$ODL_STREAM \
- -v ODL_SYSTEM_IP:$CONTROLLER_1_IP \
- -v OS_CONTROL_NODE_IP:$CONTROLLER_1_IP \
- -v OPENSTACK_BRANCH:$FULL_OS_VER \
- -v OPENSTACK_TOPO:$OPENSTACK_TOPO \
- -v OS_USER:heat-admin \
- -v ODL_ENABLE_L3_FWD:yes \
- -v ODL_SYSTEM_USER:heat-admin \
- -v ODL_SYSTEM_PROMPT:\$ \
- -v PRE_CLEAN_OPENSTACK_ALL:True \
- -v PUBLIC_PHYSICAL_NETWORK:datacentre \
- -v RESTCONFPORT:8081 \
- -v ODL_RESTCONF_USER:admin \
- -v ODL_RESTCONF_PASSWORD:$SDN_CONTROLLER_PASSWORD \
- -v KARAF_PROMPT_LOGIN:'opendaylight-user' \
- -v KARAF_PROMPT:'opendaylight-user.*root.*>' \
- -v SECURITY_GROUP_MODE:stateful \
- -v USER:heat-admin \
- -v USER_HOME:\$HOME \
- -v TOOLS_SYSTEM_IP:'' \
- -v NODE_ROLE_INDEX_START:0 \
- -v WORKSPACE:/tmp \
- $EXTRA_ROBOT_ARGS \
- -v of_port:6653 "
-
-SUITE_HOME='/home/opnfv/repos/odl_test/csit/suites'
-
-# Disabled suites
-#
-# ${SUITE_HOME}/netvirt/vpnservice/vpn_basic_ipv6.robot
-# This suite fails with an error indicating the connection was closed
-# to the overcloud control node:
-# https://build.opnfv.org/ci/job/cperf-apex-csit-master/104/consoleFull
-#
-# Minimize HA CSIT as it does not pass all suites
-if [ "$NUM_CONTROL_NODES" -eq 3 ]; then
- suites="${SUITE_HOME}/openstack/connectivity/l2.robot \
- ${SUITE_HOME}/openstack/connectivity/l3.robot"
-else
- suites="${SUITE_HOME}/openstack/connectivity/l2.robot \
- ${SUITE_HOME}/openstack/connectivity/l3.robot \
- ${SUITE_HOME}/openstack/connectivity/live_migration.robot \
- ${SUITE_HOME}/openstack/connectivity/external_network.robot \
- ${SUITE_HOME}/openstack/connectivity/security_group.robot \
- ${SUITE_HOME}/openstack/securitygroup/neutron_security_group.robot \
- ${SUITE_HOME}/openstack/securitygroup/security_group_l3bcast.robot \
- ${SUITE_HOME}/netvirt/vpnservice/vpn_basic.robot \
- ${SUITE_HOME}/netvirt/elan/elan.robot \
- ${SUITE_HOME}/netvirt/vpnservice/arp_learning.robot \
- ${SUITE_HOME}/netvirt/l2l3_gatewaymac_arp.robot \
- ${SUITE_HOME}/integration/Create_JVM_Plots.robot"
-fi
-
-echo "Robot command set: ${robot_cmd}"
-echo "Running robot..."
-docker run -i --net=host \
- -v ${LOGS_LOCATION}:${LOGS_LOCATION} \
- -v ${WORKSPACE}/id_rsa:/tmp/id_rsa \
- -v ${WORKSPACE}/overcloudrc:/tmp/overcloudrc \
- opnfv/cperf:$DOCKER_TAG \
- /bin/bash -c "source /tmp/overcloudrc; mkdir -p \$HOME/.ssh; cp /tmp/id_rsa \$HOME/.ssh; \
- cd /home/opnfv/repos/odl_test/ && git pull origin master; \
- pip install odltools; \
- ${robot_cmd} ${suites};"
-
-echo "Running post CSIT clean"
-ansible-playbook -i ${CONTROLLER_1_IP}, -u heat-admin --key-file ${WORKSPACE}/id_rsa ${REL_PATH}/csit-clean.yaml.ansible --ssh-extra-args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null' -vvv
diff --git a/jjb/cperf/cperf-upload-logs-csit.sh b/jjb/cperf/cperf-upload-logs-csit.sh
deleted file mode 100644
index bd8680406..000000000
--- a/jjb/cperf/cperf-upload-logs-csit.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-ODL_STREAM=${ODL_BRANCH#"stable/"}
-
-LOGS_LOCATION=/tmp/robot_results
-UPLOAD_LOCATION=artifacts.opnfv.org/cperf/cperf-apex-csit-${ODL_STREAM}/${BUILD_NUMBER}/
-echo "Uploading robot logs to ${UPLOAD_LOCATION}"
-gsutil -m cp -r -v ${LOGS_LOCATION} gs://${UPLOAD_LOCATION} > gsutil.latest_logs.log
diff --git a/jjb/cperf/cperf-views.yaml b/jjb/cperf/cperf-views.yaml
deleted file mode 100644
index ef982e84e..000000000
--- a/jjb/cperf/cperf-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: cperf-view
- views:
- - project-view
- project-name: cperf
diff --git a/jjb/cperf/csit-clean.yaml.ansible b/jjb/cperf/csit-clean.yaml.ansible
deleted file mode 100644
index 0151dd824..000000000
--- a/jjb/cperf/csit-clean.yaml.ansible
+++ /dev/null
@@ -1,11 +0,0 @@
----
-- hosts: all
- tasks:
- - name: Delete cirros glance image
- shell: >
- source /home/heat-admin/overcloudrc && openstack image delete
- cirros-0.3.5-x86_64-disk
- - name: Delete nano flavor
- shell: >
- source /home/heat-admin/overcloudrc && openstack flavor delete
- m1.nano
diff --git a/jjb/cperf/parse-node-yaml.py b/jjb/cperf/parse-node-yaml.py
deleted file mode 100644
index 5a7575540..000000000
--- a/jjb/cperf/parse-node-yaml.py
+++ /dev/null
@@ -1,71 +0,0 @@
-##############################################################################
-# Copyright (c) 2018 Tim Rozet (trozet@redhat.com) and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import argparse
-import sys
-import yaml
-
-
-def get_node_data_by_number(node_type, node_number):
- node_idx = 1
- for node_name, node_data in data['servers'].items():
- if node_type == node_data['type']:
- if node_idx == node_number:
- return node_name, node_data
- else:
- node_idx += 1
-
-
-def get_node_value(node_type, node_number, key):
- node_name, node_data = get_node_data_by_number(node_type, node_number)
- if not key and node_name is not None:
- return node_name
- elif node_data and isinstance(node_data, dict) and key in node_data:
- return node_data[key]
-
-
-def get_number_of_nodes(node_type):
- nodes = data['servers']
- num_nodes = 0
- for node_name, node_data in nodes.items():
- if node_data['type'] == node_type:
- num_nodes += 1
- return num_nodes
-
-
-FUNCTION_MAP = {'num_nodes':
- {'func': get_number_of_nodes,
- 'args': ['node_type']},
- 'get_value':
- {'func': get_node_value,
- 'args': ['node_type', 'node_number', 'key']},
- }
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser()
- parser.add_argument('command', choices=FUNCTION_MAP.keys())
- parser.add_argument('-f', '--file',
- dest='node_file',
- required=True)
- parser.add_argument('--node-type',
- default='controller',
- required=False)
- parser.add_argument('--node-number',
- default=1,
- type=int,
- required=False)
- parser.add_argument('-k', '--key',
- required=False)
- args = parser.parse_args(sys.argv[1:])
- with open(args.node_file, 'r') as fh:
- data = yaml.safe_load(fh)
- assert 'servers' in data
- func = FUNCTION_MAP[args.command]['func']
- args = [getattr(args, x) for x in FUNCTION_MAP[args.command]['args']]
- print(func(*args))
diff --git a/jjb/daisy4nfv/daisy-daily-jobs.yaml b/jjb/daisy4nfv/daisy-daily-jobs.yaml
deleted file mode 100644
index 4a7e6e908..000000000
--- a/jjb/daisy4nfv/daisy-daily-jobs.yaml
+++ /dev/null
@@ -1,423 +0,0 @@
----
-# Jenkins job templates for Daisy
-# TODO
-# [ ] enable baremetal jobs after baremetal deployment finishes
-# [ ] enable jobs in danube
-# [ ] add more scenarios
-# [ ] integration with yardstick
-
-- project:
-
- name: 'daisy'
- project: '{name}'
- installer: '{name}'
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- disabled: true
- gs-pathname: ''
- fraser: &fraser
- stream: fraser
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: true
-
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- pod:
- # -------------------------------
- # CI PODs
- # -------------------------------
- - baremetal:
- slave-label: daisy-baremetal
- <<: *master
- - virtual:
- slave-label: daisy-virtual
- <<: *master
- - baremetal:
- slave-label: daisy-baremetal
- <<: *fraser
- - virtual:
- slave-label: daisy-virtual
- <<: *fraser
- # -------------------------------
- # Non-CI PODs
- # -------------------------------
- - zte-pod3:
- slave-label: zte-pod3
- <<: *master
- - zte-pod3:
- slave-label: zte-pod3
- <<: *fraser
- - zte-pod9:
- slave-label: zte-pod9
- <<: *master
- - zte-pod9:
- slave-label: zte-pod9
- <<: *fraser
-
- # -------------------------------
- # scenarios
- # -------------------------------
- scenario:
- # HA scenarios
- - 'os-nosdn-nofeature-ha':
- auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
- # NOHA scenarios
- - 'os-nosdn-nofeature-noha':
- auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
- # ODL_L3 scenarios
- - 'os-odl-nofeature-ha':
- auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
- # ovs_dpdk scenarios
- - 'os-nosdn-ovs_dpdk-noha':
- auto-trigger-name: 'daisy-{scenario}-{pod}-daily-{stream}-trigger'
-
- jobs:
- - '{project}-{scenario}-{pod}-daily-{stream}'
- - '{project}-deploy-{pod}-daily-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
- name: '{project}-{scenario}-{pod}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'daisy-os-.*?-{pod}-daily-.*?'
- - 'daisy-daily-.*'
- - 'daisy-kolla-build-.*'
- blocking-level: 'NODE'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
- triggers:
- - '{auto-trigger-name}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - 'testapi-parameter'
- - 'daisy-project-parameter':
- gs-pathname: '{gs-pathname}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - trigger-builds:
- - project: 'daisy-deploy-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO={scenario}
- INSTALLER_VERSION={stream}
- UPSTREAM_JOB_NAME=$JOB_NAME
- UPSTREAM_BUILD_ID=$BUILD_ID
- same-node: true
- block: true
- - trigger-builds:
- - project: 'functest-daisy-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO={scenario}
- INSTALLER_VERSION={stream}
- UPSTREAM_JOB_NAME=$JOB_NAME
- UPSTREAM_BUILD_ID=$BUILD_ID
- same-node: true
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-daisy-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO={scenario}
- INSTALLER_VERSION={stream}
- UPSTREAM_JOB_NAME=$JOB_NAME
- UPSTREAM_BUILD_ID=$BUILD_ID
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
-
-- job-template:
- name: '{project}-deploy-{pod}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'daisy-kolla-build-.*'
- - '{installer}-(build|deploy|test)-daily-(fraser|master)'
- - '{installer}-deploy-(baremetal|virtual|zte-pod3|zte-pod9)-daily-(fraser|master)'
- - '(functest|yardstick)-{installer}-(baremetal|virtual|zte-pod3|zte-pod9)-daily-(fraser|master)'
- blocking-level: 'NODE'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - 'testapi-parameter'
- - 'daisy-project-parameter':
- gs-pathname: '{gs-pathname}'
- - 'deploy-scenario'
- - string:
- name: DEPLOY_TIMEOUT
- default: '150'
- description: 'Deployment timeout in minutes'
-
- scm:
- - git-scm
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - 'track-begin-timestamp'
- - shell:
- !include-raw-escape: ./daisy4nfv-download-artifact.sh
- - shell:
- !include-raw-escape: ./daisy-deploy.sh
- publishers:
- - 'report-provision-result'
-
-########################
-# trigger macros
-########################
-# ----------------------------------------------
-# Triggers for job running on daisy-baremetal against master branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-# Basic NOHA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-# ODL Scenarios
-- trigger:
- name: 'daisy-os-odl-nofeature-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-# ovs_dpdk Scenarios
-- trigger:
- name: 'daisy-os-nosdn-ovs_dpdk-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-
-# ----------------------------------------------
-# Triggers for job running on daisy-virtual against master branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-ha-virtual-daily-master-trigger'
- triggers:
- - timed: '0 16 * * *'
-# Basic NOHA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-noha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-# ODL Scenarios
-- trigger:
- name: 'daisy-os-odl-nofeature-ha-virtual-daily-master-trigger'
- triggers:
- - timed: '0 12 * * *'
-# ovs_dpdk Scenarios
-- trigger:
- name: 'daisy-os-nosdn-ovs_dpdk-noha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-
-# ----------------------------------------------
-# Triggers for job running on daisy-baremetal against fraser branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-ha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: '0 0,6 * * *'
-# Basic NOHA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-noha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: ''
-# ODL Scenarios
-- trigger:
- name: 'daisy-os-odl-nofeature-ha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: '0 12,18 * * *'
-# ovs_dpdk Scenarios
-- trigger:
- name: 'daisy-os-nosdn-ovs_dpdk-noha-baremetal-daily-fraser-trigger'
- triggers:
- - timed: ''
-
-# ----------------------------------------------
-# Triggers for job running on daisy-virtual against fraser branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-ha-virtual-daily-fraser-trigger'
- triggers:
- - timed: '0 0 * * *'
-# Basic NOHA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-noha-virtual-daily-fraser-trigger'
- triggers:
- - timed: ''
-# ODL Scenarios
-- trigger:
- name: 'daisy-os-odl-nofeature-ha-virtual-daily-fraser-trigger'
- triggers:
- - timed: '0 20 * * *'
-# ovs_dpdk Scenarios
-- trigger:
- name: 'daisy-os-nosdn-ovs_dpdk-noha-virtual-daily-fraser-trigger'
- triggers:
- - timed: ''
-
-# ----------------------------------------------
-# Triggers for job running on zte-pod3 against master branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-ha-zte-pod3-daily-master-trigger'
- triggers:
- - timed: ''
-# Basic NOHA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-noha-zte-pod3-daily-master-trigger'
- triggers:
- - timed: ''
-# ODL Scenarios
-- trigger:
- name: 'daisy-os-odl-nofeature-ha-zte-pod3-daily-master-trigger'
- triggers:
- - timed: ''
-# ovs_dpdk Scenarios
-- trigger:
- name: 'daisy-os-nosdn-ovs_dpdk-noha-zte-pod3-daily-master-trigger'
- triggers:
- - timed: ''
-
-# ----------------------------------------------
-# Triggers for job running on zte-pod3 against fraser branch
-# ----------------------------------------------
-# Basic HA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-ha-zte-pod3-daily-fraser-trigger'
- triggers:
- - timed: ''
-# Basic NOHA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-noha-zte-pod3-daily-fraser-trigger'
- triggers:
- - timed: ''
-# ODL Scenarios
-- trigger:
- name: 'daisy-os-odl-nofeature-ha-zte-pod3-daily-fraser-trigger'
- triggers:
- - timed: '0 16,22 * * *'
-# ovs_dpdk Scenarios
-- trigger:
- name: 'daisy-os-nosdn-ovs_dpdk-noha-zte-pod3-daily-fraser-trigger'
- triggers:
- - timed: ''
-
-# ----------------------------------------------
-# ZTE POD9 Triggers running against master branch
-# ----------------------------------------------
-# ovs_dpdk Scenarios
-- trigger:
- name: 'daisy-os-nosdn-ovs_dpdk-noha-zte-pod9-daily-master-trigger'
- triggers:
- - timed: '0 20 * * *'
-# Basic HA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-ha-zte-pod9-daily-master-trigger'
- triggers:
- - timed: ''
-# Basic NOHA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-noha-zte-pod9-daily-master-trigger'
- triggers:
- - timed: ''
-# ODL Scenarios
-- trigger:
- name: 'daisy-os-odl-nofeature-ha-zte-pod9-daily-master-trigger'
- triggers:
- - timed: ''
-
-# ----------------------------------------------
-# ZTE POD9 Triggers running against fraser branch
-# ----------------------------------------------
-# ovs_dpdk Scenarios
-- trigger:
- name: 'daisy-os-nosdn-ovs_dpdk-noha-zte-pod9-daily-fraser-trigger'
- triggers:
- - timed: '0 10 * * *'
-# Basic HA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-ha-zte-pod9-daily-fraser-trigger'
- triggers:
- - timed: ''
-# Basic NOHA Scenarios
-- trigger:
- name: 'daisy-os-nosdn-nofeature-noha-zte-pod9-daily-fraser-trigger'
- triggers:
- - timed: ''
-# ODL Scenarios
-- trigger:
- name: 'daisy-os-odl-nofeature-ha-zte-pod9-daily-fraser-trigger'
- triggers:
- - timed: ''
diff --git a/jjb/daisy4nfv/daisy-deploy.sh b/jjb/daisy4nfv/daisy-deploy.sh
deleted file mode 100755
index 950b9be1f..000000000
--- a/jjb/daisy4nfv/daisy-deploy.sh
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/bin/bash
-set -o nounset
-set -o pipefail
-
-echo "--------------------------------------------------------"
-echo "This is $INSTALLER_TYPE deploy job!"
-echo "--------------------------------------------------------"
-
-DEPLOY_SCENARIO=${DEPLOY_SCENARIO:-"os-nosdn-nofeature-noha"}
-BRIDGE=${BRIDGE:-pxebr}
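-# derive lab and pod from the Jenkins slave name, e.g. NODE_NAME=zte-pod3 gives LAB_NAME=zte, POD_NAME=pod3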
-LAB_NAME=${NODE_NAME/-*}
-POD_NAME=${NODE_NAME/*-}
-deploy_ret=0
-
-if [[ ! "$NODE_NAME" =~ "-virtual" ]] && [[ ! "$LAB_NAME" =~ (zte) ]]; then
- echo "Unsupported lab $LAB_NAME for now, Cannot continue!"
- exit $deploy_ret
-fi
-
-# clone the securedlab/pharos repo
-cd $WORKSPACE
-
-# There are no PDFs in euphrates branch of pharos repo.
-if [[ "$BRANCH" =~ "euphrates" ]]; then
- CONFIG_REPO_NAME=securedlab
-else
- CONFIG_REPO_NAME=pharos
-fi
-
-if [[ "$BRANCH" =~ "master" ]]; then
- DOCTOR_OPT="-d 1"
-else
- DOCTOR_OPT=""
-fi
-
-LABS_DIR=/var/tmp/opnfv-${CONFIG_REPO_NAME}
-
-echo "Cloning ${CONFIG_REPO_NAME} repo $BRANCH to $LABS_DIR"
-sudo rm -rf $LABS_DIR
-git clone ssh://jenkins-zte@gerrit.opnfv.org:29418/${CONFIG_REPO_NAME} \
- --quiet --branch $BRANCH $LABS_DIR
-
-DEPLOY_COMMAND="sudo -E ./ci/deploy/deploy.sh -L $LABS_DIR \
- -l $LAB_NAME -p $POD_NAME -B $BRIDGE -s $DEPLOY_SCENARIO \
- $DOCTOR_OPT"
-
-# log info to console
-echo """
-Deployment parameters
---------------------------------------------------------
-Scenario: $DEPLOY_SCENARIO
-LAB: $LAB_NAME
-POD: $POD_NAME
-BRIDGE: $BRIDGE
-
-Starting the deployment using $INSTALLER_TYPE. This could take some time...
---------------------------------------------------------
-Issuing command
-$DEPLOY_COMMAND
-"""
-
-# start the deployment
-$DEPLOY_COMMAND
-
-if [ $? -ne 0 ]; then
- echo
- echo "Depolyment failed!"
- deploy_ret=1
-else
- echo
- echo "--------------------------------------------------------"
- echo "Deployment done!"
-fi
-
-exit $deploy_ret
diff --git a/jjb/daisy4nfv/daisy-project-jobs.yaml b/jjb/daisy4nfv/daisy-project-jobs.yaml
deleted file mode 100644
index 791454820..000000000
--- a/jjb/daisy4nfv/daisy-project-jobs.yaml
+++ /dev/null
@@ -1,312 +0,0 @@
----
-######################################################################
-# Add daily jobs for building, deploying and testing
-# TODO:
-# - [ ] Add yardstick and functest for test stage
-# - [x] Use daisy-baremetal-defaults for choosing baremetal deployment
-######################################################################
-
-#############################
-# Job configuration for daisy
-#############################
-- project:
- name: daisy-project-jobs
-
- project: 'daisy'
-
- installer: 'daisy'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: true
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: true
-
- phase:
- - 'build':
- slave-label: 'opnfv-build-centos'
- - 'deploy':
- slave-label: 'daisy-baremetal'
- - 'test':
- slave-label: 'opnfv-build-centos'
-
- jobs:
- - '{installer}-daily-{stream}'
- - '{installer}-{phase}-daily-{stream}'
- - '{installer}-kolla-build-{stream}'
-
-#############################
-# docker build job templates
-#############################
-- job-template:
- name: '{installer}-kolla-build-{stream}'
- disabled: false
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- # Note: Need to block all jobs which may create a daisy VM.
- blocking-jobs:
- - '{installer}-kolla-build-.*'
- - 'daisy-deploy-.*'
- - 'daisy-daily-.*'
- blocking-level: 'NODE'
-
- scm:
- - git-scm
-
- triggers:
- - 'daisy-kolla-build-{stream}-trigger'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'daisy-virtual-defaults'
- - '{installer}-defaults'
- - '{installer}-project-parameter':
- gs-pathname: '{gs-pathname}'
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 720
- fail: true
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - shell:
- !include-raw-escape: ./daisy4nfv-build-kolla-image.sh
-
- publishers:
- - '{installer}-recipients'
- - email-jenkins-admins-on-failure
-
-- trigger:
- name: 'daisy-kolla-build-fraser-trigger'
- triggers:
- - timed: '0 0 * * 0'
-
-- trigger:
- name: 'daisy-kolla-build-master-trigger'
- triggers:
- - timed: '0 12 * * 0'
-
-
-########################
-# job templates
-########################
-- job-template:
- name: '{installer}-daily-{stream}'
-
- project-type: multijob
-
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '{installer}-daily-.*'
- - '{installer}-kolla-build-.*'
- - 'daisy4nfv-merge-build-.*'
- - 'daisy4nfv-verify-build-.*'
- blocking-level: 'NODE'
-
- scm:
- - git-scm
-
- triggers:
- - timed: '0 8 * * *'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-centos-defaults'
- - '{installer}-defaults'
- - '{installer}-project-parameter':
- gs-pathname: '{gs-pathname}'
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: '{installer}-build-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: deploy
- condition: SUCCESSFUL
- projects:
- - name: '{installer}-deploy-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: test
- condition: SUCCESSFUL
- projects:
- - name: '{installer}-test-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
-
- publishers:
- - '{installer}-recipients'
- - email-jenkins-admins-on-failure
-
-- job-template:
- name: '{installer}-{phase}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 6
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '{installer}-daily-(build|deploy|test)-(fraser|master)'
- - '{installer}-.*-(baremetal|virtual|zte-pod3|zte-pod9)-daily-(fraser|master)'
- - '(functest|yardstick)-{installer}-(baremetal|virtual|zte-pod3|zte-pod9)-daily-(fraser|master)'
- blocking-level: 'NODE'
-
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
- - '{installer}-project-parameter':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{installer}-{phase}-daily-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'daisy-build-daily-macro'
- builders:
- - shell:
- !include-raw: ./daisy4nfv-basic.sh
- - shell:
- !include-raw: ./daisy4nfv-build.sh
- - shell:
- !include-raw: ./daisy4nfv-upload-artifact.sh
- - 'clean-workspace'
-
-- builder:
- name: 'daisy-deploy-daily-macro'
- builders:
- - shell:
- !include-raw: ./daisy4nfv-download-artifact.sh
- - shell:
- !include-raw: ./daisy-deploy.sh
-
-- builder:
- name: 'daisy-test-daily-macro'
- builders:
- - shell: |
- #!/bin/bash
-
- echo "Not activated!"
-
-#####################################
-# parameter macros
-#####################################
-- publisher:
- name: 'daisy-recipients'
- publishers:
- # yamllint disable rule:line-length
- - email:
- recipients: hu.zhijiang@zte.com.cn lu.yao135@zte.com.cn zhou.ya@zte.com.cn yangyang1@zte.com.cn julienjut@gmail.com
- # yamllint enable rule:line-length
- - email-jenkins-admins-on-failure
-
-- parameter:
- name: 'daisy-project-parameter'
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/cache/$INSTALLER_TYPE
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/daisy4nfv/daisy-rtd-jobs.yaml b/jjb/daisy4nfv/daisy-rtd-jobs.yaml
deleted file mode 100644
index 2e7689a68..000000000
--- a/jjb/daisy4nfv/daisy-rtd-jobs.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
----
-- project:
- name: daisy-rtd
- project: daisy
- project-name: daisy
-
- project-pattern: 'daisy'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-daisy/47361/'
- rtd-token: '265efe14ff0bb3fa0d4ea66d6be1b7b511d5d713'
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/daisy4nfv/daisy4nfv-basic.sh b/jjb/daisy4nfv/daisy4nfv-basic.sh
deleted file mode 100755
index 87f5482e0..000000000
--- a/jjb/daisy4nfv/daisy4nfv-basic.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-
-echo "--------------------------------------------------------"
-echo "This is diasy4nfv basic job!"
-echo "--------------------------------------------------------"
-
diff --git a/jjb/daisy4nfv/daisy4nfv-build-kolla-image.sh b/jjb/daisy4nfv/daisy4nfv-build-kolla-image.sh
deleted file mode 100755
index 0441ea159..000000000
--- a/jjb/daisy4nfv/daisy4nfv-build-kolla-image.sh
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2016 ZTE Corporation and others.
-# hu.zhijiang@zte.com.cn
-# sun.jing22@zte.com.cn
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-upload_image_to_opnfv () {
- image=$1
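- # push the image and its sha512sum to gs://$GS_URL/upstream/ and verify the upload afterwards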
-
- sha512sum -b $image > $image.sha512sum
- gsutil cp $image.sha512sum gs://$GS_URL/upstream/$image.sha512sum
-
- echo "Uploading $INSTALLER_TYPE artifact. This could take some time..."
- echo
- gsutil cp $image gs://$GS_URL/upstream/$image
- gsutil -m setmeta \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/upstream/$image
-
- # check if we uploaded the file successfully to see if things are fine
- gsutil ls gs://$GS_URL/upstream/$image
- if [[ $? -ne 0 ]]; then
- echo "Problem while uploading artifact!"
- exit 1
- fi
-}
-
-
-
-echo "--------------------------------------------------------"
-echo "This is diasy4nfv kolla image build job!"
-echo "--------------------------------------------------------"
-
-# start the build
-cd $WORKSPACE
-rm -rf docker_build_dir
-mkdir -p docker_build_dir
-
-# -j is for deciding which branch will be used when building,
-# only for OPNFV
-sudo -E ./ci/kolla-build.sh -j $JOB_NAME -w $WORKSPACE/docker_build_dir
-
-if [ $? -ne 0 ]; then
- echo
- echo "Kolla build failed!"
- deploy_ret=1
-else
- echo
- echo "--------------------------------------------------------"
- echo "Kolla build done!"
-fi
-
-image=$(ls $WORKSPACE/docker_build_dir/kolla-build-output/kolla-image-*.tgz)
-upload_image_to_opnfv $image
-
-echo
-echo "--------------------------------------------------------"
-echo "All done!"
diff --git a/jjb/daisy4nfv/daisy4nfv-build.sh b/jjb/daisy4nfv/daisy4nfv-build.sh
deleted file mode 100755
index a081b3bc6..000000000
--- a/jjb/daisy4nfv/daisy4nfv-build.sh
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/bin/bash
-
-set -o errexit
-set -o nounset
-set -o pipefail
-
-echo "--------------------------------------------------------"
-echo "This is diasy4nfv build job!"
-echo "--------------------------------------------------------"
-
-# set OPNFV_ARTIFACT_VERSION
-if [[ "$JOB_NAME" =~ "merge" ]]; then
- echo "Building Daisy4nfv ISO for a merged change"
- export OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
-else
- export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-fi
-
-# build output directory
-OUTPUT_DIR=$WORKSPACE/build_output
-mkdir -p $OUTPUT_DIR
-
-# start the build
-cd $WORKSPACE
-./ci/build.sh $OUTPUT_DIR $OPNFV_ARTIFACT_VERSION
-
-# save information regarding artifact into file
-(
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin"
- echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $OUTPUT_DIR/opnfv-$OPNFV_ARTIFACT_VERSION.bin | cut -d' ' -f1)"
- echo "OPNFV_ARTIFACT_URL_ISO=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
- echo "OPNFV_ARTIFACT_SHA512SUM_ISO=$(sha512sum $OUTPUT_DIR/opnfv-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
-) > $WORKSPACE/opnfv.properties
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/daisy4nfv/daisy4nfv-download-artifact.sh b/jjb/daisy4nfv/daisy4nfv-download-artifact.sh
deleted file mode 100755
index ae5ca3813..000000000
--- a/jjb/daisy4nfv/daisy4nfv-download-artifact.sh
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2016 ZTE Corporation and others.
-# hu.zhijiang@zte.com.cn
-# sun.jing22@zte.com.cn
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o pipefail
-
-# use the proxy URL instead of the normal URL, because googleusercontent.com may be blocked intermittently
-[[ "$NODE_NAME" =~ (zte) ]] && GS_URL=${GS_BASE_PROXY%%/*}/$GS_URL
-
-if [[ "$JOB_NAME" =~ "merge" ]]; then
- echo "Downloading http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties"
- # get the properties file for the Daisy4nfv BIN built for a merged change
- curl -L -s -o $WORKSPACE/latest.properties http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties
-else
- # get the latest.properties file in order to get info regarding latest artifact
- echo "Downloading http://$GS_URL/latest.properties"
- curl -L -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
-fi
-
-# check if we got the file
-[[ -f $WORKSPACE/latest.properties ]] || exit 1
-
-# source the file so we get artifact metadata
-source $WORKSPACE/latest.properties
-
-# echo the info about artifact that is used during the deployment
-OPNFV_ARTIFACT=${OPNFV_ARTIFACT_URL/*\/}
-echo "Using $OPNFV_ARTIFACT for deployment"
-
-[[ "$NODE_NAME" =~ (zte) ]] && OPNFV_ARTIFACT_URL=${GS_BASE_PROXY%%/*}/$OPNFV_ARTIFACT_URL
-
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
- # check if we already have the image to avoid redownload
- BINSTORE="/bin_mount/opnfv_ci/${BRANCH##*/}"
- if [[ -f "$BINSTORE/$OPNFV_ARTIFACT" && ! -z $OPNFV_ARTIFACT_SHA512SUM ]]; then
- echo "BIN exists locally. Starting to check the sha512sum."
- if [[ $OPNFV_ARTIFACT_SHA512SUM = $(sha512sum -b $BINSTORE/$OPNFV_ARTIFACT | cut -d' ' -f1) ]]; then
- echo "Sha512sum is verified. Skipping the download and using the file from BIN store."
- ln -s $BINSTORE/$OPNFV_ARTIFACT $WORKSPACE/opnfv.bin
- echo "--------------------------------------------------------"
- echo
- ls -al $WORKSPACE/opnfv.bin
- echo
- echo "--------------------------------------------------------"
- echo "Done!"
- exit 0
- fi
- fi
-fi
-
-# log info to console
-echo "Downloading the $INSTALLER_TYPE artifact using URL http://$OPNFV_ARTIFACT_URL"
-echo "This could take some time... Now the time is $(date -u)"
-echo "--------------------------------------------------------"
-echo
-
-# download the file
-if [[ "$NODE_NAME" =~ (zte) ]] && [ -x "$(command -v aria2c)" ]; then
- DOWNLOAD_CMD="aria2c -x 3 --allow-overwrite=true -d $WORKSPACE -o opnfv.bin"
-else
- DOWNLOAD_CMD="curl -L -s -o $WORKSPACE/opnfv.bin"
-fi
-
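-# retry the download a few times to tolerate transient network failures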
-maxretries=3
-cnt=0
-rc=1
-while [ $cnt -lt $maxretries ] && [ $rc -ne 0 ]
-do
- cnt=$[cnt + 1]
- $DOWNLOAD_CMD http://$OPNFV_ARTIFACT_URL > gsutil.bin.log 2>&1
- rc=$?
-done
-
-# list the file
-ls -al $WORKSPACE/opnfv.bin
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
diff --git a/jjb/daisy4nfv/daisy4nfv-merge-jobs.yaml b/jjb/daisy4nfv/daisy4nfv-merge-jobs.yaml
deleted file mode 100644
index c261c23bc..000000000
--- a/jjb/daisy4nfv/daisy4nfv-merge-jobs.yaml
+++ /dev/null
@@ -1,226 +0,0 @@
----
-- project:
- name: 'daisy4nfv-merge-jobs'
-
- project: 'daisy'
-
- installer: 'daisy'
-
- ###########################################################
- # use alias to keep the jobs'name existed already unchanged
- ###########################################################
- alias: 'daisy4nfv'
-
- #####################################
- # branch definitions
- #####################################
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: true
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: true
- #####################################
- # patch merge phases
- #####################################
- phase:
- - 'build':
- slave-label: 'opnfv-build-centos'
- - 'deploy-virtual':
- slave-label: 'daisy-virtual'
-
- #####################################
- # jobs
- #####################################
- jobs:
- - '{alias}-merge-{stream}'
- - '{alias}-merge-{phase}-{stream}'
-
-#####################################
-# job templates
-#####################################
-- job-template:
- name: '{alias}-merge-{stream}'
-
- project-type: multijob
-
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '{alias}-merge-(master|fraser)'
- blocking-level: 'NODE'
-
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'ci/**'
- - compare-type: ANT
- pattern: 'code/**'
- - compare-type: ANT
- pattern: 'deploy/**'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
- - compare-type: ANT
- pattern: '.gitignore'
- readable-message: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-centos-defaults'
- - '{alias}-merge-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: '{alias}-merge-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: '{alias}-merge-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
-
-- job-template:
- name: '{alias}-merge-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '{alias}-merge-{phase}-.*'
- - '{installer}-daily-.*'
- blocking-level: 'NODE'
-
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults'
- - '{alias}-merge-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-merge-{phase}-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'daisy-merge-build-macro'
- builders:
- - shell:
- !include-raw: ./daisy4nfv-basic.sh
- - shell:
- !include-raw: ./daisy4nfv-build.sh
- - shell:
- !include-raw: ./daisy4nfv-upload-artifact.sh
- - 'clean-workspace'
-
-- builder:
- name: 'daisy-merge-deploy-virtual-macro'
- builders:
- - shell:
- !include-raw: ./daisy4nfv-download-artifact.sh
- - shell:
- !include-raw: ./daisy-deploy.sh
- - 'clean-workspace'
-
-#####################################
-# parameter macros
-#####################################
-- parameter:
- name: 'daisy4nfv-merge-defaults'
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/cache/$INSTALLER_TYPE
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/daisy4nfv/daisy4nfv-smoke-test.sh b/jjb/daisy4nfv/daisy4nfv-smoke-test.sh
deleted file mode 100755
index bd6eb7ee0..000000000
--- a/jjb/daisy4nfv/daisy4nfv-smoke-test.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-
-echo "--------------------------------------------------------"
-echo "This is diasy4nfv smoke test job!"
-echo "--------------------------------------------------------"
-
diff --git a/jjb/daisy4nfv/daisy4nfv-upload-artifact.sh b/jjb/daisy4nfv/daisy4nfv-upload-artifact.sh
deleted file mode 100755
index def4f6a75..000000000
--- a/jjb/daisy4nfv/daisy4nfv-upload-artifact.sh
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2016 ZTE Corporation and others.
-# hu.zhijiang@zte.com.cn
-# sun.jing22@zte.com.cn
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o pipefail
-
-# check if we built something
-if [ -f $WORKSPACE/.noupload ]; then
- echo "Nothing new to upload. Exiting."
- /bin/rm -f $WORKSPACE/.noupload
- exit 0
-fi
-
-# source the opnfv.properties to get ARTIFACT_VERSION
-source $WORKSPACE/opnfv.properties
-
-importkey () {
-# clone releng repository
-echo "Cloning releng repository..."
-[ -d releng ] && rm -rf releng
-git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
-# this is where we import the signing key
-if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
- source $WORKSPACE/releng/utils/gpg_import_key.sh
-fi
-}
-
-signbin () {
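-# sign the .bin artifact with the OPNFV helpdesk GPG key and upload the detached signature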
-gpg2 -vvv --batch --yes --no-tty \
- --default-key opnfv-helpdesk@rt.linuxfoundation.org \
- --passphrase besteffort \
- --detach-sig $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.bin
-
-gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.bin.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin.sig
-echo "BIN signature Upload Complete!"
-}
-
-uploadbin () {
-# log info to console
-echo "Uploading $INSTALLER_TYPE artifact. This could take some time..."
-echo
-
-cd $WORKSPACE
-# upload artifact and additional files to google storage
-gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.bin \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin > gsutil.bin.log 2>&1
-gsutil cp $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso >> gsutil.bin.log 2>&1
-gsutil cp $WORKSPACE/opnfv.properties \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
- gsutil cp $WORKSPACE/opnfv.properties \
- gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
-elif [[ "$JOB_NAME" =~ "merge" ]]; then
- echo "Uploaded Daisy4nfv artifacts for a merged change"
-fi
-
-gsutil -m setmeta \
- -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/latest.properties \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1
-
-gsutil -m setmeta \
- -h "Cache-Control:private, max-age=0, no-transform" \
- gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin > /dev/null 2>&1
-
-# disabled errexit due to gsutil setmeta complaints
-# BadRequestException: 400 Invalid argument
-# check if we uploaded the file successfully to see if things are fine
-gsutil ls gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin > /dev/null 2>&1
-if [[ $? -ne 0 ]]; then
- echo "Problem while uploading artifact!"
- echo "Check log $WORKSPACE/gsutil.bin.log on the machine where this build is done."
- exit 1
-fi
-
-echo "Done!"
-echo
-echo "--------------------------------------------------------"
-echo
-echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.bin"
-echo
-echo "--------------------------------------------------------"
-echo
-}
-
-importkey
-signbin
-uploadbin
diff --git a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yaml b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yaml
deleted file mode 100644
index c8c1db096..000000000
--- a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yaml
+++ /dev/null
@@ -1,225 +0,0 @@
----
-- project:
- name: 'daisy4nfv-verify-jobs'
- project: 'daisy'
- installer: 'daisy'
- ##########################################################
- # use an alias to keep the existing job names unchanged
- ##########################################################
- alias: 'daisy4nfv'
-
- #####################################
- # branch definitions
- #####################################
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: true
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: true
-
- #####################################
- # patch verification phases
- #####################################
- phase:
- - unit:
- slave-label: 'opnfv-build'
- - build:
- slave-label: 'opnfv-build-centos'
-
- #####################################
- # jobs
- #####################################
- jobs:
- - '{alias}-verify-{stream}'
- - '{alias}-verify-{phase}-{stream}'
-
-#####################################
-# job templates
-#####################################
-- job-template:
- name: '{alias}-verify-{stream}'
- project-type: multijob
- disabled: false
- concurrent: true
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '{alias}-merge-build-.*'
- blocking-level: 'NODE'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'ci/**'
- - compare-type: ANT
- pattern: 'code/**'
- - compare-type: ANT
- pattern: 'deploy/**'
- - compare-type: ANT
- pattern: 'tests/**'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
- - compare-type: ANT
- pattern: '.gitignore'
- readable-message: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-centos-defaults'
- - '{alias}-verify-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: unit
- condition: SUCCESSFUL
- projects:
- - name: '{alias}-verify-unit-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: '{alias}-verify-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
-
-- job-template:
- name: '{alias}-verify-{phase}-{stream}'
- disabled: '{obj:disabled}'
- concurrent: true
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 6
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '{alias}-merge-build-.*'
- - '{alias}-verify-build-.*'
- - '{installer}-daily-.*'
- blocking-level: 'NODE'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults'
- - '{alias}-verify-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-verify-{phase}-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'daisy-verify-build-macro'
- builders:
- - shell:
- !include-raw: ./daisy4nfv-basic.sh
- - shell:
- !include-raw: ./daisy4nfv-build.sh
- - 'clean-workspace'
-
-- builder:
- name: daisy-verify-unit-macro
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
- set -o xtrace
- tox -e py27
-
-#####################################
-# parameter macros
-#####################################
-- parameter:
- name: 'daisy4nfv-verify-defaults'
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: CACHE_DIRECTORY
- default: $HOME/opnfv/cache/$INSTALLER_TYPE
- description: "Directory where the cache to be used during the build is located."
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/doctor/doctor-env-presetup.sh b/jjb/doctor/doctor-env-presetup.sh
deleted file mode 100755
index ebbf32c01..000000000
--- a/jjb/doctor/doctor-env-presetup.sh
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env bash
-set -o errexit
-set -o pipefail
-
-# set vars from env if not provided by user as options
-installer_key_file=${installer_key_file:-$HOME/installer_key_file}
-opnfv_installer=${opnfv_installer:-$HOME/opnfv-installer.sh}
-
-# Fetch INSTALLER_IP for APEX deployments
-if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
-
- echo "Gathering IP information for Apex installer VM"
- ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
- if sudo virsh list | grep undercloud; then
- echo "Installer VM detected"
- undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
- grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
- export INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
- echo "Installer ip is ${INSTALLER_IP}"
- else
- echo "No available installer VM exists and no credentials provided...exiting"
- exit 1
- fi
-
- sudo cp /root/.ssh/id_rsa ${installer_key_file}
- sudo chown `whoami`:`whoami` ${installer_key_file}
-
-elif [[ ${INSTALLER_TYPE} == 'daisy' ]]; then
- echo "Gathering IP information for Daisy installer VM"
- if sudo virsh list | grep daisy; then
- echo "Installer VM detected"
-
- bridge_name=$(sudo virsh domiflist daisy | grep vnet | awk '{print $3}')
- echo "Bridge is $bridge_name"
-
- installer_mac=$(sudo virsh domiflist daisy | grep vnet | \
- grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
- export INSTALLER_IP=$(/usr/sbin/arp -e -i $bridge_name | grep ${installer_mac} | head -n 1 | awk {'print $1'})
-
- echo "Installer ip is ${INSTALLER_IP}"
- else
- echo "No available installer VM exists...exiting"
- exit 1
- fi
-
-elif [[ ${INSTALLER_TYPE} == 'fuel' ]]; then
- if [[ ! "${BRANCH}" =~ "danube" ]]; then
- export SSH_KEY=${SSH_KEY:-/var/lib/opnfv/mcp.rsa}
- sudo cp ${SSH_KEY} ${installer_key_file}
- sudo chown `whoami`:`whoami` ${installer_key_file}
- fi
-fi
-
-
-# Checking if destination path is valid
-if [ -d $opnfv_installer ]; then
- error "Please provide the full destination path for the installer ip file including the filename"
-else
- # Check if we can create the file (e.g. path is correct)
- touch $opnfv_installer || { echo "Cannot create the file specified. Check that the path is correct and run the script again." >&2; exit 1; }
-fi
-
-
-# Write the installer info to the file
-echo export INSTALLER_TYPE=${INSTALLER_TYPE} > $opnfv_installer
-echo export INSTALLER_IP=${INSTALLER_IP} >> $opnfv_installer
-if [ -e ${installer_key_file} ]; then
- echo export SSH_KEY=${installer_key_file} >> $opnfv_installer
-fi
diff --git a/jjb/doctor/doctor-rtd-jobs.yaml b/jjb/doctor/doctor-rtd-jobs.yaml
deleted file mode 100644
index 512b9907a..000000000
--- a/jjb/doctor/doctor-rtd-jobs.yaml
+++ /dev/null
@@ -1,21 +0,0 @@
----
-- project:
- name: doctor-rtd
- project: doctor
- project-name: doctor
-
- gerrit-skip-vote: true
- project-pattern: 'doctor'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-doctor/47362/'
- rtd-token: 'bf8640556a3ba3151e4e5602facc5ed982dd88c8'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - hunter:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/doctor/doctor.yaml b/jjb/doctor/doctor.yaml
deleted file mode 100644
index 1b86a3a67..000000000
--- a/jjb/doctor/doctor.yaml
+++ /dev/null
@@ -1,260 +0,0 @@
----
-- project:
- name: doctor
-
- project: '{name}'
- project-name: '{name}'
- project-pattern: '{project}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - hunter:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
- installer:
- - 'fuel':
- scenario: 'os-nosdn-nofeature-noha'
-
- arch:
- - 'x86_64'
- - 'aarch64'
-
- inspector:
- - 'sample'
- - 'congress'
-
- jobs:
- - 'doctor-verify-{stream}'
- - 'doctor-verify-fault_management-{installer}-{inspector}-{arch}-{stream}'
- - 'doctor-verify-maintenance-{installer}-{inspector}-{arch}-{stream}'
- - 'doctor-verify-all-{installer}-{inspector}-{arch}-{stream}'
-
-- job-template:
- name: 'doctor-verify-{stream}'
- disabled: '{obj:disabled}'
- project-type: 'multijob'
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'doctor-slave-parameter'
- scm:
- - git-scm-gerrit
- triggers:
- - 'doctor-verify':
- project: '{project}'
- branch: '{branch}'
- files: 'doctor_tests/**'
-
- builders:
- - shell: |
- #!/bin/bash
- # we do nothing here as the actual work is done
- # in the phase jobs
- echo "Triggering phase jobs!"
- - multijob:
- name: 'doctor-verify-fuel'
- execution-type: PARALLEL
- projects:
- - name: 'doctor-verify-fault_management-fuel-sample-x86_64-{stream}'
- predefined-parameters: |
- PROJECT=$PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- kill-phase-on: FAILURE
- git-revision: true
- - name: 'doctor-verify-maintenance-fuel-sample-x86_64-{stream}'
- predefined-parameters: |
- PROJECT=$PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- kill-phase-on: FAILURE
- git-revision: true
-
-- job-template:
- name: 'doctor-verify-fault_management-{installer}-{inspector}-{arch}-{stream}'
- disabled: '{obj:disabled}'
- node: 'zte-virtual4'
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 30
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - 'doctor-slave-parameter'
- - 'doctor-parameter':
- inspector: '{inspector}'
- scenario: '{scenario}'
- test_case: 'fault_management'
- admin_tool_type: 'fenix'
- app_manager_type: 'vnfm'
- scm:
- - git-scm-gerrit
- builders:
- - 'doctor-verify-installer-inspector-builders-macro'
- publishers:
- - 'doctor-verify-publishers-macro'
-
-- job-template:
- name: 'doctor-verify-maintenance-{installer}-{inspector}-{arch}-{stream}'
- disabled: '{obj:disabled}'
- node: 'nokia-pod1'
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 40
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - 'doctor-slave-parameter'
- - 'doctor-parameter':
- inspector: '{inspector}'
- scenario: '{scenario}'
- test_case: 'maintenance'
- admin_tool_type: 'fenix'
- app_manager_type: 'vnfm'
- scm:
- - git-scm-gerrit
- builders:
- - 'doctor-verify-installer-inspector-builders-macro'
- publishers:
- - 'doctor-verify-publishers-macro'
-
-- job-template:
- name: 'doctor-verify-all-{installer}-{inspector}-{arch}-{stream}'
- disabled: '{obj:disabled}'
- node: 'doctor-{installer}-{arch}'
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 50
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - 'doctor-slave-parameter'
- - 'doctor-parameter':
- inspector: '{inspector}'
- scenario: '{scenario}'
- test_case: 'all'
- admin_tool_type: 'fenix'
- app_manager_type: 'vnfm'
- scm:
- - git-scm-gerrit
- builders:
- - 'doctor-verify-installer-inspector-builders-macro'
- publishers:
- - 'doctor-verify-publishers-macro'
-
-
-# -------------------------------
-# parameter macros
-# -------------------------------
-- parameter:
- name: 'doctor-parameter'
- parameters:
- - string:
- name: INSPECTOR_TYPE
- default: '{inspector}'
- description: 'inspector component'
- - string:
- name: TEST_CASE
- default: '{test_case}'
- description: 'test case: all, fault_management or maintenance'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
- description: 'Scenario to deploy and test'
- - string:
- name: ADMIN_TOOL_TYPE
- default: '{admin_tool_type}'
- description: 'admin_tool_type: sample, fenix'
- - string:
- name: APP_MANAGER_TYPE
- default: '{app_manager_type}'
- description: 'app_manager_type: sample, vnfm'
-
-# -------------------------------
-# builder macros
-# -------------------------------
-
-- builder:
- name: 'doctor-verify-installer-inspector-builders-macro'
- builders:
- # yamllint disable rule:indentation
- - shell: !include-raw:
- - ./doctor-env-presetup.sh
- - ../../utils/fetch_os_creds.sh
- - shell: |
- #!/bin/bash
-
- # prepare the env for test
- . $HOME/opnfv-openrc.sh
- if [ -f $HOME/os_cacert ]; then
- export OS_CACERT=$HOME/os_cacert
- fi
- . $HOME/opnfv-installer.sh
-
- # run tox to trigger the test
- # the Jenkins user has no permission to send ICMP packets, hence sudo
- sudo -E tox -e py36
-
-
-# -------------------------------
-# publisher macros
-# -------------------------------
-- publisher:
- name: 'doctor-verify-publishers-macro'
- publishers:
- - archive:
- artifacts: 'doctor_tests/*.log'
- - email-jenkins-admins-on-failure
- - workspace-cleanup
-
-#####################################
-# trigger macros
-#####################################
-- trigger:
- name: 'doctor-verify'
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '{files}'
- skip-vote:
- successful: true
- failed: true
- unstable: true
- notbuilt: true
diff --git a/jjb/dovetail-webportal/dovetail-webportal-project-jobs.yaml b/jjb/dovetail-webportal/dovetail-webportal-project-jobs.yaml
deleted file mode 100644
index a579af6d6..000000000
--- a/jjb/dovetail-webportal/dovetail-webportal-project-jobs.yaml
+++ /dev/null
@@ -1,102 +0,0 @@
----
-###################################################
-# Non-CI jobs for the Dovetail project
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: dovetail-webportal-project-jobs
-
- project: 'dovetail-webportal'
-
- jobs:
- - 'dovetail-webportal-verify-{stream}'
- - 'dovetail-webportal-merge-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
-
-################################
-# job templates
-################################
-
-- job-template:
- name: 'dovetail-webportal-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- builders:
- - dovetail-webportal-unit-tests
-
-- job-template:
- name: 'dovetail-webportal-merge-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- builders:
- - dovetail-webportal-unit-tests
-
-################################
-# builders for dovetail project
-###############################
-- builder:
- name: dovetail-webportal-unit-tests
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
-
- tox
diff --git a/jjb/dovetail/dovetail-artifacts-upload.sh b/jjb/dovetail/dovetail-artifacts-upload.sh
deleted file mode 100755
index f1a9e7222..000000000
--- a/jjb/dovetail/dovetail-artifacts-upload.sh
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o pipefail
-
-echo "dovetail: pull and save the images"
-
-[[ -d ${CACHE_DIR} ]] || mkdir -p ${CACHE_DIR}
-
-cd ${CACHE_DIR}
-sudo docker pull ${DOCKER_REPO_NAME}:${DOCKER_TAG}
-sudo docker save -o ${STORE_FILE_NAME} ${DOCKER_REPO_NAME}:${DOCKER_TAG}
-sudo chmod og+rw ${STORE_FILE_NAME}
-
-OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-GS_UPLOAD_LOCATION="${STORE_URL}/${OPNFV_ARTIFACT_VERSION}"
-(
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_ARTIFACT_URL=$GS_UPLOAD_LOCATION"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
-) > $WORKSPACE/opnfv.properties
-source $WORKSPACE/opnfv.properties
-
-importkey () {
-# clone releng repository
-echo "Cloning releng repository..."
-[ -d releng ] && rm -rf releng
-git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null
-# This is where we import the signing key
-if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then
- source $WORKSPACE/releng/utils/gpg_import_key.sh
-fi
-}
-
-sign () {
-gpg2 -vvv --batch --yes --no-tty \
- --default-key opnfv-helpdesk@rt.linuxfoundation.org \
- --passphrase besteffort \
- --detach-sig ${CACHE_DIR}/${STORE_FILE_NAME}
-
-gsutil cp ${CACHE_DIR}/${STORE_FILE_NAME}.sig ${STORE_URL}/${STORE_FILE_NAME}.sig
-echo "signature Upload Complete!"
-}
-
-upload () {
-# log info to console
-echo "Uploading ${STORE_FILE_NAME} to artifact. This could take some time..."
-echo
-
-cd $WORKSPACE
-# upload artifact and additional files to google storage
-gsutil cp ${CACHE_DIR}/${STORE_FILE_NAME} \
-${STORE_URL}/${STORE_FILE_NAME} > gsutil.dockerfile.log 2>&1
-gsutil cp $WORKSPACE/opnfv.properties \
-${STORE_URL}/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
-gsutil cp $WORKSPACE/opnfv.properties \
- ${STORE_URL}/latest.properties > gsutil.latest.log 2>&1
-
-gsutil -m setmeta \
- -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- ${STORE_URL}/latest.properties \
- ${STORE_URL}/opnfv-$OPNFV_ARTIFACT_VERSION.properties > /dev/null 2>&1
-
-gsutil -m setmeta \
- -h "Cache-Control:private, max-age=0, no-transform" \
- ${STORE_URL}/${STORE_FILE_NAME} > /dev/null 2>&1
-
-# errexit is not enabled here because gsutil setmeta sometimes complains:
-#   BadRequestException: 400 Invalid argument
-# Instead, check whether the file was uploaded successfully to see if things are fine.
-gsutil ls ${STORE_URL}/${STORE_FILE_NAME} > /dev/null 2>&1
-if [[ $? -ne 0 ]]; then
- echo "Problem while uploading artifact!"
- exit 1
-fi
-
-echo "dovetail: uploading Done!"
-echo
-echo "--------------------------------------------------------"
-echo
-}
-
-#importkey
-#sign
-upload
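
Since the upload section above runs without errexit, the script relies on the trailing gsutil ls check to detect a failed upload. A minimal stand-alone sketch of that upload-then-verify pattern, assuming gsutil credentials are already configured and CACHE_DIR, STORE_URL and STORE_FILE_NAME are set as in the job parameters:

    #!/bin/bash
    # Sketch only: copy an artifact to Google Storage and fail loudly if it did not arrive.
    set -o pipefail

    gsutil cp "${CACHE_DIR}/${STORE_FILE_NAME}" "${STORE_URL}/${STORE_FILE_NAME}" > gsutil.upload.log 2>&1

    # gsutil setmeta can return spurious "400 Invalid argument" errors, so the upload
    # itself is re-checked instead of trusting the exit codes of the metadata calls.
    if ! gsutil ls "${STORE_URL}/${STORE_FILE_NAME}" > /dev/null 2>&1; then
        echo "Problem while uploading artifact!"
        exit 1
    fi
    echo "Upload verified: ${STORE_URL}/${STORE_FILE_NAME}"
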
diff --git a/jjb/dovetail/dovetail-artifacts-upload.yaml b/jjb/dovetail/dovetail-artifacts-upload.yaml
deleted file mode 100644
index 9a11c6e26..000000000
--- a/jjb/dovetail/dovetail-artifacts-upload.yaml
+++ /dev/null
@@ -1,115 +0,0 @@
----
-############################################
-# dovetail upload artifacts job
-############################################
-- project:
- name: dovetail-artifacts-upload
-
- project: 'dovetail'
-
- jobs:
- - 'dovetail-{image}-artifacts-upload-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
- image:
- - 'dovetail'
- - 'functest'
- - 'yardstick'
- - 'testapi'
- - 'mongo'
-
-#############################################
-# job template
-#############################################
-
-- job-template:
- name: 'dovetail-{image}-artifacts-upload-{stream}'
-
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - dovetail-parameter:
- gs-pathname: '{gs-pathname}'
- image: '{image}'
- branch: '{branch}'
-
- scm:
- - git-scm
-
- builders:
- - 'dovetail-builder-artifacts-upload'
- - 'dovetail-upload-artifacts-cache-cleanup'
- - 'dovetail-images-cleanup'
-
-####################
-# parameter macros
-####################
-- parameter:
- name: dovetail-parameter
- parameters:
- - string:
- name: CACHE_DIR
- default: $WORKSPACE/cache{gs-pathname}
- description: "the cache to store packages downloaded"
- - string:
- name: STORE_URL
- default: gs://artifacts.opnfv.org/dovetail{gs-pathname}
- description: "LF artifacts url for storage of dovetail packages"
- - string:
- name: DOCKER_REPO_NAME
- default: opnfv/{image}
- description: "docker repo name"
- - string:
- name: DOCKER_TAG
- default: latest
- description: "docker image tag of which will be uploaded to artifacts"
- - string:
- name: STORE_FILE_NAME
- default: image_{image}_{branch}_$BUILD_ID.docker
- description: "stored file name"
-
-####################################
-# builders for dovetail project
-####################################
-- builder:
- name: dovetail-builder-artifacts-upload
- builders:
- - shell:
- !include-raw: ./dovetail-artifacts-upload.sh
-
-- builder:
- name: dovetail-upload-artifacts-cache-cleanup
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
-
- echo "Dovetail: cleanup cache used for storage downloaded packages"
-
- /bin/rm -rf $CACHE_DIR
-
-- builder:
- name: dovetail-images-cleanup
- builders:
- - shell:
- !include-raw: ./dovetail-cleanup.sh
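
The dovetail-parameter macro above defines the environment that dovetail-artifacts-upload.sh expects. A hedged sketch of running the script by hand, outside Jenkins, with illustrative values (the values below are assumptions, not the CI defaults for every job):

    #!/bin/bash
    # Run from a dovetail git clone; the script reads git metadata and uses sudo docker.
    export WORKSPACE=$(pwd)
    export BUILD_URL="manual-run"                       # normally provided by Jenkins
    export CACHE_DIR=${WORKSPACE}/cache                 # from the CACHE_DIR parameter
    export STORE_URL=gs://artifacts.opnfv.org/dovetail  # from the STORE_URL parameter
    export DOCKER_REPO_NAME=opnfv/dovetail              # one of the {image} values above
    export DOCKER_TAG=latest
    export STORE_FILE_NAME=image_dovetail_master_manual.docker

    bash jjb/dovetail/dovetail-artifacts-upload.sh
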
diff --git a/jjb/dovetail/dovetail-ci-jobs.yaml b/jjb/dovetail/dovetail-ci-jobs.yaml
deleted file mode 100644
index 3e5c052f6..000000000
--- a/jjb/dovetail/dovetail-ci-jobs.yaml
+++ /dev/null
@@ -1,300 +0,0 @@
----
-###################################
-# job configuration for dovetail
-###################################
-- project:
- name: dovetail
-
- project: '{name}'
-
- # --------------------------------------
- # BRANCH ANCHORS
- # --------------------------------------
- # 1) The stream/branch here represents the SUT (System Under Test) stream/branch.
- # 2) docker-tag is the dovetail docker tag (only master exists for now, so 'latest' is used).
- #    The dovetail stream maps one-to-one to the dovetail docker-tag;
- #    dovetail is not synced with the A/B/C releases.
- master: &master
- stream: master
- branch: '{stream}'
- dovetail-branch: '{stream}'
- gs-pathname: ''
- docker-tag: 'latest'
- disabled: true
- hunter: &hunter
- stream: hunter
- branch: 'stable/{stream}'
- dovetail-branch: 'master'
- gs-pathname: '/{stream}'
- docker-tag: 'latest'
-
- # ----------------------------------
- # POD, PLATFORM, AND BRANCH MAPPING
- # ----------------------------------
- # CI PODs
- # This section should only contain the SUTs
- # that have been switched using labels for slaves
- # -----------------------------------------------
- # The PODs and SUTs listed here are just examples that allow the
- # dovetail tool to run; there are more ways besides CI to run it.
- # PODs and SUTs will be added/adjusted as needed.
- pod:
- # fuel CI PODs
- - baremetal:
- slave-label: fuel-baremetal
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - virtual:
- slave-label: fuel-virtual
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - baremetal:
- slave-label: fuel-baremetal
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *hunter
- - virtual:
- slave-label: fuel-virtual
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *hunter
- # -------------------------------
- # Installers not using labels
- # CI PODs
- # This section should only contain the installers
- # that have not been switched using labels for slaves
- # -------------------------------
- # apex PODs
- - virtual:
- slave-label: apex-virtual-master
- SUT: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - baremetal:
- slave-label: apex-baremetal-master
- SUT: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - virtual:
- slave-label: apex-virtual-master
- SUT: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *hunter
- - baremetal:
- slave-label: apex-baremetal-master
- SUT: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *hunter
- # armband CI PODs
- - armband-baremetal:
- slave-label: armband-baremetal
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - armband-virtual:
- slave-label: armband-virtual
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- # -------------------------------
- # Non-CI PODs
- # -------------------------------
- - zte-pod1:
- slave-label: zte-pod1
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - zte-pod2:
- slave-label: zte-pod2
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - zte-pod3:
- slave-label: zte-pod3
- SUT: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
-
- # -------------------------------
- testsuite:
- - 'default'
- - 'proposed_tests'
-
- testarea:
- - 'mandatory'
- - 'optional'
-
- jobs:
- - 'dovetail-{SUT}-{pod}-{testsuite}-{stream}'
- - 'dovetail-{SUT}-{pod}-{testsuite}-{testarea}-{stream}'
-
-################################
-# job templates
-################################
-- job-template:
- name: 'dovetail-{SUT}-{pod}-{testsuite}-{stream}'
-
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: 300
- abort: true
- - fix-workspace-permissions
-
- triggers:
- - '{auto-trigger-name}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{dovetail-branch}'
- - '{SUT}-defaults'
- - '{slave-label}-defaults'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: 'Tag to pull dovetail docker image'
- - string:
- name: CI_DEBUG
- default: 'true'
- description: "Show debug output information"
- - string:
- name: TESTSUITE
- default: '{testsuite}'
- description: "dovetail testsuite to run"
- - string:
- name: TESTAREA
- default: 'all'
- description: "dovetail testarea to run"
- - string:
- name: DOVETAIL_REPO_DIR
- default: "/home/opnfv/dovetail"
- description: "Directory where the dovetail repository is cloned"
- - string:
- name: SUT_BRANCH
- default: '{branch}'
- description: "SUT branch"
-
- scm:
- - git-scm
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - 'dovetail-cleanup'
- - 'dovetail-run'
-
- publishers:
- - archive:
- artifacts: 'results/**/*'
- allow-empty: true
- fingerprint: true
- - email-jenkins-admins-on-failure
-
-- job-template:
- name: 'dovetail-{SUT}-{pod}-{testsuite}-{testarea}-{stream}'
-
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: 300
- abort: true
- - fix-workspace-permissions
-
- triggers:
- - '{auto-trigger-name}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{dovetail-branch}'
- - '{SUT}-defaults'
- - '{slave-label}-defaults'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: 'Tag to pull dovetail docker image'
- - string:
- name: CI_DEBUG
- default: 'true'
- description: "Show debug output information"
- - string:
- name: TESTSUITE
- default: '{testsuite}'
- description: "dovetail testsuite to run"
- - string:
- name: TESTAREA
- default: '{testarea}'
- description: "dovetail testarea to run"
- - string:
- name: DOVETAIL_REPO_DIR
- default: "/home/opnfv/dovetail"
- description: "Directory where the dovetail repository is cloned"
- - string:
- name: SUT_BRANCH
- default: '{branch}'
- description: "SUT branch"
-
- scm:
- - git-scm
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - 'dovetail-cleanup'
- - 'dovetail-run'
-
- publishers:
- - archive:
- artifacts: 'results/**/*'
- allow-empty: true
- fingerprint: true
- - email-jenkins-admins-on-failure
-
-# -------------------------
-# builder macros
-# -------------------------
-- builder:
- name: dovetail-run
- builders:
- - shell:
- !include-raw: ./dovetail-run.sh
-
-- builder:
- name: dovetail-cleanup
- builders:
- - shell:
- !include-raw: ./dovetail-cleanup.sh
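
The TESTSUITE and TESTAREA parameters defined above are translated into dovetail CLI options by dovetail-run.sh (removed later in this change). A condensed sketch of that mapping, for reference only:

    # Condensed from dovetail-run.sh; 'default' suite and 'all' area mean "no extra flag".
    [[ ${TESTSUITE} == 'default' ]] && testsuite='' || testsuite="--testsuite ${TESTSUITE}"

    case "${TESTAREA}" in
        mandatory) testarea='--mandatory' ;;
        optional)  testarea='--optional' ;;
        all)       testarea='' ;;
        *)         testarea="--testarea ${TESTAREA}" ;;
    esac

    dovetail run ${testsuite} ${testarea} --deploy-scenario "${DEPLOY_SCENARIO}" -d -r --opnfv-ci
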
diff --git a/jjb/dovetail/dovetail-cleanup.sh b/jjb/dovetail/dovetail-cleanup.sh
deleted file mode 100755
index 2d66fe022..000000000
--- a/jjb/dovetail/dovetail-cleanup.sh
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
-# Clean up docker images of dependent projects that have no containers and whose tag is <none>
-clean_images=(opnfv/functest opnfv/yardstick opnfv/testapi mongo)
-for clean_image in "${clean_images[@]}"; do
- dangling_images=($(docker images -f "dangling=true" | grep ${clean_image} | awk '{print $3}'))
- if [[ -n ${dangling_images} ]]; then
- for image_id in "${dangling_images[@]}"; do
- echo "Removing image $image_id, which has no containers and image tag is None"
- docker rmi $image_id >${redirect}
- done
- fi
-done
-
-echo "Remove dovetail images with tag None and containers with these images ..."
-dangling_images=($(docker images -f "dangling=true" | grep opnfv/dovetail | awk '{print $3}'))
-if [[ -n ${dangling_images} ]]; then
- for image_id in "${dangling_images[@]}"; do
- echo "Removing image $image_id with tag None and its related containers"
- docker ps -a | grep $image_id | awk '{print $1}'| xargs docker rm -f >${redirect}
- docker rmi $image_id >${redirect}
- done
-fi
-
-echo "Cleaning up dovetail docker containers..."
-if [[ ! -z $(docker ps -a | grep opnfv/dovetail) ]]; then
- echo "Removing existing opnfv/dovetail containers..."
- docker ps -a | grep opnfv/dovetail | awk '{print $1}' | xargs docker rm -f >${redirect}
-fi
-
-#echo "Remove dovetail existing images if exist..."
-#if [[ ! -z $(docker images | grep opnfv/dovetail) ]]; then
-# echo "Docker images to remove:"
-# docker images | head -1 && docker images | grep opnfv/dovetail >${redirect}
-# image_tags=($(docker images | grep opnfv/dovetail | awk '{print $2}'))
-# for tag in "${image_tags[@]}"; do
-# echo "Removing docker image opnfv/dovetail:$tag..."
-# docker rmi opnfv/dovetail:$tag >${redirect}
-# done
-#fi
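
The cleanup script above finds dangling (<none>-tagged) images by grepping docker images output. A possible alternative, shown only as a sketch, is to let docker's own filters do the work; note that docker image prune removes every dangling image, not just those of the repositories listed above:

    # Remove all dangling images in one call (broader than the per-repository loop above):
    docker image prune -f > /dev/null

    # Or keep an explicit loop while avoiding the grep/awk parsing:
    for image_id in $(docker images -q -f "dangling=true"); do
        echo "Removing dangling image ${image_id}"
        docker rmi "${image_id}" > /dev/null
    done
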
diff --git a/jjb/dovetail/dovetail-project-jobs.yaml b/jjb/dovetail/dovetail-project-jobs.yaml
deleted file mode 100644
index 2e9652e68..000000000
--- a/jjb/dovetail/dovetail-project-jobs.yaml
+++ /dev/null
@@ -1,116 +0,0 @@
----
-###################################################
-# Non-CI jobs for the Dovetail project
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: dovetail-project-jobs
-
- project: 'dovetail'
-
- jobs:
- - 'dovetail-verify-{stream}'
- - 'dovetail-merge-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - hunter:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-################################
-# job templates
-################################
-
-- job-template:
- name: 'dovetail-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- builders:
- - dovetail-unit-tests
-
-- job-template:
- name: 'dovetail-merge-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- builders:
- - dovetail-unit-tests
-
-################################
-# builders for dovetail project
-###############################
-- builder:
- name: dovetail-hello-world
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
-
- echo "hello world"
-
-
-- builder:
- name: dovetail-unit-tests
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
-
- tox
diff --git a/jjb/dovetail/dovetail-rtd-jobs.yaml b/jjb/dovetail/dovetail-rtd-jobs.yaml
deleted file mode 100644
index 395c75adb..000000000
--- a/jjb/dovetail/dovetail-rtd-jobs.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-- project:
- name: dovetail-rtd
- project: dovetail
- project-name: dovetail
-
- project-pattern: 'dovetail'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-dovetail/47364/'
- rtd-token: '47df9d7358b153666c2cf5cc139cd3a65d135688'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - hunter:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/dovetail/dovetail-run.sh b/jjb/dovetail/dovetail-run.sh
deleted file mode 100755
index a52019e41..000000000
--- a/jjb/dovetail/dovetail-run.sh
+++ /dev/null
@@ -1,451 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-# The term INSTALLER is used in the community; this is just an example of how to run dovetail.
-# Multiple platforms are supported.
-
-set -e
-[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
-DEPLOY_TYPE=baremetal
-[[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt
-
-DOVETAIL_HOME=${WORKSPACE}/ovp
-[ -d ${DOVETAIL_HOME} ] && sudo rm -rf ${DOVETAIL_HOME}
-
-mkdir -p ${DOVETAIL_HOME}
-
-DOVETAIL_CONFIG=${DOVETAIL_HOME}/pre_config
-mkdir -p ${DOVETAIL_CONFIG}
-
-DOVETAIL_IMAGES=${DOVETAIL_HOME}/images
-mkdir -p ${DOVETAIL_IMAGES}
-
-OPENRC=${DOVETAIL_CONFIG}/env_config.sh
-CACERT=${DOVETAIL_CONFIG}/os_cacert
-POD=${DOVETAIL_CONFIG}/pod.yaml
-
-ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
-
-sshkey=""
-
-TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results
-
-check_file_exists() {
- if [[ -f $1 ]]; then
- echo 0
- else
- echo 1
- fi
-}
-
-get_cred_file_with_scripts() {
- echo "INFO: clone releng repo..."
- releng_repo=${WORKSPACE}/releng
- [ -d ${releng_repo} ] && sudo rm -rf ${releng_repo}
- git clone https://gerrit.opnfv.org/gerrit/releng ${releng_repo} >/dev/null
-
- echo "INFO: clone pharos repo..."
- pharos_repo=${WORKSPACE}/pharos
- [ -d ${pharos_repo} ] && sudo rm -rf ${pharos_repo}
- git clone https://git.opnfv.org/pharos ${pharos_repo} >/dev/null
-
- echo "INFO: SUT branch is $SUT_BRANCH"
- echo "INFO: dovetail branch is $BRANCH"
- BRANCH_BACKUP=$BRANCH
- export BRANCH=$SUT_BRANCH
- cmd="${releng_repo}/utils/fetch_os_creds.sh -d ${OPENRC} -i ${INSTALLER_TYPE} -a ${INSTALLER_IP} -o ${CACERT} >${redirect}"
- echo "INFO: cmd is ${cmd}"
- ${cmd}
- export BRANCH=$BRANCH_BACKUP
-}
-
-get_apex_cred_file() {
- instack_mac=$(sudo virsh domiflist undercloud | grep default | \
- grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
- INSTALLER_IP=$(/usr/sbin/arp -e | grep ${instack_mac} | awk {'print $1'})
- sshkey="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
- if [[ -n $(sudo iptables -L FORWARD |grep "REJECT"|grep "reject-with icmp-port-unreachable") ]]; then
- # Note: this happens only on opnfv-lf-pod1
- sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
- sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
- fi
- get_cred_file_with_scripts
-}
-
-get_fuel_cred_file() {
- get_cred_file_with_scripts
-}
-
-get_joid_cred_file() {
- # On a production lab the credentials may be retrieved dynamically;
- # they are on the jumphost, always in the same folder.
- sudo cp $LAB_CONFIG/admin-openrc $OPENRC
-}
-
-change_cred_file_cacert_path() {
- if [[ ${INSTALLER_TYPE} == "apex" ]]; then
- echo "INFO: apex doesn't need to set OS_CACERT."
- return 0
- fi
- exists=`check_file_exists ${CACERT}`
- if [[ $exists == 0 ]]; then
- echo "INFO: set ${INSTALLER_TYPE} openstack cacert file to be ${CACERT}"
- if [[ ${INSTALLER_TYPE} == "fuel" ]]; then
- sed -i "s#/etc/ssl/certs/mcp_os_cacert#${CACERT}#g" ${OPENRC}
- fi
- else
- echo "ERROR: cannot find file ${CACERT}. Please check if it exists."
- sudo ls -al ${DOVETAIL_CONFIG}
- exit 1
- fi
-}
-
-change_cred_file_ext_net() {
- exists=`check_file_exists ${OPENRC}`
- if [[ $exists == 0 ]]; then
- echo "export EXTERNAL_NETWORK=${EXTERNAL_NETWORK}" >> ${OPENRC}
- else
- echo "ERROR: cannot find file $OPENRC. Please check if it exists."
- sudo ls -al ${DOVETAIL_CONFIG}
- exit 1
- fi
-}
-
-get_cred_file() {
- if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
- get_apex_cred_file
- elif [[ ${INSTALLER_TYPE} == 'fuel' ]]; then
- get_fuel_cred_file
- elif [[ ${INSTALLER_TYPE} == 'joid' ]]; then
- get_joid_cred_file
- fi
-
- exists=`check_file_exists ${OPENRC}`
- if [[ $exists == 0 ]]; then
- echo "INFO: original openstack credentials file is:"
- cat $OPENRC
- echo "INFO: change cacert file path in credentials file"
- change_cred_file_cacert_path
- #echo "INFO: set external network in credentials file"
- #change_cred_file_ext_net
- echo "INFO: final openstack credentials file is:"
- cat $OPENRC
- else
- echo "ERROR: cannot find file $OPENRC. Please check if it exists."
- sudo ls -al ${DOVETAIL_CONFIG}
- exit 1
- fi
-}
-
-get_fuel_baremetal_pod_file() {
- fuel_ctl_ssh_options="${ssh_options} -i ${SSH_KEY}"
- ssh_user="ubuntu"
- fuel_ctl_ip=$(ssh 2>/dev/null ${fuel_ctl_ssh_options} "${ssh_user}@${INSTALLER_IP}" \
- "sudo salt 'cfg*' pillar.get _param:openstack_control_address --out text| \
- cut -f2 -d' '")
- fuel_cmp_ip=$(ssh 2>/dev/null ${fuel_ctl_ssh_options} "${ssh_user}@${INSTALLER_IP}" \
- "sudo salt 'cmp001*' pillar.get _param:openstack_control_address --out text| \
- cut -f2 -d' '")
- fuel_dbs_ip=$(ssh 2>/dev/null ${fuel_ctl_ssh_options} "${ssh_user}@${INSTALLER_IP}" \
- "sudo salt 'dbs01*' pillar.get _param:openstack_database_node01_address --out text| \
- cut -f2 -d' '")
- fuel_msg_ip=$(ssh 2>/dev/null ${fuel_ctl_ssh_options} "${ssh_user}@${INSTALLER_IP}" \
- "sudo salt 'msg01*' pillar.get _param:openstack_message_queue_node01_address --out text| \
- cut -f2 -d' '")
- ipmi_index=$(ssh 2>/dev/null ${fuel_ctl_ssh_options} "${ssh_user}@${INSTALLER_IP}" \
- "sudo salt 'ctl*' network.ip_addrs cidr=${fuel_ctl_ip} --out text | grep ${fuel_ctl_ip} | cut -c 5")
-
- organization="$(cut -d'-' -f1 <<< "${NODE_NAME}")"
- pod_name="$(cut -d'-' -f2 <<< "${NODE_NAME}")"
- pdf_file=${pharos_repo}/labs/${organization}/${pod_name}.yaml
- ipmiIp=$(cat ${pdf_file} | shyaml get-value nodes.$[ipmi_index-1].remote_management.address)
- ipmiIp="$(cut -d'/' -f1 <<< "${ipmiIp}")"
- ipmiPass=$(cat ${pdf_file} | shyaml get-value nodes.$[ipmi_index-1].remote_management.pass)
- ipmiUser=$(cat ${pdf_file} | shyaml get-value nodes.$[ipmi_index-1].remote_management.user)
- [[ $ipmiUser == ENC* ]] && ipmiUser=$(eyaml decrypt -s ${ipmiUser//[[:blank:]]/})
- [[ $ipmiPass == ENC* ]] && ipmiPass=$(eyaml decrypt -s ${ipmiPass//[[:blank:]]/})
-
- cat << EOF >${POD}
-nodes:
-- {ip: ${INSTALLER_IP}, name: node0, key_filename: ${DOVETAIL_CONFIG}/id_rsa,
- role: Jumpserver, user: ${ssh_user}}
-- {ip: ${fuel_ctl_ip}, name: node1, key_filename: ${DOVETAIL_CONFIG}/id_rsa,
- role: controller, user: ${ssh_user}, ipmi_ip: ${ipmiIp}, ipmi_user: ${ipmiUser}, ipmi_password: ${ipmiPass}}
-- {ip: ${fuel_msg_ip}, name: msg01, key_filename: ${DOVETAIL_CONFIG}/id_rsa, role: controller, user: ${ssh_user}}
-- {ip: ${fuel_cmp_ip}, name: cmp01, key_filename: ${DOVETAIL_CONFIG}/id_rsa, role: controller, user: ${ssh_user}}
-- {ip: ${fuel_dbs_ip}, name: dbs01, key_filename: ${DOVETAIL_CONFIG}/id_rsa, role: controller, user: ${ssh_user}}
-EOF
-}
-
-get_pod_file_with_scripts() {
- set +e
- sudo pip install virtualenv
-
- cd ${releng_repo}/modules
- sudo virtualenv venv
- source venv/bin/activate
- sudo pip install -e ./ >/dev/null
- sudo pip install netaddr
-
- if [[ ${INSTALLER_TYPE} == fuel ]]; then
- options="-u ubuntu -k /root/.ssh/id_rsa"
- elif [[ ${INSTALLER_TYPE} == apex ]]; then
- options="-u stack -k /root/.ssh/id_rsa"
- elif [[ ${INSTALLER_TYPE} == daisy ]]; then
- options="-u root -p r00tme"
- else
- echo "WARNING: Don't support to generate ${POD} on ${INSTALLER_TYPE} currently."
- echo "WARNING: HA test cases may not run properly."
- fi
-
- cmd="sudo python ${releng_repo}/utils/create_pod_file.py -t ${INSTALLER_TYPE} \
- -i ${INSTALLER_IP} ${options} -f ${POD} \
- -s ${DOVETAIL_CONFIG}/id_rsa"
- echo "INFO: cmd is ${cmd}"
- ${cmd}
-
- deactivate
- set -e
- cd ${WORKSPACE}
-}
-
-change_apex_pod_file_process_info() {
- cat << EOF >> ${POD}
-process_info:
-- {testcase_name: yardstick.ha.rabbitmq, attack_process: rabbitmq_server}
-- {testcase_name: yardstick.ha.cinder_api, attack_process: cinder_wsgi}
-EOF
-}
-
-change_fuel_pod_file_process_info() {
- cat << EOF >> ${POD}
-process_info:
-- {testcase_name: yardstick.ha.cinder_api, attack_process: cinder-wsgi}
-- {testcase_name: yardstick.ha.rabbitmq, attack_process: rabbitmq_server, attack_host: msg01}
-- {testcase_name: yardstick.ha.neutron_l3_agent, attack_process: neutron-l3-agent, attack_host: cmp01}
-- {testcase_name: yardstick.ha.database, attack_process: mysqld, attack_host: dbs01}
-EOF
-}
-
-change_pod_file_process_info() {
- sudo chmod 666 ${POD}
- echo "INFO: adapt process info for $INSTALLER_TYPE ..."
- if [ "$INSTALLER_TYPE" == "apex" ]; then
- change_apex_pod_file_process_info
- elif [ "$INSTALLER_TYPE" == "fuel" ]; then
- change_fuel_pod_file_process_info
- fi
-}
-
-get_pod_file() {
- # These packages are used for parsing YAML files and decrypting the IPMI user and password.
- sudo pip install shyaml
- sudo yum install -y rubygems || sudo apt-get install -y ruby
- sudo gem install hiera-eyaml
- if [[ ${INSTALLER_TYPE} == 'fuel' && ${DEPLOY_TYPE} == 'baremetal' ]]; then
- get_fuel_baremetal_pod_file
- fi
-
- exists=`check_file_exists ${POD}`
- if [[ $exists == 1 ]]; then
- get_pod_file_with_scripts
- fi
-
- exists=`check_file_exists ${POD}`
- if [[ $exists == 0 ]]; then
- change_pod_file_process_info
- else
- echo "ERROR: cannot find file ${POD}. Please check if it exists."
- sudo ls -al ${DOVETAIL_CONFIG}
- exit 1
- fi
-
- echo "INFO: file ${POD} is:"
- cat ${POD}
-}
-
-get_cred_file
-get_pod_file
-
-if [ "$INSTALLER_TYPE" == "fuel" ]; then
- if [[ "${SUT_BRANCH}" =~ "danube" ]]; then
- echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
- sshpass -p r00tme sudo scp $ssh_options root@${INSTALLER_IP}:~/.ssh/id_rsa ${DOVETAIL_CONFIG}/id_rsa
- else
- cp ${SSH_KEY} ${DOVETAIL_CONFIG}/id_rsa
- fi
-fi
-
-if [ "$INSTALLER_TYPE" == "apex" ]; then
- echo "Fetching id_rsa file from jump_server $INSTALLER_IP..."
- sudo scp $ssh_options stack@${INSTALLER_IP}:~/.ssh/id_rsa ${DOVETAIL_CONFIG}/id_rsa
-fi
-
-if [ "$INSTALLER_TYPE" == "daisy" ]; then
- echo "Fetching id_dsa file from jump_server $INSTALLER_IP..."
- sshpass -p r00tme sudo scp $ssh_options root@${INSTALLER_IP}:~/.ssh/id_dsa ${DOVETAIL_CONFIG}/id_rsa
-fi
-
-
-image_path=${HOME}/opnfv/dovetail/images
-if [[ ! -d ${image_path} ]]; then
- mkdir -p ${image_path}
-fi
-# The sdnvpn test case needs this image to be downloaded before running
-ubuntu_image=${image_path}/ubuntu-16.04-server-cloudimg-amd64-disk1.img
-if [[ ! -f ${ubuntu_image} ]]; then
- echo "Download image ubuntu-16.04-server-cloudimg-amd64-disk1.img ..."
- wget -q -nc https://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img -P ${image_path}
-fi
-sudo cp ${ubuntu_image} ${DOVETAIL_IMAGES}
-
-# yardstick and bottlenecks need this image to be downloaded before running
-cirros_image=${image_path}/cirros-0.3.5-x86_64-disk.img
-if [[ ! -f ${cirros_image} ]]; then
- echo "Download image cirros-0.3.5-x86_64-disk.img ..."
- wget -q -nc http://download.cirros-cloud.net/0.3.5/cirros-0.3.5-x86_64-disk.img -P ${image_path}
-fi
-sudo cp ${cirros_image} ${DOVETAIL_IMAGES}
-
-# functest needs this image to be downloaded before running
-cirros_image=${image_path}/cirros-0.4.0-x86_64-disk.img
-if [[ ! -f ${cirros_image} ]]; then
- echo "Download image cirros-0.4.0-x86_64-disk.img ..."
- wget -q -nc http://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img -P ${image_path}
-fi
-sudo cp ${cirros_image} ${DOVETAIL_IMAGES}
-
-# The snaps_smoke test case needs this image to be downloaded before running
-ubuntu14_image=${image_path}/ubuntu-14.04-server-cloudimg-amd64-disk1.img
-if [[ ! -f ${ubuntu14_image} ]]; then
- echo "Download image ubuntu-14.04-server-cloudimg-amd64-disk1.img ..."
- wget -q -nc https://cloud-images.ubuntu.com/releases/14.04/release/ubuntu-14.04-server-cloudimg-amd64-disk1.img -P ${image_path}
-fi
-sudo cp ${ubuntu14_image} ${DOVETAIL_IMAGES}
-
-# The cloudify_ims test case needs these two images to be downloaded before running
-cloudify_image=${image_path}/cloudify-manager-premium-4.0.1.qcow2
-if [[ ! -f ${cloudify_image} ]]; then
- echo "Download image cloudify-manager-premium-4.0.1.qcow2 ..."
- wget -q -nc http://repository.cloudifysource.org/cloudify/4.0.1/sp-release/cloudify-manager-premium-4.0.1.qcow2 -P ${image_path}
-fi
-sudo cp ${cloudify_image} ${DOVETAIL_IMAGES}
-trusty_image=${image_path}/trusty-server-cloudimg-amd64-disk1.img
-if [[ ! -f ${trusty_image} ]]; then
- echo "Download image trusty-server-cloudimg-amd64-disk1.img ..."
- wget -q -nc http://cloud-images.ubuntu.com/trusty/current/trusty-server-cloudimg-amd64-disk1.img -P ${image_path}
-fi
-sudo cp ${trusty_image} ${DOVETAIL_IMAGES}
-
-opts="--privileged=true -id"
-
-docker_volume="-v /var/run/docker.sock:/var/run/docker.sock"
-dovetail_home_volume="-v ${DOVETAIL_HOME}:${DOVETAIL_HOME}"
-
-# Pull the image with the correct tag
-DOCKER_REPO='opnfv/dovetail'
-if [ "$(uname -m)" = 'aarch64' ]; then
- DOCKER_REPO="${DOCKER_REPO}_$(uname -m)"
- DOCKER_TAG="latest"
-fi
-
-echo "Dovetail: Pulling image ${DOCKER_REPO}:${DOCKER_TAG}"
-docker pull ${DOCKER_REPO}:$DOCKER_TAG >$redirect
-
-cmd="docker run ${opts} -e DOVETAIL_HOME=${DOVETAIL_HOME} -e INSTALLER_TYPE=${INSTALLER_TYPE} \
- -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} -e NODE_NAME=${NODE_NAME} -e BUILD_TAG=${BUILD_TAG} \
- -e TEST_DB_URL=${TEST_DB_URL} -e VERSION=${SUT_BRANCH} \
- ${docker_volume} ${dovetail_home_volume} \
- ${sshkey} ${DOCKER_REPO}:${DOCKER_TAG} /bin/bash"
-echo "Dovetail: running docker run command: ${cmd}"
-${cmd} >${redirect}
-sleep 5
-container_id=$(docker ps | grep "${DOCKER_REPO}:${DOCKER_TAG}" | awk '{print $1}' | head -1)
-echo "Container ID=${container_id}"
-if [ -z ${container_id} ]; then
- echo "Cannot find ${DOCKER_REPO} container ID ${container_id}. Please check if it exists."
- docker ps -a
- exit 1
-fi
-echo "Container Start: docker start ${container_id}"
-docker start ${container_id}
-sleep 5
-docker ps >${redirect}
-if [ $(docker ps | grep "${DOCKER_REPO}:${DOCKER_TAG}" | wc -l) == 0 ]; then
- echo "The container ${DOCKER_REPO} with ID=${container_id} has not been properly started. Exiting..."
- exit 1
-fi
-
-# Modify tempest_conf.yaml file
-tempest_conf_file=${DOVETAIL_CONFIG}/tempest_conf.yaml
-if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
- volume_device='vdb'
-else
- volume_device='vdc'
-fi
-
-cat << EOF >$tempest_conf_file
-
-compute:
- min_compute_nodes: 2
- volume_device_name: ${volume_device}
- max_microversion: 2.65
-
-EOF
-
-echo "${tempest_conf_file}..."
-cat ${tempest_conf_file}
-
-cp_tempest_cmd="docker cp ${DOVETAIL_CONFIG}/tempest_conf.yaml $container_id:/home/opnfv/dovetail/dovetail/userconfig"
-echo "exec command: ${cp_tempest_cmd}"
-$cp_tempest_cmd
-
-if [[ ${TESTSUITE} == 'default' ]]; then
- testsuite=''
-else
- testsuite="--testsuite ${TESTSUITE}"
-fi
-
-if [[ ${TESTAREA} == 'mandatory' ]]; then
- testarea='--mandatory'
-elif [[ ${TESTAREA} == 'optional' ]]; then
- testarea="--optional"
-elif [[ ${TESTAREA} == 'all' ]]; then
- testarea=""
-else
- testarea="--testarea ${TESTAREA}"
-fi
-
-run_cmd="dovetail run ${testsuite} ${testarea} --deploy-scenario ${DEPLOY_SCENARIO} -d -r --opnfv-ci"
-echo "Container exec command: ${run_cmd}"
-docker exec $container_id ${run_cmd}
-
-sudo cp -r ${DOVETAIL_HOME}/results ./
-result_package=$(find ${DOVETAIL_HOME} -name 'logs_*')
-echo "Results package is ${result_package}"
-for item in ${result_package};
-do
- sudo mv ${item} ./results
-done
-
-# Make sure the owner of the results files copied above is the current user
-echo "Change owner of result files ..."
-CURRENT_USER=${SUDO_USER:-$USER}
-PRIMARY_GROUP=$(id -gn $CURRENT_USER)
-echo "Current user is ${CURRENT_USER}, group is ${PRIMARY_GROUP}"
-sudo chown -R ${CURRENT_USER}:${PRIMARY_GROUP} ./results
-
-# Remove useless files to save disk space
-sudo rm -rf ./results/workspace
-sudo rm -f ./results/yardstick.img
-sudo rm -f ./results/bottlenecks/tmp*
-
-echo "Dovetail: done!"
-
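
The image preparation section of the script above repeats the same download-then-copy block for six images. A refactoring sketch that keeps the same behaviour (URLs taken from the script; the fetch_image helper name is illustrative):

    # Sketch only: same behaviour as the per-image blocks above, expressed as a loop.
    fetch_image() {
        local url=$1
        local file=${image_path}/$(basename "${url}")
        if [[ ! -f ${file} ]]; then
            echo "Download image $(basename "${url}") ..."
            wget -q -nc "${url}" -P "${image_path}"
        fi
        sudo cp "${file}" "${DOVETAIL_IMAGES}"
    }

    fetch_image https://artifacts.opnfv.org/sdnvpn/ubuntu-16.04-server-cloudimg-amd64-disk1.img
    fetch_image http://download.cirros-cloud.net/0.3.5/cirros-0.3.5-x86_64-disk.img
    fetch_image http://download.cirros-cloud.net/0.4.0/cirros-0.4.0-x86_64-disk.img
    fetch_image https://cloud-images.ubuntu.com/releases/14.04/release/ubuntu-14.04-server-cloudimg-amd64-disk1.img
    fetch_image http://repository.cloudifysource.org/cloudify/4.0.1/sp-release/cloudify-manager-premium-4.0.1.qcow2
    fetch_image http://cloud-images.ubuntu.com/trusty/current/trusty-server-cloudimg-amd64-disk1.img
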
diff --git a/jjb/edgecloud/edgecloud-rtd-jobs.yaml b/jjb/edgecloud/edgecloud-rtd-jobs.yaml
deleted file mode 100644
index 41b013716..000000000
--- a/jjb/edgecloud/edgecloud-rtd-jobs.yaml
+++ /dev/null
@@ -1,24 +0,0 @@
----
-- project:
- name: edgecloud-rtd
- project: edgecloud
- project-name: edgecloud
-
- project-pattern: 'edgecloud'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-edgecloud/52895/'
- rtd-token: '47989bec8e8da44ab2f33491cd6031f0411d319b'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - hunter: &hunter
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- - gambia:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/edgecloud/edgecloud-views.yaml b/jjb/edgecloud/edgecloud-views.yaml
deleted file mode 100644
index 2e865ddd0..000000000
--- a/jjb/edgecloud/edgecloud-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: edgecloud-rtd-view
- views:
- - project-view
- project-name: edgecloud
diff --git a/jjb/fds/fds-rtd-jobs.yaml b/jjb/fds/fds-rtd-jobs.yaml
deleted file mode 100644
index 9bf47b956..000000000
--- a/jjb/fds/fds-rtd-jobs.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
----
-- project:
- name: fds-rtd
- project: fds
- project-name: fds
-
- project-pattern: 'fds'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-fds/47367/'
- rtd-token: '756989c50a7c8f3350c4943f3d39a73762a4cd85'
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/fds/fds-views.yaml b/jjb/fds/fds-views.yaml
deleted file mode 100644
index d06b1af0b..000000000
--- a/jjb/fds/fds-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: fds-view
- views:
- - project-view
- project-name: fds
diff --git a/jjb/fuel/fuel-daily-jobs.yaml b/jjb/fuel/fuel-daily-jobs.yaml
deleted file mode 100644
index 5a63d093e..000000000
--- a/jjb/fuel/fuel-daily-jobs.yaml
+++ /dev/null
@@ -1,705 +0,0 @@
----
-# jenkins job templates for Fuel
-- project:
-
- name: 'fuel'
-
- project: '{name}'
-
- installer: '{name}'
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- disabled: false
- gs-pathname: ''
- functest_docker_tag: iruya
- iruya: &iruya
- stream: iruya
- branch: 'stable/{stream}'
- disabled: false
- gs-pathname: '/{stream}'
- functest_docker_tag: '{stream}'
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # CI PODs
- # -------------------------------
- pod:
- - baremetal:
- slave-label: fuel-baremetal
- <<: *master
- - virtual:
- slave-label: fuel-virtual
- <<: *master
- - baremetal:
- slave-label: fuel-baremetal
- <<: *iruya
- - virtual:
- slave-label: fuel-virtual
- <<: *iruya
- # -------------------------------
- # Non-CI PODs
- # -------------------------------
- - zte-pod1:
- slave-label: zte-pod1
- <<: *master
- - itri-pod1:
- slave-label: itri-pod1
- <<: *master
- # -------------------------------
- # scenarios
- # -------------------------------
- scenario:
- # HA scenarios
- - 'os-nosdn-nofeature-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-odl-nofeature-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-ovn-nofeature-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-nosdn-ovs-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-nosdn-fdio-ha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- # NOHA scenarios
- - 'os-nosdn-nofeature-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-odl-nofeature-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-odl-sfc-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-odl-bgpvpn-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-ovn-nofeature-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-odl-ovs-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-nosdn-ovs-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'os-nosdn-fdio-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- - 'k8-calico-nofeature-noha':
- auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
-
- jobs:
- - 'fuel-{scenario}-{pod}-daily-{stream}'
- - 'fuel-deploy-{pod}-daily-{stream}'
- - 'fuel-collect-logs-{pod}-daily-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
- name: 'fuel-{scenario}-{pod}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-(os|k8)-.*?-{pod}-daily-.*'
- - 'fuel-verify-.*'
- blocking-level: 'NODE'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
- triggers:
- - '{auto-trigger-name}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - trigger-builds:
- - project: 'fuel-deploy-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO={scenario}
- INSTALLER_VERSION={stream}
- same-node: true
- block: true
- - trigger-builds:
- - project: 'functest-fuel-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO={scenario}
- INSTALLER_VERSION={stream}
- DOCKER_TAG={functest_docker_tag}
- same-node: true
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'yardstick-fuel-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO={scenario}
- INSTALLER_VERSION={stream}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- # 1. Here the stream means the SUT stream; the dovetail stream is defined in its own job.
- # 2. The 'default' testsuite covers the test cases already added to OVP.
- # 3. Run the default testsuite mandatory test cases against HA scenarios.
- # 4. Run the default testsuite optional test cases against HA scenarios twice a week.
- # 5. Run the proposed_tests testsuite optional test cases against HA scenarios.
- # 6. Not used for release criteria or compliance, only to debug dovetail tool bugs.
- - conditional-step:
- condition-kind: and
- condition-operands:
- - condition-kind: regex-match
- regex: '.*-ha'
- label: '{scenario}'
- steps:
- - trigger-builds:
- - project: 'dovetail-fuel-{pod}-default-mandatory-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO={scenario}
- INSTALLER_VERSION={stream}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - project: 'dovetail-fuel-{pod}-proposed_tests-optional-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO={scenario}
- INSTALLER_VERSION={stream}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - conditional-step:
- condition-kind: and
- condition-operands:
- - condition-kind: regex-match
- regex: '.*-ha'
- label: '{scenario}'
- - condition-kind: day-of-week
- day-selector: select-days
- days:
- MON: true
- WED: true
- use-build-time: true
- steps:
- - trigger-builds:
- - project: 'dovetail-fuel-{pod}-default-optional-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO={scenario}
- INSTALLER_VERSION={stream}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'fuel-collect-logs-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO={scenario}
- INSTALLER_VERSION={stream}
- block: true
- same-node: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
-
- publishers:
- - email-fuel-ptl
- - email-jenkins-admins-on-failure
-
-- job-template:
- name: 'fuel-deploy-{pod}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-deploy-{pod}-daily-.*'
- blocking-level: 'NODE'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - testapi-parameter
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl-nofeature-ha'
-
- scm:
- - git-scm
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - track-begin-timestamp
- - shell:
- !include-raw-escape: ./fuel-deploy.sh
-
- publishers:
- - email-fuel-ptl
- - email-jenkins-admins-on-failure
- - report-provision-result
-
-- job-template:
- name: 'fuel-collect-logs-{pod}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
- - '{slave-label}-defaults':
- installer: '{installer}'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl-nofeature-ha'
-
- scm:
- - git-scm
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - shell:
- !include-raw-escape: ./fuel-logs.sh
-
- publishers:
- - email-jenkins-admins-on-failure
-
-########################
-# trigger macros
-########################
-# ----------------------------------------------
-# Triggers for job running on fuel-baremetal against master branch
-# ----------------------------------------------
-# HA Scenarios
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: '5 20 * * 1,2,4,6,7'
-- trigger:
- name: 'fuel-os-odl-nofeature-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: '5 2 * * *'
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: '5 5 * * *'
-- trigger:
- name: 'fuel-os-ovn-nofeature-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-fdio-ha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-
-# NOHA Scenarios
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-nofeature-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-sfc-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-bgpvpn-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-ovn-nofeature-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-ovs-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-fdio-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: '5 20 * * 3,5'
-- trigger:
- name: 'fuel-k8-calico-nofeature-noha-baremetal-daily-master-trigger'
- triggers:
- - timed: ''
-# ----------------------------------------------
-# Triggers for job running on fuel-baremetal against iruya branch
-# ----------------------------------------------
-# HA Scenarios
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-baremetal-daily-iruya-trigger'
- triggers:
- - timed: '0 20 * * 1,4,7'
-- trigger:
- name: 'fuel-os-odl-nofeature-ha-baremetal-daily-iruya-trigger'
- triggers:
- - timed: '0 2 * * *'
-- trigger:
- name: 'fuel-os-ovn-nofeature-ha-baremetal-daily-iruya-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-iruya-trigger'
- triggers:
- - timed: '0 5 * * *'
-- trigger:
- name: 'fuel-os-nosdn-fdio-ha-baremetal-daily-iruya-trigger'
- triggers:
- - timed: ''
-# NOHA Scenarios
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-iruya-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-nofeature-noha-baremetal-daily-iruya-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-sfc-noha-baremetal-daily-iruya-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-bgpvpn-noha-baremetal-daily-iruya-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-ovn-nofeature-noha-baremetal-daily-iruya-trigger'
- triggers:
- - timed: '0 20 * * 6'
-- trigger:
- name: 'fuel-os-odl-ovs-noha-baremetal-daily-iruya-trigger'
- triggers:
- - timed: '0 20 * * 2'
-- trigger:
- name: 'fuel-os-nosdn-ovs-noha-baremetal-daily-iruya-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-fdio-noha-baremetal-daily-iruya-trigger'
- triggers:
- - timed: '0 20 * * 3,5'
-- trigger:
- name: 'fuel-k8-calico-nofeature-noha-baremetal-daily-iruya-trigger'
- triggers:
- - timed: ''
-# ----------------------------------------------
-# Triggers for job running on fuel-virtual against master branch
-# ----------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-nofeature-ha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-ovn-nofeature-ha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-fdio-ha-virtual-daily-master-trigger'
- triggers:
- - timed: ''
-# NOHA Scenarios
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '5 13 * * *'
-- trigger:
- name: 'fuel-os-odl-nofeature-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '5 18 * * *'
-- trigger:
- name: 'fuel-os-odl-sfc-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '5 4 * * 1,4'
-- trigger:
- name: 'fuel-os-odl-bgpvpn-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '5 6 * * 2,5'
-- trigger:
- name: 'fuel-os-ovn-nofeature-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '5 23 * * *'
-- trigger:
- name: 'fuel-os-odl-ovs-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '5 2 * * *'
-- trigger:
- name: 'fuel-os-nosdn-ovs-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '5 9 * * *'
-- trigger:
- name: 'fuel-os-nosdn-fdio-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '5 20 * * *'
-- trigger:
- name: 'fuel-k8-calico-nofeature-noha-virtual-daily-master-trigger'
- triggers:
- - timed: '5 16 * * *'
-# ----------------------------------------------
-# Triggers for job running on fuel-virtual against iruya branch
-# ----------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-virtual-daily-iruya-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-nofeature-ha-virtual-daily-iruya-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-ovn-nofeature-ha-virtual-daily-iruya-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-virtual-daily-iruya-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-fdio-ha-virtual-daily-iruya-trigger'
- triggers:
- - timed: ''
-# NOHA Scenarios
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-iruya-trigger'
- triggers:
- - timed: '0 13 * * *'
-- trigger:
- name: 'fuel-os-odl-nofeature-noha-virtual-daily-iruya-trigger'
- triggers:
- - timed: '0 18 * * *'
-- trigger:
- name: 'fuel-os-odl-sfc-noha-virtual-daily-iruya-trigger'
- triggers:
- - timed: '0 4 * * 1,4'
-- trigger:
- name: 'fuel-os-odl-bgpvpn-noha-virtual-daily-iruya-trigger'
- triggers:
- - timed: '0 7 * * 2,5'
-- trigger:
- name: 'fuel-os-ovn-nofeature-noha-virtual-daily-iruya-trigger'
- triggers:
- - timed: '0 23 * * *'
-- trigger:
- name: 'fuel-os-odl-ovs-noha-virtual-daily-iruya-trigger'
- triggers:
- - timed: '0 2 * * *'
-- trigger:
- name: 'fuel-os-nosdn-ovs-noha-virtual-daily-iruya-trigger'
- triggers:
- - timed: '0 9 * * *'
-- trigger:
- name: 'fuel-os-nosdn-fdio-noha-virtual-daily-iruya-trigger'
- triggers:
- - timed: '0 13 * * 6,7'
-- trigger:
- name: 'fuel-k8-calico-nofeature-noha-virtual-daily-iruya-trigger'
- triggers:
- - timed: '5 13 * * 6,7'
-# ----------------------------------------------
-# ZTE POD1 Triggers running against master branch
-# ----------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: '0 10 * * *'
-- trigger:
- name: 'fuel-os-odl-nofeature-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-ovn-nofeature-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-fdio-ha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-# NOHA Scenarios
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-nofeature-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-sfc-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-bgpvpn-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-ovn-nofeature-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-odl-ovs-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-ovs-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-nosdn-fdio-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-k8-calico-nofeature-noha-zte-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-# ----------------------------------------------
-# ITRI POD1 Triggers running against master branch
-# ----------------------------------------------
-- trigger:
- name: 'fuel-os-nosdn-nofeature-ha-itri-pod1-daily-master-trigger'
- triggers:
- - timed: '0 13 * * 1'
-- trigger:
- name: 'fuel-os-odl-nofeature-ha-itri-pod1-daily-master-trigger'
- triggers:
- - timed: '0 17 * * 1'
-- trigger:
- name: 'fuel-os-ovn-nofeature-ha-itri-pod1-daily-master-trigger'
- triggers:
- - timed: '0 13 * * 2'
-- trigger:
- name: 'fuel-os-nosdn-ovs-ha-itri-pod1-daily-master-trigger'
- triggers:
- - timed: '0 17 * * 2'
-- trigger:
- name: 'fuel-os-nosdn-fdio-ha-itri-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-# NOHA Scenarios
-- trigger:
- name: 'fuel-os-nosdn-nofeature-noha-itri-pod1-daily-master-trigger'
- triggers:
- - timed: '0 13 * * 3'
-- trigger:
- name: 'fuel-os-odl-nofeature-noha-itri-pod1-daily-master-trigger'
- triggers:
- - timed: '0 17 * * 3'
-- trigger:
- name: 'fuel-os-odl-sfc-noha-itri-pod1-daily-master-trigger'
- triggers:
- - timed: '0 13 * * 4'
-- trigger:
- name: 'fuel-os-odl-bgpvpn-noha-itri-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-os-ovn-nofeature-noha-itri-pod1-daily-master-trigger'
- triggers:
- - timed: '0 17 * * 4'
-- trigger:
- name: 'fuel-os-odl-ovs-noha-itri-pod1-daily-master-trigger'
- triggers:
- - timed: '0 13 * * 5'
-- trigger:
- name: 'fuel-os-nosdn-ovs-noha-itri-pod1-daily-master-trigger'
- triggers:
- - timed: '0 17 * * 5'
-- trigger:
- name: 'fuel-os-nosdn-fdio-noha-itri-pod1-daily-master-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'fuel-k8-calico-nofeature-noha-itri-pod1-daily-master-trigger'
- triggers:
- - timed: '0 13 * * 6'
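
The conditional-step blocks in the fuel-{scenario}-{pod}-daily-{stream} template above gate the dovetail runs on the scenario name and the day of the week. A rough shell equivalent of that logic, purely for illustration (run_job, POD and STREAM are placeholders for the triggered Jenkins builds and their parameters):

    run_job() { echo "would trigger: $1"; }   # placeholder for a downstream Jenkins build

    if [[ "${DEPLOY_SCENARIO}" =~ -ha$ ]]; then
        # every HA daily run: default-suite mandatory cases plus proposed_tests optional cases
        run_job "dovetail-fuel-${POD}-default-mandatory-${STREAM}"
        run_job "dovetail-fuel-${POD}-proposed_tests-optional-${STREAM}"

        # Mondays and Wednesdays only: optional cases of the default suite
        day_of_week=$(date +%u)   # 1 = Monday ... 7 = Sunday
        if [[ ${day_of_week} -eq 1 || ${day_of_week} -eq 3 ]]; then
            run_job "dovetail-fuel-${POD}-default-optional-${STREAM}"
        fi
    fi
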
diff --git a/jjb/fuel/fuel-deploy.sh b/jjb/fuel/fuel-deploy.sh
deleted file mode 100755
index dd89447e6..000000000
--- a/jjb/fuel/fuel-deploy.sh
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 Ericsson AB, Mirantis Inc., Enea Software AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o nounset
-set -o pipefail
-
-export TERM="vt220"
-
-# set deployment parameters
-export TMPDIR=${HOME}/tmpdir
-# shellcheck disable=SC2153
-LAB_NAME=${NODE_NAME/-*}
-# shellcheck disable=SC2153
-POD_NAME=${NODE_NAME/*-}
-
-# Fuel currently supports arm, enea, ericsson, intel, lf, unh, zte and itri labs
-if [[ ! "${LAB_NAME}" =~ (arm|enea|ericsson|intel|lf|unh|zte|itri) ]]; then
- echo "Unsupported/unidentified lab ${LAB_NAME}. Cannot continue!"
- exit 1
-fi
-
-echo "Using configuration for ${LAB_NAME}"
-
-# create TMPDIR if it doesn't exist, change permissions
-mkdir -p "${TMPDIR}"
-sudo chmod a+x "${HOME}" "${TMPDIR}"
-
-cd "${WORKSPACE}" || exit 1
-
-# log file name
-FUEL_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}.log.tar.gz"
-
-# Limited scope for vPOD verify jobs running on armband-virtual
-[[ ! "${JOB_NAME}" =~ verify-deploy-virtual-arm64 ]] || EXTRA_ARGS='-e'
-
-# turn on DEBUG mode
-[[ ${CI_DEBUG,,} == true ]] && EXTRA_ARGS="-D ${EXTRA_ARGS:-}"
-
-# construct the command
-DEPLOY_COMMAND="${WORKSPACE}/ci/deploy.sh \
- -l ${LAB_NAME} -p ${POD_NAME} -s ${DEPLOY_SCENARIO} \
- -S ${TMPDIR} ${EXTRA_ARGS:-} \
- -L ${WORKSPACE}/${FUEL_LOG_FILENAME}"
-
-# log info to console
-echo "Deployment parameters"
-echo "--------------------------------------------------------"
-echo "Scenario: ${DEPLOY_SCENARIO}"
-echo "Lab: ${LAB_NAME}"
-echo "POD: ${POD_NAME}"
-echo
-echo "Starting the deployment using ${INSTALLER_TYPE}. This could take some time..."
-echo "--------------------------------------------------------"
-echo
-
-# start the deployment
-echo "Issuing command"
-echo "${DEPLOY_COMMAND}"
-echo
-
-${DEPLOY_COMMAND}
-exit_code=$?
-
-echo
-echo "--------------------------------------------------------"
-echo "Deployment is done!"
-
-# upload logs for baremetal deployments
-# work on virtual deployments is still ongoing, so skip them for now
-if [[ "${JOB_NAME}" =~ baremetal-daily ]]; then
- echo "Uploading deployment logs"
- gsutil cp "${WORKSPACE}/${FUEL_LOG_FILENAME}" \
- "gs://${GS_URL}/logs/${FUEL_LOG_FILENAME}" > /dev/null 2>&1
- echo "Logs are available at http://${GS_URL}/logs/${FUEL_LOG_FILENAME}"
-fi
-
-if [[ "${exit_code}" -ne 0 ]]; then
- echo "Deployment failed!"
- exit "${exit_code}"
-fi
-
-echo "Deployment is successful!"
-exit 0
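For context, the removed fuel-deploy.sh derives both the lab and the POD purely from the Jenkins node name. A minimal sketch of that parameter expansion, using a hypothetical node name:

    NODE_NAME="itri-pod1"     # hypothetical Jenkins slave name
    LAB_NAME=${NODE_NAME/-*}  # drop everything from the first dash  -> "itri"
    POD_NAME=${NODE_NAME/*-}  # drop everything up to the last dash  -> "pod1"
    echo "Lab: ${LAB_NAME}, POD: ${POD_NAME}"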
diff --git a/jjb/fuel/fuel-docker-jobs.yaml b/jjb/fuel/fuel-docker-jobs.yaml
deleted file mode 100644
index a78bb1c25..000000000
--- a/jjb/fuel/fuel-docker-jobs.yaml
+++ /dev/null
@@ -1,224 +0,0 @@
----
-##############################################
-# job configuration for docker build and push
-##############################################
-- project:
-
- name: fuel-docker
-
- project: fuel
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - iruya:
- branch: 'stable/{stream}'
- disabled: false
-
- arch_tag:
- - 'amd64':
- slave_label: 'opnfv-build-ubuntu'
- - 'arm64':
- slave_label: 'opnfv-build-ubuntu-arm'
-
- # settings for jobs run in multijob phases
- build-job-settings: &build-job-settings
- current-parameters: false
- git-revision: true
- node-parameters: false
- predefined-parameters: |
- PUSH_IMAGE=$PUSH_IMAGE
- CACHE_INVALIDATE=$CACHE_INVALIDATE
- COMMIT_ID=$COMMIT_ID
- GERRIT_REFNAME=$GERRIT_REFNAME
- kill-phase-on: FAILURE
- abort-all-jobs: false
-
- manifest-job-settings: &manifest-job-settings
- current-parameters: false
- git-revision: true
- node-parameters: false
- predefined-parameters:
- GERRIT_REFNAME=$GERRIT_REFNAME
- kill-phase-on: FAILURE
- abort-all-jobs: false
-
- jobs:
- - "fuel-docker-{stream}"
- - "fuel-docker-build-{arch_tag}-{stream}"
- - "fuel-docker-manifest-{stream}"
-
-########################
-# job templates
-########################
-- job-template:
- name: 'fuel-docker-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- parameters:
- - fuel-job-parameters:
- project: '{project}'
- branch: '{branch}'
- slave_label: 'opnfv-build-ubuntu'
- arch_tag: 'amd64'
-
- properties:
- - throttle:
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-docker-.*'
- blocking-level: 'NODE'
-
- scm:
- - git-scm
-
- triggers:
- - pollscm:
- cron: "*/25 * * * *"
- - gerrit-trigger-tag-created:
- project: '{project}'
-
- builders:
- - multijob:
- name: 'build fuel images'
- execution-type: PARALLEL
- projects:
- - name: 'fuel-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'fuel-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - multijob:
- name: 'publish fuel manifests'
- condition: SUCCESSFUL
- execution-type: PARALLEL
- projects:
- - name: 'fuel-docker-manifest-{stream}'
- <<: *manifest-job-settings
-
- publishers:
- - email-fuel-ptl
-
-- job-template:
- name: 'fuel-docker-build-{arch_tag}-{stream}'
- disabled: '{obj:disabled}'
- parameters:
- - fuel-job-parameters:
- project: '{project}'
- branch: '{branch}'
- slave_label: '{slave_label}'
- arch_tag: '{arch_tag}'
- properties:
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-docker-build-.*'
- blocking-level: 'NODE'
- scm:
- - git-scm
- builders:
- - shell: |
- #!/bin/bash -ex
- case "{stream}" in
- "master")
- tag="latest" ;;
- *)
- tag="{stream}" ;;
- esac
- ./ci/build.sh $tag
-
-- job-template:
- name: 'fuel-docker-manifest-{stream}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build-ubuntu'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: PROJECT
- default: "{project}"
- description: "Project name used to enable job conditions"
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: REPO
- default: "opnfv"
- description: "Repository name for fuel-docker images"
-
-
- disabled: '{obj:disabled}'
-
- builders:
- - shell: |
- #!/bin/bash -ex
- case "{stream}" in
- "master")
- tag="latest" ;;
- *)
- tag="{stream}" ;;
- esac
- sudo manifest-tool push from-args \
- --platforms linux/amd64,linux/arm64 \
- --template $REPO/fuel:saltmaster-reclass-ARCH-$tag \
- --target $REPO/fuel:saltmaster-reclass-$tag
- sudo manifest-tool push from-args \
- --platforms linux/amd64,linux/arm64 \
- --template $REPO/fuel:saltminion-maas-ARCH-$tag \
- --target $REPO/fuel:saltminion-maas-$tag
- exit $?
-
-# parameter macro
-- parameter:
- name: fuel-job-parameters
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: '{slave_label}'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: PUSH_IMAGE
- default: "true"
- description: "To enable/disable pushing the image to Dockerhub."
- - string:
- name: CACHE_INVALIDATE
- default: "0"
- description: "Set to any non-zero value to force a Docker cache cleanup"
- - string:
- name: COMMIT_ID
- default: ""
- description: "commit id to make a snapshot docker image"
- - string:
- name: GERRIT_REFNAME
- default: ""
- description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
- - string:
- name: PROJECT
- default: "{project}"
- description: "Project name used to enable job conditions"
- - string:
- name: REPO
- default: "opnfv"
- description: "Repository name for fuel-docker images"
diff --git a/jjb/fuel/fuel-logs.sh b/jjb/fuel/fuel-logs.sh
deleted file mode 100755
index a7d852685..000000000
--- a/jjb/fuel/fuel-logs.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2017 Ericsson AB, Mirantis Inc., Enea Software AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o nounset
-set -o pipefail
-
-# Fuel requires the deploy script to be run with sudo; Armband does not
-SUDO='sudo -E'
-[ "${PROJECT}" = 'fuel' ] || SUDO=
-
-# Log file name
-FUEL_PM_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}_pm.log.tar.gz"
-
-# Construct the command
-LOG_COMMAND="${SUDO} ${WORKSPACE}/mcp/scripts/log.sh \
- ${WORKSPACE}/${FUEL_PM_LOG_FILENAME}"
-
-# Log info to console
-echo "Collecting post mortem logs ..."
-echo "--------------------------------------------------------"
-echo "${LOG_COMMAND}"
-
-${LOG_COMMAND}
-
-# Upload logs for both baremetal and virtual deployments
-echo "Uploading deployment logs"
-echo "--------------------------------------------------------"
-gsutil cp "${WORKSPACE}/${FUEL_PM_LOG_FILENAME}" \
- "gs://${GS_URL}/logs/${FUEL_PM_LOG_FILENAME}" > /dev/null 2>&1
-echo "Logs are available at http://${GS_URL}/logs/${FUEL_PM_LOG_FILENAME}"
diff --git a/jjb/fuel/fuel-rtd-jobs.yaml b/jjb/fuel/fuel-rtd-jobs.yaml
deleted file mode 100644
index 2a7cc5b09..000000000
--- a/jjb/fuel/fuel-rtd-jobs.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-- project:
- name: fuel-rtd
- project: fuel
- project-name: fuel
-
- project-pattern: 'fuel'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-fuel/47205/'
- rtd-token: '38f40bf6c08fd4bccb930871bc29b08404cf98b0'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - iruya:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/fuel/fuel-set-scenario.sh b/jjb/fuel/fuel-set-scenario.sh
deleted file mode 100755
index 4c8ed7334..000000000
--- a/jjb/fuel/fuel-set-scenario.sh
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 SUSE, Mirantis Inc., Enea Software AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o pipefail
-set -x
-
-#----------------------------------------------------------------------
-# This script is used by CI and executed by Jenkins jobs.
-# Do not run this script manually unless you know exactly
-# what you are doing.
-#----------------------------------------------------------------------
-
-# This function allows developers to specify the impacted scenario by
-# requesting a RE-check via a gerrit change comment under a specific format.
-#
-# Patterns to be searched in change comment:
-# recheck: <scenario-name>
-# reverify: <scenario-name>
-# Examples:
-# recheck: os-odl-ovs-noha
-# reverify: os-nosdn-nofeature-ha
-
-function set_scenario() {
- # process gerrit event comment text (if present)
- DEPLOY_SCENARIO=$(echo "${GERRIT_EVENT_COMMENT_TEXT}" | \
- grep -Po '(?!:(recheck|reverify):\s*)([-\w]+ha)')
- if [ -z "${DEPLOY_SCENARIO}" ]; then
- if [[ "$JOB_NAME" =~ baremetal ]]; then
- DEPLOY_SCENARIO='os-nosdn-nofeature-ha'
- else
- DEPLOY_SCENARIO='os-nosdn-nofeature-noha'
- fi
- fi
- # save the scenario names into java properties file
- # so they can be injected to downstream jobs via envInject
- echo "Recording the scenario '${DEPLOY_SCENARIO}' for downstream jobs"
- echo "DEPLOY_SCENARIO=${DEPLOY_SCENARIO}" > "$WORK_DIRECTORY/scenario.properties"
-}
-
-# ensure GERRIT vars are set
-[ -n "${GERRIT_CHANGE_NUMBER}" ] || exit 1
-GERRIT_EVENT_COMMENT_TEXT="${GERRIT_EVENT_COMMENT_TEXT:-''}"
-
-# this directory is where the temporary properties file will be stored
-WORK_DIRECTORY=/tmp/$GERRIT_CHANGE_NUMBER
-/bin/rm -rf "$WORK_DIRECTORY" && mkdir -p "$WORK_DIRECTORY"
-
-set_scenario
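To show what the comment parsing in the removed fuel-set-scenario.sh does in practice, a hypothetical helper that reuses the script's own grep pattern on made-up Gerrit comments written in the documented format:

    extract_scenario() {
        echo "$1" | grep -Po '(?!:(recheck|reverify):\s*)([-\w]+ha)'
    }
    extract_scenario 'recheck: os-odl-ovs-noha'         # prints os-odl-ovs-noha
    extract_scenario 'reverify: os-nosdn-nofeature-ha'  # prints os-nosdn-nofeature-ha
    extract_scenario 'recheck' || echo "no scenario given, the job defaults apply"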
diff --git a/jjb/fuel/fuel-verify-jobs.yaml b/jjb/fuel/fuel-verify-jobs.yaml
deleted file mode 100644
index dcc8b30c7..000000000
--- a/jjb/fuel/fuel-verify-jobs.yaml
+++ /dev/null
@@ -1,271 +0,0 @@
----
-- project:
- name: 'fuel-verify-jobs'
-
- project: 'fuel'
-
- installer: 'fuel'
- #####################################
- # branch definitions
- #####################################
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- functest_docker_tag: iruya
- - iruya:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- functest_docker_tag: '{stream}'
- #####################################
- # cluster architectures
- #####################################
- # Note: arm64 was removed since it was basically a no-op
- arch_tag:
- - 'amd64':
- slave-label: 'fuel'
- functest-suite-label: 'fuel'
- #####################################
- # cluster types
- #####################################
- type:
- - 'virtual'
- - 'baremetal'
- #####################################
- # patch verification phases
- #####################################
- phase:
- - 'docker-build'
- - 'deploy'
- #####################################
- # jobs
- #####################################
- jobs:
- - 'fuel-verify-{type}-{arch_tag}-{stream}'
- - 'fuel-verify-{phase}-{type}-{arch_tag}-{stream}'
-#####################################
-# job templates
-#####################################
-- job-template:
- name: 'fuel-verify-{type}-{arch_tag}-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 4
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-(os|k8)-.*?-daily-.*'
- - 'fuel-verify-.*'
- blocking-level: 'NODE'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- triggers:
- - 'fuel-verify-{type}-{arch_tag}-trigger':
- project: '{project}'
- branch: '{branch}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-{type}-defaults':
- installer: '{installer}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - 'fuel-verify-set-scenario-macro'
- - inject:
- properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/scenario.properties"
- - description-setter:
- description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
- - multijob:
- name: docker-build
- condition: SUCCESSFUL
- projects:
- - name: 'fuel-verify-docker-build-{type}-{arch_tag}-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
-
- - multijob:
- name: deploy
- condition: SUCCESSFUL
- projects:
- - name: 'fuel-verify-deploy-{type}-{arch_tag}-{stream}'
- current-parameters: false
- predefined-parameters: |
- MCP_DOCKER_TAG={arch_tag}-verify
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
-
- - multijob:
- name: smoke-test
- condition: SUCCESSFUL
- projects:
- # Use Functest job definition from jjb/functest/functest-daily-jobs
- - name: 'functest-{functest-suite-label}-{type}-suite-{stream}'
- current-parameters: false
- predefined-parameters: |
- FUNCTEST_MODE=tier
- FUNCTEST_TIER=healthcheck
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- DOCKER_TAG={functest_docker_tag}
- node-parameters: true
- enable-condition: "def m = ! ('$NODE_LABELS' =~ /armband-virtual/)"
- kill-phase-on: NEVER
- abort-all-job: true
-
-- job-template:
- name: 'fuel-verify-{phase}-{type}-{arch_tag}-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 2
- max-per-node: 1
- option: 'project'
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'fuel-verify-docker-build-.*'
- - 'fuel-verify-deploy-.*'
- blocking-level: 'NODE'
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-{type}-defaults':
- installer: '{installer}'
- - '{installer}-defaults':
- gs-pathname: '{gs-pathname}'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-verify-{phase}-macro'
-########################
-# trigger macros
-########################
-- trigger:
- name: 'fuel-verify-virtual-amd64-trigger'
- triggers:
- - gerrit: &fuel_verify_virtual_amd64_trigger
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: '(recheck|reverify)(\s|$|:\s*[-\w]+-noha)'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'ci/**'
- - compare-type: ANT
- pattern: 'mcp/**'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
- readable-message: true
-- trigger:
- name: 'fuel-verify-virtual-arm64-trigger'
- triggers:
- - gerrit:
- <<: *fuel_verify_virtual_amd64_trigger
-- trigger:
- name: 'fuel-verify-baremetal-amd64-trigger'
- triggers:
- - gerrit: &fuel_verify_baremetal_amd64_trigger
- <<: *fuel_verify_virtual_amd64_trigger
- trigger-on:
- - comment-added-contains-event:
- comment-contains-value: '(recheck|reverify):\s*[-\w]+-ha'
-- trigger:
- name: 'fuel-verify-baremetal-arm64-trigger'
- triggers:
- - gerrit:
- <<: *fuel_verify_baremetal_amd64_trigger
- skip-vote:
- successful: true
- failed: true
- unstable: true
- notbuilt: true
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'fuel-verify-set-scenario-macro'
- builders:
- - shell:
- !include-raw: ./fuel-set-scenario.sh
-- builder:
- name: 'fuel-verify-deploy-macro'
- builders:
- - shell:
- !include-raw: ./fuel-deploy.sh
-- builder:
- name: 'fuel-verify-docker-build-macro'
- builders:
- - shell: |
- #!/bin/bash -ex
- sudo -E ./ci/build.sh 'verify' ''
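The two Gerrit comment patterns above decide whether a recheck lands on the virtual (noha) or the baremetal (ha) verify job. A rough local check with made-up comments; grep -P is only an approximation of the regex engine Gerrit itself uses:

    virtual_re='(recheck|reverify)(\s|$|:\s*[-\w]+-noha)'
    baremetal_re='(recheck|reverify):\s*[-\w]+-ha'
    for comment in 'recheck' 'recheck: os-odl-ovs-noha' 'reverify: os-nosdn-nofeature-ha'; do
        printf '%-35s ->' "${comment}"
        echo "${comment}" | grep -qP "${virtual_re}"   && printf ' virtual'
        echo "${comment}" | grep -qP "${baremetal_re}" && printf ' baremetal'
        echo
    done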
diff --git a/jjb/functest/functest-alpine.sh b/jjb/functest/functest-alpine.sh
deleted file mode 100755
index 14143d2e8..000000000
--- a/jjb/functest/functest-alpine.sh
+++ /dev/null
@@ -1,270 +0,0 @@
-#!/bin/bash
-
-set -e
-set +u
-set +o pipefail
-
-REPO=${REPO:-opnfv}
-CI_LOOP=${CI_LOOP:-daily}
-TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results
-ENERGY_RECORDER_API_URL=http://energy.opnfv.fr/resources
-DOCKER_TAG=${DOCKER_TAG:-$([[ ${BRANCH##*/} == "master" ]] && echo "latest" || echo ${BRANCH##*/})}
-
-run_tiers() {
- tiers=$1
- cmd_opt="run_tests -r -t all"
- [[ $BUILD_TAG =~ "suite" ]] && cmd_opt="run_tests -t all"
- for tier in ${tiers[@]}; do
- FUNCTEST_IMAGE=${REPO}/functest-${tier}:${DOCKER_TAG}
- echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
- docker pull ${FUNCTEST_IMAGE}>/dev/null
- cmd="docker run --rm ${envs} ${volumes} ${TESTCASE_OPTIONS} ${FUNCTEST_IMAGE} /bin/bash -c '${cmd_opt}'"
- echo "Running Functest tier '${tier}'. CMD: ${cmd}"
- eval ${cmd}
- ret_value=$?
- if [ ${ret_value} != 0 ]; then
- echo ${ret_value} > ${ret_val_file}
- if [ ${tier} == 'healthcheck' ]; then
- echo "Healthcheck tier failed. Exiting Functest..."
- skip_tests=1
- break
- fi
- fi
- done
-}
-
-run_test() {
- test_name=$1
- cmd_opt="run_tests -t ${test_name}"
- # Determine which Functest image should be used for the test case
- case ${test_name} in
- connection_check|tenantnetwork1|tenantnetwork2|vmready1|vmready2|singlevm1|singlevm2|vping_ssh|vping_userdata|cinder_test|odl|api_check|snaps_health_check|tempest_smoke)
- FUNCTEST_IMAGE=${REPO}/functest-healthcheck:${DOCKER_TAG} ;;
- neutron-tempest-plugin-api|rally_sanity|refstack_defcore|tempest_full|tempest_scenario|patrole|snaps_smoke|neutron_trunk|networking-bgpvpn|networking-sfc|barbican)
- FUNCTEST_IMAGE=${REPO}/functest-smoke:${DOCKER_TAG} ;;
- rally_full|rally_jobs|shaker|vmtp)
- FUNCTEST_IMAGE=${REPO}/functest-benchmarking:${DOCKER_TAG} ;;
- cloudify|cloudify_ims|heat_ims|vyos_vrouter|juju_epc)
- FUNCTEST_IMAGE=${REPO}/functest-vnf:${DOCKER_TAG} ;;
- *)
- echo "Unkown test case $test_name"
- exit 1
- ;;
- esac
- echo "Functest: Pulling Functest Docker image ${FUNCTEST_IMAGE} ..."
- docker pull ${FUNCTEST_IMAGE}>/dev/null
- cmd="docker run --rm ${envs} ${volumes} ${TESTCASE_OPTIONS} ${FUNCTEST_IMAGE} /bin/bash -c '${cmd_opt}'"
- echo "Running Functest test case '${test_name}'. CMD: ${cmd}"
- eval ${cmd}
- ret_value=$?
- if [ ${ret_value} != 0 ]; then
- echo ${ret_value} > ${ret_val_file}
- fi
-}
-
-
-redirect="/dev/stdout"
-FUNCTEST_DIR=/home/opnfv/functest
-DEPLOY_TYPE=baremetal
-[[ $BUILD_TAG =~ "virtual" ]] && DEPLOY_TYPE=virt
-HOST_ARCH=$(uname -m)
-
-# Prepare OpenStack credentials volume
-rc_file=${HOME}/opnfv-openrc.sh
-
-if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
- rc_file=$LAB_CONFIG/admin-openrc
-elif [[ ${INSTALLER_TYPE} == 'fuel' ]] && [[ "${DEPLOY_SCENARIO}" =~ -ha$ ]]; then
- cacert_file_vol="-v ${HOME}/os_cacert:/etc/ssl/certs/mcp_os_cacert"
-fi
-
-rc_file_vol="-v ${rc_file}:${FUNCTEST_DIR}/conf/env_file"
-
-echo "Functest: Start Docker and prepare environment"
-
-echo "Functest: Download images that will be used by test cases"
-images_dir="${HOME}/opnfv/functest/images"
-download_script=${WORKSPACE}/functest/ci/download_images.sh
-chmod +x ${download_script}
-${download_script} ${images_dir} ${DEPLOY_SCENARIO} ${HOST_ARCH} 2> ${redirect}
-
-images_vol="-v ${images_dir}:${FUNCTEST_DIR}/images"
-
-dir_result="${HOME}/opnfv/functest/results/${BRANCH##*/}"
-mkdir -p ${dir_result}
-sudo rm -rf ${dir_result}/*
-results_vol="-v ${dir_result}:${FUNCTEST_DIR}/results"
-custom_params=
-test -f ${HOME}/opnfv/functest/custom/params_${DOCKER_TAG} && custom_params=$(cat ${HOME}/opnfv/functest/custom/params_${DOCKER_TAG})
-
-envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
- -e NODE_NAME=${NODE_NAME} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
- -e BUILD_TAG=${BUILD_TAG} -e DEPLOY_TYPE=${DEPLOY_TYPE} -e CI_LOOP=${CI_LOOP} \
- -e TEST_DB_URL=${TEST_DB_URL} -e ENERGY_RECORDER_API_URL=${ENERGY_RECORDER_API_URL} \
- -e DEBUG=true"
-
-ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
-
-if [ "${INSTALLER_TYPE}" == 'fuel' ]; then
- COMPUTE_ARCH=$(ssh -l ubuntu ${INSTALLER_IP} -i ${SSH_KEY} ${ssh_options} \
- "sudo salt 'cmp*' grains.get cpuarch --out yaml | awk '{print \$2; exit}'")
- IMAGE_PROPERTIES="hw_disk_bus:scsi,hw_scsi_model:virtio-scsi"
- envs="${envs} -e POD_ARCH=${COMPUTE_ARCH} -e BLOCK_MIGRATION=false"
-fi
-
-if [[ ${INSTALLER_TYPE} == 'fuel' && ${DEPLOY_SCENARIO} == 'os-nosdn-nofeature-noha' ]]; then
- libvirt_vol="-v ${ssh_key}:${FUNCTEST_DIR}/conf/libvirt_key"
- envs="${envs} -e LIBVIRT_USER=ubuntu -e LIBVIRT_KEY_PATH=${FUNCTEST_DIR}/conf/libvirt_key"
-fi
-
-if [[ ${DEPLOY_SCENARIO} == *"ovs"* ]] || [[ ${DEPLOY_SCENARIO} == *"fdio"* ]]; then
- if [[ -n ${IMAGE_PROPERTIES} ]]; then
- IMAGE_PROPERTIES="${IMAGE_PROPERTIES},hw_mem_page_size:large"
- else
- IMAGE_PROPERTIES="hw_mem_page_size:large"
- fi
- FLAVOR_EXTRA_SPECS="hw:mem_page_size:large"
-fi
-
-if [[ -n ${IMAGE_PROPERTIES} ]] || [[ -n ${FLAVOR_EXTRA_SPECS} ]]; then
- envs="${envs} -e IMAGE_PROPERTIES=${IMAGE_PROPERTIES} -e FLAVOR_EXTRA_SPECS=${FLAVOR_EXTRA_SPECS}"
-fi
-
-tempest_conf_yaml=$(mktemp)
-case ${INSTALLER_TYPE} in
-apex)
- cat << EOF > "${tempest_conf_yaml}"
----
-compute-feature-enabled:
- shelve: false
- vnc_console: true
- block_migration_for_live_migration: false
-identity-feature-enabled:
- api_v2: false
- api_v2_admin: false
-image-feature-enabled:
- api_v2: true
- api_v1: false
-object-storage:
- operator_role: SwiftOperator
-volume:
- storage_protocol: ceph
-volume-feature-enabled:
- backup: false
-EOF
- ;;
-fuel)
- cat << EOF > "${tempest_conf_yaml}"
----
-compute-feature-enabled:
- shelve: false
- vnc_console: false
- spice_console: true
-identity-feature-enabled:
- api_v2: false
- api_v2_admin: false
-image-feature-enabled:
- api_v2: true
- api_v1: false
-volume:
- storage_protocol: iSCSI
-volume-feature-enabled:
- backup: false
-EOF
- ;;
-*)
- cat << EOF > "${tempest_conf_yaml}"
----
-compute-feature-enabled:
- shelve: false
- vnc_console: false
-identity-feature-enabled:
- api_v2: false
- api_v2_admin: false
-image-feature-enabled:
- api_v2: true
- api_v1: false
-volume:
- storage_protocol: iSCSI
-volume-feature-enabled:
- backup: false
-EOF
- ;;
-esac
-case ${BRANCH} in
-master)
- cat << EOF >> "${tempest_conf_yaml}"
-compute:
- max_microversion: latest
-EOF
- ;;
-stable/hunter)
- cat << EOF >> "${tempest_conf_yaml}"
-compute:
- max_microversion: 2.65
-EOF
- ;;
-esac
-echo "tempest_conf.yaml:" && cat "${tempest_conf_yaml}"
-
-volumes="${images_vol} ${results_vol} ${sshkey_vol} ${libvirt_vol} \
- ${userconfig_vol} ${rc_file_vol} ${cacert_file_vol} \
- -v ${tempest_conf_yaml}:/usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/tempest_conf.yaml"
-
-if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
- blacklist_yaml=$(mktemp)
- cat << EOF >> "${blacklist_yaml}"
----
--
- scenarios:
- - os-ovn-nofeature-ha
- tests:
- - neutron_tempest_plugin.api.admin.test_agent_management
- - neutron_tempest_plugin.api.admin.test_dhcp_agent_scheduler
- - patrole_tempest_plugin.tests.api.network.test_agents_rbac
- - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_provider_network_type
- - patrole_tempest_plugin.tests.api.network.test_networks_rbac.NetworksRbacTest.test_create_network_provider_segmentation_id
- - tempest.api.network.admin.test_agent_management
- - tempest.api.network.admin.test_dhcp_agent_scheduler
- - tempest.api.object_storage.test_crossdomain.CrossdomainTest.test_get_crossdomain_policy
--
- scenarios:
- - os-nosdn-nofeature-ha
- tests:
- - tempest.api.object_storage.test_crossdomain.CrossdomainTest.test_get_crossdomain_policy
--
- scenarios:
- - os-nosdn-nofeature-noha
- tests:
- - tempest.api.object_storage.test_crossdomain.CrossdomainTest.test_get_crossdomain_policy
-EOF
- volumes="${volumes} -v ${blacklist_yaml}:/usr/lib/python2.7/site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/blacklist.yaml"
-fi
-
-ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
-echo 0 > ${ret_val_file}
-
-set +e
-
-if [ ${FUNCTEST_MODE} == 'testcase' ]; then
- echo "FUNCTEST_MODE=testcase, FUNCTEST_SUITE_NAME=${FUNCTEST_SUITE_NAME}"
- run_test ${FUNCTEST_SUITE_NAME}
-elif [ ${FUNCTEST_MODE} == 'tier' ]; then
- echo "FUNCTEST_MODE=tier, FUNCTEST_TIER=${FUNCTEST_TIER}"
- tiers=(${FUNCTEST_TIER})
- run_tiers ${tiers}
-else
- tests=()
- skip_tests=0
- if [ "${HOST_ARCH}" != "aarch64" ]; then
- tiers=(healthcheck smoke benchmarking vnf)
- else
- tiers=(healthcheck smoke benchmarking)
- fi
- run_tiers ${tiers}
- if [ ${skip_tests} -eq 0 ]; then
- for test in "${tests[@]}"; do
- run_test "$test"
- done
- fi
-fi
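A minimal sketch of how the removed functest-alpine.sh picks the Functest Docker tag when the job does not pass DOCKER_TAG explicitly; the branch values are just examples:

    for BRANCH in master stable/iruya; do
        DOCKER_TAG=$([[ ${BRANCH##*/} == "master" ]] && echo "latest" || echo "${BRANCH##*/}")
        echo "BRANCH=${BRANCH} -> DOCKER_TAG=${DOCKER_TAG}"
    done
    # master       -> latest
    # stable/iruya -> iruya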
diff --git a/jjb/functest/functest-cleanup.sh b/jjb/functest/functest-cleanup.sh
deleted file mode 100755
index c21b543a6..000000000
--- a/jjb/functest/functest-cleanup.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-
-[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
-echo "Cleaning up docker containers/images..."
-FUNCTEST_IMAGE=opnfv/functest
-
-# Remove containers along with image opnfv/functest*:<none>
-dangling_images=($(docker images -f "dangling=true" | grep $FUNCTEST_IMAGE | awk '{print $3}'))
-if [[ -n ${dangling_images} ]]; then
- echo " Removing $FUNCTEST_IMAGE:<none> images and their containers..."
- for image_id in "${dangling_images[@]}"; do
- echo " Removing image_id: $image_id and its containers"
- containers=$(docker ps -a | grep $image_id | awk '{print $1}')
- if [[ -n "$containers" ]];then
- docker rm -f $containers >${redirect}
- fi
- docker rmi $image_id >${redirect}
- done
-fi
-
-# Remove previously running containers if they exist
-functest_containers=$(docker ps -a | grep $FUNCTEST_IMAGE | awk '{print $1}')
-if [[ -n ${functest_containers} ]]; then
- echo " Removing existing $FUNCTEST_IMAGE containers..."
- docker rm -f $functest_containers >${redirect}
-fi
-
-# Remove existing images if they exist
-if [[ $CLEAN_DOCKER_IMAGES == true ]]; then
- functest_image_tags=($(docker images | grep $FUNCTEST_IMAGE | awk '{print $2}'))
- if [[ -n ${functest_image_tags} ]]; then
- echo " Docker images to be removed:" >${redirect}
- (docker images | head -1 && docker images | grep $FUNCTEST_IMAGE) >${redirect}
- for tag in "${functest_image_tags[@]}"; do
- echo " Removing docker image $FUNCTEST_IMAGE:$tag..."
- docker rmi $FUNCTEST_IMAGE:$tag >${redirect}
- done
- fi
-fi
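A hedged dry-run companion to the removed cleanup script: list the opnfv/functest repository:tag pairs that a run with CLEAN_DOCKER_IMAGES=true would remove, without deleting anything:

    FUNCTEST_IMAGE=opnfv/functest
    docker images | grep "${FUNCTEST_IMAGE}" | awk '{print "would remove " $1 ":" $2}'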
diff --git a/jjb/functest/functest-daily-jobs.yaml b/jjb/functest/functest-daily-jobs.yaml
deleted file mode 100644
index 3cdff3d16..000000000
--- a/jjb/functest/functest-daily-jobs.yaml
+++ /dev/null
@@ -1,304 +0,0 @@
----
-###################################
-# job configuration for functest
-###################################
-- project:
- name: functest-daily
-
- project: functest
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- iruya: &iruya
- stream: iruya
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- hunter: &hunter
- stream: hunter
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # Installers using labels
- # CI PODs
- # This section should only contain the installers
- # that have been switched using labels for slaves
- # -------------------------------
- pod:
- # fuel CI PODs
- - baremetal:
- slave-label: fuel-baremetal
- installer: fuel
- <<: *master
- - virtual:
- slave-label: fuel-virtual
- installer: fuel
- <<: *master
- - baremetal:
- slave-label: fuel-baremetal
- installer: fuel
- <<: *iruya
- - virtual:
- slave-label: fuel-virtual
- installer: fuel
- <<: *iruya
- # apex CI PODs
- - virtual:
- slave-label: apex-virtual-master
- installer: apex
- <<: *master
- - baremetal:
- slave-label: apex-baremetal-master
- installer: apex
- <<: *master
- - virtual:
- slave-label: apex-virtual-master
- installer: apex
- <<: *iruya
- - baremetal:
- slave-label: apex-baremetal-master
- installer: apex
- <<: *iruya
- # armband CI PODs
- - armband-baremetal:
- slave-label: armband-baremetal
- installer: fuel
- <<: *master
- - armband-baremetal:
- slave-label: armband-baremetal
- installer: fuel
- <<: *iruya
- # fuel NONE CI PODs
- - zte-pod1:
- slave-label: fuel-baremetal
- installer: fuel
- <<: *master
- - itri-pod1:
- slave-label: fuel-baremetal
- installer: fuel
- <<: *master
-
- testsuite:
- - 'suite':
- job-timeout: 60
- - 'daily':
- job-timeout: 600
-
- jobs:
- - 'functest-{installer}-{pod}-{testsuite}-{stream}'
-
-################################
-# job template
-################################
-- job-template:
- name: 'functest-{installer}-{pod}-{testsuite}-{stream}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- max-total: 10
- option: 'project'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER Suite: $FUNCTEST_MODE Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: '{job-timeout}'
- abort: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults'
- - 'functest-{testsuite}-parameter'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-noha'
- - string:
- name: DOCKER_TAG
- default: ''
- - string:
- name: CLEAN_DOCKER_IMAGES
- default: 'false'
- description: 'Remove downloaded docker images (opnfv/functest*:*)'
- - functest-parameter:
- gs-pathname: '{gs-pathname}'
-
- scm:
- - git-scm
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - 'functest-{testsuite}-builder'
-
-
-########################
-# parameter macros
-########################
-- parameter:
- name: functest-daily-parameter
- parameters:
- - string:
- name: FUNCTEST_MODE
- default: 'daily'
- description: "Daily suite name to run"
-
-- parameter:
- name: functest-suite-parameter
- parameters:
- - choice:
- name: FUNCTEST_MODE
- choices:
- - 'tier'
- - 'testcase'
- default: 'tier'
- description: "Test case or Tier to be run"
- - choice:
- name: FUNCTEST_SUITE_NAME
- choices:
- - 'connection_check'
- - 'api_check'
- - 'snaps_health_check'
- - 'vping_ssh'
- - 'vping_userdata'
- - 'cinder_test'
- - 'tempest_smoke'
- - 'rally_sanity'
- - 'refstack_defcore'
- - 'patrole'
- - 'odl'
- - 'snaps_smoke'
- - 'shaker'
- - 'vmtp'
- - 'neutron_trunk'
- - 'tempest_full'
- - 'rally_full'
- - 'cloudify_ims'
- - 'vyos_vrouter'
- - 'juju_epc'
- - 'parser'
- default: 'connection_check'
- - choice:
- name: FUNCTEST_TIER
- choices:
- - 'healthcheck'
- - 'smoke'
- - 'benchmarking'
- - 'components'
- - 'vnf'
- - 'parser'
- default: 'healthcheck'
- - string:
- name: TESTCASE_OPTIONS
- default: ''
- description: 'Additional parameters specific to test case(s)'
-
-- parameter:
- name: functest-parameter
- parameters:
- - string:
- name: GS_PATHNAME
- default: '{gs-pathname}'
- description: "Version directory where the opnfv documents will be stored in gs repository"
- - string:
- name: FUNCTEST_REPO_DIR
- default: "/home/opnfv/repos/functest"
- description: "Directory where the Functest repository is cloned"
- - string:
- name: PUSH_RESULTS_TO_DB
- default: "true"
- description: "Push the results of all the tests to the resultDB"
- - string:
- name: CI_DEBUG
- default: 'false'
- description: "Show debug output information"
- - string:
- name: RC_FILE_PATH
- default: ''
- description: "Path to the OS credentials file if given"
- - string:
- name: REPO
- default: "opnfv"
- description: "Repository name for functest images"
-########################
-# trigger macros
-########################
-- trigger:
- name: 'functest-master'
- triggers:
- - pollscm:
- cron: "H 9 * * *"
-########################
-# builder macros
-########################
-- builder:
- name: functest-daily-builder
- builders:
- - 'functest-cleanup'
- - 'functest-daily'
- - 'functest-store-results'
- - 'functest-exit'
-
-- builder:
- name: functest-suite-builder
- builders:
- - 'functest-cleanup'
- - 'functest-daily'
- - 'functest-store-results'
- - 'functest-exit'
-
-- builder:
- name: functest-daily
- builders:
- # yamllint disable rule:indentation
- - conditional-step:
- condition-kind: regex-match
- regex: "os-.*"
- label: '$DEPLOY_SCENARIO'
- steps:
- - shell:
- !include-raw:
- - ./functest-env-presetup.sh
- - ../../utils/fetch_os_creds.sh
- - ./functest-alpine.sh
- - conditional-step:
- condition-kind: regex-match
- regex: "k8-.*"
- label: '$DEPLOY_SCENARIO'
- steps:
- - shell:
- !include-raw:
- - ../../utils/fetch_k8_conf.sh
- - ./functest-k8.sh
-
-# yamllint enable rule:indentation
-- builder:
- name: functest-store-results
- builders:
- - shell:
- !include-raw: ../../utils/push-test-logs.sh
-
-- builder:
- name: functest-cleanup
- builders:
- - shell:
- !include-raw: ./functest-cleanup.sh
-
-- builder:
- name: functest-exit
- builders:
- - shell:
- !include-raw: ./functest-exit.sh
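The two conditional-step builders in 'functest-daily' above route a run by scenario prefix. A plain-bash paraphrase of that routing, for readability only (not part of the job definitions):

    case "${DEPLOY_SCENARIO}" in
        os-*) echo "OpenStack path: functest-env-presetup.sh, fetch_os_creds.sh, functest-alpine.sh" ;;
        k8-*) echo "Kubernetes path: fetch_k8_conf.sh, functest-k8.sh" ;;
        *)    echo "no functest-daily builder matches '${DEPLOY_SCENARIO}'" ;;
    esac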
diff --git a/jjb/functest/functest-docker.yaml b/jjb/functest/functest-docker.yaml
deleted file mode 100644
index 4b3d04401..000000000
--- a/jjb/functest/functest-docker.yaml
+++ /dev/null
@@ -1,340 +0,0 @@
----
-##############################################
-# job configuration for docker build and push
-##############################################
-- project:
-
- name: functest-docker
-
- project: functest
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - jerma:
- branch: 'stable/{stream}'
- disabled: false
- - iruya:
- branch: 'stable/{stream}'
- disabled: false
- - hunter:
- branch: 'stable/{stream}'
- disabled: false
-
- arch_tag:
- - 'amd64':
- slave_label: 'lf-build2'
- - 'arm64':
- slave_label: 'opnfv-build-ubuntu-arm'
-
- # yamllint disable rule:key-duplicates
- image:
- - 'core'
- - 'tempest'
- - 'healthcheck'
- - 'smoke'
- - 'benchmarking'
- - 'vnf'
- - 'smoke-cntt'
- - 'benchmarking-cntt'
- - 'features'
-
- exclude:
- - stream: 'master'
- image: 'tempest'
- - stream: 'jerma'
- image: 'tempest'
- - stream: 'master'
- image: 'features'
- - stream: 'jerma'
- image: 'features'
- - stream: 'iruya'
- image: 'features'
-
- # settings for jobs run in multijob phases
- build-job-settings: &build-job-settings
- current-parameters: false
- git-revision: true
- node-parameters: false
- predefined-parameters: |
- PUSH_IMAGE=$PUSH_IMAGE
- COMMIT_ID=$COMMIT_ID
- GERRIT_REFNAME=$GERRIT_REFNAME
- DOCKERFILE=$DOCKERFILE
- kill-phase-on: FAILURE
- abort-all-jobs: false
-
- manifest-job-settings: &manifest-job-settings
- current-parameters: false
- git-revision: true
- node-parameters: false
- predefined-parameters:
- GERRIT_REFNAME=$GERRIT_REFNAME
- kill-phase-on: FAILURE
- abort-all-jobs: false
-
- # yamllint enable rule:key-duplicates
- jobs:
- - "functest-docker-{stream}"
- - "functest-{image}-docker-build-{arch_tag}-{stream}"
- - "functest-{image}-docker-manifest-{stream}"
-
-########################
-# job templates
-########################
-- job-template:
- name: 'functest-docker-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- parameters:
- - functest-job-parameters:
- project: '{project}'
- branch: '{branch}'
- slave_label: 'opnfv-build-ubuntu'
- arch_tag: 'amd64'
-
- properties:
- - throttle:
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm
-
- triggers:
- - pollscm:
- cron: "*/30 * * * *"
- - gerrit-trigger-tag-created:
- project: '{project}'
-
- builders:
- - multijob:
- name: 'build functest-core images'
- execution-type: PARALLEL
- projects:
- - name: 'functest-core-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-core-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - multijob:
- name: 'publish functest-core manifests'
- execution-type: PARALLEL
- projects:
- - name: 'functest-core-docker-manifest-{stream}'
- <<: *manifest-job-settings
- - multijob:
- name: 'build functest-tempest images'
- execution-type: PARALLEL
- projects:
- - name: 'functest-tempest-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-tempest-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - multijob:
- name: 'publish functest-tempest manifests'
- execution-type: PARALLEL
- projects:
- - name: 'functest-tempest-docker-manifest-{stream}'
- <<: *manifest-job-settings
- - multijob:
- name: 'build all functest images'
- condition: SUCCESSFUL
- execution-type: PARALLEL
- projects:
- - name: 'functest-healthcheck-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-healthcheck-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - name: 'functest-smoke-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-smoke-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - name: 'functest-benchmarking-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-benchmarking-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - name: 'functest-vnf-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-vnf-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - name: 'functest-features-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-features-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - multijob:
- name: 'publish all manifests'
- condition: SUCCESSFUL
- execution-type: PARALLEL
- projects:
- - name: 'functest-healthcheck-docker-manifest-{stream}'
- <<: *manifest-job-settings
- - name: 'functest-smoke-docker-manifest-{stream}'
- <<: *manifest-job-settings
- - name: 'functest-benchmarking-docker-manifest-{stream}'
- <<: *manifest-job-settings
- - name: 'functest-vnf-docker-manifest-{stream}'
- <<: *manifest-job-settings
- - name: 'functest-features-docker-manifest-{stream}'
- <<: *manifest-job-settings
- - multijob:
- name: 'build all functest cntt images'
- condition: SUCCESSFUL
- execution-type: PARALLEL
- projects:
- - name: 'functest-smoke-cntt-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-smoke-cntt-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - name: 'functest-benchmarking-cntt-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-benchmarking-cntt-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - multijob:
- name: 'publish all cntt manifests'
- condition: SUCCESSFUL
- execution-type: PARALLEL
- projects:
- - name: 'functest-smoke-cntt-docker-manifest-{stream}'
- <<: *manifest-job-settings
- - name: 'functest-benchmarking-cntt-docker-manifest-{stream}'
- <<: *manifest-job-settings
-
- publishers:
- - 'functest-amd64-recipients'
- - 'functest-arm64-recipients'
-
-- job-template:
- name: 'functest-{image}-docker-build-{arch_tag}-{stream}'
- disabled: '{obj:disabled}'
- parameters:
- - functest-job-parameters:
- project: '{project}'
- branch: '{branch}'
- slave_label: '{slave_label}'
- arch_tag: '{arch_tag}'
- scm:
- - git-scm
- builders:
- - shell: |
- #!/bin/bash -ex
- case "{arch_tag}" in
- "arm64")
- sudo amd64_dirs= arm64_dirs=docker/{image} arm_dirs= bash ./build.sh ;;
- *)
- sudo amd64_dirs=docker/{image} arm64_dirs= arm_dirs= bash ./build.sh ;;
- esac
- exit $?
-
-- job-template:
- name: 'functest-{image}-docker-manifest-{stream}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build-ubuntu'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: PROJECT
- default: "{project}"
- description: "Project name used to enable job conditions"
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: REPO
- default: "opnfv"
- description: "Repository name for functest images"
-
-
- disabled: '{obj:disabled}'
-
- builders:
- - shell: |
- #!/bin/bash -ex
- case "{stream}" in
- "master")
- tag="latest" ;;
- *)
- tag="{stream}" ;;
- esac
- sudo manifest-tool push from-args \
- --platforms linux/amd64,linux/arm64 \
- --template $REPO/functest-{image}:ARCH-$tag \
- --target $REPO/functest-{image}:$tag
- exit $?
-
-# parameter macro
-- parameter:
- name: functest-job-parameters
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: '{slave_label}'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: PUSH_IMAGE
- default: "true"
- description: "To enable/disable pushing the image to Dockerhub."
- - string:
- name: COMMIT_ID
- default: ""
- description: "commit id to make a snapshot docker image"
- - string:
- name: GERRIT_REFNAME
- default: ""
- description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
- - string:
- name: DOCKERFILE
- default: "Dockerfile"
- description: "Dockerfile to use for creating the image."
- - string:
- name: ARCH_TAG
- default: "{arch_tag}"
- description: "If set, this value will be added to the docker image tag as a prefix"
- - string:
- name: PROJECT
- default: "{project}"
- description: "Project name used to enable job conditions"
- - string:
- name: REPO
- default: "opnfv"
- description: "Repository name for functest images"
-
-# publisher macros
-- publisher:
- name: 'functest-arm64-recipients'
- publishers:
- - email:
- recipients: >
- cristina.pauna@enea.com
- alexandru.avadanii@enea.com
- delia.popescu@enea.com
-
-- publisher:
- name: 'functest-amd64-recipients'
- publishers:
- - email:
- recipients: >
- jalausuch@suse.com morgan.richomme@orange.com
- cedric.ollivier@orange.com feng.xiaowei@zte.com.cn
- juha.kosonen@nokia.com wangwulin@huawei.com
- valentin.boucher@kontron.com
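For reference, the stream-to-Docker-tag mapping used by the shell builders in the removed functest-docker jobs, spelled out for the four streams the project lists:

    for stream in master jerma iruya hunter; do
        case "${stream}" in
            "master") tag="latest" ;;
            *)        tag="${stream}" ;;
        esac
        echo "stream=${stream} -> docker tag=${tag}"
    done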
diff --git a/jjb/functest/functest-env-presetup.sh b/jjb/functest/functest-env-presetup.sh
deleted file mode 100755
index 510670bc2..000000000
--- a/jjb/functest/functest-env-presetup.sh
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env bash
-set -o errexit
-set -o pipefail
-
-# Fetch INSTALLER_IP for APEX deployments
-if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
- if [ -n "$RC_FILE_PATH" ]; then
- echo "RC_FILE_PATH is set: ${RC_FILE_PATH}...skipping detecting UC IP"
- else
- echo "Gathering IP information for Apex installer VM"
- ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
- if sudo virsh list | grep undercloud; then
- echo "Installer VM detected"
- undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
- grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
- export INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk {'print $1'})
- export sshkey_vol="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
- sudo scp $ssh_options root@${INSTALLER_IP}:/home/stack/stackrc ${HOME}/stackrc
- export stackrc_vol="-v ${HOME}/stackrc:/home/opnfv/functest/conf/stackrc"
-
- if sudo iptables -C FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
- sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
- fi
- if sudo iptables -C FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
- sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
- fi
- echo "Installer ip is ${INSTALLER_IP}"
- else
- echo "No available installer VM exists and no credentials provided...exiting"
- exit 1
- fi
- fi
-
-elif [[ ${INSTALLER_TYPE} == 'daisy' ]]; then
- echo "Gathering IP information for Daisy installer VM"
- if sudo virsh list | grep daisy; then
- echo "Installer VM detected"
-
- bridge_name=$(sudo virsh domiflist daisy | grep vnet | awk '{print $3}')
- echo "Bridge is $bridge_name"
-
- installer_mac=$(sudo virsh domiflist daisy | grep vnet | \
- grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
- export INSTALLER_IP=$(/usr/sbin/arp -e -i $bridge_name | grep ${installer_mac} | head -n 1 | awk {'print $1'})
-
- echo "Installer ip is ${INSTALLER_IP}"
- else
- echo "No available installer VM exists...exiting"
- exit 1
- fi
-
-elif [[ ${INSTALLER_TYPE} == 'fuel' ]]; then
- if [[ ! "${BRANCH}" =~ "danube" ]]; then
- echo "Map mcp ssh_key"
- export sshkey_vol="-v ${SSH_KEY:-/var/lib/opnfv/mcp.rsa}:/root/.ssh/id_rsa"
- fi
-fi
-
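A condensed, hedged sketch of the Apex undercloud-IP discovery the removed presetup script performs: read the MAC address of the undercloud VM's NIC on the libvirt default network and resolve it through the ARP cache. The VM name, network name and tool path are taken from the script; the MAC pattern is a tightened equivalent:

    undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
        grep -Eo "([0-9a-f]{2}:){5}[0-9a-f]{2}")
    INSTALLER_IP=$(/usr/sbin/arp -e | grep "${undercloud_mac}" | awk '{print $1}')
    echo "Undercloud IP: ${INSTALLER_IP}"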
diff --git a/jjb/functest/functest-exit.sh b/jjb/functest/functest-exit.sh
deleted file mode 100644
index 925a3cfbb..000000000
--- a/jjb/functest/functest-exit.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
-if [ ! -f ${ret_val_file} ]; then
- echo "Return value not found!"
- exit -1
-fi
-
-ret_val=`cat ${ret_val_file}`
-
-exit ${ret_val}
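The removed functest-exit.sh closes a small handshake with the run scripts: they seed a return_value file with 0, overwrite it when a suite fails, and the exit builder turns the recorded value into the job status. A minimal sketch of that convention (the failing suite is hypothetical and BRANCH is assumed to be set):

    ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
    mkdir -p "$(dirname "${ret_val_file}")"
    echo 0 > "${ret_val_file}"             # assume success until a suite fails
    some_suite() { return 1; }             # hypothetical failing suite
    some_suite || echo $? > "${ret_val_file}"
    exit "$(cat "${ret_val_file}")"        # what functest-exit.sh effectively does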
diff --git a/jjb/functest/functest-k8.sh b/jjb/functest/functest-k8.sh
deleted file mode 100755
index fb0e95526..000000000
--- a/jjb/functest/functest-k8.sh
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/bin/bash
-
-set -e
-set +u
-set +o pipefail
-
-redirect="/dev/stdout"
-FUNCTEST_DIR=/home/opnfv/functest
-
-admin_conf_file_vol="-v ${HOME}/admin.conf:/root/.kube/config"
-cat ${HOME}/admin.conf
-
-dir_result="${HOME}/opnfv/functest/results/${BRANCH##*/}"
-mkdir -p ${dir_result}
-sudo rm -rf ${dir_result}/*
-results_vol="-v ${dir_result}:${FUNCTEST_DIR}/results"
-
-volumes="${results_vol} ${admin_conf_file_vol}"
-
-envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} \
- -e NODE_NAME=${NODE_NAME} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} \
- -e BUILD_TAG=${BUILD_TAG}"
-
-DOCKER_TAG=${DOCKER_TAG:-$([[ ${BRANCH##*/} == "master" ]] && echo "latest" || echo ${BRANCH##*/})}
-
-set +e
-
-ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
-echo 0 > ${ret_val_file}
-
-FUNCTEST_IMAGES="\
-opnfv/functest-kubernetes-healthcheck:${DOCKER_TAG} \
-opnfv/functest-kubernetes-smoke:${DOCKER_TAG}"
-cmd_opt="run_tests -r -t all"
-
-for image in ${FUNCTEST_IMAGES}; do
- echo "Pulling Docker image ${image} ..."
- docker pull "${image}" >/dev/null
- cmd="docker run --rm ${envs} ${volumes} ${image} /bin/bash -c '${cmd_opt}'"
- echo "Running Functest k8s test cases, CMD: ${cmd}"
- eval ${cmd}
- ret_value=$?
- if [ ${ret_value} != 0 ]; then
- echo ${ret_value} > ${ret_val_file}
- fi
-done
diff --git a/jjb/functest/functest-kubernetes-docker.yaml b/jjb/functest/functest-kubernetes-docker.yaml
deleted file mode 100644
index baeaa1953..000000000
--- a/jjb/functest/functest-kubernetes-docker.yaml
+++ /dev/null
@@ -1,276 +0,0 @@
----
-##############################################
-# job configuration for docker build and push
-##############################################
-- project:
-
- name: functest-kubernetes-docker
-
- project: functest-kubernetes
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - jerma:
- branch: 'stable/{stream}'
- disabled: false
- - iruya:
- branch: 'stable/{stream}'
- disabled: false
- - hunter:
- branch: 'stable/{stream}'
- disabled: false
-
- arch_tag:
- - 'amd64':
- slave_label: 'lf-build2'
- - 'arm64':
- slave_label: 'opnfv-build-ubuntu-arm'
-
- # yamllint disable rule:key-duplicates
- image:
- - 'core'
- - 'healthcheck'
- - 'smoke'
-
- # settings for jobs run in multijob phases
- build-job-settings: &build-job-settings
- current-parameters: false
- git-revision: true
- node-parameters: false
- predefined-parameters: |
- PUSH_IMAGE=$PUSH_IMAGE
- COMMIT_ID=$COMMIT_ID
- GERRIT_REFNAME=$GERRIT_REFNAME
- DOCKERFILE=$DOCKERFILE
- kill-phase-on: FAILURE
- abort-all-jobs: false
-
- manifest-job-settings: &manifest-job-settings
- current-parameters: false
- git-revision: true
- node-parameters: false
- predefined-parameters:
- GERRIT_REFNAME=$GERRIT_REFNAME
- kill-phase-on: FAILURE
- abort-all-jobs: false
-
- # yamllint enable rule:key-duplicates
- jobs:
- - "functest-kubernetes-docker-{stream}"
- - "functest-kubernetes-{image}-docker-build-{arch_tag}-{stream}"
- - "functest-kubernetes-{image}-docker-manifest-{stream}"
-
-########################
-# job templates
-########################
-- job-template:
- name: 'functest-kubernetes-docker-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- parameters:
- - functest-kubernetes-job-parameters:
- project: '{project}'
- branch: '{branch}'
- slave_label: 'opnfv-build-ubuntu'
- arch_tag: 'amd64'
-
- properties:
- - throttle:
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm
-
- triggers:
- - pollscm:
- cron: "*/30 * * * *"
- - gerrit-trigger-tag-created:
- project: '{project}'
-
- builders:
- - multijob:
- name: 'build functest-kubernetes-core images'
- execution-type: PARALLEL
- projects:
- - name: 'functest-kubernetes-core-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-kubernetes-core-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - multijob:
- name: 'publish functest-kubernetes-core manifests'
- execution-type: PARALLEL
- projects:
- - name: 'functest-kubernetes-core-docker-manifest-{stream}'
- <<: *manifest-job-settings
- - multijob:
- name: 'build functest-kubernetes-healthcheck images'
- execution-type: PARALLEL
- projects:
- - name: 'functest-kubernetes-healthcheck-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-kubernetes-healthcheck-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - multijob:
- name: 'publish functest-kubernetes-healthcheck manifests'
- execution-type: PARALLEL
- projects:
- - name: 'functest-kubernetes-healthcheck-docker-manifest-{stream}'
- <<: *manifest-job-settings
- - multijob:
- name: 'build all functest-kubernetes images'
- condition: SUCCESSFUL
- execution-type: PARALLEL
- projects:
- - name: 'functest-kubernetes-smoke-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'functest-kubernetes-smoke-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - multijob:
- name: 'publish all manifests'
- condition: SUCCESSFUL
- execution-type: PARALLEL
- projects:
- - name: 'functest-kubernetes-smoke-docker-manifest-{stream}'
- <<: *manifest-job-settings
-
- publishers:
- - 'functest-kubernetes-amd64-recipients'
- - 'functest-kubernetes-arm64-recipients'
-
-- job-template:
- name: 'functest-kubernetes-{image}-docker-build-{arch_tag}-{stream}'
- disabled: '{obj:disabled}'
- parameters:
- - functest-kubernetes-job-parameters:
- project: '{project}'
- branch: '{branch}'
- slave_label: '{slave_label}'
- arch_tag: '{arch_tag}'
- scm:
- - git-scm
- builders:
- - shell: |
- #!/bin/bash -ex
- case "{arch_tag}" in
- "arm64")
- sudo amd64_dirs= arm64_dirs=docker/{image} bash ./build.sh ;;
- *)
- sudo amd64_dirs=docker/{image} arm64_dirs= bash ./build.sh ;;
- esac
- exit $?
-
-- job-template:
- name: 'functest-kubernetes-{image}-docker-manifest-{stream}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build-ubuntu'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: PROJECT
- default: "{project}"
- description: "Project name used to enable job conditions"
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: REPO
- default: "opnfv"
- description: "Repository name for functest-kubernetes images"
-
-
- disabled: '{obj:disabled}'
-
- builders:
- - shell: |
- #!/bin/bash -ex
- case "{stream}" in
- "master")
- tag="latest" ;;
- *)
- tag="{stream}" ;;
- esac
- sudo manifest-tool push from-args \
- --platforms linux/amd64,linux/arm64 \
- --template $REPO/functest-kubernetes-{image}:ARCH-$tag \
- --target $REPO/functest-kubernetes-{image}:$tag
- exit $?
-
-# parameter macro
-- parameter:
- name: functest-kubernetes-job-parameters
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: '{slave_label}'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: PUSH_IMAGE
- default: "true"
- description: "To enable/disable pushing the image to Dockerhub."
- - string:
- name: COMMIT_ID
- default: ""
- description: "commit id to make a snapshot docker image"
- - string:
- name: GERRIT_REFNAME
- default: ""
- description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
- - string:
- name: DOCKERFILE
- default: "Dockerfile"
- description: "Dockerfile to use for creating the image."
- - string:
- name: ARCH_TAG
- default: "{arch_tag}"
- description: "If set, this value will be added to the docker image tag as a prefix"
- - string:
- name: PROJECT
- default: "{project}"
- description: "Project name used to enable job conditions"
- - string:
- name: REPO
- default: "opnfv"
- description: "Repository name for functest-kubernetes images"
-
-# publisher macros
-- publisher:
- name: 'functest-kubernetes-arm64-recipients'
- publishers:
- - email:
- recipients: >
- cristina.pauna@enea.com
- alexandru.avadanii@enea.com
- delia.popescu@enea.com
-
-- publisher:
- name: 'functest-kubernetes-amd64-recipients'
- publishers:
- - email:
- recipients: >
- jalausuch@suse.com morgan.richomme@orange.com
- cedric.ollivier@orange.com feng.xiaowei@zte.com.cn
- juha.kosonen@nokia.com wangwulin@huawei.com
- valentin.boucher@kontron.com
diff --git a/jjb/functest/functest-kubernetes-pi.yaml b/jjb/functest/functest-kubernetes-pi.yaml
new file mode 100644
index 000000000..efab56c05
--- /dev/null
+++ b/jjb/functest/functest-kubernetes-pi.yaml
@@ -0,0 +1,891 @@
+---
+- functest-kubernetes-pi-containers: &functest-kubernetes-pi-containers
+ name: 'functest-kubernetes-pi-containers'
+ repo: '{repo}'
+ port: '{port}'
+ container: '{container}'
+ tag: '{tag}'
+
+- functest-kubernetes-pi-params: &functest-kubernetes-pi-params
+ name: 'functest-kubernetes-pi-params'
+ tag:
+ - latest:
+ node: v1.30
+ - v1.29:
+ node: v1.29
+ - v1.28:
+ node: v1.28
+ - v1.27:
+ node: v1.27
+ - v1.26:
+ node: v1.26
+ - arm-latest:
+ node: v1.30
+ - arm-v1.29:
+ node: v1.29
+ - arm-v1.28:
+ node: v1.28
+ - arm-v1.27:
+ node: v1.27
+ - arm-v1.26:
+ node: v1.26
+ - arm64-latest:
+ node: v1.30
+ - arm64-v1.29:
+ node: v1.29
+ - arm64-v1.28:
+ node: v1.28
+ - arm64-v1.27:
+ node: v1.27
+ - arm64-v1.26:
+ node: v1.26
+
+- functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-params: &functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-params
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-params'
+ repo: 'ollivier'
+ container: 'functest-kubernetes-healthcheck'
+ port:
+ tag:
+ - latest:
+ node: v1.30
+ - v1.29:
+ node: v1.29
+ - v1.28:
+ node: v1.28
+ - v1.27:
+ node: v1.27
+ - v1.26:
+ node: v1.26
+ - arm-latest:
+ node: v1.30
+ - arm-v1.29:
+ node: v1.29
+ - arm-v1.28:
+ node: v1.28
+ - arm-v1.27:
+ node: v1.27
+ - arm-v1.26:
+ node: v1.26
+ - arm64-latest:
+ node: v1.30
+ - arm64-v1.29:
+ node: v1.29
+ - arm64-v1.28:
+ node: v1.28
+ - arm64-v1.27:
+ node: v1.27
+ - arm64-v1.26:
+ node: v1.26
+
+- functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-params: &functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-params
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-params'
+ repo: 'ollivier'
+ container: 'functest-kubernetes-smoke'
+ port:
+ tag:
+ - latest:
+ node: v1.30
+ - v1.29:
+ node: v1.29
+ - v1.28:
+ node: v1.28
+ - v1.27:
+ node: v1.27
+ - v1.26:
+ node: v1.26
+ - arm-latest:
+ node: v1.30
+ - arm-v1.29:
+ node: v1.29
+ - arm-v1.28:
+ node: v1.28
+ - arm-v1.27:
+ node: v1.27
+ - arm-v1.26:
+ node: v1.26
+ - arm64-latest:
+ node: v1.30
+ - arm64-v1.29:
+ node: v1.29
+ - arm64-v1.28:
+ node: v1.28
+ - arm64-v1.27:
+ node: v1.27
+ - arm64-v1.26:
+ node: v1.26
+
+- functest-kubernetes-pi-ollivier-functest-kubernetes-security-params: &functest-kubernetes-pi-ollivier-functest-kubernetes-security-params
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-params'
+ repo: 'ollivier'
+ container: 'functest-kubernetes-security'
+ port:
+ tag:
+ - latest:
+ node: v1.30
+ - v1.29:
+ node: v1.29
+ - v1.28:
+ node: v1.28
+ - v1.27:
+ node: v1.27
+ - v1.26:
+ node: v1.26
+ - arm-latest:
+ node: v1.30
+ - arm-v1.29:
+ node: v1.29
+ - arm-v1.28:
+ node: v1.28
+ - arm-v1.27:
+ node: v1.27
+ - arm-v1.26:
+ node: v1.26
+ - arm64-latest:
+ node: v1.30
+ - arm64-v1.29:
+ node: v1.29
+ - arm64-v1.28:
+ node: v1.28
+ - arm64-v1.27:
+ node: v1.27
+ - arm64-v1.26:
+ node: v1.26
+
+- functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-params: &functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-params
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-params'
+ repo: 'ollivier'
+ container: 'functest-kubernetes-benchmarking'
+ port:
+ tag:
+ - latest:
+ node: v1.30
+ - v1.29:
+ node: v1.29
+ - v1.28:
+ node: v1.28
+ - v1.27:
+ node: v1.27
+ - v1.26:
+ node: v1.26
+ - arm-latest:
+ node: v1.30
+ - arm-v1.29:
+ node: v1.29
+ - arm-v1.28:
+ node: v1.28
+ - arm-v1.27:
+ node: v1.27
+ - arm-v1.26:
+ node: v1.26
+ - arm64-latest:
+ node: v1.30
+ - arm64-v1.29:
+ node: v1.29
+ - arm64-v1.28:
+ node: v1.28
+ - arm64-v1.27:
+ node: v1.27
+ - arm64-v1.26:
+ node: v1.26
+
+- functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-params: &functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-params
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-params'
+ repo: 'ollivier'
+ container: 'functest-kubernetes-cnf'
+ port:
+ tag:
+ - latest:
+ node: v1.30
+ - v1.29:
+ node: v1.29
+ - v1.28:
+ node: v1.28
+ - v1.27:
+ node: v1.27
+ - v1.26:
+ node: v1.26
+ - arm-latest:
+ node: v1.30
+ - arm-v1.29:
+ node: v1.29
+ - arm-v1.28:
+ node: v1.28
+ - arm-v1.27:
+ node: v1.27
+ - arm-v1.26:
+ node: v1.26
+ - arm64-latest:
+ node: v1.30
+ - arm64-v1.29:
+ node: v1.29
+ - arm64-v1.28:
+ node: v1.28
+ - arm64-v1.27:
+ node: v1.27
+ - arm64-v1.26:
+ node: v1.26
+
+- functest-kubernetes-pi-jobs: &functest-kubernetes-pi-jobs
+ name: 'functest-kubernetes-pi-jobs'
+ current-parameters: true
+
+- parameter:
+ name: functest-kubernetes-pi-node
+ parameters:
+ - label:
+ name: node
+ default: '{node}'
+
+- parameter:
+ name: functest-kubernetes-pi-build_tag
+ parameters:
+ - random-string:
+ name: build_tag
+
+- parameter:
+ name: functest-kubernetes-pi-DEPLOY_SCENARIO
+ parameters:
+ - string:
+ name: DEPLOY_SCENARIO
+ default: k8-nosdn-nofeature-noha
+
+- functest-kubernetes-pi-run-containers: &functest-kubernetes-pi-run-containers
+ name: 'functest-kubernetes-pi-run-containers'
+ <<: *functest-kubernetes-pi-containers
+ privileged: '{privileged}'
+ volumes: '{volumes}'
+ env: '{env}'
+ network: '{network}'
+ uid: '{uid}'
+ gid: '{gid}'
+ published_ports: '{published_ports}'
+
+- builder:
+ name: functest-kubernetes-pi-pull-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ docker pull $image
+
+- builder:
+ name: functest-kubernetes-pi-run-containers
+ builders:
+ - shell: |
+ set +x
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ mkdir -p $WORKSPACE/results
+ chown {uid}:{gid} $WORKSPACE/results
+ docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/functest-kubernetes/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/functest-kubernetes/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -v /home/opnfv/functest-kubernetes/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ $image run_tests -t {test} -p -r
+
+- builder:
+ name: functest-kubernetes-pi-remove-images
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ docker rmi $image || true
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-pull'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-pull-containers:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-pull'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-remove-images:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-rmi'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-rmi'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-pull'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-pull-containers:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-pull'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-remove-images:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-rmi'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-rmi'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-pull'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-pull-containers:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-pull'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-security-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-remove-images:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-rmi'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-security-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-rmi'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-pull'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-pull-containers:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-pull'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-remove-images:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-rmi'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-rmi'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-pull'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-pull-containers:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-pull'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pi-remove-images:
+ <<: *functest-kubernetes-pi-containers
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-rmi'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-params
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-rmi'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ - functest-kubernetes-pi-build_tag:
+ build_tag: ''
+ - functest-kubernetes-pi-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-pi-run-containers:
+ <<: *functest-kubernetes-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-healthcheck'
+ test:
+ - k8s_quick
+ - k8s_smoke
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ - functest-kubernetes-pi-build_tag:
+ build_tag: ''
+ - functest-kubernetes-pi-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-pi-run-containers:
+ <<: *functest-kubernetes-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-smoke'
+ test:
+ - xrally_kubernetes
+ - k8s_conformance
+ - k8s_conformance_serial
+ - sig_api_machinery
+ - sig_api_machinery_serial
+ - sig_apps
+ - sig_apps_serial
+ - sig_auth
+ - sig_cluster_lifecycle
+ - sig_instrumentation
+ - sig_network
+ - sig_node
+ - sig_scheduling_serial
+ - sig_storage
+ - sig_storage_serial
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ - functest-kubernetes-pi-build_tag:
+ build_tag: ''
+ - functest-kubernetes-pi-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-pi-run-containers:
+ <<: *functest-kubernetes-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-security-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-security'
+ test:
+ - kube_hunter
+ - kube_bench_master
+ - kube_bench_node
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ - functest-kubernetes-pi-build_tag:
+ build_tag: ''
+ - functest-kubernetes-pi-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-pi-run-containers:
+ <<: *functest-kubernetes-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-benchmarking'
+ test:
+ - xrally_kubernetes_full
+ - netperf
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ - functest-kubernetes-pi-build_tag:
+ build_tag: ''
+ - functest-kubernetes-pi-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-pi-run-containers:
+ <<: *functest-kubernetes-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-cnf'
+ test:
+ - k8s_vims
+ - helm_vims
+ - cnf_testsuite
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-{test}-run'
+
+- builder:
+ name: functest-kubernetes-pi-zip
+ builders:
+ - shell: |
+ set +x
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ mkdir -p $WORKSPACE/results
+ chown {uid}:{gid} $WORKSPACE/results
+ docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/functest-kubernetes \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/functest-kubernetes \
+ -v /home/opnfv/functest-kubernetes/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ $image zip_campaign
+
+- job-template:
+ name: 'functest-kubernetes-pi-{tag}-zip'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ - functest-kubernetes-pi-build_tag:
+ build_tag: ''
+ - functest-kubernetes-pi-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-pi-zip:
+ <<: *functest-kubernetes-pi-run-containers
+
+- project:
+ name: 'functest-kubernetes-pi-zip'
+ <<: *functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-healthcheck'
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-pi-{tag}-zip'
+
+- job-template:
+ name: 'functest-kubernetes-pi-{tag}-daily'
+ project-type: multijob
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-pi-node:
+ node: '{node}'
+ - functest-kubernetes-pi-build_tag:
+ build_tag: ''
+ - functest-kubernetes-pi-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
+ properties:
+ - build-blocker:
+ blocking-jobs:
+ - ^functest-kubernetes-(pi-)*.*-(daily|docker|review)$
+ # yamllint enable rule:indentation
+ builders:
+ - multijob:
+ name: remove former images
+ projects:
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-rmi'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-rmi'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-rmi'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-rmi'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-rmi'
+ <<: *functest-kubernetes-pi-jobs
+ - multijob:
+ name: pull containers
+ projects:
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-pull'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-pull'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-pull'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-pull'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-pull'
+ <<: *functest-kubernetes-pi-jobs
+ - multijob:
+ name: ollivier/functest-kubernetes-healthcheck:{tag}
+ projects:
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-k8s_quick-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-{tag}-k8s_smoke-run'
+ <<: *functest-kubernetes-pi-jobs
+ - multijob:
+ name: ollivier/functest-kubernetes-smoke:{tag}
+ execution-type: SEQUENTIALLY
+ projects:
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-xrally_kubernetes-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-k8s_conformance-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-k8s_conformance_serial-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_api_machinery-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_api_machinery_serial-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_apps-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_apps_serial-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_auth-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_cluster_lifecycle-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_instrumentation-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_network-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_node-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_scheduling_serial-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_storage-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-smoke-{tag}-sig_storage_serial-run'
+ <<: *functest-kubernetes-pi-jobs
+ - multijob:
+ name: ollivier/functest-kubernetes-security:{tag}
+ projects:
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-kube_hunter-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-kube_bench_master-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-security-{tag}-kube_bench_node-run'
+ <<: *functest-kubernetes-pi-jobs
+ - multijob:
+ name: ollivier/functest-kubernetes-benchmarking:{tag}
+ projects:
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-xrally_kubernetes_full-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-benchmarking-{tag}-netperf-run'
+ <<: *functest-kubernetes-pi-jobs
+ - multijob:
+ name: ollivier/functest-kubernetes-cnf:{tag}
+ projects:
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-k8s_vims-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-helm_vims-run'
+ <<: *functest-kubernetes-pi-jobs
+ - name: 'functest-kubernetes-pi-ollivier-functest-kubernetes-cnf-{tag}-cnf_testsuite-run'
+ <<: *functest-kubernetes-pi-jobs
+ - multijob:
+ name: dump all campaign data
+ projects:
+ - name: 'functest-kubernetes-pi-{tag}-zip'
+ <<: *functest-kubernetes-pi-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-pi-daily'
+ <<: *functest-kubernetes-pi-params
+ jobs:
+ - 'functest-kubernetes-pi-{tag}-daily'
+
+- view:
+ name: functest-kubernetes-pi
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-kubernetes-pi-[a-z-0-9.]+-daily$
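
The file added above is plain Jenkins Job Builder templating: each project entry expands the {tag} (and, where present, {test}) axes of the referenced job-template into concrete jobs such as functest-kubernetes-pi-ollivier-functest-kubernetes-healthcheck-latest-k8s_quick-run. A minimal sketch of checking that expansion locally, assuming Jenkins Job Builder is installed and paths are relative to a releng checkout (the output directory name is arbitrary):

    # Render the templates to XML without contacting a Jenkins instance.
    pip install --user jenkins-job-builder
    jenkins-jobs test -o rendered/ jjb/functest/functest-kubernetes-pi.yaml
    # One file per expanded job; count the pi jobs that were generated.
    ls rendered/ | grep -c '^functest-kubernetes-pi-'
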
diff --git a/jjb/functest/functest-kubernetes-project-jobs.yaml b/jjb/functest/functest-kubernetes-project-jobs.yaml
deleted file mode 100644
index 2e1a410e2..000000000
--- a/jjb/functest/functest-kubernetes-project-jobs.yaml
+++ /dev/null
@@ -1,80 +0,0 @@
----
-- functest-kubernetes-project-params: &functest-kubernetes-project-params
- name: 'functest-kubernetes-project-params'
- tag:
- - latest:
- branch: master
- slave: lf-virtual1
- - jerma:
- branch: stable/jerma
- slave: lf-virtual1
- - iruya:
- branch: stable/iruya
- slave: lf-virtual1
- - hunter:
- branch: stable/hunter
- slave: lf-virtual1
-
-- builder:
- name: functest-kubernetes-run-tox
- builders:
- - shell: tox
-
-- trigger:
- name: functest-kubernetes-project-patchset-created
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'functest-kubernetes'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- skip-vote:
- successful: false
- failed: false
- unstable: false
- notbuilt: false
-
-- parameter:
- name: functest-kubernetes-project-slave
- parameters:
- - label:
- name: slave
- default: '{slave}'
-
-- scm:
- name: functest-kubernetes-project-scm
- scm:
- - git:
- url: https://gerrit.opnfv.org/gerrit/functest-kubernetes
- refspec: '+refs/changes/*:refs/changes/*'
- branches:
- - '{ref}'
-
-- job-template:
- name: 'functest-kubernetes-run-tox-{tag}'
- triggers:
- - functest-kubernetes-project-patchset-created:
- branch: '{branch}'
- scm:
- - functest-kubernetes-project-scm:
- ref: $GERRIT_REFSPEC
- parameters:
- - functest-kubernetes-project-slave:
- slave: '{slave}'
- builders:
- - functest-kubernetes-run-tox
-
-- project:
- name: 'functest-kubernetes-run-tox'
- <<: *functest-kubernetes-project-params
- jobs:
- - 'functest-kubernetes-run-tox-{tag}'
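
The deleted block above tied tox to Gerrit patchset events per branch; later in this diff the same coverage returns as the functest-kubernetes-{tag}-tox jobs. A rough local equivalent of what either variant runs, with an illustrative refspec standing in for the value the Gerrit trigger injects:

    git clone https://gerrit.opnfv.org/gerrit/functest-kubernetes
    cd functest-kubernetes
    # Stand-in for $GERRIT_REFSPEC; any open change's refspec works here.
    git fetch origin refs/changes/34/71234/2 && git checkout FETCH_HEAD
    tox
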
diff --git a/jjb/functest/functest-kubernetes.yaml b/jjb/functest/functest-kubernetes.yaml
index fcc1eb15e..dcab411cd 100644
--- a/jjb/functest/functest-kubernetes.yaml
+++ b/jjb/functest/functest-kubernetes.yaml
@@ -1,32 +1,240 @@
---
-- functest-kubernetes-jobs: &functest-kubernetes-jobs
- name: 'functest-kubernetes-jobs'
- current-parameters: true
+- functest-kubernetes-containers: &functest-kubernetes-containers
+ name: 'functest-kubernetes-containers'
+ repo: '{repo}'
+ port: '{port}'
+ container: '{container}'
+ tag: '{tag}'
- functest-kubernetes-params: &functest-kubernetes-params
name: 'functest-kubernetes-params'
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: v1.30
+ dependency: 3.16
+ - v1.29:
+ from:
+ build_args:
+ branch: stable/v1.29
+ node: v1.29
+ dependency: 3.16
+ - v1.28:
+ from:
+ build_args:
+ branch: stable/v1.28
+ node: v1.28
+ dependency: 3.16
+ - v1.27:
+ from:
+ build_args:
+ branch: stable/v1.27
+ node: v1.27
+ dependency: 3.16
+ - v1.26:
+ from:
+ build_args:
+ branch: stable/v1.26
+ node: v1.26
+ dependency: 3.16
+
+- functest-kubernetes-opnfv-functest-kubernetes-healthcheck-params: &functest-kubernetes-opnfv-functest-kubernetes-healthcheck-params
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-params'
+ repo: 'opnfv'
+ container: 'functest-kubernetes-healthcheck'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: v1.30
+ dependency: 3.16
+ - v1.29:
+ from:
+ build_args:
+ branch: stable/v1.29
+ node: v1.29
+ dependency: 3.16
+ - v1.28:
+ from:
+ build_args:
+ branch: stable/v1.28
+ node: v1.28
+ dependency: 3.16
+ - v1.27:
+ from:
+ build_args:
+ branch: stable/v1.27
+ node: v1.27
+ dependency: 3.16
+ - v1.26:
+ from:
+ build_args:
+ branch: stable/v1.26
+ node: v1.26
+ dependency: 3.16
+
+- functest-kubernetes-opnfv-functest-kubernetes-smoke-params: &functest-kubernetes-opnfv-functest-kubernetes-smoke-params
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-params'
+ repo: 'opnfv'
+ container: 'functest-kubernetes-smoke'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: v1.30
+ dependency: 3.16
+ - v1.29:
+ from:
+ build_args:
+ branch: stable/v1.29
+ node: v1.29
+ dependency: 3.16
+ - v1.28:
+ from:
+ build_args:
+ branch: stable/v1.28
+ node: v1.28
+ dependency: 3.16
+ - v1.27:
+ from:
+ build_args:
+ branch: stable/v1.27
+ node: v1.27
+ dependency: 3.16
+ - v1.26:
+ from:
+ build_args:
+ branch: stable/v1.26
+ node: v1.26
+ dependency: 3.16
+
+- functest-kubernetes-opnfv-functest-kubernetes-security-params: &functest-kubernetes-opnfv-functest-kubernetes-security-params
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-params'
+ repo: 'opnfv'
+ container: 'functest-kubernetes-security'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: v1.30
+ dependency: 3.16
+ - v1.29:
+ from:
+ build_args:
+ branch: stable/v1.29
+ node: v1.29
+ dependency: 3.16
+ - v1.28:
+ from:
+ build_args:
+ branch: stable/v1.28
+ node: v1.28
+ dependency: 3.16
+ - v1.27:
+ from:
+ build_args:
+ branch: stable/v1.27
+ node: v1.27
+ dependency: 3.16
+ - v1.26:
+ from:
+ build_args:
+ branch: stable/v1.26
+ node: v1.26
+ dependency: 3.16
+
+- functest-kubernetes-opnfv-functest-kubernetes-benchmarking-params: &functest-kubernetes-opnfv-functest-kubernetes-benchmarking-params
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-params'
repo: 'opnfv'
+ container: 'functest-kubernetes-benchmarking'
port:
tag:
- latest:
+ from:
+ build_args:
branch: master
- slave: lf-virtual8
- - jerma:
- branch: stable/jerma
- slave: lf-virtual8
- - iruya:
- branch: stable/iruya
- slave: lf-virtual7
- - hunter:
- branch: stable/hunter
- slave: lf-virtual5
+ node: v1.30
+ dependency: 3.16
+ - v1.29:
+ from:
+ build_args:
+ branch: stable/v1.29
+ node: v1.29
+ dependency: 3.16
+ - v1.28:
+ from:
+ build_args:
+ branch: stable/v1.28
+ node: v1.28
+ dependency: 3.16
+ - v1.27:
+ from:
+ build_args:
+ branch: stable/v1.27
+ node: v1.27
+ dependency: 3.16
+ - v1.26:
+ from:
+ build_args:
+ branch: stable/v1.26
+ node: v1.26
+ dependency: 3.16
+
+- functest-kubernetes-opnfv-functest-kubernetes-cnf-params: &functest-kubernetes-opnfv-functest-kubernetes-cnf-params
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-params'
+ repo: 'opnfv'
+ container: 'functest-kubernetes-cnf'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: v1.30
+ dependency: 3.16
+ - v1.29:
+ from:
+ build_args:
+ branch: stable/v1.29
+ node: v1.29
+ dependency: 3.16
+ - v1.28:
+ from:
+ build_args:
+ branch: stable/v1.28
+ node: v1.28
+ dependency: 3.16
+ - v1.27:
+ from:
+ build_args:
+ branch: stable/v1.27
+ node: v1.27
+ dependency: 3.16
+ - v1.26:
+ from:
+ build_args:
+ branch: stable/v1.26
+ node: v1.26
+ dependency: 3.16
+
+- functest-kubernetes-jobs: &functest-kubernetes-jobs
+ name: 'functest-kubernetes-jobs'
+ current-parameters: true
- parameter:
- name: functest-kubernetes-slave
+ name: functest-kubernetes-node
parameters:
- label:
- name: slave
- default: '{slave}'
+ name: node
+ default: '{node}'
- parameter:
name: functest-kubernetes-build_tag
@@ -35,36 +243,22 @@
name: build_tag
- parameter:
- name: functest-kubernetes-branch
- parameters:
- - string:
- name: branch
- default: '{branch}'
-
-- parameter:
- name: functest-kubernetes-DEBUG
- parameters:
- - string:
- name: DEBUG
- default: 'true'
-- parameter:
name: functest-kubernetes-DEPLOY_SCENARIO
parameters:
- string:
name: DEPLOY_SCENARIO
default: k8-nosdn-nofeature-noha
-- functest-kubernetes-containers: &functest-kubernetes-containers
- name: 'functest-kubernetes-containers'
- repo: '{repo}'
- port: '{port}'
- container: '{container}'
- tag: '{tag}'
-
- functest-kubernetes-run-containers: &functest-kubernetes-run-containers
name: 'functest-kubernetes-run-containers'
<<: *functest-kubernetes-containers
- test: '{test}'
+ privileged: '{privileged}'
+ volumes: '{volumes}'
+ env: '{env}'
+ network: '{network}'
+ uid: '{uid}'
+ gid: '{gid}'
+ published_ports: '{published_ports}'
- builder:
name: functest-kubernetes-pull-containers
@@ -85,7 +279,22 @@
builders:
- shell: |
set +x
- [ ! -z "$WORKSPACE" ] && rm -rf $WORKSPACE/* || true
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && rm -rf $WORKSPACE/results || true
if [ "{repo}" = "_" ]; then
image={container}:{tag}
elif [ "{port}" = "None" ]; then
@@ -93,19 +302,23 @@
else
image={repo}:{port}/{container}:{tag}
fi
+ mkdir -p $WORKSPACE/results
+ chown {uid}:{gid} $WORKSPACE/results
docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
-e S3_ENDPOINT_URL=https://storage.googleapis.com \
-e S3_DST_URL=s3://artifacts.opnfv.org/functest-kubernetes/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
-e HTTP_DST_URL=http://artifacts.opnfv.org/functest-kubernetes/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -v /home/opnfv/functest-kubernetes/.boto:/etc/boto.cfg \
-e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
-e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
- -e NODE_NAME=$slave \
+ -e NODE_NAME=$node \
-e BUILD_TAG=$BUILD_TAG \
-v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
- -e DEBUG=$DEBUG \
- -e DEPLOY_SCENARIO=$DEPLOY_SCENARIO \
- -v /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config \
- -v /home/opnfv/functest-kubernetes/.boto:/root/.boto \
$image run_tests -t {test} -p -r
- builder:
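
The rewritten run builder above drops the hard-coded DEBUG and kubeconfig mounts and instead expands the {volumes}, {env} and {published_ports} lists into docker run flags. A standalone sketch of that expansion, using a hypothetical two-entry volumes value shaped like what JJB renders for the healthcheck project:

    # Mirrors the volumes loop from the run-containers builder; the input is illustrative.
    volumes_param='[/home/opnfv/functest-kubernetes/config.latest:/root/.kube/config, /home/opnfv/functest-kubernetes/config.latest:/home/xtesting/.kube/config]'
    volumes=
    if [ "$volumes_param" != "None" ]; then
        for i in $(echo "$volumes_param" | tr -d '[]' | sed "s/, / /g"); do
            volumes="-v $i $volumes"
        done
    fi
    # Prints the two -v flags (in reverse order), ready to splice into docker run.
    echo $volumes
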
@@ -122,135 +335,362 @@
fi
docker rmi $image || true
-- functest-kubernetes-build-containers: &functest-kubernetes-build-containers
- name: 'functest-kubernetes-build-containers'
- <<: *functest-kubernetes-containers
- ref_arg: '{ref_arg}'
- path: '{path}'
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-pull'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pull-containers:
+ <<: *functest-kubernetes-containers
-- builder:
- name: functest-kubernetes-build-containers
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-pull'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-healthcheck-params
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
builders:
- - shell: |
- set +x
- if [ "{repo}" = "_" ]; then
- image={container}:{tag}
- elif [ "{port}" = "None" ]; then
- image={repo}/{container}:{tag}
- else
- image={repo}:{port}/{container}:{tag}
- fi
- if [ "{ref_arg}" = "None" ]; then
- build_arg=""
- else
- build_arg="--build-arg {ref_arg}={ref}"
- fi
- cd {path}
- docker build $build_arg \
- --pull=false --no-cache --force-rm=true \
- -t $image .
+ - functest-kubernetes-remove-images:
+ <<: *functest-kubernetes-containers
-- scm:
- name: functest-kubernetes-scm
- scm:
- - git:
- url: https://gerrit.opnfv.org/gerrit/functest-kubernetes
- refspec: '+refs/changes/*:refs/changes/*'
- branches:
- - '{ref}'
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-rmi'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-healthcheck-params
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-rmi'
- job-template:
- name: 'functest-kubernetes-{repo}-{container}-{tag}-pull'
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-pull'
parameters:
- - functest-kubernetes-slave:
- slave: '{slave}'
+ - functest-kubernetes-node:
+ node: '{node}'
builders:
- functest-kubernetes-pull-containers:
<<: *functest-kubernetes-containers
- project:
- name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-pull'
- <<: *functest-kubernetes-params
- container: 'functest-kubernetes-healthcheck'
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-pull'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-smoke-params
jobs:
- - 'functest-kubernetes-{repo}-{container}-{tag}-pull'
+ - 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-remove-images:
+ <<: *functest-kubernetes-containers
- project:
- name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-pull'
- <<: *functest-kubernetes-params
- container: 'functest-kubernetes-smoke'
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-rmi'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-smoke-params
jobs:
- - 'functest-kubernetes-{repo}-{container}-{tag}-pull'
+ - 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-rmi'
- job-template:
- name: 'functest-kubernetes-{repo}-{container}-{tag}-rmi'
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-pull'
parameters:
- - functest-kubernetes-slave:
- slave: '{slave}'
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pull-containers:
+ <<: *functest-kubernetes-containers
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-pull'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-security-params
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
builders:
- functest-kubernetes-remove-images:
<<: *functest-kubernetes-containers
- project:
- name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-rmi'
- <<: *functest-kubernetes-params
- container: 'functest-kubernetes-healthcheck'
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-rmi'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-security-params
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-rmi'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-pull'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pull-containers:
+ <<: *functest-kubernetes-containers
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-pull'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-benchmarking-params
jobs:
- - 'functest-kubernetes-{repo}-{container}-{tag}-rmi'
+ - 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-remove-images:
+ <<: *functest-kubernetes-containers
- project:
- name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-rmi'
- <<: *functest-kubernetes-params
- container: 'functest-kubernetes-smoke'
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-rmi'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-benchmarking-params
jobs:
- - 'functest-kubernetes-{repo}-{container}-{tag}-rmi'
+ - 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-rmi'
- job-template:
- name: 'functest-kubernetes-{repo}-{container}-{tag}-{test}-run'
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-pull'
parameters:
- - functest-kubernetes-slave:
- slave: '{slave}'
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pull-containers:
+ <<: *functest-kubernetes-containers
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-pull'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-cnf-params
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-pull'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-rmi'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-remove-images:
+ <<: *functest-kubernetes-containers
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-rmi'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-cnf-params
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-rmi'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
- functest-kubernetes-build_tag:
build_tag: ''
- - functest-kubernetes-DEBUG:
- DEBUG: 'true'
- functest-kubernetes-DEPLOY_SCENARIO:
DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
builders:
- functest-kubernetes-run-containers:
<<: *functest-kubernetes-run-containers
+ test: '{test}'
- project:
name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck'
- <<: *functest-kubernetes-params
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-healthcheck-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
container: 'functest-kubernetes-healthcheck'
test:
+ - k8s_quick
- k8s_smoke
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
jobs:
- - 'functest-kubernetes-{repo}-{container}-{tag}-{test}-run'
+ - 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ - functest-kubernetes-build_tag:
+ build_tag: ''
+ - functest-kubernetes-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-run-containers:
+ <<: *functest-kubernetes-run-containers
+ test: '{test}'
- project:
name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke'
- <<: *functest-kubernetes-params
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-smoke-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
container: 'functest-kubernetes-smoke'
test:
- xrally_kubernetes
- k8s_conformance
- exclude:
- - tag: iruya
- test: xrally_kubernetes
- - tag: hunter
- test: xrally_kubernetes
+ - k8s_conformance_serial
+ - sig_api_machinery
+ - sig_api_machinery_serial
+ - sig_apps
+ - sig_apps_serial
+ - sig_auth
+ - sig_cluster_lifecycle
+ - sig_instrumentation
+ - sig_network
+ - sig_node
+ - sig_scheduling_serial
+ - sig_storage
+ - sig_storage_serial
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ - functest-kubernetes-build_tag:
+ build_tag: ''
+ - functest-kubernetes-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-run-containers:
+ <<: *functest-kubernetes-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-security-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-security'
+ test:
+ - kube_hunter
+ - kube_bench_master
+ - kube_bench_node
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
jobs:
- - 'functest-kubernetes-{repo}-{container}-{tag}-{test}-run'
+ - 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ - functest-kubernetes-build_tag:
+ build_tag: ''
+ - functest-kubernetes-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-run-containers:
+ <<: *functest-kubernetes-run-containers
+ test: '{test}'
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-benchmarking-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-benchmarking'
+ test:
+ - xrally_kubernetes_full
+ - netperf
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-{test}-run'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ - functest-kubernetes-build_tag:
+ build_tag: ''
+ - functest-kubernetes-DEPLOY_SCENARIO:
+ DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ builders:
+ - functest-kubernetes-run-containers:
+ <<: *functest-kubernetes-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-cnf-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
+ container: 'functest-kubernetes-cnf'
+ test:
+ - k8s_vims
+ - helm_vims
+ - cnf_testsuite
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-{test}-run'
- builder:
name: functest-kubernetes-zip
builders:
- shell: |
set +x
- [ ! -z "$WORKSPACE" ] && rm -rf $WORKSPACE/* || true
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && rm -rf $WORKSPACE/results || true
if [ "{repo}" = "_" ]; then
image={container}:{tag}
elif [ "{port}" = "None" ]; then
@@ -258,39 +698,52 @@
else
image={repo}:{port}/{container}:{tag}
fi
+ mkdir -p $WORKSPACE/results
+ chown {uid}:{gid} $WORKSPACE/results
docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
-e S3_ENDPOINT_URL=https://storage.googleapis.com \
-e S3_DST_URL=s3://artifacts.opnfv.org/functest-kubernetes \
-e HTTP_DST_URL=http://artifacts.opnfv.org/functest-kubernetes \
+ -v /home/opnfv/functest-kubernetes/.boto:/etc/boto.cfg \
-e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
-e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
-e BUILD_TAG=$BUILD_TAG \
-v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
- -e DEBUG=$DEBUG \
- -e DEPLOY_SCENARIO=$DEPLOY_SCENARIO \
- -v /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config \
- -v /home/opnfv/functest-kubernetes/.boto:/root/.boto \
$image zip_campaign
- job-template:
name: 'functest-kubernetes-{tag}-zip'
parameters:
- - functest-kubernetes-slave:
- slave: '{slave}'
+ - functest-kubernetes-node:
+ node: '{node}'
- functest-kubernetes-build_tag:
build_tag: ''
- - functest-kubernetes-DEBUG:
- DEBUG: 'true'
- functest-kubernetes-DEPLOY_SCENARIO:
DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
builders:
- functest-kubernetes-zip:
- <<: *functest-kubernetes-containers
+ <<: *functest-kubernetes-run-containers
- project:
- name: 'functest-kubernetes-{tag}-zip'
- <<: *functest-kubernetes-params
+ name: 'functest-kubernetes-zip'
+ <<: *functest-kubernetes-opnfv-functest-kubernetes-healthcheck-params
+ volumes:
+ - /home/opnfv/functest-kubernetes/config.{tag}:/root/.kube/config
+ - /home/opnfv/functest-kubernetes/config.{tag}:/home/xtesting/.kube/config
+ env:
+ - DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ published_ports:
container: 'functest-kubernetes-healthcheck'
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
jobs:
- 'functest-kubernetes-{tag}-zip'
@@ -298,22 +751,22 @@
name: 'functest-kubernetes-{tag}-daily'
project-type: multijob
triggers:
- - timed: '@daily'
+ - timed: '@weekly'
parameters:
- - functest-kubernetes-slave:
- slave: '{slave}'
+ - functest-kubernetes-node:
+ node: '{node}'
- functest-kubernetes-build_tag:
build_tag: ''
- - functest-kubernetes-DEBUG:
- DEBUG: 'true'
- functest-kubernetes-DEPLOY_SCENARIO:
DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
properties:
- build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
blocking-jobs:
- - '^functest-kubernetes-{tag}-(daily|check|gate)$'
+ - ^functest-kubernetes-(pi-)*.*-(daily|docker|review)$
+ # yamllint enable rule:indentation
builders:
- multijob:
name: remove former images
@@ -322,6 +775,12 @@
<<: *functest-kubernetes-jobs
- name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-rmi'
<<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-rmi'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-rmi'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-rmi'
+ <<: *functest-kubernetes-jobs
- multijob:
name: pull containers
projects:
@@ -329,9 +788,17 @@
<<: *functest-kubernetes-jobs
- name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-pull'
<<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-pull'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-pull'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-pull'
+ <<: *functest-kubernetes-jobs
- multijob:
name: opnfv/functest-kubernetes-healthcheck:{tag}
projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-k8s_quick-run'
+ <<: *functest-kubernetes-jobs
- name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-k8s_smoke-run'
<<: *functest-kubernetes-jobs
- multijob:
@@ -342,17 +809,234 @@
<<: *functest-kubernetes-jobs
- name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-k8s_conformance-run'
<<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-k8s_conformance_serial-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_api_machinery-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_api_machinery_serial-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_apps-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_apps_serial-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_auth-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_cluster_lifecycle-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_instrumentation-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_network-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_node-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_scheduling_serial-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_storage-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_storage_serial-run'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: opnfv/functest-kubernetes-security:{tag}
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_hunter-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_bench_master-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_bench_node-run'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: opnfv/functest-kubernetes-benchmarking:{tag}
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-xrally_kubernetes_full-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-netperf-run'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: opnfv/functest-kubernetes-cnf:{tag}
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-k8s_vims-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-helm_vims-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-cnf_testsuite-run'
+ <<: *functest-kubernetes-jobs
- multijob:
name: dump all campaign data
projects:
- name: 'functest-kubernetes-{tag}-zip'
<<: *functest-kubernetes-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-daily'
+ <<: *functest-kubernetes-params
+ jobs:
+ - 'functest-kubernetes-{tag}-daily'
+
+- view:
+ name: functest-kubernetes
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-kubernetes-[a-z0-9.-]+-daily$
+
+- functest-kubernetes-build-containers: &functest-kubernetes-build-containers
+ name: 'functest-kubernetes-build-containers'
+ <<: *functest-kubernetes-containers
+ ref_arg: '{ref_arg}'
+ path: '{path}'
+ build_args: '{build_args}'
+ from: '{from}'
+
+- builder:
+ name: functest-kubernetes-build-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ build_args=""
+ if [ "{build_args}" != "None" ]; then
+ for i in $(echo {build_args} | tr -d '[]' |sed "s/, / /g" ); \
+ do build_args="--build-arg $i $build_args"; done
+ fi
+ if [ "{ref_arg}" != "None" ]; then
+ build_args="$build_args --build-arg {ref_arg}={ref}"
+ fi
+ cd {path}
+ if [ "{from}" != "None" ]; then
+ sed -i {from} Dockerfile
+ fi
+ docker build $build_args \
+ --pull=false --no-cache --force-rm=true \
+ -t $image .
+
+- scm:
+ name: functest-kubernetes-scm
+ scm:
+ - git:
+ url: 'https://gerrit.opnfv.org/gerrit/functest-kubernetes'
+ refspec: '+refs/heads/*:refs/remotes/origin/* +refs/changes/*:refs/changes/*'
+ branches:
+ - '{ref}'
+
+- functest-kubernetes-dep: &functest-kubernetes-dep
+ name: 'functest-kubernetes-containers'
+ repo: '{repo}'
+ container: '{container}'
+ port: '{port}'
+ tag: '{dependency}'
- job-template:
- name: 'functest-kubernetes-{repo}-{container}-{tag}-gate'
+ name: 'functest-kubernetes-{tag}-dep-pull'
parameters:
- - functest-kubernetes-slave:
- slave: '{slave}'
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-pull-containers:
+ <<: *functest-kubernetes-dep
+
+- functest-kubernetes-dep-params: &functest-kubernetes-dep-params
+ name: 'functest-kubernetes-dep-params'
+ repo: '_'
+ container: 'alpine'
+ port:
+ tag:
+ - latest:
+ dependency: 3.16
+ - v1.29:
+ dependency: 3.16
+ - v1.28:
+ dependency: 3.16
+ - v1.27:
+ dependency: 3.16
+ - v1.26:
+ dependency: 3.16
+
+- project:
+ name: 'functest-kubernetes-dep-pull'
+ <<: *functest-kubernetes-dep-params
+ jobs:
+ - 'functest-kubernetes-{tag}-dep-pull'
+
+- job-template:
+ name: 'functest-kubernetes-{tag}-dep-rmi'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-remove-images:
+ <<: *functest-kubernetes-dep
+
+- project:
+ name: 'functest-kubernetes-dep-rmi'
+ <<: *functest-kubernetes-dep-params
+ jobs:
+ - 'functest-kubernetes-{tag}-dep-rmi'
+
+- builder:
+ name: functest-kubernetes-tox
+ builders:
+ - shell: |
+ set +x
+ apt-get -o DPkg::Lock::Timeout=300 update && \
+ DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install software-properties-common gpg -y
+ add-apt-repository -y ppa:deadsnakes/ppa
+ apt-get -o DPkg::Lock::Timeout=300 update && \
+ DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install \
+ libxml2-dev libxslt-dev libffi-dev libjpeg-dev \
+ python3.10 python3.10-dev python3.10-distutils \
+ python3.9 python3.9-dev python3.9-distutils \
+ python3.8 python3.8-dev python3.8-distutils \
+ python3-pip enchant-2 -y
+ pip3 install tox
+
+ tox
+
+- job-template:
+ name: 'functest-kubernetes-{tag}-tox'
+ scm:
+ - functest-kubernetes-scm:
+ ref: $GERRIT_REFSPEC
+ triggers:
+ - functest-kubernetes-patchset-created:
+ branch: '{branch}'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-tox:
+
+- project:
+ name: functest-kubernetes-tox
+ <<: *functest-kubernetes-params
+ jobs:
+ - 'functest-kubernetes-{tag}-tox'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-gate'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
scm:
- functest-kubernetes-scm:
ref: $GERRIT_REFSPEC
@@ -360,92 +1044,182 @@
- functest-kubernetes-build-containers:
<<: *functest-kubernetes-build-containers
ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-core-gate
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ port:
+ container: functest-kubernetes-core
+ ref_arg: BRANCH
+ path: docker/core
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-gate'
- job-template:
- name: 'functest-kubernetes-{repo}-{container}-{tag}-check'
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-gate'
parameters:
- - functest-kubernetes-slave:
- slave: '{slave}'
- - functest-kubernetes-branch:
- branch: '{branch}'
+ - functest-kubernetes-node:
+ node: '{node}'
scm:
- functest-kubernetes-scm:
- ref: $branch
+ ref: $GERRIT_REFSPEC
builders:
- functest-kubernetes-build-containers:
<<: *functest-kubernetes-build-containers
- ref: $branch
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
- project:
- name: 'functest-kubernetes-_-golang-1.13-alpine3.11-rmi'
- repo: _
+ name: functest-kubernetes-opnfv-functest-kubernetes-healthcheck-gate
+ <<: *functest-kubernetes-params
+ repo: opnfv
port:
- container: golang
- tag: '1.13-alpine3.11'
- slave: master
+ container: functest-kubernetes-healthcheck
+ ref_arg:
+ path: docker/healthcheck
jobs:
- - 'functest-kubernetes-{repo}-{container}-{tag}-rmi'
+ - 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-gate'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-gate'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
- project:
- name: 'functest-kubernetes-_-golang-1.13-alpine3.11-pull'
- repo: _
+ name: functest-kubernetes-opnfv-functest-kubernetes-cnf-gate
+ <<: *functest-kubernetes-params
+ repo: opnfv
port:
- container: golang
- tag: '1.13-alpine3.11'
- slave: master
+ container: functest-kubernetes-cnf
+ ref_arg: BRANCH
+ path: docker/cnf
jobs:
- - 'functest-kubernetes-{repo}-{container}-{tag}-pull'
+ - 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-gate'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-gate'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
- project:
- name: functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-build
+ name: functest-kubernetes-opnfv-functest-kubernetes-security-gate
<<: *functest-kubernetes-params
- container: functest-kubernetes-core
+ repo: opnfv
+ port:
+ container: functest-kubernetes-security
ref_arg: BRANCH
- path: docker/core
+ path: docker/security
jobs:
- - 'functest-kubernetes-{repo}-{container}-{tag}-gate'
- - 'functest-kubernetes-{repo}-{container}-{tag}-check'
+ - 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-gate'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-gate'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
- project:
- name: functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-build
+ name: functest-kubernetes-opnfv-functest-kubernetes-smoke-gate
<<: *functest-kubernetes-params
- container: functest-kubernetes-healthcheck
+ repo: opnfv
+ port:
+ container: functest-kubernetes-smoke
ref_arg:
- path: docker/healthcheck
+ path: docker/smoke
jobs:
- - 'functest-kubernetes-{repo}-{container}-{tag}-gate'
- - 'functest-kubernetes-{repo}-{container}-{tag}-check'
+ - 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-gate'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-gate'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
- project:
- name: functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-build
+ name: functest-kubernetes-opnfv-functest-kubernetes-benchmarking-gate
<<: *functest-kubernetes-params
- container: functest-kubernetes-smoke
+ repo: opnfv
+ port:
+ container: functest-kubernetes-benchmarking
ref_arg:
- path: docker/smoke
+ path: docker/benchmarking
jobs:
- - 'functest-kubernetes-{repo}-{container}-{tag}-gate'
- - 'functest-kubernetes-{repo}-{container}-{tag}-check'
+ - 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-gate'
+
+- trigger:
+ name: functest-kubernetes-patchset-created
+ triggers:
+ - gerrit:
+ trigger-on:
+ - patchset-created-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'functest-kubernetes'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
- job-template:
- name: 'functest-kubernetes-{tag}-check'
+ name: 'functest-kubernetes-{tag}-review'
project-type: multijob
+ triggers:
+ - functest-kubernetes-patchset-created:
+ branch: '{branch}'
parameters:
- - functest-kubernetes-slave:
- slave: '{slave}'
+ - functest-kubernetes-node:
+ node: '{node}'
- functest-kubernetes-build_tag:
build_tag: ''
- - functest-kubernetes-branch:
- branch: '{branch}'
- - functest-kubernetes-DEBUG:
- DEBUG: 'true'
- functest-kubernetes-DEPLOY_SCENARIO:
DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
properties:
- build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
blocking-jobs:
- - '^functest-kubernetes-{tag}-(daily|check|gate)$'
+ - ^functest-kubernetes-(pi-)*.*-(daily|docker|review)$
+ # yamllint enable rule:indentation
builders:
- multijob:
name: remove former images
@@ -454,34 +1228,51 @@
<<: *functest-kubernetes-jobs
- name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-rmi'
<<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-rmi'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-rmi'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-rmi'
+ <<: *functest-kubernetes-jobs
- multijob:
- name: remove dependencies
+ name: remove dependency
projects:
- - name: 'functest-kubernetes-_-golang-1.13-alpine3.11-rmi'
+ - name: 'functest-kubernetes-{tag}-dep-rmi'
<<: *functest-kubernetes-jobs
- multijob:
- name: pull dependencies
+ name: pull dependency
projects:
- - name: 'functest-kubernetes-_-golang-1.13-alpine3.11-pull'
+ - name: 'functest-kubernetes-{tag}-dep-pull'
<<: *functest-kubernetes-jobs
- multijob:
name: build opnfv/functest-kubernetes-core
projects:
- - name: 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-check'
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-gate'
<<: *functest-kubernetes-jobs
- multijob:
- name: build opnfv/functest-kubernetes-healthcheck
+ name: build containers
projects:
- - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-check'
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-gate'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-gate'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-gate'
<<: *functest-kubernetes-jobs
- multijob:
- name: build containers
+ name: build opnfv/functest-kubernetes-smoke
projects:
- - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-check'
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-gate'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: build opnfv/functest-kubernetes-benchmarking
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-gate'
<<: *functest-kubernetes-jobs
- multijob:
name: opnfv/functest-kubernetes-healthcheck:{tag}
projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-k8s_quick-run'
+ <<: *functest-kubernetes-jobs
- name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-k8s_smoke-run'
<<: *functest-kubernetes-jobs
- multijob:
@@ -492,108 +1283,910 @@
<<: *functest-kubernetes-jobs
- name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-k8s_conformance-run'
<<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-k8s_conformance_serial-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_api_machinery-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_api_machinery_serial-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_apps-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_apps_serial-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_auth-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_cluster_lifecycle-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_instrumentation-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_network-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_node-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_scheduling_serial-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_storage-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sig_storage_serial-run'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: opnfv/functest-kubernetes-security:{tag}
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_hunter-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_bench_master-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-kube_bench_node-run'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: opnfv/functest-kubernetes-benchmarking:{tag}
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-xrally_kubernetes_full-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-netperf-run'
+ <<: *functest-kubernetes-jobs
+ - multijob:
+ name: opnfv/functest-kubernetes-cnf:{tag}
+ projects:
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-k8s_vims-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-helm_vims-run'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-cnf_testsuite-run'
+ <<: *functest-kubernetes-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-review'
+ <<: *functest-kubernetes-params
+ jobs:
+ - 'functest-kubernetes-{tag}-review'
+
+- view:
+ name: functest-kubernetes-review
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-kubernetes-[a-z0-9.-]+-review$
+
+- view:
+ name: functest-kubernetes-tox
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-kubernetes-[a-z0-9.-]+-tox$
+
+- builder:
+ name: functest-kubernetes-push-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ docker push $image
- trigger:
- name: functest-kubernetes-patchset-created
+ name: functest-kubernetes-commit
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'functest-kubernetes'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- skip-vote:
- successful: false
- failed: false
- unstable: false
- notbuilt: false
+ - pollscm:
+ cron: "*/30 * * * *"
- job-template:
- name: 'functest-kubernetes-{tag}-gate'
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-build'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: '{branch}'
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-kubernetes-push-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-core-build
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ container: functest-kubernetes-core
+ port:
+ ref_arg: BRANCH
+ path: docker/core
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-build'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-build'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: '{branch}'
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-kubernetes-push-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-healthcheck-build
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ container: functest-kubernetes-healthcheck
+ port:
+ ref_arg:
+ path: docker/healthcheck
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-build'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-build'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: '{branch}'
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-kubernetes-push-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-cnf-build
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ container: functest-kubernetes-cnf
+ port:
+ ref_arg: BRANCH
+ path: docker/cnf
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-build'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-build'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: '{branch}'
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-kubernetes-push-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-security-build
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ container: functest-kubernetes-security
+ port:
+ ref_arg: BRANCH
+ path: docker/security
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-build'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-build'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: '{branch}'
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-kubernetes-push-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-smoke-build
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ container: functest-kubernetes-smoke
+ port:
+ ref_arg:
+ path: docker/smoke
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-build'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-build'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ scm:
+ - functest-kubernetes-scm:
+ ref: '{branch}'
+ builders:
+ - functest-kubernetes-build-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-kubernetes-push-containers:
+ <<: *functest-kubernetes-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-kubernetes-opnfv-functest-kubernetes-benchmarking-build
+ <<: *functest-kubernetes-params
+ repo: opnfv
+ container: functest-kubernetes-benchmarking
+ port:
+ ref_arg:
+ path: docker/benchmarking
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-build'
+
+- job-template:
+ name: 'functest-kubernetes-{tag}-docker'
project-type: multijob
triggers:
- - functest-kubernetes-patchset-created:
- branch: '{branch}'
+ - functest-kubernetes-commit
+ scm:
+ - functest-kubernetes-scm:
+ ref: '{branch}'
parameters:
- - functest-kubernetes-slave:
- slave: '{slave}'
- - functest-kubernetes-build_tag:
- build_tag: ''
- - functest-kubernetes-DEBUG:
- DEBUG: 'true'
- - functest-kubernetes-DEPLOY_SCENARIO:
- DEPLOY_SCENARIO: k8-nosdn-nofeature-noha
+ - functest-kubernetes-node:
+ node: '{node}'
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
properties:
- build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
blocking-jobs:
- - '^functest-kubernetes-{tag}-(daily|check|gate)$'
+ - ^functest-kubernetes-(pi-)*.*-(daily|docker|review)$
+ # yamllint enable rule:indentation
builders:
- multijob:
- name: remove former images
+ name: remove dependency
projects:
- - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-rmi'
- <<: *functest-kubernetes-jobs
- - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-rmi'
+ - name: 'functest-kubernetes-{tag}-dep-rmi'
<<: *functest-kubernetes-jobs
- multijob:
- name: remove dependencies
+ name: pull dependency
projects:
- - name: 'functest-kubernetes-_-golang-1.13-alpine3.11-rmi'
- <<: *functest-kubernetes-jobs
- - multijob:
- name: pull dependencies
- projects:
- - name: 'functest-kubernetes-_-golang-1.13-alpine3.11-pull'
+ - name: 'functest-kubernetes-{tag}-dep-pull'
<<: *functest-kubernetes-jobs
- multijob:
name: build opnfv/functest-kubernetes-core
projects:
- - name: 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-gate'
- <<: *functest-kubernetes-jobs
- - multijob:
- name: build opnfv/functest-kubernetes-healthcheck
- projects:
- - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-gate'
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-build'
<<: *functest-kubernetes-jobs
- multijob:
name: build containers
projects:
- - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-gate'
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-build'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-build'
+ <<: *functest-kubernetes-jobs
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-build'
<<: *functest-kubernetes-jobs
- multijob:
- name: opnfv/functest-kubernetes-healthcheck:{tag}
+ name: build opnfv/functest-kubernetes-smoke
projects:
- - name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-k8s_smoke-run'
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-build'
<<: *functest-kubernetes-jobs
- multijob:
- name: opnfv/functest-kubernetes-smoke:{tag}
- execution-type: SEQUENTIALLY
+ name: build opnfv/functest-kubernetes-benchmarking
projects:
- - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-xrally_kubernetes-run'
- <<: *functest-kubernetes-jobs
- - name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-k8s_conformance-run'
+ - name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-build'
<<: *functest-kubernetes-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
- project:
- name: 'functest-kubernetes'
+ name: 'functest-kubernetes-docker'
<<: *functest-kubernetes-params
jobs:
- - 'functest-kubernetes-{tag}-daily'
- - 'functest-kubernetes-{tag}-check'
- - 'functest-kubernetes-{tag}-gate'
+ - 'functest-kubernetes-{tag}-docker'
+
+- builder:
+ name: functest-kubernetes-trivy
+ builders:
+ - shell: |
+ apt-get -o DPkg::Lock::Timeout=300 update && \
+ DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b .
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ ./trivy image --exit-code 1 $image
+
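The builder above installs trivy from the upstream install script and scans the already-published image; because the scan runs with --exit-code 1, any reported vulnerability makes the build fail, which is what drives the first-failure e-mail of the weekly *-trivy jobs below. A minimal sketch of the equivalent standalone scan, with an example image name:

    #!/bin/sh
    curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh \
        | sh -s -- -b .
    # --exit-code 1: exit non-zero when vulnerabilities are found, so the
    # Jenkins job turns red and email-ext notifies on first failure.
    ./trivy image --exit-code 1 opnfv/functest-kubernetes-core:latest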
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-trivy:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-core-trivy'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-core'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-trivy'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-trivy:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-trivy'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-healthcheck'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-trivy'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-trivy:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-trivy'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-cnf'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-trivy'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-trivy:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-trivy'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-security'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-trivy'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-trivy:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-trivy'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-smoke'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-trivy'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-trivy:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-trivy'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-benchmarking'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-trivy'
+
+- builder:
+ name: functest-kubernetes-grype
+ builders:
+ - shell: |
+ apt-get -o DPkg::Lock::Timeout=300 update && \
+ DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b .
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ ./grype -q $image
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-grype:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-core-grype'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-core'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-grype'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-grype:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-grype'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-healthcheck'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-grype'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-grype:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-grype'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-cnf'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-grype'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-grype:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-grype'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-security'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-grype'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-grype:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-grype'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-smoke'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-grype'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-grype:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-grype'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-benchmarking'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-grype'
+
+- builder:
+ name: functest-kubernetes-sbom
+ builders:
+ - shell: |
+ apt-get -o DPkg::Lock::Timeout=300 update && \
+ DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ mkdir -p ~/.docker
+ curl -sSfL https://raw.githubusercontent.com/docker/sbom-cli-plugin/main/install.sh | sh -s --
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ docker sbom $image
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-sbom:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-core-sbom'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-core'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-core-{tag}-sbom'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-sbom:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-sbom'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-healthcheck'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-healthcheck-{tag}-sbom'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-sbom:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-cnf-sbom'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-cnf'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-cnf-{tag}-sbom'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-sbom:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-security-sbom'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-security'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-security-{tag}-sbom'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-sbom:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-smoke-sbom'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-smoke'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-smoke-{tag}-sbom'
+
+- job-template:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-kubernetes-node:
+ node: '{node}'
+ builders:
+ - functest-kubernetes-sbom:
+ <<: *functest-kubernetes-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-sbom'
+ <<: *functest-kubernetes-params
+ repo: 'opnfv'
+ container: 'functest-kubernetes-benchmarking'
+ port:
+ jobs:
+ - 'functest-kubernetes-opnfv-functest-kubernetes-benchmarking-{tag}-sbom'
- view:
- name: functest-kubernetes
+ name: functest-kubernetes-docker
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-kubernetes-[a-z0-9.-]+-docker$
+
+- view:
+ name: functest-kubernetes-trivy
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: (?!functest-kubernetes-pi)(?!functest-kubernetes-ng)^functest-kubernetes-[a-z-0-9.]+-trivy$
+
+- view:
+ name: functest-kubernetes-grype
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: (?!functest-kubernetes-pi)(?!functest-kubernetes-ng)^functest-kubernetes-[a-z-0-9.]+-grype$
+
+- view:
+ name: functest-kubernetes-sbom
view-type: list
columns:
- status
@@ -602,4 +2195,4 @@
- last-success
- last-failure
- last-duration
- regex: ^functest-kubernetes-[a-z]+-(daily|check|gate)$
+ regex: (?!functest-kubernetes-pi)(?!functest-kubernetes-ng)^functest-kubernetes-[a-z-0-9.]+-sbom$
diff --git a/jjb/functest/functest-pi.yaml b/jjb/functest/functest-pi.yaml
new file mode 100644
index 000000000..1ac14f584
--- /dev/null
+++ b/jjb/functest/functest-pi.yaml
@@ -0,0 +1,1239 @@
+---
+- functest-pi-containers: &functest-pi-containers
+ name: 'functest-pi-containers'
+ repo: '{repo}'
+ port: '{port}'
+ container: '{container}'
+ tag: '{tag}'
+
+- functest-pi-params: &functest-pi-params
+ name: 'functest-pi-params'
+ tag:
+ - latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena-latest:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm64-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm64-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm64-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - arm64-xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm64-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+
+- functest-pi-ollivier-functest-healthcheck-params: &functest-pi-ollivier-functest-healthcheck-params
+ name: 'functest-pi-ollivier-functest-healthcheck-params'
+ repo: 'ollivier'
+ container: 'functest-healthcheck'
+ port:
+ tag:
+ - latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena-latest:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm64-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm64-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm64-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - arm64-xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm64-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+
+- functest-pi-ollivier-functest-smoke-params: &functest-pi-ollivier-functest-smoke-params
+ name: 'functest-pi-ollivier-functest-smoke-params'
+ repo: 'ollivier'
+ container: 'functest-smoke'
+ port:
+ tag:
+ - latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena-latest:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm64-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm64-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm64-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - arm64-xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm64-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+
+- functest-pi-ollivier-functest-smoke-cntt-params: &functest-pi-ollivier-functest-smoke-cntt-params
+ name: 'functest-pi-ollivier-functest-smoke-cntt-params'
+ repo: 'ollivier'
+ container: 'functest-smoke-cntt'
+ port:
+ tag:
+ - latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena-latest:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm64-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm64-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm64-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - arm64-xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm64-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+
+- functest-pi-ollivier-functest-benchmarking-params: &functest-pi-ollivier-functest-benchmarking-params
+ name: 'functest-pi-ollivier-functest-benchmarking-params'
+ repo: 'ollivier'
+ container: 'functest-benchmarking'
+ port:
+ tag:
+ - latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena-latest:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm64-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm64-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm64-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - arm64-xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm64-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+
+- functest-pi-ollivier-functest-benchmarking-cntt-params: &functest-pi-ollivier-functest-benchmarking-cntt-params
+ name: 'functest-pi-ollivier-functest-benchmarking-cntt-params'
+ repo: 'ollivier'
+ container: 'functest-benchmarking-cntt'
+ port:
+ tag:
+ - latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena-latest:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm64-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm64-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm64-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - arm64-xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm64-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+
+- functest-pi-ollivier-functest-vnf-params: &functest-pi-ollivier-functest-vnf-params
+ name: 'functest-pi-ollivier-functest-vnf-params'
+ repo: 'ollivier'
+ container: 'functest-vnf'
+ port:
+ tag:
+ - latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - xena-latest:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ - arm64-latest:
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ - arm64-zed:
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ - arm64-yoga:
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ - arm64-xena:
+ node: laas-xena
+ DASHBOARD_URL: http://10.200.120.76
+ - arm64-wallaby:
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+
+- functest-pi-jobs: &functest-pi-jobs
+ name: 'functest-pi-jobs'
+ current-parameters: true
+
+- parameter:
+ name: functest-pi-node
+ parameters:
+ - label:
+ name: node
+ default: '{node}'
+
+- parameter:
+ name: functest-pi-build_tag
+ parameters:
+ - random-string:
+ name: build_tag
+
+- parameter:
+ name: functest-pi-EXTERNAL_NETWORK
+ parameters:
+ - string:
+ name: EXTERNAL_NETWORK
+ default: public
+
+- parameter:
+ name: functest-pi-VOLUME_DEVICE_NAME
+ parameters:
+ - string:
+ name: VOLUME_DEVICE_NAME
+ default: sdb
+
+- parameter:
+ name: functest-pi-IMAGE_PROPERTIES
+ parameters:
+ - string:
+ name: IMAGE_PROPERTIES
+ default: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+
+- functest-pi-run-containers: &functest-pi-run-containers
+ name: 'functest-pi-run-containers'
+ <<: *functest-pi-containers
+ privileged: '{privileged}'
+ volumes: '{volumes}'
+ env: '{env}'
+ network: '{network}'
+ uid: '{uid}'
+ gid: '{gid}'
+ published_ports: '{published_ports}'
+ DASHBOARD_URL: '{DASHBOARD_URL}'
+
+- builder:
+ name: functest-pi-pull-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker pull $image
+
+- builder:
+ name: functest-pi-run-containers
+ builders:
+ - shell: |
+ set +x
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo mkdir -p $WORKSPACE/results
+ sudo chown {uid}:{gid} $WORKSPACE/results
+ sudo docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/functest/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/functest/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -v /home/opnfv/functest/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ -e DASHBOARD_URL={DASHBOARD_URL} \
+ $image run_tests -t {test} -p -r
+
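Seen end to end, the run builder above expands the JJB lists of volumes, env entries and published ports into repeated -v/-e/-p flags, recreates the results directory, and launches the container with run_tests -t <test> -p -r. As a hand-expanded sketch, this is roughly what it produces for the functest-healthcheck tempest_smoke run on the latest/lf-pod4 entry defined in this file (BUILD_TAG, JOB_NAME, BUILD_ID and WORKSPACE are injected by Jenkins):

    #!/bin/sh
    # Hand-expanded example; every value below is substituted by JJB from the
    # params/projects in this file or provided by Jenkins at run time.
    sudo docker run --rm \
      --privileged=false \
      --network=bridge \
      -v /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file \
      -v /home/opnfv/functest/images:/home/opnfv/functest/images \
      -e EXTERNAL_NETWORK=public \
      -e VOLUME_DEVICE_NAME=sdb \
      -e IMAGE_PROPERTIES=hw_scsi_model:virtio-scsi,hw_disk_bus:scsi \
      -e S3_ENDPOINT_URL=https://storage.googleapis.com \
      -e S3_DST_URL=s3://artifacts.opnfv.org/functest/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
      -e HTTP_DST_URL=http://artifacts.opnfv.org/functest/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
      -v /home/opnfv/functest/.boto:/etc/boto.cfg \
      -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
      -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
      -e NODE_NAME=lf-pod4 \
      -e BUILD_TAG=$BUILD_TAG \
      -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
      -e DASHBOARD_URL=http://172.30.12.83 \
      ollivier/functest-healthcheck:latest run_tests -t tempest_smoke -p -r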
+- builder:
+ name: functest-pi-remove-images
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker rmi $image || true
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-healthcheck-{tag}-pull'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-pull-containers:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-healthcheck-pull'
+ <<: *functest-pi-ollivier-functest-healthcheck-params
+ jobs:
+ - 'functest-pi-ollivier-functest-healthcheck-{tag}-pull'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-healthcheck-{tag}-rmi'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-remove-images:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-healthcheck-rmi'
+ <<: *functest-pi-ollivier-functest-healthcheck-params
+ jobs:
+ - 'functest-pi-ollivier-functest-healthcheck-{tag}-rmi'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-smoke-{tag}-pull'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-pull-containers:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-smoke-pull'
+ <<: *functest-pi-ollivier-functest-smoke-params
+ jobs:
+ - 'functest-pi-ollivier-functest-smoke-{tag}-pull'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-smoke-{tag}-rmi'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-remove-images:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-smoke-rmi'
+ <<: *functest-pi-ollivier-functest-smoke-params
+ jobs:
+ - 'functest-pi-ollivier-functest-smoke-{tag}-rmi'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-pull'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-pull-containers:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-smoke-cntt-pull'
+ <<: *functest-pi-ollivier-functest-smoke-cntt-params
+ jobs:
+ - 'functest-pi-ollivier-functest-smoke-cntt-{tag}-pull'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-rmi'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-remove-images:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-smoke-cntt-rmi'
+ <<: *functest-pi-ollivier-functest-smoke-cntt-params
+ jobs:
+ - 'functest-pi-ollivier-functest-smoke-cntt-{tag}-rmi'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-benchmarking-{tag}-pull'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-pull-containers:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-benchmarking-pull'
+ <<: *functest-pi-ollivier-functest-benchmarking-params
+ jobs:
+ - 'functest-pi-ollivier-functest-benchmarking-{tag}-pull'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-benchmarking-{tag}-rmi'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-remove-images:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-benchmarking-rmi'
+ <<: *functest-pi-ollivier-functest-benchmarking-params
+ jobs:
+ - 'functest-pi-ollivier-functest-benchmarking-{tag}-rmi'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-pull'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-pull-containers:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-benchmarking-cntt-pull'
+ <<: *functest-pi-ollivier-functest-benchmarking-cntt-params
+ jobs:
+ - 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-pull'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-rmi'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-remove-images:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-benchmarking-cntt-rmi'
+ <<: *functest-pi-ollivier-functest-benchmarking-cntt-params
+ jobs:
+ - 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-rmi'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-vnf-{tag}-pull'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-pull-containers:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-vnf-pull'
+ <<: *functest-pi-ollivier-functest-vnf-params
+ jobs:
+ - 'functest-pi-ollivier-functest-vnf-{tag}-pull'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-vnf-{tag}-rmi'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ builders:
+ - functest-pi-remove-images:
+ <<: *functest-pi-containers
+
+- project:
+ name: 'functest-pi-ollivier-functest-vnf-rmi'
+ <<: *functest-pi-ollivier-functest-vnf-params
+ jobs:
+ - 'functest-pi-ollivier-functest-vnf-{tag}-rmi'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-healthcheck-{tag}-{test}-run'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ - functest-pi-build_tag:
+ build_tag: ''
+ - functest-pi-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-pi-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-pi-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-pi-run-containers:
+ <<: *functest-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-pi-ollivier-functest-healthcheck'
+ <<: *functest-pi-ollivier-functest-healthcheck-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-healthcheck'
+ test:
+ - connection_check
+ - tenantnetwork1
+ - tenantnetwork2
+ - vmready1
+ - vmready2
+ - singlevm1
+ - singlevm2
+ - vping_ssh
+ - vping_userdata
+ - cinder_test
+ - odl
+ - tempest_smoke
+ - tempest_horizon
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-pi-ollivier-functest-healthcheck-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-smoke-{tag}-{test}-run'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ - functest-pi-build_tag:
+ build_tag: ''
+ - functest-pi-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-pi-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-pi-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-pi-run-containers:
+ <<: *functest-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-pi-ollivier-functest-smoke'
+ <<: *functest-pi-ollivier-functest-smoke-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-smoke'
+ test:
+ - tempest_neutron
+ - tempest_cinder
+ - tempest_keystone
+ - tempest_heat
+ - tempest_telemetry
+ - rally_sanity
+ - refstack_compute
+ - refstack_object
+ - refstack_platform
+ - tempest_full
+ - tempest_scenario
+ - tempest_slow
+ - patrole_admin
+ - patrole_member
+ - patrole_reader
+ - tempest_barbican
+ - tempest_octavia
+ - tempest_cyborg
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-pi-ollivier-functest-smoke-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-{test}-run'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ - functest-pi-build_tag:
+ build_tag: ''
+ - functest-pi-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-pi-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-pi-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-pi-run-containers:
+ <<: *functest-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-pi-ollivier-functest-smoke-cntt'
+ <<: *functest-pi-ollivier-functest-smoke-cntt-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-smoke-cntt'
+ test:
+ - tempest_neutron_cntt
+ - tempest_cinder_cntt
+ - tempest_keystone_cntt
+ - tempest_heat_cntt
+ - rally_sanity_cntt
+ - tempest_full_cntt
+ - tempest_scenario_cntt
+ - tempest_slow_cntt
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-pi-ollivier-functest-smoke-cntt-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-benchmarking-{tag}-{test}-run'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ - functest-pi-build_tag:
+ build_tag: ''
+ - functest-pi-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-pi-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-pi-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-pi-run-containers:
+ <<: *functest-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-pi-ollivier-functest-benchmarking'
+ <<: *functest-pi-ollivier-functest-benchmarking-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-benchmarking'
+ test:
+ - rally_full
+ - rally_jobs
+ - vmtp
+ - shaker
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-pi-ollivier-functest-benchmarking-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-{test}-run'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ - functest-pi-build_tag:
+ build_tag: ''
+ - functest-pi-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-pi-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-pi-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-pi-run-containers:
+ <<: *functest-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-pi-ollivier-functest-benchmarking-cntt'
+ <<: *functest-pi-ollivier-functest-benchmarking-cntt-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-benchmarking-cntt'
+ test:
+ - rally_full_cntt
+ - rally_jobs_cntt
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-pi-ollivier-functest-vnf-{tag}-{test}-run'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ - functest-pi-build_tag:
+ build_tag: ''
+ - functest-pi-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-pi-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-pi-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-pi-run-containers:
+ <<: *functest-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-pi-ollivier-functest-vnf'
+ <<: *functest-pi-ollivier-functest-vnf-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-vnf'
+ test:
+ - cloudify
+ - cloudify_ims
+ - heat_ims
+ - vyos_vrouter
+ - juju_epc
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-pi-ollivier-functest-vnf-{tag}-{test}-run'
+
+- builder:
+ name: functest-pi-zip
+ builders:
+ - shell: |
+ set +x
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo mkdir -p $WORKSPACE/results
+ sudo chown {uid}:{gid} $WORKSPACE/results
+ sudo docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/functest \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/functest \
+ -v /home/opnfv/functest/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ -e DASHBOARD_URL={DASHBOARD_URL} \
+ $image zip_campaign
+
+- job-template:
+ name: 'functest-pi-{tag}-zip'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ - functest-pi-build_tag:
+ build_tag: ''
+ - functest-pi-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-pi-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-pi-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-pi-zip:
+ <<: *functest-pi-run-containers
+
+- project:
+ name: 'functest-pi-zip'
+ <<: *functest-pi-ollivier-functest-healthcheck-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-healthcheck'
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-pi-{tag}-zip'
+
+- job-template:
+ name: 'functest-pi-{tag}-daily'
+ project-type: multijob
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-pi-node:
+ node: '{node}'
+ - functest-pi-build_tag:
+ build_tag: ''
+ - functest-pi-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-pi-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-pi-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
+ properties:
+ - build-blocker:
+ blocking-jobs:
+ - ^functest-(pi-)*{tag}-(daily|docker|review)$
+ # yamllint enable rule:indentation
+ builders:
+ - multijob:
+ name: remove former images
+ projects:
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-rmi'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-rmi'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-rmi'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-rmi'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-rmi'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-vnf-{tag}-rmi'
+ <<: *functest-pi-jobs
+ - multijob:
+ name: pull containers
+ projects:
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-pull'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-pull'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-pull'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-pull'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-pull'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-vnf-{tag}-pull'
+ <<: *functest-pi-jobs
+ - multijob:
+ name: ollivier/functest-healthcheck:{tag}
+ projects:
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-connection_check-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-tenantnetwork1-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-tenantnetwork2-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-vmready1-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-vmready2-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-singlevm1-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-singlevm2-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-vping_ssh-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-vping_userdata-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-cinder_test-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-odl-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-tempest_smoke-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-healthcheck-{tag}-tempest_horizon-run'
+ <<: *functest-pi-jobs
+ - multijob:
+ name: ollivier/functest-smoke:{tag}
+ projects:
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_neutron-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_cinder-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_keystone-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_heat-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_telemetry-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-rally_sanity-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-refstack_compute-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-refstack_object-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-refstack_platform-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_full-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_scenario-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_slow-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-patrole_admin-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-patrole_member-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-patrole_reader-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_barbican-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_octavia-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-{tag}-tempest_cyborg-run'
+ <<: *functest-pi-jobs
+ - multijob:
+ name: ollivier/functest-smoke-cntt:{tag}
+ projects:
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_neutron_cntt-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_cinder_cntt-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_keystone_cntt-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_heat_cntt-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-rally_sanity_cntt-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_full_cntt-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_scenario_cntt-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-smoke-cntt-{tag}-tempest_slow_cntt-run'
+ <<: *functest-pi-jobs
+ - multijob:
+ name: ollivier/functest-benchmarking:{tag}
+ projects:
+ - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-rally_full-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-rally_jobs-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-vmtp-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-benchmarking-{tag}-shaker-run'
+ <<: *functest-pi-jobs
+ - multijob:
+ name: ollivier/functest-benchmarking-cntt:{tag}
+ projects:
+ - name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-rally_full_cntt-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-benchmarking-cntt-{tag}-rally_jobs_cntt-run'
+ <<: *functest-pi-jobs
+ - multijob:
+ name: ollivier/functest-vnf:{tag}
+ projects:
+ - name: 'functest-pi-ollivier-functest-vnf-{tag}-cloudify-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-vnf-{tag}-cloudify_ims-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-vnf-{tag}-heat_ims-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-vnf-{tag}-vyos_vrouter-run'
+ <<: *functest-pi-jobs
+ - name: 'functest-pi-ollivier-functest-vnf-{tag}-juju_epc-run'
+ <<: *functest-pi-jobs
+ - multijob:
+ name: dump all campaign data
+ projects:
+ - name: 'functest-pi-{tag}-zip'
+ <<: *functest-pi-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-pi-daily'
+ <<: *functest-pi-params
+ jobs:
+ - 'functest-pi-{tag}-daily'
+
+- view:
+ name: functest-pi
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-pi-[a-z-0-9.]+-daily$
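The functest-pi run and zip builders above convert the JJB-templated "volumes", "env" and "published_ports" lists into repeated docker options with a small shell loop before invoking docker run. A minimal standalone sketch of that expansion, with illustrative values only (the real jobs substitute the project-level lists shown above, and entries must not contain spaces for the word-splitting to work):

    #!/bin/sh
    # Illustrative input: what JJB renders for a two-entry "volumes" list.
    volumes_list='[/home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file, /home/opnfv/functest/images:/home/opnfv/functest/images]'
    volumes=
    # Strip the brackets, split on ", ", and prepend "-v" to every entry.
    for i in $(echo "$volumes_list" | tr -d '[]' | sed "s/, / /g"); do
        volumes="-v $i $volumes"
    done
    # The accumulated string is then passed straight to "docker run".
    echo docker run --rm $volumes ...

The same pattern is reused for "env" (prefixing "-e") and "published_ports" (prefixing "-p"), which is why a project only has to declare plain YAML lists.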
diff --git a/jjb/functest/functest-project-jobs.yaml b/jjb/functest/functest-project-jobs.yaml
deleted file mode 100644
index 306c3ca93..000000000
--- a/jjb/functest/functest-project-jobs.yaml
+++ /dev/null
@@ -1,80 +0,0 @@
----
-- functest-project-params: &functest-project-params
- name: 'functest-project-params'
- tag:
- - latest:
- branch: master
- slave: lf-virtual1
- - jerma:
- branch: stable/jerma
- slave: lf-virtual1
- - iruya:
- branch: stable/iruya
- slave: lf-virtual1
- - hunter:
- branch: stable/hunter
- slave: lf-virtual1
-
-- builder:
- name: functest-run-tox
- builders:
- - shell: tox
-
-- trigger:
- name: functest-project-patchset-created
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'functest'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- skip-vote:
- successful: false
- failed: false
- unstable: false
- notbuilt: false
-
-- parameter:
- name: functest-project-slave
- parameters:
- - label:
- name: slave
- default: '{slave}'
-
-- scm:
- name: functest-project-scm
- scm:
- - git:
- url: https://gerrit.opnfv.org/gerrit/functest
- refspec: '+refs/changes/*:refs/changes/*'
- branches:
- - '{ref}'
-
-- job-template:
- name: 'functest-run-tox-{tag}'
- triggers:
- - functest-project-patchset-created:
- branch: '{branch}'
- scm:
- - functest-project-scm:
- ref: $GERRIT_REFSPEC
- parameters:
- - functest-project-slave:
- slave: '{slave}'
- builders:
- - functest-run-tox
-
-- project:
- name: 'functest-run-tox'
- <<: *functest-project-params
- jobs:
- - 'functest-run-tox-{tag}'
diff --git a/jjb/functest/functest-suite.sh b/jjb/functest/functest-suite.sh
deleted file mode 100755
index 469a57726..000000000
--- a/jjb/functest/functest-suite.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-
-container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1)
-if [ -z $container_id ]; then
- echo "Functest container not found"
- exit 1
-fi
-
-global_ret_val=0
-
-tests=($(echo $FUNCTEST_SUITE_NAME | tr "," "\n"))
-for test in ${tests[@]}; do
- cmd="run_tests -t $test"
- docker exec $container_id $cmd
- let global_ret_val+=$?
-done
-
-ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
-echo ${global_ret_val}>${ret_val_file}
-
-exit 0
diff --git a/jjb/functest/functest.yaml b/jjb/functest/functest.yaml
index b93be3a4e..463dd9a0a 100644
--- a/jjb/functest/functest.yaml
+++ b/jjb/functest/functest.yaml
@@ -1,36 +1,312 @@
---
-- functest-jobs: &functest-jobs
- name: 'functest-jobs'
- current-parameters: true
+- functest-containers: &functest-containers
+ name: 'functest-containers'
+ repo: '{repo}'
+ port: '{port}'
+ container: '{container}'
+ tag: '{tag}'
- functest-params: &functest-params
name: 'functest-params'
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ dependency: 3.14
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ dependency: 3.14
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ dependency: 3.14
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: lf-pod4-2
+ DASHBOARD_URL: http://172.30.12.85
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ dependency: 3.13
+
+- functest-opnfv-functest-healthcheck-params: &functest-opnfv-functest-healthcheck-params
+ name: 'functest-opnfv-functest-healthcheck-params'
+ repo: 'opnfv'
+ container: 'functest-healthcheck'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ dependency: 3.14
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ dependency: 3.14
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ dependency: 3.14
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: lf-pod4-2
+ DASHBOARD_URL: http://172.30.12.85
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ dependency: 3.13
+
+- functest-opnfv-functest-smoke-params: &functest-opnfv-functest-smoke-params
+ name: 'functest-opnfv-functest-smoke-params'
+ repo: 'opnfv'
+ container: 'functest-smoke'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ dependency: 3.14
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ dependency: 3.14
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ dependency: 3.14
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: lf-pod4-2
+ DASHBOARD_URL: http://172.30.12.85
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ dependency: 3.13
+
+- functest-opnfv-functest-smoke-cntt-params: &functest-opnfv-functest-smoke-cntt-params
+ name: 'functest-opnfv-functest-smoke-cntt-params'
+ repo: 'opnfv'
+ container: 'functest-smoke-cntt'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ dependency: 3.14
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ dependency: 3.14
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ dependency: 3.14
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: lf-pod4-2
+ DASHBOARD_URL: http://172.30.12.85
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ dependency: 3.13
+
+- functest-opnfv-functest-benchmarking-params: &functest-opnfv-functest-benchmarking-params
+ name: 'functest-opnfv-functest-benchmarking-params'
+ repo: 'opnfv'
+ container: 'functest-benchmarking'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ dependency: 3.14
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ dependency: 3.14
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ dependency: 3.14
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: lf-pod4-2
+ DASHBOARD_URL: http://172.30.12.85
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ dependency: 3.13
+
+- functest-opnfv-functest-benchmarking-cntt-params: &functest-opnfv-functest-benchmarking-cntt-params
+ name: 'functest-opnfv-functest-benchmarking-cntt-params'
+ repo: 'opnfv'
+ container: 'functest-benchmarking-cntt'
+ port:
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ dependency: 3.14
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ dependency: 3.14
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ dependency: 3.14
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: lf-pod4-2
+ DASHBOARD_URL: http://172.30.12.85
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ dependency: 3.13
+
+- functest-opnfv-functest-vnf-params: &functest-opnfv-functest-vnf-params
+ name: 'functest-opnfv-functest-vnf-params'
repo: 'opnfv'
+ container: 'functest-vnf'
port:
tag:
- latest:
+ from:
+ build_args:
branch: master
- slave: lf-virtual9
- dashboard_url: http://172.30.13.94
- - jerma:
- branch: stable/jerma
- slave: lf-pod4
- dashboard_url: http://172.30.12.83
- - iruya:
- branch: stable/iruya
- slave: lf-virtual4
- dashboard_url: http://172.30.13.89
- - hunter:
- branch: stable/hunter
- slave: lf-virtual6
- dashboard_url: http://172.30.13.91
+ node: lf-pod4
+ DASHBOARD_URL: http://172.30.12.83
+ dependency: 3.14
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: lf-virtual9
+ DASHBOARD_URL: http://172.30.13.94
+ dependency: 3.14
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: lf-pod4-3
+ DASHBOARD_URL: http://172.30.12.88
+ dependency: 3.14
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: lf-pod4-2
+ DASHBOARD_URL: http://172.30.12.85
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: lf-virtual6
+ DASHBOARD_URL: http://172.30.13.91
+ dependency: 3.13
+
+- functest-jobs: &functest-jobs
+ name: 'functest-jobs'
+ current-parameters: true
- parameter:
- name: functest-slave
+ name: functest-node
parameters:
- label:
- name: slave
- default: '{slave}'
+ name: node
+ default: '{node}'
- parameter:
name: functest-build_tag
@@ -39,44 +315,37 @@
name: build_tag
- parameter:
- name: functest-branch
+ name: functest-EXTERNAL_NETWORK
parameters:
- string:
- name: branch
- default: '{branch}'
+ name: EXTERNAL_NETWORK
+ default: public
- parameter:
- name: functest-DEBUG
+ name: functest-VOLUME_DEVICE_NAME
parameters:
- string:
- name: DEBUG
- default: 'true'
+ name: VOLUME_DEVICE_NAME
+ default: sdb
- parameter:
- name: functest-EXTERNAL_NETWORK
+ name: functest-IMAGE_PROPERTIES
parameters:
- string:
- name: EXTERNAL_NETWORK
- default: public
-
-- functest-containers: &functest-containers
- name: 'functest-containers'
- repo: '{repo}'
- port: '{port}'
- container: '{container}'
- tag: '{tag}'
+ name: IMAGE_PROPERTIES
+ default: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
- functest-run-containers: &functest-run-containers
name: 'functest-run-containers'
<<: *functest-containers
- test: '{test}'
- dashboard_url: '{dashboard_url}'
-
-- functest-build-containers: &functest-build-containers
- name: 'functest-build-containers'
- <<: *functest-containers
- ref_arg: '{ref_arg}'
- path: '{path}'
+ privileged: '{privileged}'
+ volumes: '{volumes}'
+ env: '{env}'
+ network: '{network}'
+ uid: '{uid}'
+ gid: '{gid}'
+ published_ports: '{published_ports}'
+ DASHBOARD_URL: '{DASHBOARD_URL}'
- builder:
name: functest-pull-containers
@@ -93,33 +362,26 @@
sudo docker pull $image
- builder:
- name: functest-build-containers
+ name: functest-run-containers
builders:
- shell: |
set +x
- if [ "{repo}" = "_" ]; then
- image={container}:{tag}
- elif [ "{port}" = "None" ]; then
- image={repo}/{container}:{tag}
- else
- image={repo}:{port}/{container}:{tag}
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
fi
- if [ "{ref_arg}" = "None" ]; then
- build_arg=""
- else
- build_arg="--build-arg {ref_arg}={ref}"
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
fi
- cd {path}
- sudo docker build $build_arg \
- --pull=false --no-cache --force-rm=true \
- -t $image .
-
-- builder:
- name: functest-run-containers
- builders:
- - shell: |
- set +x
- [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/* || true
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/results || true
if [ "{repo}" = "_" ]; then
image={container}:{tag}
elif [ "{port}" = "None" ]; then
@@ -127,35 +389,25 @@
else
image={repo}:{port}/{container}:{tag}
fi
- if [ "{tag}" = "latest" ]; then
- py=3.7
- elif [ "{tag}" = "jerma" ]; then
- py=3.7
- elif [ "{tag}" = "iruya" ]; then
- py=3.6
- else
- py=2.7
- fi
+ sudo mkdir -p $WORKSPACE/results
+ sudo chown {uid}:{gid} $WORKSPACE/results
sudo docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
-e S3_ENDPOINT_URL=https://storage.googleapis.com \
-e S3_DST_URL=s3://artifacts.opnfv.org/functest/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
-e HTTP_DST_URL=http://artifacts.opnfv.org/functest/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -v /home/opnfv/functest/.boto:/etc/boto.cfg \
-e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
- -e NODE_NAME=$slave \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
-e BUILD_TAG=$BUILD_TAG \
-v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
- -e DEBUG=$DEBUG \
- -e EXTERNAL_NETWORK=$EXTERNAL_NETWORK \
- -e DASHBOARD_URL={dashboard_url} \
- -e VOLUME_DEVICE_NAME=sdb \
- -e IMAGE_PROPERTIES=hw_scsi_model:virtio-scsi,hw_disk_bus:scsi \
- -v /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file \
- -v /home/opnfv/functest/images:/home/opnfv/functest/images \
- -v /home/opnfv/functest/tempest_blacklist.yaml:/usr/lib/python$py/\
- site-packages/functest/opnfv_tests/openstack/tempest/custom_tests/\
- blacklist.yaml \
- -v /home/opnfv/functest/.boto:/root/.boto \
- $image run_tests -t {test} -r -p
+ -e DASHBOARD_URL={DASHBOARD_URL} \
+ $image run_tests -t {test} -p -r
- builder:
name: functest-remove-images
@@ -171,121 +423,215 @@
fi
sudo docker rmi $image || true
-- scm:
- name: functest-scm
- scm:
- - git:
- url: https://gerrit.opnfv.org/gerrit/functest
- refspec: '+refs/changes/*:refs/changes/*'
- branches:
- - '{ref}'
-
- job-template:
- name: 'functest-{repo}-{container}-{tag}-pull'
+ name: 'functest-opnfv-functest-healthcheck-{tag}-pull'
parameters:
- - functest-slave:
- slave: '{slave}'
+ - functest-node:
+ node: '{node}'
builders:
- functest-pull-containers:
<<: *functest-containers
- project:
name: 'functest-opnfv-functest-healthcheck-pull'
- <<: *functest-params
- container: 'functest-healthcheck'
+ <<: *functest-opnfv-functest-healthcheck-params
+ jobs:
+ - 'functest-opnfv-functest-healthcheck-{tag}-pull'
+
+- job-template:
+ name: 'functest-opnfv-functest-healthcheck-{tag}-rmi'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-remove-images:
+ <<: *functest-containers
+
+- project:
+ name: 'functest-opnfv-functest-healthcheck-rmi'
+ <<: *functest-opnfv-functest-healthcheck-params
jobs:
- - 'functest-{repo}-{container}-{tag}-pull'
+ - 'functest-opnfv-functest-healthcheck-{tag}-rmi'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-{tag}-pull'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-pull-containers:
+ <<: *functest-containers
- project:
name: 'functest-opnfv-functest-smoke-pull'
- <<: *functest-params
- container: 'functest-smoke'
+ <<: *functest-opnfv-functest-smoke-params
jobs:
- - 'functest-{repo}-{container}-{tag}-pull'
+ - 'functest-opnfv-functest-smoke-{tag}-pull'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-{tag}-rmi'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-remove-images:
+ <<: *functest-containers
+
+- project:
+ name: 'functest-opnfv-functest-smoke-rmi'
+ <<: *functest-opnfv-functest-smoke-params
+ jobs:
+ - 'functest-opnfv-functest-smoke-{tag}-rmi'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-cntt-{tag}-pull'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-pull-containers:
+ <<: *functest-containers
- project:
name: 'functest-opnfv-functest-smoke-cntt-pull'
- <<: *functest-params
- container: 'functest-smoke-cntt'
+ <<: *functest-opnfv-functest-smoke-cntt-params
jobs:
- - 'functest-{repo}-{container}-{tag}-pull'
+ - 'functest-opnfv-functest-smoke-cntt-{tag}-pull'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-cntt-{tag}-rmi'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-remove-images:
+ <<: *functest-containers
- project:
- name: 'functest-opnfv-functest-benchmarking-pull'
- <<: *functest-params
- container: 'functest-benchmarking'
+ name: 'functest-opnfv-functest-smoke-cntt-rmi'
+ <<: *functest-opnfv-functest-smoke-cntt-params
jobs:
- - 'functest-{repo}-{container}-{tag}-pull'
+ - 'functest-opnfv-functest-smoke-cntt-{tag}-rmi'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-{tag}-pull'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-pull-containers:
+ <<: *functest-containers
- project:
- name: 'functest-opnfv-functest-vnf-pull'
- <<: *functest-params
- container: 'functest-vnf'
+ name: 'functest-opnfv-functest-benchmarking-pull'
+ <<: *functest-opnfv-functest-benchmarking-params
jobs:
- - 'functest-{repo}-{container}-{tag}-pull'
+ - 'functest-opnfv-functest-benchmarking-{tag}-pull'
- job-template:
- name: 'functest-{repo}-{container}-{tag}-rmi'
+ name: 'functest-opnfv-functest-benchmarking-{tag}-rmi'
parameters:
- - functest-slave:
- slave: '{slave}'
+ - functest-node:
+ node: '{node}'
builders:
- functest-remove-images:
<<: *functest-containers
- project:
- name: 'functest-opnfv-functest-healthcheck-rmi'
- <<: *functest-params
- container: 'functest-healthcheck'
+ name: 'functest-opnfv-functest-benchmarking-rmi'
+ <<: *functest-opnfv-functest-benchmarking-params
jobs:
- - 'functest-{repo}-{container}-{tag}-rmi'
+ - 'functest-opnfv-functest-benchmarking-{tag}-rmi'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-pull'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-pull-containers:
+ <<: *functest-containers
- project:
- name: 'functest-opnfv-functest-smoke-rmi'
- <<: *functest-params
- container: 'functest-smoke'
+ name: 'functest-opnfv-functest-benchmarking-cntt-pull'
+ <<: *functest-opnfv-functest-benchmarking-cntt-params
jobs:
- - 'functest-{repo}-{container}-{tag}-rmi'
+ - 'functest-opnfv-functest-benchmarking-cntt-{tag}-pull'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rmi'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-remove-images:
+ <<: *functest-containers
- project:
- name: 'functest-opnfv-functest-smoke-cntt-rmi'
- <<: *functest-params
- container: 'functest-smoke-cntt'
+ name: 'functest-opnfv-functest-benchmarking-cntt-rmi'
+ <<: *functest-opnfv-functest-benchmarking-cntt-params
jobs:
- - 'functest-{repo}-{container}-{tag}-rmi'
+ - 'functest-opnfv-functest-benchmarking-cntt-{tag}-rmi'
+
+- job-template:
+ name: 'functest-opnfv-functest-vnf-{tag}-pull'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-pull-containers:
+ <<: *functest-containers
- project:
- name: 'functest-opnfv-functest-benchmarking-rmi'
- <<: *functest-params
- container: 'functest-benchmarking'
+ name: 'functest-opnfv-functest-vnf-pull'
+ <<: *functest-opnfv-functest-vnf-params
jobs:
- - 'functest-{repo}-{container}-{tag}-rmi'
+ - 'functest-opnfv-functest-vnf-{tag}-pull'
+
+- job-template:
+ name: 'functest-opnfv-functest-vnf-{tag}-rmi'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-remove-images:
+ <<: *functest-containers
- project:
name: 'functest-opnfv-functest-vnf-rmi'
- <<: *functest-params
- container: 'functest-vnf'
+ <<: *functest-opnfv-functest-vnf-params
jobs:
- - 'functest-{repo}-{container}-{tag}-rmi'
+ - 'functest-opnfv-functest-vnf-{tag}-rmi'
- job-template:
- name: 'functest-{repo}-{container}-{tag}-{test}-run'
+ name: 'functest-opnfv-functest-healthcheck-{tag}-{test}-run'
parameters:
- - functest-slave:
- slave: '{slave}'
+ - functest-node:
+ node: '{node}'
- functest-build_tag:
build_tag: ''
- - functest-DEBUG:
- DEBUG: 'true'
- functest-EXTERNAL_NETWORK:
EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
builders:
- functest-run-containers:
<<: *functest-run-containers
+ test: '{test}'
- project:
name: 'functest-opnfv-functest-healthcheck'
- <<: *functest-params
+ <<: *functest-opnfv-functest-healthcheck-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
container: 'functest-healthcheck'
test:
- connection_check
@@ -301,84 +647,226 @@
- odl
- tempest_smoke
- tempest_horizon
- exclude:
- - tag: hunter
- test: tempest_horizon
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
jobs:
- - 'functest-{repo}-{container}-{tag}-{test}-run'
+ - 'functest-opnfv-functest-healthcheck-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-{tag}-{test}-run'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ - functest-build_tag:
+ build_tag: ''
+ - functest-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-run-containers:
+ <<: *functest-run-containers
+ test: '{test}'
- project:
name: 'functest-opnfv-functest-smoke'
- <<: *functest-params
+ <<: *functest-opnfv-functest-smoke-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
container: 'functest-smoke'
test:
- - neutron-tempest-plugin-api
+ - tempest_neutron
- tempest_cinder
- tempest_keystone
+ - tempest_heat
+ - tempest_telemetry
- rally_sanity
- - refstack_defcore
- refstack_compute
- refstack_object
- refstack_platform
- tempest_full
- tempest_scenario
- tempest_slow
- - patrole
- - neutron_trunk
- - networking-bgpvpn
- - networking-sfc
- - barbican
- - octavia
- exclude:
- - tag: latest
- test: refstack_defcore
- - tag: jerma
- test: refstack_defcore
- - tag: iruya
- test: refstack_defcore
- - tag: iruya
- test: octavia
- - tag: hunter
- test: refstack_compute
- - tag: hunter
- test: refstack_object
- - tag: hunter
- test: refstack_platform
- - tag: hunter
- test: octavia
+ - patrole_admin
+ - patrole_member
+ - patrole_reader
+ - tempest_barbican
+ - tempest_octavia
+ - tempest_cyborg
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
jobs:
- - 'functest-{repo}-{container}-{tag}-{test}-run'
+ - 'functest-opnfv-functest-smoke-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-cntt-{tag}-{test}-run'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ - functest-build_tag:
+ build_tag: ''
+ - functest-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-run-containers:
+ <<: *functest-run-containers
+ test: '{test}'
- project:
name: 'functest-opnfv-functest-smoke-cntt'
- <<: *functest-params
+ <<: *functest-opnfv-functest-smoke-cntt-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
container: 'functest-smoke-cntt'
test:
- - neutron-tempest-plugin-api
- - tempest_cinder
- - tempest_keystone
- - rally_sanity
- - tempest_full
- - tempest_scenario
- - tempest_slow
+ - tempest_neutron_cntt
+ - tempest_cinder_cntt
+ - tempest_keystone_cntt
+ - tempest_heat_cntt
+ - rally_sanity_cntt
+ - tempest_full_cntt
+ - tempest_scenario_cntt
+ - tempest_slow_cntt
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
jobs:
- - 'functest-{repo}-{container}-{tag}-{test}-run'
+ - 'functest-opnfv-functest-smoke-cntt-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-{tag}-{test}-run'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ - functest-build_tag:
+ build_tag: ''
+ - functest-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-run-containers:
+ <<: *functest-run-containers
+ test: '{test}'
- project:
name: 'functest-opnfv-functest-benchmarking'
- <<: *functest-params
+ <<: *functest-opnfv-functest-benchmarking-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
container: 'functest-benchmarking'
test:
- rally_full
- rally_jobs
- vmtp
- shaker
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-{test}-run'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ - functest-build_tag:
+ build_tag: ''
+ - functest-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-run-containers:
+ <<: *functest-run-containers
+ test: '{test}'
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-cntt'
+ <<: *functest-opnfv-functest-benchmarking-cntt-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
+ container: 'functest-benchmarking-cntt'
+ test:
+ - rally_full_cntt
+ - rally_jobs_cntt
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
jobs:
- - 'functest-{repo}-{container}-{tag}-{test}-run'
+ - 'functest-opnfv-functest-benchmarking-cntt-{tag}-{test}-run'
+
+- job-template:
+ name: 'functest-opnfv-functest-vnf-{tag}-{test}-run'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ - functest-build_tag:
+ build_tag: ''
+ - functest-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ builders:
+ - functest-run-containers:
+ <<: *functest-run-containers
+ test: '{test}'
- project:
name: 'functest-opnfv-functest-vnf'
- <<: *functest-params
+ <<: *functest-opnfv-functest-vnf-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
container: 'functest-vnf'
test:
- cloudify
@@ -386,15 +874,34 @@
- heat_ims
- vyos_vrouter
- juju_epc
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
jobs:
- - 'functest-{repo}-{container}-{tag}-{test}-run'
+ - 'functest-opnfv-functest-vnf-{tag}-{test}-run'
- builder:
name: functest-zip
builders:
- shell: |
set +x
- [ ! -z "$WORKSPACE" ] && rm -rf $WORKSPACE/* || true
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/results || true
if [ "{repo}" = "_" ]; then
image={container}:{tag}
elif [ "{port}" = "None" ]; then
@@ -402,29 +909,59 @@
else
image={repo}:{port}/{container}:{tag}
fi
+ sudo mkdir -p $WORKSPACE/results
+ sudo chown {uid}:{gid} $WORKSPACE/results
sudo docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
-e S3_ENDPOINT_URL=https://storage.googleapis.com \
-e S3_DST_URL=s3://artifacts.opnfv.org/functest \
-e HTTP_DST_URL=http://artifacts.opnfv.org/functest \
+ -v /home/opnfv/functest/.boto:/etc/boto.cfg \
-e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
-e BUILD_TAG=$BUILD_TAG \
-v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
- -v /home/opnfv/functest/.boto:/root/.boto \
+ -e DASHBOARD_URL={DASHBOARD_URL} \
$image zip_campaign
- job-template:
name: 'functest-{tag}-zip'
parameters:
+ - functest-node:
+ node: '{node}'
- functest-build_tag:
build_tag: ''
+ - functest-EXTERNAL_NETWORK:
+ EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
builders:
- functest-zip:
- <<: *functest-containers
+ <<: *functest-run-containers
- project:
- name: 'functest-{tag}-zip'
- <<: *functest-params
+ name: 'functest-zip'
+ <<: *functest-opnfv-functest-healthcheck-params
+ volumes:
+ - /home/opnfv/functest/openstack.creds:/home/opnfv/functest/conf/env_file
+ - /home/opnfv/functest/images:/home/opnfv/functest/images
+ env:
+ - EXTERNAL_NETWORK=$EXTERNAL_NETWORK
+ - VOLUME_DEVICE_NAME=$VOLUME_DEVICE_NAME
+ - IMAGE_PROPERTIES=$IMAGE_PROPERTIES
+ published_ports:
container: 'functest-healthcheck'
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
jobs:
- 'functest-{tag}-zip'
@@ -432,22 +969,26 @@
name: 'functest-{tag}-daily'
project-type: multijob
triggers:
- - timed: '@daily'
+ - timed: '@weekly'
parameters:
- - functest-slave:
- slave: '{slave}'
+ - functest-node:
+ node: '{node}'
- functest-build_tag:
build_tag: ''
- - functest-DEBUG:
- DEBUG: 'true'
- functest-EXTERNAL_NETWORK:
EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
properties:
- build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
blocking-jobs:
- - '^functest-{tag}-(daily|check|gate)$'
+ - ^functest-(pi-)*{tag}-(daily|docker|review)$
+ # yamllint enable rule:indentation
builders:
- multijob:
name: remove former images
@@ -460,6 +1001,8 @@
<<: *functest-jobs
- name: 'functest-opnfv-functest-benchmarking-{tag}-rmi'
<<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rmi'
+ <<: *functest-jobs
- name: 'functest-opnfv-functest-vnf-{tag}-rmi'
<<: *functest-jobs
- multijob:
@@ -473,6 +1016,8 @@
<<: *functest-jobs
- name: 'functest-opnfv-functest-benchmarking-{tag}-pull'
<<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-pull'
+ <<: *functest-jobs
- name: 'functest-opnfv-functest-vnf-{tag}-pull'
<<: *functest-jobs
- multijob:
@@ -507,15 +1052,17 @@
- multijob:
name: opnfv/functest-smoke:{tag}
projects:
- - name: 'functest-opnfv-functest-smoke-{tag}-neutron-tempest-plugin-api-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_neutron-run'
<<: *functest-jobs
- name: 'functest-opnfv-functest-smoke-{tag}-tempest_cinder-run'
<<: *functest-jobs
- name: 'functest-opnfv-functest-smoke-{tag}-tempest_keystone-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-rally_sanity-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_heat-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-refstack_defcore-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_telemetry-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-rally_sanity-run'
<<: *functest-jobs
- name: 'functest-opnfv-functest-smoke-{tag}-refstack_compute-run'
<<: *functest-jobs
@@ -529,34 +1076,36 @@
<<: *functest-jobs
- name: 'functest-opnfv-functest-smoke-{tag}-tempest_slow-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-patrole-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-patrole_admin-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-neutron_trunk-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-patrole_member-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-networking-bgpvpn-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-patrole_reader-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-networking-sfc-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_barbican-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-barbican-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_octavia-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-octavia-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_cyborg-run'
<<: *functest-jobs
- multijob:
name: opnfv/functest-smoke-cntt:{tag}
projects:
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-neutron-tempest-plugin-api-run'
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_neutron_cntt-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_cinder-run'
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_cinder_cntt-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_keystone-run'
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_keystone_cntt-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-rally_sanity-run'
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_heat_cntt-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_full-run'
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-rally_sanity_cntt-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_scenario-run'
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_full_cntt-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_slow-run'
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_scenario_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_slow_cntt-run'
<<: *functest-jobs
- multijob:
name: opnfv/functest-benchmarking:{tag}
@@ -570,8 +1119,14 @@
- name: 'functest-opnfv-functest-benchmarking-{tag}-shaker-run'
<<: *functest-jobs
- multijob:
+ name: opnfv/functest-benchmarking-cntt:{tag}
+ projects:
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rally_full_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rally_jobs_cntt-run'
+ <<: *functest-jobs
+ - multijob:
name: opnfv/functest-vnf:{tag}
- execution-type: SEQUENTIALLY
projects:
- name: 'functest-opnfv-functest-vnf-{tag}-cloudify-run'
<<: *functest-jobs
@@ -588,148 +1143,390 @@
projects:
- name: 'functest-{tag}-zip'
<<: *functest-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-daily'
+ <<: *functest-params
+ jobs:
+ - 'functest-{tag}-daily'
+
+- view:
+ name: functest
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-[a-z0-9.-]+-daily$
+
+- functest-build-containers: &functest-build-containers
+ name: 'functest-build-containers'
+ <<: *functest-containers
+ ref_arg: '{ref_arg}'
+ path: '{path}'
+ build_args: '{build_args}'
+ from: '{from}'
+
+- builder:
+ name: functest-build-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ build_args=""
+ if [ "{build_args}" != "None" ]; then
+ for i in $(echo {build_args} | tr -d '[]' |sed "s/, / /g" ); \
+ do build_args="--build-arg $i $build_args"; done
+ fi
+ if [ "{ref_arg}" != "None" ]; then
+ build_args="$build_args --build-arg {ref_arg}={ref}"
+ fi
+ cd {path}
+ if [ "{from}" != "None" ]; then
+ sed -i {from} Dockerfile
+ fi
+ sudo docker build $build_args \
+ --pull=false --no-cache --force-rm=true \
+ -t $image .
+
+- scm:
+ name: functest-scm
+ scm:
+ - git:
+ url: 'https://gerrit.opnfv.org/gerrit/functest'
+ refspec: '+refs/heads/*:refs/remotes/origin/* +refs/changes/*:refs/changes/*'
+ branches:
+ - '{ref}'
+
+- functest-dep: &functest-dep
+ name: 'functest-containers'
+ repo: '{repo}'
+ container: '{container}'
+ port: '{port}'
+ tag: '{dependency}'
- job-template:
- name: 'functest-{repo}-{container}-{tag}-gate'
+ name: 'functest-{tag}-dep-pull'
parameters:
- - functest-slave:
- slave: '{slave}'
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-pull-containers:
+ <<: *functest-dep
+
+- functest-dep-params: &functest-dep-params
+ name: 'functest-dep-params'
+ repo: '_'
+ container: 'alpine'
+ port:
+ tag:
+ - latest:
+ dependency: 3.14
+ - zed:
+ dependency: 3.14
+ - yoga:
+ dependency: 3.14
+ - xena:
+ dependency: 3.14
+ - wallaby:
+ dependency: 3.13
+
+- project:
+ name: 'functest-dep-pull'
+ <<: *functest-dep-params
+ jobs:
+ - 'functest-{tag}-dep-pull'
+
+- job-template:
+ name: 'functest-{tag}-dep-rmi'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-remove-images:
+ <<: *functest-dep
+
+- project:
+ name: 'functest-dep-rmi'
+ <<: *functest-dep-params
+ jobs:
+ - 'functest-{tag}-dep-rmi'
+
+- builder:
+ name: functest-tox
+ builders:
+ - shell: |
+ set +x
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install software-properties-common gpg -y
+ sudo add-apt-repository -y ppa:deadsnakes/ppa
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install \
+ libxml2-dev libxslt-dev libffi-dev libjpeg-dev \
+ python3.10 python3.10-dev python3.10-distutils \
+ python3.9 python3.9-dev python3.9-distutils \
+ python3.8 python3.8-dev python3.8-distutils \
+ python3-pip enchant-2 -y
+ sudo pip3 install tox
+
+ tox
+
+- job-template:
+ name: 'functest-{tag}-tox'
scm:
- functest-scm:
ref: $GERRIT_REFSPEC
+ triggers:
+ - functest-patchset-created:
+ branch: '{branch}'
+ parameters:
+ - functest-node:
+ node: '{node}'
builders:
- - functest-build-containers:
- <<: *functest-build-containers
- ref: $GERRIT_REFSPEC
+ - functest-tox:
+
+- project:
+ name: functest-tox
+ <<: *functest-params
+ jobs:
+ - 'functest-{tag}-tox'
- job-template:
- name: 'functest-{repo}-{container}-{tag}-check'
+ name: 'functest-opnfv-functest-core-{tag}-gate'
parameters:
- - functest-slave:
- slave: '{slave}'
- - functest-branch:
- branch: '{branch}'
+ - functest-node:
+ node: '{node}'
scm:
- functest-scm:
- ref: $branch
+ ref: $GERRIT_REFSPEC
builders:
- functest-build-containers:
<<: *functest-build-containers
- ref: $branch
-
-- project:
- name: 'functest-_-alpine-3.11-rmi'
- repo: _
- port:
- container: alpine
- tag: '3.11'
- slave: master
- jobs:
- - 'functest-{repo}-{container}-{tag}-rmi'
-
-- project:
- name: 'functest-_-alpine-3.11-pull'
- repo: _
- port:
- container: alpine
- tag: '3.11'
- slave: master
- jobs:
- - 'functest-{repo}-{container}-{tag}-pull'
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
- project:
- name: functest-opnfv-functest-core-{tag}-build
+ name: functest-opnfv-functest-core-gate
<<: *functest-params
+ repo: opnfv
+ port:
container: functest-core
ref_arg: BRANCH
path: docker/core
jobs:
- - 'functest-{repo}-{container}-{tag}-gate'
- - 'functest-{repo}-{container}-{tag}-check'
+ - 'functest-opnfv-functest-core-{tag}-gate'
-- project:
- name: functest-opnfv-functest-tempest-{tag}-build
- <<: *functest-params
- container: functest-tempest
- ref_arg: BRANCH
- path: docker/tempest
- jobs:
- - 'functest-{repo}-{container}-{tag}-gate'
- - 'functest-{repo}-{container}-{tag}-check'
- exclude:
- - tag: latest
- - tag: jerma
+- job-template:
+ name: 'functest-opnfv-functest-healthcheck-{tag}-gate'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
- project:
- name: functest-opnfv-functest-healthcheck-{tag}-build
+ name: functest-opnfv-functest-healthcheck-gate
<<: *functest-params
+ repo: opnfv
+ port:
container: functest-healthcheck
ref_arg: BRANCH
path: docker/healthcheck
jobs:
- - 'functest-{repo}-{container}-{tag}-gate'
- - 'functest-{repo}-{container}-{tag}-check'
+ - 'functest-opnfv-functest-healthcheck-{tag}-gate'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-{tag}-gate'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
- project:
- name: functest-opnfv-functest-smoke-{tag}-build
+ name: functest-opnfv-functest-smoke-gate
<<: *functest-params
+ repo: opnfv
+ port:
container: functest-smoke
ref_arg: BRANCH
path: docker/smoke
jobs:
- - 'functest-{repo}-{container}-{tag}-gate'
- - 'functest-{repo}-{container}-{tag}-check'
+ - 'functest-opnfv-functest-smoke-{tag}-gate'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-{tag}-gate'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
- project:
- name: functest-opnfv-functest-benchmarking-{tag}-build
+ name: functest-opnfv-functest-benchmarking-gate
<<: *functest-params
+ repo: opnfv
+ port:
container: functest-benchmarking
ref_arg: BRANCH
path: docker/benchmarking
jobs:
- - 'functest-{repo}-{container}-{tag}-gate'
- - 'functest-{repo}-{container}-{tag}-check'
+ - 'functest-opnfv-functest-benchmarking-{tag}-gate'
+
+- job-template:
+ name: 'functest-opnfv-functest-vnf-{tag}-gate'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
- project:
- name: functest-opnfv-functest-vnf-{tag}-build
+ name: functest-opnfv-functest-vnf-gate
<<: *functest-params
+ repo: opnfv
+ port:
container: functest-vnf
ref_arg:
path: docker/vnf
jobs:
- - 'functest-{repo}-{container}-{tag}-gate'
- - 'functest-{repo}-{container}-{tag}-check'
+ - 'functest-opnfv-functest-vnf-{tag}-gate'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-cntt-{tag}-gate'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
- project:
- name: functest-opnfv-functest-smoke-cntt-{tag}-build
+ name: functest-opnfv-functest-smoke-cntt-gate
<<: *functest-params
+ repo: opnfv
+ port:
container: functest-smoke-cntt
ref_arg: BRANCH
path: docker/smoke-cntt
jobs:
- - 'functest-{repo}-{container}-{tag}-gate'
- - 'functest-{repo}-{container}-{tag}-check'
+ - 'functest-opnfv-functest-smoke-cntt-{tag}-gate'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-gate'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: $GERRIT_REFSPEC
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
+
+- project:
+ name: functest-opnfv-functest-benchmarking-cntt-gate
+ <<: *functest-params
+ repo: opnfv
+ port:
+ container: functest-benchmarking-cntt
+ ref_arg: BRANCH
+ path: docker/benchmarking-cntt
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-cntt-{tag}-gate'
+
+- trigger:
+ name: functest-patchset-created
+ triggers:
+ - gerrit:
+ trigger-on:
+ - patchset-created-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'functest'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
- job-template:
- name: 'functest-{tag}-check'
+ name: 'functest-{tag}-review'
project-type: multijob
+ triggers:
+ - functest-patchset-created:
+ branch: '{branch}'
parameters:
- - functest-slave:
- slave: '{slave}'
+ - functest-node:
+ node: '{node}'
- functest-build_tag:
build_tag: ''
- - functest-branch:
- branch: '{branch}'
- - functest-DEBUG:
- DEBUG: 'true'
- functest-EXTERNAL_NETWORK:
EXTERNAL_NETWORK: public
+ - functest-VOLUME_DEVICE_NAME:
+ VOLUME_DEVICE_NAME: sdb
+ - functest-IMAGE_PROPERTIES:
+ IMAGE_PROPERTIES: hw_scsi_model:virtio-scsi,hw_disk_bus:scsi
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
properties:
- build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
blocking-jobs:
- - '^functest-{tag}-(daily|check|gate)$'
+ - ^functest-(pi-)*{tag}-(daily|docker|review)$
+ # yamllint enable rule:indentation
builders:
- multijob:
name: remove former images
@@ -742,43 +1539,42 @@
<<: *functest-jobs
- name: 'functest-opnfv-functest-benchmarking-{tag}-rmi'
<<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rmi'
+ <<: *functest-jobs
- name: 'functest-opnfv-functest-vnf-{tag}-rmi'
<<: *functest-jobs
- multijob:
- name: remove dependencies
+ name: remove dependency
projects:
- - name: 'functest-_-alpine-3.11-rmi'
+ - name: 'functest-{tag}-dep-rmi'
<<: *functest-jobs
- multijob:
- name: pull dependencies
+ name: pull dependency
projects:
- - name: 'functest-_-alpine-3.11-pull'
+ - name: 'functest-{tag}-dep-pull'
<<: *functest-jobs
- multijob:
name: build opnfv/functest-core
projects:
- - name: 'functest-opnfv-functest-core-{tag}-check'
- <<: *functest-jobs
- - multijob:
- name: build opnfv/functest-tempest
- projects:
- - name: 'functest-opnfv-functest-tempest-{tag}-check'
+ - name: 'functest-opnfv-functest-core-{tag}-gate'
<<: *functest-jobs
- multijob:
name: build containers
projects:
- - name: 'functest-opnfv-functest-healthcheck-{tag}-check'
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-gate'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-check'
+ - name: 'functest-opnfv-functest-smoke-{tag}-gate'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-benchmarking-{tag}-check'
+ - name: 'functest-opnfv-functest-benchmarking-{tag}-gate'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-vnf-{tag}-check'
+ - name: 'functest-opnfv-functest-vnf-{tag}-gate'
<<: *functest-jobs
- multijob:
- name: build opnfv/functest-smoke-cntt
+ name: build cntt containers
projects:
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-check'
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-gate'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-gate'
<<: *functest-jobs
- multijob:
name: opnfv/functest-healthcheck:{tag}
@@ -812,15 +1608,17 @@
- multijob:
name: opnfv/functest-smoke:{tag}
projects:
- - name: 'functest-opnfv-functest-smoke-{tag}-neutron-tempest-plugin-api-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_neutron-run'
<<: *functest-jobs
- name: 'functest-opnfv-functest-smoke-{tag}-tempest_cinder-run'
<<: *functest-jobs
- name: 'functest-opnfv-functest-smoke-{tag}-tempest_keystone-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-rally_sanity-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_heat-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_telemetry-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-refstack_defcore-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-rally_sanity-run'
<<: *functest-jobs
- name: 'functest-opnfv-functest-smoke-{tag}-refstack_compute-run'
<<: *functest-jobs
@@ -834,34 +1632,36 @@
<<: *functest-jobs
- name: 'functest-opnfv-functest-smoke-{tag}-tempest_slow-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-patrole-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-patrole_admin-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-neutron_trunk-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-patrole_member-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-networking-bgpvpn-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-patrole_reader-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-networking-sfc-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_barbican-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-barbican-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_octavia-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-octavia-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-tempest_cyborg-run'
<<: *functest-jobs
- multijob:
name: opnfv/functest-smoke-cntt:{tag}
projects:
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-neutron-tempest-plugin-api-run'
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_neutron_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_cinder_cntt-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_cinder-run'
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_keystone_cntt-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_keystone-run'
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_heat_cntt-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-rally_sanity-run'
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-rally_sanity_cntt-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_full-run'
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_full_cntt-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_scenario-run'
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_scenario_cntt-run'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_slow-run'
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_slow_cntt-run'
<<: *functest-jobs
- multijob:
name: opnfv/functest-benchmarking:{tag}
@@ -875,8 +1675,14 @@
- name: 'functest-opnfv-functest-benchmarking-{tag}-shaker-run'
<<: *functest-jobs
- multijob:
+ name: opnfv/functest-benchmarking-cntt:{tag}
+ projects:
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rally_full_cntt-run'
+ <<: *functest-jobs
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-rally_jobs_cntt-run'
+ <<: *functest-jobs
+ - multijob:
name: opnfv/functest-vnf:{tag}
- execution-type: SEQUENTIALLY
projects:
- name: 'functest-opnfv-functest-vnf-{tag}-cloudify-run'
<<: *functest-jobs
@@ -888,220 +1694,964 @@
<<: *functest-jobs
- name: 'functest-opnfv-functest-vnf-{tag}-juju_epc-run'
<<: *functest-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-review'
+ <<: *functest-params
+ jobs:
+ - 'functest-{tag}-review'
+
+- view:
+ name: functest-review
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-[a-z0-9.-]+-review$
+
+- view:
+ name: functest-tox
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-[a-z0-9.-]+-tox$
+
+- builder:
+ name: functest-push-containers
+ builders:
+ - shell: |
+ set +x
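+ # compose the image reference: bare name when repo is '_', repo/name when no registry port is set, registry:port/name otherwise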
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker push $image
- trigger:
- name: functest-patchset-created
+ name: functest-commit
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'functest'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- skip-vote:
- successful: false
- failed: false
- unstable: false
- notbuilt: false
+ - pollscm:
+ cron: "*/30 * * * *"
- job-template:
- name: 'functest-{tag}-gate'
+ name: 'functest-opnfv-functest-core-{tag}-build'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: '{branch}'
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-push-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-opnfv-functest-core-build
+ <<: *functest-params
+ repo: opnfv
+ container: functest-core
+ port:
+ ref_arg: BRANCH
+ path: docker/core
+ jobs:
+ - 'functest-opnfv-functest-core-{tag}-build'
+
+- job-template:
+ name: 'functest-opnfv-functest-healthcheck-{tag}-build'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: '{branch}'
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-push-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-opnfv-functest-healthcheck-build
+ <<: *functest-params
+ repo: opnfv
+ container: functest-healthcheck
+ port:
+ ref_arg: BRANCH
+ path: docker/healthcheck
+ jobs:
+ - 'functest-opnfv-functest-healthcheck-{tag}-build'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-{tag}-build'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: '{branch}'
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-push-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-opnfv-functest-smoke-build
+ <<: *functest-params
+ repo: opnfv
+ container: functest-smoke
+ port:
+ ref_arg: BRANCH
+ path: docker/smoke
+ jobs:
+ - 'functest-opnfv-functest-smoke-{tag}-build'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-{tag}-build'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: '{branch}'
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-push-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-opnfv-functest-benchmarking-build
+ <<: *functest-params
+ repo: opnfv
+ container: functest-benchmarking
+ port:
+ ref_arg: BRANCH
+ path: docker/benchmarking
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-{tag}-build'
+
+- job-template:
+ name: 'functest-opnfv-functest-vnf-{tag}-build'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: '{branch}'
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-push-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-opnfv-functest-vnf-build
+ <<: *functest-params
+ repo: opnfv
+ container: functest-vnf
+ port:
+ ref_arg:
+ path: docker/vnf
+ jobs:
+ - 'functest-opnfv-functest-vnf-{tag}-build'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-cntt-{tag}-build'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: '{branch}'
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-push-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-opnfv-functest-smoke-cntt-build
+ <<: *functest-params
+ repo: opnfv
+ container: functest-smoke-cntt
+ port:
+ ref_arg: BRANCH
+ path: docker/smoke-cntt
+ jobs:
+ - 'functest-opnfv-functest-smoke-cntt-{tag}-build'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-build'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ scm:
+ - functest-scm:
+ ref: '{branch}'
+ builders:
+ - functest-build-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - functest-push-containers:
+ <<: *functest-build-containers
+ ref: '{branch}'
+
+- project:
+ name: functest-opnfv-functest-benchmarking-cntt-build
+ <<: *functest-params
+ repo: opnfv
+ container: functest-benchmarking-cntt
+ port:
+ ref_arg: BRANCH
+ path: docker/benchmarking-cntt
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-cntt-{tag}-build'
+
+- job-template:
+ name: 'functest-{tag}-docker'
project-type: multijob
triggers:
- - functest-patchset-created:
- branch: '{branch}'
+ - functest-commit
+ scm:
+ - functest-scm:
+ ref: '{branch}'
parameters:
- - functest-slave:
- slave: '{slave}'
- - functest-build_tag:
- build_tag: ''
- - functest-DEBUG:
- DEBUG: 'true'
- - functest-EXTERNAL_NETWORK:
- EXTERNAL_NETWORK: public
+ - functest-node:
+ node: '{node}'
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
properties:
- build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
blocking-jobs:
- - '^functest-{tag}-(daily|check|gate)$'
+ - ^functest-(pi-)*{tag}-(daily|docker|review)$
+ # yamllint enable rule:indentation
builders:
- multijob:
- name: remove former images
+ name: remove dependency
projects:
- - name: 'functest-opnfv-functest-healthcheck-{tag}-rmi'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-rmi'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-rmi'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-benchmarking-{tag}-rmi'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-vnf-{tag}-rmi'
+ - name: 'functest-{tag}-dep-rmi'
<<: *functest-jobs
- multijob:
- name: remove dependencies
+ name: pull dependency
projects:
- - name: 'functest-_-alpine-3.11-rmi'
- <<: *functest-jobs
- - multijob:
- name: pull dependencies
- projects:
- - name: 'functest-_-alpine-3.11-pull'
+ - name: 'functest-{tag}-dep-pull'
<<: *functest-jobs
- multijob:
name: build opnfv/functest-core
projects:
- - name: 'functest-opnfv-functest-core-{tag}-gate'
- <<: *functest-jobs
- - multijob:
- name: build opnfv/functest-tempest
- projects:
- - name: 'functest-opnfv-functest-tempest-{tag}-gate'
+ - name: 'functest-opnfv-functest-core-{tag}-build'
<<: *functest-jobs
- multijob:
name: build containers
projects:
- - name: 'functest-opnfv-functest-healthcheck-{tag}-gate'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-gate'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-benchmarking-{tag}-gate'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-vnf-{tag}-gate'
- <<: *functest-jobs
- - multijob:
- name: build opnfv/functest-smoke-cntt
- projects:
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-gate'
- <<: *functest-jobs
- - multijob:
- name: opnfv/functest-healthcheck:{tag}
- projects:
- - name: 'functest-opnfv-functest-healthcheck-{tag}-connection_check-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-healthcheck-{tag}-tenantnetwork1-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-healthcheck-{tag}-tenantnetwork2-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-healthcheck-{tag}-vmready1-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-healthcheck-{tag}-vmready2-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-healthcheck-{tag}-singlevm1-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-healthcheck-{tag}-singlevm2-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-healthcheck-{tag}-vping_ssh-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-healthcheck-{tag}-vping_userdata-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-healthcheck-{tag}-cinder_test-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-healthcheck-{tag}-odl-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-healthcheck-{tag}-tempest_smoke-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-healthcheck-{tag}-tempest_horizon-run'
- <<: *functest-jobs
- - multijob:
- name: opnfv/functest-smoke:{tag}
- projects:
- - name: 'functest-opnfv-functest-smoke-{tag}-neutron-tempest-plugin-api-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-tempest_cinder-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-tempest_keystone-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-rally_sanity-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-refstack_defcore-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-refstack_compute-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-refstack_object-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-refstack_platform-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-tempest_full-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-tempest_scenario-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-tempest_slow-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-patrole-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-neutron_trunk-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-networking-bgpvpn-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-networking-sfc-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-barbican-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-{tag}-octavia-run'
- <<: *functest-jobs
- - multijob:
- name: opnfv/functest-smoke-cntt:{tag}
- projects:
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-neutron-tempest-plugin-api-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_cinder-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_keystone-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-rally_sanity-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_full-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_scenario-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-smoke-cntt-{tag}-tempest_slow-run'
- <<: *functest-jobs
- - multijob:
- name: opnfv/functest-benchmarking:{tag}
- projects:
- - name: 'functest-opnfv-functest-benchmarking-{tag}-rally_full-run'
+ - name: 'functest-opnfv-functest-healthcheck-{tag}-build'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-benchmarking-{tag}-rally_jobs-run'
+ - name: 'functest-opnfv-functest-smoke-{tag}-build'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-benchmarking-{tag}-vmtp-run'
+ - name: 'functest-opnfv-functest-benchmarking-{tag}-build'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-benchmarking-{tag}-shaker-run'
+ - name: 'functest-opnfv-functest-vnf-{tag}-build'
<<: *functest-jobs
- multijob:
- name: opnfv/functest-vnf:{tag}
- execution-type: SEQUENTIALLY
+ name: build cntt containers
projects:
- - name: 'functest-opnfv-functest-vnf-{tag}-cloudify-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-vnf-{tag}-cloudify_ims-run'
+ - name: 'functest-opnfv-functest-smoke-cntt-{tag}-build'
<<: *functest-jobs
- - name: 'functest-opnfv-functest-vnf-{tag}-heat_ims-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-vnf-{tag}-vyos_vrouter-run'
- <<: *functest-jobs
- - name: 'functest-opnfv-functest-vnf-{tag}-juju_epc-run'
+ - name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-build'
<<: *functest-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
- project:
- name: 'functest'
+ name: 'functest-docker'
<<: *functest-params
jobs:
- - 'functest-{tag}-daily'
- - 'functest-{tag}-check'
- - 'functest-{tag}-gate'
+ - 'functest-{tag}-docker'
+
+- builder:
+ name: functest-trivy
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sudo sh -s -- -b .
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
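+ # --exit-code 1 makes the job fail whenever trivy reports vulnerabilities in the image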
+ ./trivy image --exit-code 1 $image
+
+- job-template:
+ name: 'functest-opnfv-functest-core-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-trivy:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-core-trivy'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-core'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-core-{tag}-trivy'
+
+- job-template:
+ name: 'functest-opnfv-functest-healthcheck-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-trivy:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-healthcheck-trivy'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-healthcheck'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-healthcheck-{tag}-trivy'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-trivy:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-smoke-trivy'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-smoke'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-smoke-{tag}-trivy'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-trivy:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-trivy'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-benchmarking'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-{tag}-trivy'
+
+- job-template:
+ name: 'functest-opnfv-functest-vnf-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-trivy:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-vnf-trivy'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-vnf'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-vnf-{tag}-trivy'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-cntt-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-trivy:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-smoke-cntt-trivy'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-smoke-cntt'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-smoke-cntt-{tag}-trivy'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-trivy'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-trivy:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-cntt-trivy'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-benchmarking-cntt'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-cntt-{tag}-trivy'
+
+- builder:
+ name: functest-grype
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sudo sh -s -- -b .
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
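+ # quiet scan; without --fail-on grype only reports findings and never fails the job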
+ ./grype -q $image
+
+- job-template:
+ name: 'functest-opnfv-functest-core-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-grype:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-core-grype'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-core'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-core-{tag}-grype'
+
+- job-template:
+ name: 'functest-opnfv-functest-healthcheck-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-grype:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-healthcheck-grype'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-healthcheck'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-healthcheck-{tag}-grype'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-grype:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-smoke-grype'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-smoke'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-smoke-{tag}-grype'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-grype:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-grype'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-benchmarking'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-{tag}-grype'
+
+- job-template:
+ name: 'functest-opnfv-functest-vnf-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-grype:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-vnf-grype'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-vnf'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-vnf-{tag}-grype'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-cntt-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-grype:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-smoke-cntt-grype'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-smoke-cntt'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-smoke-cntt-{tag}-grype'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-grype'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-grype:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-cntt-grype'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-benchmarking-cntt'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-cntt-{tag}-grype'
+
+- builder:
+ name: functest-sbom
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ sudo mkdir -p ~/.docker
+ curl -sSfL https://raw.githubusercontent.com/docker/sbom-cli-plugin/main/install.sh | sudo sh -s --
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
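+ # print the SBOM of the image; the output is informational and does not gate the job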
+ sudo docker sbom $image
+
+- job-template:
+ name: 'functest-opnfv-functest-core-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-sbom:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-core-sbom'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-core'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-core-{tag}-sbom'
+
+- job-template:
+ name: 'functest-opnfv-functest-healthcheck-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-sbom:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-healthcheck-sbom'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-healthcheck'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-healthcheck-{tag}-sbom'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-sbom:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-smoke-sbom'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-smoke'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-smoke-{tag}-sbom'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-sbom:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-sbom'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-benchmarking'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-{tag}-sbom'
+
+- job-template:
+ name: 'functest-opnfv-functest-vnf-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-sbom:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-vnf-sbom'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-vnf'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-vnf-{tag}-sbom'
+
+- job-template:
+ name: 'functest-opnfv-functest-smoke-cntt-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-sbom:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-smoke-cntt-sbom'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-smoke-cntt'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-smoke-cntt-{tag}-sbom'
+
+- job-template:
+ name: 'functest-opnfv-functest-benchmarking-cntt-{tag}-sbom'
+ triggers:
+ - timed: '@weekly'
+ parameters:
+ - functest-node:
+ node: '{node}'
+ builders:
+ - functest-sbom:
+ <<: *functest-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'functest-opnfv-functest-benchmarking-cntt-sbom'
+ <<: *functest-params
+ repo: 'opnfv'
+ container: 'functest-benchmarking-cntt'
+ port:
+ jobs:
+ - 'functest-opnfv-functest-benchmarking-cntt-{tag}-sbom'
- view:
- name: functest
+ name: functest-docker
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^functest-[a-z0-9.-]+-docker$
+
+- view:
+ name: functest-trivy
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: (?!functest-kubernetes)(?!functest-pi)^functest-[a-z-0-9.]+-trivy$
+
+- view:
+ name: functest-grype
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: (?!functest-kubernetes)(?!functest-pi)^functest-[a-z-0-9.]+-grype$
+
+- view:
+ name: functest-sbom
view-type: list
columns:
- status
@@ -1110,4 +2660,4 @@
- last-success
- last-failure
- last-duration
- regex: ^functest-[a-z]+-(daily|check|gate)$
+ regex: (?!functest-kubernetes)(?!functest-pi)^functest-[a-z-0-9.]+-sbom$
diff --git a/jjb/functest/xtesting-ci-docker.yaml b/jjb/functest/xtesting-ci-docker.yaml
new file mode 100644
index 000000000..c439d1484
--- /dev/null
+++ b/jjb/functest/xtesting-ci-docker.yaml
@@ -0,0 +1,189 @@
+---
+- builder:
+ name: xtesting-ci-docker-builder
+ builders:
+ - shell: |
+ cd {dir}
+ sudo docker build --pull -t {image} .
+ sudo docker push {image}
+ sudo docker system prune --all -f
+
+- builder:
+ name: xtesting-ci-docker-multiarch-builder
+ builders:
+ - shell: |
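+ # register qemu binfmt handlers so arm and arm64 images can be built on this amd64 node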
+ sudo docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
+ git clone https://github.com/estesp/manifest-tool
+ (cd manifest-tool && git checkout v0.9.0 && \
+ sudo -E make && sudo -E make install)
+ (cd {dir} && \
+ sudo docker build --pull -t {image}:amd64-{tag} . && \
+ sudo docker push {image}:amd64-{tag})
+ (cd {dir} && \
+ git checkout Dockerfile
+ sed -i -e "s|{from_amd64}|{to_arm64}|g" Dockerfile
+ sudo docker build --pull -t {image}:arm64-{tag} . && \
+ sudo docker push {image}:arm64-{tag})
+ (cd {dir} && \
+ git checkout Dockerfile
+ sed -i -e "s|{from_amd64}|{to_arm}|g" Dockerfile
+ sudo docker build --pull -t {image}:arm-{tag} . && \
+ sudo docker push {image}:arm-{tag})
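+ # merge the per-arch images into a single multi-arch manifest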
+ sudo manifest-tool push from-args \
+ --platforms linux/amd64,linux/arm,linux/arm64 \
+ --template {image}:ARCH-{tag} \
+ --target {image}:{tag}
+ sudo docker system prune --all -f
+
+- scm:
+ name: xtesting-ci-docker-scm
+ scm:
+ - git:
+ url: https://github.com/collivier/xtesting-docker.git
+ git-config-name:
+ git-config-email:
+
+- trigger:
+ name: xtesting-ci-docker-trigger
+ triggers:
+ - pollscm:
+ cron: "H/30 * * * *"
+ - timed: '@daily'
+
+- parameter:
+ name: xtesting-ci-docker-parameter
+ parameters:
+ - label:
+ name: node
+ default: 'xtesting'
+
+- job-template:
+ name: xtesting-ci-docker-{stream}
+ scm:
+ - xtesting-ci-docker-scm
+ parameters:
+ - xtesting-ci-docker-parameter
+ triggers:
+ - xtesting-ci-docker-trigger
+ builders:
+ - xtesting-ci-docker-builder:
+ image: '{image}'
+ dir: '{dir}'
+
+- job-template:
+ name: xtesting-ci-docker-multiarch-{stream}
+ scm:
+ - xtesting-ci-docker-scm
+ parameters:
+ - xtesting-ci-docker-parameter
+ triggers:
+ - xtesting-ci-docker-trigger
+ builders:
+ - xtesting-ci-docker-multiarch-builder:
+ image: '{image}'
+ dir: '{dir}'
+ tag: '{tag}'
+ from_amd64: '{from_amd64}'
+ to_arm64: '{to_arm64}'
+ to_arm: '{to_arm}'
+
+- project:
+ name: xtesting-ci-docker
+ stream:
+ - jenkins-lts-slim:
+ dir: jenkins.debian
+ image: opnfv/xtesting-jenkins:lts-slim
+ - jenkins-lts-alpine:
+ dir: jenkins
+ image: opnfv/xtesting-jenkins:lts-alpine
+ - s3www-v0.5.3:
+ dir: s3www
+ image: opnfv/xtesting-s3www:v0.5.3
+ jobs:
+ - xtesting-ci-docker-{stream}
+
+- project:
+ name: xtesting-ci-docker-multiarch
+ stream:
+ - jenkins-agent-debian:
+ dir: jenkins-agent
+ image: opnfv/xtesting-jenkins-agent
+ tag: 4.9-bullseye
+ from_amd64: debian:bullseye
+ to_arm64: arm64v8/debian:bullseye
+ to_arm: arm32v7/debian:bullseye
+ - jenkins-agent-ubuntu:
+ dir: jenkins-agent-ubuntu
+ image: opnfv/xtesting-jenkins-agent
+ tag: 4.9-jammy
+ from_amd64: ubuntu:jammy
+ to_arm64: arm64v8/ubuntu:jammy
+ to_arm: arm32v7/ubuntu:jammy
+ - jenkins-agent-auto:
+ dir: jenkins-agent-auto
+ image: opnfv/xtesting-jenkins-agent-auto
+ tag: 4.9-bullseye
+ from_amd64: opnfv/xtesting-jenkins-agent:4.9-bullseye
+ to_arm64: opnfv/xtesting-jenkins-agent:arm64-4.9-bullseye
+ to_arm: opnfv/xtesting-jenkins-agent:arm-4.9-bullseye
+ jobs:
+ - xtesting-ci-docker-multiarch-{stream}
+
+- builder:
+ name: xtesting-ci-docker-trivy-builder
+ builders:
+ - shell: |
+ curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sudo sh -s -- -b .
+ ./trivy image --exit-code 1 {image}
+
+- trigger:
+ name: xtesting-ci-docker-trivy-trigger
+ triggers:
+ - timed: '@daily'
+
+- parameter:
+ name: xtesting-ci-docker-trivy-parameter
+ parameters:
+ - label:
+ name: node
+ default: 'xtesting'
+
+- job-template:
+ name: 'xtesting-ci-docker-trivy-{stream}'
+ triggers:
+ - xtesting-ci-docker-trivy-trigger
+ parameters:
+ - xtesting-ci-docker-trivy-parameter
+ builders:
+ - xtesting-ci-docker-trivy-builder:
+ image: '{image}'
+
+- project:
+ name: 'xtesting-ci-docker-trivy'
+ stream:
+ - jenkins-lts-slim:
+ image: opnfv/xtesting-jenkins:lts-slim
+ - jenkins-lts-alpine:
+ image: opnfv/xtesting-jenkins:lts-alpine
+ - s3www-v0.5.3:
+ image: opnfv/xtesting-s3www:v0.5.3
+ - xtesting-jenkins-agent-4.9-bullseye:
+ image: opnfv/xtesting-jenkins-agent:4.9-bullseye
+ - xtesting-jenkins-agent-4.9-jammy:
+ image: opnfv/xtesting-jenkins-agent:4.9-jammy
+ - xtesting-jenkins-agent-auto-4.9-bullseye:
+ image: opnfv/xtesting-jenkins-agent-auto:4.9-bullseye
+ jobs:
+ - 'xtesting-ci-docker-trivy-{stream}'
+
+- view:
+ name: xtesting-ci-docker
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^xtesting-ci-docker.*$
diff --git a/jjb/functest/xtesting-ci-vm.yaml b/jjb/functest/xtesting-ci-vm.yaml
new file mode 100644
index 000000000..367598fe4
--- /dev/null
+++ b/jjb/functest/xtesting-ci-vm.yaml
@@ -0,0 +1,233 @@
+---
+- scm:
+ name: xtesting-ci-vm-scm
+ scm:
+ - git:
+ url: '{url}'
+ branches:
+ - '{ref}'
+
+- builder:
+ name: xtesting-ci-vm-builder
+ builders:
+ - shell: |
+ curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add -
+ echo "deb https://packages.cloud.google.com/apt cloud-sdk main" | sudo tee \
+ /etc/apt/sources.list.d/google-cloud-sdk.list
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install python3-diskimage-builder -y
+ if [ "{project}" = "xtestingci" ]; then
+ export ELEMENTS_PATH=$(pwd)/elements
+ else
+ git init ansible-role-xtesting
+ (cd ansible-role-xtesting &&
+ git fetch --tags https://github.com/collivier/ansible-role-xtesting.git {role-version} &&
+ git checkout FETCH_HEAD)
+ export ELEMENTS_PATH=$(pwd)/elements:$(pwd)/ansible-role-xtesting/elements
+ fi
+ export DIB_XTESTINGCI_VERSION={role-version}
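+ # build a 20 GB Debian qcow2 with diskimage-builder and publish it to the OPNFV artifact bucket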
+ disk-image-create --image-size 20 -o {project}-{version}.qcow2 debian vm {project}
+ gsutil cp {project}-{version}.qcow2 gs://artifacts.opnfv.org/{project}/{project}-{version}.qcow2
+ rm -rf {project}-{version}.qcow2 {project}-{version}.d
+
+- trigger:
+ name: xtesting-ci-vm-trigger
+ triggers:
+ - timed: '@weekly'
+
+- parameter:
+ name: xtesting-ci-vm-parameter
+ parameters:
+ - label:
+ name: node
+ default: 'opnfv-build'
+
+- job-template:
+ name: '{project}-{version}-vm'
+ scm:
+ - xtesting-ci-vm-scm:
+ url: '{url}'
+ ref: '{ref}'
+ triggers:
+ - xtesting-ci-vm-trigger
+ parameters:
+ - xtesting-ci-vm-parameter
+ properties:
+ - build-blocker:
+ use-build-blocker: true
+ blocking-level: 'NODE'
+ blocking-jobs:
+ - '^.*-vm$'
+ builders:
+ - xtesting-ci-vm-builder:
+ project: '{project}'
+ version: '{version}'
+ role-version: '{role-version}'
+
+- project:
+ name: 'xtesting-ci-vm'
+ project:
+ - xtestingci:
+ url: https://github.com/collivier/ansible-role-xtesting.git
+ - xtesting:
+ url: https://git.opnfv.org/functest-xtesting.git
+ - functest:
+ url: https://git.opnfv.org/functest.git
+ - functest-kubernetes:
+ url: https://git.opnfv.org/functest-kubernetes.git
+ version:
+ - latest:
+ ref: master
+ role-version: master
+ - '2023.2':
+ ref: stable/2023.2
+ role-version: master
+ - '2023.1':
+ ref: stable/2023.1
+ role-version: master
+ - zed:
+ ref: stable/zed
+ role-version: master
+ - yoga:
+ ref: stable/yoga
+ role-version: master
+ - xena:
+ ref: stable/xena
+ role-version: master
+ - wallaby:
+ ref: stable/wallaby
+ role-version: master
+ - leguer:
+ ref: stable/leguer
+ role-version: master
+ - kali:
+ ref: stable/kali
+ role-version: master
+ - jerma:
+ ref: stable/jerma
+ role-version: master
+ - v1.28:
+ ref: stable/v1.28
+ role-version: master
+ - v1.27:
+ ref: stable/v1.27
+ role-version: master
+ - v1.26:
+ ref: stable/v1.26
+ role-version: master
+ - v1.25:
+ ref: stable/v1.25
+ role-version: master
+ - v1.24:
+ ref: stable/v1.24
+ role-version: master
+ - v1.23:
+ ref: stable/v1.23
+ role-version: master
+ - v1.22:
+ ref: stable/v1.22
+ role-version: master
+ exclude:
+ - project: xtestingci
+ version: '2023.2'
+ - project: functest
+ version: '2023.2'
+ - project: functest-kubernetes
+ version: '2023.2'
+ - project: xtestingci
+ version: '2023.1'
+ - project: functest
+ version: '2023.1'
+ - project: functest-kubernetes
+ version: '2023.1'
+ - project: xtestingci
+ version: zed
+ - project: functest-kubernetes
+ version: zed
+ - project: xtestingci
+ version: yoga
+ - project: functest-kubernetes
+ version: yoga
+ - project: xtestingci
+ version: xena
+ - project: functest-kubernetes
+ version: xena
+ - project: xtestingci
+ version: wallaby
+ - project: functest-kubernetes
+ version: wallaby
+ - project: xtestingci
+ version: leguer
+ - project: xtesting
+ version: leguer
+ - project: functest-kubernetes
+ version: leguer
+ - project: xtestingci
+ version: kali
+ - project: xtesting
+ version: kali
+ - project: functest-kubernetes
+ version: kali
+ - project: xtestingci
+ version: jerma
+ - project: xtesting
+ version: jerma
+ - project: functest-kubernetes
+ version: jerma
+ - project: xtestingci
+ version: v1.28
+ - project: functest
+ version: v1.28
+ - project: xtesting
+ version: v1.28
+ - project: xtestingci
+ version: v1.27
+ - project: functest
+ version: v1.27
+ - project: xtesting
+ version: v1.27
+ - project: xtestingci
+ version: v1.26
+ - project: functest
+ version: v1.26
+ - project: xtesting
+ version: v1.26
+ - project: xtestingci
+ version: v1.25
+ - project: functest
+ version: v1.25
+ - project: xtesting
+ version: v1.25
+ - project: xtestingci
+ version: v1.24
+ - project: functest
+ version: v1.24
+ - project: xtesting
+ version: v1.24
+ - project: xtestingci
+ version: v1.23
+ - project: functest
+ version: v1.23
+ - project: xtesting
+ version: v1.23
+ - project: xtestingci
+ version: v1.22
+ - project: functest
+ version: v1.22
+ - project: xtesting
+ version: v1.22
+ jobs:
+ - '{project}-{version}-vm'
+
+- view:
+ name: xtesting-ci-vm
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^.*-vm$
diff --git a/jjb/functest/xtesting-ci.yaml b/jjb/functest/xtesting-ci.yaml
new file mode 100644
index 000000000..fa7fe8265
--- /dev/null
+++ b/jjb/functest/xtesting-ci.yaml
@@ -0,0 +1,182 @@
+---
+- builder:
+ name: xtesting-ci-tests
+ builders:
+ - shell: |
+ set +x
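+ # wipe leftover kind clusters, containers and data from previous runs before reinstalling the role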
+ sudo apt-get -o DPkg::Lock::Timeout=300 update
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install python3-pip docker.io podman -y
+ curl -Lo ./kind https://kind.sigs.k8s.io/dl/v0.17.0/kind-linux-amd64
+ chmod +x ./kind
+ sudo mv ./kind /usr/local/bin/kind
+ kind delete clusters xtesting jenkins gitlab || true
+ sudo docker ps -aq |xargs sudo docker stop || true
+ sudo docker ps -aq |xargs sudo docker rm || true
+ sudo docker system prune -f --all || true
+ sudo rm -f /etc/systemd/system/docker.service.d/http-proxy.conf
+ sudo systemctl daemon-reload
+ sudo systemctl restart docker
+ sudo podman ps -aq |xargs sudo podman stop || true
+ sudo podman ps -aq |xargs sudo podman rm || true
+ sudo rm -rfv /data /tmp/xtesting*
+ sudo apt-get install ansible patch -y
+ rm -rf ~/.ansible/roles/collivier.xtesting
+ case {release} in
+ stable)
+ ansible-galaxy install -f collivier.xtesting ;;
+ *)
+ ansible-galaxy install -f git+https://github.com/collivier/ansible-role-xtesting.git,{release}
+ mv ~/.ansible/roles/ansible-role-xtesting ~/.ansible/roles/collivier.xtesting ;;
+ esac
+ (cd ~/.ansible/roles/collivier.xtesting; patch -p1 < tests/docker_config_json.patch)
+ ansible-galaxy collection install -f -r ~/.ansible/roles/collivier.xtesting/requirements.yml
+ ansible-playbook -vvvv ~/.ansible/roles/collivier.xtesting/tests/{playbook}.yml
+
+- builder:
+ name: xtesting-ci-tests-remote
+ builders:
+ - shell: |
+ set +x
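+ # run the same cleanup on the remote node over ssh, then drive it with ansible from this builder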
+ ssh opnfv@10.200.140.224 << EOF
+ sudo apt-get -o DPkg::Lock::Timeout=300 update
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install python3-pip docker.io podman -y
+ curl -Lo ./kind https://kind.sigs.k8s.io/dl/v0.17.0/kind-linux-amd64
+ chmod +x ./kind
+ sudo mv ./kind /usr/local/bin/kind
+ kind delete clusters xtesting jenkins gitlab || true
+ sudo docker ps -aq |xargs sudo docker stop || true
+ sudo docker ps -aq |xargs sudo docker rm || true
+ sudo docker system prune -f --all || true
+ sudo rm -f /etc/systemd/system/docker.service.d/http-proxy.conf
+ sudo systemctl daemon-reload
+ sudo systemctl restart docker
+ sudo podman ps -aq |xargs sudo podman stop || true
+ sudo podman ps -aq |xargs sudo podman rm || true
+ sudo rm -rfv /data /tmp/xtesting*
+ EOF
+ sudo apt-get install ansible patch -y
+ rm -rf ~/.ansible/roles/collivier.xtesting
+ case {release} in
+ stable)
+ ansible-galaxy install -f collivier.xtesting ;;
+ *)
+ ansible-galaxy install -f git+https://github.com/collivier/ansible-role-xtesting.git,{release}
+ mv ~/.ansible/roles/ansible-role-xtesting ~/.ansible/roles/collivier.xtesting ;;
+ esac
+ (cd ~/.ansible/roles/collivier.xtesting; patch -p1 < tests/docker_config_json.patch)
+ ansible-galaxy collection install -f -r ~/.ansible/roles/collivier.xtesting/requirements.yml
+ sed -i "s/127.0.0.1/10.200.140.224/g" ~/.ansible/roles/collivier.xtesting/tests/{playbook}.yml
+ echo 10.200.140.224 ansible_host=10.200.140.224 ansible_user=opnfv > /tmp/inventory
+ ansible-playbook -i /tmp/inventory -vvvv ~/.ansible/roles/collivier.xtesting/tests/{playbook}.yml
+
+- parameter:
+ name: xtesting-ci-node
+ parameters:
+ - label:
+ name: node
+ default: '{node}'
+
+- job-template:
+ name: 'xtesting-ci-tests-{release}-{playbook}'
+ triggers:
+ - timed: '@daily'
+ parameters:
+ - xtesting-ci-node:
+ node: '{node}'
+ properties:
+ - build-blocker:
+ use-build-blocker: true
+ blocking-level: 'NODE'
+ blocking-jobs:
+ - '^xtesting-ci-tests-.*$'
+ builders:
+ - xtesting-ci-tests:
+ playbook: '{playbook}'
+ release: '{release}'
+
+- job-template:
+ name: 'xtesting-ci-tests-remote-{release}-{playbook}'
+ triggers:
+ - timed: '@daily'
+ parameters:
+ - xtesting-ci-node:
+ node: '{node}'
+ properties:
+ - build-blocker:
+ use-build-blocker: true
+ blocking-level: 'NODE'
+ blocking-jobs:
+ - '^xtesting-ci-tests-.*$'
+ builders:
+ - xtesting-ci-tests-remote:
+ playbook: '{playbook}'
+ release: '{release}'
+
+- playbook: &playbook
+ name: 'playbook'
+ playbook:
+ - all
+ - podman
+ - proxy
+ - proxy2
+ - radosgw
+ - nexus
+ - repo
+ - twice
+ - macro
+ - branch
+ - jjb
+ - gitlab.insert
+ - jenkins_kind
+ - jenkins_kind2
+ - gitlab_kind
+ - chainedci
+ - kubernetes0
+ - kubernetes1
+ - kubernetes2
+ - proxy_kubernetes
+ - proxy_kubernetes2
+ - k8s_jenkins_kind0
+ - k8s_jenkins_kind1
+ - k8s_jenkins_kind2
+ - k8s_jenkins_kind3
+ - k8s_gitlab0
+ - k8s_gitlab1
+ - k8s_gitlab_kind0
+ - k8s_gitlab_kind1
+ - k8s_gitlab_kind2
+ - k8s_gitlab_kind3
+
+- project:
+ name: xtesting-ci-tests
+ <<: *playbook
+ node: xtestingci
+ release:
+ - stable
+ - master
+ jobs:
+ - 'xtesting-ci-tests-{release}-{playbook}'
+
+- project:
+ name: xtesting-ci-tests-remote
+ <<: *playbook
+ node: xtesting
+ release:
+ - stable
+ - master
+ jobs:
+ - 'xtesting-ci-tests-remote-{release}-{playbook}'
+
+- view:
+ name: xtesting-ci
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^xtesting-ci-tests-.*$
diff --git a/jjb/functest/xtesting-docker.yaml b/jjb/functest/xtesting-docker.yaml
deleted file mode 100644
index 32689498c..000000000
--- a/jjb/functest/xtesting-docker.yaml
+++ /dev/null
@@ -1,234 +0,0 @@
----
-##############################################
-# job configuration for docker build and push
-##############################################
-- project:
-
- name: xtesting-docker
-
- project: functest-xtesting
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - jerma:
- branch: 'stable/{stream}'
- disabled: false
- - iruya:
- branch: 'stable/{stream}'
- disabled: false
- - hunter:
- branch: 'stable/{stream}'
- disabled: false
-
- arch_tag:
- - 'amd64':
- slave_label: 'lf-build2'
- - 'arm64':
- slave_label: 'opnfv-build-ubuntu-arm'
-
- # settings for jobs run in multijob phases
- build-job-settings: &build-job-settings
- current-parameters: false
- git-revision: true
- node-parameters: false
- predefined-parameters: |
- PUSH_IMAGE=$PUSH_IMAGE
- COMMIT_ID=$COMMIT_ID
- GERRIT_REFNAME=$GERRIT_REFNAME
- DOCKERFILE=$DOCKERFILE
- kill-phase-on: FAILURE
- abort-all-jobs: false
-
- manifest-job-settings: &manifest-job-settings
- current-parameters: false
- git-revision: true
- node-parameters: false
- predefined-parameters:
- GERRIT_REFNAME=$GERRIT_REFNAME
- kill-phase-on: FAILURE
- abort-all-jobs: false
-
- # yamllint enable rule:key-duplicates
- jobs:
- - "xtesting-docker-{stream}"
- - "xtesting-docker-build-{arch_tag}-{stream}"
- - "xtesting-docker-manifest-{stream}"
-
-########################
-# job templates
-########################
-- job-template:
- name: 'xtesting-docker-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- parameters:
- - xtesting-job-parameters:
- project: '{project}'
- branch: '{branch}'
- slave_label: 'opnfv-build-ubuntu'
- arch_tag: 'amd64'
-
- properties:
- - throttle:
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm
-
- triggers:
- - pollscm:
- cron: "*/30 * * * *"
- - gerrit-trigger-tag-created:
- project: '{project}'
-
- builders:
- - multijob:
- name: 'build xtesting images'
- execution-type: PARALLEL
- projects:
- - name: 'xtesting-docker-build-amd64-{stream}'
- <<: *build-job-settings
- - name: 'xtesting-docker-build-arm64-{stream}'
- <<: *build-job-settings
- - multijob:
- name: 'publish xtesting manifests'
- execution-type: PARALLEL
- projects:
- - name: 'xtesting-docker-manifest-{stream}'
- <<: *manifest-job-settings
-
- publishers:
- - 'xtesting-amd64-recipients'
- - 'xtesting-arm64-recipients'
-
-- job-template:
- name: 'xtesting-docker-build-{arch_tag}-{stream}'
- disabled: '{obj:disabled}'
- parameters:
- - xtesting-job-parameters:
- project: '{project}'
- branch: '{branch}'
- slave_label: '{slave_label}'
- arch_tag: '{arch_tag}'
- scm:
- - git-scm
- builders:
- - shell: |
- #!/bin/bash -ex
- sudo arch={arch_tag} bash ./build.sh
- exit $?
-
-- job-template:
- name: 'xtesting-docker-manifest-{stream}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build-ubuntu'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: PROJECT
- default: "{project}"
- description: "Project name used to enable job conditions"
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: REPO
- default: "opnfv"
- description: "Repository name for functest images"
-
-
- disabled: '{obj:disabled}'
-
- builders:
- - shell: |
- #!/bin/bash -ex
- case "{stream}" in
- "master")
- tag="latest" ;;
- *)
- tag="{stream}" ;;
- esac
- sudo manifest-tool push from-args \
- --platforms linux/amd64,linux/arm64 \
- --template $REPO/xtesting:ARCH-$tag \
- --target $REPO/xtesting:$tag
- exit $?
-
-- parameter:
- name: xtesting-job-parameters
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: '{slave_label}'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: PUSH_IMAGE
- default: "true"
- description: "To enable/disable pushing the image to Dockerhub."
- - string:
- name: COMMIT_ID
- default: ""
- description: "commit id to make a snapshot docker image"
- - string:
- name: GERRIT_REFNAME
- default: ""
- description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
- - string:
- name: DOCKERFILE
- default: "Dockerfile"
- description: "Dockerfile to use for creating the image."
- - string:
- name: ARCH_TAG
- default: "{arch_tag}"
- description: "If set, this value will be added to the docker image tag as a prefix"
- - string:
- name: PROJECT
- default: "{project}"
- description: "Project name used to enable job conditions"
- - string:
- name: REPO
- default: "opnfv"
- description: "Repository name for functest images"
-
-# publisher macros
-- publisher:
- name: 'xtesting-arm64-recipients'
- publishers:
- - email:
- recipients: >
- cristina.pauna@enea.com
- alexandru.avadanii@enea.com
- delia.popescu@enea.com
-
-- publisher:
- name: 'xtesting-amd64-recipients'
- publishers:
- - email:
- recipients: >
- jalausuch@suse.com morgan.richomme@orange.com
- cedric.ollivier@orange.com feng.xiaowei@zte.com.cn
- juha.kosonen@nokia.com wangwulin@huawei.com
- valentin.boucher@kontron.com
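
For reference, the manifest job in the file deleted above stitched the per-architecture images into one multi-arch tag. Expanded for the master stream it reduces to roughly the following (a sketch; REPO defaults to opnfv per the parameter above, and manifest-tool substitutes ARCH per platform):

    # Publish a multi-arch manifest covering the amd64 and arm64 builds.
    REPO=opnfv
    tag=latest          # the master stream maps to the latest tag in the removed builder
    sudo manifest-tool push from-args \
        --platforms linux/amd64,linux/arm64 \
        --template "$REPO/xtesting:ARCH-$tag" \
        --target "$REPO/xtesting:$tag"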
diff --git a/jjb/functest/xtesting-pi.yaml b/jjb/functest/xtesting-pi.yaml
new file mode 100644
index 000000000..86ac98f30
--- /dev/null
+++ b/jjb/functest/xtesting-pi.yaml
@@ -0,0 +1,425 @@
+---
+- xtesting-pi-containers: &xtesting-pi-containers
+ name: 'xtesting-pi-containers'
+ repo: '{repo}'
+ port: '{port}'
+ container: '{container}'
+ tag: '{tag}'
+
+- xtesting-pi-params: &xtesting-pi-params
+ name: 'xtesting-pi-params'
+ tag:
+ - latest:
+ node: opnfv-build
+ - 2023.2:
+ node: opnfv-build
+ - 2023.1:
+ node: opnfv-build
+ - zed:
+ node: opnfv-build
+ - yoga:
+ node: opnfv-build
+ - xena:
+ node: opnfv-build
+ - wallaby:
+ node: opnfv-build
+ - arm-latest:
+ node: opnfv-build
+ - arm-2023.2:
+ node: opnfv-build
+ - arm-2023.1:
+ node: opnfv-build
+ - arm-zed:
+ node: opnfv-build
+ - arm-yoga:
+ node: opnfv-build
+ - arm-xena:
+ node: opnfv-build
+ - arm-wallaby:
+ node: opnfv-build
+ - arm64-latest:
+ node: opnfv-build
+ - arm64-2023.2:
+ node: opnfv-build
+ - arm64-2023.1:
+ node: opnfv-build
+ - arm64-zed:
+ node: opnfv-build
+ - arm64-yoga:
+ node: opnfv-build
+ - arm64-xena:
+ node: opnfv-build
+ - arm64-wallaby:
+ node: opnfv-build
+
+- xtesting-pi-ollivier-xtesting-params: &xtesting-pi-ollivier-xtesting-params
+ name: 'xtesting-pi-ollivier-xtesting-params'
+ repo: 'ollivier'
+ container: 'xtesting'
+ port:
+ tag:
+ - latest:
+ node: opnfv-build
+ - 2023.2:
+ node: opnfv-build
+ - 2023.1:
+ node: opnfv-build
+ - zed:
+ node: opnfv-build
+ - yoga:
+ node: opnfv-build
+ - xena:
+ node: opnfv-build
+ - wallaby:
+ node: opnfv-build
+ - arm-latest:
+ node: opnfv-build
+ - arm-2023.2:
+ node: opnfv-build
+ - arm-2023.1:
+ node: opnfv-build
+ - arm-zed:
+ node: opnfv-build
+ - arm-yoga:
+ node: opnfv-build
+ - arm-xena:
+ node: opnfv-build
+ - arm-wallaby:
+ node: opnfv-build
+ - arm64-latest:
+ node: opnfv-build
+ - arm64-2023.2:
+ node: opnfv-build
+ - arm64-2023.1:
+ node: opnfv-build
+ - arm64-zed:
+ node: opnfv-build
+ - arm64-yoga:
+ node: opnfv-build
+ - arm64-xena:
+ node: opnfv-build
+ - arm64-wallaby:
+ node: opnfv-build
+
+- xtesting-pi-jobs: &xtesting-pi-jobs
+ name: 'xtesting-pi-jobs'
+ current-parameters: true
+
+- parameter:
+ name: xtesting-pi-node
+ parameters:
+ - label:
+ name: node
+ default: '{node}'
+
+- parameter:
+ name: xtesting-pi-build_tag
+ parameters:
+ - random-string:
+ name: build_tag
+
+- xtesting-pi-run-containers: &xtesting-pi-run-containers
+ name: 'xtesting-pi-run-containers'
+ <<: *xtesting-pi-containers
+ privileged: '{privileged}'
+ volumes: '{volumes}'
+ env: '{env}'
+ network: '{network}'
+ uid: '{uid}'
+ gid: '{gid}'
+ published_ports: '{published_ports}'
+
+- builder:
+ name: xtesting-pi-pull-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker pull $image
+
+- builder:
+ name: xtesting-pi-run-containers
+ builders:
+ - shell: |
+ set +x
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo mkdir -p $WORKSPACE/results
+ sudo chown {uid}:{gid} $WORKSPACE/results
+ sudo docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/xtesting/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/xtesting/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -v /home/opnfv/xtesting/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ $image run_tests -t {test} -p -r
+
+- builder:
+ name: xtesting-pi-remove-images
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker rmi $image || true
+
+- job-template:
+ name: 'xtesting-pi-ollivier-xtesting-{tag}-pull'
+ parameters:
+ - xtesting-pi-node:
+ node: '{node}'
+ builders:
+ - xtesting-pi-pull-containers:
+ <<: *xtesting-pi-containers
+
+- project:
+ name: 'xtesting-pi-ollivier-xtesting-pull'
+ <<: *xtesting-pi-ollivier-xtesting-params
+ jobs:
+ - 'xtesting-pi-ollivier-xtesting-{tag}-pull'
+
+- job-template:
+ name: 'xtesting-pi-ollivier-xtesting-{tag}-rmi'
+ parameters:
+ - xtesting-pi-node:
+ node: '{node}'
+ builders:
+ - xtesting-pi-remove-images:
+ <<: *xtesting-pi-containers
+
+- project:
+ name: 'xtesting-pi-ollivier-xtesting-rmi'
+ <<: *xtesting-pi-ollivier-xtesting-params
+ jobs:
+ - 'xtesting-pi-ollivier-xtesting-{tag}-rmi'
+
+- job-template:
+ name: 'xtesting-pi-ollivier-xtesting-{tag}-{test}-run'
+ parameters:
+ - xtesting-pi-node:
+ node: '{node}'
+ - xtesting-pi-build_tag:
+ build_tag: ''
+ builders:
+ - xtesting-pi-run-containers:
+ <<: *xtesting-pi-run-containers
+ test: '{test}'
+
+- project:
+ name: 'xtesting-pi-ollivier-xtesting'
+ <<: *xtesting-pi-ollivier-xtesting-params
+ volumes:
+ env:
+ published_ports:
+ container: 'xtesting'
+ test:
+ - first
+ - second
+ - third
+ - fourth
+ - fifth
+ - sixth
+ - eighth
+ - nineth
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ exclude:
+ - tag: wallaby
+ test: nineth
+ - tag: xena
+ test: nineth
+ jobs:
+ - 'xtesting-pi-ollivier-xtesting-{tag}-{test}-run'
+
+- builder:
+ name: xtesting-pi-zip
+ builders:
+ - shell: |
+ set +x
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/results || true
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo mkdir -p $WORKSPACE/results
+ sudo chown {uid}:{gid} $WORKSPACE/results
+ sudo docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
+ -e S3_ENDPOINT_URL=https://storage.googleapis.com \
+ -e S3_DST_URL=s3://artifacts.opnfv.org/xtesting \
+ -e HTTP_DST_URL=http://artifacts.opnfv.org/xtesting \
+ -v /home/opnfv/xtesting/.boto:/etc/boto.cfg \
+ -e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
+ -e BUILD_TAG=$BUILD_TAG \
+ -v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
+ $image zip_campaign
+
+- job-template:
+ name: 'xtesting-pi-{tag}-zip'
+ parameters:
+ - xtesting-pi-node:
+ node: '{node}'
+ - xtesting-pi-build_tag:
+ build_tag: ''
+ builders:
+ - xtesting-pi-zip:
+ <<: *xtesting-pi-run-containers
+
+- project:
+ name: 'xtesting-pi-zip'
+ <<: *xtesting-pi-ollivier-xtesting-params
+ volumes:
+ env:
+ published_ports:
+ container: 'xtesting'
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
+ jobs:
+ - 'xtesting-pi-{tag}-zip'
+
+- job-template:
+ name: 'xtesting-pi-{tag}-daily'
+ project-type: multijob
+ triggers:
+ - timed: '@daily'
+ parameters:
+ - xtesting-pi-node:
+ node: '{node}'
+ - xtesting-pi-build_tag:
+ build_tag: ''
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
+ properties:
+ - build-blocker:
+ blocking-jobs:
+ - ^xtesting-(pi-)*{tag}-(daily|docker|review)$
+ # yamllint enable rule:indentation
+ builders:
+ - multijob:
+ name: remove former images
+ projects:
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-rmi'
+ <<: *xtesting-pi-jobs
+ - multijob:
+ name: pull containers
+ projects:
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-pull'
+ <<: *xtesting-pi-jobs
+ - multijob:
+ name: ollivier/xtesting:{tag}
+ projects:
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-first-run'
+ <<: *xtesting-pi-jobs
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-second-run'
+ <<: *xtesting-pi-jobs
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-third-run'
+ <<: *xtesting-pi-jobs
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-fourth-run'
+ <<: *xtesting-pi-jobs
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-fifth-run'
+ <<: *xtesting-pi-jobs
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-sixth-run'
+ <<: *xtesting-pi-jobs
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-eighth-run'
+ <<: *xtesting-pi-jobs
+ - name: 'xtesting-pi-ollivier-xtesting-{tag}-nineth-run'
+ <<: *xtesting-pi-jobs
+ - multijob:
+ name: dump all campaign data
+ projects:
+ - name: 'xtesting-pi-{tag}-zip'
+ <<: *xtesting-pi-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'xtesting-pi-daily'
+ <<: *xtesting-pi-params
+ jobs:
+ - 'xtesting-pi-{tag}-daily'
+
+- view:
+ name: xtesting-pi
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^xtesting-pi-[a-z-0-9.]+-daily$
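
The run builders in this new file receive volumes, env and published_ports as YAML lists that JJB substitutes as Python-style strings such as [a, b]; the tr/sed loops strip the brackets and turn each entry into a -v, -e or -p flag before the docker run call. The conversion in isolation, with hypothetical values:

    # Hypothetical substituted values, in the form JJB renders them:
    volumes_in='[/etc/ssl:/etc/ssl, /data:/data]'
    ports_in='[8080:8080]'
    volumes=
    for i in $(echo "$volumes_in" | tr -d '[]' | sed "s/, / /g"); do
        volumes="-v $i $volumes"
    done
    published_ports=
    for i in $(echo "$ports_in" | tr -d '[]' | sed "s/, / /g"); do
        published_ports="-p $i $published_ports"
    done
    echo docker run $volumes $published_ports IMAGE
    # -> docker run -v /data:/data -v /etc/ssl:/etc/ssl -p 8080:8080 IMAGE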
diff --git a/jjb/functest/xtesting-project-jobs.yaml b/jjb/functest/xtesting-project-jobs.yaml
deleted file mode 100644
index a1013feb3..000000000
--- a/jjb/functest/xtesting-project-jobs.yaml
+++ /dev/null
@@ -1,80 +0,0 @@
----
-- xtesting-project-params: &xtesting-project-params
- name: 'xtesting-project-params'
- tag:
- - latest:
- branch: master
- slave: lf-virtual1
- - jerma:
- branch: stable/jerma
- slave: lf-virtual1
- - iruya:
- branch: stable/iruya
- slave: lf-virtual1
- - hunter:
- branch: stable/hunter
- slave: lf-virtual1
-
-- builder:
- name: xtesting-run-tox
- builders:
- - shell: tox
-
-- trigger:
- name: xtesting-project-patchset-created
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'functest-xtesting'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- skip-vote:
- successful: false
- failed: false
- unstable: false
- notbuilt: false
-
-- parameter:
- name: xtesting-project-slave
- parameters:
- - label:
- name: slave
- default: '{slave}'
-
-- scm:
- name: xtesting-project-scm
- scm:
- - git:
- url: https://gerrit.opnfv.org/gerrit/functest-xtesting
- refspec: '+refs/changes/*:refs/changes/*'
- branches:
- - '{ref}'
-
-- job-template:
- name: 'xtesting-run-tox-{tag}'
- triggers:
- - xtesting-project-patchset-created:
- branch: '{branch}'
- scm:
- - xtesting-project-scm:
- ref: $GERRIT_REFSPEC
- parameters:
- - xtesting-project-slave:
- slave: '{slave}'
- builders:
- - xtesting-run-tox
-
-- project:
- name: 'xtesting-run-tox'
- <<: *xtesting-project-params
- jobs:
- - 'xtesting-run-tox-{tag}'
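
The verify jobs removed here fetched the open Gerrit change via $GERRIT_REFSPEC and ran tox on it; done by hand the same flow is roughly (a sketch, assuming the refspec normally injected by the Gerrit trigger):

    # GERRIT_REFSPEC has the form refs/changes/<nn>/<change>/<patchset>.
    git clone https://gerrit.opnfv.org/gerrit/functest-xtesting
    cd functest-xtesting
    git fetch origin "$GERRIT_REFSPEC"
    git checkout FETCH_HEAD
    tox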
diff --git a/jjb/functest/xtesting.yaml b/jjb/functest/xtesting.yaml
index 44231e432..fbd8ba445 100644
--- a/jjb/functest/xtesting.yaml
+++ b/jjb/functest/xtesting.yaml
@@ -1,32 +1,116 @@
---
-- xtesting-jobs: &xtesting-jobs
- name: 'xtesting-jobs'
- current-parameters: true
+- xtesting-containers: &xtesting-containers
+ name: 'xtesting-containers'
+ repo: '{repo}'
+ port: '{port}'
+ container: '{container}'
+ tag: '{tag}'
- xtesting-params: &xtesting-params
name: 'xtesting-params'
+ tag:
+ - latest:
+ from:
+ build_args:
+ branch: master
+ node: opnfv-build
+ dependency: 3.17
+ - 2023.2:
+ from:
+ build_args:
+ branch: stable/2023.2
+ node: opnfv-build
+ dependency: 3.17
+ - 2023.1:
+ from:
+ build_args:
+ branch: stable/2023.1
+ node: opnfv-build
+ dependency: 3.17
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: opnfv-build
+ dependency: 3.16
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: opnfv-build
+ dependency: 3.16
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: opnfv-build
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: opnfv-build
+ dependency: 3.13
+
+- xtesting-opnfv-xtesting-params: &xtesting-opnfv-xtesting-params
+ name: 'xtesting-opnfv-xtesting-params'
repo: 'opnfv'
+ container: 'xtesting'
port:
tag:
- latest:
+ from:
+ build_args:
branch: master
- slave: lf-virtual1
- - jerma:
- branch: stable/jerma
- slave: lf-virtual1
- - iruya:
- branch: stable/iruya
- slave: lf-virtual1
- - hunter:
- branch: stable/hunter
- slave: lf-virtual1
+ node: opnfv-build
+ dependency: 3.17
+ - 2023.2:
+ from:
+ build_args:
+ branch: stable/2023.2
+ node: opnfv-build
+ dependency: 3.17
+ - 2023.1:
+ from:
+ build_args:
+ branch: stable/2023.1
+ node: opnfv-build
+ dependency: 3.17
+ - zed:
+ from:
+ build_args:
+ branch: stable/zed
+ node: opnfv-build
+ dependency: 3.16
+ - yoga:
+ from:
+ build_args:
+ branch: stable/yoga
+ node: opnfv-build
+ dependency: 3.16
+ - xena:
+ from:
+ build_args:
+ branch: stable/xena
+ node: opnfv-build
+ dependency: 3.14
+ - wallaby:
+ from:
+ build_args:
+ branch: stable/wallaby
+ node: opnfv-build
+ dependency: 3.13
+
+- xtesting-jobs: &xtesting-jobs
+ name: 'xtesting-jobs'
+ current-parameters: true
- parameter:
- name: xtesting-slave
+ name: xtesting-node
parameters:
- label:
- name: slave
- default: '{slave}'
+ name: node
+ default: '{node}'
- parameter:
name: xtesting-build_tag
@@ -34,31 +118,16 @@
- random-string:
name: build_tag
-- parameter:
- name: xtesting-branch
- parameters:
- - string:
- name: branch
- default: '{branch}'
-
-- parameter:
- name: xtesting-DEBUG
- parameters:
- - string:
- name: DEBUG
- default: 'true'
-
-- xtesting-containers: &xtesting-containers
- name: 'xtesting-containers'
- repo: '{repo}'
- port: '{port}'
- container: '{container}'
- tag: '{tag}'
-
- xtesting-run-containers: &xtesting-run-containers
name: 'xtesting-run-containers'
<<: *xtesting-containers
- test: '{test}'
+ privileged: '{privileged}'
+ volumes: '{volumes}'
+ env: '{env}'
+ network: '{network}'
+ uid: '{uid}'
+ gid: '{gid}'
+ published_ports: '{published_ports}'
- builder:
name: xtesting-pull-containers
@@ -79,7 +148,22 @@
builders:
- shell: |
set +x
- [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/* || true
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/results || true
if [ "{repo}" = "_" ]; then
image={container}:{tag}
elif [ "{port}" = "None" ]; then
@@ -87,17 +171,23 @@
else
image={repo}:{port}/{container}:{tag}
fi
+ sudo mkdir -p $WORKSPACE/results
+ sudo chown {uid}:{gid} $WORKSPACE/results
sudo docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
-e S3_ENDPOINT_URL=https://storage.googleapis.com \
-e S3_DST_URL=s3://artifacts.opnfv.org/xtesting/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
-e HTTP_DST_URL=http://artifacts.opnfv.org/xtesting/$BUILD_TAG/$JOB_NAME-$BUILD_ID \
+ -v /home/opnfv/xtesting/.boto:/etc/boto.cfg \
-e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
-e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
- -e NODE_NAME=$slave \
+ -e NODE_NAME=$node \
-e BUILD_TAG=$BUILD_TAG \
-v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
- -e DEBUG=$DEBUG \
- -v /home/opnfv/xtesting/.boto:/root/.boto \
$image run_tests -t {test} -p -r
- builder:
@@ -114,91 +204,54 @@
fi
sudo docker rmi $image || true
-- xtesting-build-containers: &xtesting-build-containers
- name: 'xtesting-build-containers'
- <<: *xtesting-containers
- ref_arg: '{ref_arg}'
- path: '{path}'
-
-- builder:
- name: xtesting-build-containers
- builders:
- - shell: |
- set +x
- if [ "{repo}" = "_" ]; then
- image={container}:{tag}
- elif [ "{port}" = "None" ]; then
- image={repo}/{container}:{tag}
- else
- image={repo}:{port}/{container}:{tag}
- fi
- if [ "{ref_arg}" = "None" ]; then
- build_arg=""
- else
- build_arg="--build-arg {ref_arg}={ref}"
- fi
- cd {path}
- sudo docker build $build_arg \
- --pull=false --no-cache --force-rm=true \
- -t $image .
-
-- scm:
- name: xtesting-scm
- scm:
- - git:
- url: https://gerrit.opnfv.org/gerrit/functest-xtesting
- refspec: '+refs/changes/*:refs/changes/*'
- branches:
- - '{ref}'
-
- job-template:
- name: 'xtesting-{repo}-{container}-{tag}-pull'
+ name: 'xtesting-opnfv-xtesting-{tag}-pull'
parameters:
- - xtesting-slave:
- slave: '{slave}'
+ - xtesting-node:
+ node: '{node}'
builders:
- xtesting-pull-containers:
<<: *xtesting-containers
- project:
name: 'xtesting-opnfv-xtesting-pull'
- <<: *xtesting-params
- container: 'xtesting'
+ <<: *xtesting-opnfv-xtesting-params
jobs:
- - 'xtesting-{repo}-{container}-{tag}-pull'
+ - 'xtesting-opnfv-xtesting-{tag}-pull'
- job-template:
- name: 'xtesting-{repo}-{container}-{tag}-rmi'
+ name: 'xtesting-opnfv-xtesting-{tag}-rmi'
parameters:
- - xtesting-slave:
- slave: '{slave}'
+ - xtesting-node:
+ node: '{node}'
builders:
- xtesting-remove-images:
<<: *xtesting-containers
- project:
name: 'xtesting-opnfv-xtesting-rmi'
- <<: *xtesting-params
- container: 'xtesting'
+ <<: *xtesting-opnfv-xtesting-params
jobs:
- - 'xtesting-{repo}-{container}-{tag}-rmi'
+ - 'xtesting-opnfv-xtesting-{tag}-rmi'
- job-template:
- name: 'xtesting-{repo}-{container}-{tag}-{test}-run'
+ name: 'xtesting-opnfv-xtesting-{tag}-{test}-run'
parameters:
- - xtesting-slave:
- slave: '{slave}'
+ - xtesting-node:
+ node: '{node}'
- xtesting-build_tag:
build_tag: ''
- - xtesting-DEBUG:
- DEBUG: 'true'
builders:
- xtesting-run-containers:
<<: *xtesting-run-containers
+ test: '{test}'
- project:
name: 'xtesting-opnfv-xtesting'
- <<: *xtesting-params
+ <<: *xtesting-opnfv-xtesting-params
+ volumes:
+ env:
+ published_ports:
container: 'xtesting'
test:
- first
@@ -207,21 +260,41 @@
- fourth
- fifth
- sixth
+ - eighth
+ - nineth
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
exclude:
- - tag: hunter
- test: sixth
- - tag: iruya
- test: sixth
+ - tag: wallaby
+ test: nineth
+ - tag: xena
+ test: nineth
jobs:
- - 'xtesting-{repo}-{container}-{tag}-{test}-run'
-
+ - 'xtesting-opnfv-xtesting-{tag}-{test}-run'
- builder:
name: xtesting-zip
builders:
- shell: |
set +x
- [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/* || true
+ volumes=;
+ if [ "{volumes}" != "None" ]; then
+ for i in $(echo {volumes} | tr -d '[]' |sed "s/, / /g" ); \
+ do volumes="-v $i $volumes"; done
+ fi
+ env=;
+ if [ "{env}" != "None" ]; then
+ for i in $(eval echo {env} | tr -d '[]' |sed "s/, / /g" ); \
+ do env="-e $i $env"; done
+ fi
+ published_ports=;
+ if [ "{published_ports}" != "None" ]; then
+ for i in $(echo {published_ports} | tr -d '[]' |sed "s/, / /g" ); \
+ do published_ports="-p $i $published_ports"; done
+ fi
+ [ ! -z "$WORKSPACE" ] && sudo rm -rf $WORKSPACE/results || true
if [ "{repo}" = "_" ]; then
image={container}:{tag}
elif [ "{port}" = "None" ]; then
@@ -229,35 +302,47 @@
else
image={repo}:{port}/{container}:{tag}
fi
+ sudo mkdir -p $WORKSPACE/results
+ sudo chown {uid}:{gid} $WORKSPACE/results
sudo docker run --rm \
+ --privileged={privileged} \
+ --network={network} \
+ $volumes \
+ $env \
+ $published_ports \
-e S3_ENDPOINT_URL=https://storage.googleapis.com \
-e S3_DST_URL=s3://artifacts.opnfv.org/xtesting \
-e HTTP_DST_URL=http://artifacts.opnfv.org/xtesting \
+ -v /home/opnfv/xtesting/.boto:/etc/boto.cfg \
-e TEST_DB_URL=http://testresults.opnfv.org/test/api/v1/results \
-e TEST_DB_EXT_URL=http://testresults.opnfv.org/test/api/v1/results \
+ -e NODE_NAME=$node \
-e BUILD_TAG=$BUILD_TAG \
-v $WORKSPACE/../$JOB_NAME/results:/var/lib/xtesting/results \
- -e DEBUG=$DEBUG \
- -v /home/opnfv/xtesting/.boto:/root/.boto \
$image zip_campaign
- job-template:
name: 'xtesting-{tag}-zip'
parameters:
- - xtesting-slave:
- slave: '{slave}'
+ - xtesting-node:
+ node: '{node}'
- xtesting-build_tag:
build_tag: ''
- - xtesting-DEBUG:
- DEBUG: 'true'
builders:
- xtesting-zip:
- <<: *xtesting-containers
+ <<: *xtesting-run-containers
- project:
- name: 'xtesting-{tag}-zip'
- <<: *xtesting-params
+ name: 'xtesting-zip'
+ <<: *xtesting-opnfv-xtesting-params
+ volumes:
+ env:
+ published_ports:
container: 'xtesting'
+ privileged: 'false'
+ network: bridge
+ uid: 1000
+ gid: 1000
jobs:
- 'xtesting-{tag}-zip'
@@ -267,18 +352,18 @@
triggers:
- timed: '@daily'
parameters:
- - xtesting-slave:
- slave: '{slave}'
+ - xtesting-node:
+ node: '{node}'
- xtesting-build_tag:
build_tag: ''
- - xtesting-DEBUG:
- DEBUG: 'true'
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
properties:
- build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
blocking-jobs:
- - '^xtesting-{tag}-(daily|check|gate)$'
+ - ^xtesting-(pi-)*{tag}-(daily|docker|review)$
+ # yamllint enable rule:indentation
builders:
- multijob:
name: remove former images
@@ -305,88 +390,247 @@
<<: *xtesting-jobs
- name: 'xtesting-opnfv-xtesting-{tag}-sixth-run'
<<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-eighth-run'
+ <<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-nineth-run'
+ <<: *xtesting-jobs
- multijob:
name: dump all campaign data
projects:
- name: 'xtesting-{tag}-zip'
<<: *xtesting-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'xtesting-daily'
+ <<: *xtesting-params
+ jobs:
+ - 'xtesting-{tag}-daily'
+
+- view:
+ name: xtesting
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^xtesting-[a-z0-9.-]+-daily$
+
+- xtesting-build-containers: &xtesting-build-containers
+ name: 'xtesting-build-containers'
+ <<: *xtesting-containers
+ ref_arg: '{ref_arg}'
+ path: '{path}'
+ build_args: '{build_args}'
+ from: '{from}'
+
+- builder:
+ name: xtesting-build-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ build_args=""
+ if [ "{build_args}" != "None" ]; then
+ for i in $(echo {build_args} | tr -d '[]' |sed "s/, / /g" ); \
+ do build_args="--build-arg $i $build_args"; done
+ fi
+ if [ "{ref_arg}" != "None" ]; then
+ build_args="$build_args --build-arg {ref_arg}={ref}"
+ fi
+ cd {path}
+ if [ "{from}" != "None" ]; then
+ sed -i {from} Dockerfile
+ fi
+ sudo docker build $build_args \
+ --pull=false --no-cache --force-rm=true \
+ -t $image .
+
+- scm:
+ name: xtesting-scm
+ scm:
+ - git:
+ url: 'https://gerrit.opnfv.org/gerrit/functest-xtesting'
+ refspec: '+refs/heads/*:refs/remotes/origin/* +refs/changes/*:refs/changes/*'
+ branches:
+ - '{ref}'
+
+- xtesting-dep: &xtesting-dep
+ name: 'xtesting-containers'
+ repo: '{repo}'
+ container: '{container}'
+ port: '{port}'
+ tag: '{dependency}'
- job-template:
- name: 'xtesting-{repo}-{container}-{tag}-gate'
+ name: 'xtesting-{tag}-dep-pull'
parameters:
- - xtesting-slave:
- slave: '{slave}'
+ - xtesting-node:
+ node: '{node}'
+ builders:
+ - xtesting-pull-containers:
+ <<: *xtesting-dep
+
+- xtesting-dep-params: &xtesting-dep-params
+ name: 'xtesting-dep-params'
+ repo: '_'
+ container: 'alpine'
+ port:
+ tag:
+ - latest:
+ dependency: 3.17
+ - 2023.2:
+ dependency: 3.17
+ - 2023.1:
+ dependency: 3.17
+ - zed:
+ dependency: 3.16
+ - yoga:
+ dependency: 3.16
+ - xena:
+ dependency: 3.14
+ - wallaby:
+ dependency: 3.13
+
+- project:
+ name: 'xtesting-dep-pull'
+ <<: *xtesting-dep-params
+ jobs:
+ - 'xtesting-{tag}-dep-pull'
+
+- job-template:
+ name: 'xtesting-{tag}-dep-rmi'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ builders:
+ - xtesting-remove-images:
+ <<: *xtesting-dep
+
+- project:
+ name: 'xtesting-dep-rmi'
+ <<: *xtesting-dep-params
+ jobs:
+ - 'xtesting-{tag}-dep-rmi'
+
+- builder:
+ name: xtesting-tox
+ builders:
+ - shell: |
+ set +x
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install software-properties-common gpg -y
+ sudo add-apt-repository -y ppa:deadsnakes/ppa
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install \
+ libxml2-dev libxslt-dev libffi-dev libjpeg-dev \
+ python3.10 python3.10-dev python3.10-distutils \
+ python3.9 python3.9-dev python3.9-distutils \
+ python3.8 python3.8-dev python3.8-distutils \
+ python3-pip enchant-2 -y
+ sudo pip3 install tox
+
+ tox
+
+- job-template:
+ name: 'xtesting-{tag}-tox'
scm:
- xtesting-scm:
ref: $GERRIT_REFSPEC
+ triggers:
+ - xtesting-patchset-created:
+ branch: '{branch}'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
builders:
- - xtesting-build-containers:
- <<: *xtesting-build-containers
- ref: $GERRIT_REFSPEC
+ - xtesting-tox:
+
+- project:
+ name: xtesting-tox
+ <<: *xtesting-params
+ jobs:
+ - 'xtesting-{tag}-tox'
- job-template:
- name: 'xtesting-{repo}-{container}-{tag}-check'
+ name: 'xtesting-opnfv-xtesting-{tag}-gate'
parameters:
- - xtesting-slave:
- slave: '{slave}'
- - xtesting-branch:
- branch: '{branch}'
+ - xtesting-node:
+ node: '{node}'
scm:
- xtesting-scm:
- ref: $branch
+ ref: $GERRIT_REFSPEC
builders:
- xtesting-build-containers:
<<: *xtesting-build-containers
- ref: $branch
-
-- project:
- name: 'xtesting-_-alpine-3.11-rmi'
- repo: _
- port:
- container: alpine
- tag: '3.11'
- slave: master
- jobs:
- - 'xtesting-{repo}-{container}-{tag}-rmi'
-
-- project:
- name: 'xtesting-_-alpine-3.11-pull'
- repo: _
- port:
- container: alpine
- tag: '3.11'
- slave: master
- jobs:
- - 'xtesting-{repo}-{container}-{tag}-pull'
+ ref: $GERRIT_REFSPEC
+ build_args: '{build_args}'
- project:
- name: xtesting-opnfv-xtesting-{tag}-build
+ name: xtesting-opnfv-xtesting-gate
<<: *xtesting-params
+ repo: opnfv
+ port:
container: xtesting
ref_arg: BRANCH
- path: docker
+ path: docker/core
jobs:
- - 'xtesting-{repo}-{container}-{tag}-gate'
- - 'xtesting-{repo}-{container}-{tag}-check'
+ - 'xtesting-opnfv-xtesting-{tag}-gate'
+
+- trigger:
+ name: xtesting-patchset-created
+ triggers:
+ - gerrit:
+ trigger-on:
+ - patchset-created-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'functest-xtesting'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
- job-template:
- name: 'xtesting-{tag}-check'
+ name: 'xtesting-{tag}-review'
project-type: multijob
+ triggers:
+ - xtesting-patchset-created:
+ branch: '{branch}'
parameters:
- - xtesting-slave:
- slave: '{slave}'
+ - xtesting-node:
+ node: '{node}'
- xtesting-build_tag:
build_tag: ''
- - xtesting-branch:
- branch: '{branch}'
- - xtesting-DEBUG:
- DEBUG: 'true'
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
properties:
- build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
blocking-jobs:
- - '^xtesting-{tag}-(daily|check|gate)$'
+ - ^xtesting-(pi-)*{tag}-(daily|docker|review)$
+ # yamllint enable rule:indentation
builders:
- multijob:
name: remove former images
@@ -394,19 +638,19 @@
- name: 'xtesting-opnfv-xtesting-{tag}-rmi'
<<: *xtesting-jobs
- multijob:
- name: remove dependencies
+ name: remove dependency
projects:
- - name: 'xtesting-_-alpine-3.11-rmi'
+ - name: 'xtesting-{tag}-dep-rmi'
<<: *xtesting-jobs
- multijob:
- name: pull dependencies
+ name: pull dependency
projects:
- - name: 'xtesting-_-alpine-3.11-pull'
+ - name: 'xtesting-{tag}-dep-pull'
<<: *xtesting-jobs
- multijob:
name: opnfv/xtesting
projects:
- - name: 'xtesting-opnfv-xtesting-{tag}-check'
+ - name: 'xtesting-opnfv-xtesting-{tag}-gate'
<<: *xtesting-jobs
- multijob:
name: opnfv/xtesting:{tag}
@@ -423,96 +667,314 @@
<<: *xtesting-jobs
- name: 'xtesting-opnfv-xtesting-{tag}-sixth-run'
<<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-eighth-run'
+ <<: *xtesting-jobs
+ - name: 'xtesting-opnfv-xtesting-{tag}-nineth-run'
+ <<: *xtesting-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'xtesting-review'
+ <<: *xtesting-params
+ jobs:
+ - 'xtesting-{tag}-review'
+
+- view:
+ name: xtesting-review
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^xtesting-[a-z0-9.-]+-review$
+
+- view:
+ name: xtesting-tox
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^xtesting-[a-z0-9.-]+-tox$
+
+- builder:
+ name: xtesting-push-containers
+ builders:
+ - shell: |
+ set +x
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker push $image
- trigger:
- name: xtesting-patchset-created
+ name: xtesting-commit
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'functest-xtesting'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- skip-vote:
- successful: false
- failed: false
- unstable: false
- notbuilt: false
+ - pollscm:
+ cron: "*/30 * * * *"
+
+- job-template:
+ name: 'xtesting-opnfv-xtesting-{tag}-build'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ scm:
+ - xtesting-scm:
+ ref: '{branch}'
+ builders:
+ - xtesting-build-containers:
+ <<: *xtesting-build-containers
+ ref: '{branch}'
+ build_args: '{build_args}'
+ - xtesting-push-containers:
+ <<: *xtesting-build-containers
+ ref: '{branch}'
+
+- project:
+ name: xtesting-opnfv-xtesting-build
+ <<: *xtesting-params
+ repo: opnfv
+ container: xtesting
+ port:
+ ref_arg: BRANCH
+ path: docker/core
+ jobs:
+ - 'xtesting-opnfv-xtesting-{tag}-build'
- job-template:
- name: 'xtesting-{tag}-gate'
+ name: 'xtesting-{tag}-docker'
project-type: multijob
triggers:
- - xtesting-patchset-created:
- branch: '{branch}'
+ - xtesting-commit
+ scm:
+ - xtesting-scm:
+ ref: '{branch}'
parameters:
- - xtesting-slave:
- slave: '{slave}'
- - xtesting-build_tag:
- build_tag: ''
- - xtesting-DEBUG:
- DEBUG: 'true'
+ - xtesting-node:
+ node: '{node}'
+ # PyYAML and yamllint differ here
+ # see https://github.com/yaml/pyyaml/issues/234
+ # yamllint disable rule:indentation
properties:
- build-blocker:
- use-build-blocker: true
- blocking-level: 'NODE'
blocking-jobs:
- - '^xtesting-{tag}-(daily|check|gate)$'
+ - ^xtesting-(pi-)*{tag}-(daily|docker|review)$
+ # yamllint enable rule:indentation
builders:
- multijob:
- name: remove former images
- projects:
- - name: 'xtesting-opnfv-xtesting-{tag}-rmi'
- <<: *xtesting-jobs
- - multijob:
- name: remove dependencies
+ name: remove dependency
projects:
- - name: 'xtesting-_-alpine-3.11-rmi'
+ - name: 'xtesting-{tag}-dep-rmi'
<<: *xtesting-jobs
- multijob:
- name: pull dependencies
+ name: pull dependency
projects:
- - name: 'xtesting-_-alpine-3.11-pull'
+ - name: 'xtesting-{tag}-dep-pull'
<<: *xtesting-jobs
- multijob:
name: opnfv/xtesting
projects:
- - name: 'xtesting-opnfv-xtesting-{tag}-gate'
- <<: *xtesting-jobs
- - multijob:
- name: opnfv/xtesting:{tag}
- projects:
- - name: 'xtesting-opnfv-xtesting-{tag}-first-run'
- <<: *xtesting-jobs
- - name: 'xtesting-opnfv-xtesting-{tag}-second-run'
- <<: *xtesting-jobs
- - name: 'xtesting-opnfv-xtesting-{tag}-third-run'
- <<: *xtesting-jobs
- - name: 'xtesting-opnfv-xtesting-{tag}-fourth-run'
- <<: *xtesting-jobs
- - name: 'xtesting-opnfv-xtesting-{tag}-fifth-run'
- <<: *xtesting-jobs
- - name: 'xtesting-opnfv-xtesting-{tag}-sixth-run'
+ - name: 'xtesting-opnfv-xtesting-{tag}-build'
<<: *xtesting-jobs
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
- project:
- name: 'xtesting'
+ name: 'xtesting-docker'
<<: *xtesting-params
jobs:
- - 'xtesting-{tag}-daily'
- - 'xtesting-{tag}-check'
- - 'xtesting-{tag}-gate'
+ - 'xtesting-{tag}-docker'
+
+- builder:
+ name: xtesting-trivy
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sudo sh -s -- -b .
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ ./trivy image --exit-code 1 $image
+
+- job-template:
+ name: 'xtesting-opnfv-xtesting-{tag}-trivy'
+ triggers:
+ - timed: '@daily'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ builders:
+ - xtesting-trivy:
+ <<: *xtesting-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'xtesting-opnfv-xtesting-trivy'
+ <<: *xtesting-params
+ repo: 'opnfv'
+ container: 'xtesting'
+ port:
+ jobs:
+ - 'xtesting-opnfv-xtesting-{tag}-trivy'
+
+- builder:
+ name: xtesting-grype
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sudo sh -s -- -b .
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ ./grype -q $image
+
+- job-template:
+ name: 'xtesting-opnfv-xtesting-{tag}-grype'
+ triggers:
+ - timed: '@daily'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ builders:
+ - xtesting-grype:
+ <<: *xtesting-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'xtesting-opnfv-xtesting-grype'
+ <<: *xtesting-params
+ repo: 'opnfv'
+ container: 'xtesting'
+ port:
+ jobs:
+ - 'xtesting-opnfv-xtesting-{tag}-grype'
+
+- builder:
+ name: xtesting-sbom
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install curl -y
+
+ sudo mkdir -p ~/.docker
+ curl -sSfL https://raw.githubusercontent.com/docker/sbom-cli-plugin/main/install.sh | sudo sh -s --
+ if [ "{repo}" = "_" ]; then
+ image={container}:{tag}
+ elif [ "{port}" = "None" ]; then
+ image={repo}/{container}:{tag}
+ else
+ image={repo}:{port}/{container}:{tag}
+ fi
+ sudo docker sbom $image
+
+- job-template:
+ name: 'xtesting-opnfv-xtesting-{tag}-sbom'
+ triggers:
+ - timed: '@daily'
+ parameters:
+ - xtesting-node:
+ node: '{node}'
+ builders:
+ - xtesting-sbom:
+ <<: *xtesting-containers
+ publishers:
+ - email-ext:
+ failure: false
+ first-failure: true
+ fixed: true
+ recipients: cedric.ollivier@orange.com
+
+- project:
+ name: 'xtesting-opnfv-xtesting-sbom'
+ <<: *xtesting-params
+ repo: 'opnfv'
+ container: 'xtesting'
+ port:
+ jobs:
+ - 'xtesting-opnfv-xtesting-{tag}-sbom'
- view:
- name: xtesting
+ name: xtesting-docker
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^xtesting-[a-z0-9.-]+-docker$
+
+- view:
+ name: xtesting-trivy
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: (?!xtesting-pi)^xtesting-[a-z-0-9.]+-trivy$
+
+- view:
+ name: xtesting-grype
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: (?!xtesting-pi)^xtesting-[a-z-0-9.]+-grype$
+
+- view:
+ name: xtesting-sbom
view-type: list
columns:
- status
@@ -521,4 +983,4 @@
- last-success
- last-failure
- last-duration
- regex: ^xtesting-[a-z]+-(daily|check|gate)$
+ regex: (?!xtesting-pi)^xtesting-[a-z-0-9.]+-sbom$
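
Nearly every builder in this file derives the image reference from the repo, port, container and tag parameters with the same three-way test. Pulled out on its own, with illustrative values, it is simply:

    # "_" selects an official library image, a port of "None" selects Docker Hub,
    # anything else is treated as a private registry host:port (hypothetical below).
    repo=opnfv; port=None; container=xtesting; tag=latest
    if [ "$repo" = "_" ]; then
        image=$container:$tag               # e.g. alpine:3.17
    elif [ "$port" = "None" ]; then
        image=$repo/$container:$tag         # e.g. opnfv/xtesting:latest
    else
        image=$repo:$port/$container:$tag   # e.g. registry.example:5000/xtesting:latest
    fi
    echo "$image"                           # -> opnfv/xtesting:latest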
diff --git a/jjb/global-jjb/jjb b/jjb/global-jjb/jjb
deleted file mode 120000
index 23c884f04..000000000
--- a/jjb/global-jjb/jjb
+++ /dev/null
@@ -1 +0,0 @@
-../../global-jjb/jjb \ No newline at end of file
diff --git a/jjb/global-jjb/shell b/jjb/global-jjb/shell
deleted file mode 120000
index d37c43f86..000000000
--- a/jjb/global-jjb/shell
+++ /dev/null
@@ -1 +0,0 @@
-../../global-jjb/shell \ No newline at end of file
diff --git a/jjb/global/basic-jobs.yaml b/jjb/global/basic-jobs.yaml
deleted file mode 100644
index d2c271105..000000000
--- a/jjb/global/basic-jobs.yaml
+++ /dev/null
@@ -1,46 +0,0 @@
----
-##
-# Basic Job Config
-#
-# This is used for project which don't have any jobs of substance
-# defined yet, but still need 'Verified+1'.
-##
-- job-group:
- name: '{project}-verify-basic'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - gambia:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
- jobs:
- - '{project}-verify-{stream}'
-
-- job-template:
- name: '{project}-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit-trigger-patchset-created:
- project: '{project}'
- branch: '{branch}'
- files: 'docs/**|.gitignore'
-
- builders:
- - shell: |
- echo "Nothing to verify!"
diff --git a/jjb/global/installer-params.yaml b/jjb/global/installer-params.yaml
deleted file mode 100644
index 163d9152b..000000000
--- a/jjb/global/installer-params.yaml
+++ /dev/null
@@ -1,140 +0,0 @@
----
-- parameter:
- name: 'apex-defaults'
- parameters:
- - string:
- name: INSTALLER_IP
- default: '192.168.X.X'
- description: 'IP of the installer'
- - string:
- name: INSTALLER_TYPE
- default: apex
- description: 'Installer used for deploying OPNFV on this POD'
- - string:
- name: EXTERNAL_NETWORK
- default: 'external'
- description: 'external network for test'
-
-- parameter:
- name: 'fuel-defaults'
- parameters:
- - string:
- name: INSTALLER_IP
- default: '10.20.0.2'
- description: 'IP of the installer'
- - string:
- name: SSH_KEY
- default: "/var/lib/opnfv/mcp.rsa"
- description: 'Path to private SSH key to access environment nodes'
- - string:
- name: INSTALLER_TYPE
- default: fuel
- description: 'Installer used for deploying OPNFV on this POD'
- - string:
- name: EXTERNAL_NETWORK
- default: 'floating_net'
- description: 'external network for test'
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "URL to Google Storage."
- - string:
- name: CI_DEBUG
- default: 'false'
- description: "Show debug output information"
-
-- parameter:
- name: 'joid-defaults'
- parameters:
- - string:
- name: INSTALLER_IP
- default: '192.168.122.5'
- description: 'IP of the installer'
- - string:
- name: INSTALLER_TYPE
- default: joid
- description: 'Installer used for deploying OPNFV on this POD'
- - string:
- name: MODEL
- default: 'os'
- description: 'Model to deploy (os|k8)'
- - string:
- name: OS_RELEASE
- default: 'pike'
- description: 'OpenStack release (mitaka|ocata|pike)'
- - string:
- name: EXTERNAL_NETWORK
- default: ext-net
- description: "External network used for Floating ips."
- - string:
- name: LAB_CONFIG
- default: "$HOME/joid_config"
- description: "Local lab config and Openstack openrc location"
- - string:
- name: MAAS_REINSTALL
- default: 'false'
- description: "Reinstall MAAS and Bootstrap before deploy [true/false]"
- - string:
- name: UBUNTU_DISTRO
- default: 'xenial'
- description: "Ubuntu distribution to use for Openstack (xenial)"
- - string:
- name: CPU_ARCHITECTURE
- default: 'amd64'
- description: "CPU Architecture to use for Ubuntu distro "
-
-- parameter:
- name: 'daisy-defaults'
- parameters:
- - string:
- name: INSTALLER_IP
- default: '10.20.7.3'
- description: 'IP of the installer'
- - string:
- name: INSTALLER_TYPE
- default: daisy
- description: 'Installer used for deploying OPNFV on this POD'
- - string:
- name: BRIDGE
- default: 'br7'
- description: 'pxe bridge for booting of Daisy master'
- - string:
- name: EXTERNAL_NETWORK
- default: 'admin_external'
- description: 'external network for test'
-
-- parameter:
- name: 'infra-defaults'
- parameters:
- - string:
- name: INSTALLER_IP
- default: '192.168.122.2'
- description: 'IP of the installer'
- - string:
- name: INSTALLER_TYPE
- default: infra
- description: 'Installer used for deploying OPNFV on this POD'
-
-- parameter:
- name: 'netvirt-defaults'
- parameters:
- - string:
- name: INSTALLER_IP
- default: '192.168.X.X'
- description: 'IP of the installer'
- - string:
- name: INSTALLER_TYPE
- default: apex
- description: 'Installer used for deploying OPNFV on this POD'
- - string:
- name: EXTERNAL_NETWORK
- default: 'external'
- description: 'external network for test'
-
-- parameter:
- name: 'deploy-scenario'
- parameters:
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
- description: "OPNFV deployment scenario"
diff --git a/jjb/global/installer-report.sh b/jjb/global/installer-report.sh
deleted file mode 100755
index 6cd83f1f1..000000000
--- a/jjb/global/installer-report.sh
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/bin/bash
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-source $WORKSPACE/installer_track.sh
-
-gen_content()
-{
- cat <<EOF
-{
- "installer": "$INSTALLER",
- "version": "$INSTALLER_VERSION",
- "pod_name": "$NODE_NAME",
- "job_name": "$JOB_NAME",
- "build_id": "$BUILD_ID",
- "scenario": "$DEPLOY_SCENARIO",
- "upstream_job_name": "$UPSTREAM_JOB_NAME",
- "upstream_build_id":"$UPSTREAM_BUILD_ID",
- "criteria": "$PROVISION_RESULT",
- "start_date": "$TIMESTAMP_START",
- "stop_date": "$TIMESTAMP_END",
- "details":""
-}
-EOF
-}
-
-echo "Installer: $INSTALLER provision result: $PROVISION_RESULT"
-echo $(gen_content)
-
-set -o xtrace
-curl -H "Content-Type: application/json" -X POST -v -d "$(gen_content)" \
- $TESTAPI_URL/deployresults || true
-
-# INFO
-# postbuildscript plugin shall always return the original job running status,
-# for the result returned from postbuildscript affects the CI pipeline.
-if [ "$PROVISION_RESULT" == "PASS" ]; then
- exit 0
-else
- exit 1
-fi
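
The removed report script serialises the provisioning outcome as JSON and posts it to the TestAPI deployresults endpoint. Stripped of the wrapper, the call is roughly the following; every field value below is a placeholder, and TESTAPI_URL is assumed to be exported by the surrounding job:

    # Minimal reproduction of the removed reporting step (placeholder values only).
    payload='{"installer": "fuel", "scenario": "os-nosdn-nofeature-ha",
              "pod_name": "example-pod", "job_name": "example-job", "build_id": "1",
              "criteria": "PASS", "details": ""}'
    curl -H "Content-Type: application/json" -X POST -d "$payload" \
        "$TESTAPI_URL/deployresults" || true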
diff --git a/jjb/global/releng-macros.yaml b/jjb/global/releng-macros.yaml
index 059903531..ea48cc80d 100644
--- a/jjb/global/releng-macros.yaml
+++ b/jjb/global/releng-macros.yaml
@@ -358,7 +358,8 @@
echo
cat gerrit_comment.txt
echo
- ssh -p 29418 gerrit.opnfv.org \
+ ssh -o 'PubkeyAcceptedKeyTypes +ssh-rsa' \
+ -p 29418 fbot@gerrit.opnfv.org \
"gerrit review -p $GERRIT_PROJECT \
-m '$(cat gerrit_comment.txt)' \
$GERRIT_PATCHSET_REVISION \
@@ -399,93 +400,6 @@
- report-build-result-to-gerrit
- builder:
- name: lint-init
- builders:
- - shell: |
- #!/bin/bash
- # Ensure we start with a clean environment
- rm -f bash-violation.log python-violation.log yaml-violation.log violation.log
- git --no-pager diff --diff-filter=MCRAT --name-only HEAD^1 > modified_files
-
-- builder:
- name: lint-report
- builders:
- - shell: |
- #!/bin/bash
- if [[ -s violation.log ]]; then
- cat violation.log
- echo "Reporting lint result...."
- set -x
- msg="Found syntax error and/or coding style violation(s) in the files modified by your patchset."
- sed -i -e "1s#^#${msg}\n\n#" violation.log
- cmd="gerrit review -p $GERRIT_PROJECT -m \"$(cat violation.log)\" $GERRIT_PATCHSET_REVISION --notify NONE"
- ssh -p 29418 gerrit.opnfv.org "$cmd"
-
- # Make sure the caller job failed
- exit 1
- fi
-
-- builder:
- name: lint-bash-code
- builders:
- - shell: |
- #!/bin/bash
- echo "Checking bash code..."
- for f in $(egrep '\.sh$' modified_files)
- do
- bash -n "$f" 2>> bash-violation.log
- done
- if [[ -s bash-violation.log ]]; then
- echo -e "Bash syntax error(s)\n---" >> violation.log
- sed -e 's/^/ /g' bash-violation.log >> violation.log
- fi
-
-- builder:
- name: lint-python-code
- builders:
- - shell: |
- #!/bin/bash
- # Install python package
- sudo -H pip install "flake8==2.6.2"
-
- echo "Checking python code..."
- for f in $(egrep '\.py$' modified_files)
- do
- flake8 "$f" >> python-violation.log
- done
- if [[ -s python-violation.log ]]; then
- echo -e "Python violation(s)\n---" >> violation.log
- sed -e 's/^/ /g' python-violation.log >> violation.log
- fi
-
-- builder:
- name: lint-yaml-code
- builders:
- - shell: |
- #!/bin/bash
- # sudo Install python packages
- sudo -H pip install "yamllint==1.8.2"
-
- echo "Checking yaml file..."
- for f in $(egrep '\.ya?ml$' modified_files)
- do
- yamllint "$f" >> yaml-violation.log
- done
- if [[ -s yaml-violation.log ]]; then
- echo -e "YAML violation(s)\n---" >> violation.log
- sed -e 's/^/ /g' yaml-violation.log >> violation.log
- fi
-
-- builder:
- name: lint-all-code
- builders:
- - lint-init
- - lint-bash-code
- - lint-python-code
- - lint-yaml-code
- - lint-report
-
-- builder:
name: clean-workspace
builders:
- shell: |
@@ -549,460 +463,3 @@
failure: true
send-to:
- recipients
-
-# Email PTL publishers
-- email_ptl_defaults: &email_ptl_defaults
- name: 'email_ptl_defaults'
- content-type: text
- attach-build-log: true
- attachments: '*.log'
- compress-log: true
- always: true
- subject: '{subject}'
-
-- publisher: &email_apex_ptl_defaults
- name: 'email-apex-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- trozet@redhat.com
-- publisher:
- name: 'email-apex-os-net-config-ptl'
- <<: *email_apex_ptl_defaults
-- publisher:
- name: 'email-apex-puppet-tripleo-ptl'
- <<: *email_apex_ptl_defaults
-- publisher:
- name: 'email-apex-tripleo-heat-templates-ptl'
- <<: *email_apex_ptl_defaults
-
-- publisher:
- name: 'email-armband-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- bob.monkman@arm.com
-
-- publisher:
- name: 'email-auto-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- tina.tsou@arm.com
-
-- publisher:
- name: 'email-availability-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- fuqiao@chinamobile.com
-
-- publisher:
- name: 'email-bamboo-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- donaldh@cisco.com
-
-- publisher:
- name: 'email-barometer-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- aasmith@redhat.com
-
-- publisher:
- name: 'email-bottlenecks-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- gabriel.yuyang@huawei.com
-
-- publisher:
- name: 'email-calipso-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- korlev@cisco.com
-
-- publisher:
- name: 'email-clover-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- stephen.wong1@huawei.com
-
-- publisher:
- name: 'email-container4nfv-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- jiaxuan@chinamobile.com
-
-- publisher:
- name: 'email-cperf-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- matt.welch@intel.com
-
-- publisher:
- name: 'email-daisy-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- hu.zhijiang@zte.com.cn
-
-- publisher:
- name: 'email-doctor-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- r-mibu@cq.jp.nec.com
-
-- publisher:
- name: 'email-domino-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- ulas.kozat@huawei.com
-
-- publisher:
- name: 'email-dovetail-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- georg.kunz@ericsson.com
-
-- publisher:
- name: 'email-dpacc-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- denglingli@chinamobile.com
-
-- publisher:
- name: 'email-enfv-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- JBuchanan@advaoptical.com
-
-- publisher:
- name: 'email-fds-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- fbrockne@cisco.com
-
-- publisher:
- name: 'email-fuel-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- Alexandru.Avadanii@enea.com
-
-- publisher:
- name: 'email-functest-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- cedric.ollivier@orange.com
-
-- publisher:
- name: 'email-ipv6-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- bh526r@att.com
-
-- publisher:
- name: 'email-joid-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- artur.tyloch@canonical.com
-
-- publisher:
- name: 'email-kvmfornfv-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- raghuveer.reddy@intel.com
-
-- publisher:
- name: 'email-models-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- bs3131@att.com
-
-- publisher:
- name: 'email-moon-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- ruan.he@orange.com
-
-- publisher:
- name: 'email-nfvbench-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- ahothan@cisco.com
-
-- publisher:
- name: 'email-onosfw-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- su.wei@huawei.com
-
-- publisher:
- name: 'email-opera-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- Yingjun.li@huawei.com
-
-- publisher:
- name: 'email-opnfvdocs-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- sofia.wallin@ericsson.com
-
-- publisher:
- name: 'email-orchestra-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- giuseppe.carella@fokus.fraunhofer.de
-
-- publisher:
- name: 'email-ovn4nfv-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- trinath.somanchi@gmail.com
-
-- publisher:
- name: 'email-ovno-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- wsmackie@juniper.net
-
-- publisher:
- name: 'email-ovsnfv-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- MarkD.Graymark.d.gray@intel.com
-
-- publisher:
- name: 'email-parser-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- shang.xiaodong@zte.com.cn
-
-- publisher: &email_pharos_ptl_defaults
- name: 'email-pharos-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- jack@jento.io
-
-- publisher: &email_laas_ptl_defaults
- name: 'email-laas-ptl'
- publishers:
- - email-ext:
- <<: *email_laas_ptl_defaults
- recipients: >
- pberberian@iol.unh.edu
-
-- publisher:
- name: 'email-qtip-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- wu.zhihui1@zte.com.cn
-
-- publisher: &email_releng_ptl_defaults
- name: 'email-releng-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- tbramwell@linuxfoundation.org
-- publisher:
- name: 'email-releng-anteater-ptl'
- <<: *email_releng_ptl_defaults
-- publisher:
- name: 'email-releng-testresults-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- fatih.degirmenci@ericsson.com
- feng.xiaowei@zte.com.cn
-- publisher:
- name: 'email-releng-utils-ptl'
- <<: *email_releng_ptl_defaults
-- publisher:
- name: 'email-releng-xci-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- fatih.degirmenci@ericsson.com
-
-- publisher:
- name: 'email-samplevnf-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- deepak.s@intel.com
-
-- publisher:
- name: 'email-sdnvpn-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- tim.irnich@ericsson.com
-
-- publisher:
- name: 'email-securityscanning-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- lhinds@redhat.com
-
-- publisher:
- name: 'email-sfc-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- mbuil@suse.com
-
-- publisher:
- name: 'email-snaps-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- s.pisarski@cablelabs.com
-
-- publisher:
- name: 'email-stor4nfv-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- shane.wang@intel.com
-
-- publisher:
- name: 'email-storperf-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- mark.beierl@emc.com
-
-- publisher:
- name: 'email-ves-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- bryan.sullivan@att.com
-
-- publisher:
- name: 'email-vswitchperf-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- sridhar.rao@spirent.com
-
-- publisher:
- name: 'email-yardstick-ptl'
- publishers:
- - email-ext:
- <<: *email_ptl_defaults
- recipients: >
- ross.b.brattain@intel.com
-
-- publisher:
- name: 'report-provision-result'
- publishers:
- - postbuildscript:
- builders:
- - role: BOTH
- build-on:
- - SUCCESS
- build-steps:
- - shell: |
- echo "export PROVISION_RESULT=PASS" >> $WORKSPACE/installer_track.sh
- echo "export INSTALLER=$INSTALLER_TYPE" >> $WORKSPACE/installer_track.sh
- echo "export TIMESTAMP_END="\'`date '+%Y-%m-%d %H:%M:%S.%3N'`\' >> $WORKSPACE/installer_track.sh
- - shell:
- !include-raw: installer-report.sh
- mark-unstable-if-failed: true
- - postbuildscript:
- builders:
- - role: BOTH
- build-on:
- - ABORTED
- - FAILURE
- - NOT_BUILT
- - UNSTABLE
- build-steps:
- - shell: |
- echo "export PROVISION_RESULT=FAIL" >> $WORKSPACE/installer_track.sh
- echo "export INSTALLER=$INSTALLER_TYPE" >> $WORKSPACE/installer_track.sh
- echo "export TIMESTAMP_END="\'`date '+%Y-%m-%d %H:%M:%S.%3N'`\' >> $WORKSPACE/installer_track.sh
- - shell:
- !include-raw: installer-report.sh
- mark-unstable-if-failed: true
diff --git a/jjb/global/slave-params.yaml b/jjb/global/slave-params.yaml
index b597b30e0..7d0996bb6 100644
--- a/jjb/global/slave-params.yaml
+++ b/jjb/global/slave-params.yaml
@@ -1,561 +1,4 @@
---
-#####################################################
-# Parameters for slaves using old labels
-# This will be cleaned up once the new job structure and
-# use of the new labels are in place
-#####################################################
-- parameter:
- name: 'apex-baremetal-master-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-baremetal-master'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'apex-baremetal-hunter-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-baremetal-master'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'apex-baremetal-gambia-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-baremetal-master'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'apex-baremetal-fraser-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-baremetal-master'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'apex-baremetal-euphrates-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-baremetal-master'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'apex-baremetal-danube-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-baremetal-danube'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-
-- parameter:
- name: 'apex-virtual-master-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-virtual-master'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'apex-virtual-hunter-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-virtual-master'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'apex-virtual-gambia-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-virtual-master'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'apex-virtual-fraser-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-virtual-master'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'apex-virtual-euphrates-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-virtual-master'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'apex-virtual-danube-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-virtual-danube'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'lf-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - lf-pod1
- default-slaves:
- - lf-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'lf-pod3-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - lf-pod3
- default-slaves:
- - lf-pod3
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'lf-pod4-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - lf-pod4
- default-slaves:
- - lf-pod4
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'lf-pod5-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - lf-pod5
- default-slaves:
- - lf-pod5
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-#####################################################
-# Parameters for CI baremetal PODs
-#####################################################
-- parameter:
- name: 'apex-baremetal-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-baremetal'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'fuel-baremetal-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'fuel-baremetal'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'armband-baremetal-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'armband-baremetal'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'auto-baremetal-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'auto-baremetal'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'joid-baremetal-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'joid-baremetal'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: EXTERNAL_NETWORK
- default: ext-net
- description: "External network floating ips"
-
-- parameter:
- name: 'daisy-baremetal-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-pod2
- default-slaves:
- - zte-pod2
- - label:
- name: SLAVE_LABEL
- default: 'daisy-baremetal'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: INSTALLER_IP
- default: '10.20.7.3'
- description: 'IP of the installer'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-#####################################################
-# Parameters for CI virtual PODs
-#####################################################
-- parameter:
- name: 'apex-virtual-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'apex-virtual'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'fuel-virtual-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'fuel-virtual'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'armband-virtual-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'armband-virtual'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'joid-virtual-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'joid-virtual'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'daisy-virtual-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-virtual1
- - zte-virtual2
- default-slaves:
- - zte-virtual1
- - label:
- name: SLAVE_LABEL
- default: 'daisy-virtual'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: INSTALLER_IP
- default: '10.20.11.2'
- description: 'IP of the installer'
- - string:
- name: BRIDGE
- default: 'daisy1'
- description: 'pxe bridge for booting of Daisy master'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'functest-virtual-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'functest-virtual'
- all-nodes: false
- node-eligibility: 'ignore-offline'
-
-#####################################################
-# Parameters for build slaves
-#####################################################
-- parameter:
- name: 'opnfv-build-centos-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build-centos'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
-
-- parameter:
- name: 'opnfv-build-ubuntu-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'opnfv-build-ubuntu'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
-
-- parameter:
- name: 'lf-virtual1-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'lf-virtual1'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
-
-- parameter:
- name: 'laas-dashboard-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'laas-dashboard'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
-
- parameter:
name: 'opnfv-build-defaults'
parameters:
@@ -575,76 +18,11 @@
description: "Directory where the build artifact will be located upon the completion of the build."
- parameter:
- name: 'lf-build2-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'lf-build2'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
-
-- parameter:
- name: 'ericsson-build3-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'ericsson-build3'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'ericsson-build4-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'ericsson-build4'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
-
-- parameter:
- name: 'huawei-build-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-build
- default-slaves:
- - huawei-build
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'opnfv-build-ubuntu-arm-defaults'
+ name: 'opnfv-build-ubuntu-defaults'
parameters:
- label:
name: SLAVE_LABEL
- default: 'opnfv-build-ubuntu-arm'
+ default: 'anuket-build'
description: 'Slave label on Jenkins'
all-nodes: false
node-eligibility: 'ignore-offline'
@@ -657,88 +35,6 @@
default: $WORKSPACE/build_output
description: "Directory where the build artifact will be located upon the completion of the build."
-#####################################################
-# Parameters for none-CI PODs
-#####################################################
-- parameter:
- name: 'cengn-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - cengn-pod1
- default-slaves:
- - cengn-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'intel-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod1
- default-slaves:
- - intel-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'intel-pod2-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod2
- default-slaves:
- - intel-pod2
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'intel-pod9-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod9
- default-slaves:
- - intel-pod9
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'intel-pod10-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod10
- default-slaves:
- - intel-pod10
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- parameter:
name: 'intel-pod12-defaults'
parameters:
@@ -753,568 +49,3 @@
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'intel-pod18-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod18
- default-slaves:
- - intel-pod18
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'itri-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - itri-pod1
- default-slaves:
- - itri-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: INSTALLER_IP
- default: '10.20.0.2'
- description: 'IP of the installer'
-
-- parameter:
- name: 'huawei-pod3-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-pod3
- default-slaves:
- - huawei-pod3
- - label:
- name: SLAVE_LABEL
- default: 'huawei-test'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'huawei-pod4-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-pod4
- default-slaves:
- - huawei-pod4
- - label:
- name: SLAVE_LABEL
- default: 'huawei-test'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'intel-pod8-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod8
- default-slaves:
- - intel-pod8
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-
-- parameter:
- name: 'intel-pod17-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod17
- default-slaves:
- - intel-pod17
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-
-- parameter:
- name: 'huawei-virtual5-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'huawei-virtual5'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-
-- parameter:
- name: 'huawei-virtual7-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-virtual7
- default-slaves:
- - huawei-virtual7
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-
-- parameter:
- name: 'huawei-pod7-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-pod7
- default-slaves:
- - huawei-pod7
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-
-- parameter:
- name: 'zte-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-pod1
- default-slaves:
- - zte-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: INSTALLER_IP
- default: '10.20.6.2'
- description: 'IP of the installer'
- - string:
- name: BRIDGE
- default: 'br6'
- description: 'pxe bridge for booting of Fuel master'
-
-- parameter:
- name: 'zte-pod2-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-pod2
- default-slaves:
- - zte-pod2
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: INSTALLER_IP
- default: '10.20.7.3'
- description: 'IP of the installer'
- - string:
- name: BRIDGE
- default: 'br7'
- description: 'pxe bridge for booting of Daisy master'
-
-- parameter:
- name: 'zte-pod3-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-pod3
- default-slaves:
- - zte-pod3
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: INSTALLER_IP
- default: '10.20.0.2'
- description: 'IP of the installer'
- - string:
- name: BRIDGE
- default: 'br0'
- description: 'pxe bridge for booting of Daisy master'
-
-- parameter:
- name: 'zte-pod9-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-pod9
- default-slaves:
- - zte-pod9
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: INSTALLER_IP
- default: '10.20.7.2'
- description: 'IP of the installer'
- - string:
- name: BRIDGE
- default: 'br0'
- description: 'pxe bridge for booting of Daisy master'
-
-- parameter:
- name: zte-virtual5-defaults
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-virtual5
- default-slaves:
- - zte-virtual5
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: zte-virtual6-defaults
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - zte-virtual6
- default-slaves:
- - zte-virtual6
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'juniper-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - juniper-pod1
- default-slaves:
- - juniper-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: CEPH_DISKS
- default: /srv
- description: "Disks to use by ceph (comma separated list)"
-
-- parameter:
- name: 'orange-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - orange-pod1
- default-slaves:
- - orange-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'orange-pod2-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - orange-pod2
- default-slaves:
- - orange-pod2
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'orange-pod5-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - orange-pod5
- default-slaves:
- - orange-pod5
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'dell-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - dell-pod1
- default-slaves:
- - dell-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'dell-pod2-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - dell-pod2
- default-slaves:
- - dell-pod2
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'nokia-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - nokia-pod1
- default-slaves:
- - nokia-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: SSH_KEY
- default: /root/.ssh/id_rsa
- description: 'SSH key to use for Apex'
-
-- parameter:
- name: 'intel-virtual6-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-virtual6
- default-slaves:
- - intel-virtual6
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'intel-virtual10-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-virtual10
- default-slaves:
- - intel-virtual10
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'doctor-slave-parameter'
- parameters:
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'ericsson-virtual5-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'ericsson-virtual5'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://git.opendaylight.org/gerrit/p/$PROJECT.git
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'ericsson-virtual12-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'ericsson-virtual12'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'ericsson-virtual13-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'ericsson-virtual13'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'ericsson-virtual-pod1bl01-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'ericsson-virtual-pod1bl01'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'odl-netvirt-virtual-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'odl-netvirt-virtual'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'odl-netvirt-virtual-intel-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'odl-netvirt-virtual-intel'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-- parameter:
- name: 'flex-pod1-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - flex-pod1
- default-slaves:
- - flex-pod1
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
-#####################################################
-# These slaves are just dummy slaves for sandbox jobs
-#####################################################
-- parameter:
- name: 'sandbox-baremetal-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'sandbox-baremetal'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
-
-- parameter:
- name: 'sandbox-virtual-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'sandbox-virtual'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
-
-- parameter:
- name: 'dummy-pod1-defaults'
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'dummy-pod1'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
diff --git a/jjb/ipv6/ipv6-rtd-jobs.yaml b/jjb/ipv6/ipv6-rtd-jobs.yaml
deleted file mode 100644
index e51173e2c..000000000
--- a/jjb/ipv6/ipv6-rtd-jobs.yaml
+++ /dev/null
@@ -1,24 +0,0 @@
----
-- project:
- name: ipv6-rtd
- project: ipv6
- project-name: ipv6
-
- project-pattern: 'ipv6'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-ipv6/47370/'
- rtd-token: 'e7abb4e2c3f3f0dfc1a8feefe39b27f4a4f9b98a'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - hunter: &hunter
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- - gambia:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/ipv6/ipv6-views.yaml b/jjb/ipv6/ipv6-views.yaml
deleted file mode 100644
index 936ca42d3..000000000
--- a/jjb/ipv6/ipv6-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: ipv6-view
- views:
- - project-view
- project-name: ipv6
diff --git a/jjb/ipv6/ipv6.yaml b/jjb/ipv6/ipv6.yaml
deleted file mode 100644
index 2946ec77b..000000000
--- a/jjb/ipv6/ipv6.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- project:
- name: ipv6
-
- project: '{name}'
-
- jobs:
- - '{project}-verify-basic'
diff --git a/jjb/kvmfornfv/kvmfornfv-download-artifact.sh b/jjb/kvmfornfv/kvmfornfv-download-artifact.sh
deleted file mode 100755
index ea37eb29c..000000000
--- a/jjb/kvmfornfv/kvmfornfv-download-artifact.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
- JOB_TYPE=${BASH_REMATCH[0]}
-else
- echo "Unable to determine job type!"
- exit 1
-fi
-
-# do stuff differently based on the job type
-case "$JOB_TYPE" in
- verify)
- echo "Downloading artifacts for the change $GERRIT_CHANGE_NUMBER. This could take some time..."
- GS_UPLOAD_LOCATION="gs://artifacts.opnfv.org/$PROJECT/review/$GERRIT_CHANGE_NUMBER"
- ;;
- daily)
- gsutil cp gs://$GS_URL/latest.properties $WORKSPACE/latest.properties
- source $WORKSPACE/latest.properties
- GS_UPLOAD_LOCATION=$OPNFV_ARTIFACT_URL
- echo "Downloading artifacts from $GS_UPLOAD_LOCATION for daily run. This could take some time..."
- ;;
- *)
- echo "Artifact download is not enabled for $JOB_TYPE jobs"
- exit 1
-esac
-
-GS_GUESTIMAGE_LOCATION="gs://artifacts.opnfv.org/$PROJECT/guest-image"
-/bin/mkdir -p $WORKSPACE/build_output
-gsutil cp -r $GS_UPLOAD_LOCATION/* $WORKSPACE/build_output > $WORKSPACE/gsutil.log 2>&1
-gsutil cp $GS_GUESTIMAGE_LOCATION/guest1.sha512 $WORKSPACE/build_output > $WORKSPACE/gsutil.log 2>&1
-
-echo "--------------------------------------------------------"
-ls -al $WORKSPACE/build_output
-echo "--------------------------------------------------------"
-echo
-echo "Downloaded artifacts!"
diff --git a/jjb/kvmfornfv/kvmfornfv-rtd-jobs.yaml b/jjb/kvmfornfv/kvmfornfv-rtd-jobs.yaml
deleted file mode 100644
index 7d0b925a5..000000000
--- a/jjb/kvmfornfv/kvmfornfv-rtd-jobs.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
----
-- project:
- name: kvmfornfv-rtd
- project: kvmfornfv
- project-name: kvmfornfv
-
- gerrit-skip-vote: true
- project-pattern: 'kvmfornfv'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-kvmfornfv/47372/'
- rtd-token: '32ae6f0ad54181a27fd38d99821a021f5087554a'
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/kvmfornfv/kvmfornfv-test.sh b/jjb/kvmfornfv/kvmfornfv-test.sh
deleted file mode 100755
index b31d61cce..000000000
--- a/jjb/kvmfornfv/kvmfornfv-test.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-##########################################################
-##This script includes executing cyclictest scripts.
-##########################################################
-#The latest build packages are stored in build_output
-
-ls -al $WORKSPACE/build_output
-
-if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
- JOB_TYPE=${BASH_REMATCH[0]}
-else
- echo "Unable to determine job type!"
- exit 1
-fi
-
-echo $TEST_NAME
-
-# do stuff differently based on the job type
-case "$JOB_TYPE" in
- verify)
- #start the test
- cd $WORKSPACE
- ./ci/test_kvmfornfv.sh $JOB_TYPE
- ;;
- daily)
- #start the test
- cd $WORKSPACE
- ./ci/test_kvmfornfv.sh $JOB_TYPE $TEST_NAME
- ;;
- *)
- echo "Test is not enabled for $JOB_TYPE jobs"
- exit 1
-esac
diff --git a/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh b/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
deleted file mode 100755
index 91b6f4481..000000000
--- a/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/bin/bash
-set -o nounset
-
-if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
- JOB_TYPE=${BASH_REMATCH[0]}
-else
- echo "Unable to determine job type!"
- exit 1
-fi
-
-case "$JOB_TYPE" in
- verify)
- OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
- GS_UPLOAD_LOCATION="gs://artifacts.opnfv.org/$PROJECT/review/$GERRIT_CHANGE_NUMBER"
- echo "Removing outdated artifacts produced for the previous patch for the change $GERRIT_CHANGE_NUMBER"
- gsutil ls $GS_UPLOAD_LOCATION > /dev/null 2>&1 && gsutil rm -r $GS_UPLOAD_LOCATION
- echo "Uploading artifacts for the change $GERRIT_CHANGE_NUMBER. This could take some time..."
- ;;
- daily)
- echo "Uploading daily artifacts This could take some time..."
- OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
- GS_UPLOAD_LOCATION="gs://$GS_URL/$OPNFV_ARTIFACT_VERSION"
- GS_LOG_LOCATION="gs://$GS_URL/logs-$(date -u +"%Y-%m-%d")"/
- ;;
- *)
- echo "Artifact upload is not enabled for $JOB_TYPE jobs"
- exit 1
-esac
-
-# save information regarding artifacts into file
-(
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_ARTIFACT_URL=$GS_UPLOAD_LOCATION"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
-) > $WORKSPACE/opnfv.properties
-source $WORKSPACE/opnfv.properties
-
-# upload artifacts
-if [[ "$PHASE" == "build" ]]; then
- gsutil cp -r $WORKSPACE/build_output/* $GS_UPLOAD_LOCATION > $WORKSPACE/gsutil.log 2>&1
- gsutil -m setmeta -r \
- -h "Cache-Control:private, max-age=0, no-transform" \
- $GS_UPLOAD_LOCATION > /dev/null 2>&1
-else
- if [[ "$JOB_TYPE" == "daily" ]]; then
- log_dir=$WORKSPACE/build_output/log
- if [[ -d "$log_dir" ]]; then
- #Uploading logs to artifacts
- echo "Uploading artifacts for future debugging needs...."
- gsutil cp -r $WORKSPACE/build_output/log-*.tar.gz $GS_LOG_LOCATION > $WORKSPACE/gsutil.log 2>&1
- # verifying the logs uploaded by cyclictest daily test job
- gsutil ls $GS_LOG_LOCATION > /dev/null 2>&1
- if [[ $? -ne 0 ]]; then
- echo "Problem while uploading logs to artifacts!"
- echo "Check log $WORKSPACE/gsutil.log on $NODE_NAME"
- exit 1
- fi
- else
- echo "No test logs/artifacts available for uploading"
- fi
- fi
-fi
-
-# upload metadata file for the artifacts built by daily job
-if [[ "$JOB_TYPE" == "daily" && "$PHASE" == "build" ]]; then
- gsutil cp $WORKSPACE/opnfv.properties $GS_UPLOAD_LOCATION/opnfv.properties > $WORKSPACE/gsutil.log 2>&1
- gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > $WORKSPACE/gsutil.log 2>&1
- gsutil -m setmeta -r \
- -h "Cache-Control:private, max-age=0, no-transform" \
- $GS_UPLOAD_LOCATION/opnfv.properties \
- gs://$GS_URL/latest.properties > /dev/null 2>&1
-fi
-
-# verifying the artifacts uploading by verify/daily build job
-if [[ "$PHASE" == "build" ]]; then
- gsutil ls $GS_UPLOAD_LOCATION > /dev/null 2>&1
- if [[ $? -ne 0 ]]; then
- echo "Problem while uploading artifacts!"
- echo "Check log $WORKSPACE/gsutil.log on $NODE_NAME"
- exit 1
- fi
-fi
-echo "Uploaded artifacts!"
diff --git a/jjb/kvmfornfv/kvmfornfv-views.yaml b/jjb/kvmfornfv/kvmfornfv-views.yaml
deleted file mode 100644
index a02927667..000000000
--- a/jjb/kvmfornfv/kvmfornfv-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: kvmfornfv-view
- views:
- - project-view
- project-name: kvmfornfv
diff --git a/jjb/kvmfornfv/kvmfornfv.yaml b/jjb/kvmfornfv/kvmfornfv.yaml
deleted file mode 100644
index ad497e97d..000000000
--- a/jjb/kvmfornfv/kvmfornfv.yaml
+++ /dev/null
@@ -1,386 +0,0 @@
----
-- project:
- name: kvmfornfv
-
- project: '{name}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - euphrates:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: true
- #####################################
- # patch verification phases
- #####################################
- phase:
- - 'build':
- slave-label: 'opnfv-build-ubuntu'
- - 'test':
- slave-label: 'intel-pod10'
- #####################################
- # patch verification phases
- #####################################
- testname:
- - 'cyclictest'
- - 'packet_forward'
- - 'livemigration'
- #####################################
- # patch verification phases
- #####################################
- jobs:
- - 'kvmfornfv-verify-{stream}'
- - 'kvmfornfv-verify-{phase}-{stream}'
- - 'kvmfornfv-merge-{stream}'
- - 'kvmfornfv-daily-{stream}'
- - 'kvmfornfv-daily-build-{stream}'
- - 'kvmfornfv-{testname}-daily-test-{stream}'
-
-#####################################
-# job templates
-#####################################
-- job-template:
- name: 'kvmfornfv-verify-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 3
- option: 'project'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: build
- condition: SUCCESSFUL
- projects:
- - name: 'kvmfornfv-verify-build-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: test
- condition: SUCCESSFUL
- projects:
- - name: 'kvmfornfv-verify-test-{stream}'
- current-parameters: false
- predefined-parameters: |
- BRANCH=$BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
-
-- job-template:
- name: 'kvmfornfv-verify-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- scm:
- - git-scm-gerrit
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-defaults'
- - 'kvmfornfv-defaults':
- gs-pathname: '{gs-pathname}'
- - string:
- name: PHASE
- default: '{phase}'
- description: "Execution of kvmfornfv daily '{phase}' job ."
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-verify-{phase}-macro'
-
-- job-template:
- name: 'kvmfornfv-merge-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - 'kvmfornfv-defaults':
- gs-pathname: '{gs-pathname}'
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- builders:
- - shell:
- !include-raw: ./kvmfornfv-build.sh
-
-- job-template:
- name: 'kvmfornfv-daily-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - 'kvmfornfv-defaults':
- gs-pathname: '{gs-pathname}'
-
- scm:
- - git-scm
-
- triggers:
- - timed: '@midnight'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: cyclictest-build
- condition: SUCCESSFUL
- projects:
- - name: 'kvmfornfv-daily-build-{stream}'
- current-parameters: false
- node-parameters: false
- git-revision: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: cyclictest-test
- condition: SUCCESSFUL
- projects:
- - name: 'kvmfornfv-cyclictest-daily-test-{stream}'
- current-parameters: false
- node-parameters: false
- git-revision: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: packetforward-test
- condition: SUCCESSFUL
- projects:
- - name: 'kvmfornfv-packet_forward-daily-test-{stream}'
- current-parameters: false
- node-parameters: false
- git-revision: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: livemigration-test
- condition: SUCCESSFUL
- projects:
- - name: 'kvmfornfv-livemigration-daily-test-{stream}'
- current-parameters: false
- node-parameters: false
- git-revision: true
- kill-phase-on: FAILURE
- abort-all-job: true
-
-- job-template:
- name: 'kvmfornfv-daily-build-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - 'kvmfornfv-defaults':
- gs-pathname: '{gs-pathname}'
- - string:
- name: PHASE
- default: 'build'
- description: "Execution of kvmfornfv daily 'build' job ."
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-daily-build-macro'
-
-- job-template:
- name: 'kvmfornfv-{testname}-daily-test-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 360
- fail: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'intel-pod10-defaults'
- - 'kvmfornfv-defaults':
- gs-pathname: '{gs-pathname}'
- - string:
- name: TEST_NAME
- default: '{testname}'
- description: "Daily job to execute kvmfornfv '{testname}' testcase."
- - string:
- name: PHASE
- default: 'test'
- description: "Execution of kvmfornfv daily 'test' job ."
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - '{project}-{testname}-daily-test-macro'
-
-#####################################
-# builder macros
-#####################################
-- builder:
- name: 'kvmfornfv-verify-build-macro'
- builders:
- - shell:
- !include-raw: ./kvmfornfv-build.sh
- - shell:
- !include-raw: ./kvmfornfv-upload-artifact.sh
-
-- builder:
- name: 'kvmfornfv-verify-test-macro'
- builders:
- - shell:
- !include-raw: ./kvmfornfv-download-artifact.sh
- - shell:
- !include-raw: ./kvmfornfv-test.sh
-
-- builder:
- name: 'kvmfornfv-daily-build-macro'
- builders:
- - shell:
- !include-raw: ./kvmfornfv-build.sh
- - shell:
- !include-raw: ./kvmfornfv-upload-artifact.sh
-
-- builder:
- name: 'kvmfornfv-cyclictest-daily-test-macro'
- builders:
- - shell:
- !include-raw: ./kvmfornfv-download-artifact.sh
- - shell:
- !include-raw: ./kvmfornfv-test.sh
- - shell:
- !include-raw: ./kvmfornfv-upload-artifact.sh
-
-- builder:
- name: 'kvmfornfv-packet_forward-daily-test-macro'
- builders:
- - shell:
- !include-raw: ./kvmfornfv-download-artifact.sh
- - shell:
- !include-raw: ./kvmfornfv-test.sh
-
-- builder:
- name: 'kvmfornfv-livemigration-daily-test-macro'
- builders:
- - shell:
- !include-raw: ./kvmfornfv-download-artifact.sh
- - shell:
- !include-raw: ./kvmfornfv-test.sh
-
-#####################################
-# parameter macros
-#####################################
-- parameter:
- name: 'kvmfornfv-defaults'
- parameters:
- - string:
- name: GS_URL
- default: artifacts.opnfv.org/$PROJECT{gs-pathname}
- description: "URL to Google Storage."
diff --git a/jjb/laas/laas.yml b/jjb/laas/laas.yml
index 76c475eab..197495475 100644
--- a/jjb/laas/laas.yml
+++ b/jjb/laas/laas.yml
@@ -1,4 +1,22 @@
---
+- parameter:
+ name: 'laas-dashboard-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'laas-dashboard'
+ description: 'Slave label on Jenkins'
+ all-nodes: false
+ node-eligibility: 'ignore-offline'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
+
- project:
name: laas-dashboard-jobs
@@ -14,22 +32,6 @@
- 'laas-dashboard-backup'
- 'laas-dashboard-deploy'
-- project:
- name: laas-dashboard-verify
-
- project: 'laas'
- project-name: 'laas'
- build-days-to-keep: 7
- tox-dir: 'dashboard'
-
- stream:
- - master
-
- disabled: false
-
- jobs:
- - 'gerrit-tox-verify'
-
- job-template:
name: 'laas-dashboard-backup'
diff --git a/jjb/nfvbench/nfvbench-rtd-jobs.yaml b/jjb/nfvbench/nfvbench-rtd-jobs.yaml
deleted file mode 100644
index 5ff94834a..000000000
--- a/jjb/nfvbench/nfvbench-rtd-jobs.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-- project:
- name: nfvbench-rtd
- project: nfvbench
- project-name: nfvbench
-
- project-pattern: 'nfvbench'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-nfvbench/47376/'
- rtd-token: '7d77b15615ffe7906f1f20e245c80dc0a0f97e47'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - gambia:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/nfvbench/nfvbench-views.yaml b/jjb/nfvbench/nfvbench-views.yaml
index 6175a4427..4884adb78 100644
--- a/jjb/nfvbench/nfvbench-views.yaml
+++ b/jjb/nfvbench/nfvbench-views.yaml
@@ -1,6 +1,12 @@
---
-- project:
- name: nfvbench-view
- views:
- - project-view
- project-name: nfvbench
+- view:
+ name: nfvbench
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^nfvbench-.*
diff --git a/jjb/nfvbench/nfvbench.yaml b/jjb/nfvbench/nfvbench.yaml
index a9efb272a..bb5083b4d 100644
--- a/jjb/nfvbench/nfvbench.yaml
+++ b/jjb/nfvbench/nfvbench.yaml
@@ -14,11 +14,13 @@
gs-pathname: ''
docker-tag: 'latest'
disabled: false
- - gambia:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- docker-tag: 'stable'
- disabled: false
+
+- parameter:
+ name: nfvbench-parameter
+ parameters:
+ - label:
+ name: node
+ default: 'opnfv-build'
- job-template:
name: 'nfvbench-build-{stream}'
@@ -35,7 +37,7 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: "Used for overriding the GIT URL coming from Global Jenkins configuration in case if the stuff is done on none-LF HW."
# yamllint enable rule:line-length
- - 'opnfv-build-ubuntu-defaults'
+ - nfvbench-parameter
scm:
- git-scm-gerrit
@@ -45,6 +47,8 @@
server-name: 'gerrit.opnfv.org'
trigger-on:
- change-merged-event
+ - comment-added-contains-event:
+ comment-contains-value: 'remerge'
projects:
- project-compare-type: 'ANT'
project-pattern: '{project}'
@@ -72,7 +76,7 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: "Used for overriding the GIT URL coming from Global Jenkins configuration in case if the stuff is done on none-LF HW."
# yamllint enable rule:line-length
- - 'opnfv-build-ubuntu-defaults'
+ - nfvbench-parameter
scm:
- git-scm-gerrit
@@ -85,6 +89,10 @@
exclude-drafts: 'false'
exclude-trivial-rebase: 'false'
exclude-no-code-change: 'false'
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
projects:
- project-compare-type: 'ANT'
project-pattern: '{project}'
@@ -94,6 +102,25 @@
builders:
- shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install software-properties-common gpg -y
+ sudo add-apt-repository -y ppa:deadsnakes/ppa
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install \
+ libxml2-dev libxslt-dev libffi-dev libjpeg-dev \
+ python3.8 python3.8-dev python3.8-distutils \
+ python3-pip -y
cd $WORKSPACE && tox
+ curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add -
+ echo "deb https://packages.cloud.google.com/apt cloud-sdk main" | sudo tee \
+ /etc/apt/sources.list.d/google-cloud-sdk.list
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install \
+ google-cloud-sdk python3-venv -y
cd $WORKSPACE/nfvbenchvm/dib
bash verify-image.sh -v
diff --git a/jjb/onosfw/onosfw-rtd-jobs.yaml b/jjb/onosfw/onosfw-rtd-jobs.yaml
deleted file mode 100644
index a7a5e737a..000000000
--- a/jjb/onosfw/onosfw-rtd-jobs.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
----
-- project:
- name: onosfw-rtd
- project: onosfw
- project-name: onosfw
-
- gerrit-skip-vote: true
- project-pattern: 'onosfw'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-onosfw/47378/'
- rtd-token: '1ad406bcdf2d627e2e18fbcd6605f3456b05bb3d'
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/onosfw/onosfw-views.yaml b/jjb/onosfw/onosfw-views.yaml
deleted file mode 100644
index 5e8920fa0..000000000
--- a/jjb/onosfw/onosfw-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: onosfw-view
- views:
- - project-view
- project-name: onosfw
diff --git a/jjb/onosfw/onosfw.yaml b/jjb/onosfw/onosfw.yaml
deleted file mode 100644
index 58a50bd57..000000000
--- a/jjb/onosfw/onosfw.yaml
+++ /dev/null
@@ -1,192 +0,0 @@
----
-- project:
-
- name: onosfw
-
- jobs:
- - 'onosfw-verify-{stream}'
- - 'onosfw-daily-{stream}'
- - 'onosfw-build-{stream}'
-
- # only master branch is enabled at the moment to keep no of jobs sane
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - euphrates:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
- project: 'onosfw'
-
-########################
-# job templates
-########################
-- job-template:
- name: 'onosfw-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
- builders:
- - 'builder-onosfw-helloworld'
-
-- job-template:
- name: 'onosfw-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - timed: '@midnight'
-
- builders:
- - trigger-builds:
- - project: 'onosfw-build-{stream}'
- git-revision: true
- block: true
-
-- job-template:
- name: 'onosfw-build-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "Directory where the build artifact will be located upon the completion of the build."
-
- scm:
- - git-scm
-
- builders:
- - 'builder-onosfw-helloworld'
-
-########################
-# builder macros
-########################
-- builder:
- name: 'builder-onosfw-build'
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o nounset
- set -o pipefail
-
- # log info to console
- echo "Starting the build of $PROJECT. This could take some time..."
- echo "--------------------------------------------------------"
- echo
-
- # create the cache directory if it doesn't exist
- [[ -d $CACHE_DIRECTORY ]] || mkdir -p $CACHE_DIRECTORY
- [[ -d $BUILD_DIRECTORY ]] || mkdir -p $BUILD_DIRECTORY
-
- # set OPNFV_ARTIFACT_VERSION
- export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
-
- # start the build
- cd $WORKSPACE/
- ./ci/build.sh $BUILD_DIRECTORY/
-
- # list the build artifacts
- ls -al $BUILD_DIRECTORY
-
- # save information regarding artifact into file
- (
- echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
- echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
- echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
- echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
- echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/onosfw.iso | cut -d' ' -f1)"
- echo "OPNFV_BUILD_URL=$BUILD_URL"
- ) > $BUILD_DIRECTORY/opnfv.properties
- echo
- echo "--------------------------------------------------------"
- echo "Done!"
-
-
-# yamllint disable rule:line-length
-- builder:
- name: 'builder-onosfw-upload-artifact'
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o nounset
- set -o pipefail
-
- # log info to console
- echo "Uploading the $INSTALLER artifact. This could take some time..."
- echo "--------------------------------------------------------"
- echo
-
- # source the opnfv.properties to get ARTIFACT_VERSION
- source $BUILD_DIRECTORY/opnfv.properties
-
- # upload artifact and additional files to google storage
- gsutil cp $BUILD_DIRECTORY/onosfw.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log 2>&1
- gsutil cp $BUILD_DIRECTORY/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log 2>&1
- gsutil cp $BUILD_DIRECTORY/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log 2>&1
-
- echo
- echo "--------------------------------------------------------"
- echo "Done!"
- echo "Artifact is available as http://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
-# yamllint enable rule:line-length
-
-
-- builder:
- name: 'builder-onosfw-helloworld'
- builders:
- - shell: |
- #!/bin/bash
- echo "Hello world!"
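For context on the deleted 'builder-onosfw-upload-artifact' macro above: it published a versioned ISO together with opnfv.properties and a latest.properties pointer to Google Storage. A minimal consumer of that layout might look like the sketch below; the bucket path must be supplied by the caller, and only the property names come from the deleted builder:

    #!/bin/bash
    # Hedged sketch: download the most recent onosfw ISO via latest.properties.
    set -o errexit
    GS_URL=${GS_URL:?set to the Google Storage bucket/path used by the removed jobs}
    curl -fsSL "http://$GS_URL/latest.properties" -o latest.properties
    source latest.properties    # defines OPNFV_ARTIFACT_URL, OPNFV_ARTIFACT_SHA512SUM, ...
    curl -fsSL "http://$OPNFV_ARTIFACT_URL" -o onosfw.iso
    echo "$OPNFV_ARTIFACT_SHA512SUM  onosfw.iso" | sha512sum -c -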
diff --git a/jjb/openci/create-ape.sh b/jjb/openci/create-ape.sh
deleted file mode 100755
index 7c9b46cc6..000000000
--- a/jjb/openci/create-ape.sh
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-# workaround for https://github.com/pypa/virtualenv/issues/1029
-export PS1=${PS1:-}
-
-# This script creates ArtifactPublishedEvent
-
-git clone https://gitlab.openci.io/openci/prototypes.git
-cd prototypes/federated-cicd
-virtualenv openci_publish
-cd openci_publish
-source bin/activate
-python setup.py install
-
-# generate event body
-cat <<EOF > ./json_body.txt
-{
- "type": "$PUBLISH_EVENT_TYPE",
- "id": "$(uuidgen)",
- "time": "$(date -u +%Y-%m-%d_%H:%M:%SUTC)",
- "buildUrl": "$BUILD_URL",
- "branch": "master",
- "origin": "$PUBLISH_EVENT_ORIGIN",
- "artifactLocation": "$ARTIFACT_LOCATION",
- "confidenceLevel": "$CONFIDENCE_LEVEL"
-}
-EOF
-
-echo "Constructed $PUBLISH_EVENT_TYPE"
-echo "--------------------------------------------"
-cat ./json_body.txt
-echo "--------------------------------------------"
-
-python openci_publish -H 129.192.69.55 -U ${ACTIVEMQ_USER} -p ${ACTIVEMQ_PASSWORD} -n openci.prototype -B ./json_body.txt
-
-deactivate
diff --git a/jjb/openci/create-cde.sh b/jjb/openci/create-cde.sh
deleted file mode 100755
index 9780119ce..000000000
--- a/jjb/openci/create-cde.sh
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-export PS1=${PS1:-}
-
-# This script creates CompositionDefinedEvent
-
-git clone https://gitlab.openci.io/openci/prototypes.git
-cd prototypes/federated-cicd
-virtualenv openci_publish
-cd openci_publish
-source bin/activate
-python setup.py install
-
-# generate event body
-cat <<EOF > ./json_body.txt
-{
- "type": "$PUBLISH_EVENT_TYPE",
- "id": "$(uuidgen)",
- "time": "$(date -u +%Y-%m-%d_%H:%M:%SUTC)",
- "buildUrl": "$BUILD_URL",
- "branch": "master",
- "origin": "$PUBLISH_EVENT_ORIGIN",
- "scenario": "$DEPLOY_SCENARIO",
- "compositionName": "$DEPLOY_SCENARIO",
- "compositionMetadataUrl": "$SCENARIO_METADATA_LOCATION"
-}
-EOF
-
-echo "Constructed $PUBLISH_EVENT_TYPE"
-echo "--------------------------------------------"
-cat ./json_body.txt
-echo "--------------------------------------------"
-
-python openci_publish -H 129.192.69.55 -U ${ACTIVEMQ_USER} -p ${ACTIVEMQ_PASSWORD} -n openci.prototype -B ./json_body.txt
-
-deactivate
diff --git a/jjb/openci/create-clme.sh b/jjb/openci/create-clme.sh
deleted file mode 100755
index 2ece019b0..000000000
--- a/jjb/openci/create-clme.sh
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-export PS1=${PS1:-}
-
-# This script creates ConfidenceLevelModifiedEvent
-
-git clone https://gitlab.openci.io/openci/prototypes.git
-cd prototypes/federated-cicd
-virtualenv openci_publish
-cd openci_publish
-source bin/activate
-python setup.py install
-
-# generate event body
-cat <<EOF > ./json_body.txt
-{
- "type": "$PUBLISH_EVENT_TYPE",
- "id": "$(uuidgen)",
- "time": "$(date -u +%Y-%m-%d_%H:%M:%SUTC)",
- "buildUrl": "$BUILD_URL",
- "branch": "master",
- "origin": "$PUBLISH_EVENT_ORIGIN",
- "scenario": "$DEPLOY_SCENARIO",
- "compositionName": "$DEPLOY_SCENARIO",
- "compositionMetadataUrl": "$SCENARIO_METADATA_LOCATION",
- "confidenceLevel": "$CONFIDENCE_LEVEL"
-}
-EOF
-
-echo "Constructed $PUBLISH_EVENT_TYPE"
-echo "--------------------------------------------"
-cat ./json_body.txt
-echo "--------------------------------------------"
-
-python openci_publish -H 129.192.69.55 -U ${ACTIVEMQ_USER} -p ${ACTIVEMQ_PASSWORD} -n openci.prototype -B ./json_body.txt
-
-deactivate
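The three deleted create-*.sh scripts above all render an event body from job parameters and hand it to the openci_publish prototype. A small sketch for checking such a body before publishing; it assumes a copy of one of the removed heredoc blocks has already written ./json_body.txt, and only the validation step is new:

    #!/bin/bash
    # Hedged sketch: validate an OpenCI event body before it is published.
    set -o errexit
    test -s json_body.txt                  # body must exist and be non-empty
    python3 -m json.tool json_body.txt     # non-zero exit if the body is not valid JSON
    grep -q '"type":' json_body.txt        # every event in the removed scripts carries a type field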
diff --git a/jjb/openci/openci-odl-daily-jobs.yaml b/jjb/openci/openci-odl-daily-jobs.yaml
deleted file mode 100644
index bdaca5742..000000000
--- a/jjb/openci/openci-odl-daily-jobs.yaml
+++ /dev/null
@@ -1,81 +0,0 @@
----
-- project:
- name: openci-odl
-
- project: '{name}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
- jobs:
- - 'openci-odl-autorelease-daily-{stream}'
- - 'openci-odl-promote-daily-{stream}'
-
-# This job gets triggered manually for the demo purposes.
-#
-# In prototype, either what this job does needs to be integrated to
-# ODL autorelease job or triggered by the upstream autorelease job.
-- job-template:
- name: 'openci-odl-autorelease-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - string:
- name: PUBLISH_EVENT_TYPE
- default: ArtifactPublishedEvent
- description: 'The type of the event this job publishes'
- - string:
- name: PUBLISH_EVENT_ORIGIN
- default: ODL
- description: 'Originating community'
- - string:
- name: ARTIFACT_LOCATION
- default: https://url/to/artifact/on/odl/nexus/$BUILD_NUMBER
- description: 'The location of the artifact on ODL Nexus'
- - string:
- name: CONFIDENCE_LEVEL
- default: "'autorelease': 'SUCCESS'"
- description: 'The confidence level the published artifact gained'
- - 'opnfv-build-defaults'
-
- builders:
- - shell:
- !include-raw-escape: ./create-ape.sh
-
-# This job gets triggered by a ConfidenceLevelModifiedEvent published
-# by OPNFV jobs so ODL can promote the autorelease artifact even further.
-#
-# This job is created for the demo purposes and might not be there for
-# the prototype.
-- job-template:
- name: 'openci-odl-promote-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - 'opnfv-build-defaults'
-
- triggers:
- - jms-messaging:
- provider-name: openci.activemq
- selector: |
- JMSType = 'ConfidenceLevelModifiedEvent' and JMSOrigin = 'OPNFV' and JMSScenario = 'os-odl-nofeature-ha'
-
- builders:
- - shell: |
- #!/bin/bash
-
- echo
- echo "Triggered by $type"
- echo "----------------------------------"
- echo $CI_MESSAGE
- echo "----------------------------------"
- echo " Promoted ODL Autorelease artifact as release candidate!"
diff --git a/jjb/openci/openci-onap-daily-jobs.yaml b/jjb/openci/openci-onap-daily-jobs.yaml
deleted file mode 100644
index 88589d8ac..000000000
--- a/jjb/openci/openci-onap-daily-jobs.yaml
+++ /dev/null
@@ -1,81 +0,0 @@
----
-- project:
- name: openci-onap
-
- project: '{name}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
- jobs:
- - 'openci-onap-autorelease-daily-{stream}'
- - 'openci-onap-promote-daily-{stream}'
-
-# This job gets triggered manually for the demo purposes.
-#
-# In prototype, either what this job does needs to be integrated to
-# ONAP autorelease job or triggered by the upstream autorelease job.
-- job-template:
- name: 'openci-onap-autorelease-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - string:
- name: PUBLISH_EVENT_TYPE
- default: ArtifactPublishedEvent
- description: 'The type of the event this job publishes'
- - string:
- name: PUBLISH_EVENT_ORIGIN
- default: ONAP
- description: 'Originating community'
- - string:
- name: ARTIFACT_LOCATION
- default: https://url/to/artifact/on/onap/nexus/$BUILD_NUMBER
- description: 'The location of the artifact on ONAP Nexus'
- - string:
- name: CONFIDENCE_LEVEL
- default: "'autorelease': 'SUCCESS'"
- description: 'The confidence level the published artifact gained'
- - 'opnfv-build-defaults'
-
- builders:
- - shell:
- !include-raw-escape: ./create-ape.sh
-
-# This job gets triggered by a ConfidenceLevelModifiedEvent published
-# by OPNFV jobs so ONAP can promote the autorelease artifact even further.
-#
-# This job is created for the demo purposes and might not be there for
-# the prototype.
-- job-template:
- name: 'openci-onap-promote-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - 'opnfv-build-defaults'
-
- triggers:
- - jms-messaging:
- provider-name: openci.activemq
- selector: |
- JMSType = 'ConfidenceLevelModifiedEvent' and JMSOrigin = 'OPNFV' and JMSScenario = 'k8-nosdn-onap-ha'
-
- builders:
- - shell: |
- #!/bin/bash
-
- echo
- echo "Triggered by $type"
- echo "----------------------------------"
- echo $CI_MESSAGE
- echo "----------------------------------"
- echo " Promoted ONAP Autorelease artifact as release candidate!"
diff --git a/jjb/openci/openci-opnfv-daily-jobs.yaml b/jjb/openci/openci-opnfv-daily-jobs.yaml
deleted file mode 100644
index 020171bc2..000000000
--- a/jjb/openci/openci-opnfv-daily-jobs.yaml
+++ /dev/null
@@ -1,135 +0,0 @@
----
-- project:
- name: openci-opnfv
-
- project: '{name}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
- scenario:
- - 'os-odl-nofeature-ha':
- origin: ODL
- - 'k8-nosdn-onap-ha':
- origin: ONAP
-
- jobs:
- - 'openci-opnfv-{scenario}-compose-daily-{stream}'
- - 'openci-opnfv-{scenario}-test-daily-{stream}'
-
-- job-template:
- name: 'openci-opnfv-{scenario}-compose-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - string:
- name: PUBLISH_EVENT_TYPE
- default: CompositionDefinedEvent
- description: 'The type of the event this job publishes'
- - string:
- name: PUBLISH_EVENT_ORIGIN
- default: OPNFV
- description: 'Originating community'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
- description: 'The scenario that is composed'
- - string:
- name: SCENARIO_METADATA_LOCATION
- default: https://url/to/scenario/metadata/on/opnfv/artifact/repo/$BUILD_NUMBER
- description: 'The location of the scenario metadata'
- - label:
- name: SLAVE_LABEL
- default: 'xci-virtual'
- all-nodes: false
- node-eligibility: 'ignore-offline'
-
- wrappers:
- - credentials-binding:
- - username-password-separated:
- credential-id: openci-connect-activemq
- username: ACTIVEMQ_USER
- password: ACTIVEMQ_PASSWORD
- - workspace-cleanup
-
- triggers:
- - jms-messaging:
- provider-name: openci.activemq
- selector: JMSType = 'ArtifactPublishedEvent' and JMSOrigin = '{origin}'
-
- builders:
- - shell: |
- #!/bin/bash
-
- echo
- echo "Triggered by $type"
- echo "----------------------------------"
- echo $CI_MESSAGE
- echo "----------------------------------"
- - shell:
- !include-raw-escape: ./create-cde.sh
-
-- job-template:
- name: 'openci-opnfv-{scenario}-test-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - string:
- name: PUBLISH_EVENT_TYPE
- default: ConfidenceLevelModifiedEvent
- description: 'The type of the event this job publishes'
- - string:
- name: PUBLISH_EVENT_ORIGIN
- default: OPNFV
- description: 'Originating community'
- - string:
- name: DEPLOY_SCENARIO
- default: '{scenario}'
- description: 'The scenario that is composed'
- - string:
- name: SCENARIO_METADATA_LOCATION
- default: https://url/to/scenario/metadata/on/opnfv/artifact/repo/$BUILD_NUMBER
- description: 'The location of the scenario metadata'
- - string:
- name: CONFIDENCE_LEVEL
- default: "'opnfvdaily': 'SUCCESS'"
- description: 'The confidence level the published artifact gained'
- - label:
- name: SLAVE_LABEL
- default: 'xci-virtual'
- all-nodes: false
- node-eligibility: 'ignore-offline'
-
- wrappers:
- - credentials-binding:
- - username-password-separated:
- credential-id: openci-connect-activemq
- username: ACTIVEMQ_USER
- password: ACTIVEMQ_PASSWORD
- - workspace-cleanup
-
- triggers:
- - jms-messaging:
- provider-name: openci.activemq
- selector: JMSType = 'CompositionDefinedEvent' and JMSOrigin = 'OPNFV' and JMSScenario = '{scenario}'
-
- builders:
- - shell: |
- #!/bin/bash
-
- echo
- echo "Triggered by $type"
- echo "----------------------------------"
- echo $CI_MESSAGE
- echo "----------------------------------"
- - shell:
- !include-raw-escape: ./create-clme.sh
diff --git a/jjb/openci/openci-views.yaml b/jjb/openci/openci-views.yaml
deleted file mode 100644
index 08cff7c28..000000000
--- a/jjb/openci/openci-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: openci-view
- views:
- - project-view
- project-name: openci
diff --git a/jjb/opnfvdocs/docs-rtd.yaml b/jjb/opnfvdocs/docs-rtd.yaml
deleted file mode 100644
index fbfa051d3..000000000
--- a/jjb/opnfvdocs/docs-rtd.yaml
+++ /dev/null
@@ -1,86 +0,0 @@
----
-- project:
- name: docs-rtd
- project: 'opnfvdocs'
- project-name: 'opnfvdocs'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfvdocsdemo/79560/'
- rtd-token: 'a96c3622a270344cf9bf3f73b0dfa04f59fc59c0'
- project-pattern: 'opnfvdocs'
- jobs:
- - '{project-name}-rtd-jobs'
- - 'docs-merge-rtd-{stream}'
- - 'docs-verify-rtd-{stream}'
-
- stream:
- - master:
- branch: 'master'
- - iruya:
- branch: 'stable/{stream}'
-
-- job-template:
- name: 'docs-merge-rtd-{stream}'
-
- project-type: freestyle
-
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'lf-build1'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
-
- triggers:
- - gerrit-trigger-change-merged:
- project: '**'
- branch: '{branch}'
- files: 'docs/**/*.*'
-
- builders:
- - 'remove-old-docs-from-opnfv-artifacts'
-
-- job-template:
- name: 'docs-verify-rtd-{stream}'
-
- project-type: freestyle
-
- parameters:
- - label:
- name: SLAVE_LABEL
- default: 'lf-build2'
- description: 'Slave label on Jenkins'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/opnfvdocs
- description: 'Git URL to use on this Jenkins Slave'
-
- scm:
- - git-scm-with-submodules:
- branch: '{branch}'
-
- triggers:
- - gerrit-trigger-patchset-created:
- server: 'gerrit.opnfv.org'
- project: 'opnfvdocs'
- branch: '{branch}'
- files: 'docs/**'
-
- builders:
- - shell: |
- sudo -H pip install virtualenv
- virtualenv $WORKSPACE/venv
- . $WORKSPACE/venv/bin/activate
- pip install --upgrade pip
- pip freeze
- pip install tox
- sed -i s,\-b\ html,\-b\ singlehtml,g tox.ini
- tox -edocs
- - 'upload-review-docs'
diff --git a/jjb/opnfvdocs/opnfvdocs-views.yaml b/jjb/opnfvdocs/opnfvdocs-views.yaml
index 27645f9ec..f33c728ed 100644
--- a/jjb/opnfvdocs/opnfvdocs-views.yaml
+++ b/jjb/opnfvdocs/opnfvdocs-views.yaml
@@ -1,6 +1,12 @@
---
-- project:
- name: opnfvdocs-view
- views:
- - project-view
- project-name: opnfvdocs
+- view:
+ name: opnfvdocs
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^opnfvdocs-.*
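The rewritten opnfvdocs view above selects jobs by the regex ^opnfvdocs-.* instead of the shared project-view macro. A quick, purely illustrative check of what that regex keeps (the job names are samples, one of them hypothetical):

    #!/bin/bash
    # Hedged sketch: test the new view regex against a few sample job names.
    printf '%s\n' opnfvdocs-verify-shellcheck opnfvdocs-rtd-docs docs-verify-rtd-master |
        grep -E '^opnfvdocs-.*'
    # Prints only the names starting with "opnfvdocs-"; docs-* jobs drop out of the view.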
diff --git a/jjb/opnfvdocs/opnfvdocs.yaml b/jjb/opnfvdocs/opnfvdocs.yaml
index c82b0bdc7..3047a740e 100644
--- a/jjb/opnfvdocs/opnfvdocs.yaml
+++ b/jjb/opnfvdocs/opnfvdocs.yaml
@@ -1,111 +1,62 @@
---
-########################
-# Job configuration for opnfvdocs
-########################
-- project:
-
- name: opnfvdocs
-
- project: '{name}'
-
- jobs:
- - 'opnfvdocs-verify-shellcheck-{stream}'
- - 'opnfvdocs-merge-shellcheck-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - hunter:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-########################
-# job templates
-########################
-
-- job-template:
- name: 'opnfvdocs-verify-shellcheck-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - string:
- name: GIT_CLONE_BASE
- default: ssh://gerrit.opnfv.org:29418
- description: "Used for overriding the GIT URL coming from parameters macro."
-
+- scm:
+ name: opnfvdocs-scm
scm:
- - git-scm-gerrit
-
+ - git:
+ url: 'https://gerrit.opnfv.org/gerrit/opnfvdocs'
+ refspec: '+refs/heads/*:refs/remotes/origin/* +refs/changes/*:refs/changes/*'
+ branches:
+ - '{ref}'
+
+- builder:
+ name: opnfvdocs-lint-bash-code
+ builders:
+ - shell: |
+ #!/bin/bash
+ echo "Checking bash code..."
+ for f in $(egrep '\.sh$' modified_files)
+ do
+ bash -n "$f" 2>> bash-violation.log
+ done
+ if [[ -s bash-violation.log ]]; then
+ echo -e "Bash syntax error(s)\n---" >> violation.log
+ sed -e 's/^/ /g' bash-violation.log >> violation.log
+ fi
+
+- trigger:
+ name: opnfvdocs-patchset-created
triggers:
- gerrit:
- server-name: 'gerrit.opnfv.org'
trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
+ - patchset-created-event
- comment-added-contains-event:
comment-contains-value: 'recheck'
- comment-added-contains-event:
comment-contains-value: 'reverify'
projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: '{project}'
+ - project-compare-type: 'ANT'
+ project-pattern: 'opnfvdocs'
branches:
- branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- skip-vote:
- successful: true
- failed: true
- unstable: true
- notbuilt: true
-
- builders:
- - lint-bash-code
+ branch-pattern: master
+ - branch-compare-type: 'ANT'
+ branch-pattern: stable/nile
- job-template:
- name: 'opnfvdocs-merge-shellcheck-{stream}'
-
- disabled: '{obj:disabled}'
-
+ name: opnfvdocs-verify-shellcheck
parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - string:
- name: GIT_CLONE_BASE
- default: ssh://gerrit.opnfv.org:29418
- description: "Used for overriding the GIT URL coming from parameters macro."
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "Directory where the build artifact will be located upon\
- \ the completion of the build."
-
+ - label:
+ name: node
+ default: opnfv-build
scm:
- - git-scm
-
+ - opnfvdocs-scm:
+ ref: $GERRIT_REFSPEC
triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
+ - opnfvdocs-patchset-created
builders:
- - lint-bash-code
+ - opnfvdocs-lint-bash-code
+
+- project:
+ name: opnfvdocs
+ jobs:
+ - opnfvdocs-verify-shellcheck
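The refactor above replaces the per-stream shellcheck templates with small scm/trigger/builder macros; the new builder runs bash -n over paths listed in a modified_files file. The sketch below reproduces that check locally; deriving modified_files from git history is assumed here only for the reproduction, the hunk itself does not show how that file is produced:

    #!/bin/bash
    # Hedged sketch: run the same bash -n lint as the new opnfvdocs-lint-bash-code builder.
    git diff --name-only HEAD~1 -- '*.sh' > modified_files
    : > bash-violation.log
    while read -r f; do
        [ -f "$f" ] && bash -n "$f" 2>> bash-violation.log
    done < modified_files
    if [ -s bash-violation.log ]; then
        echo "Bash syntax error(s):"
        cat bash-violation.log
        exit 1
    fi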
diff --git a/jjb/opnfvtsc/opnfvtsc-rtd-jobs.yaml b/jjb/opnfvtsc/opnfvtsc-rtd-jobs.yaml
deleted file mode 100644
index b1923f458..000000000
--- a/jjb/opnfvtsc/opnfvtsc-rtd-jobs.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
----
-- project:
- name: opnfvtsc-rtd
- project: opnfvtsc
- project-name: opnfvtsc
-
- gerrit-skip-vote: true
- project-pattern: 'opnfvtsc'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-opnfvtsc/47400/'
- rtd-token: 'b8cbc26c46f1b1bd98adbf8c4488787a58d68fdd'
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/opnfvtsc/opnfvtsc-views.yaml b/jjb/opnfvtsc/opnfvtsc-views.yaml
deleted file mode 100644
index 7c2929557..000000000
--- a/jjb/opnfvtsc/opnfvtsc-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: opnfvtsc-view
- views:
- - project-view
- project-name: opnfvtsc
diff --git a/jjb/ovn4nfv/golang-make-test.sh b/jjb/ovn4nfv/golang-make-test.sh
deleted file mode 100644
index 7ed463277..000000000
--- a/jjb/ovn4nfv/golang-make-test.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 Intel Corporation.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o pipefail
-
-source /etc/os-release || source /usr/lib/os-release
-case ${ID,,} in
- ubuntu|debian)
- sudo add-apt-repository -y ppa:longsleep/golang-backports
- sudo apt-get update
- sudo apt-get install -y build-essential golang-go
- sudo apt-get -y clean && sudo apt-get -y autoremove
- ;;
-esac
-
-echo "Running unit tests in Go ${golang_version} ..."
-cd $WORKSPACE
-make test
diff --git a/jjb/ovn4nfv/ovn4nfv-daily-jobs.yaml b/jjb/ovn4nfv/ovn4nfv-daily-jobs.yaml
deleted file mode 100644
index 034d6d6a4..000000000
--- a/jjb/ovn4nfv/ovn4nfv-daily-jobs.yaml
+++ /dev/null
@@ -1,88 +0,0 @@
----
-- project:
- name: 'ovn4nfv-daily-jobs'
-
- project: 'ovn4nfv'
-
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- disabled: true
-
- pod:
- - virtual:
- slave-label: 'joid-virtual'
- os-version: 'xenial'
- <<: *master
-
- jobs:
- - 'ovn4nfv-{pod}-daily-{stream}'
-
-- job-template:
- name: 'ovn4nfv-{pod}-daily-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm
-
- wrappers:
- - ssh-agent-wrapper
-
- - timeout:
- timeout: 240
- fail: true
-
- triggers:
- - timed: '@daily'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: DEPLOY_SCENARIO
- default: os-ovn-nofeature-noha
- - '{slave-label}-defaults'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: deploy
- condition: SUCCESSFUL
- projects:
- - name: 'joid-deploy-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=os-ovn-nofeature-noha
- COMPASS_OS_VERSION=xenial
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: functest
- condition: SUCCESSFUL
- projects:
- - name: 'functest-joid-{pod}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=os-ovn-nofeature-ha
- FUNCTEST_MODE=testcase
- FUNCTEST_SUITE_NAME=ovn4nfv_test_suite
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
diff --git a/jjb/ovn4nfv/ovn4nfv-k8s-plugins-project-jobs.yaml b/jjb/ovn4nfv/ovn4nfv-k8s-plugins-project-jobs.yaml
deleted file mode 100644
index 849be93c1..000000000
--- a/jjb/ovn4nfv/ovn4nfv-k8s-plugins-project-jobs.yaml
+++ /dev/null
@@ -1,109 +0,0 @@
----
-- project:
- name: ovn4nfv-k8s-plugin-project-jobs
-
- project: 'ovn4nfv-k8s-plugin'
-
- jobs:
- - 'ovn4nfv-k8s-plugin-verify-{stream}'
- - 'ovn4nfv-k8s-plugin-merge-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
-
-################################
-# job templates
-################################
-
-- job-template:
- name: 'ovn4nfv-k8s-plugin-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - opnfv-build-defaults
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 30
- fail: true
-
- builders:
- - ovn4nfv-k8s-plugin-unit-tests-golang
-
-- job-template:
- name: 'ovn4nfv-k8s-plugin-merge-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - opnfv-build-defaults
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 30
- fail: true
-
- builders:
- - ovn4nfv-k8s-plugin-unit-tests-golang
-
-################################
-# job builders
-################################
-
-- builder:
- name: ovn4nfv-k8s-plugin-unit-tests-golang
- builders:
- - shell:
- !include-raw: ./golang-make-test.sh
diff --git a/jjb/ovn4nfv/ovn4nfv-project-jobs.yaml b/jjb/ovn4nfv/ovn4nfv-project-jobs.yaml
deleted file mode 100644
index 8fbd75ba9..000000000
--- a/jjb/ovn4nfv/ovn4nfv-project-jobs.yaml
+++ /dev/null
@@ -1,60 +0,0 @@
----
-- project:
- name: ovn4nfv
-
- project: '{name}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - gambia: &gambia
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
- jobs:
- - 'ovn4nfv-build-{stream}'
-
-- job-template:
- name: 'ovn4nfv-build-{stream}'
-
- concurrent: true
-
- disabled: '{obj:disabled}'
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-total: 1
- max-per-node: 1
- option: 'project'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - opnfv-build-defaults
-
- scm:
- - git-scm
-
- triggers:
- - timed: 'H 23 * * *'
-
- builders:
- - 'ovn4nfv-build-macro'
-
-- builder:
- name: 'ovn4nfv-build-macro'
- builders:
- - shell: |
- #!/bin/bash
-
- echo "hello world"
diff --git a/jjb/ovn4nfv/ovn4nfv-rtd-jobs.yaml b/jjb/ovn4nfv/ovn4nfv-rtd-jobs.yaml
deleted file mode 100644
index 984ec4f6e..000000000
--- a/jjb/ovn4nfv/ovn4nfv-rtd-jobs.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-- project:
- name: ovn4nfv-rtd
- project: ovn4nfv
- project-name: ovn4nfv
-
- project-pattern: 'ovn4nfv'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-ovn4nfv/47381/'
- rtd-token: 'f131200fd878a5c443f18c134c3bfda122538bce'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - gambia:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/ovno/ovno-rtd-jobs.yaml b/jjb/ovno/ovno-rtd-jobs.yaml
deleted file mode 100644
index c5d661dca..000000000
--- a/jjb/ovno/ovno-rtd-jobs.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
----
-- project:
- name: ovno-rtd
- project: ovno
- project-name: ovno
-
- gerrit-skip-vote: true
- project-pattern: 'ovno'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-ovno/47382/'
- rtd-token: 'd393a62c6ee0b06979d0bb28f0b43e88208ce2c1'
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/ovno/ovno-views.yaml b/jjb/ovno/ovno-views.yaml
deleted file mode 100644
index 54633cc0c..000000000
--- a/jjb/ovno/ovno-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: ovno-view
- views:
- - project-view
- project-name: ovno
diff --git a/jjb/pharos/check-jinja2.yaml b/jjb/pharos/check-jinja2.yaml
deleted file mode 100644
index e3f0df78a..000000000
--- a/jjb/pharos/check-jinja2.yaml
+++ /dev/null
@@ -1,98 +0,0 @@
----
-########################
-# Job configuration to validate jinja2 files
-########################
-- project:
-
- name: validate-pdf-templates
-
- project: 'pharos'
-
- jobs:
- - 'validate-pdf-jinja2-templates-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - fraser: &fraser
- branch: 'stable/{stream}'
- disabled: false
- - euphrates:
- branch: 'stable/{stream}'
- disabled: false
-
-########################
-# job templates
-########################
-
-- job-template:
- name: 'validate-pdf-jinja2-templates-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - node:
- name: SLAVE_NAME
-      description: Slave to execute jinja template test
- default-slaves:
- - lf-build1
- allowed-multiselect: true
- ignore-offline-nodes: true
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '**/*.yaml'
- - compare-type: ANT
- pattern: 'config/utils/*.py'
- - compare-type: ANT
- pattern: 'config/installers/**/*.j2'
- - compare-type: ANT
- pattern: 'check-*.sh'
- skip-vote:
- successful: false
- failed: false
- unstable: false
- notbuilt: false
-
- builders:
- - check-pdf-jinja
- - check-pdf-schema
-
-- builder:
- name: check-pdf-jinja
- builders:
- - shell: |
- $WORKSPACE/config/utils/check-jinja2.sh
-- builder:
- name: check-pdf-schema
- builders:
- - shell: |
- $WORKSPACE/config/utils/check-schema.sh
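The deleted validate-pdf job above ran two helper scripts from the pharos workspace against Pod Descriptor File templates. The same checks can be reproduced from a local clone roughly as follows (the clone URL follows the gerrit host used elsewhere in this change; the script paths come from the deleted builders):

    #!/bin/bash
    # Hedged sketch: run the PDF jinja2 and schema checks outside Jenkins.
    set -o errexit
    git clone https://gerrit.opnfv.org/gerrit/pharos
    cd pharos
    bash config/utils/check-jinja2.sh
    bash config/utils/check-schema.sh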
diff --git a/jjb/pharos/pharos-rtd-jobs.yaml b/jjb/pharos/pharos-rtd-jobs.yaml
deleted file mode 100644
index 60bcd8a00..000000000
--- a/jjb/pharos/pharos-rtd-jobs.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
----
-- project:
- name: pharos-rtd
- project: pharos
- project-name: pharos
-
- project-pattern: 'pharos'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-pharos/47385/'
- rtd-token: '12cb789478d0c3577fb62c610232b3113d3a16ad'
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/pharos/pharos-views.yaml b/jjb/pharos/pharos-views.yaml
deleted file mode 100644
index e413632a8..000000000
--- a/jjb/pharos/pharos-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: pharos-view
- views:
- - project-view
- project-name: pharos
diff --git a/jjb/pharos/pharos.yaml b/jjb/pharos/pharos.yaml
deleted file mode 100644
index 0b5119657..000000000
--- a/jjb/pharos/pharos.yaml
+++ /dev/null
@@ -1,11 +0,0 @@
----
-- project:
- name: pharos
-
- project:
- - '{name}'
-
- disabled: false
-
- jobs:
- - '{project}-verify-basic'
diff --git a/jjb/releng/artifact-cleanup.yaml b/jjb/releng/artifact-cleanup.yaml
deleted file mode 100644
index 2a250618c..000000000
--- a/jjb/releng/artifact-cleanup.yaml
+++ /dev/null
@@ -1,41 +0,0 @@
----
-- project:
- name: releng-artifact-cleanup
-
- project: 'releng'
-
- jobs:
- - 'releng-artifact-cleanup-daily-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
-
-
-- job-template:
- name: 'releng-artifact-cleanup-daily-{stream}'
-
- # Job template for daily builders
- #
- # Required Variables:
- # stream: branch with - in place of / (eg. stable)
- # branch: branch (eg. stable)
- node: master
-
- disabled: false
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
-
- scm:
- - git-scm
-
- triggers:
- - timed: 'H H * * *'
-
- builders:
- - shell: |
- $WORKSPACE/utils/retention_script.sh
diff --git a/jjb/releng/branch-or-tag.sh b/jjb/releng/branch-or-tag.sh
index 0fdb24fca..f9767eca1 100755
--- a/jjb/releng/branch-or-tag.sh
+++ b/jjb/releng/branch-or-tag.sh
@@ -8,6 +8,7 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
set -e -o pipefail
+set -x
GIT_URL=${GIT_URL:-https://gerrit.opnfv.org/gerrit}
STREAM=${STREAM:-'nostream'}
@@ -16,7 +17,7 @@ RELEASE_FILES=$(git diff HEAD^1 --name-only -- "releases/$STREAM")
echo "--> Verifying $RELEASE_FILES."
for release_file in $RELEASE_FILES; do
# Verify the release file schema
- python releases/scripts/verify_schema.py \
+ python3 releases/scripts/verify_schema.py \
-s releases/schema.yaml \
-y $release_file
done
@@ -29,12 +30,11 @@ for release_file in $RELEASE_FILES; do
if [ -n "$branch_actual" ]; then
echo "$repo refs/heads/$branch already exists at $branch_actual"
- echo "RUN releng-release-create-venv.sh"
source jjb/releng/releng-release-tagging.sh
else
echo "This is a branching job"
source jjb/releng/releng-release-create-branch.sh
fi
- done < <(python releases/scripts/repos.py -b -f "$release_file")
+ done < <(python3 releases/scripts/repos.py -b -f "$release_file")
done
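The branch-or-tag.sh change above enables shell tracing and moves the release tooling to python3. For reference, a one-off invocation of the same schema check against a single release file (the file path is supplied by the caller; the flags and script paths are unchanged from the hunk):

    #!/bin/bash
    # Hedged sketch: run the release-file schema check once, outside the branching job.
    set -e -o pipefail
    release_file=${1:?usage: $0 <releases/STREAM/project.yaml>}
    python3 releases/scripts/verify_schema.py \
        -s releases/schema.yaml \
        -y "$release_file"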
diff --git a/jjb/releng/opnfv-docker-arm.yaml b/jjb/releng/opnfv-docker-arm.yaml
deleted file mode 100644
index a41332636..000000000
--- a/jjb/releng/opnfv-docker-arm.yaml
+++ /dev/null
@@ -1,230 +0,0 @@
----
-##############################################
-# job configuration for docker build and push
-##############################################
-
-- project:
-
- name: opnfv-docker-arm
-
- master: &master
- stream: master
- branch: '{stream}'
- disabled: false
- euphrates: &euphrates
- stream: euphrates
- branch: 'stable/{stream}'
- disabled: false
- hunter: &hunter
- stream: hunter
- branch: 'stable/{stream}'
- disabled: false
- iruya: &iruya
- stream: iruya
- branch: 'stable/{stream}'
- disabled: false
- dovetail-arm-receivers: &dovetail-arm-receivers
- receivers: >
- cristina.pauna@enea.com
- alexandru.avadanii@enea.com
- storperf-arm-receivers: &storperf-arm-receivers
- receivers: >
- cristina.pauna@enea.com
- alexandru.avadanii@enea.com
- mbeierl@vmware.com
- yardstick-arm-receivers: &yardstick-arm-receivers
- receivers: >
- cristina.pauna@enea.com
- alexandru.avadanii@enea.com
- delia.popescu@enea.com
- other-receivers: &other-receivers
- receivers: ''
-
- dockerfile: "Dockerfile"
- dockerdir: "docker"
- docker_repo_name: "opnfv/{project}_aarch64"
- arch_tag: ""
- extra_build_args: ""
-
- # yamllint disable rule:key-duplicates
- dockerrepo:
- # projects with jobs for master
- - 'dovetail':
- project: 'dovetail'
- <<: *master
- <<: *dovetail-arm-receivers
- - 'storperf-master':
- project: 'storperf'
- dockerdir: 'docker/storperf-master'
- docker_repo_name: 'opnfv/storperf-master'
- arch_tag: 'aarch64'
- <<: *master
- <<: *storperf-arm-receivers
- - 'storperf-graphite':
- project: 'storperf'
- dockerdir: 'docker/storperf-graphite'
- docker_repo_name: 'opnfv/storperf-graphite'
- arch_tag: 'aarch64'
- <<: *master
- <<: *storperf-arm-receivers
- - 'storperf-httpfrontend':
- project: 'storperf'
- dockerdir: 'docker/storperf-httpfrontend'
- docker_repo_name: 'opnfv/storperf-httpfrontend'
- arch_tag: 'aarch64'
- <<: *master
- <<: *storperf-arm-receivers
- - 'storperf-reporting':
- project: 'storperf'
- dockerdir: 'docker/storperf-reporting'
- docker_repo_name: 'opnfv/storperf-reporting'
- arch_tag: 'aarch64'
- <<: *master
- <<: *storperf-arm-receivers
- - 'storperf-swaggerui':
- project: 'storperf'
- dockerdir: 'docker/storperf-swaggerui'
- docker_repo_name: 'opnfv/storperf-swaggerui'
- arch_tag: 'aarch64'
- <<: *master
- <<: *storperf-arm-receivers
- - 'storperf-workloadagent':
- project: 'storperf'
- dockerdir: 'docker/storperf-workloadagent'
- docker_repo_name: 'opnfv/storperf-workloadagent'
- arch_tag: 'aarch64'
- <<: *master
- <<: *storperf-arm-receivers
- - 'yardstick':
- project: 'yardstick'
- <<: *master
- <<: *yardstick-arm-receivers
-
- # projects with jobs for stable/euphrates
- - 'dovetail':
- project: 'dovetail'
- <<: *euphrates
- <<: *dovetail-arm-receivers
-
- # projects with jobs for stable/hunter
- - 'yardstick':
- project: 'yardstick'
- <<: *hunter
- <<: *yardstick-arm-receivers
-
- # projects with jobs for stable/iruya
- - 'yardstick':
- project: 'yardstick'
- <<: *iruya
- <<: *yardstick-arm-receivers
- - 'storperf-master':
- project: 'storperf'
- dockerdir: 'docker/storperf-master'
- docker_repo_name: 'opnfv/storperf-master'
- arch_tag: 'aarch64'
- <<: *iruya
- <<: *storperf-arm-receivers
- - 'storperf-graphite':
- project: 'storperf'
- dockerdir: 'docker/storperf-graphite'
- docker_repo_name: 'opnfv/storperf-graphite'
- arch_tag: 'aarch64'
- <<: *iruya
- <<: *storperf-arm-receivers
- - 'storperf-httpfrontend':
- project: 'storperf'
- dockerdir: 'docker/storperf-httpfrontend'
- docker_repo_name: 'opnfv/storperf-httpfrontend'
- arch_tag: 'aarch64'
- <<: *iruya
- <<: *storperf-arm-receivers
- - 'storperf-reporting':
- project: 'storperf'
- dockerdir: 'docker/storperf-reporting'
- docker_repo_name: 'opnfv/storperf-reporting'
- arch_tag: 'aarch64'
- <<: *iruya
- <<: *storperf-arm-receivers
- - 'storperf-swaggerui':
- project: 'storperf'
- dockerdir: 'docker/storperf-swaggerui'
- docker_repo_name: 'opnfv/storperf-swaggerui'
- arch_tag: 'aarch64'
- <<: *iruya
- <<: *storperf-arm-receivers
- - 'storperf-workloadagent':
- project: 'storperf'
- dockerdir: 'docker/storperf-workloadagent'
- docker_repo_name: 'opnfv/storperf-workloadagent'
- arch_tag: 'aarch64'
- <<: *iruya
- <<: *storperf-arm-receivers
-
- # yamllint enable rule:key-duplicates
- jobs:
- - '{dockerrepo}-docker-build-arm-push-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
- name: '{dockerrepo}-docker-build-arm-push-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters: &parameters
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-arm-defaults'
- - string:
- name: PUSH_IMAGE
- default: "true"
- description: "To enable/disable pushing the image to Dockerhub."
- - string:
- name: DOCKER_REPO_NAME
- default: "{docker_repo_name}"
- description: "Dockerhub repo to be pushed to."
- - string:
- name: GERRIT_REFNAME
- default: ""
- description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
- - string:
- name: DOCKER_DIR
- default: "{dockerdir}"
- description: "Directory containing files needed by the Dockerfile"
- - string:
- name: DOCKERFILE
- default: "{dockerfile}"
- description: "Dockerfile to use for creating the image."
- - string:
- name: ARCH_TAG
- default: "{arch_tag}"
- description: "If set, this value will be added to the docker image tag"
- - string:
- name: EXTRA_BUILD_ARGS
- default: "{extra_build_args}"
- description: "Whitespace separated key=value pairs. If set, these args will be used to build docker image"
-
- properties:
- - throttle:
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm
-
- builders: &builders
- - shell:
- !include-raw-escape: ./opnfv-docker.sh
-
- triggers:
- - pollscm:
- cron: "*/30 * * * *"
- - gerrit-trigger-tag-created:
- project: '{project}'
-
- publishers:
- - email:
- recipients: '{receivers}'
- - email-jenkins-admins-on-failure
diff --git a/jjb/releng/opnfv-docker-custom.yaml b/jjb/releng/opnfv-docker-custom.yaml
deleted file mode 100644
index e564ab5ef..000000000
--- a/jjb/releng/opnfv-docker-custom.yaml
+++ /dev/null
@@ -1,111 +0,0 @@
----
-########################
-# Job configuration for opnfv-docker-custom
-########################
-- project:
-
- name: opnfv-docker-custom
-
- project: '{name}'
-
- jobs:
- - 'opnfv-docker-custom-verify-{stream}'
- - 'opnfv-docker-custom-merge-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - danube:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-########################
-# job templates
-########################
-
-- job-template:
- name: 'opnfv-docker-custom-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: 'disabled'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'docker/**'
-
- builders:
- - verify-docker
-
-- job-template:
- name: 'opnfv-docker-custom-merge-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: 'disabled'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'docker/**'
-
- builders:
- - merge-docker
-
-- builder:
- name: verify-docker
- builders:
- - shell: |
- /bin/bash $WORKSPACE/verify-docker.sh
-
-- builder:
- name: merge-docker
- builders:
- - shell: |
- /bin/bash $WORKSPACE/merge-docker.sh
diff --git a/jjb/releng/opnfv-docker.sh b/jjb/releng/opnfv-docker.sh
deleted file mode 100644
index de39f0628..000000000
--- a/jjb/releng/opnfv-docker.sh
+++ /dev/null
@@ -1,164 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-
-echo "Using Docker $(docker --version) on $NODE_NAME"
-echo "Starting Docker build for $DOCKER_REPO_NAME ..."
-echo "--------------------------------------------------------"
-echo
-
-function remove_containers_images()
-{
- # Remove previous running containers if exist
- #
- # $ docker ps -a
- # CONTAINER ID IMAGE COMMAND ...
- # 6a796ed40b8e opnfv/example-tasks:latest "/bin/bash" ...
- # 99fcb59f4787 opnfv/example-tasks-base:latest "/bin/bash" ...
- # cc5eee16b995 opnfv/example-tasks-k8s "/bin/bash" ...
- #
- # Cut image name by leading space and ending space or colon(tag)
- if [[ -n "$(docker ps -a | grep " $DOCKER_REPO_NAME[ :]")" ]]; then
- echo "Removing existing $DOCKER_REPO_NAME containers..."
- docker ps -a | grep " $DOCKER_REPO_NAME[ :]" | awk '{print $1}' | xargs docker rm -f
- t=60
- # Wait max 60 sec for containers to be removed
- while [[ $t -gt 0 ]] && [[ -n "$(docker ps| grep " $DOCKER_REPO_NAME[ :]")" ]]; do
- sleep 1
- let t=t-1
- done
- fi
-
-
- # Remove existing images if exist
- #
- # $ docker images
- # REPOSITORY TAG IMAGE ID ...
- # opnfv/example-tasks latest 6501569fd328 ...
- # opnfv/example-tasks-base latest 8764fe29c434 ...
- # opnfv/example-tasks-k8s latest 61094cac9e65 ...
- #
- # Cut image name by start of line and ending space
- if [[ -n "$(docker images | grep "^$DOCKER_REPO_NAME ")" ]]; then
- echo "Docker images to remove:"
- docker images | head -1 && docker images | grep "^$DOCKER_REPO_NAME "
- image_ids=($(docker images | grep "^$DOCKER_REPO_NAME " | awk '{print $3}'))
- for id in "${image_ids[@]}"; do
- if [[ -n "$(docker images|grep "^$DOCKER_REPO_NAME "|grep $id)" ]]; then
- echo "Removing docker image $DOCKER_REPO_NAME:$id..."
- docker rmi -f $id
- fi
- done
- fi
-}
-
-
-count=30 # docker build jobs might take up to ~30 min
-while [[ -n `ps -ef| grep 'docker build' | grep $DOCKER_REPO_NAME | grep -v grep` ]]; do
- echo "Build or cleanup of $DOCKER_REPO_NAME in progress. Waiting..."
- sleep 60
- count=$(( $count - 1 ))
- if [ $count -eq 0 ]; then
- echo "Timeout. Aborting..."
- exit 1
- fi
-done
-
-# Remove the existing containers and images before building
-remove_containers_images
-
-DOCKER_PATH=$WORKSPACE/$DOCKER_DIR
-
-cd $DOCKER_PATH || exit 1
-HOST_ARCH="$(uname -m)"
-# If there is a patch for an arch other than x86, apply the patch and
-# replace the Dockerfile
-dockerfile_patch="Dockerfile.${HOST_ARCH}.patch"
-if [[ -f "${dockerfile_patch}" ]]; then
- patch -f Dockerfile -p1 < "${dockerfile_patch}"
-fi
-
-# Get tag version
-echo "Current branch: $BRANCH"
-
-BUILD_BRANCH=$BRANCH
-
-GERRIT_REFNAME=${GERRIT_REFNAME:-''}
-RELEASE_VERSION=${GERRIT_REFNAME/refs\/tags\/}
-
-# If we're being triggered by a comment-added job, then extract the tag
-# from the comment and use that as the release version.
-# Expected comment format: retag opnfv-x.y.z
-if [[ "${GERRIT_EVENT_TYPE:-}" == "comment-added" ]]; then
- RELEASE_VERSION=$(echo "$GERRIT_EVENT_COMMENT_TEXT" | grep 'retag' | awk '{print $2}')
-fi
-
-if [[ "$BRANCH" == "master" ]]; then
- DOCKER_TAG="latest"
-elif [[ -n "${RELEASE_VERSION-}" ]]; then
- DOCKER_TAG=${RELEASE_VERSION}
- if git checkout ${RELEASE_VERSION}; then
- echo "Successfully checked out the git tag ${RELEASE_VERSION}"
- else
- echo "The tag ${RELEASE_VERSION} doesn't exist in the repository. Existing tags are:"
- git tag
- exit 1
- fi
-else
- DOCKER_TAG="stable"
-fi
-
-if [[ -n "${COMMIT_ID-}" && -n "${RELEASE_VERSION-}" ]]; then
- DOCKER_TAG=$RELEASE_VERSION
- BUILD_BRANCH=$COMMIT_ID
-fi
-
-ARCH_BUILD_ARG=""
-ARCH_TAG=${ARCH_TAG:-}
-if [[ -n "${ARCH_TAG}" ]]; then
- DOCKER_TAG=${ARCH_TAG}-${DOCKER_TAG}
- ARCH_BUILD_ARG="--build-arg ARCH=${ARCH_TAG}"
-fi
-
-EXTRA_BUILD_ARGS=${EXTRA_BUILD_ARGS:-}
-if [ -n "${EXTRA_BUILD_ARGS}" ]; then
- EXTRA_BUILD_ARGS=" "$(echo ${EXTRA_BUILD_ARGS})
- EXTRA_BUILD_ARGS=${EXTRA_BUILD_ARGS// / --build-arg }
-fi
-
-# Start the build
-echo "Building docker image: $DOCKER_REPO_NAME:$DOCKER_TAG"
-echo "--------------------------------------------------------"
-echo
-cmd="docker build --pull=true --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG --build-arg BRANCH=$BUILD_BRANCH
- $ARCH_BUILD_ARG $EXTRA_BUILD_ARGS
- -f $DOCKERFILE $DOCKER_PATH"
-
-echo ${cmd}
-${cmd}
-
-
-# list the images
-echo "Available images are:"
-docker images
-
-# Push image to Dockerhub
-if [[ "$PUSH_IMAGE" == "true" ]]; then
- echo "Pushing $DOCKER_REPO_NAME:$DOCKER_TAG to the docker registry..."
- echo "--------------------------------------------------------"
- echo
- docker push $DOCKER_REPO_NAME:$DOCKER_TAG
-fi
-
-# Remove the existing containers and images after building
-remove_containers_images
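The removed opnfv-docker.sh combined the branch, an optional release tag, and an optional architecture prefix into the image tag. A condensed sketch of just that tag-selection logic (variable names follow the removed script; this is not a drop-in replacement for it):

    #!/bin/bash
    # Hedged sketch of the tag derivation used by the removed script:
    #   master branch        -> latest
    #   refs/tags/<version>  -> <version>
    #   any other branch     -> stable
    # plus an optional "<arch>-" prefix, e.g. aarch64-latest.
    BRANCH=${BRANCH:-master}
    GERRIT_REFNAME=${GERRIT_REFNAME:-}
    ARCH_TAG=${ARCH_TAG:-}
    RELEASE_VERSION=${GERRIT_REFNAME#refs/tags/}   # equivalent to the substitution in the removed script

    if [[ "$BRANCH" == "master" ]]; then
        DOCKER_TAG="latest"
    elif [[ -n "$RELEASE_VERSION" ]]; then
        DOCKER_TAG="$RELEASE_VERSION"
    else
        DOCKER_TAG="stable"
    fi
    [[ -n "$ARCH_TAG" ]] && DOCKER_TAG="${ARCH_TAG}-${DOCKER_TAG}"
    echo "would build: ${DOCKER_REPO_NAME:-opnfv/example}:${DOCKER_TAG}"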
diff --git a/jjb/releng/opnfv-docker.yaml b/jjb/releng/opnfv-docker.yaml
deleted file mode 100644
index 24b1b85b5..000000000
--- a/jjb/releng/opnfv-docker.yaml
+++ /dev/null
@@ -1,506 +0,0 @@
----
-##############################################
-# job configuration for docker build and push
-##############################################
-
-- project:
-
- name: opnfv-docker
-
- master: &master
- stream: master
- branch: '{stream}'
- disabled: false
- hunter: &hunter
- stream: hunter
- branch: 'stable/{stream}'
- disabled: false
- iruya: &iruya
- stream: iruya
- branch: 'stable/{stream}'
- disabled: false
- laas-receivers: &laas_receivers
- receivers: >
- pberberian@iol.unh.edu
- sbergeron@iol.unh.edu
- storperf-receivers: &storperf-receivers
- receivers: >
- mbeierl@vmware.com
- yardstick-receivers: &yardstick-receivers
- receivers: >
- rexlee8776@gmail.com
- other-receivers: &other-receivers
- receivers: ''
-
- dockerfile: "Dockerfile"
- dockerdir: "docker"
- arch_tag: ""
- extra_build_args: ""
-
- # This is the dockerhub repo the image will be pushed to as
- # 'opnfv/{dockerrepo}. See: DOCKER_REPO_NAME parameter.
- # 'project' is the OPNFV repo we expect to contain the Dockerfile
- # yamllint disable rule:key-duplicates
- dockerrepo:
- # projects with jobs for master
- - 'releng-anteater':
- project: 'releng-anteater'
- <<: *master
- <<: *other-receivers
- - 'barometer-collectd':
- project: 'barometer'
- dockerdir: 'docker/barometer-collectd'
- <<: *master
- <<: *other-receivers
- - 'barometer-collectd-experimental':
- project: 'barometer'
- dockerdir: '.'
- dockerfile: 'docker/barometer-collectd-experimental/Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'barometer-collectd-master':
- project: 'barometer'
- dockerdir: '.'
- dockerfile: 'docker/barometer-collectd-master/Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'barometer-dma':
- project: 'barometer'
- dockerdir: 'docker/barometer-dma'
- <<: *master
- <<: *other-receivers
- - 'barometer-grafana':
- project: 'barometer'
- dockerdir: 'docker/barometer-grafana'
- <<: *master
- <<: *other-receivers
- - 'barometer-influxdb':
- project: 'barometer'
- dockerdir: 'docker/barometer-influxdb'
- <<: *master
- <<: *other-receivers
- - 'barometer-kafka':
- project: 'barometer'
- dockerdir: 'docker/barometer-kafka'
- <<: *master
- <<: *other-receivers
- - 'barometer-ves':
- project: 'barometer'
- dockerdir: 'docker/barometer-ves'
- <<: *master
- <<: *other-receivers
- - 'barometer-snmp':
- project: 'barometer'
- dockerdir: 'docker/barometer-snmp'
- <<: *master
- <<: *other-receivers
- - 'bottlenecks':
- project: 'bottlenecks'
- <<: *master
- <<: *other-receivers
- - 'clover':
- project: 'clover'
- dockerdir: '.'
- dockerfile: 'docker/Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'clover-ns-nginx-lb':
- project: 'clover'
- dockerdir: 'samples/services/nginx/docker'
- dockerfile: 'subservices/lb/Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'clover-ns-nginx-proxy':
- project: 'clover'
- dockerdir: 'samples/services/nginx/docker'
- dockerfile: 'subservices/proxy/Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'clover-ns-nginx-server':
- project: 'clover'
- dockerdir: 'samples/services/nginx/docker'
- dockerfile: 'subservices/server/Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'clover-ns-snort-ids':
- project: 'clover'
- dockerdir: 'samples/services/snort_ids/docker'
- dockerfile: 'Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'clover-controller':
- project: 'clover'
- dockerdir: 'clover/controller'
- dockerfile: 'docker/Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'clover-collector':
- project: 'clover'
- dockerdir: 'clover/collector'
- dockerfile: 'docker/Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'clover-jmeter-master':
- project: 'clover'
- dockerdir: 'clover/tools/jmeter'
- dockerfile: 'jmeter-master/Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'clover-jmeter-slave':
- project: 'clover'
- dockerdir: 'clover/tools/jmeter'
- dockerfile: 'jmeter-slave/Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'clover-sample-app':
- project: 'clover'
- dockerdir: 'samples/scenarios/sample_app'
- dockerfile: 'Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'clover-ns-modsecurity-crs':
- project: 'clover'
- dockerdir: 'samples/services/modsecurity/docker'
- dockerfile: 'Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'clover-spark':
- project: 'clover'
- dockerdir: 'clover/spark/docker/clover-spark'
- dockerfile: 'Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'clover-spark-submit':
- project: 'clover'
- dockerdir: 'clover/spark/docker/spark-submit'
- dockerfile: 'Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'clover-clovisor':
- project: 'clover'
- dockerdir: 'clover/clovisor'
- dockerfile: 'Dockerfile'
- <<: *master
- <<: *other-receivers
- - 'cperf':
- project: 'cperf'
- <<: *master
- <<: *other-receivers
- - 'dovetail':
- project: 'dovetail'
- <<: *master
- <<: *other-receivers
- - 'dovetail-webportal-api':
- project: 'dovetail-webportal'
- dockerfile: 'Dockerfile.api'
- <<: *master
- <<: *other-receivers
- - 'dovetail-webportal-web':
- project: 'dovetail-webportal'
- dockerfile: 'Dockerfile.web'
- <<: *master
- <<: *other-receivers
- - 'dovetail-webportal-web-onap':
- project: 'dovetail-webportal'
- dockerfile: 'Dockerfile.web'
- extra_build_args: 'CONTAINER=onap GUI=onap-ui'
- <<: *master
- <<: *other-receivers
- - 'nfvbench':
- project: 'nfvbench'
- <<: *master
- <<: *other-receivers
- - 'laas-dashboard':
- project: 'laas'
- dockerdir: '.'
- dockerfile: 'web/Dockerfile'
- <<: *master
- <<: *laas_receivers
- - 'laas-celery':
- project: 'laas'
- dockerdir: '.'
- dockerfile: 'worker/Dockerfile'
- <<: *master
- <<: *laas_receivers
- - 'qtip':
- project: 'qtip'
- dockerdir: '.'
- dockerfile: 'docker/Dockerfile.local'
- <<: *master
- <<: *other-receivers
- - 'qtip-nettest':
- project: 'qtip'
- dockerdir: 'contrib/nettest'
- <<: *master
- <<: *other-receivers
- - 'storperf-master':
- project: 'storperf'
- dockerdir: 'docker/storperf-master'
- arch_tag: 'x86_64'
- <<: *master
- <<: *storperf-receivers
- - 'storperf-graphite':
- project: 'storperf'
- dockerdir: 'docker/storperf-graphite'
- arch_tag: 'x86_64'
- <<: *master
- <<: *storperf-receivers
- - 'storperf-httpfrontend':
- project: 'storperf'
- dockerdir: 'docker/storperf-httpfrontend'
- arch_tag: 'x86_64'
- <<: *master
- <<: *storperf-receivers
- - 'storperf-reporting':
- project: 'storperf'
- dockerdir: 'docker/storperf-reporting'
- arch_tag: 'x86_64'
- <<: *master
- <<: *storperf-receivers
- - 'storperf-swaggerui':
- project: 'storperf'
- dockerdir: 'docker/storperf-swaggerui'
- arch_tag: 'x86_64'
- <<: *master
- <<: *storperf-receivers
- - 'storperf-workloadagent':
- project: 'storperf'
- dockerdir: 'docker/storperf-workloadagent'
- arch_tag: 'x86_64'
- <<: *master
- <<: *storperf-receivers
- - 'yardstick':
- project: 'yardstick'
- <<: *master
- <<: *yardstick-receivers
- - 'yardstick-ubuntu-18.04':
- project: 'yardstick'
- dockerfile: 'Dockerfile_ubuntu18'
- <<: *master
- <<: *yardstick-receivers
- - 'yardstick-image-k8s':
- project: 'yardstick'
- dockerdir: 'docker/k8s'
- <<: *master
- <<: *yardstick-receivers
-
- # projects with jobs for hunter
- - 'bottlenecks':
- project: 'bottlenecks'
- <<: *hunter
- <<: *other-receivers
- - 'dovetail':
- project: 'dovetail'
- <<: *hunter
- <<: *other-receivers
- - 'yardstick':
- project: 'yardstick'
- <<: *hunter
- <<: *yardstick-receivers
-
- # projects with jobs for iruya
- - 'yardstick':
- project: 'yardstick'
- <<: *iruya
- <<: *yardstick-receivers
- - 'barometer-dma':
- project: 'barometer'
- dockerdir: 'docker/barometer-dma'
- <<: *iruya
- <<: *other-receivers
- - 'barometer-collectd':
- project: 'barometer'
- dockerdir: 'docker/barometer-collectd'
- <<: *iruya
- <<: *other-receivers
- - 'barometer-collectd-experimental':
- project: 'barometer'
- dockerdir: '.'
- dockerfile: 'docker/barometer-collectd-experimental/Dockerfile'
- <<: *iruya
- <<: *other-receivers
- - 'barometer-collectd-master':
- project: 'barometer'
- dockerdir: '.'
- dockerfile: 'docker/barometer-collectd-master/Dockerfile'
- <<: *iruya
- <<: *other-receivers
- - 'barometer-grafana':
- project: 'barometer'
- dockerdir: 'docker/barometer-grafana'
- <<: *iruya
- <<: *other-receivers
- - 'barometer-influxdb':
- project: 'barometer'
- dockerdir: 'docker/barometer-influxdb'
- <<: *iruya
- <<: *other-receivers
- - 'barometer-kafka':
- project: 'barometer'
- dockerdir: 'docker/barometer-kafka'
- <<: *iruya
- <<: *other-receivers
- - 'barometer-ves':
- project: 'barometer'
- dockerdir: 'docker/barometer-ves'
- <<: *iruya
- <<: *other-receivers
- - 'barometer-snmp':
- project: 'barometer'
- dockerdir: 'docker/barometer-snmp'
- <<: *iruya
- <<: *other-receivers
- - 'storperf-master':
- project: 'storperf'
- dockerdir: 'docker/storperf-master'
- arch_tag: 'x86_64'
- <<: *iruya
- <<: *storperf-receivers
- - 'storperf-graphite':
- project: 'storperf'
- dockerdir: 'docker/storperf-graphite'
- arch_tag: 'x86_64'
- <<: *iruya
- <<: *storperf-receivers
- - 'storperf-httpfrontend':
- project: 'storperf'
- dockerdir: 'docker/storperf-httpfrontend'
- arch_tag: 'x86_64'
- <<: *iruya
- <<: *storperf-receivers
- - 'storperf-reporting':
- project: 'storperf'
- dockerdir: 'docker/storperf-reporting'
- arch_tag: 'x86_64'
- <<: *iruya
- <<: *storperf-receivers
- - 'storperf-swaggerui':
- project: 'storperf'
- dockerdir: 'docker/storperf-swaggerui'
- arch_tag: 'x86_64'
- <<: *iruya
- <<: *storperf-receivers
- - 'storperf-workloadagent':
- project: 'storperf'
- dockerdir: 'docker/storperf-workloadagent'
- arch_tag: 'x86_64'
- <<: *iruya
- <<: *storperf-receivers
-
- # yamllint enable rule:key-duplicates
- jobs:
- - "{dockerrepo}-docker-build-push-{stream}"
-
-- project:
-
- name: opnfv-monitor-docker # projects which only monitor dedicated file or path
-
- dockerfile: "Dockerfile"
- dockerdir: "docker"
- arch_tag: ""
- extra_build_args: ""
-
- project:
- # projects with jobs for master
- - 'daisy':
- dockerrepo: 'daisy'
- <<: *master
-
- jobs:
- - '{project}-docker-build-push-monitor-{stream}'
-
-########################
-# job templates
-########################
-- job-template:
- name: '{dockerrepo}-docker-build-push-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters: &parameters
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
- - string:
- name: PUSH_IMAGE
- default: "true"
- description: "To enable/disable pushing the image to Dockerhub."
- - string:
- name: DOCKER_REPO_NAME
- default: "opnfv/{dockerrepo}"
- description: "Dockerhub repo to be pushed to."
- - string:
- name: DOCKER_DIR
- default: "{dockerdir}"
- description: "Directory containing files needed by the Dockerfile"
- - string:
- name: COMMIT_ID
- default: ""
- description: "commit id to make a snapshot docker image"
- - string:
- name: GERRIT_REFNAME
- default: ""
- description: "Docker tag to be built, e.g. refs/tags/5.0.0, refs/tags/opnfv-5.0.0, refs/tags/5.0.RC1"
- - string:
- name: DOCKERFILE
- default: "{dockerfile}"
- description: "Dockerfile to use for creating the image."
- - string:
- name: ARCH_TAG
- default: "{arch_tag}"
- description: "If set, this value will be added to the docker image tag as a prefix"
- - string:
- name: EXTRA_BUILD_ARGS
- default: "{extra_build_args}"
- description: "Whitespace separated key=value pairs. If set, these args will be used to build docker image"
-
- properties:
- - throttle:
- max-per-node: 1
- option: 'project'
-
- scm:
- - git-scm
-
- builders: &builders
- - shell:
- !include-raw-escape: ./opnfv-docker.sh
-
- triggers:
- - pollscm:
- cron: "*/30 * * * *"
- - gerrit-trigger-tag-created:
- project: '{project}'
-
- publishers:
- - email:
- recipients: '{receivers}'
- - email-jenkins-admins-on-failure
-
-- job-template:
- name: '{project}-docker-build-push-monitor-{stream}'
- disabled: '{obj:disabled}'
- parameters: *parameters
-
- scm:
- - git-scm
-
- builders: *builders
-
- # trigger only matching the file name
- triggers:
- - gerrit:
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'docker/**'
diff --git a/jjb/releng/opnfv-lint.yaml b/jjb/releng/opnfv-lint.yaml
deleted file mode 100644
index 6483e3262..000000000
--- a/jjb/releng/opnfv-lint.yaml
+++ /dev/null
@@ -1,186 +0,0 @@
----
-########################
-# Job configuration for opnfv-lint
-########################
-- project:
-
- name: opnfv-lint
-
- project: opnfv-lint
-
- jobs:
- - 'opnfv-lint-verify-{stream}'
- - 'opnfv-yamllint-verify-{stream}'
- - 'opnfv-pylint-verify-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- - danube:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-########################
-# job templates
-########################
-
-- job-template:
- name: 'opnfv-lint-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - node:
- name: SLAVE_NAME
- description: Slaves to execute yamllint
- default-slaves:
- - lf-build1
- allowed-multiselect: true
- ignore-offline-nodes: true
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: 'releng|doctor|pharos'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '**/*.py'
- - compare-type: ANT
- pattern: '**/*.sh'
- - compare-type: ANT
- pattern: '**/*.yml'
- - compare-type: ANT
- pattern: '**/*.yaml'
-
- builders:
- - lint-all-code
-
-- job-template:
- name: 'opnfv-pylint-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - node:
- name: SLAVE_NAME
- description: Slaves to execute yamllint
- default-slaves:
- - lf-build1
- allowed-multiselect: true
- ignore-offline-nodes: true
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: 'sdnvpn|qtip|daisy|sfc|escalator'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '**/*.py'
-
- builders:
- - lint-init
- - lint-python-code
- - lint-report
-
-- job-template:
- name: 'opnfv-yamllint-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - node:
- name: SLAVE_NAME
- description: Slaves to execute yamllint
- default-slaves:
- - lf-build1
- allowed-multiselect: true
- ignore-offline-nodes: true
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'REG_EXP'
- project-pattern: 'armband|fuel|releng-anteater'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: '**/*.yml'
- - compare-type: ANT
- pattern: '**/*.yaml'
-
- builders:
- - lint-init
- - lint-yaml-code
- - lint-report
diff --git a/jjb/releng/opnfv-utils.yaml b/jjb/releng/opnfv-utils.yaml
deleted file mode 100644
index 2cdb31bb0..000000000
--- a/jjb/releng/opnfv-utils.yaml
+++ /dev/null
@@ -1,178 +0,0 @@
----
-- project:
-
- name: opnfv-utils
-
- jobs:
- - 'prune-docker-images'
- - 'check-status-of-slaves'
- - 'ansible-build-server'
- - 'generate-artifacts-index-pages'
-
-########################
-# job templates
-########################
-- job-template:
- name: 'prune-docker-images'
-
- disabled: false
-
- concurrent: true
-
- parameters:
- - node:
- name: SLAVE_NAME
- description: Slaves to prune docker images
- default-slaves:
- - arm-build3
- - arm-build4
- - arm-build5
- - ericsson-build3
- - ericsson-build4
- - lf-build2
- allowed-multiselect: true
- ignore-offline-nodes: true
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- # yamllint disable rule:line-length
- - shell: |
- #!/bin/bash
- sudo systemctl restart docker
- (docker ps -q; docker ps -aq) | sort | uniq -u | xargs --no-run-if-empty docker rm
- docker images -f dangling=true -q | xargs --no-run-if-empty docker rmi
-
-
- # yamllint enable rule:line-length
- triggers:
- - timed: '@midnight'
-
-- job-template:
- name: 'check-status-of-slaves'
-
- disabled: false
- concurrent: true
-
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'script lives on master node'
- default-slaves:
- - lf-build1
- allowed-multiselect: false
- ignore-offline-nodes: true
- - project-parameter:
- project: releng
- branch: master
-
- scm:
- - git-scm
-
- triggers:
- - timed: '@midnight'
-
- builders:
- - shell: |
- cd $WORKSPACE/utils/
- bash slave-monitor-0.1.sh
-
-- job-template:
- name: 'ansible-build-server'
-
- project-type: freestyle
-
- disabled: false
- concurrent: true
-
- parameters:
- - node:
- name: SLAVE_NAME
- description: Build Servers
- default-slaves:
- - lf-build1
- - lf-build2
- - lf-build3
- - ericsson-build3
- - ericsson-build4
- allowed-multiselect: true
- ignore-offline-nodes: true
- - project-parameter:
- project: releng
- branch: master
-
- scm:
- - git-scm
-
- triggers:
- - timed: '@midnight'
-
- builders:
- - description-setter:
- description: '$NODE_NAME'
- - install-ansible
- - run-ansible-build-server-playbook
-
-
-- builder:
- name: install-ansible
- builders:
- - shell: |
- # Install ansible here
- if [ -f /etc/centos-release ] \
- || [ -f /etc/redhat-release ] \
- || [ -f /etc/system-release ]; then
- sudo yum -y install ansible
- fi
- if [ -f /etc/debian_version ] \
- || grep -qi ubuntu /etc/lsb-release \
- || grep -qi ubuntu /etc/os-release; then
- sudo apt-get -y install ansible
- fi
-
-
-- builder:
- name: run-ansible-build-server-playbook
- builders:
- - shell: |
- # run playbook
- sudo ansible-playbook -i \
- $WORKSPACE/utils/build-server-ansible/inventory.ini \
- $WORKSPACE/utils/build-server-ansible/main.yml
-
-
-- job-template:
- name: 'generate-artifacts-index-pages'
-
- project-type: freestyle
-
- disabled: false
-
- concurrent: false
-
- parameters:
- - node:
- name: SLAVE_NAME
- description: Build Servers
- default-slaves:
- - lf-build2
- - project-parameter:
- project: releng
- branch: master
-
- scm:
- - git-scm
-
- triggers:
- - timed: '@hourly'
-
- builders:
- - generate-artifact-html
-
-
-- builder:
- name: generate-artifact-html
- builders:
- - shell: |
- cd $WORKSPACE/utils/
- ./artifacts.opnfv.org.sh
diff --git a/jjb/releng/releng-info-vote.yaml b/jjb/releng/releng-info-vote.yaml
deleted file mode 100644
index 83818585a..000000000
--- a/jjb/releng/releng-info-vote.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: info-vote
- build-node: lf-build2
- jobs:
- - 'info-vote-verify'
diff --git a/jjb/releng/releng-jobs.yaml b/jjb/releng/releng-jobs.yaml
new file mode 100644
index 000000000..309b7ce68
--- /dev/null
+++ b/jjb/releng/releng-jobs.yaml
@@ -0,0 +1,496 @@
+---
+- scm:
+ name: releng-scm
+ scm:
+ - git:
+ url: 'ssh://fbot@gerrit.opnfv.org:29418/releng'
+ refspec: '+refs/heads/*:refs/remotes/origin/* +refs/changes/*:refs/changes/*'
+ submodule:
+ recursive: true
+ branches:
+ - '{ref}'
+
+- trigger:
+ name: releng-patchset-created
+ triggers:
+ - gerrit:
+ trigger-on:
+ - patchset-created-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'releng'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: 'master'
+
+- trigger:
+ name: releng-patchset-merged
+ triggers:
+ - gerrit:
+ trigger-on:
+ - change-merged-event
+ - comment-added-contains-event:
+ comment-contains-value: 'remerge'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'releng'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: 'master'
+
+- builder:
+ name: releng-jjb-verify
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install jenkins-job-builder -y
+ jenkins-jobs test --recursive -o tmp jjb
+ rm -rf tmp
+
+- builder:
+ name: releng-jjb-merge
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install jenkins-job-builder -y
+ jenkins-jobs update --recursive --delete-old jjb
+
+- parameter:
+ name: releng-jjb-node
+ parameters:
+ - label:
+ name: node
+ default: '{node}'
+
+- job-template:
+ name: releng-jjb-verify
+ triggers:
+ - releng-patchset-created
+ scm:
+ - releng-scm:
+ ref: $GERRIT_REFSPEC
+ parameters:
+ - releng-jjb-node:
+ node: opnfv-build
+ builders:
+ - releng-jjb-verify
+
+- project:
+ name: releng-jjb-verify
+ jobs:
+ - releng-jjb-verify
+
+- job-template:
+ name: releng-jjb-merge
+ triggers:
+ - releng-patchset-merged
+ scm:
+ - releng-scm:
+ ref: master
+ parameters:
+ - releng-jjb-node:
+ node: opnfv-build
+ builders:
+ - releng-jjb-merge
+
+- project:
+ name: releng-jjb-merge
+ jobs:
+ - releng-jjb-merge
+
+- scm:
+ name: opnfv-scm
+ scm:
+ - git:
+ url: 'https://gerrit.opnfv.org/gerrit/{project}'
+ refspec: '+refs/heads/*:refs/remotes/origin/* +refs/changes/*:refs/changes/*'
+ submodule:
+ recursive: true
+ branches:
+ - '{ref}'
+
+- trigger:
+ name: releng-tox-patchset-created
+ triggers:
+ - gerrit:
+ trigger-on:
+ - patchset-created-event
+ - comment-added-contains-event:
+ comment-contains-value: recheck
+ - comment-added-contains-event:
+ comment-contains-value: reverify
+ server-name: gerrit.opnfv.org
+ projects:
+ - project-compare-type: 'REG_EXP'
+ project-pattern: '^(?!functest).*'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: master
+ - branch-compare-type: 'ANT'
+ branch-pattern: stable/nile
+ - branch-compare-type: 'ANT'
+ branch-pattern: stable/moselle
+
+- builder:
+ name: releng-tox
+ builders:
+ - shell: |
+ [ -f tox.ini ] || exit 0
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install tox -y
+ tox --recreate
+
+- parameter:
+ name: releng-tox-node
+ parameters:
+ - label:
+ name: node
+ default: '{node}'
+
+- job-template:
+ name: releng-tox
+ triggers:
+ - releng-tox-patchset-created
+ scm:
+ - opnfv-scm:
+ ref: $GERRIT_REFSPEC
+ project: $GERRIT_PROJECT
+ parameters:
+ - releng-tox-node:
+ node: opnfv-build
+ builders:
+ - releng-tox
+
+- project:
+ name: releng-tox
+ jobs:
+ - releng-tox
+
+- project:
+ name: releng-release-jobs
+ stream:
+ - nile
+ - orinoco
+ - v1.22
+ - v1.23
+ - v1.24
+ - v1.25
+ - v1.26
+ - v1.27
+ - v1.28
+ - v1.29
+ - xena
+ - wallaby
+ - yoga
+ - zed
+ - '2023.1'
+ - '2023.2'
+ jobs:
+ - 'releng-release-{stream}-verify'
+ - 'releng-release-{stream}-merge'
+
+- parameter:
+ name: stream-parameter
+ parameters:
+ - string:
+ name: STREAM
+ default: '{stream}'
+
+- job-template:
+ name: 'releng-release-{stream}-verify'
+ scm:
+ - releng-scm:
+ ref: $GERRIT_REFSPEC
+ parameters:
+ - releng-jjb-node:
+ node: opnfv-build
+ - stream-parameter:
+ stream: '{stream}'
+ - string:
+ name: GIT_URL
+ default: ssh://fbot@gerrit.opnfv.org:29418/
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'releng'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/master'
+ file-paths:
+ - compare-type: ANT
+ pattern: 'releases/{stream}/**'
+ - compare-type: ANT
+ pattern: 'releases/schema.yaml'
+ - compare-type: ANT
+ pattern: 'releases/scripts/verify_schema.py'
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install python3-pygerrit2 \
+ python3-ruamel.yaml -y
+ - shell: !include-raw-escape:
+ - branch-or-tag.sh
+
+- job-template:
+ name: 'releng-release-{stream}-merge'
+ scm:
+ - releng-scm:
+ ref: $GERRIT_REFSPEC
+ parameters:
+ - releng-jjb-node:
+ node: opnfv-build
+ - stream-parameter:
+ stream: '{stream}'
+ - string:
+ name: GIT_URL
+ default: ssh://fbot@gerrit.opnfv.org:29418/
+ triggers:
+ - gerrit:
+ trigger-on:
+ - change-merged-event
+ - comment-added-contains-event:
+ comment-contains-value: 'remerge'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'releng'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: 'master'
+ file-paths:
+ - compare-type: ANT
+ pattern: 'releases/{stream}/**'
+ builders:
+ - shell: |
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install python3-pygerrit2 \
+ python3-ruamel.yaml -y
+ - shell: !include-raw-escape:
+ - branch-or-tag.sh
+
+- builder:
+ name: lint-init
+ builders:
+ - shell: |
+ #!/bin/bash
+ # Ensure we start with a clean environment
+ rm -f bash-violation.log python-violation.log yaml-violation.log violation.log
+ git --no-pager diff --diff-filter=MCRAT --name-only HEAD^1 > modified_files
+
+- builder:
+ name: lint-report
+ builders:
+ - shell: |
+ #!/bin/bash
+ if [[ -s violation.log ]]; then
+ cat violation.log
+ echo "Reporting lint result...."
+ set -x
+ msg="Found syntax error and/or coding style violation(s) in the files modified by your patchset."
+ sed -i -e "1s#^#${msg}\n\n#" violation.log
+ cmd="gerrit review -p $GERRIT_PROJECT -m \"$(cat violation.log)\" $GERRIT_PATCHSET_REVISION --notify NONE"
+ ssh -o 'PubkeyAcceptedKeyTypes +ssh-rsa' -p 29418 fbot@gerrit.opnfv.org "$cmd"
+
+ # Make sure the caller job failed
+ exit 1
+ fi
+
+- builder:
+ name: lint-bash-code
+ builders:
+ - shell: |
+ #!/bin/bash
+ echo "Checking bash code..."
+ for f in $(egrep '\.sh$' modified_files)
+ do
+ bash -n "$f" 2>> bash-violation.log
+ done
+ if [[ -s bash-violation.log ]]; then
+ echo -e "Bash syntax error(s)\n---" >> violation.log
+ sed -e 's/^/ /g' bash-violation.log >> violation.log
+ fi
+
+- builder:
+ name: lint-python-code
+ builders:
+ - shell: |
+ #!/bin/bash
+
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install flake8 -y
+
+ echo "Checking python code..."
+ for f in $(egrep '\.py$' modified_files)
+ do
+ flake8 "$f" >> python-violation.log
+ done
+ if [[ -s python-violation.log ]]; then
+ echo -e "Python violation(s)\n---" >> violation.log
+ sed -e 's/^/ /g' python-violation.log >> violation.log
+ fi
+
+- builder:
+ name: lint-yaml-code
+ builders:
+ - shell: |
+ #!/bin/bash
+
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 dist-upgrade -y
+ sudo DEBIAN_FRONTEND=noninteractive \
+ apt-get -o DPkg::Lock::Timeout=300 install yamllint -y
+
+ echo "Checking yaml file..."
+ for f in $(egrep '\.ya?ml$' modified_files)
+ do
+ yamllint "$f" >> yaml-violation.log
+ done
+ if [[ -s yaml-violation.log ]]; then
+ echo -e "YAML violation(s)\n---" >> violation.log
+ sed -e 's/^/ /g' yaml-violation.log >> violation.log
+ fi
+
+- builder:
+ name: lint-all-code
+ builders:
+ - lint-init
+ - lint-bash-code
+ - lint-python-code
+ - lint-yaml-code
+ - lint-report
+
+- project:
+ name: releng-lint
+ jobs:
+ - 'releng-lint-verify'
+
+- job-template:
+ name: 'releng-lint-verify'
+ parameters:
+ - releng-jjb-node:
+ node: opnfv-build
+ scm:
+ - releng-scm:
+ ref: $GERRIT_REFSPEC
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event
+ - comment-added-contains-event:
+ comment-contains-value: recheck
+ - comment-added-contains-event:
+ comment-contains-value: reverify
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'releng'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: 'master'
+ file-paths:
+ - compare-type: ANT
+ pattern: '**/*.py'
+ - compare-type: ANT
+ pattern: '**/*.sh'
+ - compare-type: ANT
+ pattern: '**/*.yml'
+ - compare-type: ANT
+ pattern: '**/*.yaml'
+ builders:
+ - lint-all-code
+
+- project:
+ name: releng-generate-artifacts-index-pages
+ jobs:
+ - releng-generate-artifacts-index-pages
+
+- job-template:
+ name: releng-generate-artifacts-index-pages
+ scm:
+ - releng-scm:
+ ref: master
+ parameters:
+ - releng-jjb-node:
+ node: opnfv-build
+ triggers:
+ - timed: '@daily'
+ builders:
+ - generate-artifact-html
+
+- builder:
+ name: generate-artifact-html
+ builders:
+ - shell: |
+ curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add -
+ echo "deb https://packages.cloud.google.com/apt cloud-sdk main" | sudo tee \
+ /etc/apt/sources.list.d/google-cloud-sdk.list
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install \
+ google-cloud-sdk -y
+ cd $WORKSPACE/utils/
+ ./artifacts.opnfv.org.sh
+
+- project:
+ name: releng-artifact-cleanup
+ jobs:
+ - releng-artifact-cleanup
+
+- job-template:
+ name: releng-artifact-cleanup
+ scm:
+ - releng-scm:
+ ref: master
+ parameters:
+ - releng-jjb-node:
+ node: opnfv-build
+ triggers:
+ - timed: '@daily'
+ builders:
+ - releng-artifact-cleanup
+
+- builder:
+ name: releng-artifact-cleanup
+ builders:
+ - shell: |
+ curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add -
+ echo "deb https://packages.cloud.google.com/apt cloud-sdk main" | sudo tee \
+ /etc/apt/sources.list.d/google-cloud-sdk.list
+ sudo apt-get -o DPkg::Lock::Timeout=300 update && \
+ sudo DEBIAN_FRONTEND=noninteractive apt-get \
+ -o DPkg::Lock::Timeout=300 install \
+ google-cloud-sdk -y
+ $WORKSPACE/utils/retention_script.sh
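The releng-jjb-verify and releng-jjb-merge builders above drive everything through jenkins-job-builder: verify renders the whole jjb/ tree as a syntax check, merge pushes the rendered jobs with --delete-old. A rough local equivalent of the verify step, assuming a releng checkout and jenkins-job-builder installed via pip instead of apt, is:

    # render every definition under jjb/ into a throwaway directory;
    # any template or YAML error makes this fail
    pip install --user jenkins-job-builder
    cd releng
    jenkins-jobs test --recursive -o /tmp/jjb-render jjb
    rm -rf /tmp/jjb-render
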
diff --git a/jjb/releng/releng-release-create-branch.sh b/jjb/releng/releng-release-create-branch.sh
index 7e91d5ace..beb493b1a 100644
--- a/jjb/releng/releng-release-create-branch.sh
+++ b/jjb/releng/releng-release-create-branch.sh
@@ -14,7 +14,7 @@ STREAM=${STREAM:-'nostream'}
RELEASE_FILES=$(git diff HEAD^1 --name-only -- "releases/$STREAM")
# Configure the git user/email as we'll be pushing up changes
-git config --global user.name "jenkins-ci"
+git config --global user.name "fbot"
git config --global user.email "jenkins-opnfv-ci@opnfv.org"
# Ensure we are able to generate Commit-IDs for new patchsets
@@ -44,8 +44,8 @@ fi
run_merge(){
unset NEW_FILES
if [[ $REF_EXISTS = true && "$JOB_NAME" =~ "merge" ]]; then
- ssh -n -f -p 29418 gerrit.opnfv.org gerrit create-branch "$repo" "$branch" "$ref"
- python releases/scripts/create_jobs.py -f $release_file
+ ssh -o 'PubkeyAcceptedKeyTypes +ssh-rsa' -n -f -p 29418 fbot@gerrit.opnfv.org gerrit create-branch "$repo" "$branch" "$ref"
+ python3 releases/scripts/create_jobs.py -f $release_file
NEW_FILES=$(git status --porcelain --untracked=no | cut -c4-)
fi
if [ -n "$NEW_FILES" ]; then
@@ -65,7 +65,7 @@ for release_file in $RELEASE_FILES; do
else
run_merge
fi
- done < <(python releases/scripts/repos.py -b -f "$release_file")
+ done < <(python3 releases/scripts/repos.py -b -f "$release_file")
done
}
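The merge path above now drives branch creation through Gerrit's ssh command interface as the fbot account (with the legacy RSA key type re-enabled) and regenerates the jobs with python3. As a reference, a standalone invocation of that Gerrit command, with placeholder project, branch and revision values, looks like:

    # gerrit create-branch takes <project> <branch-name> <revision> (placeholders below)
    ssh -o 'PubkeyAcceptedKeyTypes +ssh-rsa' -p 29418 fbot@gerrit.opnfv.org \
        gerrit create-branch sandbox stable/example 1234abcd
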
diff --git a/jjb/releng/releng-release-jobs.yaml b/jjb/releng/releng-release-jobs.yaml
deleted file mode 100644
index d23779c0e..000000000
--- a/jjb/releng/releng-release-jobs.yaml
+++ /dev/null
@@ -1,122 +0,0 @@
-# SPDX-License-Identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 The Linux Foundation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
----
-- project:
- name: releng-release-jobs
-
- build-node: 'opnfv-build'
-
- stream:
- - fraser
- - gambia
- - hunter
- - iruya
- - jerma
-
- jobs:
- - 'releng-release-{stream}-verify'
- - 'releng-release-{stream}-merge'
-
- project: 'releng'
-
-- job-template:
- name: 'releng-release-{stream}-verify'
-
- parameters:
- - '{build-node}-defaults'
- - stream-parameter:
- stream: '{stream}'
- - project-parameter:
- project: '{project}'
- branch: 'master'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'releng'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/master'
- file-paths:
- - compare-type: ANT
- pattern: 'releases/{stream}/**'
- - compare-type: ANT
- pattern: 'releases/schema.yaml'
- - compare-type: ANT
- pattern: 'releases/scripts/verify_schema.py'
-
- builders:
- - shell: !include-raw-escape:
- - releng-release-create-venv.sh
- - branch-or-tag.sh
-
- publishers:
- - email-jenkins-admins-on-failure
-
-- job-template:
- name: 'releng-release-{stream}-merge'
-
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Only run merge job on build1'
- default-slaves:
- - lf-build1
- allowed-multiselect: false
- ignore-offline-nodes: true
- - stream-parameter:
- stream: '{stream}'
- - project-parameter:
- project: '{project}'
- branch: 'master'
- # Override GIT_URL so we can send patches back to Gerrit and
- # modify repos
- - string:
- name: GIT_URL
- default: ssh://$USER@gerrit.opnfv.org:29418/
- description: 'Git URL to use on this Jenkins Slave'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit-trigger-change-merged:
- project: '{project}'
- branch: 'master'
- files: 'releases/{stream}/*'
-
- builders:
- - shell: !include-raw-escape:
- - releng-release-create-venv.sh
- - branch-or-tag.sh
-
- publishers:
- - email-jenkins-admins-on-failure
-
-- parameter:
- name: stream-parameter
- parameters:
- - string:
- name: STREAM
- default: '{stream}'
- description: "OPNFV Stable Stream"
diff --git a/jjb/releng/releng-release-tagging.sh b/jjb/releng/releng-release-tagging.sh
index 88927e54d..1fce35f6c 100644
--- a/jjb/releng/releng-release-tagging.sh
+++ b/jjb/releng/releng-release-tagging.sh
@@ -8,6 +8,7 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
set -e -o pipefail
+set -x
GIT_URL=${GIT_URL:-https://gerrit.opnfv.org/gerrit}
STREAM=${STREAM:-'nostream'}
@@ -48,7 +49,7 @@ for release_file in $RELEASE_FILES; do
# If the tag doesn't exist and we're in a merge job,
# everything has been verified up to this point and we
# are ready to create the tag.
- git config --global user.name "jenkins-ci"
+ git config --global user.name "fbot"
git config --global user.email "jenkins-opnfv-ci@opnfv.org"
echo "--> Creating $tag tag for $repo at $ref"
git tag -am "$tag" $tag $ref
@@ -62,5 +63,5 @@ for release_file in $RELEASE_FILES; do
popd &> /dev/null
echo "--> Done verifing $repo"
- done < <(python releases/scripts/repos.py -f $release_file)
+ done < <(python3 releases/scripts/repos.py -f $release_file)
done
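The tagging script sets the fbot git identity and then creates an annotated tag for each repo at the ref pinned in the release file. A minimal standalone sketch of that step, with placeholder tag and commit values, would be:

    # annotated tag at an explicit commit (tag name and sha are placeholders)
    git config user.name "fbot"
    git config user.email "jenkins-opnfv-ci@opnfv.org"
    git tag -am "opnfv-9.0.0" opnfv-9.0.0 1234abcd
    git show --no-patch opnfv-9.0.0
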
diff --git a/jjb/releng/releng-rtd-jobs.yaml b/jjb/releng/releng-rtd-jobs.yaml
deleted file mode 100644
index f7c960335..000000000
--- a/jjb/releng/releng-rtd-jobs.yaml
+++ /dev/null
@@ -1,16 +0,0 @@
----
-- project:
- name: releng-builder-jobs
- project: 'releng'
- project-name: 'releng'
- jjb-version: '2.5.0'
-
- build-timeout: 60
-
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-releng/38594/'
- rtd-token: '291c6a0109493b4457e566d06141212452c65784'
- project-pattern: 'releng'
-
- jobs:
- - '{project-name}-ci-jobs'
- - '{project-name}-rtd-jobs'
diff --git a/jjb/releng/releng-views.yaml b/jjb/releng/releng-views.yaml
index 12dfbfe35..fbd69982d 100644
--- a/jjb/releng/releng-views.yaml
+++ b/jjb/releng/releng-views.yaml
@@ -1,7 +1,12 @@
---
-- project:
- name: releng-view
- views:
- - common-view
- view-name: releng
- view-regex: ^releng.*
+- view:
+ name: releng
+ view-type: list
+ columns:
+ - status
+ - weather
+ - job
+ - last-success
+ - last-failure
+ - last-duration
+ regex: ^releng-.*
diff --git a/jjb/samplevnf/samplevnf-rtd-jobs.yaml b/jjb/samplevnf/samplevnf-rtd-jobs.yaml
deleted file mode 100644
index 9c09c827e..000000000
--- a/jjb/samplevnf/samplevnf-rtd-jobs.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
----
-- project:
- name: samplevnf-rtd
- project: samplevnf
- project-name: samplevnf
-
- gerrit-skip-vote: true
- project-pattern: 'samplevnf'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-samplevnf/47390/'
- rtd-token: '133bd533bd10428c549c17967469b25e847f42f4'
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/samplevnf/samplevnf-views.yaml b/jjb/samplevnf/samplevnf-views.yaml
deleted file mode 100644
index 26ea8068c..000000000
--- a/jjb/samplevnf/samplevnf-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: samplevnf-view
- views:
- - project-view
- project-name: samplevnf
diff --git a/jjb/sdnvpn/sdnvpn-rtd-jobs.yaml b/jjb/sdnvpn/sdnvpn-rtd-jobs.yaml
deleted file mode 100644
index a01544fbd..000000000
--- a/jjb/sdnvpn/sdnvpn-rtd-jobs.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-- project:
- name: sdnvpn-rtd
- project: sdnvpn
- project-name: sdnvpn
-
- project-pattern: 'sdnvpn'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-sdnvpn/47391/'
- rtd-token: '1efdc48a9819be55a28137937674f1f744d02fe0'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - gambia:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/sdnvpn/sdnvpn-views.yaml b/jjb/sdnvpn/sdnvpn-views.yaml
deleted file mode 100644
index dfa6dfa22..000000000
--- a/jjb/sdnvpn/sdnvpn-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: sdnvpn-view
- views:
- - project-view
- project-name: sdnvpn
diff --git a/jjb/sfc/sfc-project-jobs.yaml b/jjb/sfc/sfc-project-jobs.yaml
deleted file mode 100644
index 47a6b542f..000000000
--- a/jjb/sfc/sfc-project-jobs.yaml
+++ /dev/null
@@ -1,110 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: sfc-project-jobs
-
- project: 'sfc'
-
- jobs:
- - 'sfc-verify-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: true
- - hunter: &hunter
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-- job-template:
- name: 'sfc-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
- builders:
- - sfc-unit-tests-and-docs
-
- publishers:
- - sfc-unit-tests-and-docs-publisher
-
-################################
-# job builders
-################################
-
-- builder:
- name: sfc-unit-tests-and-docs
- builders:
- - shell: |
- #!/bin/bash
- virtualenv /var/tmp/sfc
- source /var/tmp/sfc/bin/activate
- pip install tox
- cd $WORKSPACE && tox
-
-################################
-# job publishers
-################################
-- publisher:
- name: 'sfc-unit-tests-and-docs-publisher'
- publishers:
- - junit:
- results: nosetests.xml
- - cobertura:
- report-file: "coverage.xml"
- only-stable: "true"
- fail-no-reports: "true"
- health-auto-update: "true"
- stability-auto-update: "true"
- zoom-coverage-chart: "true"
- targets:
- - files:
- healthy: 0
- unhealthy: 0
- failing: 0
- - method:
- healthy: 0
- unhealthy: 0
- failing: 0
- - email-jenkins-admins-on-failure
diff --git a/jjb/sfc/sfc-rtd-jobs.yaml b/jjb/sfc/sfc-rtd-jobs.yaml
deleted file mode 100644
index a65da54d4..000000000
--- a/jjb/sfc/sfc-rtd-jobs.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-- project:
- name: sfc-rtd
- project: sfc
- project-name: sfc
-
- project-pattern: 'sfc'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-sfc/47392/'
- rtd-token: 'bc4419f4dded5c816071b042ac32c03ac6108700'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - hunter:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/sfc/sfc-views.yaml b/jjb/sfc/sfc-views.yaml
deleted file mode 100644
index b2884baa5..000000000
--- a/jjb/sfc/sfc-views.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-- project:
- name: sfc-view
- views:
- - common-view
- view-name: sfc
- view-regex: ^sfc.*
diff --git a/jjb/snaps/snaps-rtd-jobs.yaml b/jjb/snaps/snaps-rtd-jobs.yaml
deleted file mode 100644
index 2159c1a8a..000000000
--- a/jjb/snaps/snaps-rtd-jobs.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
----
-- project:
- name: snaps-rtd
- project: snaps
- project-name: snaps
-
- project-pattern: 'snaps'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-snaps/47393/'
- rtd-token: '8fa2d732997534df1e91a87d6dc3ee60bb56508b'
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/snaps/snaps-verify-jobs.yaml b/jjb/snaps/snaps-verify-jobs.yaml
deleted file mode 100644
index 2055bc1c4..000000000
--- a/jjb/snaps/snaps-verify-jobs.yaml
+++ /dev/null
@@ -1,81 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: snaps
-
- project: '{name}'
-
- jobs:
- - 'snaps-verify-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: true
- - hunter: &hunter
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: true
-
-- job-template:
- name: 'snaps-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: DEPLOYMENT_HOST_IP
- default: 192.168.122.2
- description: 'IP of the deployment node'
- - string:
- name: CONTROLLER_IP
- default: 192.168.122.3
- description: 'IP of the controller node'
- - 'intel-virtual10-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
- builders:
- - shell: |
- #!/bin/bash
-
- cd $WORKSPACE/ci
- ./run_tests.sh $DEPLOYMENT_HOST_IP $CONTROLLER_IP
diff --git a/jjb/snaps/snaps-views.yaml b/jjb/snaps/snaps-views.yaml
deleted file mode 100644
index a4fc90ed2..000000000
--- a/jjb/snaps/snaps-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: snaps-view
- views:
- - project-view
- project-name: snaps
diff --git a/jjb/stor4nfv/stor4nfv-project.yaml b/jjb/stor4nfv/stor4nfv-project.yaml
deleted file mode 100644
index 6e796cfd4..000000000
--- a/jjb/stor4nfv/stor4nfv-project.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- project:
- name: stor4nfv
-
- project: '{name}'
-
- jobs:
- - '{project}-verify-basic'
diff --git a/jjb/stor4nfv/stor4nfv-rtd-jobs.yaml b/jjb/stor4nfv/stor4nfv-rtd-jobs.yaml
deleted file mode 100644
index 0383c77c7..000000000
--- a/jjb/stor4nfv/stor4nfv-rtd-jobs.yaml
+++ /dev/null
@@ -1,24 +0,0 @@
----
-- project:
- name: stor4nfv-rtd
- project: stor4nfv
- project-name: stor4nfv
-
- project-pattern: 'stor4nfv'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-stor4nfv/47394/'
- rtd-token: '9c189b44cf08de75dc06253558cc86ed93982cbb'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - hunter: &hunter
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- - gambia:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/stor4nfv/stor4nfv-views.yaml b/jjb/stor4nfv/stor4nfv-views.yaml
deleted file mode 100644
index 74ddd2eea..000000000
--- a/jjb/stor4nfv/stor4nfv-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: stor4nfv-view
- views:
- - project-view
- project-name: stor4nfv
diff --git a/jjb/storperf/storperf-daily-jobs.yaml b/jjb/storperf/storperf-daily-jobs.yaml
deleted file mode 100644
index b5f510c1d..000000000
--- a/jjb/storperf/storperf-daily-jobs.yaml
+++ /dev/null
@@ -1,178 +0,0 @@
----
-###################################
-# job configuration for storperf
-###################################
-- project:
- name: storperf-daily
-
- project: storperf
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- docker-tag: 'latest'
- hunter: &hunter
- stream: hunter
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- docker-tag: 'stable'
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # Installers using labels
- # CI PODs
- # This section should only contain the installers
- # that have been switched using labels for slaves
- # -------------------------------
- pod:
- # # fuel CI PODs
- # - baremetal:
- # slave-label: fuel-baremetal
- # installer: fuel
- # <<: *master
- # - virtual:
- # slave-label: fuel-virtual
- # installer: fuel
- # <<: *master
- # # joid CI PODs
- # - baremetal:
- # slave-label: joid-baremetal
- # installer: joid
- # <<: *master
- # - virtual:
- # slave-label: joid-virtual
- # installer: joid
- # <<: *master
- # # apex CI PODs
- # - virtual:
- # slave-label: apex-virtual-master
- # installer: apex
- # <<: *master
- - baremetal:
- slave-label: apex-baremetal-master
- installer: apex
- <<: *master
- - baremetal:
- slave-label: apex-baremetal-master
- installer: apex
- <<: *hunter
- ## armband CI PODs
- # - armband-baremetal:
- # slave-label: armband-baremetal
- # installer: fuel
- # <<: *master
- # - armband-virtual:
- # slave-label: armband-virtual
- # installer: fuel
- # <<: *master
- ## daisy CI PODs
- # - baremetal:
- # slave-label: daisy-baremetal
- # installer: daisy
- # <<: *master
- # - virtual:
- # slave-label: daisy-virtual
- # installer: daisy
- # <<: *master
-
- jobs:
- - 'storperf-{installer}-{pod}-daily-{stream}'
-
-################################
-# job template
-################################
-- job-template:
- name: 'storperf-{installer}-{pod}-daily-{stream}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: '45'
- abort: true
- - fix-workspace-permissions
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl_l2-nofeature-ha'
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: 'Tag to pull docker image'
- - string:
- name: CLEAN_DOCKER_IMAGES
- default: 'false'
- description: 'Remove downloaded docker images (opnfv/storperf*:*)'
- - string:
- name: GS_PATHNAME
- default: '{gs-pathname}'
- description: "Version directory where the opnfv documents will be stored in gs repository"
- - string:
- name: DISK_TYPE
- default: 'HDD'
- description: 'The type of hard disk that Cinder uses'
- - string:
- name: VOLUME_SIZE
- default: '2'
- description: 'Size of Cinder volume (in GB)'
- - string:
- name: WORKLOADS
- default: 'rw'
- description: 'Workloads to run'
- - string:
- name: BLOCK_SIZES
- default: '16384'
- description: 'Block sizes for VM I/O operations'
- - string:
- name: QUEUE_DEPTHS
- default: '4'
- description: 'Number of simultaneous I/O operations to keep active'
- - string:
- name: STEADY_STATE_SAMPLES
- default: '10'
- description: 'Number of samples to use (1 per minute) to measure steady state'
- - string:
- name: TEST_CASE
- choices:
- - 'snia_steady_state'
- description: 'The test case to run'
-
- scm:
- - git-scm
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - 'storperf-daily-builder'
-
-########################
-# builder macros
-########################
-- builder:
- name: storperf-daily-builder
- builders:
- - shell: |
- #!/bin/bash
-
- cd $WORKSPACE
- ./ci/daily.sh
diff --git a/jjb/storperf/storperf-rtd-jobs.yaml b/jjb/storperf/storperf-rtd-jobs.yaml
deleted file mode 100644
index e8ee7b985..000000000
--- a/jjb/storperf/storperf-rtd-jobs.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
----
-- project:
- name: storperf-rtd
- project: storperf
- project-name: storperf
-
- project-pattern: 'storperf'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-storperf/47395/'
- rtd-token: '8a5c04ef90e5f32edc4e805a5018763cd25e9afc'
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/storperf/storperf-verify-jobs.yaml b/jjb/storperf/storperf-verify-jobs.yaml
deleted file mode 100644
index 8c43e53e8..000000000
--- a/jjb/storperf/storperf-verify-jobs.yaml
+++ /dev/null
@@ -1,200 +0,0 @@
----
-- project:
- name: storperf-verify
-
- project: 'storperf'
-
- # -------------------------------
- # branches
- # -------------------------------
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- docker-tag: 'latest'
- - iruya: &iruya
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- docker-tag: 'stable'
- # -------------------------------
- # patch verification phases
- # -------------------------------
- phase:
- - 'unit-test':
- slave-label: 'opnfv-build-ubuntu'
- - 'build-x86_64':
- slave-label: 'opnfv-build-ubuntu'
- - 'build-aarch64':
- slave-label: 'opnfv-build-ubuntu-arm'
- # -------------------------------
- # jobs
- # -------------------------------
- jobs:
- - 'storperf-verify-{stream}'
- - 'storperf-verify-{phase}-{stream}'
-# -------------------------------
-# job templates
-# -------------------------------
-- job-template:
- name: 'storperf-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- project-type: 'multijob'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'opnfv-build-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- builders:
- - shell: |
- #!/bin/bash
-
- # we do nothing here as the main stuff will be done
- # in phase jobs
- echo "Triggering phase jobs!"
- - multijob:
- name: 'storperf-build-and-unittest'
- execution-type: PARALLEL
- projects:
- - name: 'storperf-verify-unit-test-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- git-revision: true
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: false
- - name: 'storperf-verify-build-x86_64-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- ARCH=x86_64
- git-revision: true
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: false
- - name: 'storperf-verify-build-aarch64-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- ARCH=aarch64
- git-revision: true
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: false
-
-- job-template:
- name: 'storperf-verify-{phase}-{stream}'
-
- disabled: '{obj:disabled}'
-
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 60
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{slave-label}-defaults'
-
- scm:
- - git-scm-gerrit
-
- builders:
- - 'storperf-verify-{phase}-builders-macro'
-
- publishers:
- - 'storperf-verify-{phase}-publishers-macro'
-# -------------------------------
-# builder macros
-# -------------------------------
-- builder:
- name: 'storperf-verify-unit-test-builders-macro'
- builders:
- - shell: |
- $WORKSPACE/ci/verify.sh
-
-- builder:
- name: 'storperf-verify-build-x86_64-builders-macro'
- builders:
- - shell: |
- $WORKSPACE/ci/verify-build.sh
-
-- builder:
- name: 'storperf-verify-build-aarch64-builders-macro'
- builders:
- - shell: |
- $WORKSPACE/ci/verify-build.sh
-# -------------------------------
-# publisher macros
-# -------------------------------
-- publisher:
- name: 'storperf-verify-unit-test-publishers-macro'
- publishers:
- - junit:
- results: nosetests.xml
- - cobertura:
- report-file: "coverage.xml"
- only-stable: "true"
- health-auto-update: "true"
- stability-auto-update: "true"
- zoom-coverage-chart: "true"
- targets:
- - files:
- healthy: 60
- unhealthy: 50
- failing: 40
- - method:
- healthy: 60
- unhealthy: 50
- failing: 40
- - email-jenkins-admins-on-failure
-
-- publisher:
- name: 'storperf-verify-build-x86_64-publishers-macro'
- publishers:
- - email-jenkins-admins-on-failure
-
-- publisher:
- name: 'storperf-verify-build-aarch64-publishers-macro'
- publishers:
- - email-jenkins-admins-on-failure
diff --git a/jjb/storperf/storperf-views.yaml b/jjb/storperf/storperf-views.yaml
deleted file mode 100644
index cc687195b..000000000
--- a/jjb/storperf/storperf-views.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-- project:
- name: storperf
- views:
- - common-view
- view-name: storperf-view
- view-regex: ^storperf.*
diff --git a/jjb/storperf/storperf.yaml b/jjb/storperf/storperf.yaml
deleted file mode 100644
index 7da934ddb..000000000
--- a/jjb/storperf/storperf.yaml
+++ /dev/null
@@ -1,78 +0,0 @@
----
-- project:
- name: storperf
-
- project: '{name}'
-
- jobs:
- - 'storperf-merge-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- docker-tag: 'latest'
- - hunter: &hunter
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- docker-tag: 'stable'
-
-- job-template:
- name: 'storperf-merge-{stream}'
-
- node: opnfv-build-ubuntu
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- # yamllint disable rule:line-length
- description: "Used for overriding the GIT URL coming from Global Jenkins\
- \ configuration in case if the stuff is done on none-LF HW."
- # yamllint enable rule:line-length
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- builders:
- - shell: |
- $WORKSPACE/ci/merge.sh
-
- publishers:
- - junit:
- results: nosetests.xml
- - cobertura:
- report-file: "coverage.xml"
- only-stable: "true"
- health-auto-update: "true"
- stability-auto-update: "true"
- zoom-coverage-chart: "true"
- targets:
- - files:
- healthy: 10
- unhealthy: 20
- failing: 30
- - method:
- healthy: 50
- unhealthy: 40
- failing: 30
- - email-jenkins-admins-on-failure
diff --git a/jjb/ves/ves-rtd-jobs.yaml b/jjb/ves/ves-rtd-jobs.yaml
deleted file mode 100644
index 67b611baa..000000000
--- a/jjb/ves/ves-rtd-jobs.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
----
-- project:
- name: ves-rtd
- project: ves
- project-name: ves
-
- gerrit-skip-vote: true
- project-pattern: 'ves'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-ves/47396/'
- rtd-token: 'ea5026fc44841e7721529b95a9ebc1b29950e2ce'
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/ves/ves-views.yaml b/jjb/ves/ves-views.yaml
deleted file mode 100644
index 6331a6dc8..000000000
--- a/jjb/ves/ves-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: ves-view
- views:
- - project-view
- project-name: ves
diff --git a/jjb/ves/ves.yaml b/jjb/ves/ves.yaml
deleted file mode 100644
index f8c5da2b4..000000000
--- a/jjb/ves/ves.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- project:
- name: ves
-
- project: '{name}'
-
- jobs:
- - '{project}-verify-basic'
diff --git a/jjb/vswitchperf/vswitchperf-rtd-jobs.yaml b/jjb/vswitchperf/vswitchperf-rtd-jobs.yaml
deleted file mode 100644
index bf4f5944f..000000000
--- a/jjb/vswitchperf/vswitchperf-rtd-jobs.yaml
+++ /dev/null
@@ -1,25 +0,0 @@
----
-- project:
- name: vswitchperf-rtd
- project: vswitchperf
- project-name: vswitchperf
-
- gerrit-skip-vote: true
- project-pattern: 'vswitchperf'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-vswitchperf/47398/'
- rtd-token: '47bbe5675e6cc5a6207fcc9b4db8dac03c27d9de'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - hunter: &hunter
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- - gambia:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/vswitchperf/vswitchperf-views.yaml b/jjb/vswitchperf/vswitchperf-views.yaml
deleted file mode 100644
index 5b8564579..000000000
--- a/jjb/vswitchperf/vswitchperf-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: vswitchperf-view
- views:
- - project-view
- project-name: vswitchperf
diff --git a/jjb/vswitchperf/vswitchperf.yaml b/jjb/vswitchperf/vswitchperf.yaml
deleted file mode 100644
index d46a6cf5f..000000000
--- a/jjb/vswitchperf/vswitchperf.yaml
+++ /dev/null
@@ -1,207 +0,0 @@
----
-- project:
-
- name: vswitchperf
-
- project: '{name}'
-
- jobs:
- - 'vswitchperf-verify-{stream}'
- - 'vswitchperf-merge-{stream}'
- - 'vswitchperf-daily-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - hunter: &hunter
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- - gambia:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- - fraser:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-- job-template:
-
- name: 'vswitchperf-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'vswitchperf-verify-.*'
- - 'vswitchperf-merge-.*'
- - 'vswitchperf-daily-.*'
- blocking-level: 'NODE'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'intel-pod12-defaults'
-
- scm:
- - git-scm
-
- triggers:
- - pollscm:
- cron: '@midnight'
-
- builders:
- - shell: |
- pwd
- cd src
- make clobber
- make MORE_MAKE_FLAGS="-j 10"
- cd ../ci
- scl enable rh-python34 "source ~/vsperfenv/bin/activate ; ./build-vsperf.sh daily"
-
-- job-template:
- name: 'vswitchperf-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- project-type: freestyle
-
- concurrent: true
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'vswitchperf-verify-.*'
- - 'vswitchperf-merge-.*'
- - 'vswitchperf-daily-.*'
- blocking-level: 'NODE'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod12
- - ericsson-build4
- default-slaves:
- - intel-pod12
- - ericsson-build4
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
-
- builders:
- - shell: |
- pwd
- cd src
- make clobber
- make MORE_MAKE_FLAGS="-j 5"
- cd ../ci
- ./build-vsperf.sh verify
-
-- job-template:
- name: 'vswitchperf-merge-{stream}'
-
- disabled: '{obj:disabled}'
-
- project-type: freestyle
-
- concurrent: true
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'vswitchperf-verify-.*'
- - 'vswitchperf-merge-.*'
- - 'vswitchperf-daily-.*'
- blocking-level: 'NODE'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - intel-pod12
- - ericsson-build4
- default-slaves:
- - intel-pod12
- - ericsson-build4
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**'
-
- builders:
- - shell: |
- pwd
- cd src
- make clobber
- make MORE_MAKE_FLAGS="-j 5"
- cd ../ci
- ./build-vsperf.sh merge
diff --git a/jjb/xci/bifrost-cleanup-job.yaml b/jjb/xci/bifrost-cleanup-job.yaml
deleted file mode 100644
index d51776173..000000000
--- a/jjb/xci/bifrost-cleanup-job.yaml
+++ /dev/null
@@ -1,146 +0,0 @@
----
-- project:
- name: 'openstack-bifrost-cleanup'
- # -------------------------------
- # branches
- # -------------------------------
- stream:
- - master:
- branch: '{stream}'
-
- # -------------------------------
- # projects
- # -------------------------------
- project:
- - 'openstack':
- project-repo: 'https://git.openstack.org/openstack/bifrost'
- clone-location: '/opt/bifrost'
- - 'opnfv':
- project-repo: 'https://gerrit.opnfv.org/gerrit/releng-xci'
- clone-location: '/opt/releng-xci'
-
- # -------------------------------
- # jobs
- # -------------------------------
- jobs:
- - '{project}-bifrost-cleanup-{stream}'
-
-- job-template:
- name: '{project}-bifrost-cleanup-{stream}'
-
- disabled: true
- concurrent: false
-
- node: bifrost-verify-virtual
-
- # Make sure no verify job is running on any of the slaves since that would
- # produce build logs after we wipe the destination directory.
- properties:
- - build-blocker:
- blocking-jobs:
- - '{project}-bifrost-verify-*'
-
- parameters:
- - string:
- name: PROJECT
- default: '{project}'
-
- builders:
- - shell: |
- #!/bin/bash
-
- set -eu
-
- # DO NOT change this unless you know what you are doing.
- BIFROST_GS_URL="gs://artifacts.opnfv.org/cross-community-ci/openstack/bifrost/$GERRIT_NAME/$GERRIT_CHANGE_NUMBER/"
-
- # This should never happen... even 'recheck' reuses the last job's
- # Gerrit information. Exit with an error so it can be investigated.
- [[ -z $GERRIT_NAME || -z $GERRIT_CHANGE_NUMBER ]] && exit 1
-
- echo "Removing build artifacts for $GERRIT_NAME/$GERRIT_CHANGE_NUMBER"
-
- if ! [[ "$BIFROST_GS_URL" =~ "/cross-community-ci/openstack/bifrost/" ]]; then
- echo "Oops! BIFROST_GS_URL=$BIFROST_GS_URL does not seem like a valid"
- echo "bifrost location on the Google storage server. Please double-check"
- echo "that it's set properly or fix this line if necessary."
- echo "gsutil will not be executed until this is fixed!"
- exit 1
- fi
- try_to_rm=1
- while [[ $try_to_rm -lt 6 ]]; do
- gsutil -m rm -r $BIFROST_GS_URL && _exitcode=$? && break
- _exitcode=$?
- echo "gsutil rm failed! Trying again... (attempt #$try_to_rm)"
- try_to_rm=$((try_to_rm + 1))
- # Give it some time...
- sleep 10
- done
- exit $_exitcode
-
- triggers:
- - '{project}-gerrit-trigger-cleanup':
- branch: '{branch}'
-
- publishers:
- # yamllint disable rule:line-length
- - email:
- recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com zhang.jun3g@zte.com.cn
- # yamllint enable rule:line-length
- - email-jenkins-admins-on-failure
-# -------------------------------
-# trigger macros
-# -------------------------------
-- trigger:
- name: 'openstack-gerrit-trigger-cleanup'
- triggers:
- - gerrit:
- server-name: 'review.openstack.org'
- escape-quotes: true
- trigger-on:
- # We only run this when the change is merged or
- # abandoned since we don't need the logs anymore
- - change-merged-event
- - change-abandoned-event
- # This is an OPNFV maintenance job. We don't want to provide
- # feedback on Gerrit
- silent: true
- silent-start: true
- projects:
- - project-compare-type: 'PLAIN'
- project-pattern: 'openstack/bifrost'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'doc/**'
- - compare-type: ANT
- pattern: 'releasenotes/**'
- readable-message: true
-
-- trigger:
- name: 'opnfv-gerrit-trigger-cleanup'
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- # We only run this when the change is merged or
- # abandoned since we don't need the logs anymore
- - change-merged-event
- - change-abandoned-event
- # This is an OPNFV maintenance job. We don't want to provide
- # feedback on Gerrit
- silent: true
- silent-start: true
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'releng-xci'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- file-paths:
- - compare-type: ANT
- pattern: 'bifrost/**'
- readable-message: true
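
The cleanup builder above retries gsutil rm a bounded number of times before giving up. A minimal standalone sketch of that retry pattern follows; the bucket URL is a placeholder, not the location the real job cleans up.

    #!/bin/bash
    # Bounded-retry sketch of the gsutil cleanup performed by the builder above.
    # GS_URL is an illustrative placeholder; the real job derives the path from
    # the Gerrit change that triggered it.
    set -o errexit
    set -o nounset

    GS_URL="gs://example-bucket/example/path/"
    max_attempts=5
    attempt=1
    rc=1

    while [[ $attempt -le $max_attempts ]]; do
        if gsutil -m rm -r "$GS_URL"; then
            rc=0
            break
        else
            rc=$?
            echo "gsutil rm failed, retrying (attempt #$attempt)"
            attempt=$((attempt + 1))
            sleep 10
        fi
    done

    exit "$rc"
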
diff --git a/jjb/xci/bifrost-periodic-jobs.yaml b/jjb/xci/bifrost-periodic-jobs.yaml
deleted file mode 100644
index 4c61be193..000000000
--- a/jjb/xci/bifrost-periodic-jobs.yaml
+++ /dev/null
@@ -1,154 +0,0 @@
----
-- project:
- project: 'releng-xci'
-
- name: 'bifrost-periodic'
- # -------------------------------
- # Branch Anchors
- # -------------------------------
- # the versions stated here default to branch names, which are later used
- # for checking out the branches and pulling in the head of each branch.
- master: &master
- stream: master
- openstack-bifrost-version: '{stream}'
- opnfv-releng-version: 'master'
- gs-pathname: ''
- ocata: &ocata
- stream: ocata
- openstack-bifrost-version: 'stable/{stream}'
- opnfv-releng-version: 'master'
- gs-pathname: '/{stream}'
- # -------------------------------
- # XCI PODs
- # -------------------------------
- pod:
- - virtual:
- <<: *master
- - virtual:
- <<: *ocata
- # -------------------------------
- # Supported Distros
- # -------------------------------
- distro:
- - 'xenial':
- disabled: false
- slave-label: xci-xenial-virtual
- dib-os-release: 'xenial'
- dib-os-element: 'ubuntu-minimal'
- # yamllint disable rule:line-length
- dib-os-packages: 'vlan,vim,less,bridge-utils,sudo,language-pack-en,iputils-ping,rsyslog,curl,python,debootstrap,ifenslave,ifenslave-2.6,lsof,lvm2,tcpdump,nfs-kernel-server,chrony,iptables'
- # yamllint enable rule:line-length
- extra-dib-elements: 'openssh-server'
- - 'centos7':
- disabled: true
- slave-label: xci-centos7-virtual
- dib-os-release: '7'
- dib-os-element: 'centos7'
- dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
- extra-dib-elements: 'openssh-server'
- - 'suse':
- disabled: true
- slave-label: xci-suse-virtual
- dib-os-release: '42.3'
- dib-os-element: 'opensuse-minimal'
- dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
- extra-dib-elements: 'openssh-server'
-
- # -------------------------------
- # jobs
- # -------------------------------
- jobs:
- - 'bifrost-provision-{pod}-{distro}-periodic-{stream}'
-
-# -------------------------------
-# job templates
-# -------------------------------
-- job-template:
- name: 'bifrost-provision-{pod}-{distro}-periodic-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '^xci-os.*'
- - '^xci-deploy.*'
- - '^xci-functest.*'
- - '^bifrost-.*periodic.*'
- - '^osa-.*periodic.*'
- blocking-level: 'NODE'
- - logrotate-default
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{opnfv-releng-version}'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- - string:
- name: XCI_FLAVOR
- default: 'ha'
- - string:
- name: OPENSTACK_BIFROST_VERSION
- default: '{openstack-bifrost-version}'
- - string:
- name: OPNFV_RELENG_VERSION
- default: '{opnfv-releng-version}'
- - string:
- name: DISTRO
- default: '{distro}'
- - string:
- name: DIB_OS_RELEASE
- default: '{dib-os-release}'
- - string:
- name: DIB_OS_ELEMENT
- default: '{dib-os-element}'
- - string:
- name: DIB_OS_PACKAGES
- default: '{dib-os-packages}'
- - string:
- name: EXTRA_DIB_ELEMENTS
- default: '{extra-dib-elements}'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - label:
- name: SLAVE_LABEL
- default: '{slave-label}'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: CI_LOOP
- default: 'periodic'
-
- wrappers:
- - fix-workspace-permissions
-
- scm:
- - git-scm
-
- # trigger is disabled until we know which jobs we will have
- # and can adjust things accordingly
- triggers:
- - timed: '' # '@midnight'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME - Flavor: $XCI_FLAVOR"
- - 'bifrost-provision-builder'
-
-# --------------------------
-# builder macros
-# --------------------------
-- builder:
- name: bifrost-provision-builder
- builders:
- - shell:
- !include-raw: ./bifrost-provision.sh
diff --git a/jjb/xci/bifrost-provision.sh b/jjb/xci/bifrost-provision.sh
deleted file mode 100755
index 4d646a676..000000000
--- a/jjb/xci/bifrost-provision.sh
+++ /dev/null
@@ -1,109 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-trap cleanup_and_upload EXIT
-
-function fix_ownership() {
- if [ -z "${JOB_URL+x}" ]; then
- echo "Not running as part of Jenkins. Handle the logs manually."
- else
- # Make sure cache exists
- [[ ! -d ${HOME}/.cache ]] && mkdir ${HOME}/.cache
-
- sudo chown -R jenkins:jenkins $WORKSPACE
- sudo chown -R jenkins:jenkins ${HOME}/.cache
- fi
-}
-
-function cleanup_and_upload() {
- original_exit=$?
- fix_ownership
- exit $original_exit
-}
-
-# check distro to see if we support it
-if [[ ! "$DISTRO" =~ (xenial|centos7|suse) ]]; then
- echo "Distro $DISTRO is not supported!"
- exit 1
-fi
-
-# remove previously cloned repos
-sudo /bin/rm -rf /opt/bifrost /opt/openstack-ansible /opt/releng-xci /opt/functest
-
-# Fix up permissions
-fix_ownership
-
-# ensure the versions to checkout are set
-export OPENSTACK_BIFROST_VERSION=${OPENSTACK_BIFROST_VERSION:-master}
-export OPNFV_RELENG_VERSION=${OPNFV_RELENG_VERSION:-master}
-
-# log some info
-echo -e "\n"
-echo "***********************************************************************"
-echo "* *"
-echo "* Provision OpenStack Nodes *"
-echo "* *"
-echo " bifrost version: $OPENSTACK_BIFROST_VERSION"
-echo " releng version: $OPNFV_RELENG_VERSION"
-echo "* *"
-echo "***********************************************************************"
-echo -e "\n"
-
-# clone the repos and checkout the versions
-sudo git clone --quiet https://git.openstack.org/openstack/bifrost /opt/bifrost
-cd /opt/bifrost && sudo git checkout --quiet $OPENSTACK_BIFROST_VERSION
-echo "xci: using bifrost commit"
-git show --oneline -s --pretty=format:'%h - %s (%cr) <%an>'
-
-sudo git clone --quiet https://gerrit.opnfv.org/gerrit/releng-xci /opt/releng-xci
-cd /opt/releng-xci && sudo git checkout --quiet $OPNFV_RELENG_VERSION
-echo "xci: using releng commit"
-git show --oneline -s --pretty=format:'%h - %s (%cr) <%an>'
-
-# source flavor vars
-source "$WORKSPACE/xci/config/${XCI_FLAVOR}-vars"
-
-# combine opnfv and upstream scripts/playbooks
-sudo /bin/cp -rf /opt/releng-xci/bifrost/* /opt/bifrost/
-
-# cleanup remnants of previous deployment
-cd /opt/bifrost
-sudo -E ./scripts/destroy-env.sh
-
-# provision VMs for the flavor
-cd /opt/bifrost
-./scripts/bifrost-provision.sh
-
-# list the provisioned VMs
-cd /opt/bifrost
-source env-vars
-ironic node-list
-sudo -H -E virsh list
-
-echo "OpenStack nodes are provisioned!"
-# here we have to do something in order to capture the working sha1
-# hardcoding stuff for the time being
-
-cd /opt/bifrost
-BIFROST_GIT_SHA1=$(git rev-parse HEAD)
-
-# log some info
-echo -e "\n"
-echo "***********************************************************************"
-echo "* BIFROST SHA1 TO PIN *"
-echo "* *"
-echo " $BIFROST_GIT_SHA1"
-echo "* *"
-echo "***********************************************************************"
-
-echo -e "\n"
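
bifrost-provision.sh above installs an EXIT trap so that workspace ownership is fixed up no matter how the run ends, while still reporting the original exit code to Jenkins. A minimal sketch of that pattern, assuming a WORKSPACE path and jenkins:jenkins ownership for illustration:

    #!/bin/bash
    # Sketch of the trap-on-EXIT cleanup used by bifrost-provision.sh above.
    # WORKSPACE and the jenkins:jenkins ownership are illustrative assumptions.
    set -o errexit
    set -o nounset
    set -o pipefail

    WORKSPACE="${WORKSPACE:-/tmp/example-workspace}"

    cleanup_and_upload() {
        # remember the script's exit status before running cleanup commands,
        # then re-raise it so the job result is not masked by the cleanup
        local original_exit=$?
        sudo chown -R jenkins:jenkins "$WORKSPACE" || true
        exit "$original_exit"
    }
    trap cleanup_and_upload EXIT

    # ... provisioning steps would run here ...
    echo "provisioning finished"
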
diff --git a/jjb/xci/bifrost-verify-jobs.yaml b/jjb/xci/bifrost-verify-jobs.yaml
deleted file mode 100644
index f895cf672..000000000
--- a/jjb/xci/bifrost-verify-jobs.yaml
+++ /dev/null
@@ -1,225 +0,0 @@
----
-- project:
- name: 'openstack-bifrost-verify'
- # -------------------------------
- # branches
- # -------------------------------
- stream:
- - master:
- branch: '{stream}'
- # -------------------------------
- # distros
- # -------------------------------
- distro:
- - 'xenial':
- disabled: true
- dib-os-release: 'xenial'
- dib-os-element: 'ubuntu-minimal'
- dib-os-packages: 'vlan,vim,less,bridge-utils,language-pack-en,iputils-ping,rsyslog,curl'
- extra-dib-elements: 'openssh-server'
- - 'centos7':
- disabled: true
- dib-os-release: '7'
- dib-os-element: 'centos-minimal'
- dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
- extra-dib-elements: 'openssh-server'
- - 'opensuse423':
- disabled: true
- dib-os-release: '42.3'
- dib-os-element: 'opensuse-minimal'
- dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
- extra-dib-elements: 'openssh-server'
- # -------------------------------
- # type
- # -------------------------------
- type:
- - virtual
- # -------------------------------
- # jobs
- # -------------------------------
- jobs:
- - 'openstack-bifrost-verify-{distro}-{type}-{stream}'
-
-# -------------------------------
-# VM defaults
-# -------------------------------
-- defaults:
- name: verify_vm_defaults
- test-vm-num-nodes: '3'
- test-vm-node-names: 'opnfv controller00 compute00'
- vm-domain-type: 'kvm'
- vm-cpu: '2'
- vm-disk: '30'
- vm-memory-size: '4096'
- vm-disk-cache: 'unsafe'
-
-# -------------------------------
-# job templates
-# -------------------------------
-- job-template:
- name: 'openstack-bifrost-verify-{distro}-{type}-{stream}'
-
- disabled: '{obj:disabled}'
-
- defaults: verify_vm_defaults
-
- concurrent: false
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'xci-verify-{distro}-.*'
- - 'xci-.*-{distro}-merge-.*'
- - '.*-bifrost-verify.*-{type}'
- - throttle:
- max-per-node: 2
- max-total: 10
- categories:
- - xci-verify-virtual
- option: category
-
- blocking-level: 'NODE'
-
- parameters:
- - string:
- name: PROJECT_REPO
- default: 'https://git.openstack.org/openstack/bifrost'
- - string:
- name: DISTRO
- default: '{distro}'
- - string:
- name: DIB_OS_RELEASE
- default: '{dib-os-release}'
- - string:
- name: DIB_OS_ELEMENT
- default: '{dib-os-element}'
- - string:
- name: EXTRA_DIB_ELEMENTS
- default: '{extra-dib-elements}'
- - string:
- name: DIB_OS_PACKAGES
- default: '{dib-os-packages}'
- - string:
- name: TEST_VM_NUM_NODES
- default: '{test-vm-num-nodes}'
- - string:
- name: TEST_VM_NODE_NAMES
- default: '{test-vm-node-names}'
- - string:
- name: VM_DOMAIN_TYPE
- default: '{vm-domain-type}'
- - string:
- name: VM_CPU
- default: '{vm-cpu}'
- - string:
- name: VM_DISK
- default: '{vm-disk}'
- - string:
- name: VM_MEMORY_SIZE
- default: '{vm-memory-size}'
- - string:
- name: VM_DISK_CACHE
- default: '{vm-disk-cache}'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - label:
- name: SLAVE_LABEL
- default: 'xci-virtual'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: CI_LOOP
- default: 'verify'
-
- scm:
- - git:
- url: '$PROJECT_REPO'
- refspec: '$GERRIT_REFSPEC'
- branches:
- - 'origin/$BRANCH'
- skip-tag: true
- choosing-strategy: 'gerrit'
- timeout: 10
- wipe-workspace: true
-
- triggers:
- - 'openstack-gerrit-trigger':
- branch: '{branch}'
-
- builders:
- - bifrost-set-name
- - bifrost-build
-
- wrappers:
- - fix-workspace-permissions
- - build-timeout:
- timeout: 180
-
- publishers:
- # yamllint disable rule:line-length
- - email:
- recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com julienjut@gmail.com
- # yamllint enable rule:line-length
- - email-jenkins-admins-on-failure
- - postbuildscript:
- builders:
- - role: BOTH
- build-on:
- - ABORTED
- - FAILURE
- - NOT_BUILT
- - SUCCESS
- - UNSTABLE
- build-steps:
- - shell: !include-raw: ./xci-cleanup.sh
- mark-unstable-if-failed: true
-
-# -------------------------------
-# trigger macros
-# -------------------------------
-- trigger:
- name: 'openstack-gerrit-trigger'
- triggers:
- - gerrit:
- server-name: 'review.openstack.org'
- escape-quotes: true
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- silent-start: true
- custom-url: '* $JOB_NAME $BUILD_URL'
- projects:
- - project-compare-type: 'PLAIN'
- project-pattern: 'openstack/bifrost'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'doc/**'
- - compare-type: ANT
- pattern: 'releasenotes/**'
- readable-message: true
-
-# --------------------------
-# builder macros
-# --------------------------
-- builder:
- name: bifrost-set-name
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
-
-- builder:
- name: bifrost-build
- builders:
- - shell:
- !include-raw: ./bifrost-verify.sh
diff --git a/jjb/xci/bifrost-verify.sh b/jjb/xci/bifrost-verify.sh
deleted file mode 100755
index c810212e3..000000000
--- a/jjb/xci/bifrost-verify.sh
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-git clone https://gerrit.opnfv.org/gerrit/releng-xci $WORKSPACE/releng-xci
-
-cd $WORKSPACE
-git fetch $PROJECT_REPO $GERRIT_REFSPEC && sudo git checkout FETCH_HEAD
-
-# combine opnfv and upstream scripts/playbooks
-/bin/cp -rf $WORKSPACE/releng-xci/xci/infra/bifrost/* $WORKSPACE/
-
-cd $WORKSPACE/releng-xci
-cat > bifrost_test.sh<<EOF
-#!/bin/bash
-set -o errexit
-set -o nounset
-set -o pipefail
-
-cd ~/bifrost
-# set path for XCI repository
-export XCI_PATH=~/bifrost/releng-xci
-
-# provision 3 VMs: xcimaster, controller, and compute
-./scripts/bifrost-provision.sh | ts
-
-sudo -H -E virsh list
-EOF
-chmod a+x bifrost_test.sh
-
-# Fix up distros
-case ${DISTRO} in
- xenial) VM_DISTRO=ubuntu ;;
- centos7) VM_DISTRO=centos ;;
- *suse*) VM_DISTRO=opensuse ;;
-esac
-
-export XCI_BUILD_CLEAN_VM_OS=false
-export XCI_UPDATE_CLEAN_VM_OS=true
-
-./xci/scripts/vm/start-new-vm.sh $VM_DISTRO
-
-rsync -a -e "ssh -F $HOME/.ssh/${VM_DISTRO}-xci-vm-config" $WORKSPACE/ ${VM_DISTRO}_xci_vm:~/bifrost
-
-ssh -F $HOME/.ssh/${VM_DISTRO}-xci-vm-config ${VM_DISTRO}_xci_vm "cd ~/bifrost/releng-xci && ./bifrost_test.sh"
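
bifrost-verify.sh above follows a recurring XCI pattern: write a small test script locally, start a clean VM, rsync the workspace into it, and run the script over SSH using the per-VM ssh config generated by start-new-vm.sh. A stripped-down sketch of that flow; the VM alias, config path, and remote directory are assumptions for illustration:

    #!/bin/bash
    # Sketch of the "generate locally, copy to the clean VM, run over ssh" flow
    # used by bifrost-verify.sh above. All names below are illustrative.
    set -o errexit
    set -o nounset
    set -o pipefail

    VM_DISTRO="ubuntu"
    SSH_CONFIG="$HOME/.ssh/${VM_DISTRO}-xci-vm-config"   # written by start-new-vm.sh

    # 1. write the script that should run inside the VM
    printf '%s\n' '#!/bin/bash' 'set -o errexit' 'echo "running inside the clean VM"' > remote_test.sh
    chmod a+x remote_test.sh

    # 2. copy the current directory (including remote_test.sh) into the VM
    rsync -a -e "ssh -F $SSH_CONFIG" "$PWD/" "${VM_DISTRO}_xci_vm:~/work"

    # 3. execute it remotely
    ssh -F "$SSH_CONFIG" "${VM_DISTRO}_xci_vm" "cd ~/work && ./remote_test.sh"
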
diff --git a/jjb/xci/osa-periodic-jobs.yaml b/jjb/xci/osa-periodic-jobs.yaml
deleted file mode 100644
index b731bf52b..000000000
--- a/jjb/xci/osa-periodic-jobs.yaml
+++ /dev/null
@@ -1,263 +0,0 @@
----
-- project:
- name: 'opnfv-osa-periodic'
-
- project: openstack-ansible
- # -------------------------------
- # branches
- # -------------------------------
- stream:
- - master:
- branch: '{stream}'
- # -------------------------------
- # distros
- # -------------------------------
- distro:
- - ubuntu:
- disabled: false
- - centos:
- disabled: false
- - opensuse:
- disabled: false
- # -------------------------------
- # type
- # -------------------------------
- type:
- - virtual
- # -------------------------------
- # periodic deploy & test phases
- # -------------------------------
- phase:
- - 'deploy'
- - 'healthcheck'
- # -------------------------------
- # jobs
- # -------------------------------
- jobs:
- - 'xci-osa-periodic-{distro}-{type}-{stream}'
- - 'xci-osa-periodic-{distro}-{phase}-{type}-{stream}'
-# -------------------------------
-# job templates
-# -------------------------------
-- job-template:
- name: 'xci-osa-periodic-{distro}-{type}-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'xci-verify-{distro}-.*'
- - 'bifrost-verify-{distro}-.*'
- - 'bifrost-periodic-{distro}-.*'
- - 'xci-osa-verify-{distro}-.*'
- - 'xci-osa-periodic-{distro}-.*'
- blocking-level: 'NODE'
- - throttle:
- max-per-node: 2
- max-total: 10
- categories:
- - xci-verify-virtual
- option: category
-
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- scm:
- - git-scm-openstack
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: 'xci-virtual'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: OPENSTACK_OSA_VERSION
- default: 'master'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - string:
- name: GIT_BASE
- default: 'https://git.openstack.org/openstack/$PROJECT'
- description: 'Git URL to use on this Jenkins Slave'
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - multijob:
- name: deploy
- condition: SUCCESSFUL
- projects:
- - name: 'xci-osa-periodic-{distro}-deploy-{type}-{stream}'
- current-parameters: true
- predefined-parameters: |
- DISTRO={distro}
- DEPLOY_SCENARIO=os-nosdn-nofeature-noha
- OPENSTACK_OSA_VERSION=$OPENSTACK_OSA_VERSION
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- BRANCH=$BRANCH
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- git-revision: true
- - multijob:
- name: healthcheck
- condition: SUCCESSFUL
- projects:
- - name: 'xci-osa-periodic-{distro}-healthcheck-{type}-{stream}'
- current-parameters: true
- predefined-parameters: |
- DISTRO={distro}
- DEPLOY_SCENARIO=os-nosdn-nofeature-noha
- OPENSTACK_OSA_VERSION=$OPENSTACK_OSA_VERSION
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- FUNCTEST_MODE=tier
- FUNCTEST_TIER=healthcheck
- BRANCH=$BRANCH
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
-
-- job-template:
- name: 'xci-osa-periodic-{distro}-{phase}-{type}-{stream}'
-
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '.*-bifrost-verify-.*'
- - '.*-bifrost-periodic-.*'
- blocking-level: 'NODE'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - string:
- name: DISTRO
- default: 'ubuntu'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-noha'
- - string:
- name: OPENSTACK_OSA_VERSION
- default: 'master'
- - string:
- name: FUNCTEST_MODE
- default: 'tier'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'healthcheck'
- - string:
- name: XCI_FLAVOR
- default: 'mini'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - string:
- name: INSTALLER_TYPE
- default: 'osa'
- - string:
- name: GIT_BASE
- default: 'https://git.openstack.org/openstack/$PROJECT'
- description: 'Git URL to use on this Jenkins Slave'
-
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- scm:
- - git-scm-openstack
-
- builders:
- - description-setter:
- description: "Built on $NODE_NAME"
- - 'xci-osa-periodic-{phase}-macro'
-
-# -------------------------------
-# builder macros
-# -------------------------------
-- builder:
- name: 'xci-osa-periodic-deploy-macro'
- builders:
- - shell: |
- #!/bin/bash
-
- cd $WORKSPACE
-
- # The start-new-vm.sh script will copy the entire releng-xci directory,
- # so let's prepare the test script now so it gets copied along with it.
- # Please do not move it elsewhere or you would have to copy it to the VM
- # yourself.
- cat > xci_test.sh<<EOF
- #!/bin/bash
- export DISTRO=$DISTRO
- export DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- export OPENSTACK_OSA_VERSION=$OPENSTACK_OSA_VERSION
- export FUNCTEST_MODE=$FUNCTEST_MODE
- export FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- export XCI_FLAVOR=$XCI_FLAVOR
- export CORE_OPENSTACK_INSTALL=true
- export CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- export OPNFV_RELENG_DEV_PATH=/home/devuser/releng-xci/
- export INSTALLER_TYPE=$INSTALLER_TYPE
- export GIT_BASE=$GIT_BASE
- export JENKINS_HOME=$JENKINS_HOME
-
- cd xci
- ./xci-deploy.sh
- EOF
- chmod a+x xci_test.sh
-
- export XCI_BUILD_CLEAN_VM_OS=false
- export XCI_UPDATE_CLEAN_VM_OS=true
-
- ./xci/scripts/vm/start-new-vm.sh $DISTRO
- - shell: |
- #!/bin/bash
-
- ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm "cd releng-xci && ./xci_test.sh"
-
-
-- builder:
- name: 'xci-osa-periodic-healthcheck-macro'
- builders:
- - shell: |
- #!/bin/bash
-
- echo "Hello World!"
- - shell: |
- #!/bin/bash
-
- sudo virsh destroy ${DISTRO}_xci_vm || true
- sudo virsh undefine ${DISTRO}_xci_vm || true
-
-# this will be enabled once the xci is prepared
-# - builder:
-# name: 'xci-verify-healthcheck-macro'
-# builders:
-# - shell:
-# !include-raw: ../../utils/fetch_os_creds.sh
-# - shell:
-# !include-raw: ../functest/functest-alpine.sh
diff --git a/jjb/xci/xci-cleanup.sh b/jjb/xci/xci-cleanup.sh
deleted file mode 100755
index 51a863da2..000000000
--- a/jjb/xci/xci-cleanup.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 SUSE and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-#----------------------------------------------------------------------
-# This script is used by CI and executed by Jenkins jobs.
-# You are not supposed to use this script manually if you don't know
-# what you are doing.
-#----------------------------------------------------------------------
-
-# Need to cover macros with and without parameters
-VM_NAME=$DISTRO
-VM_NAME+=_xci_vm
-
-# skip the deployment if the patch doesn't impact the deployment
-if [[ "$GERRIT_TOPIC" =~ 'skip-verify' ]]; then
- echo "Skipping the deployment!"
- exit 0
-fi
-
-sudo virsh destroy $VM_NAME || true
-sudo virsh undefine $VM_NAME || true
diff --git a/jjb/xci/xci-daily-jobs.yaml b/jjb/xci/xci-daily-jobs.yaml
deleted file mode 100644
index fe3da8b3c..000000000
--- a/jjb/xci/xci-daily-jobs.yaml
+++ /dev/null
@@ -1,356 +0,0 @@
----
-- project:
- project: 'releng-xci'
-
- name: 'xci-daily'
-
- # -------------------------------
- # Branch Anchors
- # -------------------------------
- master: &master
- stream: master
- opnfv-releng-version: master
- gs-pathname: ''
-
- # -------------------------------
- # Scenarios
- # -------------------------------
- scenario:
- - 'os-nosdn-nofeature-noha':
- deploy-scenario: 'os-nosdn-nofeature'
- installer-type: 'osa'
- xci-flavor: 'noha'
- - 'k8-calico-nofeature-noha':
- deploy-scenario: 'k8-calico-nofeature'
- installer-type: 'kubespray'
- xci-flavor: 'noha'
-
- # -------------------------------
- # XCI PODs
- # -------------------------------
- pod:
- - baremetal:
- <<: *master
-
- # -------------------------------
- # Supported Distros
- # -------------------------------
- distro:
- - 'ubuntu':
- disabled: true
- slave-label: xci-baremetal
- - 'centos':
- disabled: true
- slave-label: xci-baremetal
- - 'opensuse':
- disabled: true
- slave-label: xci-baremetal
-
- # -------------------------------
- # Phases
- # -------------------------------
- phase:
- - 'deploy'
- - 'functest'
- - 'yardstick'
-
- # -------------------------------
- # jobs
- # -------------------------------
- jobs:
- - 'xci-{scenario}-{pod}-{distro}-daily-{stream}'
- - 'xci-{phase}-{pod}-{distro}-daily-{stream}'
-
-# -------------------------------
-# job templates
-# -------------------------------
-- job-template:
- name: 'xci-{scenario}-{pod}-{distro}-daily-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '^xci-verify.*'
- - '^xci-merge.*'
- - '^xci-os.*'
- - '^xci-k8.*'
- - '^xci-deploy.*'
- - '^xci-functest.*'
- - '^bifrost-.*periodic.*'
- - '^osa-.*periodic.*'
- blocking-level: 'NODE'
- - logrotate-default
-
- triggers:
- - timed: '@midnight'
-
- parameters:
- - string:
- name: DEPLOY_SCENARIO
- default: '{deploy-scenario}'
- - string:
- name: INSTALLER_TYPE
- default: '{installer-type}'
- - string:
- name: XCI_FLAVOR
- default: '{xci-flavor}'
- - label:
- name: SLAVE_LABEL
- default: '{slave-label}'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: XCI_DISTRO
- default: '{distro}'
- - string:
- name: FUNCTEST_VERSION
- default: 'hunter'
- - string:
- name: FUNCTEST_MODE
- default: 'tier'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'smoke'
- - string:
- name: CI_LOOP
- default: 'daily'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- builders:
- - description-setter:
- description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
- - trigger-builds:
- - project: 'xci-deploy-{pod}-{distro}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- INSTALLER_TYPE=$INSTALLER_TYPE
- XCI_FLAVOR=$XCI_FLAVOR
- CI_LOOP=$CI_LOOP
- XCI_DISTRO=$XCI_DISTRO
- FUNCTEST_VERSION=$FUNCTEST_VERSION
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- same-node: true
- block: true
- - trigger-builds:
- - project: 'xci-functest-{pod}-{distro}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- INSTALLER_TYPE=$INSTALLER_TYPE
- XCI_FLAVOR=$XCI_FLAVOR
- CI_LOOP=$CI_LOOP
- XCI_DISTRO=$XCI_DISTRO
- FUNCTEST_VERSION=$FUNCTEST_VERSION
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- same-node: true
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
- - trigger-builds:
- - project: 'xci-yardstick-{pod}-{distro}-daily-{stream}'
- current-parameters: false
- predefined-parameters: |
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
- INSTALLER_TYPE=$INSTALLER_TYPE
- XCI_FLAVOR=$XCI_FLAVOR
- CI_LOOP=$CI_LOOP
- XCI_DISTRO=$XCI_DISTRO
- same-node: true
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
-
- publishers:
- # yamllint disable rule:line-length
- # yamllint enable rule:line-length
- - email-jenkins-admins-on-failure
-
-- job-template:
- name: 'xci-{phase}-{pod}-{distro}-daily-{stream}'
-
- disabled: false
-
- concurrent: true
-
- properties:
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '^xci-verify.*'
- - '^xci-merge.*'
- - '^xci-deploy.*'
- - '^xci-functest.*'
- - '^bifrost-.*periodic.*'
- - '^osa-.*periodic.*'
- blocking-level: 'NODE'
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
- - logrotate-default
-
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- scm:
- - git-scm
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{opnfv-releng-version}'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-noha'
- - string:
- name: INSTALLER_TYPE
- default: 'osa'
- - string:
- name: XCI_FLAVOR
- default: 'noha'
- - string:
- name: XCI_DISTRO
- default: 'ubuntu'
- - label:
- name: SLAVE_LABEL
- default: '{slave-label}'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: FUNCTEST_VERSION
- default: 'hunter'
- - string:
- name: FUNCTEST_MODE
- default: 'tier'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'smoke'
- - string:
- name: CI_LOOP
- default: 'daily'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
-
- builders:
- - description-setter:
- description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
- - 'xci-daily-{phase}-macro'
-
-# --------------------------
-# builder macros
-# --------------------------
-# These need to be properly fixed once the basic deployment, functest, and
-# yardstick jobs work outside of the clean VM.
-# One of the ugly fixes is moving the test preparation step into the
-# deployment job itself, since test preparation requires some of the
-# artifacts produced by the deployment job. With the clean VM this wasn't
-# an issue because everything lived on the same VM; once we move out of the
-# clean VM, the work happens in separate, per-job workspaces.
-#
-# Apart from that, the scripts used for the verify jobs need to be reviewed
-# and updated so they can also be used by jobs that don't run on a clean VM.
-- builder:
- name: 'xci-daily-deploy-macro'
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
-
- echo "Cleaning the leftovers from the earlier run"
- echo "---------------------------------------------------------------------------------"
- for vm in $(sudo virsh list --all --name | grep -v xci_vm); do
- echo "Removing $vm"
- sudo virsh destroy $vm > /dev/null 2>&1 || true
- sudo virsh undefine $vm > /dev/null 2>&1 || true
- sudo killall -r vbmc > /dev/null 2>&1 || true
- sudo rm -rf /root/.vbmc > /dev/null 2>&1 || true
- done
- echo "---------------------------------------------------------------------------------"
-
- # select the right PDF/IDF
- export PDF=$WORKSPACE/xci/var/${NODE_NAME}-pdf.yml
- export IDF=$WORKSPACE/xci/var/${NODE_NAME}-idf.yml
- if [[ "$NODE_NAME" =~ "virtual" ]]; then
- export PDF=$WORKSPACE/xci/var/pdf.yml
- export IDF=$WORKSPACE/xci/var/idf.yml
- fi
-
- cd $WORKSPACE/xci && ./xci-deploy.sh -p $PDF -i $IDF
-
- echo "Prepare OPNFV VM for Tests"
- echo "---------------------------------------------------------------------------------"
- export XCI_PATH=$WORKSPACE
- export XCI_VENV=${XCI_PATH}/venv
- source $XCI_VENV/bin/activate
- while read var; do
- declare -x "\${var}" 2>/dev/null
- echo $var
- done < ${XCI_PATH}/.cache/xci.env && cd ${XCI_PATH}/xci && \
- ansible-playbook -i playbooks/dynamic_inventory.py playbooks/prepare-tests.yml
- ssh root@192.168.122.2 "/root/prepare-tests.sh"
- echo "---------------------------------------------------------------------------------"
-
-- builder:
- name: 'xci-daily-functest-macro'
- builders:
- - shell: |
- #!/bin/bash
- set -o pipefail
-
- ssh root@192.168.122.2 "/root/run-functest.sh"
- functest_exit=$?
-
- case ${DEPLOY_SCENARIO[0]} in
- os-*)
- FUNCTEST_LOG=/root/functest-results/functest.log
- ;;
- k8-*)
- FUNCTEST_LOG=/root/functest-results/functest-kubernetes.log
- ;;
- *)
- echo "Unable to determine the installer. Exiting!"
- exit $functest_exit
- ;;
- esac
-
- echo "Functest log"
- echo "---------------------------------------------------------------------------------"
- ssh root@192.168.122.2 "cat $FUNCTEST_LOG"
- echo "---------------------------------------------------------------------------------"
- exit ${functest_exit}
-- builder:
- name: 'xci-daily-yardstick-macro'
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
-
- ssh root@192.168.122.2 "/root/run-yardstick.sh"
diff --git a/jjb/xci/xci-deploy.sh b/jjb/xci/xci-deploy.sh
deleted file mode 100755
index 71cf96d8d..000000000
--- a/jjb/xci/xci-deploy.sh
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
-
-cd $WORKSPACE/xci
-
-# for daily jobs, we want to use working versions
-# for periodic jobs, we will use whatever is set in the job, probably master
-if [[ "$CI_LOOP" == "daily" ]]; then
- # source pinned-versions to get the releng version
- source ./config/pinned-versions
-
- # checkout the version
- git checkout -q $OPNFV_RELENG_VERSION
- echo "Info: Using $OPNFV_RELENG_VERSION"
-elif [[ "$CI_LOOP" == "periodic" ]]; then
- echo "Info: Using $OPNFV_RELENG_VERSION"
-fi
-
-# this is just an example to illustrate what needs to be done, so ignore this
-# part for the time being; xci-deploy.sh needs to be adjusted to take it into
-# account during deployment anyway
-# clone openstack-ansible
-# stable/ocata already uses pinned versions, so this is only valid for master
-if [[ "$CI_LOOP" == "periodic" && "$OPENSTACK_OSA_VERSION" == "master" ]]; then
- cd $WORKSPACE
- # get the url to openstack-ansible git
- source ./config/env-vars
- echo "Info: Capture the ansible role requirement versions before doing anything"
- git clone -q $OPENSTACK_OSA_GIT_URL
- cd openstack-ansible
- repo_sha1=""
- while IFS= read -r line
- do
- if [[ $line =~ "src:" ]]; then
- repo_url=$(echo $line | awk '{print $2}')
- repo_sha1=$(git ls-remote $repo_url $OPENSTACK_OSA_VERSION | awk '{print $1}')
- fi
- echo "$line" | sed -e "s|master|$repo_sha1|" >> opnfv-ansible-role-requirements.yml
- done < ansible-role-requirements.yml
- echo "Info: SHA1s of ansible role requirements"
- echo "-------------------------------------------------------------------------"
- cat opnfv-ansible-role-requirements.yml
- echo "-------------------------------------------------------------------------"
-fi
-
-# proceed with the deployment
-cd $WORKSPACE/xci
-./xci-deploy.sh
-
-if [[ "$JOB_NAME" =~ "periodic" && "$OPENSTACK_OSA_VERSION" == "master" ]]; then
- # if we arrived here without failing, it means we have something we can pin
- # this is again here to show the intention
- cd $WORKSPACE/openstack-ansible
- OSA_GIT_SHA1=$(git rev-parse HEAD)
-
- # log some info
- echo -e "\n"
- echo "***********************************************************************"
- echo "* OSA SHA1 TO PIN *"
- echo "* *"
- echo " $OSA_GIT_SHA1"
- echo "* *"
- echo "***********************************************************************"
-fi
-
-echo -e "\n"
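
The pinning loop in xci-deploy.sh above resolves each role's git URL to the commit currently at the tip of the requested branch with git ls-remote. A self-contained sketch of that lookup, using a hypothetical repository URL:

    #!/bin/bash
    # Sketch of resolving a branch to a commit SHA1 with git ls-remote, as done
    # in the role-pinning loop of xci-deploy.sh above. The URL is illustrative.
    set -o errexit
    set -o nounset
    set -o pipefail

    repo_url="https://git.openstack.org/openstack/openstack-ansible"
    branch="master"

    # ls-remote prints "<sha1> <ref>" for every matching ref; keep the first sha1
    repo_sha1=$(git ls-remote "$repo_url" "$branch" | awk 'NR==1 {print $1}')

    echo "pin $repo_url ($branch) to $repo_sha1"
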
diff --git a/jjb/xci/xci-merge-jobs.yaml b/jjb/xci/xci-merge-jobs.yaml
deleted file mode 100644
index 666ca9680..000000000
--- a/jjb/xci/xci-merge-jobs.yaml
+++ /dev/null
@@ -1,492 +0,0 @@
----
-- project:
- name: 'opnfv-xci-merge'
-
- project: releng-xci
- # -------------------------------
- # branches
- # -------------------------------
- stream:
- - master:
- branch: '{stream}'
- disabled: true
- # -------------------------------
- # distros
- # -------------------------------
- distro:
- - ubuntu:
- disabled: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - centos:
- disabled: true
- kill-phase-on: NEVER
- abort-all-job: false
- - opensuse:
- disabled: true
- kill-phase-on: FAILURE
- abort-all-job: true
- # -------------------------------
- # postmerge promotion phases
- # -------------------------------
- phase:
- - 'deploy'
- - 'healthcheck'
- # -------------------------------
- # jobs
- # -------------------------------
- jobs:
- - 'xci-merge-virtual-{stream}'
- - 'xci-merge-promote-virtual-{stream}'
- - 'xci-merge-{distro}-virtual-{stream}'
- - 'xci-merge-{distro}-{phase}-virtual-{stream}'
-# -------------------------------
-# job templates
-# -------------------------------
-- job-template:
- name: 'xci-merge-virtual-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
-
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'releng-xci'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'false'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'xci/scripts/vm/**'
- - compare-type: ANT
- pattern: 'docs/**'
- - compare-type: ANT
- pattern: 'prototypes/**'
- - compare-type: ANT
- pattern: 'upstream/**'
- - compare-type: ANT
- pattern: 'INFO.yaml'
- - project-compare-type: 'REG_EXP'
- project-pattern: 'sfc|sdnvpn|releng-xci-scenarios'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- file-paths:
- - compare-type: ANT
- pattern: 'scenarios/**'
- readable-message: true
- custom-url: '* $JOB_NAME $BUILD_URL'
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: 'xci-virtual'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: CI_LOOP
- default: 'merge'
- - string:
- name: FUNCTEST_MODE
- default: 'tier'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'healthcheck'
- - string:
- name: DISTRO
- default: 'all'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- builders:
- - 'xci-merge-set-scenario-macro'
- - inject:
- properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
- - description-setter:
- description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
- - multijob:
- name: deploy and test
- condition: SUCCESSFUL
- projects:
- - name: 'xci-merge-opensuse-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- DISTRO=opensuse
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- CI_LOOP=$CI_LOOP
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - name: 'xci-merge-ubuntu-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- DISTRO=ubuntu
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- CI_LOOP=$CI_LOOP
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - name: 'xci-merge-centos-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- DISTRO=centos
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- CI_LOOP=$CI_LOOP
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- node-parameters: false
- kill-phase-on: NEVER
- abort-all-job: false
- - inject:
- properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
- - multijob:
- name: promote
- condition: SUCCESSFUL
- projects:
- - name: 'xci-merge-promote-virtual-{stream}'
- current-parameters: true
- predefined-parameters: |
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- CI_LOOP=$CI_LOOP
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
-
-- job-template:
- name: 'xci-merge-{distro}-virtual-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'xci-verify-{distro}-.*'
- - 'xci-merge-{distro}-.*'
- - 'xci-os.*'
- - 'xci-k8.*'
- - 'openstack-bifrost-verify-{distro}-.*'
- - 'xci-osa-verify-{distro}-.*'
- - 'xci-osa-periodic-{distro}-.*'
- blocking-level: 'NODE'
- - throttle:
- max-per-node: 1
- max-total: 3
- categories:
- - xci-verify-virtual
- option: category
-
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- scm:
- - git-scm-gerrit
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: 'xci-virtual'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: CI_LOOP
- default: 'merge'
- - string:
- name: FUNCTEST_VERSION
- default: 'hunter'
- - string:
- name: FUNCTEST_MODE
- default: 'tier'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'healthcheck'
- - string:
- name: DISTRO
- default: '{distro}'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- builders:
- - 'xci-merge-set-scenario-macro'
- - inject:
- properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
- - description-setter:
- description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
- - multijob:
- name: deploy
- condition: SUCCESSFUL
- projects:
- - name: 'xci-merge-{distro}-deploy-virtual-{stream}'
- current-parameters: true
- predefined-parameters: |
- DISTRO={distro}
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- FUNCTEST_VERSION=$FUNCTEST_VERSION
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- node-parameters: true
- kill-phase-on: '{kill-phase-on}'
- abort-all-job: '{abort-all-job}'
- - multijob:
- name: healthcheck
- condition: SUCCESSFUL
- projects:
- - name: 'xci-merge-{distro}-healthcheck-virtual-{stream}'
- current-parameters: true
- predefined-parameters: |
- DISTRO={distro}
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- CI_LOOP=$CI_LOOP
- FUNCTEST_VERSION=$FUNCTEST_VERSION
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- node-parameters: true
- kill-phase-on: '{kill-phase-on}'
- abort-all-job: '{abort-all-job}'
-
- publishers:
- - postbuildscript:
- builders:
- - role: BOTH
- build-on:
- - ABORTED
- - FAILURE
- - NOT_BUILT
- - SUCCESS
- - UNSTABLE
- build-steps:
- - shell: !include-raw: ./xci-cleanup.sh
- mark-unstable-if-failed: true
-
-- job-template:
- name: 'xci-merge-{distro}-{phase}-virtual-{stream}'
-
- disabled: false
-
- concurrent: false
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '.*-bifrost-verify-.*'
- - '.*-bifrost-periodic-.*'
- - 'osa-verify-.*'
- - 'osa-periodic-.*'
- blocking-level: 'NODE'
-
- parameters:
- - string:
- name: PROJECT
- default: $GERRIT_PROJECT
- - string:
- name: DISTRO
- default: 'ubuntu'
- - string:
- name: CI_LOOP
- default: 'merge'
- - string:
- name: FUNCTEST_VERSION
- default: 'hunter'
- - string:
- name: FUNCTEST_MODE
- default: 'tier'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'healthcheck'
- - string:
- name: XCI_FLAVOR
- default: 'mini'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - string:
- name: OPNFV_RELENG_DEV_PATH
- default: $WORKSPACE/
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- wrappers:
- - inject:
- properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- scm:
- - git-scm-gerrit
-
- builders:
- - description-setter:
- description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
- - 'xci-merge-{phase}-macro'
-
-- job-template:
- name: 'xci-merge-promote-virtual-{stream}'
-
- disabled: false
-
- concurrent: false
-
- properties:
- - logrotate-default
-
- parameters:
- - string:
- name: PROJECT
- default: $GERRIT_PROJECT
- - string:
- name: CI_LOOP
- default: 'merge'
- - string:
- name: FUNCTEST_MODE
- default: 'tier'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'healthcheck'
- - string:
- name: DISTRO
- default: 'all'
- - string:
- name: OPNFV_RELENG_DEV_PATH
- default: $WORKSPACE/
- - string:
- name: LOCAL_PROMOTION_METADATA_FILE
- default: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
- - string:
- name: REMOTE_PROMOTION_METADATA_FILE
- default: "gs://artifacts.opnfv.org/xci/pipeline/merge/$DEPLOY_SCENARIO.properties"
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- wrappers:
- - inject:
- properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- builders:
- - description-setter:
- description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
- - 'xci-merge-promote-macro'
-
-# -------------------------------
-# builder macros
-# -------------------------------
-- builder:
- name: 'xci-merge-set-scenario-macro'
- builders:
- - shell:
- !include-raw: ./xci-set-scenario.sh
-
-- builder:
- name: 'xci-merge-deploy-macro'
- builders:
- - shell:
- !include-raw: ./xci-start-new-vm.sh
-
- - shell:
- !include-raw: ./xci-start-deployment.sh
-
-- builder:
- name: 'xci-merge-healthcheck-macro'
- builders:
- - shell:
- !include-raw: ./xci-run-functest.sh
-
-- builder:
- name: 'xci-merge-promote-macro'
- builders:
- - shell:
- !include-raw: ./xci-promote.sh
diff --git a/jjb/xci/xci-promote.sh b/jjb/xci/xci-promote.sh
deleted file mode 100755
index d8d61bee2..000000000
--- a/jjb/xci/xci-promote.sh
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 Ericsson and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-#----------------------------------------------------------------------
-# This script is used by CI and executed by Jenkins jobs.
-# You are not supposed to use this script manually if you don't know
-# what you are doing.
-#----------------------------------------------------------------------
-
-# ensure GERRIT_TOPIC is set
-GERRIT_TOPIC="${GERRIT_TOPIC:-''}"
-
-# skip the healthcheck if the patch doesn't impact the deployment
-if [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment ]]; then
- echo "Skipping the healthcheck!"
- exit 0
-fi
-
-# fail if promotion metadata file doesn't exist
-if [ ! -f $LOCAL_PROMOTION_METADATA_FILE ]; then
- echo "Unable to find promotion metadata file $LOCAL_PROMOTION_METADATA_FILE"
- echo "Skipping promotion!"
- exit 1
-fi
-
-# put additional info into the metadata file so it can be used when displaying the information
-echo "PROMOTED_BY=$BUILD_URL" >> $LOCAL_PROMOTION_METADATA_FILE
-echo "PROMOTED_ON=$(date -u '+%F_%H:%M'UTC)" >> $LOCAL_PROMOTION_METADATA_FILE
-
-# upload promotion metadata file to OPNFV artifact repo
-echo "Storing promotion metadata as $REMOTE_PROMOTION_METADATA_FILE"
-gsutil cp $LOCAL_PROMOTION_METADATA_FILE $REMOTE_PROMOTION_METADATA_FILE > /dev/null 2>&1
-
-# update the file metadata on gs to prevent the use of a cached version of the file
-gsutil -m setmeta -r -h "Content-Type:text/html" \
- -h "Cache-Control:private, max-age=0, no-transform" \
- $REMOTE_PROMOTION_METADATA_FILE > /dev/null 2>&1
-
-# log the metadata to console
-echo "Stored the metadata for $DEPLOY_SCENARIO"
-echo "---------------------------------------------------------------------------------"
-gsutil cat $REMOTE_PROMOTION_METADATA_FILE
-echo "---------------------------------------------------------------------------------"
-echo "Scenario $DEPLOY_SCENARIO has successfully been promoted!"
diff --git a/jjb/xci/xci-rtd-jobs.yaml b/jjb/xci/xci-rtd-jobs.yaml
deleted file mode 100644
index d21aad5a0..000000000
--- a/jjb/xci/xci-rtd-jobs.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
----
-- project:
- name: releng-xci-rtd
- project: releng-xci
- project-name: releng-xci
-
- gerrit-skip-vote: true
- project-pattern: 'releng-xci'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-releng-xci/47389/'
- rtd-token: '7dbc774ea9a625296d2e973aff3e79af26b15d4d'
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/jjb/xci/xci-run-functest.sh b/jjb/xci/xci-run-functest.sh
deleted file mode 100755
index 992194ca3..000000000
--- a/jjb/xci/xci-run-functest.sh
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 SUSE and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o nounset
-
-#----------------------------------------------------------------------
-# This script is used by CI and executed by Jenkins jobs.
-# You are not supposed to use this script manually if you don't know
-# what you are doing.
-#----------------------------------------------------------------------
-
-# ensure GERRIT_TOPIC is set
-GERRIT_TOPIC="${GERRIT_TOPIC:-''}"
-
-# skip the healthcheck if the patch doesn't impact the deployment
-if [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment ]]; then
- echo "Skipping the healthcheck!"
- exit 0
-fi
-
-# if the scenario is external, we need to wipe WORKSPACE and place releng-xci there, since
-# the project the scenario comes from is cloned and the patch is checked out into
-# xci/scenarios/$DEPLOY_SCENARIO to be synced onto the clean VM.
-# apart from that, we need the releng-xci content in WORKSPACE for things to function correctly on Jenkins.
-# if the change targets releng-xci itself, we don't need to do anything since the patch is already
-# checked out into WORKSPACE
-if [[ $GERRIT_PROJECT != "releng-xci" ]]; then
- cd $HOME && /bin/rm -rf $WORKSPACE
- git clone https://gerrit.opnfv.org/gerrit/releng-xci $WORKSPACE && cd $WORKSPACE
- chmod -R go-rwx $WORKSPACE/xci/scripts/vm
-fi
-
-# skip the deployment if the scenario is not supported on this distro
-OPNFV_SCENARIO_REQUIREMENTS=$WORKSPACE/xci/opnfv-scenario-requirements.yml
-if ! sed -n "/^- scenario: $DEPLOY_SCENARIO$/,/^$/p" $OPNFV_SCENARIO_REQUIREMENTS | grep -q $DISTRO; then
- echo "# SKIPPED: Scenario $DEPLOY_SCENARIO is NOT supported on $DISTRO"
- exit 0
-fi
-
-# set XCI_VENV for ansible
-export XCI_PATH=/home/devuser/releng-xci
-export XCI_VENV=${XCI_PATH}/venv
-
-ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm "source $XCI_VENV/bin/activate; \
- while read var; do declare -x \"\${var}\" 2>/dev/null; done < ${XCI_PATH}/.cache/xci.env && \
- cd releng-xci/xci && ansible-playbook -i playbooks/dynamic_inventory.py playbooks/prepare-tests.yml"
-echo "Prepare OPNFV VM for Tests"
-ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm_opnfv "/root/prepare-tests.sh"
-echo "Running Functest"
-ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm_opnfv "/root/run-functest.sh"
-# Record exit code
-functest_exit=$?
-
-case ${DEPLOY_SCENARIO[0]} in
- os-*)
- FUNCTEST_LOG=/root/functest-results/functest.log
- ;;
- k8-*)
- FUNCTEST_LOG=/root/functest-results/functest-kubernetes.log
- ;;
- *)
- echo "Unable to determine the functest log for scenario $DEPLOY_SCENARIO. Exiting!"
- exit 1
- ;;
-esac
-
-echo "Functest log"
-echo "---------------------------------------------------------------------------------"
-ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm_opnfv "cat $FUNCTEST_LOG"
-echo "---------------------------------------------------------------------------------"
-exit ${functest_exit}
diff --git a/jjb/xci/xci-set-scenario.sh b/jjb/xci/xci-set-scenario.sh
deleted file mode 100755
index ccea16f87..000000000
--- a/jjb/xci/xci-set-scenario.sh
+++ /dev/null
@@ -1,265 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 SUSE and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-set -o pipefail
-set -x
-
-#----------------------------------------------------------------------
-# This script is used by CI and executed by Jenkins jobs.
-# You are not supposed to use this script manually if you don't know
-# what you are doing.
-#----------------------------------------------------------------------
-
-# This function allows developers to specify the impacted scenario by adding
-# the installer and scenario info into the commit message or by using
-# special topic branch names. This results in either skipping the real verification
-# entirely or skipping the programmatic determination of the installer and scenario.
-# It is important to note that this feature is only available for generic scenarios
-# and only a single installer/scenario pair is allowed.
-# The input in the commit message should be placed at the end of the commit message body,
-# before the signed-off and change-id lines.
-#
-# Pattern to be searched in Commit Message
-# deploy-scenario:<scenario-name>
-# installer-type:<installer-type>
-# Examples:
-# deploy-scenario:os-odl-nofeature
-# installer-type:osa
-#
-# deploy-scenario:k8-nosdn-nofeature
-# installer-type:kubespray
-#
-# Patterns to be searched in topic branch name
-# skip-verify
-# skip-deployment
-# force-verify
-function override_scenario() {
- echo "Processing $GERRIT_PROJECT patchset $GERRIT_REFSPEC"
-
- # ensure the metadata we record is consistent for all types of patches including skipped ones
- # extract releng-xci sha
- XCI_SHA=$(cd $WORKSPACE && git rev-parse HEAD)
-
- # extract scenario sha which is same as releng-xci sha for generic scenarios
- SCENARIO_SHA=$XCI_SHA
-
- # process topic branch names
- if [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment|force-verify ]]; then
- [[ "$GERRIT_TOPIC" =~ force-verify ]] && echo "Forcing CI verification using default scenario and installer!"
- [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment ]] && echo "Skipping verification!"
- echo "INSTALLER_TYPE=osa" >> $WORK_DIRECTORY/scenario.properties
- echo "DEPLOY_SCENARIO=os-nosdn-nofeature" >> $WORK_DIRECTORY/scenario.properties
- echo "XCI_SHA=$XCI_SHA" >> $WORK_DIRECTORY/scenario.properties
- echo "SCENARIO_SHA=$SCENARIO_SHA" >> $WORK_DIRECTORY/scenario.properties
- echo "PROJECT_NAME=$GERRIT_PROJECT" >> $WORK_DIRECTORY/scenario.properties
- log_scenario_properties
- exit 0
- fi
-
- # process commit message
- if [[ "$GERRIT_CHANGE_COMMIT_MESSAGE" =~ "installer-type:" && "$GERRIT_CHANGE_COMMIT_MESSAGE" =~ "deploy-scenario:" ]]; then
- INSTALLER_TYPE=$(echo $GERRIT_CHANGE_COMMIT_MESSAGE | awk '/installer-type:/' RS=" " | cut -d":" -f2)
- DEPLOY_SCENARIO=$(echo $GERRIT_CHANGE_COMMIT_MESSAGE | awk '/deploy-scenario:/' RS=" " | cut -d":" -f2)
-
- if [[ -z "$INSTALLER_TYPE" || -z "$DEPLOY_SCENARIO" ]]; then
- echo "Installer type or deploy scenario is not specified. Falling back to programmatically determining them."
- else
- echo "Recording the installer '$INSTALLER_TYPE' and scenario '$DEPLOY_SCENARIO' for downstream jobs"
- echo "INSTALLER_TYPE=$INSTALLER_TYPE" >> $WORK_DIRECTORY/scenario.properties
- echo "DEPLOY_SCENARIO=$DEPLOY_SCENARIO" >> $WORK_DIRECTORY/scenario.properties
- echo "XCI_SHA=$XCI_SHA" >> $WORK_DIRECTORY/scenario.properties
- echo "SCENARIO_SHA=$SCENARIO_SHA" >> $WORK_DIRECTORY/scenario.properties
- echo "PROJECT_NAME=$GERRIT_PROJECT" >> $WORK_DIRECTORY/scenario.properties
- log_scenario_properties
- exit 0
- fi
- else
- echo "Installer type or deploy scenario is not specified. Falling back to programmatically determining them."
- fi
-}
-
-# This function determines the default scenario for changes coming to releng-xci
-# by processing the Gerrit change and using diff to see what changed.
-#
-# releng-xci contains the installer and other common bits, so the
-# determination is based on those.
-#
-# Pattern
-# releng-xci/installer/<installer_type>/<impacted files>: <scenario>
-function determine_default_scenario() {
- echo "Processing $GERRIT_PROJECT patchset $GERRIT_REFSPEC"
-
- # get the changeset
- cd $WORKSPACE
- # We need to set default scenario for changes that mess with installers
- INSTALLERS=$(git diff HEAD^..HEAD --name-only -- 'xci/installer' | cut -d "/" -f 3 | uniq)
- for CHANGED_INSTALLER in $INSTALLERS; do
- case $CHANGED_INSTALLER in
- kubespray)
- DEPLOY_SCENARIO[${#DEPLOY_SCENARIO[@]}]='k8-nosdn-nofeature'
- ;;
- # Default case (including OSA changes)
- *)
- DEPLOY_SCENARIO[${#DEPLOY_SCENARIO[@]}]='os-nosdn-nofeature'
- ;;
- esac
- done
- # For all other changes, we only need to set a default scenario if it's not set already
- if git diff HEAD^..HEAD --name-only | grep -q -v 'xci/installer'; then
- [[ ${#DEPLOY_SCENARIO[@]} -eq 0 ]] && DEPLOY_SCENARIO[${#DEPLOY_SCENARIO[@]}]='os-nosdn-nofeature'
- fi
-
- # extract releng-xci sha
- XCI_SHA=$(cd $WORKSPACE && git rev-parse HEAD)
-
- # TODO: we need to fix this so we actually extract the scenario sha by cloning releng-xci-scenarios
- # for the determined scenario. it is crucial for promotion...
- SCENARIO_SHA=$XCI_SHA
-}
-
-# This function determines the impacted scenario by processing the Gerrit
-# change and using diff to see what changed. If changed files belong to a scenario
-# its name gets recorded for deploying and testing the right scenario.
-#
-# Pattern
-# <project-repo>/scenarios/<scenario>/<impacted files>: <scenario>
-function determine_scenario() {
- echo "Processing $GERRIT_PROJECT patchset $GERRIT_REFSPEC"
-
- # remove the clone done by Jenkins and place releng-xci there so that
- # things continue functioning properly
- cd $HOME && /bin/rm -rf $WORKSPACE
- git clone -q https://gerrit.opnfv.org/gerrit/releng-xci $WORKSPACE && cd $WORKSPACE
-
- # fix the permissions so ssh doesn't complain due to having world-readable keyfiles
- chmod -R go-rwx $WORKSPACE/xci/scripts/vm
-
- # clone the project repo and fetch the patchset for further processing
- git clone -q https://gerrit.opnfv.org/gerrit/$GERRIT_PROJECT $WORK_DIRECTORY/$GERRIT_PROJECT
- cd $WORK_DIRECTORY/$GERRIT_PROJECT
- git fetch -q https://gerrit.opnfv.org/gerrit/$GERRIT_PROJECT $GERRIT_REFSPEC && git checkout -q FETCH_HEAD
-
- # process the diff to find out what scenario(s) are impacted - there should only be 1
- DEPLOY_SCENARIO+=$(git diff HEAD^..HEAD --name-only | grep scenarios | awk -F '[/|/]' '{print $2}' | uniq)
-
- # extract releng-xci sha
- XCI_SHA=$(cd $WORKSPACE && git rev-parse HEAD)
-
- # extract scenario sha
- SCENARIO_SHA=$(cd $WORK_DIRECTORY/$GERRIT_PROJECT && git rev-parse HEAD)
-}
-
-# This function allows developers to specify the XCI flavor for the
-# impacted scenario by adding the flavor info into the commit message.
-# This skips the programmatic determination of the XCI flavor for the
-# impacted scenario.
-# When no flavor is specified in the commit message, the default
-# flavor 'mini' is used instead.
-# The input in the commit message should be placed at the end of the commit message body,
-# before the signed-off and change-id lines.
-#
-# Pattern to be searched in Commit Message
-# xci-flavor:<xci-flavor>
-# Examples:
-# xci-flavor:noha
-function override_xci_flavor() {
- echo "Processing $GERRIT_PROJECT patchset $GERRIT_REFSPEC"
-
- # process commit message for XCI Flavor
- if [[ "$GERRIT_CHANGE_COMMIT_MESSAGE" =~ "xci-flavor:" ]]; then
- XCI_FLAVOR=$(echo $GERRIT_CHANGE_COMMIT_MESSAGE | awk '/xci-flavor:/' RS=" " | cut -d":" -f2)
-
- if [[ -z "$XCI_FLAVOR" ]]; then
- XCI_FLAVOR='mini'
- echo "XCI flavor is not specified. Using the default flavor 'mini' instead."
- echo "XCI_FLAVOR=mini" >> $WORK_DIRECTORY/scenario.properties
- else
- echo "Recording the XCI flavor '$XCI_FLAVOR' for downstream jobs"
- echo "XCI_FLAVOR=$XCI_FLAVOR" >> $WORK_DIRECTORY/scenario.properties
- fi
- else
- XCI_FLAVOR='mini'
- echo "XCI flavor is not specified. Using the default flavor 'mini' instead."
- echo "XCI_FLAVOR=mini" >> $WORK_DIRECTORY/scenario.properties
- fi
-
-}
-
-function log_scenario_properties() {
- echo "Processing $GERRIT_PROJECT patchset $GERRIT_REFSPEC"
- echo "-------------------------------------------------------------------------"
- cat $WORK_DIRECTORY/scenario.properties
- echo "-------------------------------------------------------------------------"
-}
-
-echo "Determining the impacted scenario"
-
-declare -a DEPLOY_SCENARIO
-
-# ensure GERRIT_TOPIC is set
-GERRIT_TOPIC="${GERRIT_TOPIC:-''}"
-
-# this directory is where the temporary clones and files are created
-# while extracting the impacted scenario
-WORK_DIRECTORY=/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO
-/bin/rm -rf $WORK_DIRECTORY && mkdir -p $WORK_DIRECTORY
-
-if [[ $GERRIT_PROJECT == "releng-xci" ]]; then
- determine_default_scenario
-else
- determine_scenario
-fi
-override_xci_flavor
-override_scenario
-
-# ensure single scenario is impacted
-if [[ $(IFS=$'\n' echo ${DEPLOY_SCENARIO[@]} | wc -w) != 1 ]]; then
- echo "Change impacts multiple scenarios!"
- echo "XCI doesn't currently support testing changes that impact multiple scenarios."
- echo "Please split your change into multiple separate/dependent changes, each modifying a single scenario."
- exit 1
-fi
-
-# set the installer
-case ${DEPLOY_SCENARIO[0]} in
- os-*)
- INSTALLER_TYPE=osa
- ;;
- k8-*)
- INSTALLER_TYPE=kubespray
- ;;
- *)
- echo "Unable to determine the installer. Exiting!"
- exit 1
- ;;
-esac
-
-# save the installer and scenario names into java properties file
-# so they can be injected to downstream jobs via envInject
-echo "Recording the installer '$INSTALLER_TYPE' and scenario '${DEPLOY_SCENARIO[0]}' and SHAs for downstream jobs"
-echo "INSTALLER_TYPE=$INSTALLER_TYPE" >> $WORK_DIRECTORY/scenario.properties
-echo "DEPLOY_SCENARIO=$DEPLOY_SCENARIO" >> $WORK_DIRECTORY/scenario.properties
-echo "XCI_SHA=$XCI_SHA" >> $WORK_DIRECTORY/scenario.properties
-echo "SCENARIO_SHA=$SCENARIO_SHA" >> $WORK_DIRECTORY/scenario.properties
-echo "PROJECT_NAME=$GERRIT_PROJECT" >> $WORK_DIRECTORY/scenario.properties
-
-# log scenario.properties to console to ease the troubleshooting when needed
-log_scenario_properties
-
-# skip scenario support check if the job is promotion job
-if [[ "$JOB_NAME" =~ (os|k8) ]]; then
- exit 0
-fi
-
-# skip the deployment if the scenario is not supported on this distro
-OPNFV_SCENARIO_REQUIREMENTS=$WORKSPACE/xci/opnfv-scenario-requirements.yml
-if ! sed -n "/^- scenario: ${DEPLOY_SCENARIO[0]}$/,/^$/p" $OPNFV_SCENARIO_REQUIREMENTS | grep -q $DISTRO; then
- echo "# SKIPPED: Scenario ${DEPLOY_SCENARIO[0]} is NOT supported on $DISTRO"
- exit 0
-fi
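
Note: the scenario.properties file written above is a plain key=value properties file that downstream jobs read back via envInject. For a default releng-xci change it would contain something along these lines (SHA values are placeholders):

    XCI_FLAVOR=mini
    INSTALLER_TYPE=osa
    DEPLOY_SCENARIO=os-nosdn-nofeature
    XCI_SHA=<releng-xci HEAD sha>
    SCENARIO_SHA=<scenario repo HEAD sha>
    PROJECT_NAME=releng-xci
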
diff --git a/jjb/xci/xci-start-deployment.sh b/jjb/xci/xci-start-deployment.sh
deleted file mode 100755
index 102ca41c3..000000000
--- a/jjb/xci/xci-start-deployment.sh
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 SUSE and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-#----------------------------------------------------------------------
-# This script is used by CI and executed by Jenkins jobs.
-# You are not supposed to use this script manually if you don't know
-# what you are doing.
-#----------------------------------------------------------------------
-
-# ensure GERRIT_TOPIC is set
-GERRIT_TOPIC="${GERRIT_TOPIC:-''}"
-
-# skip the deployment if the patch doesn't impact the deployment
-if [[ "$GERRIT_TOPIC" =~ skip-verify|skip-deployment ]]; then
- echo "Skipping the deployment!"
- exit 0
-fi
-
-# If the scenario is external, we need to wipe WORKSPACE and place releng-xci there since
-# the project the scenario comes from is cloned and the patch is checked out to
-# xci/scenarios/$DEPLOY_SCENARIO to be synced to the clean VM.
-# Apart from that, we need the releng-xci bits in WORKSPACE for things to function correctly on Jenkins.
-# If the change targets releng-xci itself, we don't need to do anything since the patch is already
-# checked out to WORKSPACE anyway.
-if [[ $GERRIT_PROJECT != "releng-xci" ]]; then
- cd $HOME && /bin/rm -rf $WORKSPACE
- git clone https://gerrit.opnfv.org/gerrit/releng-xci $WORKSPACE && cd $WORKSPACE
- chmod -R go-rwx $WORKSPACE/xci/scripts/vm
-fi
-
-# skip the deployment if the scenario is not supported on this distro
-OPNFV_SCENARIO_REQUIREMENTS=$WORKSPACE/xci/opnfv-scenario-requirements.yml
-if ! sed -n "/^- scenario: $DEPLOY_SCENARIO$/,/^$/p" $OPNFV_SCENARIO_REQUIREMENTS | grep -q $DISTRO; then
- echo "# SKIPPED: Scenario $DEPLOY_SCENARIO is NOT supported on $DISTRO"
- exit 0
-fi
-
-ssh -F $HOME/.ssh/${DISTRO}-xci-vm-config ${DISTRO}_xci_vm "cd releng-xci && ./xci_test.sh"
diff --git a/jjb/xci/xci-start-new-vm.sh b/jjb/xci/xci-start-new-vm.sh
deleted file mode 100755
index 79b6a785a..000000000
--- a/jjb/xci/xci-start-new-vm.sh
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2018 SUSE and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-#----------------------------------------------------------------------
-# This script is used by CI and executed by Jenkins jobs.
-# You are not supposed to use this script manually if you don't know
-# what you are doing.
-#----------------------------------------------------------------------
-
-# ensure GERRIT_TOPIC is set
-GERRIT_TOPIC="${GERRIT_TOPIC:-''}"
-
-# skip the deployment if the patch doesn't impact the deployment
-if [[ "$GERRIT_TOPIC" =~ 'skip-verify' ]]; then
- echo "Skipping the deployment!"
- exit 0
-fi
-
-# If the scenario is external, we need to wipe WORKSPACE and place releng-xci there since
-# the project the scenario comes from is cloned and the patch is checked out to
-# xci/scenarios/$DEPLOY_SCENARIO to be synced to the clean VM.
-# Apart from that, we need the releng-xci bits in WORKSPACE for things to function correctly on Jenkins.
-# If the change targets releng-xci itself, we don't need to do anything since the patch is already
-# checked out to WORKSPACE anyway.
-if [[ $GERRIT_PROJECT != "releng-xci" ]]; then
- cd $HOME && /bin/rm -rf $WORKSPACE
- git clone https://gerrit.opnfv.org/gerrit/releng-xci $WORKSPACE && cd $WORKSPACE
- chmod -R go-rwx $WORKSPACE/xci/scripts/vm
-fi
-
-# skip the deployment if the scenario is not supported on this distro
-OPNFV_SCENARIO_REQUIREMENTS=$WORKSPACE/xci/opnfv-scenario-requirements.yml
-if ! sed -n "/^- scenario: $DEPLOY_SCENARIO$/,/^$/p" $OPNFV_SCENARIO_REQUIREMENTS | grep -q $DISTRO; then
- echo "# SKIPPED: Scenario $DEPLOY_SCENARIO is NOT supported on $DISTRO"
- exit 0
-fi
-
-cd $WORKSPACE
-
-# The start-new-vm.sh script will copy the entire releng-xci directory
-# so let's prepare the test script now so it can be copied by the script.
-# Please do not move it elsewhere or you would have to move it to the VM
-# yourself.
-cat > xci_test.sh<<EOF
-#!/bin/bash
-set -o pipefail
-export DISTRO=$DISTRO
-export DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-export FUNCTEST_MODE=$FUNCTEST_MODE
-export FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
-export XCI_FLAVOR=$XCI_FLAVOR
-export CORE_OPENSTACK_INSTALL=true
-export BIFROST_USE_PREBUILT_IMAGES=true
-export CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
-export OPNFV_RELENG_DEV_PATH=/home/devuser/releng-xci/
-export INSTALLER_TYPE=$INSTALLER_TYPE
-export GIT_BASE=$GIT_BASE
-export JENKINS_HOME=$JENKINS_HOME
-export CI_LOOP=$CI_LOOP
-export BUILD_TAG=$BUILD_TAG
-export NODE_NAME=$NODE_NAME
-
-if [[ $GERRIT_PROJECT != "releng-xci" ]]; then
- export XCI_ANSIBLE_PARAMS="-e @/home/devuser/releng-xci/scenario_overrides.yml"
-fi
-
-cd xci
-./xci-deploy.sh | ts
-EOF
-
-if [[ $GERRIT_PROJECT != "releng-xci" ]]; then
- cat > scenario_overrides.yml <<-EOF
----
-xci_scenarios_overrides:
- - scenario: $DEPLOY_SCENARIO
- version: $GERRIT_PATCHSET_REVISION
- refspec: $GERRIT_REFSPEC
-EOF
-fi
-
-chmod a+x xci_test.sh
-
-export XCI_BUILD_CLEAN_VM_OS=false
-export XCI_UPDATE_CLEAN_VM_OS=true
-
-./xci/scripts/vm/start-new-vm.sh $DISTRO
diff --git a/jjb/xci/xci-verify-jobs.yaml b/jjb/xci/xci-verify-jobs.yaml
deleted file mode 100644
index 799ef6e96..000000000
--- a/jjb/xci/xci-verify-jobs.yaml
+++ /dev/null
@@ -1,327 +0,0 @@
----
-- project:
- name: 'opnfv-xci-verify'
-
- project: releng-xci
- # -------------------------------
- # branches
- # -------------------------------
- stream:
- - master:
- branch: '{stream}'
- # -------------------------------
- # distros
- # -------------------------------
- distro:
- - ubuntu:
- disabled: false
- successful: false
- failed: false
- unstable: false
- notbuilt: false
- - centos:
- disabled: true
- successful: true
- failed: true
- unstable: true
- notbuilt: true
- - opensuse:
- disabled: false
- successful: true
- failed: true
- unstable: true
- notbuilt: true
- # -------------------------------
- # type
- # -------------------------------
- type:
- - virtual
- # -------------------------------
- # patch verification phases
- # -------------------------------
- phase:
- - 'deploy'
- - 'healthcheck'
- # -------------------------------
- # jobs
- # -------------------------------
- jobs:
- - 'xci-verify-{distro}-{type}-{stream}'
- - 'xci-verify-{distro}-{phase}-{type}-{stream}'
-# -------------------------------
-# job templates
-# -------------------------------
-- job-template:
- name: 'xci-verify-{distro}-{type}-{stream}'
-
- project-type: multijob
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - 'xci-verify-{distro}-.*'
- - 'xci-merge-{distro}-.*'
- - 'xci-os-.*'
- - 'xci-k8-.*'
- - 'openstack-bifrost-verify-{distro}-.*'
- - 'xci-osa-verify-{distro}-.*'
- - 'xci-osa-periodic-{distro}-.*'
- blocking-level: 'NODE'
- - throttle:
- max-per-node: 2
- max-total: 10
- categories:
- - xci-verify-virtual
- option: category
-
- wrappers:
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'true'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: 'releng-xci'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'false'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'xci/scripts/vm/**'
- - compare-type: ANT
- pattern: 'docs/**'
- - compare-type: ANT
- pattern: 'upstream/**'
- - project-compare-type: 'REG_EXP'
- project-pattern: 'sfc|sdnvpn|releng-xci-scenarios'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- disable-strict-forbidden-file-verification: 'true'
- file-paths:
- - compare-type: ANT
- pattern: 'scenarios/**'
- readable-message: true
- custom-url: '* $JOB_NAME $BUILD_URL'
- skip-vote:
- successful: '{obj:successful}'
- failed: '{obj:failed}'
- unstable: '{obj:unstable}'
- notbuilt: '{obj:notbuilt}'
-
- parameters:
- - project-parameter:
- project: $GERRIT_PROJECT
- branch: '{branch}'
- - label:
- name: SLAVE_LABEL
- default: 'xci-virtual'
- all-nodes: false
- node-eligibility: 'ignore-offline'
- - string:
- name: DISTRO
- default: '{distro}'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - string:
- name: CI_LOOP
- default: 'verify'
- - string:
- name: FUNCTEST_VERSION
- default: 'hunter'
- - string:
- name: FUNCTEST_MODE
- default: 'tier'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'healthcheck'
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- builders:
- - 'xci-verify-clean-vm-macro'
- - 'xci-verify-set-scenario-macro'
- - inject:
- properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
- - description-setter:
- description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
- - multijob:
- name: deploy
- condition: SUCCESSFUL
- projects:
- - name: 'xci-verify-{distro}-deploy-{type}-{stream}'
- current-parameters: true
- predefined-parameters: |
- DISTRO={distro}
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- CI_LOOP=$CI_LOOP
- FUNCTEST_VERSION=$FUNCTEST_VERSION
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- node-parameters: true
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: healthcheck
- condition: SUCCESSFUL
- projects:
- - name: 'xci-verify-{distro}-healthcheck-{type}-{stream}'
- current-parameters: true
- predefined-parameters: |
- DISTRO={distro}
- CLEAN_DIB_IMAGES=$CLEAN_DIB_IMAGES
- GERRIT_PROJECT=$GERRIT_PROJECT
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- CI_LOOP=$CI_LOOP
- FUNCTEST_VERSION=$FUNCTEST_VERSION
- FUNCTEST_MODE=$FUNCTEST_MODE
- FUNCTEST_SUITE_NAME=$FUNCTEST_SUITE_NAME
- node-parameters: true
- kill-phase-on: NEVER
- abort-all-job: true
-
- publishers:
- - postbuildscript:
- builders:
- - role: BOTH
- build-on:
- - ABORTED
- - NOT_BUILT
- - SUCCESS
- - UNSTABLE
- build-steps:
- - shell: !include-raw: ./xci-cleanup.sh
- mark-unstable-if-failed: true
-
-
-- job-template:
- name: 'xci-verify-{distro}-{phase}-{type}-{stream}'
-
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - build-blocker:
- use-build-blocker: true
- blocking-jobs:
- - '.*-bifrost-verify-.*'
- - '.*-bifrost-periodic-.*'
- - 'osa-verify-.*'
- - 'osa-periodic-.*'
- blocking-level: 'NODE'
-
- parameters:
- - string:
- name: PROJECT
- default: $GERRIT_PROJECT
- - string:
- name: DISTRO
- default: 'ubuntu'
- - string:
- name: CI_LOOP
- default: 'verify'
- - string:
- name: FUNCTEST_VERSION
- default: 'hunter'
- - string:
- name: FUNCTEST_MODE
- default: 'tier'
- - string:
- name: FUNCTEST_SUITE_NAME
- default: 'healthcheck'
- - string:
- name: XCI_FLAVOR
- default: 'mini'
- - string:
- name: CLEAN_DIB_IMAGES
- default: 'true'
- - string:
- name: OPNFV_RELENG_DEV_PATH
- default: $WORKSPACE/
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-
- wrappers:
- - inject:
- properties-file: "/tmp/$GERRIT_CHANGE_NUMBER/$DISTRO/scenario.properties"
- - ssh-agent-wrapper
- - build-timeout:
- timeout: 240
- - fix-workspace-permissions
-
- scm:
- - git-scm-gerrit
-
- builders:
- - description-setter:
- description: "Scenario: $DEPLOY_SCENARIO | Node: $NODE_NAME"
- - 'xci-verify-{phase}-macro'
-
-# -------------------------------
-# builder macros
-# -------------------------------
-- builder:
- name: 'xci-verify-clean-vm-macro'
- builders:
- - shell:
- !include-raw: ./xci-cleanup.sh
-
-- builder:
- name: 'xci-verify-set-scenario-macro'
- builders:
- - shell:
- !include-raw: ./xci-set-scenario.sh
-
-- builder:
- name: 'xci-verify-deploy-macro'
- builders:
- - shell:
- !include-raw: ./xci-start-new-vm.sh
-
- - shell:
- !include-raw: ./xci-start-deployment.sh
-
-- builder:
- name: 'xci-verify-healthcheck-macro'
- builders:
- - shell:
- !include-raw: ./xci-run-functest.sh
diff --git a/jjb/xci/xci-views.yaml b/jjb/xci/xci-views.yaml
deleted file mode 100644
index ee1e53a59..000000000
--- a/jjb/xci/xci-views.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
----
-- project:
- name: xci-view
- views:
- - project-view
- project-name: xci
diff --git a/jjb/yardstick/yardstick-cleanup.sh b/jjb/yardstick/yardstick-cleanup.sh
deleted file mode 100755
index 47bf9bd10..000000000
--- a/jjb/yardstick/yardstick-cleanup.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-[[ ${CI_DEBUG} == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
-# Remove containers along with image opnfv/yardstick*:<none>
-dangling_images=($(docker images -f "dangling=true" | awk '/opnfv[/]yardstick/ {print $3}'))
-if [[ ${#dangling_images[@]} -gt 0 ]] ; then
- echo "Removing opnfv/yardstick:<none> images and their containers..."
- for image_id in "${dangling_images[@]}"; do
- echo " Removing image_id: $image_id and its containers"
- containers=$(docker ps -a | awk "/${image_id}/ {print \$1}")
- if [[ -n "$containers" ]];then
- docker rm -f "${containers}" >${redirect}
- fi
- docker rmi "${image_id}" >${redirect}
- done
-fi
-
-echo "Cleaning up docker containers/images..."
-# Remove previously running containers if they exist
-if docker ps -a | grep -q opnfv/yardstick; then
- echo "Removing existing opnfv/yardstick containers..."
- docker ps -a | awk '/opnfv[/]yardstick/ {print $1}' | xargs docker rm -f >${redirect}
-
-fi
-
-# Remove existing images if they exist
-if docker images | grep -q opnfv/yardstick; then
- echo "Docker images to remove:"
- docker images | head -1 && docker images | grep opnfv/yardstick
- image_ids=($(docker images | awk '/opnfv[/]yardstick/ {print $3}'))
- for id in "${image_ids[@]}"; do
- echo "Removing docker image id $id..."
- docker rmi "${id}" >${redirect}
- done
-fi
-
diff --git a/jjb/yardstick/yardstick-daily-jobs.yaml b/jjb/yardstick/yardstick-daily-jobs.yaml
deleted file mode 100644
index e12eee79f..000000000
--- a/jjb/yardstick/yardstick-daily-jobs.yaml
+++ /dev/null
@@ -1,509 +0,0 @@
----
-###################################
-# job configuration for yardstick
-###################################
-- project:
- name: yardstick
-
- project: '{name}'
-
- # -------------------------------
- # BRANCH ANCHORS
- # -------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- docker-tag: 'latest'
- hunter: &hunter
- stream: hunter
- branch: 'stable/{stream}'
- gs-pathname: '{stream}'
- docker-tag: 'stable'
- # -------------------------------
- # POD, INSTALLER, AND BRANCH MAPPING
- # -------------------------------
- # Installers using labels
- # CI PODs
- # This section should only contain the installers
- # that have been switched using labels for slaves
- # -------------------------------
- pod:
- # apex CI PODs
- - virtual:
- slave-label: apex-virtual-master
- installer: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - baremetal:
- slave-label: apex-baremetal-master
- installer: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - virtual:
- slave-label: apex-virtual-master
- installer: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *hunter
- - baremetal:
- slave-label: apex-baremetal-master
- installer: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *hunter
- # fuel CI PODs
- - baremetal:
- slave-label: fuel-baremetal
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - virtual:
- slave-label: fuel-virtual
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - baremetal:
- slave-label: fuel-baremetal
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *hunter
- - virtual:
- slave-label: fuel-virtual
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *hunter
- # armband CI PODs
- - armband-baremetal:
- slave-label: armband-baremetal
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - armband-virtual:
- slave-label: armband-virtual
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - armband-baremetal:
- slave-label: armband-baremetal
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *hunter
- - armband-virtual:
- slave-label: armband-virtual
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *hunter
- # joid CI PODs
- - baremetal:
- slave-label: joid-baremetal
- installer: joid
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - virtual:
- slave-label: joid-virtual
- installer: joid
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - baremetal:
- slave-label: joid-baremetal
- installer: joid
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *hunter
- - virtual:
- slave-label: joid-virtual
- installer: joid
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *hunter
- # daisy CI PODs
- - baremetal:
- slave-label: daisy-baremetal
- installer: daisy
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - baremetal:
- slave-label: daisy-baremetal
- installer: daisy
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *hunter
- - virtual:
- slave-label: daisy-virtual
- installer: daisy
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- # -------------------------------
- # Non-CI PODs
- # -------------------------------
- - orange-pod1:
- slave-label: '{pod}'
- installer: joid
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - itri-pod1:
- slave-label: '{pod}'
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - zte-pod1:
- slave-label: '{pod}'
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - zte-pod1:
- slave-label: '{pod}'
- installer: fuel
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *hunter
- - zte-pod2:
- slave-label: '{pod}'
- installer: daisy
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - zte-pod3:
- slave-label: '{pod}'
- installer: daisy
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - zte-pod3:
- slave-label: '{pod}'
- installer: daisy
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *hunter
- - zte-pod9:
- slave-label: '{pod}'
- installer: daisy
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - zte-pod9:
- slave-label: '{pod}'
- installer: daisy
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *hunter
- - orange-pod2:
- slave-label: '{pod}'
- installer: joid
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- - flex-pod1:
- slave-label: '{pod}'
- installer: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
- # -------------------------------
- testsuite:
- - 'daily'
-
- jobs:
- - 'yardstick-{installer}-{pod}-{testsuite}-{stream}'
-
-################################
-# job templates
-################################
-- job-template:
- name: 'yardstick-{installer}-{pod}-{testsuite}-{stream}'
-
- disabled: false
-
- concurrent: true
-
- properties:
- - logrotate-default
- - throttle:
- enabled: true
- max-per-node: 1
- option: 'project'
-
- wrappers:
- - build-name:
- name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
- - timeout:
- timeout: 180
- abort: true
-
- triggers:
- - '{auto-trigger-name}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - '{installer}-defaults'
- - '{slave-label}-defaults'
- - 'yardstick-params-{slave-label}'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-odl_l2-nofeature-ha'
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: 'Tag to pull docker image'
- - string:
- name: YARDSTICK_SCENARIO_SUITE_NAME
- default: opnfv_${{DEPLOY_SCENARIO}}_{testsuite}.yaml
- description: 'Path to test scenario suite'
- - string:
- name: CI_DEBUG
- default: 'false'
- description: "Show debug output information"
-
- scm:
- - git-scm
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - 'yardstick-cleanup'
- - 'yardstick-fetch-creds'
- - 'yardstick-{testsuite}'
- - 'yardstick-store-results'
-
- publishers:
- - email:
- recipients: jean.gaoliang@huawei.com limingjiang@huawei.com
- - email-jenkins-admins-on-failure
-
-########################
-# builder macros
-########################
-- builder:
- name: yardstick-daily
- builders:
- - shell:
- !include-raw: ./yardstick-daily.sh
-
-- builder:
- name: yardstick-fetch-creds
- builders:
- # yamllint disable rule:indentation
- - conditional-step:
- condition-kind: regex-match
- regex: "os-.*"
- label: '$DEPLOY_SCENARIO'
- steps:
- - shell:
- !include-raw: ../../utils/fetch_os_creds.sh
- - conditional-step:
- condition-kind: regex-match
- regex: "k8-.*"
- label: '$DEPLOY_SCENARIO'
- steps:
- - shell:
- !include-raw: ./yardstick-get-k8s-conf.sh
-
-- builder:
- name: yardstick-store-results
- builders:
- - shell:
- !include-raw: ../../utils/push-test-logs.sh
-
-- builder:
- name: yardstick-cleanup
- builders:
- - shell:
- !include-raw: ./yardstick-cleanup.sh
-########################
-# parameter macros
-########################
-- parameter:
- name: 'yardstick-params-apex-virtual-master'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-apex-baremetal-master'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-apex-virtual-hunter'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-apex-baremetal-hunter'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-fuel-baremetal'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-fuel-virtual'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-armband-baremetal'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-armband-virtual'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-joid-baremetal'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-joid-virtual'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-intel-pod8'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-daisy-baremetal'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-daisy-virtual'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-itri-pod1'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-zte-pod1'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-zte-pod2'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-zte-pod3'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-zte-pod9'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-orange-pod1'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-orange-pod2'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-virtual'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-huawei-pod3'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-huawei-pod4'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-flex-pod1'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-#######################
-# trigger macros
-#######################
-# trigger for PODs to only run yardstick test suites
-- trigger:
- name: 'yardstick-daily-huawei-pod3-trigger'
- triggers:
- - timed: '0 1 * * *'
-
-- trigger:
- name: 'yardstick-daily-huawei-pod4-trigger'
- triggers:
- - timed: ''
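
Note: the pod list in this file relies on YAML anchors and merge keys: '&master' and '&hunter' name the branch blocks, and '<<: *master' merges those keys into each pod entry before jenkins-job-builder expands the job templates. As a sketch, the first apex entry above is equivalent to writing it out in full:

    - virtual:
        slave-label: apex-virtual-master
        installer: apex
        auto-trigger-name: 'daily-trigger-disabled'
        stream: master
        branch: '{stream}'
        gs-pathname: ''
        docker-tag: 'latest'
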
diff --git a/jjb/yardstick/yardstick-daily.sh b/jjb/yardstick/yardstick-daily.sh
deleted file mode 100755
index 91b3e8e0f..000000000
--- a/jjb/yardstick/yardstick-daily.sh
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/bin/bash
-set -e
-[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
-rc_file_vol=""
-cacert_file_vol=""
-sshkey=""
-
-rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/etc/yardstick/openstack.creds"
-
-if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
- instack_mac=$(sudo virsh domiflist undercloud | grep default | \
- grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
- INSTALLER_IP=$(/usr/sbin/arp -e | grep ${instack_mac} | awk {'print $1'})
- sshkey="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
- if [[ -n $(sudo iptables -L FORWARD |grep "REJECT"|grep "reject-with icmp-port-unreachable") ]]; then
- #note: this happens only in opnfv-lf-pod1
- sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
- sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
- fi
-fi
-
-if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
- if [[ "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then
- rc_file_vol="-v ${HOME}/admin.conf:/etc/yardstick/admin.conf"
- else
- # If production lab then creds may be retrieved dynamically
- # creds are on the jumphost, always in the same folder
- rc_file_vol="-v $LAB_CONFIG/admin-openrc:/etc/yardstick/openstack.creds"
- # If dev lab, credentials may not be the default ones, just provide a path to put them into docker
- # replace the default one by the customized one provided by jenkins config
- fi
-elif [[ ${INSTALLER_TYPE} == 'fuel' ]]; then
- if [[ "${DEPLOY_SCENARIO:0:2}" == 'k8' ]]; then
- rc_file_vol="-v ${HOME}/admin.conf:/etc/yardstick/admin.conf"
- else
- cacert_file_vol="-v ${HOME}/os_cacert:/etc/ssl/certs/mcp_os_cacert"
- fi
- sshkey="-v ${SSH_KEY}:/root/.ssh/mcp.rsa"
-fi
-
-opts="--privileged=true --rm"
-envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
- -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NETWORK=${EXTERNAL_NETWORK} \
- -e YARDSTICK_BRANCH=${BRANCH} -e BRANCH=${BRANCH} \
- -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO} -e CI_DEBUG=true"
-
-if [[ "${INSTALLER_TYPE}" == 'fuel' ]]; then
- envs+=" -e SSH_KEY=/root/.ssh/mcp.rsa"
-fi
-
-# Pull the image with correct tag
-DOCKER_REPO='opnfv/yardstick'
-if [ "$(uname -m)" = 'aarch64' ]; then
- DOCKER_REPO="${DOCKER_REPO}_$(uname -m)"
-fi
-echo "Yardstick: Pulling image ${DOCKER_REPO}:${DOCKER_TAG}"
-docker pull ${DOCKER_REPO}:$DOCKER_TAG >$redirect
-docker images
-
-# map log directory
-branch=${BRANCH##*/}
-dir_result="${HOME}/opnfv/yardstick/results/${branch}"
-mkdir -p ${dir_result}
-sudo rm -rf ${dir_result}/*
-map_log_dir="-v ${dir_result}:/tmp/yardstick"
-
-# Run docker
-cmd="sudo docker run ${opts} ${envs} ${rc_file_vol} ${cacert_file_vol} ${map_log_dir} ${sshkey} ${DOCKER_REPO}:${DOCKER_TAG} \
-exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
-
-echo "Yardstick: Running docker cmd: ${cmd}"
-${cmd}
-
-echo "Yardstick: done!"
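
Note: the script assembles and echoes a single docker run command from the pieces above. For an apex deployment on master it would look roughly like the sketch below (IP, node name and network values are hypothetical; the DB backend argument and suite name come from the job parameters defined elsewhere in this change):

    sudo docker run --privileged=true --rm \
        -e INSTALLER_TYPE=apex -e INSTALLER_IP=192.0.2.10 \
        -e NODE_NAME=lf-pod1 -e EXTERNAL_NETWORK=external \
        -e YARDSTICK_BRANCH=master -e BRANCH=master \
        -e DEPLOY_SCENARIO=os-nosdn-nofeature-ha -e CI_DEBUG=true \
        -v ${HOME}/opnfv-openrc.sh:/etc/yardstick/openstack.creds \
        -v /root/.ssh/id_rsa:/root/.ssh/id_rsa \
        -v ${HOME}/opnfv/yardstick/results/master:/tmp/yardstick \
        opnfv/yardstick:latest \
        exec_tests.sh -i 104.197.68.199:8086 opnfv_os-nosdn-nofeature-ha_daily.yaml
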
diff --git a/jjb/yardstick/yardstick-get-k8s-conf.sh b/jjb/yardstick/yardstick-get-k8s-conf.sh
deleted file mode 100755
index 3af2dcb71..000000000
--- a/jjb/yardstick/yardstick-get-k8s-conf.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-set -e
-
-dest_path="$HOME/admin.conf"
-
-if [[ "${DEPLOY_SCENARIO:0:2}" == "k8" ]];then
- if [[ ${INSTALLER_TYPE} == 'joid' ]];then
- juju scp kubernetes-master/0:config "${dest_path}"
- elif [[ ${INSTALLER_TYPE} == 'fuel' ]];then
- echo "Getting kubernetes config ..."
- docker cp -L fuel:/opt/kubernetes.config "${dest_path}"
- fi
-fi
diff --git a/jjb/yardstick/yardstick-project-jobs.yaml b/jjb/yardstick/yardstick-project-jobs.yaml
deleted file mode 100644
index e4d0be6bf..000000000
--- a/jjb/yardstick/yardstick-project-jobs.yaml
+++ /dev/null
@@ -1,194 +0,0 @@
----
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: yardstick-project-jobs
-
- project: 'yardstick'
-
- jobs:
- - 'yardstick-verify-{stream}'
- - 'yardstick-merge-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- - iruya: &iruya
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
-
-################################
-# job templates
-################################
-
-- job-template:
- name: 'yardstick-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'ericsson-build4-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 30
- fail: true
-
- builders:
- - yardstick-unit-tests-python-3
- - yardstick-functional-tests-python-3
- - yardstick-coverage-tests
- - yardstick-pep8-tests
-
-- job-template:
- name: 'yardstick-merge-{stream}'
-
- disabled: '{obj:disabled}'
-
- parameters:
- - project-parameter:
- project: '{project}'
- branch: '{branch}'
- - 'ericsson-build4-defaults'
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "Directory where the build artifact will be located upon\
- \ the completion of the build."
-
- scm:
- - git-scm
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - change-merged-event
- - comment-added-contains-event:
- comment-contains-value: 'remerge'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
-
- wrappers:
- - ssh-agent-wrapper
- - timeout:
- timeout: 30
- fail: true
-
- builders:
- - yardstick-unit-tests-python-3
- - yardstick-functional-tests-python-3
- - yardstick-coverage-tests
- - yardstick-pep8-tests
-
-################################
-# job builders
-################################
-
-- builder:
- name: yardstick-unit-tests-python-3
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
-
- sudo apt-get -y autoremove
- sudo apt-get install -y build-essential python-dev python3-dev
- sudo apt-get -y clean && sudo apt-get -y autoremove
-
- echo "Running unit tests in Python 3 ..."
- cd $WORKSPACE
- tox -epy3
-
-- builder:
- name: yardstick-functional-tests-python-3
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
-
- sudo apt-get -y autoremove
- sudo apt-get install -y build-essential python-dev python3-dev rabbitmq-server
- sudo apt-get -y clean && sudo apt-get -y autoremove
-
- echo "Configure RabbitMQ service"
- sudo service rabbitmq-server restart
- sudo rabbitmqctl start_app
- if [ -z "$(sudo rabbitmqctl list_users | grep yardstick)" ]; then
- sudo rabbitmqctl add_user yardstick yardstick
- sudo rabbitmqctl set_permissions yardstick ".*" ".*" ".*"
- fi
-
- echo "Running functional tests in Python 3 ..."
- cd $WORKSPACE
- tox -efunctional-py3
-
-- builder:
- name: yardstick-coverage-tests
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
-
- sudo apt-get -y autoremove
- sudo apt-get install -y build-essential python-dev python3-dev
- sudo apt-get -y clean && sudo apt-get -y autoremove
-
- echo "Running coverage tests ..."
- cd $WORKSPACE
- tox -ecoverage
-
-- builder:
- name: yardstick-pep8-tests
- builders:
- - shell: |
- #!/bin/bash
- set -o errexit
- set -o pipefail
-
- sudo apt-get -y autoremove
- sudo apt-get install -y build-essential python-dev python3-dev
- sudo apt-get -y clean && sudo apt-get -y autoremove
-
- echo "Running style guidelines (PEP8) tests ..."
- cd $WORKSPACE
- tox -epep8
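
Note: the verify and merge builders above all boil down to tox environments, so they can be reproduced locally from a yardstick checkout (assuming tox and the build dependencies listed in the builders are installed):

    cd yardstick
    tox -epy3              # unit tests, Python 3
    tox -efunctional-py3   # functional tests (needs rabbitmq-server)
    tox -ecoverage         # coverage tests
    tox -epep8             # style guidelines (PEP8)
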
diff --git a/jjb/yardstick/yardstick-rtd-jobs.yaml b/jjb/yardstick/yardstick-rtd-jobs.yaml
deleted file mode 100644
index 213ec6ac2..000000000
--- a/jjb/yardstick/yardstick-rtd-jobs.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-- project:
- name: yardstick-rtd
- project: yardstick
- project-name: yardstick
-
- project-pattern: 'yardstick'
- rtd-build-url: 'https://readthedocs.org/api/v2/webhook/opnfv-yardstick/47399/'
- rtd-token: '6aa883824f3917c7db5ffa1fe9168817fb5feb68'
-
- stream:
- - master:
- branch: '{stream}'
- disabled: false
- - hunter:
- branch: 'stable/{stream}'
- disabled: false
-
- jobs:
- - '{project-name}-rtd-jobs'
diff --git a/releases/2023.1/functest.yaml b/releases/2023.1/functest.yaml
new file mode 100644
index 000000000..b48123f96
--- /dev/null
+++ b/releases/2023.1/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/2023.1
+ location:
+ functest-xtesting: 781fc313c1c59018edb892ff0423963191781592
diff --git a/releases/2023.2/functest.yaml b/releases/2023.2/functest.yaml
new file mode 100644
index 000000000..372326435
--- /dev/null
+++ b/releases/2023.2/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/2023.2
+ location:
+ functest-xtesting: 0d6bb30968d1092caeffee4159feccf6319ff565
diff --git a/releases/hunter/vswitchperf.yaml b/releases/hunter/vswitchperf.yaml
index f0335f45f..b5053a5c2 100644
--- a/releases/hunter/vswitchperf.yaml
+++ b/releases/hunter/vswitchperf.yaml
@@ -3,6 +3,11 @@ project: vswitchperf
project-type: testing
release-model: stable
+releases:
+ - version: opnfv-8.0.0
+ location:
+ vswitchperf: 589639d313c24104b2a27ef16baf716d1d274108
+
branches:
- name: stable/hunter
location:
diff --git a/releases/iruya/vswitchperf.yaml b/releases/iruya/vswitchperf.yaml
index d81f27e0d..23e0d66c2 100644
--- a/releases/iruya/vswitchperf.yaml
+++ b/releases/iruya/vswitchperf.yaml
@@ -3,6 +3,11 @@ project: vswitchperf
project-type: testing
release-model: stable
+releases:
+ - version: opnfv-9.0.0
+ location:
+ vswitchperf: a94395daf8d3312659b56a306ea64960a2cdd64a
+
branches:
- name: stable/iruya
location:
diff --git a/releases/jerma/airship.yaml b/releases/jerma/airship.yaml
new file mode 100644
index 000000000..22bb2fdca
--- /dev/null
+++ b/releases/jerma/airship.yaml
@@ -0,0 +1,24 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: airship
+project-type: installer
+release-model: stable
+
+releases:
+ - version: opnfv-10.0.0
+ location:
+ airship: a9ef2c8eb110daa3a395f7cb118bc5d26c5fca69
+
+branches:
+ - name: stable/jerma
+ location:
+ airship: 4b2c638c2495be08953c1687ddbe689927d6cd8f
+
+release-notes: http://docs.opnfv.org/en/stable-jerma/submodules/airship/docs/release/release-notes/release-notes.html
diff --git a/releases/jerma/barometer.yaml b/releases/jerma/barometer.yaml
new file mode 100644
index 000000000..742d0a2fb
--- /dev/null
+++ b/releases/jerma/barometer.yaml
@@ -0,0 +1,21 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: barometer
+project-type: feature
+release-model: stable
+
+releases:
+ - version: opnfv-10.0.0
+ location:
+ barometer: 968f66fc2837744dfe8db69d2314cef3f0cb0800
+branches:
+ - name: stable/jerma
+ location:
+ barometer: f521375d6e898bbdef7abcd9cae25677af96bd82
diff --git a/releases/jerma/cirv.yaml b/releases/jerma/cirv.yaml
new file mode 100644
index 000000000..57cb08d87
--- /dev/null
+++ b/releases/jerma/cirv.yaml
@@ -0,0 +1,34 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: cirv
+project-type: testing
+release-model: stable
+
+releases:
+ - version: opnfv-10.0.0
+ location:
+ cirv: f38765a83c1ba03b5619c1c66ed7bf1a2b6b00be
+ - version: opnfv-10.0.0
+ location:
+ cirv-hdv: 2d145d4f1fd231def2c9d52a71267031b938c0ac
+ - version: opnfv-10.0.0
+ location:
+ cirv-sdv: d1cc451f841f995028d57677e897a39229a065a2
+
+branches:
+ - name: stable/jerma
+ location:
+ cirv: f38765a83c1ba03b5619c1c66ed7bf1a2b6b00be
+ - name: stable/jerma
+ location:
+ cirv-hdv: 2d145d4f1fd231def2c9d52a71267031b938c0ac
+ - name: stable/jerma
+ location:
+ cirv-sdv: d1cc451f841f995028d57677e897a39229a065a2
diff --git a/releases/jerma/doctor.yaml b/releases/jerma/doctor.yaml
new file mode 100644
index 000000000..d28b4e780
--- /dev/null
+++ b/releases/jerma/doctor.yaml
@@ -0,0 +1,24 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 Nokia Corporation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: doctor
+project-type: feature
+release-model: stable
+
+releases:
+ - version: opnfv-10.0.0
+ location:
+ doctor: 72a1f8c92f1692f1ea8dcb5bc706ec9939c30e0a
+
+branches:
+ - name: stable/jerma
+ location:
+ doctor: 72a1f8c92f1692f1ea8dcb5bc706ec9939c30e0a
+
+release-notes: https://opnfv-doctor.readthedocs.io/en/stable-jerma/release/release-notes/index.html
diff --git a/releases/jerma/dovetail.yaml b/releases/jerma/dovetail.yaml
new file mode 100644
index 000000000..7fd61ecd8
--- /dev/null
+++ b/releases/jerma/dovetail.yaml
@@ -0,0 +1,29 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright 2020 Huawei Technologies Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##############################################################################
+---
+project: dovetail
+project-type: testing
+release-model: stable
+
+releases:
+ - version: opnfv-10.0.0
+ location:
+ dovetail-webportal: 7a13dabbedff1184c12e3e4f5cf56545f1d01397
+
+branches:
+ - name: stable/jerma
+ location:
+ dovetail-webportal: 7a13dabbedff1184c12e3e4f5cf56545f1d01397
diff --git a/jjb/releng/releng-release-create-venv.sh b/releases/jerma/kuberef.yaml
index 0200376a7..1eaa395f3 100644
--- a/jjb/releng/releng-release-create-venv.sh
+++ b/releases/jerma/kuberef.yaml
@@ -1,19 +1,22 @@
-#!/bin/bash
# SPDX-License-Identifier: Apache-2.0
##############################################################################
-# Copyright (c) 2018 The Linux Foundation and others.
+# Copyright (c) 2020 The Linux Foundation and others.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-set -e -o pipefail
-echo "---> Create virtualenv"
+---
+project: kuberef
+project-type: installer
+release-model: stable
-sudo -H pip install virtualenv
-virtualenv -p /usr/bin/python3 $WORKSPACE/venv
+releases:
+ - version: opnfv-10.0.0
+ location:
+ kuberef: d3916f20f0b10da360cc6c38b61e1ee04d6278c5
-# shellcheck source=$WORKSPACE/venv/bin/activate disable=SC1091
-source $WORKSPACE/venv/bin/activate
-pip install --upgrade pip
-pip install -r releases/scripts/requirements.txt
+branches:
+ - name: stable/jerma
+ location:
+ kuberef: d3916f20f0b10da360cc6c38b61e1ee04d6278c5
diff --git a/releases/jerma/moon.yaml b/releases/jerma/moon.yaml
new file mode 100644
index 000000000..742bc2165
--- /dev/null
+++ b/releases/jerma/moon.yaml
@@ -0,0 +1,9 @@
+---
+project: moon
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/jerma
+ location:
+ moon: 7bb53c64da2dcf88894bfd31503accdd81498f3d
diff --git a/releases/jerma/opnfvdocs.yaml b/releases/jerma/opnfvdocs.yaml
new file mode 100644
index 000000000..a5d0c1447
--- /dev/null
+++ b/releases/jerma/opnfvdocs.yaml
@@ -0,0 +1,14 @@
+---
+project: opnfvdocs
+project-type: infra
+release-model: stable
+
+releases:
+ - version: opnfv-10.0.0
+ location:
+ opnfvdocs: 8ae4bb381bef8ad9f71bd0f46323799a90bb7deb
+
+branches:
+ - name: stable/jerma
+ location:
+ opnfvdocs: 1d0a0c19a3ca2d8d161a37232e7bf2366223bd49
diff --git a/releases/jerma/samplevnf.yaml b/releases/jerma/samplevnf.yaml
new file mode 100644
index 000000000..27aebee0e
--- /dev/null
+++ b/releases/jerma/samplevnf.yaml
@@ -0,0 +1,24 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: samplevnf
+project-type: testing
+release-model: stable
+
+releases:
+ - version: opnfv-10.0.0
+ location:
+ samplevnf: d5801e449ca4c7115c2c11bf164bcce7b149917c
+
+branches:
+ - name: stable/jerma
+ location:
+ samplevnf: d5801e449ca4c7115c2c11bf164bcce7b149917c
+
+release-notes: https://docs.opnfv.org/projects/samplevnf/en/stable-jerma/release/release-notes/index.html
diff --git a/releases/jerma/vswitchperf.yaml b/releases/jerma/vswitchperf.yaml
new file mode 100644
index 000000000..ddb6c4050
--- /dev/null
+++ b/releases/jerma/vswitchperf.yaml
@@ -0,0 +1,14 @@
+---
+project: vswitchperf
+project-type: testing
+release-model: stable
+
+releases:
+ - version: opnfv-10.0.0
+ location:
+ vswitchperf: 90ef48d4342a983c7733b8c21bb902d1dab2685a
+
+branches:
+ - name: stable/jerma
+ location:
+ vswitchperf: 90ef48d4342a983c7733b8c21bb902d1dab2685a
diff --git a/releases/kali/airship.yaml b/releases/kali/airship.yaml
new file mode 100644
index 000000000..6eb5f6ffd
--- /dev/null
+++ b/releases/kali/airship.yaml
@@ -0,0 +1,19 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: airship
+project-type: installer
+release-model: stable
+
+branches:
+ - name: stable/kali
+ location:
+ airship: 88723e1bb827b0efa57f47cb12d0ff1c1fe7802c
+
+release-notes: http://docs.opnfv.org/en/stable-kali/submodules/airship/docs/release/release-notes/release-notes.html
diff --git a/releases/kali/barometer.yaml b/releases/kali/barometer.yaml
new file mode 100644
index 000000000..a916ce16a
--- /dev/null
+++ b/releases/kali/barometer.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2021 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: barometer
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/kali
+ location:
+ barometer: 08fd79b551dde788da6974705df9c9d3e8de01d5
diff --git a/utils/build-server-ansible/vars/docker-compose-CentOS.yml b/releases/kali/cirv.yaml
index fc4bcba7e..3a5fe6240 100644
--- a/utils/build-server-ansible/vars/docker-compose-CentOS.yml
+++ b/releases/kali/cirv.yaml
@@ -1,12 +1,17 @@
-#############################################################################
-# Copyright (c) 2016 The Linux Foundation and others.
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
---
-- name: Ensure docker compose is installed.
- yum:
- name: 'docker-compose'
- state: present
+project: cirv
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/kali
+ location:
+ cirv-sdv: 3daf232ec65cb42c9c4a05bed1f185fe1aa4bd0b
diff --git a/releases/kali/functest.yaml b/releases/kali/functest.yaml
new file mode 100644
index 000000000..efe050046
--- /dev/null
+++ b/releases/kali/functest.yaml
@@ -0,0 +1,15 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/kali
+ location:
+ functest: f74088bb1ae93feaf56c7ec7d1f1e78c97d8de9a
+ - name: stable/kali
+ location:
+ functest-kubernetes: 83d3161c52c8cc8d9e4f52d7a693bbcc32508379
+ - name: stable/kali
+ location:
+ functest-xtesting: 0997d4c739baf30d13529d4408b3761e5c5e8919
diff --git a/releases/kali/kuberef.yaml b/releases/kali/kuberef.yaml
new file mode 100644
index 000000000..e4c826e85
--- /dev/null
+++ b/releases/kali/kuberef.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: kuberef
+project-type: installer
+release-model: stable
+
+branches:
+ - name: stable/kali
+ location:
+ kuberef: e917625c424d33e892fa253cdd02da926b843325
diff --git a/releases/kali/vineperf.yaml b/releases/kali/vineperf.yaml
new file mode 100644
index 000000000..2dcfe5cc9
--- /dev/null
+++ b/releases/kali/vineperf.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: vineperf
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/kali
+ location:
+ vineperf: 53bfad9e49e8d0ea585e92d6efda6c7a2fd779c1
diff --git a/releases/lakelse/barometer.yaml b/releases/lakelse/barometer.yaml
new file mode 100644
index 000000000..f85809a21
--- /dev/null
+++ b/releases/lakelse/barometer.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2021 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: barometer
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/lakelse
+ location:
+ barometer: 52e3a8bc19d86205e81442acfa9512006207c023
diff --git a/releases/lakelse/cirv.yaml b/releases/lakelse/cirv.yaml
new file mode 100644
index 000000000..f91d6591f
--- /dev/null
+++ b/releases/lakelse/cirv.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: cirv
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/lakelse
+ location:
+ cirv-sdv: 31fb310d3fd1c9c1f12cfe0c654870e24f5efab6
diff --git a/releases/lakelse/kuberef.yaml b/releases/lakelse/kuberef.yaml
new file mode 100644
index 000000000..f5075d1ac
--- /dev/null
+++ b/releases/lakelse/kuberef.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: kuberef
+project-type: installer
+release-model: stable
+
+branches:
+ - name: stable/lakelse
+ location:
+ kuberef: f773a3c4c6f0f3f749c417eb909b0d153a613b17
diff --git a/releases/lakelse/opnfvdocs.yaml b/releases/lakelse/opnfvdocs.yaml
new file mode 100644
index 000000000..519c31dc2
--- /dev/null
+++ b/releases/lakelse/opnfvdocs.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: opnfvdocs
+project-type: infra
+release-model: stable
+
+branches:
+ - name: stable/lakelse
+ location:
+ opnfvdocs: 5af53a3b05353539a072a41a3493c579c63f7b3d
diff --git a/releases/lakelse/vineperf.yaml b/releases/lakelse/vineperf.yaml
new file mode 100644
index 000000000..199fe02ae
--- /dev/null
+++ b/releases/lakelse/vineperf.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: vineperf
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/lakelse
+ location:
+ vineperf: 2fcc501a8cf9f5a7dbcedf48946e225dfc485ff8
diff --git a/releases/leguer/functest.yaml b/releases/leguer/functest.yaml
new file mode 100644
index 000000000..8ffd79be0
--- /dev/null
+++ b/releases/leguer/functest.yaml
@@ -0,0 +1,15 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/leguer
+ location:
+ functest: a2af4693b352eaf6a84885eacb120cf4e260e5c4
+ - name: stable/leguer
+ location:
+ functest-kubernetes: 687723872c04c4b889cf4b529f6534776b16908d
+ - name: stable/leguer
+ location:
+ functest-xtesting: cd0f0900cb9290f2e84ad184842d8d8ce543a2e7
diff --git a/releases/moselle/barometer.yaml b/releases/moselle/barometer.yaml
new file mode 100644
index 000000000..c636486fc
--- /dev/null
+++ b/releases/moselle/barometer.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2021 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: barometer
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/moselle
+ location:
+ barometer: 31d3f2708010dc36377958b6fefb0459301e50af
diff --git a/releases/moselle/kuberef.yaml b/releases/moselle/kuberef.yaml
new file mode 100644
index 000000000..f483bc90f
--- /dev/null
+++ b/releases/moselle/kuberef.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: kuberef
+project-type: installer
+release-model: stable
+
+branches:
+ - name: stable/moselle
+ location:
+ kuberef: f845220cd17f89c8e1c68aecb3f0c7e82a56aa03
diff --git a/releases/moselle/opnfvdocs.yaml b/releases/moselle/opnfvdocs.yaml
new file mode 100644
index 000000000..f9be3d357
--- /dev/null
+++ b/releases/moselle/opnfvdocs.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: opnfvdocs
+project-type: infra
+release-model: stable
+
+branches:
+ - name: stable/moselle
+ location:
+ opnfvdocs: 79b31e661c5676f5f21acb7b2870bb0ed8d0e301
diff --git a/utils/build-server-ansible/inventory.ini b/releases/moselle/thoth.yaml
index 115b1306e..23a3de11a 100644
--- a/utils/build-server-ansible/inventory.ini
+++ b/releases/moselle/thoth.yaml
@@ -1,8 +1,17 @@
-#############################################################################
-# Copyright (c) 2016 The Linux Foundation and others.
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2021 The Linux Foundation and others.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-localhost ansible_connection=local
+---
+project: thoth
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/moselle
+ location:
+ thoth: 9e9df400ba7f9259a38484d232fe11e08edb4da4
diff --git a/releases/moselle/vineperf.yaml b/releases/moselle/vineperf.yaml
new file mode 100644
index 000000000..709352926
--- /dev/null
+++ b/releases/moselle/vineperf.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: vineperf
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/moselle
+ location:
+ vineperf: aa75d7b0b5041aafa9a34bc95600db5b01a5791e
diff --git a/releases/nile/barometer.yaml b/releases/nile/barometer.yaml
new file mode 100644
index 000000000..b607a49ec
--- /dev/null
+++ b/releases/nile/barometer.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2021 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: barometer
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/nile
+ location:
+ barometer: 41195d4c2c3418e8c2daa44c792406be2fc54964
diff --git a/releases/nile/opnfvdocs.yaml b/releases/nile/opnfvdocs.yaml
new file mode 100644
index 000000000..0b427ec32
--- /dev/null
+++ b/releases/nile/opnfvdocs.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2022 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: opnfvdocs
+project-type: infra
+release-model: stable
+
+branches:
+ - name: stable/nile
+ location:
+ opnfvdocs: 397bbce7edc03354802dacd1524f5b306979ebd3
diff --git a/jjb/kvmfornfv/kvmfornfv-build.sh b/releases/nile/thoth.yaml
index ff918492c..59d8c9e77 100755..100644
--- a/jjb/kvmfornfv/kvmfornfv-build.sh
+++ b/releases/nile/thoth.yaml
@@ -1,16 +1,17 @@
-#!/bin/bash
-# SPDX-license-identifier: Apache-2.0
+# SPDX-License-Identifier: Apache-2.0
##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
+# Copyright (c) 2021 The Linux Foundation and others.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-# build output directory
-OUTPUT_DIR=$WORKSPACE/build_output
-mkdir -p $OUTPUT_DIR
+---
+project: thoth
+project-type: feature
+release-model: stable
-# start the build
-cd $WORKSPACE
-./ci/build.sh $OUTPUT_DIR
+branches:
+ - name: stable/nile
+ location:
+ thoth: 3b4fa67222bd3e33cb921869395b46193d353bb1
diff --git a/releases/nile/vineperf.yaml b/releases/nile/vineperf.yaml
new file mode 100644
index 000000000..88252799c
--- /dev/null
+++ b/releases/nile/vineperf.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: vineperf
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/nile
+ location:
+ vineperf: e0b5af5ab0a36bb63c880b9137dd02e26e4c94cc
diff --git a/releases/orinoco/barometer.yaml b/releases/orinoco/barometer.yaml
new file mode 100644
index 000000000..897e8dba0
--- /dev/null
+++ b/releases/orinoco/barometer.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2021 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: barometer
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/orinoco
+ location:
+ barometer: 41195d4c2c3418e8c2daa44c792406be2fc54964
diff --git a/releases/orinoco/kuberef.yaml b/releases/orinoco/kuberef.yaml
new file mode 100644
index 000000000..dfbffa0ce
--- /dev/null
+++ b/releases/orinoco/kuberef.yaml
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2020 The Linux Foundation and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+---
+project: kuberef
+project-type: installer
+release-model: stable
+
+branches:
+ - name: stable/orinoco
+ location:
+ kuberef: 486015325b01283f31595abd8fbdbf93530f6a5a
diff --git a/releases/orinoco/opnfvdocs.yaml b/releases/orinoco/opnfvdocs.yaml
new file mode 100644
index 000000000..e05c3492b
--- /dev/null
+++ b/releases/orinoco/opnfvdocs.yaml
@@ -0,0 +1,9 @@
+---
+project: opnfvdocs
+project-type: infra
+release-model: stable
+
+branches:
+ - name: stable/orinoco
+ location:
+ opnfvdocs: fc7cdf4af32a4ea6ffb0e1719650d8bb4e460185
diff --git a/utils/build-server-ansible/vars/docker-compose-Ubuntu.yml b/releases/orinoco/thoth.yaml
index f985b6a4d..f19c443c1 100644
--- a/utils/build-server-ansible/vars/docker-compose-Ubuntu.yml
+++ b/releases/orinoco/thoth.yaml
@@ -1,12 +1,17 @@
-#############################################################################
-# Copyright (c) 2016 The Linux Foundation and others.
+# SPDX-License-Identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2021 The Linux Foundation and others.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
-#############################################################################
+##############################################################################
---
-- name: Ensure docker compose is installed
- apt:
- name: 'docker-compose'
- state: present
+project: thoth
+project-type: feature
+release-model: stable
+
+branches:
+ - name: stable/orinoco
+ location:
+ thoth: 77a6ce27efa02f72568950e37aed3853f05ecd4b
diff --git a/releases/schema.yaml b/releases/schema.yaml
index c3838760a..f84c87093 100644
--- a/releases/schema.yaml
+++ b/releases/schema.yaml
@@ -46,7 +46,7 @@ properties:
properties:
name:
type: 'string'
- pattern: '^stable/[a-z]+$'
+ pattern: '^stable/[a-z0-9.]+$'
location:
type: 'object'
required: ['name', 'location']
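The schema change above relaxes the branch-name pattern so that the dotted Kubernetes-style branches added elsewhere in this change (stable/v1.21 through stable/v1.29) validate alongside the traditional release names. A minimal sketch of the effect, using Python's re module; the two pattern strings are taken from the hunk, the sample branch names are illustrative:

import re

OLD_PATTERN = re.compile(r'^stable/[a-z]+$')
NEW_PATTERN = re.compile(r'^stable/[a-z0-9.]+$')

# Dotted branch names only pass the relaxed pattern.
for name in ('stable/jerma', 'stable/leguer', 'stable/v1.21'):
    print(name, bool(OLD_PATTERN.match(name)), bool(NEW_PATTERN.match(name)))
# stable/jerma  True  True
# stable/leguer True  True
# stable/v1.21  False True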
diff --git a/releases/scripts/release-status.sh b/releases/scripts/release-status.sh
index 6790100a7..1fb4f70e2 100755
--- a/releases/scripts/release-status.sh
+++ b/releases/scripts/release-status.sh
@@ -15,7 +15,7 @@ RELEASE="${RELEASE:-fraser}"
[ -a repos.txt ] && rm repos.txt
for project in releases/$RELEASE/*; do
- python releases/scripts/repos.py -n -f $project >> repos.txt
+ python3 releases/scripts/repos.py -n -f $project >> repos.txt
done
while read -r repo
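For context, release-status.sh feeds each release file through releases/scripts/repos.py -n -f to collect repository names into repos.txt; the change above only moves that call to python3. The actual repos.py is not shown here, but a rough, hypothetical sketch of that kind of extraction against the YAML layout used throughout this change might look like:

import sys

import yaml  # assumes PyYAML is installed


def repo_names(path):
    """Yield repository names from the 'releases' and 'branches' entries."""
    with open(path) as handle:
        data = yaml.safe_load(handle)
    for section in ('releases', 'branches'):
        for entry in data.get(section) or []:
            yield from entry.get('location', {})


if __name__ == '__main__':
    for name in sorted(set(repo_names(sys.argv[1]))):
        print(name)

Run against releases/jerma/cirv.yaml, for example, this would print cirv, cirv-hdv and cirv-sdv.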
diff --git a/releases/scripts/requirements.txt b/releases/scripts/requirements.txt
deleted file mode 100644
index 5a7d216e9..000000000
--- a/releases/scripts/requirements.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-pygerrit2 < 2.1.0
-PyYAML < 4.0
-jsonschema < 2.7.0
-rfc3987
-ruamel.yaml
diff --git a/releases/v1.21/functest.yaml b/releases/v1.21/functest.yaml
new file mode 100644
index 000000000..44b630cdc
--- /dev/null
+++ b/releases/v1.21/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.21
+ location:
+ functest-kubernetes: 3a58b1019c0f0a3e4ec3ce8be16ffd9a15edf518
diff --git a/releases/v1.22/functest.yaml b/releases/v1.22/functest.yaml
new file mode 100644
index 000000000..c2683e0e5
--- /dev/null
+++ b/releases/v1.22/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.22
+ location:
+ functest-kubernetes: 56d89152af91b73ee32d74062e7c366ca7d72e47
diff --git a/releases/v1.23/functest.yaml b/releases/v1.23/functest.yaml
new file mode 100644
index 000000000..9acccb215
--- /dev/null
+++ b/releases/v1.23/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.23
+ location:
+ functest-kubernetes: e3c09959d8639b6e798759bb74e98a752b98b7f4
diff --git a/releases/v1.24/functest.yaml b/releases/v1.24/functest.yaml
new file mode 100644
index 000000000..5e35ec315
--- /dev/null
+++ b/releases/v1.24/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.24
+ location:
+ functest-kubernetes: 57d0741942c23ab66d38b7269e68694b92de9646
diff --git a/releases/v1.25/functest.yaml b/releases/v1.25/functest.yaml
new file mode 100644
index 000000000..f746d968e
--- /dev/null
+++ b/releases/v1.25/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.25
+ location:
+ functest-kubernetes: 7d1b01dafdfcd65bea716b61a6036a972dcf395b
diff --git a/releases/v1.26/functest.yaml b/releases/v1.26/functest.yaml
new file mode 100644
index 000000000..37ae6037c
--- /dev/null
+++ b/releases/v1.26/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.26
+ location:
+ functest-kubernetes: f6eba4b5b93b994a6773867fdb123c2c34a56415
diff --git a/releases/v1.27/functest.yaml b/releases/v1.27/functest.yaml
new file mode 100644
index 000000000..ae709534a
--- /dev/null
+++ b/releases/v1.27/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.27
+ location:
+ functest-kubernetes: e95b4b94abc1ad55144b07b137e2c53d045bd649
diff --git a/releases/v1.28/functest.yaml b/releases/v1.28/functest.yaml
new file mode 100644
index 000000000..a4cc15ccf
--- /dev/null
+++ b/releases/v1.28/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.28
+ location:
+ functest-kubernetes: e95b4b94abc1ad55144b07b137e2c53d045bd649
diff --git a/releases/v1.29/functest.yaml b/releases/v1.29/functest.yaml
new file mode 100644
index 000000000..dcc78ed85
--- /dev/null
+++ b/releases/v1.29/functest.yaml
@@ -0,0 +1,9 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/v1.29
+ location:
+ functest-kubernetes: 9cb990f8f874f395fe6aa7f045fa9dcc3fdb35c4
diff --git a/releases/wallaby/functest.yaml b/releases/wallaby/functest.yaml
new file mode 100644
index 000000000..19f760798
--- /dev/null
+++ b/releases/wallaby/functest.yaml
@@ -0,0 +1,12 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/wallaby
+ location:
+ functest: be8185f6345d499c80c71bf0dbbd1402069e1c81
+ - name: stable/wallaby
+ location:
+ functest-xtesting: ecedaa857ff821cad5dc9fa423d1a596ea255efe
diff --git a/releases/xena/functest.yaml b/releases/xena/functest.yaml
new file mode 100644
index 000000000..cdd9904d3
--- /dev/null
+++ b/releases/xena/functest.yaml
@@ -0,0 +1,12 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/xena
+ location:
+ functest: 1d976335e832e4b48fb7bd2cda3665c28dfabe86
+ - name: stable/xena
+ location:
+ functest-xtesting: f32ab02c42a775e57ba34153d2d401bee34e88b8
diff --git a/releases/yoga/functest.yaml b/releases/yoga/functest.yaml
new file mode 100644
index 000000000..406ecaaf7
--- /dev/null
+++ b/releases/yoga/functest.yaml
@@ -0,0 +1,12 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/yoga
+ location:
+ functest: 95206bab447d0d707edc1623eca2e14ef3ce1b8a
+ - name: stable/yoga
+ location:
+ functest-xtesting: 33a884eae2153a2ea91b4bce79b06028e783e8bf
diff --git a/releases/zed/functest.yaml b/releases/zed/functest.yaml
new file mode 100644
index 000000000..0efa75ffd
--- /dev/null
+++ b/releases/zed/functest.yaml
@@ -0,0 +1,12 @@
+---
+project: functest
+project-type: testing
+release-model: stable
+
+branches:
+ - name: stable/zed
+ location:
+ functest: 55879c22390bd24e696761d4c04fea34ed5a1e87
+ - name: stable/zed
+ location:
+ functest-xtesting: 25c7404b8d2a8f36170f6067c97915ddb6128bfd
diff --git a/test-requirements.txt b/test-requirements.txt
new file mode 100644
index 000000000..ea58b2475
--- /dev/null
+++ b/test-requirements.txt
@@ -0,0 +1,3 @@
+sphinxcontrib-httpdomain
+jenkins-job-builder
+piccolo-theme
diff --git a/tox.ini b/tox.ini
index cb6aa2698..23f7c1a84 100644
--- a/tox.ini
+++ b/tox.ini
@@ -4,31 +4,25 @@
# and then run "tox" from this directory.
[tox]
-envlist = jjb,docs,docs-linkcheck
+envlist = jjb,docs
skipsdist = True
[testenv]
+basepython = python3.10
usedevelop = False
-setenv=
- HOME = {envtmpdir}
- PYTHONPATH = {toxinidir}
+deps =
+ -chttps://opendev.org/openstack/requirements/raw/branch/stable/zed/upper-constraints.txt
+ -cupper-constraints.txt
+ -r{toxinidir}/test-requirements.txt
[testenv:jjb]
-basepython = python3
-deps =
- jenkins-job-builder==2.5.0
commands=
- jenkins-jobs test -o job_output -r {posargs:"jjb/"}
+ jenkins-jobs test -o {envtmpdir}/job_output -r {toxinidir}/jjb
[testenv:docs]
-basepython = python3
-deps = -r{toxinidir}/docs/requirements.txt
commands =
- sphinx-build -b html -n -d {envtmpdir}/doctrees ./docs {toxinidir}/docs/_build/html
- echo "Generated docs available in {toxinidir}/docs/_build/html"
-whitelist_externals = echo
+ sphinx-build -b html -n -d {envtmpdir}/doctrees ./docs {toxinidir}/docs/_build/html
[testenv:docs-linkcheck]
-basepython = python3
-deps = -r{toxinidir}/docs/requirements.txt
-commands = sphinx-build -b linkcheck -d {envtmpdir}/doctrees ./docs {toxinidir}/docs/_build/linkcheck
+commands =
+ sphinx-build -b linkcheck -d {envtmpdir}/doctrees ./docs {toxinidir}/docs/_build/linkcheck
diff --git a/upper-constraints.txt b/upper-constraints.txt
new file mode 100644
index 000000000..b6d67807d
--- /dev/null
+++ b/upper-constraints.txt
@@ -0,0 +1,7 @@
+sphinxcontrib-needs===0.7.9
+sphinxcontrib.plantuml===0.24
+sphinx_toolbox==3.2.0
+sphinxcontrib.nwdiag===2.0.0
+sphinxcontrib-swaggerdoc===0.1.7
+sphinx-rtd-theme===1.0.0
+jenkins-job-builder===4.3.0
diff --git a/utils/build-server-ansible/main.yml b/utils/build-server-ansible/main.yml
deleted file mode 100644
index a4d3b08cb..000000000
--- a/utils/build-server-ansible/main.yml
+++ /dev/null
@@ -1,46 +0,0 @@
-############################################################################
-# Copyright (c) 2016 The Linux Foundation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-#############################################################################
----
-- hosts: "localhost"
- become: "True"
- tasks:
- - debug:
- msg: "{{ inventory_hostname }} is {{ ansible_distribution }}"
- - include_vars: vars/defaults.yml
- - include: vars/CentOS.yml
- when: ansible_distribution == "CentOS"
- - include: vars/Ubuntu.yml
- when: ansible_distribution == "Ubuntu"
- - name: Install Docker.
- package: name={{ docker_package }} state={{ docker_package_state }}
- - name: Ensure Docker is started and enabled at boot.
- service:
- name: docker
- state: started
- enabled: "yes"
- - name: install gsutil
- pip:
- name: gsutil
- state: present
- - name: install tox
- pip:
- name: tox
- state: present
- - name: install yamllint
- pip:
- name: yamllint
- state: present
- - include: vars/docker-compose-CentOS.yml
- when: ansible_distribution == "CentOS"
- - include: vars/docker-compose-Ubuntu.yml
- when: ansible_distribution == "Ubuntu"
- - name: Install manifest-tool
- get_url:
- url: '{{ manifest_tool_url }}/{{ manifest_tool_version }}/manifest-tool-linux-amd64'
- dest: '{{ manifest_tool_bin_dir }}/manifest-tool'
- mode: '755'
diff --git a/utils/build-server-ansible/vars/CentOS.yml b/utils/build-server-ansible/vars/CentOS.yml
deleted file mode 100644
index 50839be70..000000000
--- a/utils/build-server-ansible/vars/CentOS.yml
+++ /dev/null
@@ -1,77 +0,0 @@
-############################################################################
-# Copyright (c) 2016 The Linux Foundation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-############################################################################
----
-- name: Ensure old versions of Docker are not installed.
- package:
- name: '{{ item }}'
- state: absent
- with_items:
- - docker
- - docker-common
- - docker-engine
-
-- name: Add Docker GPG key.
- rpm_key:
- key: https://download.docker.com/linux/centos/gpg
- state: present
-
-- name: Ensure epel is installed.
- yum:
- name: epel-release
- state: present
-- name: Ensure dependencies are installed.
- yum:
- name: "{{ item }}"
- state: present
- with_items:
- - bc
- - collectd
- - doxygen
- - facter
- - jq
- - kernel-headers
- - libpcap-devel
- - libvirt-devel
- - net-tools
- - numactl-devel
- - openssl-devel
- - python-devel
- - python-pip
- - python-six
- - python-sphinx
- - rpm-build
- - sysstat
- - xmlstarlet
- - zlib-devel
-
-- name: install the 'Development tools' package group
- yum:
- name: "@Development tools"
- state: present
-
-- name: Add Docker repository.
- get_url:
- url: "{{ docker_yum_repo_url }}"
- dest: '/etc/yum.repos.d/docker-ce.repo'
- owner: root
- group: root
- mode: 0644
-
-- name: Configure Docker Edge repo.
- ini_file:
- dest: '/etc/yum.repos.d/docker-ce.repo'
- section: 'docker-ce-edge'
- option: enabled
- value: '{{ docker_yum_repo_enable_edge }}'
-
-- name: Configure Docker Test repo.
- ini_file:
- dest: '/etc/yum.repos.d/docker-ce.repo'
- section: 'docker-ce-test'
- option: enabled
- value: '{{ docker_yum_repo_enable_test }}'
diff --git a/utils/build-server-ansible/vars/Ubuntu.yml b/utils/build-server-ansible/vars/Ubuntu.yml
deleted file mode 100644
index 2e3e3febf..000000000
--- a/utils/build-server-ansible/vars/Ubuntu.yml
+++ /dev/null
@@ -1,158 +0,0 @@
-#############################################################################
-# Copyright (c) 2016 The Linux Foundation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-#############################################################################
----
-- name: Ensure old versions of Docker are not installed.
- package:
- name: '{{ item }}'
- state: absent
- with_items:
- - docker
- - docker-engine
-
-- name: Ensure dependencies are installed.
- apt:
- name: "{{ item }}"
- state: present
- with_items:
- - apt-transport-https
- - ca-certificates
- - git
- - build-essential
- - curl
- - wget
- - rpm
- - fuseiso
- - createrepo
- - genisoimage
- - libfuse-dev
- - dh-autoreconf
- - pkg-config
- - zlib1g-dev
- - libglib2.0-dev
- - libpixman-1-dev
- - python-virtualenv
- - python-dev
- - libffi-dev
- - libssl-dev
- - libxml2-dev
- - libxslt1-dev
- - bc
- - qemu-kvm
- - libvirt-bin
- - ubuntu-vm-builder
- - bridge-utils
- - monit
- - openjdk-8-jre-headless
- - python-nose
- - dirmngr
- - collectd
- - flex
- - bison
- - libnuma-dev
- - shellcheck
- - python-pip
- - sysstat
- - xmlstarlet
- - facter
- - jq
- - python-tox
- - collectd-dev
- - python3
- - python3-dev
- - libdpdk-dev
- - dpdk-dev
- - fakeroot
- - devscripts
- - debhelper
- - dpkg-dev
- - po-debconf
- - dh-systemd
- - dh-strip-nondeterminism
- - autotools-dev
- - libltdl-dev
- - iptables-dev
- - javahelper
- - libatasmart-dev
- - libcap-dev
- - libcurl4-gnutls-dev
- - libdbi0-dev
- - libesmtp-dev
- - libganglia1-dev
- - libgcrypt11-dev
- - libgps-dev
- - libhiredis-dev
- - libi2c-dev
- - libldap2-dev
- - liblua5.3-dev
- - liblvm2-dev
- - libmemcached-dev
- - libmodbus-dev
- - libmnl-dev
- - libmosquitto-dev
- - libmysqlclient-dev
- - libnotify-dev
- - libopenipmi-dev
- - liboping-dev
- - libow-dev
- - libpcap0.8-dev
- - libpcap-dev
- - libperl-dev
- - libpq-dev
- - libprotobuf-c-dev
- - libriemann-client-dev
- - librdkafka-dev
- - librabbitmq-dev
- - librrd-dev
- - libsensors4-dev
- - libsigrok-dev
- - libsnmp-dev
- - libsnmp9-dev
- - snmp
- - snmp-mibs-downloader
- - snmpd
- - perl
- - libtokyocabinet-dev
- - libtokyotyrant-dev
- - libudev-dev
- - libupsclient-dev
- - libvarnishapi-dev
- - libvirt-dev
- - libvirt-daemon
- - libxen-dev
- - libyajl-dev
- - linux-libc-dev
- - default-jdk
- - protobuf-c-compiler
- - openvswitch-switch
- - libjpeg-dev
-
-- name: Add Docker apt key.
- apt_key:
- url: https://download.docker.com/linux/ubuntu/gpg
- id: 9DC858229FC7DD38854AE2D88D81803C0EBFCD88
- state: present
- register: add_repository_key
- ignore_errors: true
-
-- name: Ensure curl is present (on older systems without SNI).
- package: name=curl state=present
- when: add_repository_key|failed
-
-- name: Add Docker apt key (alternative for older systems without SNI).
- # yamllint disable rule:line-length
- shell: "curl -sSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -"
- # yamllint enable rule:line-length
- args:
- warn: "no"
- when: add_repository_key|failed
-
-- name: Add Docker repository.
- apt_repository:
- repo: "{{ docker_apt_repository }}"
- state: present
- update_cache: "yes"
diff --git a/utils/build-server-ansible/vars/defaults.yml b/utils/build-server-ansible/vars/defaults.yml
deleted file mode 100644
index ff37c953c..000000000
--- a/utils/build-server-ansible/vars/defaults.yml
+++ /dev/null
@@ -1,27 +0,0 @@
-#############################################################################
-# Copyright (c) 2016 The Linux Foundation and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-#############################################################################
----
-docker_package: "docker-ce"
-docker_package_state: present
-
-# Used only for Debian/Ubuntu. Switch 'stable' to 'edge' if needed.
-docker_apt_release_channel: stable
-# yamllint disable rule:line-length
-docker_apt_repository: "deb https://download.docker.com/linux/{{ ansible_distribution|lower }} {{ ansible_distribution_release }} {{ docker_apt_release_channel }}"
-# yamllint enable rule:line-length
-
-# Used only for RedHat/CentOS.
-# yamllint disable rule:line-length
-docker_yum_repo_url: https://download.docker.com/linux/centos/docker-ce.repo
-# yamllint enable rule:line-length
-docker_yum_repo_enable_edge: 0
-docker_yum_repo_enable_test: 0
-
-manifest_tool_version: 'v0.7.0'
-manifest_tool_url: 'https://github.com/estesp/manifest-tool/releases/download'
-manifest_tool_bin_dir: '/usr/local/bin'
diff --git a/utils/retention_script.sh b/utils/retention_script.sh
index b2177930b..3d0673ad0 100755
--- a/utils/retention_script.sh
+++ b/utils/retention_script.sh
@@ -15,10 +15,7 @@
# limitations under the License.
##############################################################################
-PATH=$PATH:/usr/local/bin/
-
-#These are the only projects that generate artifacts
-for x in armband ovsnfv fuel apex compass4nfv
+for x in airship apex armband compass4nfv fuel ovsnfv
do
echo "Looking at artifacts for project $x"