-rw-r--r--  .gitignore                                                           |    7
-rw-r--r--  docs/jenkins-job-builder/opnfv-jjb-usage.rst                         |   15
-rw-r--r--  jjb-sandbox/releng/releng-sandbox-jobs.yml                           |    1
-rwxr-xr-x  jjb/3rd_party_ci/create-apex-vms.sh                                  |   10
-rwxr-xr-x  jjb/3rd_party_ci/download-netvirt-artifact.sh                        |   27
-rwxr-xr-x  jjb/3rd_party_ci/install-netvirt.sh                                  |   40
-rw-r--r--  jjb/3rd_party_ci/odl-netvirt.yml                                     |   70
-rwxr-xr-x  jjb/3rd_party_ci/postprocess-netvirt.sh                              |   10
-rwxr-xr-x  jjb/apex/apex-build.sh                                               |    3
-rwxr-xr-x  jjb/apex/apex-deploy.sh                                              |   60
-rw-r--r--  jjb/apex/apex-snapshot-create.sh                                     |   97
-rw-r--r--  jjb/apex/apex-snapshot-deploy.sh                                     |  165
-rwxr-xr-x  jjb/apex/apex-upload-artifact.sh                                     |   12
-rw-r--r--  jjb/apex/apex.yml                                                    |  292
-rw-r--r--  jjb/armband/armband-ci-jobs.yml                                      |    4
-rwxr-xr-x  jjb/armband/armband-deploy.sh                                        |    7
-rwxr-xr-x  jjb/armband/armband-download-artifact.sh                             |    2
-rw-r--r--  jjb/armband/armband-project-jobs.yml                                 |    4
-rw-r--r--  jjb/armband/armband-verify-jobs.yml                                  |   22
-rwxr-xr-x  jjb/armband/build.sh                                                 |    1
-rwxr-xr-x  jjb/armband/upload-artifacts.sh                                      |    2
-rw-r--r--  jjb/availability/availability.yml                                    |    1
-rw-r--r--  jjb/barometer/barometer.yml                                          |    5
-rw-r--r--  jjb/bottlenecks/bottlenecks-ci-jobs.yml                              |   62
-rw-r--r--  jjb/bottlenecks/bottlenecks-cleanup.sh                               |  111
-rw-r--r--  jjb/bottlenecks/bottlenecks-project-jobs.yml                         |    6
-rw-r--r--  jjb/bottlenecks/bottlenecks-run-suite.sh                             |   65
-rw-r--r--  jjb/compass4nfv/compass-ci-jobs.yml                                  |   30
-rw-r--r--  jjb/compass4nfv/compass-deploy.sh                                    |    3
-rw-r--r--  jjb/compass4nfv/compass-dovetail-jobs.yml                            |  208
-rw-r--r--  jjb/compass4nfv/compass-project-jobs.yml                             |    4
-rw-r--r--  jjb/compass4nfv/compass-verify-jobs.yml                              |    8
-rw-r--r--  jjb/conductor/conductor.yml                                          |    1
-rw-r--r--  jjb/copper/copper.yml                                                |    8
-rw-r--r--  jjb/cperf/cperf-ci-jobs.yml                                          |    2
-rwxr-xr-x  jjb/daisy4nfv/daisy-deploy.sh                                        |   60
-rw-r--r--  jjb/daisy4nfv/daisy-project-jobs.yml                                 |  232
-rw-r--r--  jjb/daisy4nfv/daisy4nfv-merge-jobs.yml                               |  160
-rw-r--r--  jjb/daisy4nfv/daisy4nfv-verify-jobs.yml                              |  134
-rwxr-xr-x  jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh                            |   29
-rw-r--r--  jjb/doctor/doctor.yml                                                |   18
-rw-r--r--  jjb/domino/domino.yml                                                |    1
-rw-r--r--  jjb/dovetail/dovetail-artifacts-upload.yml                           |    2
-rw-r--r--  jjb/dovetail/dovetail-ci-jobs.yml                                    |    8
-rw-r--r--  jjb/dovetail/dovetail-project-jobs.yml                               |    2
-rw-r--r--  jjb/dovetail/dovetail-weekly-jobs.yml                                |  135
-rw-r--r--  jjb/dpacc/dpacc.yml                                                  |    1
-rw-r--r--  jjb/escalator/escalator.yml                                          |   16
-rw-r--r--  jjb/fuel/fuel-daily-jobs.yml                                         |  194
-rwxr-xr-x  jjb/fuel/fuel-deploy.sh                                              |    4
-rwxr-xr-x  jjb/fuel/fuel-download-artifact.sh                                   |    2
-rw-r--r--  jjb/fuel/fuel-project-jobs.yml                                       |   11
-rwxr-xr-x  jjb/fuel/fuel-upload-artifact.sh                                     |    2
-rw-r--r--  jjb/fuel/fuel-verify-jobs-experimental.yml                           |   12
-rw-r--r--  jjb/fuel/fuel-verify-jobs.yml                                        |   12
-rw-r--r--  jjb/functest/functest-ci-jobs.yml                                    |   10
-rwxr-xr-x  jjb/functest/functest-cleanup.sh                                     |   12
-rw-r--r--  jjb/functest/functest-exit.sh                                        |    5
-rwxr-xr-x  jjb/functest/functest-loop.sh                                        |    6
-rw-r--r--  jjb/functest/functest-project-jobs.yml                               |    1
-rwxr-xr-x  jjb/functest/functest-suite.sh                                       |   11
-rwxr-xr-x  jjb/functest/set-functest-env.sh                                     |   61
-rw-r--r--  jjb/global/installer-params.yml                                      |   27
-rw-r--r--  jjb/global/releng-defaults.yml                                       |    9
-rw-r--r--  jjb/global/releng-macros.yml                                         |   69
-rw-r--r--  jjb/global/slave-params.yml                                          |  148
-rw-r--r--  jjb/infra/bifrost-verify-jobs.yml                                    |   20
-rwxr-xr-x  jjb/infra/bifrost-verify.sh                                          |   62
-rw-r--r--  jjb/ipv6/ipv6.yml                                                    |    1
-rw-r--r--  jjb/joid/joid-daily-jobs.yml                                         |  135
-rw-r--r--  jjb/joid/joid-deploy.sh                                              |   97
-rw-r--r--  jjb/joid/joid-verify-jobs.yml                                        |   10
-rwxr-xr-x  jjb/kvmfornfv/kvmfornfv-upload-artifact.sh                           |   34
-rw-r--r--  jjb/kvmfornfv/kvmfornfv.yml                                          |   50
-rw-r--r--  jjb/models/models.yml                                                |   67
-rw-r--r--  jjb/moon/moon.yml                                                    |    1
-rwxr-xr-x  jjb/multisite/fuel-deploy-for-multisite.sh                           |  121
-rw-r--r--  jjb/multisite/multisite-daily-jobs.yml                               |  140
-rw-r--r--  jjb/multisite/multisite-verify-jobs.yml                              |   68
-rw-r--r--  jjb/multisite/multisite.yml                                          |  149
-rw-r--r--  jjb/netready/netready.yml                                            |    2
-rw-r--r--  jjb/octopus/octopus.yml                                              |    1
-rw-r--r--  jjb/onosfw/onosfw.yml                                                |    3
-rw-r--r--  jjb/openretriever/openretriever-project.yml                          |   62
-rw-r--r--  jjb/opera/opera-daily-jobs.yml                                       |   16
-rw-r--r--  jjb/opera/opera-project-jobs.yml                                     |    4
-rw-r--r--  jjb/opera/opera-verify-jobs.yml                                      |    3
-rw-r--r--  jjb/opnfvdocs/docs-post-rtd.sh                                       |    7
-rw-r--r--  jjb/opnfvdocs/docs-rtd.yaml                                          |   85
-rw-r--r--  jjb/opnfvdocs/opnfvdocs.yml                                          |   10
-rw-r--r--  jjb/opnfvdocs/project.cfg                                            |    1
-rw-r--r--  jjb/ovsnfv/ovsnfv.yml                                                |    3
-rw-r--r--  jjb/parser/parser.yml                                                |    1
-rw-r--r--  jjb/pharos/pharos.yml                                                |    1
-rw-r--r--  jjb/prediction/prediction.yml                                        |    1
-rw-r--r--  jjb/promise/promise.yml                                              |    1
-rw-r--r--  jjb/qtip/helpers/cleanup-deploy.sh (renamed from jjb/qtip/qtip-cleanup.sh) |    0
-rw-r--r--  jjb/qtip/helpers/validate-deploy.sh (renamed from jjb/qtip/qtip-daily-ci.sh) |    7
-rw-r--r--  jjb/qtip/helpers/validate-setup.sh                                   |   24
-rw-r--r--  jjb/qtip/qtip-ci-jobs.yml                                            |  101
-rw-r--r--  jjb/qtip/qtip-validate-jobs.yml                                      |  141
-rw-r--r--  jjb/qtip/qtip-verify-jobs.yml (renamed from jjb/qtip/qtip-project-jobs.yml) |   12
-rw-r--r--  jjb/releng/artifact-cleanup.yml                                      |    1
-rw-r--r--  jjb/releng/opnfv-docker-arm.yml                                      |   77
-rw-r--r--  jjb/releng/opnfv-docker.sh                                           |   25
-rw-r--r--  jjb/releng/opnfv-docker.yml                                          |   30
-rw-r--r--  jjb/releng/opnfv-docs.yml                                            |    4
-rw-r--r--  jjb/releng/opnfv-lint.yml                                            |    4
-rw-r--r--  jjb/releng/releng-ci-jobs.yml                                        |    3
-rw-r--r--  jjb/releng/testapi-automate.yml                                      |  182
-rw-r--r--  jjb/releng/testapi-backup-mongodb.sh                                 |   31
-rw-r--r--  jjb/releng/testapi-docker-deploy.sh                                  |   81
-rw-r--r--  jjb/releng/testapi-run-tests.sh                                      |   17
-rw-r--r--  jjb/securityaudit/opnfv-security-audit.yml                           |    1
-rw-r--r--  jjb/storperf/storperf.yml                                            |    5
-rw-r--r--  jjb/ves/ves.yml                                                      |   68
-rw-r--r--  jjb/vnf_forwarding_graph/vnf_forwarding_graph.yml                    |    1
-rw-r--r--  jjb/vswitchperf/vswitchperf.yml                                      |    9
-rw-r--r--  jjb/yardstick/yardstick-ci-jobs.yml                                  |   15
-rwxr-xr-x  jjb/yardstick/yardstick-daily.sh                                     |    4
-rw-r--r--  jjb/yardstick/yardstick-project-jobs.yml                             |   18
-rw-r--r--  modules/opnfv/deployment/__init__.py (renamed from modules/opnfv/installer_adapters/__init__.py) |    0
-rw-r--r--  modules/opnfv/deployment/apex/__init__.py (renamed from modules/opnfv/installer_adapters/apex/__init__.py) |    0
-rw-r--r--  modules/opnfv/deployment/apex/adapter.py                             |  100
-rw-r--r--  modules/opnfv/deployment/example.py                                  |   36
-rw-r--r--  modules/opnfv/deployment/factory.py                                  |   44
-rw-r--r--  modules/opnfv/deployment/fuel/__init__.py (renamed from modules/opnfv/installer_adapters/compass/__init__.py) |    0
-rw-r--r--  modules/opnfv/deployment/fuel/adapter.py                             |  199
-rw-r--r--  modules/opnfv/deployment/manager.py                                  |  385
-rw-r--r--  modules/opnfv/installer_adapters/InstallerHandler.py                 |   81
-rw-r--r--  modules/opnfv/installer_adapters/apex/ApexAdapter.py                 |   32
-rw-r--r--  modules/opnfv/installer_adapters/compass/CompassAdapter.py           |   32
-rw-r--r--  modules/opnfv/installer_adapters/daisy/DaisyAdapter.py               |   32
-rw-r--r--  modules/opnfv/installer_adapters/daisy/__init__.py                   |    0
-rw-r--r--  modules/opnfv/installer_adapters/fuel/FuelAdapter.py                 |  236
-rw-r--r--  modules/opnfv/installer_adapters/fuel/__init__.py                    |    0
-rw-r--r--  modules/opnfv/installer_adapters/fuel/example.py                     |   22
-rw-r--r--  modules/opnfv/installer_adapters/joid/JoidAdapter.py                 |   32
-rw-r--r--  modules/opnfv/installer_adapters/joid/__init__.py                    |    0
-rw-r--r--  modules/opnfv/utils/Credentials.py                                   |    2
-rw-r--r--  modules/opnfv/utils/constants.py                                     |    1
-rw-r--r--  modules/opnfv/utils/opnfv_logger.py (renamed from modules/opnfv/utils/OPNFVLogger.py) |    0
-rw-r--r--  modules/opnfv/utils/ovs_logger.py                                    |   27
-rw-r--r--  modules/opnfv/utils/ssh_utils.py (renamed from modules/opnfv/utils/SSHUtils.py) |   40
-rwxr-xr-x  prototypes/bifrost/scripts/destroy-env.sh                            |   35
-rwxr-xr-x  prototypes/bifrost/scripts/test-bifrost-deployment.sh                |    8
-rwxr-xr-x  utils/fetch_os_creds.sh                                              |   10
-rwxr-xr-x  utils/jenkins-jnlp-connect.sh                                        |   15
-rwxr-xr-x  utils/lab-reconfiguration/reconfigUcsNet.py                          |  146
-rw-r--r--  utils/opnfv-artifacts.py                                             |   79
-rw-r--r--  utils/push-test-logs.sh                                              |   10
-rw-r--r--  utils/test/dashboard/dashboard/common/elastic_access.py              |    2
-rw-r--r--  utils/test/dashboard/dashboard/conf/testcases.py                     |    2
-rw-r--r--  utils/test/dashboard/dashboard/elastic2kibana/utility.py             |    3
-rw-r--r--  utils/test/dashboard/dashboard/functest/format.py                    |   22
-rw-r--r--  utils/test/dashboard/dashboard/mongo2elastic/main.py                 |   22
-rw-r--r--  utils/test/dashboard/kibana_cleanup.py                               |   17
-rwxr-xr-x  utils/test/reporting/functest/reporting-status.py                    |   15
-rwxr-xr-x  utils/test/reporting/functest/reporting-tempest.py                   |    8
-rwxr-xr-x  utils/test/reporting/functest/reporting-vims.py                      |    4
-rw-r--r--  utils/test/reporting/functest/testCase.py                            |   35
-rw-r--r--  utils/test/reporting/reporting.yaml                                  |   22
-rw-r--r--  utils/test/reporting/utils/reporting_utils.py                        |   38
-rw-r--r--  utils/test/testapi/etc/config.ini                                    |    1
-rw-r--r--  utils/test/testapi/htmlize/doc-build.sh                              |   10
-rw-r--r--  utils/test/testapi/htmlize/finish.sh                                 |   15
-rw-r--r--  utils/test/testapi/htmlize/htmlize.py                                |    6
-rw-r--r--  utils/test/testapi/htmlize/prepare.sh                                |   25
-rw-r--r--  utils/test/testapi/opnfv_testapi/cmd/server.py                       |    9
-rw-r--r--  utils/test/testapi/opnfv_testapi/common/config.py                    |   13
-rw-r--r--  utils/test/testapi/opnfv_testapi/common/constants.py                 |    1
-rw-r--r--  utils/test/testapi/opnfv_testapi/resources/handlers.py               |  102
-rw-r--r--  utils/test/testapi/opnfv_testapi/resources/pod_handlers.py           |   12
-rw-r--r--  utils/test/testapi/opnfv_testapi/resources/project_handlers.py       |   12
-rw-r--r--  utils/test/testapi/opnfv_testapi/resources/result_handlers.py        |   30
-rw-r--r--  utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py      |  185
-rw-r--r--  utils/test/testapi/opnfv_testapi/resources/scenario_models.py        |   37
-rw-r--r--  utils/test/testapi/opnfv_testapi/resources/testcase_handlers.py      |   14
-rw-r--r--  utils/test/testapi/opnfv_testapi/router/url_mappings.py              |   39
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py          |    1
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/test_base.py             |   15
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/test_fake_pymongo.py     |   18
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/test_pod.py              |   35
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/test_project.py          |   58
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/test_result.py           |  122
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/test_scenario.py         |  239
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/test_testcase.py         |   85
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/test_token.py            |  118
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/test_version.py          |    8
-rwxr-xr-x  utils/test/testapi/run_test.sh                                       |   40
-rw-r--r--  utils/test/testapi/test-requirements.txt                             |    7
-rw-r--r--  utils/test/testapi/update/templates/utils.py                         |    2
-rw-r--r--  utils/test/vnfcatalogue/VNF_Catalogue/README.md                      |   12
-rw-r--r--  utils/test/vnfcatalogue/VNF_Catalogue/app.js                         |   79
-rw-r--r--  utils/test/vnfcatalogue/VNF_Catalogue/bin/www                        |    9
-rw-r--r--  utils/test/vnfcatalogue/VNF_Catalogue/package.json                   |   18
-rw-r--r--  utils/test/vnfcatalogue/VNF_Catalogue/public/images/3rd_party/commits.png | bin 0 -> 1437 bytes
-rw-r--r--  utils/test/vnfcatalogue/VNF_Catalogue/public/images/logo.png         | bin 0 -> 12138 bytes
-rw-r--r--  utils/test/vnfcatalogue/VNF_Catalogue/public/javascripts/global.js   |   16
-rwxr-xr-x  utils/test/vnfcatalogue/VNF_Catalogue/public/stylesheets/3rd_party/bootstrap.css | 1299
-rw-r--r--  utils/test/vnfcatalogue/VNF_Catalogue/public/stylesheets/style.css   |  252
-rw-r--r--  utils/test/vnfcatalogue/VNF_Catalogue/routes/index.js                |   18
-rw-r--r--  utils/test/vnfcatalogue/VNF_Catalogue/routes/search_projects.js      |   19
-rw-r--r--  utils/test/vnfcatalogue/VNF_Catalogue/views/error.jade               |   12
-rw-r--r--  utils/test/vnfcatalogue/VNF_Catalogue/views/index.jade               |  131
-rw-r--r--  utils/test/vnfcatalogue/VNF_Catalogue/views/layout.jade              |   15
-rw-r--r--  utils/test/vnfcatalogue/VNF_Catalogue/views/search_projects.jade     |  128
-rw-r--r--  utils/test/vnfcatalogue/helpers/README.md                            |   22
-rw-r--r--  utils/test/vnfcatalogue/helpers/migrate.js                           |   78
-rw-r--r--  utils/test/vnfcatalogue/helpers/schema.js                            |   51
210 files changed, 7781 insertions(+), 2207 deletions(-)
diff --git a/.gitignore b/.gitignore
index 024dfac4b..918e32154 100644
--- a/.gitignore
+++ b/.gitignore
@@ -26,3 +26,10 @@ wheels/
.venv/
venv/
ENV/
+node_modules/
+.coverage
+=1.3.1
+cover/
+coverage.xml
+nosetests.xml
+testapi_venv/
diff --git a/docs/jenkins-job-builder/opnfv-jjb-usage.rst b/docs/jenkins-job-builder/opnfv-jjb-usage.rst
index 73b31b20a..fc968f841 100644
--- a/docs/jenkins-job-builder/opnfv-jjb-usage.rst
+++ b/docs/jenkins-job-builder/opnfv-jjb-usage.rst
@@ -39,11 +39,24 @@ Job Types
* Trigger: **remerge**
+* Experimental Job
+
+ * Trigger: **check-experimental**
+
The verify and merge jobs are retriggerable in Gerrit by simply leaving
a comment with one of the keywords listed above.
This is useful in case you need to re-run one of those jobs due to
build issues or changes in the environment.
+Experimental jobs are not triggered automatically. You need to leave
+a comment with the keyword listed above to trigger one manually. This is
+useful for trying out experimental features.
+
+Note that experimental jobs `skip vote`_ for the Verified status, which means
+they will reset the Verified status to 0. If you want to keep the Verified
+status, use **recheck-experimental** in a comment to trigger both the
+verify and experimental jobs.
+
You can add the persons listed below as reviewers to your patch in order to
get it reviewed and submitted.
@@ -67,3 +80,5 @@ in `releng-jobs.yaml`_.
.. _releng-jobs.yaml:
https://gerrit.opnfv.org/gerrit/gitweb?p=releng.git;a=blob;f=jjb/releng-jobs.yaml;
+.. _skip vote:
+ https://wiki.jenkins-ci.org/display/JENKINS/Gerrit+Trigger#GerritTrigger-SkipVote \ No newline at end of file
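
As a minimal illustration of the trigger keywords documented above, a Gerrit
review comment containing one of the following lines retriggers the
corresponding jobs:

    check-experimental      # runs the experimental job only; Verified is reset to 0
    recheck-experimental    # re-runs both verify and experimental jobs, keeping Verified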
diff --git a/jjb-sandbox/releng/releng-sandbox-jobs.yml b/jjb-sandbox/releng/releng-sandbox-jobs.yml
index adefe363e..97fea8992 100644
--- a/jjb-sandbox/releng/releng-sandbox-jobs.yml
+++ b/jjb-sandbox/releng/releng-sandbox-jobs.yml
@@ -13,7 +13,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: 'master'
scm:
- git-scm-gerrit
diff --git a/jjb/3rd_party_ci/create-apex-vms.sh b/jjb/3rd_party_ci/create-apex-vms.sh
index 3f5dbd1c4..0744ac89a 100755
--- a/jjb/3rd_party_ci/create-apex-vms.sh
+++ b/jjb/3rd_party_ci/create-apex-vms.sh
@@ -1,12 +1,8 @@
#!/bin/bash
-set -e
+set -o errexit
+set -o nounset
+set -o pipefail
-if [ -z ${WORKSPACE} ]; then
- echo "WORKSPACE is unset. Please do so."
- exit 1
-fi
-# wipe the WORKSPACE
-/bin/rm -rf $WORKSPACE/*
# clone opnfv sdnvpn repo
git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
diff --git a/jjb/3rd_party_ci/download-netvirt-artifact.sh b/jjb/3rd_party_ci/download-netvirt-artifact.sh
index be2d4059a..6aea01d2a 100755
--- a/jjb/3rd_party_ci/download-netvirt-artifact.sh
+++ b/jjb/3rd_party_ci/download-netvirt-artifact.sh
@@ -1,27 +1,30 @@
#!/bin/bash
-set -e
+set -o errexit
+set -o nounset
+set -o pipefail
-if [ -z ${WORKSPACE} ]; then
- echo "WORKSPACE is unset. Please do so."
- exit 1
-fi
-# wipe the WORKSPACE
-/bin/rm -rf $WORKSPACE/*
+ODL_ZIP=distribution-karaf-0.6.0-SNAPSHOT.zip
echo "Attempting to fetch the artifact location from ODL Jenkins"
CHANGE_DETAILS_URL="https://git.opendaylight.org/gerrit/changes/netvirt~master~$GERRIT_CHANGE_ID/detail"
# due to limitation with the Jenkins Gerrit Trigger, we need to use Gerrit REST API to get the change details
-ODL_JOB_URL=$(curl -s $CHANGE_DETAILS_URL | grep netvirt-patch-test-current-carbon | tail -1 | \
- sed 's/\\n//g' | awk '{print $6}')
-NETVIRT_ARTIFACT_URL="${ODL_JOB_URL}org.opendaylight.integration\$distribution-karaf/artifact/org.opendaylight.integration/distribution-karaf/0.6.0-SNAPSHOT/distribution-karaf-0.6.0-SNAPSHOT.tar.gz"
+ODL_BUILD_JOB_NUM=$(curl -s $CHANGE_DETAILS_URL | grep -Eo 'netvirt-distribution-check-carbon/[0-9]+' | tail -1 | grep -Eo '[0-9]+')
+
+NETVIRT_ARTIFACT_URL="https://jenkins.opendaylight.org/releng/job/netvirt-distribution-check-carbon/${ODL_BUILD_JOB_NUM}/artifact/${ODL_ZIP}"
echo -e "URL to artifact is\n\t$NETVIRT_ARTIFACT_URL"
echo "Downloading the artifact. This could take time..."
-wget -q -O $NETVIRT_ARTIFACT $NETVIRT_ARTIFACT_URL
+wget -q -O $ODL_ZIP $NETVIRT_ARTIFACT_URL
if [[ $? -ne 0 ]]; then
echo "The artifact does not exist! Probably removed due to ODL Jenkins artifact retention policy."
echo "Rerun netvirt-patch-test-current-carbon to get artifact rebuilt."
exit 1
fi
+
+#TODO(trozet) remove this once odl-pipeline accepts zip files
+echo "Converting artifact zip to tar.gz"
+unzip $ODL_ZIP
+tar czf /tmp/${NETVIRT_ARTIFACT} $(echo $ODL_ZIP | sed -n 's/\.zip//p')
+
echo "Download complete"
-ls -al $NETVIRT_ARTIFACT
+ls -al /tmp/${NETVIRT_ARTIFACT}
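
For context, the pipeline above extracts the ODL build number from Jenkins
comments recorded in the Gerrit change detail. A matching line in that payload
has roughly this shape (the build number 1234 is illustrative):

    https://jenkins.opendaylight.org/releng/job/netvirt-distribution-check-carbon/1234/ : SUCCESS

from which the first grep isolates netvirt-distribution-check-carbon/1234 and
the second yields 1234, around which the artifact URL is then assembled.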
diff --git a/jjb/3rd_party_ci/install-netvirt.sh b/jjb/3rd_party_ci/install-netvirt.sh
index f111d4847..ed1a12bc8 100755
--- a/jjb/3rd_party_ci/install-netvirt.sh
+++ b/jjb/3rd_party_ci/install-netvirt.sh
@@ -1,15 +1,33 @@
#!/bin/bash
-set -e
+set -o errexit
+set -o nounset
+set -o pipefail
-if [ -z ${WORKSPACE} ]; then
- echo "WORKSPACE is unset. Please do so."
- exit 1
-fi
-# wipe the WORKSPACE
-/bin/rm -rf $WORKSPACE/*
+SNAP_CACHE=$HOME/snap_cache
# clone opnfv sdnvpn repo
git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
-. $WORKSPACE/sdnvpn/odl-pipeline/odl-pipeline-common.sh
-pushd $LIB
-./odl_reinstaller.sh --cloner-info $CLONER_INFO --odl-artifact $NETVIRT_ARTIFACT
-popd \ No newline at end of file
+
+if [ ! -f "/tmp/${NETVIRT_ARTIFACT}" ]; then
+ echo "ERROR: /tmp/${NETVIRT_ARTIFACT} specified as NetVirt Artifact, but file does not exist"
+ exit 1
+fi
+
+if [ ! -f "${SNAP_CACHE}/node.yaml" ]; then
+ echo "ERROR: node.yaml pod config missing in ${SNAP_CACHE}"
+ exit 1
+fi
+
+if [ ! -f "${SNAP_CACHE}/id_rsa" ]; then
+ echo "ERROR: id_rsa ssh creds missing in ${SNAP_CACHE}"
+ exit 1
+fi
+
+# TODO (trozet) snapshot should have already been unpacked into cache folder
+# but we really should check the cache here, and not use a single cache folder
+# for when we support multiple jobs on a single slave
+pushd sdnvpn/odl-pipeline/lib > /dev/null
+# FIXME (trozet) remove this once permissions are fixed in sdnvpn repo
+chmod +x odl_reinstaller.sh
+./odl_reinstaller.sh --pod-config ${SNAP_CACHE}/node.yaml \
+ --odl-artifact /tmp/${NETVIRT_ARTIFACT} --ssh-key-file ${SNAP_CACHE}/id_rsa
+popd > /dev/null
diff --git a/jjb/3rd_party_ci/odl-netvirt.yml b/jjb/3rd_party_ci/odl-netvirt.yml
index 054761ea7..470e4335e 100644
--- a/jjb/3rd_party_ci/odl-netvirt.yml
+++ b/jjb/3rd_party_ci/odl-netvirt.yml
@@ -17,11 +17,11 @@
#####################################
phase:
- 'create-apex-vms':
- slave-label: 'ericsson-virtual5'
+ slave-label: 'odl-netvirt-virtual-intel'
- 'install-netvirt':
- slave-label: 'odl-netvirt-virtual'
+ slave-label: 'odl-netvirt-virtual-intel'
- 'postprocess':
- slave-label: 'odl-netvirt-virtual'
+ slave-label: 'odl-netvirt-virtual-intel'
#####################################
# jobs
#####################################
@@ -41,30 +41,38 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 5
max-per-node: 1
option: 'project'
+ scm:
+ - git:
+ url: https://gerrit.opnfv.org/gerrit/apex
+ branches:
+ - 'origin/master'
+ timeout: 15
+ wipe-workspace: true
+
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- string:
name: NETVIRT_ARTIFACT
- default: $WORKSPACE/distribution-karaf.tar.gz
- - 'odl-netvirt-virtual-defaults'
+ default: distribution-karaf.tar.gz
+ - 'odl-netvirt-virtual-intel-defaults'
triggers:
- gerrit:
server-name: 'git.opendaylight.org'
trigger-on:
- - comment-added-contains-event:
- comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : SUCCESS'
- - comment-added-contains-event:
- comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : UNSTABLE'
+ # - comment-added-contains-event:
+ # comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : SUCCESS'
+ # - comment-added-contains-event:
+ # comment-contains-value: 'https://jenkins.opendaylight.org/releng/job/netvirt-patch-test-current-carbon/.*?/ : UNSTABLE'
- comment-added-contains-event:
comment-contains-value: 'opnfv-test'
projects:
@@ -85,7 +93,7 @@
- name: 'odl-netvirt-verify-virtual-create-apex-vms-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
@@ -93,7 +101,7 @@
GERRIT_PATCHSET_REVISION=$GERRIT_PATCHSET_REVISION
NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
APEX_ENV_NUMBER=$APEX_ENV_NUMBER
- node-parameters: false
+ node-parameters: true
kill-phase-on: FAILURE
abort-all-job: true
- multijob:
@@ -103,7 +111,7 @@
- name: 'odl-netvirt-verify-virtual-install-netvirt-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
@@ -117,10 +125,11 @@
name: functest
condition: SUCCESSFUL
projects:
- - name: 'functest-netvirt-virtual-daily-{stream}'
+ - name: 'functest-netvirt-virtual-suite-{stream}'
predefined-parameters: |
- RC_FILE_PATH=/home/jenkins/cloner-info/overcloudrc
- DEPLOY_SCENARIO=os-odl_l2-bgpvpn-noha
+ DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
+ FUNCTEST_SUITE_NAME=tempest_smoke_serial
+ RC_FILE_PATH=$HOME/cloner-info/overcloudrc
node-parameters: true
kill-phase-on: FAILURE
abort-all-job: false
@@ -131,7 +140,7 @@
- name: 'odl-netvirt-verify-virtual-postprocess-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_ID=$GERRIT_CHANGE_ID
@@ -140,7 +149,7 @@
NETVIRT_ARTIFACT=$NETVIRT_ARTIFACT
node-parameters: true
kill-phase-on: FAILURE
- abort-all-job: true
+ abort-all-job: false
- job-template:
name: 'odl-netvirt-verify-virtual-{phase}-{stream}'
@@ -150,6 +159,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 5
@@ -158,8 +168,9 @@
- build-blocker:
use-build-blocker: true
blocking-jobs:
- - 'odl-netvirt-verify-virtual-install-.*'
- - 'functest-netvirt-virtual-daily-.*'
+ - 'odl-netvirt-verify-virtual-create-apex-vms-.*'
+ - 'odl-netvirt-verify-virtual-install-netvirt-.*'
+ - 'functest-netvirt-virtual-suite-.*'
- 'odl-netvirt-verify-virtual-postprocess-.*'
block-level: 'NODE'
@@ -169,13 +180,28 @@
timeout: 360
fail: true
+ scm:
+ - git:
+ url: https://gerrit.opnfv.org/gerrit/apex
+ branches:
+ - 'origin/master'
+ timeout: 15
+ wipe-workspace: true
+
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{slave-label}-defaults'
- '{installer}-defaults'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-odl_l2-bgpvpn-noha'
+ description: 'Scenario to deploy and test'
+ - string:
+ name: GS_URL
+ default: artifacts.opnfv.org/apex
+ description: "URL to Google Storage with snapshot artifacts."
builders:
- description-setter:
@@ -188,7 +214,7 @@
name: 'netvirt-verify-create-apex-vms-builder'
builders:
- shell:
- !include-raw: ./create-apex-vms.sh
+ !include-raw: ../apex/apex-snapshot-deploy.sh
- builder:
name: 'netvirt-verify-install-netvirt-builder'
builders:
diff --git a/jjb/3rd_party_ci/postprocess-netvirt.sh b/jjb/3rd_party_ci/postprocess-netvirt.sh
index 5baf378a9..796514259 100755
--- a/jjb/3rd_party_ci/postprocess-netvirt.sh
+++ b/jjb/3rd_party_ci/postprocess-netvirt.sh
@@ -1,12 +1,8 @@
#!/bin/bash
-set -e
+set -o errexit
+set -o nounset
+set -o pipefail
-if [ -z ${WORKSPACE} ]; then
- echo "WORKSPACE is unset. Please do so."
- exit 1
-fi
-# wipe the WORKSPACE
-/bin/rm -rf $WORKSPACE/*
# clone opnfv sdnvpn repo
git clone https://gerrit.opnfv.org/gerrit/p/sdnvpn.git $WORKSPACE/sdnvpn
. $WORKSPACE/sdnvpn/odl-pipeline/odl-pipeline-common.sh
diff --git a/jjb/apex/apex-build.sh b/jjb/apex/apex-build.sh
index ee1dfb5d3..220d02435 100755
--- a/jjb/apex/apex-build.sh
+++ b/jjb/apex/apex-build.sh
@@ -12,6 +12,9 @@ echo
if echo $BUILD_TAG | grep "apex-verify" 1> /dev/null; then
export OPNFV_ARTIFACT_VERSION=dev${BUILD_NUMBER}
export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY"
+elif echo $BUILD_TAG | grep "csit" 1> /dev/null; then
+ export OPNFV_ARTIFACT_VERSION=csit${BUILD_NUMBER}
+ export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY"
elif [ "$ARTIFACT_VERSION" == "daily" ]; then
export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d")
export BUILD_ARGS="-r $OPNFV_ARTIFACT_VERSION -c $CACHE_DIRECTORY --iso"
diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh
index 8d5c4cb13..63baa5783 100755
--- a/jjb/apex/apex-deploy.sh
+++ b/jjb/apex/apex-deploy.sh
@@ -15,7 +15,7 @@ if ! rpm -q wget > /dev/null; then
sudo yum -y install wget
fi
-if [[ $BUILD_DIRECTORY == *verify* ]]; then
+if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *csit* ]]; then
# Build is from a verify, use local build artifacts (not RPMs)
cd $WORKSPACE/../${BUILD_DIRECTORY}
WORKSPACE=$(pwd)
@@ -62,10 +62,25 @@ fi
if [ -z "$DEPLOY_SCENARIO" ]; then
echo "Deploy scenario not set!"
exit 1
+elif [[ "$DEPLOY_SCENARIO" == *gate* ]]; then
+ echo "Detecting Gating scenario..."
+ if [ -z "$GERRIT_EVENT_COMMENT_TEXT" ]; then
+ echo "ERROR: Gate job triggered without comment!"
+ exit 1
+ else
+ DEPLOY_SCENARIO=$(echo ${GERRIT_EVENT_COMMENT_TEXT} | grep start-gate-scenario | grep -Eo 'os-.*$')
+ if [ -z "$DEPLOY_SCENARIO" ]; then
+ echo "ERROR: Unable to detect scenario in Gerrit Comment!"
+ echo "Format of comment to trigger gate should be 'start-gate-scenario: <scenario>'"
+ exit 1
+ else
+ echo "Gate scenario detected: ${DEPLOY_SCENARIO}"
+ fi
+ fi
fi
-# use local build for verify
-if [[ "$BUILD_DIRECTORY" == *verify* ]]; then
+# use local build for verify and csit promote
+if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *csit* ]]; then
if [ ! -e "${WORKSPACE}/build/lib" ]; then
ln -s ${WORKSPACE}/lib ${WORKSPACE}/build/lib
fi
@@ -144,7 +159,7 @@ if [ "$OPNFV_CLEAN" == 'yes' ]; then
else
clean_opts=''
fi
- if [[ "$BUILD_DIRECTORY" == *verify* ]]; then
+ if [[ "$BUILD_DIRECTORY" == *verify* || "$BUILD_DIRECTORY" == *csit* ]]; then
sudo CONFIG=${CONFIG} LIB=${LIB} ./clean.sh ${clean_opts}
else
sudo CONFIG=${CONFIG} LIB=${LIB} opnfv-clean ${clean_opts}
@@ -166,21 +181,16 @@ fi
if [[ "$JOB_NAME" == *virtual* ]]; then
# settings for virtual deployment
- if [ "$IPV6_FLAG" == "True" ]; then
- NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_v6.yaml"
- else
- NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings.yaml"
- fi
DEPLOY_CMD="${DEPLOY_CMD} -v"
+ if [[ "${DEPLOY_SCENARIO}" =~ fdio|ovs ]]; then
+ DEPLOY_CMD="${DEPLOY_CMD} --virtual-ram 14"
+ fi
+ if [[ "$JOB_NAME" == *csit* ]]; then
+ DEPLOY_CMD="${DEPLOY_CMD} -e csit-environment.yaml --virtual-computes 2"
+ fi
else
# settings for bare metal deployment
- if [ "$IPV6_FLAG" == "True" ]; then
- NETWORK_FILE="/root/network/network_settings_v6.yaml"
- elif [[ "$JOB_NAME" == *master* ]]; then
- NETWORK_FILE="/root/network/network_settings-master.yaml"
- else
- NETWORK_FILE="/root/network/network_settings.yaml"
- fi
+ NETWORK_SETTINGS_DIR="/root/network"
INVENTORY_FILE="/root/inventory/pod_settings.yaml"
if ! sudo test -e "$INVENTORY_FILE"; then
@@ -191,6 +201,14 @@ else
DEPLOY_CMD="${DEPLOY_CMD} -i ${INVENTORY_FILE}"
fi
+if [ "$IPV6_FLAG" == "True" ]; then
+ NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_v6.yaml"
+elif echo ${DEPLOY_SCENARIO} | grep fdio; then
+ NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings_vpp.yaml"
+else
+ NETWORK_FILE="${NETWORK_SETTINGS_DIR}/network_settings.yaml"
+fi
+
# Check that network settings file exists
if ! sudo test -e "$NETWORK_FILE"; then
echo "ERROR: Required settings file missing: Network Settings file ${NETWORK_FILE}"
@@ -200,6 +218,16 @@ fi
# start deployment
sudo ${DEPLOY_CMD} -d ${DEPLOY_FILE} -n ${NETWORK_FILE} --debug
+if [[ "$JOB_NAME" == *csit* ]]; then
+ echo "CSIT job: setting host route for floating ip routing"
+ # csit route to allow docker container to reach floating ips
+ UNDERCLOUD=$(sudo virsh domifaddr undercloud | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+')
+ if sudo route | grep 192.168.37.128 > /dev/null; then
+ sudo route del -net 192.168.37.128 netmask 255.255.255.128
+ fi
+ sudo route add -net 192.168.37.128 netmask 255.255.255.128 gw ${UNDERCLOUD}
+fi
+
echo
echo "--------------------------------------------------------"
echo "Done!"
diff --git a/jjb/apex/apex-snapshot-create.sh b/jjb/apex/apex-snapshot-create.sh
new file mode 100644
index 000000000..f146dd810
--- /dev/null
+++ b/jjb/apex/apex-snapshot-create.sh
@@ -0,0 +1,97 @@
+#!/usr/bin/env bash
+##############################################################################
+# Copyright (c) 2016 Tim Rozet (Red Hat) and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+set -o errexit
+set -o nounset
+set -o pipefail
+
+SSH_OPTIONS=(-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o LogLevel=error)
+
+echo "Creating Apex snapshot..."
+echo "-------------------------"
+echo
+
+# create tmp directory
+tmp_dir=$(pwd)/.tmp
+mkdir -p ${tmp_dir}
+
+# TODO(trozet) remove this after fix goes in for tripleo_inspector to copy these
+pushd ${tmp_dir} > /dev/null
+echo "Copying overcloudrc and ssh key from Undercloud..."
+# Store overcloudrc
+UNDERCLOUD=$(sudo virsh domifaddr undercloud | grep -Eo '[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+')
+sudo scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:overcloudrc ./
+# Copy out ssh key of stack from undercloud
+sudo scp ${SSH_OPTIONS[@]} stack@${UNDERCLOUD}:.ssh/id_rsa ./
+popd > /dev/null
+
+echo "Gathering introspection information"
+git clone https://gerrit.opnfv.org/gerrit/sdnvpn.git
+pushd sdnvpn/odl-pipeline/lib > /dev/null
+sudo ./tripleo_introspector.sh --out-file ${tmp_dir}/node.yaml
+popd > /dev/null
+sudo rm -rf sdnvpn
+
+echo "Shutting down nodes"
+# Shut down nodes
+nodes=$(sudo virsh list | grep -Eo "baremetal[0-9]")
+for node in $nodes; do
+ sudo virsh shutdown ${node} --mode acpi
+done
+
+for node in $nodes; do
+ count=0
+ while [ "$count" -lt 10 ]; do
+ sleep 10
+ if sudo virsh list | grep ${node}; then
+ echo "Waiting for $node to shutdown, try $count"
+ else
+ break
+ fi
+ count=$((count+1))
+ done
+
+ if [ "$count" -ge 10 ]; then
+ echo "Node $node failed to shutdown"
+ exit 1
+ fi
+done
+
+pushd ${tmp_dir} > /dev/null
+echo "Gathering virsh definitions"
+# copy qcow2s, virsh definitions
+for node in $nodes; do
+ sudo cp -f /var/lib/libvirt/images/${node}.qcow2 ./
+ sudo virsh dumpxml ${node} > ${node}.xml
+done
+
+# copy virsh net definitions
+for net in admin api external storage tenant; do
+ sudo virsh net-dumpxml ${net} > ${net}.xml
+done
+
+sudo chown jenkins-ci:jenkins-ci *
+
+# tar up artifacts
+DATE=`date +%Y-%m-%d`
+tar czf ../apex-csit-snap-${DATE}.tar.gz .
+popd > /dev/null
+sudo rm -rf ${tmp_dir}
+echo "Snapshot saved as apex-csit-snap-${DATE}.tar.gz"
+
+# update opnfv properties file
+curl -O -L http://$GS_URL/snapshot.properties
+sed -i '/^OPNFV_SNAP_URL=/{h;s#=.*#='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#};${x;/^$/{s##OPNFV_SNAP_URL='${GS_URL}'/apex-csit-snap-'${DATE}'.tar.gz#;H};x}' snapshot.properties
+snap_sha=$(sha512sum apex-csit-snap-${DATE}.tar.gz | cut -d' ' -f1)
+sed -i '/^OPNFV_SNAP_SHA512SUM=/{h;s/=.*/='${snap_sha}'/};${x;/^$/{s//OPNFV_SNAP_SHA512SUM='${snap_sha}'/;H};x}' snapshot.properties
+echo "OPNFV_SNAP_URL=$GS_URL/apex-csit-snap-${DATE}.tar.gz"
+echo "OPNFV_SNAP_SHA512SUM=$(sha512sum apex-csit-snap-${DATE}.tar.gz | cut -d' ' -f1)"
+echo "Updated properties file: "
+cat snapshot.properties
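
The two sed one-liners above implement a replace-or-append update: if the
OPNFV_SNAP_URL or OPNFV_SNAP_SHA512SUM line already exists in
snapshot.properties it is rewritten in place, otherwise it is appended. After
a run the file holds lines of this shape (date and checksum values are
illustrative):

    OPNFV_SNAP_URL=artifacts.opnfv.org/apex/apex-csit-snap-2017-01-01.tar.gz
    OPNFV_SNAP_SHA512SUM=9f2c41aa3d2...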
diff --git a/jjb/apex/apex-snapshot-deploy.sh b/jjb/apex/apex-snapshot-deploy.sh
new file mode 100644
index 000000000..8274740c8
--- /dev/null
+++ b/jjb/apex/apex-snapshot-deploy.sh
@@ -0,0 +1,165 @@
+#!/usr/bin/env bash
+##############################################################################
+# Copyright (c) 2016 Tim Rozet (Red Hat) and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+set -o errexit
+set -o nounset
+set -o pipefail
+
+SSH_OPTIONS=(-o StrictHostKeyChecking=no -o GlobalKnownHostsFile=/dev/null -o UserKnownHostsFile=/dev/null -o LogLevel=error)
+SNAP_CACHE=$HOME/snap_cache
+
+
+echo "Deploying Apex snapshot..."
+echo "--------------------------"
+echo
+
+echo "Cleaning server"
+pushd ci > /dev/null
+sudo CONFIG=../build/ LIB=../lib ./clean.sh
+popd > /dev/null
+
+echo "Downloading latest snapshot properties file"
+if ! wget -O $WORKSPACE/opnfv.properties http://$GS_URL/snapshot.properties; then
+ echo "ERROR: Unable to find snapshot.properties at ${GS_URL}...exiting"
+ exit 1
+fi
+
+# find latest check sum
+latest_snap_checksum=$(cat opnfv.properties | grep OPNFV_SNAP_SHA512SUM | awk -F "=" '{print $2}')
+if [ -z "$latest_snap_checksum" ]; then
+ echo "ERROR: checksum of latest snapshot from snapshot.properties is null!"
+ exit 1
+fi
+
+local_snap_checksum=""
+
+# check snap cache directory exists
+# if snapshot cache exists, find the checksum
+if [ -d "$SNAP_CACHE" ]; then
+ latest_snap=$(ls ${SNAP_CACHE} | grep tar.gz | tail -n 1)
+ if [ -n "$latest_snap" ]; then
+ local_snap_checksum=$(sha512sum ${SNAP_CACHE}/${latest_snap} | cut -d' ' -f1)
+ fi
+else
+ mkdir -p ${SNAP_CACHE}
+fi
+
+# compare check sum and download latest snap if not up to date
+if [ "$local_snap_checksum" != "$latest_snap_checksum" ]; then
+ snap_url=$(cat opnfv.properties | grep OPNFV_SNAP_URL | awk -F "=" '{print $2}')
+ if [ -z "$snap_url" ]; then
+ echo "ERROR: Snap URL from snapshot.properties is null!"
+ exit 1
+ fi
+ echo "INFO: SHA mismatch, will download latest snapshot"
+ # wipe cache
+ rm -rf ${SNAP_CACHE}/*
+ wget --directory-prefix=${SNAP_CACHE}/ ${snap_url}
+ snap_tar=$(basename ${snap_url})
+else
+ snap_tar=${latest_snap}
+fi
+
+echo "INFO: Snapshot to be used is ${snap_tar}"
+
+# move to snap cache dir and unpack
+pushd ${SNAP_CACHE} > /dev/null
+tar xvf ${snap_tar}
+
+# create each network
+virsh_networks=$(ls *.xml | grep -v baremetal)
+
+if [ -z "$virsh_networks" ]; then
+ echo "ERROR: no virsh networks found in snapshot unpack"
+ exit 1
+fi
+
+echo "Checking overcloudrc"
+if ! stat overcloudrc; then
+ echo "ERROR: overcloudrc does not exist in snap unpack"
+ exit 1
+fi
+
+for network_def in ${virsh_networks}; do
+ sudo virsh net-create ${network_def}
+ network=$(echo ${network_def} | awk -F '.' '{print $1}')
+ if ! sudo virsh net-list | grep ${network}; then
+ sudo virsh net-start ${network}
+ fi
+ echo "Checking if OVS bridge is missing for network: ${network}"
+ if ! sudo ovs-vsctl show | grep "br-${network}"; then
+ sudo ovs-vsctl add-br br-${network}
+ echo "OVS Bridge created: br-${network}"
+ if [ "br-${network}" == 'br-admin' ]; then
+ echo "Configuring IP 192.0.2.99 on br-admin"
+ sudo ip addr add 192.0.2.99/24 dev br-admin
+ sudo ip link set up dev br-admin
+ elif [ "br-${network}" == 'br-external' ]; then
+ echo "Configuring IP 192.168.37.1 on br-external"
+ sudo ip addr add 192.168.37.1/24 dev br-external
+ sudo ip link set up dev br-external
+ # Routes for admin network
+ # The overcloud controller is multi-homed and will fail to respond
+ # to traffic from the functest container due to reverse-path-filtering
+ # This route allows reverse traffic, by forcing admin network destined
+ # traffic through the external network for controller IPs only.
+ # Compute nodes have no ip on external interfaces.
+ controller_ips=$(cat overcloudrc | grep -Eo "192.0.2.[0-9]+")
+ for ip in $controller_ips; do
+ sudo ip route add ${ip}/32 dev br-external
+ done
+ fi
+ fi
+done
+
+echo "Virsh networks up: $(sudo virsh net-list)"
+echo "Bringing up Overcloud VMs..."
+virsh_vm_defs=$(ls baremetal*.xml)
+
+if [ -z "$virsh_vm_defs" ]; then
+ echo "ERROR: no virsh VMs found in snapshot unpack"
+ exit 1
+fi
+
+for node_def in ${virsh_vm_defs}; do
+ sudo virsh define ${node_def}
+ node=$(echo ${node_def} | awk -F '.' '{print $1}')
+ sudo cp -f ${node}.qcow2 /var/lib/libvirt/images/
+ sudo virsh start ${node}
+ echo "Node: ${node} started"
+done
+
+# copy overcloudrc for functest
+mkdir -p $HOME/cloner-info
+cp -f overcloudrc $HOME/cloner-info/
+
+admin_controller_ip=$(cat overcloudrc | grep -Eo -m 1 "192.0.2.[0-9]+")
+netvirt_url="http://${admin_controller_ip}:8081/restconf/operational/network-topology:network-topology/topology/netvirt:1"
+
+source overcloudrc
+counter=1
+while [ "$counter" -le 10 ]; do
+ if curl --fail --silent ${admin_controller_ip}:80 > /dev/null; then
+ echo "Overcloud Horizon is up...Checking if OpenDaylight NetVirt is up..."
+ if curl --fail --silent -u admin:admin ${netvirt_url} > /dev/null; then
+ echo "OpenDaylight is up. Overcloud deployment complete"
+ exit 0
+ else
+ echo "OpenDaylight not yet up, try ${counter}"
+ fi
+ else
+ echo "Horizon/Apache not yet up, try ${counter}"
+ fi
+ counter=$((counter+1))
+ sleep 60
+done
+
+echo "ERROR: Deployment not up after 10 minutes...exiting."
+exit 1
diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh
index 64f13f4e6..ef8ad5329 100755
--- a/jjb/apex/apex-upload-artifact.sh
+++ b/jjb/apex/apex-upload-artifact.sh
@@ -73,7 +73,17 @@ gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION
gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log
}
-if gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then
+uploadsnap () {
+ # Uploads snapshot artifact and updated properties file
+ echo "Uploading snapshot artifacts"
+ gsutil cp $WORKSPACE/apex-csit-snap-`date +%Y-%m-%d`.tar.gz gs://$GS_URL/ > gsutil.iso.log
+ gsutil cp $WORKSPACE/snapshot.properties gs://$GS_URL/snapshot.properties > gsutil.latest.log
+ echo "Upload complete for Snapshot"
+}
+
+if echo $WORKSPACE | grep csit > /dev/null; then
+ uploadsnap
+elif gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then
echo "Signing Key avaliable"
signiso
uploadiso
diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index 5c1bded4c..3938f159b 100644
--- a/jjb/apex/apex.yml
+++ b/jjb/apex/apex.yml
@@ -2,6 +2,7 @@
name: apex
jobs:
- 'apex-verify-{stream}'
+ - 'apex-verify-gate-{stream}'
- 'apex-verify-unit-tests-{stream}'
- 'apex-runner-{platform}-{scenario}-{stream}'
- 'apex-runner-cperf-{stream}'
@@ -12,6 +13,7 @@
- 'apex-daily-colorado'
- 'apex-build-colorado'
- 'apex-deploy-baremetal-os-odl_l2-fdio-ha-colorado'
+ - 'apex-csit-promote-daily-{stream}'
# stream: branch with - in place of / (eg. stable-arno)
# branch: branch (eg. stable/arno)
@@ -32,16 +34,20 @@
- 'os-nosdn-ovs-noha'
- 'os-nosdn-fdio-noha'
- 'os-nosdn-fdio-ha'
+ - 'os-odl_l2-fdio-noha'
- 'os-odl_l2-fdio-ha'
- 'os-odl_l2-netvirt_gbp_fdio-noha'
- 'os-odl_l2-sfc-noha'
- 'os-odl_l3-nofeature-ha'
- - 'os-odl_l3-bgpvpn-ha'
+ - 'os-odl-bgpvpn-ha'
+ - 'os-odl-gluon-noha'
- 'os-odl_l3-fdio-noha'
- 'os-odl_l3-fdio-ha'
- 'os-odl_l3-fdio_dvr-noha'
- 'os-odl_l3-fdio_dvr-ha'
+ - 'os-odl_l3-csit-noha'
- 'os-onos-nofeature-ha'
+ - 'gate'
platform:
- 'baremetal'
@@ -60,7 +66,6 @@
gs-pathname: '{gs-pathname}'
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- string:
name: GIT_BASE
@@ -93,6 +98,7 @@
- compare-type: ANT
pattern: 'tests/**'
properties:
+ - logrotate-default
- throttle:
max-per-node: 1
max-total: 10
@@ -114,7 +120,6 @@
gs-pathname: '{gs-pathname}'
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- string:
name: GIT_BASE
@@ -154,6 +159,7 @@
pattern: 'config/**'
properties:
+ - logrotate-default
- build-blocker:
use-build-blocker: true
block-level: 'NODE'
@@ -172,7 +178,7 @@
- 'apex-unit-test'
- 'apex-build'
- trigger-builds:
- - project: 'apex-deploy-virtual-os-nosdn-nofeature-ha-{stream}'
+ - project: 'apex-deploy-virtual-os-odl_l3-nofeature-ha-{stream}'
predefined-parameters: |
BUILD_DIRECTORY=apex-verify-{stream}
OPNFV_CLEAN=yes
@@ -182,22 +188,87 @@
- trigger-builds:
- project: 'functest-apex-{verify-slave}-suite-{stream}'
predefined-parameters: |
- DEPLOY_SCENARIO=os-nosdn-nofeature-ha
+ DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
FUNCTEST_SUITE_NAME=healthcheck
block: true
same-node: true
+ - 'apex-workspace-cleanup'
+
+# Verify Scenario Gate
+- job-template:
+ name: 'apex-verify-gate-{stream}'
+
+ node: '{verify-slave}'
+
+ concurrent: true
+
+ parameters:
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: "Used for overriding the GIT URL coming from parameters macro."
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - comment-added-contains-event:
+ comment-contains-value: '^Patch Set [0-9]+: Code-Review\+2.*start-gate-scenario:.*'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'apex'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: ANT
+ pattern: 'ci/**'
+ - compare-type: ANT
+ pattern: 'build/**'
+ - compare-type: ANT
+ pattern: 'lib/**'
+ - compare-type: ANT
+ pattern: 'config/**'
+
+ properties:
+ - logrotate-default
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-daily.*'
+ - 'apex-deploy.*'
+ - 'apex-build.*'
+ - 'apex-runner.*'
+ - 'apex-verify.*'
+ - throttle:
+ max-per-node: 1
+ max-total: 10
+ option: 'project'
+
+ builders:
+ - 'apex-build'
- trigger-builds:
- - project: 'apex-deploy-virtual-os-odl_l3-nofeature-ha-{stream}'
+ - project: 'apex-deploy-virtual-gate-{stream}'
predefined-parameters: |
- BUILD_DIRECTORY=apex-verify-{stream}
+ BUILD_DIRECTORY=apex-verify-gate-{stream}
OPNFV_CLEAN=yes
+ current-parameters: true
git-revision: false
block: true
same-node: true
- trigger-builds:
- project: 'functest-apex-{verify-slave}-suite-{stream}'
predefined-parameters: |
- DEPLOY_SCENARIO=os-odl_l3-nofeature-ha
+ DEPLOY_SCENARIO=os-nosdn-nofeature-ha
FUNCTEST_SUITE_NAME=healthcheck
block: true
same-node: true
@@ -217,7 +288,6 @@
gs-pathname: '{gs-pathname}'
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- string:
name: GIT_BASE
@@ -228,11 +298,13 @@
- git-scm
properties:
+ - logrotate-default
- build-blocker:
use-build-blocker: true
blocking-jobs:
- 'apex-daily.*'
- 'apex-verify.*'
+ - 'apex-csit.*'
builders:
- trigger-builds:
@@ -274,7 +346,6 @@
gs-pathname: '{gs-pathname}'
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- string:
name: GIT_BASE
@@ -285,6 +356,7 @@
- git-scm
properties:
+ - logrotate-default
- build-blocker:
use-build-blocker: false
block-level: 'NODE'
@@ -327,10 +399,9 @@
parameters:
- project-parameter:
project: '{project}'
+ branch: '{branch}'
- apex-parameter:
gs-pathname: '{gs-pathname}'
- - gerrit-parameter:
- branch: '{branch}'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -340,6 +411,7 @@
- git-scm
properties:
+ - logrotate-default
- build-blocker:
use-build-blocker: true
block-level: 'NODE'
@@ -382,7 +454,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- apex-parameter:
gs-pathname: '{gs-pathname}'
@@ -396,6 +467,7 @@
description: "Use yes in lower case to invoke clean. Indicates if the deploy environment should be cleaned before deployment"
properties:
+ - logrotate-default
- build-blocker:
use-build-blocker: true
block-level: 'NODE'
@@ -428,7 +500,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- apex-parameter:
gs-pathname: '{gs-pathname}'
@@ -438,6 +509,7 @@
description: "Scenario to deploy with."
properties:
+ - logrotate-default
- build-blocker:
use-build-blocker: true
block-level: 'NODE'
@@ -470,12 +542,12 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- apex-parameter:
gs-pathname: '{gs-pathname}'
properties:
+ - logrotate-default
- build-blocker:
use-build-blocker: true
block-level: 'NODE'
@@ -484,6 +556,7 @@
- 'apex-deploy.*'
- 'apex-build.*'
- 'apex-runner.*'
+ - 'apex-csit.*'
triggers:
- 'apex-{stream}'
@@ -555,7 +628,126 @@
build-step-failure-threshold: 'never'
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
-
+ - trigger-builds:
+ - project: 'apex-deploy-baremetal-os-odl-bgpvpn-ha-{stream}'
+ predefined-parameters: |
+ BUILD_DIRECTORY=apex-build-{stream}/.build
+ OPNFV_CLEAN=yes
+ git-revision: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ block: true
+ - trigger-builds:
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-odl-bgpvpn-ha
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-odl-bgpvpn-ha
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'apex-deploy-baremetal-os-odl-gluon-noha-{stream}'
+ predefined-parameters: |
+ BUILD_DIRECTORY=apex-build-{stream}/.build
+ OPNFV_CLEAN=yes
+ git-revision: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ block: true
+ - trigger-builds:
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-odl-gluon-noha
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-odl-gluon-noha
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'apex-deploy-baremetal-os-odl_l2-fdio-noha-{stream}'
+ predefined-parameters: |
+ BUILD_DIRECTORY=apex-build-{stream}/.build
+ OPNFV_CLEAN=yes
+ git-revision: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ block: true
+ - trigger-builds:
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-odl_l2-fdio-noha
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-odl_l2-fdio-noha
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'apex-deploy-baremetal-os-odl_l2-fdio-ha-{stream}'
+ predefined-parameters: |
+ BUILD_DIRECTORY=apex-build-{stream}/.build
+ OPNFV_CLEAN=yes
+ git-revision: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ block: true
+ - trigger-builds:
+ - project: 'functest-apex-{daily-slave}-daily-{stream}'
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-odl_l2-fdio-ha
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'yardstick-apex-{slave}-daily-{stream}'
+ predefined-parameters:
+ DEPLOY_SCENARIO=os-odl_l2-fdio-ha
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
# Colorado Build
- job-template:
@@ -575,10 +767,9 @@
parameters:
- project-parameter:
project: '{project}'
+ branch: 'stable/colorado'
- apex-parameter:
gs-pathname: '/colorado'
- - gerrit-parameter:
- branch: 'stable/colorado'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -588,6 +779,7 @@
- git-scm
properties:
+ - logrotate-default
- build-blocker:
use-build-blocker: true
block-level: 'NODE'
@@ -622,7 +814,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: 'stable/colorado'
- apex-parameter:
gs-pathname: '/colorado'
@@ -632,6 +823,7 @@
description: "Scenario to deploy with."
properties:
+ - logrotate-default
- build-blocker:
use-build-blocker: true
block-level: 'NODE'
@@ -664,12 +856,12 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: 'stable/colorado'
- apex-parameter:
gs-pathname: '/colorado'
properties:
+ - logrotate-default
- build-blocker:
use-build-blocker: true
block-level: 'NODE'
@@ -710,6 +902,65 @@
failure-threshold: 'never'
unstable-threshold: 'FAILURE'
+# CSIT promote
+- job-template:
+ name: 'apex-csit-promote-daily-{stream}'
+
+ # Job template for promoting CSIT Snapshots
+ #
+ # Required Variables:
+ # stream: branch with - in place of / (eg. stable)
+ # branch: branch (eg. stable)
+ node: '{daily-slave}'
+
+ disabled: false
+
+ scm:
+ - git-scm
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - apex-parameter:
+ gs-pathname: '{gs-pathname}'
+
+ properties:
+ - build-blocker:
+ use-build-blocker: true
+ block-level: 'NODE'
+ blocking-jobs:
+ - 'apex-verify.*'
+ - 'apex-deploy.*'
+ - 'apex-build.*'
+ - 'apex-runner.*'
+ - 'apex-daily.*'
+
+ triggers:
+ - timed: '0 12 * * 0'
+
+ builders:
+ - 'apex-build'
+ - trigger-builds:
+ - project: 'apex-deploy-virtual-os-odl_l3-csit-noha-{stream}'
+ predefined-parameters: |
+ BUILD_DIRECTORY=apex-csit-promote-daily-{stream}
+ OPNFV_CLEAN=yes
+ git-revision: false
+ block: true
+ same-node: true
+ - trigger-builds:
+ - project: 'functest-apex-{daily-slave}-suite-{stream}'
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-odl_l3-nofeature-noha
+ FUNCTEST_SUITE_NAME=tempest_smoke_serial
+ block: true
+ same-node: true
+ - shell:
+ !include-raw-escape: ./apex-snapshot-create.sh
+ - shell:
+ !include-raw-escape: ./apex-upload-artifact.sh
+
- job-template:
name: 'apex-gs-clean-{stream}'
@@ -724,6 +975,7 @@
parameters:
- project-parameter:
project: '{project}'
+ branch: '{branch}'
- apex-parameter:
gs-pathname: '{gs-pathname}'
diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml
index 4e88678b4..4cb58d916 100644
--- a/jjb/armband/armband-ci-jobs.yml
+++ b/jjb/armband/armband-ci-jobs.yml
@@ -104,6 +104,7 @@
concurrent: false
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 4
@@ -125,6 +126,7 @@
parameters:
- project-parameter:
project: '{project}'
+ branch: '{branch}'
- '{installer}-defaults'
- '{slave-label}-defaults':
installer: '{installer}'
@@ -188,6 +190,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 4
@@ -203,7 +206,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{installer}-defaults'
- '{slave-label}-defaults':
diff --git a/jjb/armband/armband-deploy.sh b/jjb/armband/armband-deploy.sh
index 4df9acfd8..adabfcaeb 100755
--- a/jjb/armband/armband-deploy.sh
+++ b/jjb/armband/armband-deploy.sh
@@ -8,7 +8,6 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-set -o errexit
set -o nounset
set -o pipefail
@@ -49,8 +48,8 @@ mkdir -p $TMPDIR
cd $WORKSPACE
if [[ $LAB_CONFIG_URL =~ ^(git|ssh):// ]]; then
- echo "Cloning securedlab repo ${GIT_BRANCH##origin/}"
- git clone --quiet --branch ${GIT_BRANCH##origin/} $LAB_CONFIG_URL lab-config
+ echo "Cloning securedlab repo $BRANCH"
+ git clone --quiet --branch $BRANCH $LAB_CONFIG_URL lab-config
LAB_CONFIG_URL=file://${WORKSPACE}/lab-config
# Source local_env if present, which contains POD-specific config
@@ -73,7 +72,7 @@ FUEL_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}.log.tar.gz"
# Deploy Cache (to enable just create the deploy-cache subdir)
# NOTE: Only available when ISO files are cached using ISOSTORE mechanism
-DEPLOY_CACHE=${ISOSTORE:-/iso_mount/opnfv_ci}/${GIT_BRANCH##*/}/deploy-cache
+DEPLOY_CACHE=${ISOSTORE:-/iso_mount/opnfv_ci}/${BRANCH##*/}/deploy-cache
if [[ -d "${DEPLOY_CACHE}" ]]; then
echo "Deploy cache dir present."
echo "--------------------------------------------------------"
diff --git a/jjb/armband/armband-download-artifact.sh b/jjb/armband/armband-download-artifact.sh
index ed7897b8e..e2dd097b6 100755
--- a/jjb/armband/armband-download-artifact.sh
+++ b/jjb/armband/armband-download-artifact.sh
@@ -38,7 +38,7 @@ ISO_FILE=${WORKSPACE}/opnfv.iso
# using ISOs for verify & merge jobs from local storage will be enabled later
if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
# check if we already have the ISO to avoid redownload
- ISOSTORE=${ISOSTORE:-/iso_mount/opnfv_ci}/${GIT_BRANCH##*/}
+ ISOSTORE=${ISOSTORE:-/iso_mount/opnfv_ci}/${BRANCH##*/}
if [[ -f "$ISOSTORE/$OPNFV_ARTIFACT" ]]; then
echo "ISO exists locally. Skipping the download and using the file from ISO store"
ln -s $ISOSTORE/$OPNFV_ARTIFACT ${ISO_FILE}
diff --git a/jjb/armband/armband-project-jobs.yml b/jjb/armband/armband-project-jobs.yml
index 981f509c7..fd37c5af6 100644
--- a/jjb/armband/armband-project-jobs.yml
+++ b/jjb/armband/armband-project-jobs.yml
@@ -30,6 +30,7 @@
concurrent: false
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 1
@@ -39,9 +40,8 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-arm-defaults'
+ - 'opnfv-build-enea-defaults'
- '{installer}-defaults'
- armband-project-parameter:
gs-pathname: '{gs-pathname}'
diff --git a/jjb/armband/armband-verify-jobs.yml b/jjb/armband/armband-verify-jobs.yml
index d5333eb07..3486718e4 100644
--- a/jjb/armband/armband-verify-jobs.yml
+++ b/jjb/armband/armband-verify-jobs.yml
@@ -21,13 +21,13 @@
#####################################
phase:
- 'basic':
- slave-label: 'opnfv-build-arm'
+ slave-label: 'opnfv-build-enea'
- 'build':
- slave-label: 'opnfv-build-arm'
+ slave-label: 'opnfv-build-enea'
- 'deploy-virtual':
- slave-label: 'opnfv-build-arm'
+ slave-label: 'opnfv-build-enea'
- 'smoke-test':
- slave-label: 'opnfv-build-arm'
+ slave-label: 'opnfv-build-enea'
#####################################
# jobs
#####################################
@@ -47,6 +47,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 4
@@ -93,9 +94,8 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-arm-defaults'
+ - 'opnfv-build-enea-defaults'
- 'armband-verify-defaults':
gs-pathname: '{gs-pathname}'
@@ -109,7 +109,7 @@
- name: 'armband-verify-basic-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -123,7 +123,7 @@
- name: 'armband-verify-build-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -137,7 +137,7 @@
- name: 'armband-verify-deploy-virtual-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -151,7 +151,7 @@
- name: 'armband-verify-smoke-test-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -167,6 +167,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 6
@@ -189,7 +190,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{slave-label}-defaults'
- '{installer}-defaults'
diff --git a/jjb/armband/build.sh b/jjb/armband/build.sh
index a058ca158..a71cf1112 100755
--- a/jjb/armband/build.sh
+++ b/jjb/armband/build.sh
@@ -96,6 +96,7 @@ ls -al $BUILD_DIRECTORY
echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
+ echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
echo "OPNFV_BUILD_URL=$BUILD_URL"
) > $WORKSPACE/opnfv.properties
diff --git a/jjb/armband/upload-artifacts.sh b/jjb/armband/upload-artifacts.sh
index 7059ac344..97987e2c5 100755
--- a/jjb/armband/upload-artifacts.sh
+++ b/jjb/armband/upload-artifacts.sh
@@ -28,7 +28,7 @@ if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
# store ISO locally on NFS first
ISOSTORE=${ISOSTORE:-/iso_mount/opnfv_ci}
if [[ -d "$ISOSTORE" ]]; then
- ISOSTORE=${ISOSTORE}/${GIT_BRANCH##*/}
+ ISOSTORE=${ISOSTORE}/${BRANCH##*/}
mkdir -p $ISOSTORE
# remove all but most recent 3 ISOs first to keep iso_mount clean & tidy
diff --git a/jjb/availability/availability.yml b/jjb/availability/availability.yml
index c3603a65f..9cb7f8899 100644
--- a/jjb/availability/availability.yml
+++ b/jjb/availability/availability.yml
@@ -28,7 +28,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
diff --git a/jjb/barometer/barometer.yml b/jjb/barometer/barometer.yml
index e789b7f7a..6a17e1706 100644
--- a/jjb/barometer/barometer.yml
+++ b/jjb/barometer/barometer.yml
@@ -30,7 +30,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
@@ -77,6 +76,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 3
@@ -86,7 +86,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
@@ -128,6 +127,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 3
@@ -137,7 +137,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
diff --git a/jjb/bottlenecks/bottlenecks-ci-jobs.yml b/jjb/bottlenecks/bottlenecks-ci-jobs.yml
index 92ec2d866..2779e316b 100644
--- a/jjb/bottlenecks/bottlenecks-ci-jobs.yml
+++ b/jjb/bottlenecks/bottlenecks-ci-jobs.yml
@@ -72,6 +72,8 @@
suite:
- 'rubbos'
- 'vstf'
+ - 'posca_stress_traffic'
+ - 'posca_stress_ping'
jobs:
- 'bottlenecks-{installer}-{suite}-{pod}-daily-{stream}'
@@ -95,7 +97,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{slave-label}-defaults'
- '{installer}-defaults'
@@ -137,65 +138,14 @@
- builder:
name: bottlenecks-env-cleanup
builders:
- - shell: |
- #!/bin/bash
- set -e
- [[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
- echo "Bottlenecks: docker containers/images cleaning up"
- if [[ ! -z $(docker ps -a | grep opnfv/bottlenecks) ]]; then
- echo "removing existing opnfv/bottlenecks containers"
- docker ps -a | grep opnfv/bottlenecks | awk '{print $1}' | xargs docker rm -f >$redirect
- fi
-
- if [[ ! -z $(docker images | grep opnfv/bottlenecks) ]]; then
- echo "Bottlenecks: docker images to remove:"
- docker images | head -1 && docker images | grep opnfv/bottlenecks
- image_tags=($(docker images | grep opnfv/bottlenecks | awk '{print $2}'))
- for tag in "${image_tags[@]}"; do
- echo "Removing docker image opnfv/bottlenecks:$tag..."
- docker rmi opnfv/bottlenecks:$tag >$redirect
- done
- fi
+ - shell:
+ !include-raw: ./bottlenecks-cleanup.sh
- builder:
name: bottlenecks-run-suite
builders:
- - shell: |
- #!/bin/bash
- set -e
- [[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
-
- echo "Bottlenecks: to pull image opnfv/bottlenecks:${DOCKER_TAG}"
- docker pull opnfv/bottlenecks:$DOCKER_TAG >${redirect}
-
- echo "Bottlenecks: docker start running"
- opts="--privileged=true -id"
- envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
- -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NET=${EXTERNAL_NETWORK} \
- -e BOTTLENECKS_BRANCH=${BOTTLENECKS_BRANCH} -e GERRIT_REFSPEC_DEBUG=${GERRIT_REFSPEC_DEBUG} \
- -e BOTTLENECKS_DB_TARGET=${BOTTLENECKS_DB_TARGET} -e PACKAGE_URL=${PACKAGE_URL}"
- cmd="sudo docker run ${opts} ${envs} opnfv/bottlenecks:${DOCKER_TAG} /bin/bash"
- echo "Bottlenecks: docker cmd running ${cmd}"
- ${cmd} >${redirect}
-
- echo "Bottlenecks: obtain docker id"
- container_id=$(docker ps | grep "opnfv/bottlenecks:${DOCKER_TAG}" | awk '{print $1}' | head -1)
- if [ -z ${container_id} ]; then
- echo "Cannot find opnfv/bottlenecks container ID ${container_id}. Please check if it exists."
- docker ps -a
- exit 1
- fi
-
- echo "Bottlenecks: to prepare openstack environment"
- prepare_env="${REPO_DIR}/ci/prepare_env.sh"
- echo "Bottlenecks: docker cmd running: ${prepare_env}"
- sudo docker exec ${container_id} ${prepare_env}
-
- echo "Bottlenecks: to run testsuite ${SUITE_NAME}"
- run_testsuite="${REPO_DIR}/run_tests.sh -s ${SUITE_NAME}"
- echo "Bottlenecks: docker cmd running: ${run_testsuite}"
- sudo docker exec ${container_id} ${run_testsuite}
+ - shell:
+ !include-raw: ./bottlenecks-run-suite.sh
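+# Note: plain !include-raw is sufficient here because these are standalone
+# builder macros. A script included directly from a job-template (which JJB
+# expands with format()) needs !include-raw-escape so literal ${VAR} braces
+# survive, as the compass templates below do.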
####################
# parameter macros
diff --git a/jjb/bottlenecks/bottlenecks-cleanup.sh b/jjb/bottlenecks/bottlenecks-cleanup.sh
new file mode 100644
index 000000000..0ba042318
--- /dev/null
+++ b/jjb/bottlenecks/bottlenecks-cleanup.sh
@@ -0,0 +1,111 @@
+#!/bin/bash
+set -e
+[[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
+
+BOTTLENECKS_IMAGE=opnfv/bottlenecks
+echo "Bottlenecks: docker containers/images cleaning up"
+
+dangling_images=($(docker images -f "dangling=true" | grep $BOTTLENECKS_IMAGE | awk '{print $3}'))
+if [[ -n $dangling_images ]]; then
+ echo "Removing $BOTTLENECKS_IMAGE:<none> dangling images and their containers"
+ docker images | head -1 && docker images | grep $dangling_images
+ for image_id in "${dangling_images[@]}"; do
+ echo "Bottlenecks: Removing dangling image $image_id"
+ docker rmi -f $image_id >${redirect}
+ done
+fi
+
+for image_id in "${dangling_images[@]}"; do
+ if [[ -n $(docker ps -a | grep $image_id) ]]; then
+ echo "Bottlenecks: Removing containers associated with dangling image: $image_id"
+ docker ps -a | head -1 && docker ps -a | grep $image_id
+ docker ps -a | grep $image_id | awk '{print $1}'| xargs docker rm -f >${redirect}
+ fi
+done
+
+if [[ -n $(docker ps -a | grep $BOTTLENECKS_IMAGE) ]]; then
+ echo "Removing existing $BOTTLENECKS_IMAGE containers"
+ docker ps -a | grep $BOTTLENECKS_IMAGE | awk '{print $1}' | xargs docker rm -f >$redirect
+fi
+
+if [[ -n $(docker images | grep $BOTTLENECKS_IMAGE) ]]; then
+ echo "Bottlenecks: docker images to remove:"
+ docker images | head -1 && docker images | grep $BOTTLENECKS_IMAGE
+ image_tags=($(docker images | grep $BOTTLENECKS_IMAGE | awk '{print $2}'))
+ for tag in "${image_tags[@]}"; do
+ echo "Removing docker image $BOTTLENECKS_IMAGE:$tag..."
+ docker rmi $BOTTLENECKS_IMAGE:$tag >$redirect
+ done
+fi
+
+echo "Yardstick: docker containers/images cleaning up"
+YARDSTICK_IMAGE=opnfv/yardstick
+
+dangling_images=($(docker images -f "dangling=true" | grep $YARDSTICK_IMAGE | awk '{print $3}'))
+if [[ -n $dangling_images ]]; then
+ echo "Removing $YARDSTICK_IMAGE:<none> dangling images and their containers"
+ docker images | head -1 && docker images | grep $dangling_images
+ for image_id in "${dangling_images[@]}"; do
+ echo "Yardstick: Removing dangling image $image_id"
+ docker rmi -f $image_id >${redirect}
+ done
+fi
+
+for image_id in "${dangling_images[@]}"; do
+ if [[ -n $(docker ps -a | grep $image_id) ]]; then
+ echo "Yardstick: Removing containers associated with dangling image: $image_id"
+ docker ps -a | head -1 && docker ps -a | grep $image_id
+ docker ps -a | grep $image_id | awk '{print $1}'| xargs docker rm -f >${redirect}
+ fi
+done
+
+if [[ -n $(docker ps -a | grep $YARDSTICK_IMAGE) ]]; then
+ echo "Removing existing $YARDSTICK_IMAGE containers"
+ docker ps -a | grep $YARDSTICK_IMAGE | awk '{print $1}' | xargs docker rm -f >$redirect
+fi
+
+if [[ -n $(docker images | grep $YARDSTICK_IMAGE) ]]; then
+ echo "Yardstick: docker images to remove:"
+ docker images | head -1 && docker images | grep $YARDSTICK_IMAGE
+ image_tags=($(docker images | grep $YARDSTICK_IMAGE | awk '{print $2}'))
+ for tag in "${image_tags[@]}"; do
+ echo "Removing docker image $YARDSTICK_IMAGE:$tag..."
+ docker rmi $YARDSTICK_IMAGE:$tag >$redirect
+ done
+fi
+
+echo "InfluxDB: docker containers/images cleaning up"
+INFLUXDB_IMAGE=tutum/influxdb
+
+dangling_images=($(docker images -f "dangling=true" | grep $INFLUXDB_IMAGE | awk '{print $3}'))
+if [[ -n $dangling_images ]]; then
+ echo "Removing $INFLUXDB_IMAGE:<none> dangling images and their containers"
+ docker images | head -1 && docker images | grep $dangling_images
+ for image_id in "${dangling_images[@]}"; do
+ echo "InfluxDB: Removing dangling image $image_id"
+ docker rmi -f $image_id >${redirect}
+ done
+fi
+
+for image_id in "${dangling_images[@]}"; do
+ if [[ -n $(docker ps -a | grep $image_id) ]]; then
+ echo "InfluxDB: Removing containers associated with dangling image: $image_id"
+ docker ps -a | head -1 && docker ps -a | grep $image_id
+ docker ps -a | grep $image_id | awk '{print $1}'| xargs docker rm -f >${redirect}
+ fi
+done
+
+if [[ -n $(docker ps -a | grep $INFLUXDB_IMAGE) ]]; then
+ echo "Removing existing $INFLUXDB_IMAGE containers"
+ docker ps -a | grep $INFLUXDB_IMAGE | awk '{print $1}' | xargs docker rm -f >$redirect
+fi
+
+if [[ -n $(docker images | grep $INFLUXDB_IMAGE) ]]; then
+ echo "InfluxDB: docker images to remove:"
+ docker images | head -1 && docker images | grep $INFLUXDB_IMAGE
+ image_tags=($(docker images | grep $INFLUXDB_IMAGE | awk '{print $2}'))
+ for tag in "${image_tags[@]}"; do
+ echo "Removing docker image $INFLUXDB_IMAGE:$tag..."
+ docker rmi $INFLUXDB_IMAGE:$tag >$redirect
+ done
+fi
\ No newline at end of file
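The three stanzas above repeat the same remove-containers-then-images dance for
each image. A possible consolidation -- an untested sketch, assuming GNU xargs
(for -r) and that a separate pass on 'docker images -f dangling=true' still
handles the <none> images:

    cleanup_image() {
        local image=$1
        # containers first: match on the IMAGE column of 'docker ps -a'
        docker ps -a | awk -v img="$image" '$2 ~ img {print $1}' | xargs -r docker rm -f >$redirect
        # then every tag of the image itself
        docker images | awk -v img="$image" '$1 ~ img {print $1":"$2}' | xargs -r docker rmi >$redirect
    }

    for img in opnfv/bottlenecks opnfv/yardstick tutum/influxdb; do
        cleanup_image "$img"
    done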
diff --git a/jjb/bottlenecks/bottlenecks-project-jobs.yml b/jjb/bottlenecks/bottlenecks-project-jobs.yml
index 03e40dc0e..a0abb9331 100644
--- a/jjb/bottlenecks/bottlenecks-project-jobs.yml
+++ b/jjb/bottlenecks/bottlenecks-project-jobs.yml
@@ -29,6 +29,8 @@
suite:
- 'rubbos'
- 'vstf'
+ - 'posca_stress_traffic'
+ - 'posca_stress_ping'
################################
# job templates
@@ -42,7 +44,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
@@ -80,7 +81,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
@@ -114,6 +114,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 1
@@ -123,7 +124,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
- bottlenecks-parameter:
diff --git a/jjb/bottlenecks/bottlenecks-run-suite.sh b/jjb/bottlenecks/bottlenecks-run-suite.sh
new file mode 100644
index 000000000..f69463fc2
--- /dev/null
+++ b/jjb/bottlenecks/bottlenecks-run-suite.sh
@@ -0,0 +1,65 @@
+#!/bin/bash
+#set -e
+[[ $GERRIT_REFSPEC_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
+BOTTLENECKS_IMAGE=opnfv/bottlenecks
+
+if [[ $SUITE_NAME == rubbos || $SUITE_NAME == vstf ]]; then
+ echo "Bottlenecks: to pull image $BOTTLENECKS_IMAGE:${DOCKER_TAG}"
+ docker pull $BOTTLENECKS_IMAGE:$DOCKER_TAG >${redirect}
+
+ echo "Bottlenecks: docker start running"
+ opts="--privileged=true -id"
+ envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
+ -e NODE_NAME=${NODE_NAME} -e EXTERNAL_NET=${EXTERNAL_NETWORK} \
+ -e BOTTLENECKS_BRANCH=${BOTTLENECKS_BRANCH} -e GERRIT_REFSPEC_DEBUG=${GERRIT_REFSPEC_DEBUG} \
+ -e BOTTLENECKS_DB_TARGET=${BOTTLENECKS_DB_TARGET} -e PACKAGE_URL=${PACKAGE_URL}"
+ cmd="sudo docker run ${opts} ${envs} $BOTTLENECKS_IMAGE:${DOCKER_TAG} /bin/bash"
+ echo "Bottlenecks: docker cmd running ${cmd}"
+ ${cmd} >${redirect}
+
+ echo "Bottlenecks: obtain docker id"
+ container_id=$(docker ps | grep "$BOTTLENECKS_IMAGE:${DOCKER_TAG}" | awk '{print $1}' | head -1)
+ if [ -z "${container_id}" ]; then
+ echo "Cannot find $BOTTLENECKS_IMAGE container ID ${container_id}. Please check if it exists."
+ docker ps -a
+ exit 1
+ fi
+
+ echo "Bottlenecks: to prepare openstack environment"
+ prepare_env="${REPO_DIR}/ci/prepare_env.sh"
+ echo "Bottlenecks: docker cmd running: ${prepare_env}"
+ sudo docker exec ${container_id} ${prepare_env}
+
+ echo "Bottlenecks: to run testsuite ${SUITE_NAME}"
+ run_testsuite="${REPO_DIR}/run_tests.sh -s ${SUITE_NAME}"
+ echo "Bottlenecks: docker cmd running: ${run_testsuite}"
+ sudo docker exec ${container_id} ${run_testsuite}
+else
+ echo "Bottlenecks: installing POSCA docker-compose"
+ # docker-compose is a single binary; check for the file at its absolute path
+ if [ -f /usr/local/bin/docker-compose ]; then
+ rm -f /usr/local/bin/docker-compose
+ fi
+ curl -L https://github.com/docker/compose/releases/download/1.11.0/docker-compose-`uname -s`-`uname -m` > /usr/local/bin/docker-compose
+ chmod +x /usr/local/bin/docker-compose
+
+ echo "Bottlenecks: composing up dockers"
+ cd $WORKSPACE
+ docker-compose -f $WORKSPACE/docker/bottleneck-compose/docker-compose.yml up -d
+
+ echo "Bottlenecks: running traffic stress/factor testing in posca testsuite "
+ POSCA_SCRIPT=/home/opnfv/bottlenecks/testsuites/posca
+ if [[ $SUITE_NAME == posca_stress_traffic ]]; then
+ TEST_CASE=posca_factor_system_bandwidth
+ echo "Bottlenecks: pulling tutum/influxdb for yardstick"
+ docker pull tutum/influxdb:0.13
+ sleep 5
+ docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE
+ elif [[ $SUITE_NAME == posca_stress_ping ]]; then
+ TEST_CASE=posca_stress_ping
+ sleep 5
+ docker exec bottleneckcompose_bottlenecks_1 python ${POSCA_SCRIPT}/run_posca.py testcase $TEST_CASE
+ fi
+
+ echo "Bottlenecks: cleaning up docker-compose images and dockers"
+ docker-compose -f $WORKSPACE/docker/bottleneck-compose/docker-compose.yml down --rmi all
+fi
\ No newline at end of file
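The unconditional curl above re-downloads docker-compose on every run; an
idempotent variant (a sketch, assuming network access to github.com and write
permission on /usr/local/bin):

    if ! command -v docker-compose >/dev/null 2>&1; then
        curl -sSL "https://github.com/docker/compose/releases/download/1.11.0/docker-compose-$(uname -s)-$(uname -m)" \
            -o /usr/local/bin/docker-compose
        chmod +x /usr/local/bin/docker-compose
    fi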
diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml
index 8800a155f..7258e89f4 100644
--- a/jjb/compass4nfv/compass-ci-jobs.yml
+++ b/jjb/compass4nfv/compass-ci-jobs.yml
@@ -41,8 +41,8 @@
#--------------------------------
# master
#--------------------------------
- - huawei-pod5:
- slave-label: '{pod}'
+ - baremetal-centos:
+ slave-label: 'intel-pod8'
os-version: 'centos7'
<<: *master
@@ -87,6 +87,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-per-node: 1
@@ -106,6 +107,7 @@
parameters:
- project-parameter:
project: '{project}'
+ branch: '{branch}'
- compass-ci-parameter:
installer: '{installer}'
gs-pathname: '{gs-pathname}'
@@ -173,6 +175,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-per-node: 1
@@ -194,7 +197,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- compass-ci-parameter:
installer: '{installer}'
@@ -218,12 +220,6 @@
- shell:
!include-raw-escape: ./compass-deploy.sh
- publishers:
- - archive:
- artifacts: 'ansible.log'
- allow-empty: 'true'
- fingerprint: true
-
########################
# parameter macros
########################
@@ -247,35 +243,35 @@
# trigger macros
########################
- trigger:
- name: 'compass-os-nosdn-nofeature-ha-huawei-pod5-master-trigger'
+ name: 'compass-os-nosdn-nofeature-ha-baremetal-centos-master-trigger'
triggers:
- timed: '0 19 * * *'
- trigger:
- name: 'compass-os-odl_l2-nofeature-ha-huawei-pod5-master-trigger'
+ name: 'compass-os-odl_l2-nofeature-ha-baremetal-centos-master-trigger'
triggers:
- timed: '0 23 * * *'
- trigger:
- name: 'compass-os-odl_l3-nofeature-ha-huawei-pod5-master-trigger'
+ name: 'compass-os-odl_l3-nofeature-ha-baremetal-centos-master-trigger'
triggers:
- timed: '0 15 * * *'
- trigger:
- name: 'compass-os-onos-nofeature-ha-huawei-pod5-master-trigger'
+ name: 'compass-os-onos-nofeature-ha-baremetal-centos-master-trigger'
triggers:
- timed: '0 7 * * *'
- trigger:
- name: 'compass-os-ocl-nofeature-ha-huawei-pod5-master-trigger'
+ name: 'compass-os-ocl-nofeature-ha-baremetal-centos-master-trigger'
triggers:
- timed: '0 11 * * *'
- trigger:
- name: 'compass-os-onos-sfc-ha-huawei-pod5-master-trigger'
+ name: 'compass-os-onos-sfc-ha-baremetal-centos-master-trigger'
triggers:
- timed: '0 3 * * *'
- trigger:
- name: 'compass-os-odl_l2-moon-ha-huawei-pod5-master-trigger'
+ name: 'compass-os-odl_l2-moon-ha-baremetal-centos-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'compass-os-nosdn-kvm-ha-huawei-pod5-master-trigger'
+ name: 'compass-os-nosdn-kvm-ha-baremetal-centos-master-trigger'
triggers:
- timed: ''
diff --git a/jjb/compass4nfv/compass-deploy.sh b/jjb/compass4nfv/compass-deploy.sh
index f89d04e6d..6696e4b3d 100644
--- a/jjb/compass4nfv/compass-deploy.sh
+++ b/jjb/compass4nfv/compass-deploy.sh
@@ -65,7 +65,4 @@ echo
echo "--------------------------------------------------------"
echo "Done!"
-ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
-sshpass -p root scp 2>/dev/null $ssh_options root@${INSTALLER_IP}:/var/ansible/run/openstack_${OPENSTACK_VERSION}-opnfv2/ansible.log ./ &> /dev/null
-
exit $deploy_ret
diff --git a/jjb/compass4nfv/compass-dovetail-jobs.yml b/jjb/compass4nfv/compass-dovetail-jobs.yml
new file mode 100644
index 000000000..d49d0ec5f
--- /dev/null
+++ b/jjb/compass4nfv/compass-dovetail-jobs.yml
@@ -0,0 +1,208 @@
+- project:
+
+ name: 'compass-dovetail-jobs'
+ installer: 'compass'
+ project: 'compass4nfv'
+#----------------------------------
+# BRANCH ANCHORS
+#----------------------------------
+ colorado: &colorado
+ stream: colorado
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: false
+ dovetail-branch: master
+#------------------------------------
+# POD, INSTALLER, AND BRANCH MAPPING
+#------------------------------------
+# CI PODs
+#------------------------------------
+ pod:
+ - baremetal:
+ slave-label: compass-baremetal
+ os-version: 'trusty'
+ <<: *colorado
+#-----------------------------------
+# scenarios
+#-----------------------------------
+ scenario:
+ - 'os-nosdn-nofeature-ha':
+ disabled: false
+ auto-trigger-name: 'compass-{scenario}-{pod}-weekly-{stream}-trigger'
+
+ jobs:
+ - 'compass-{scenario}-{pod}-weekly-{stream}'
+ - 'compass-deploy-{pod}-weekly-{stream}'
+
+########################
+# job templates
+########################
+- job-template:
+ name: 'compass-{scenario}-{pod}-weekly-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ concurrent: false
+
+ properties:
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'compass-os-.*?-{pod}-daily-.*?'
+ - 'compass-os-.*?-{pod}-weekly-.*?'
+ block-level: 'NODE'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+
+ triggers:
+ - '{auto-trigger-name}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - compass-dovetail-parameter:
+ installer: '{installer}'
+ gs-pathname: '{gs-pathname}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: '{scenario}'
+ - '{slave-label}-defaults'
+ - '{installer}-defaults'
+
+ builders:
+ - description-setter:
+ description: "POD: $NODE_NAME"
+ - trigger-builds:
+ - project: 'compass-deploy-{pod}-weekly-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO={scenario}
+ COMPASS_OS_VERSION={os-version}
+ same-node: true
+ block: true
+ - trigger-builds:
+ - project: 'dovetail-compass-{pod}-compliance_set-weekly-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'dovetail-compass-{pod}-debug-weekly-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+ - trigger-builds:
+ - project: 'dovetail-compass-{pod}-proposed_tests-weekly-{stream}'
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO={scenario}
+ block: true
+ same-node: true
+ block-thresholds:
+ build-step-failure-threshold: 'never'
+ failure-threshold: 'never'
+ unstable-threshold: 'FAILURE'
+
+- job-template:
+ name: 'compass-deploy-{pod}-weekly-{stream}'
+
+ disabled: false
+
+ concurrent: true
+
+ properties:
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 4
+ max-per-node: 1
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'compass-deploy-{pod}-daily-.*?'
+ - 'compass-deploy-{pod}-weekly-.*'
+ - 'compass-verify-deploy-.*?'
+ block-level: 'NODE'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER - Scenario: $DEPLOY_SCENARIO'
+ - timeout:
+ timeout: 120
+ abort: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - compass-dovetail-parameter:
+ installer: '{installer}'
+ gs-pathname: '{gs-pathname}'
+ - '{slave-label}-defaults'
+ - '{installer}-defaults'
+
+ scm:
+ - git-scm
+
+ builders:
+ - description-setter:
+ description: "POD: $NODE_NAME"
+ - shell:
+ !include-raw-escape: ./compass-download-artifact.sh
+ - shell:
+ !include-raw-escape: ./compass-deploy.sh
+
+########################
+# parameter macros
+########################
+- parameter:
+ name: compass-dovetail-parameter
+ parameters:
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
+ - string:
+ name: GS_URL
+ default: '$GS_BASE{gs-pathname}'
+ description: "URL to Google Storage."
+ - choice:
+ name: COMPASS_OPENSTACK_VERSION
+ choices:
+ - 'mitaka'
+
+########################
+# trigger macros
+########################
+- trigger:
+ name: 'compass-os-nosdn-nofeature-ha-baremetal-weekly-colorado-trigger'
+ triggers:
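+ # 'H H * * 0': Jenkins hashes the minute/hour per job, running once weekly on Sunday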
+ - timed: 'H H * * 0'
+
+- trigger:
+ name: 'dovetail-weekly-trigger'
+ triggers:
+ - timed: 'H H * * 0'
diff --git a/jjb/compass4nfv/compass-project-jobs.yml b/jjb/compass4nfv/compass-project-jobs.yml
index 6b4080384..ed0fee6c0 100644
--- a/jjb/compass4nfv/compass-project-jobs.yml
+++ b/jjb/compass4nfv/compass-project-jobs.yml
@@ -33,6 +33,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 1
@@ -42,7 +43,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- compass-project-parameter:
installer: '{installer}'
@@ -76,6 +76,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 1
@@ -85,7 +86,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- compass-project-parameter:
installer: '{installer}'
diff --git a/jjb/compass4nfv/compass-verify-jobs.yml b/jjb/compass4nfv/compass-verify-jobs.yml
index 1af190e6b..d58138088 100644
--- a/jjb/compass4nfv/compass-verify-jobs.yml
+++ b/jjb/compass4nfv/compass-verify-jobs.yml
@@ -51,6 +51,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 4
@@ -102,7 +103,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'compass-virtual-defaults'
- '{installer}-defaults'
@@ -163,6 +163,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-per-node: 1
@@ -189,11 +190,6 @@
description: "Built on $NODE_NAME"
- '{project}-verify-{phase}-macro'
- publishers:
- - archive:
- artifacts: 'ansible.log'
- allow-empty: 'true'
- fingerprint: true
#####################################
# builder macros
#####################################
diff --git a/jjb/conductor/conductor.yml b/jjb/conductor/conductor.yml
index fccd53e7f..1d47624e1 100644
--- a/jjb/conductor/conductor.yml
+++ b/jjb/conductor/conductor.yml
@@ -28,7 +28,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
diff --git a/jjb/copper/copper.yml b/jjb/copper/copper.yml
index 24f65a358..b65466e01 100644
--- a/jjb/copper/copper.yml
+++ b/jjb/copper/copper.yml
@@ -28,7 +28,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
@@ -60,4 +59,9 @@
builders:
- shell: |
- echo "Nothing to verify!"
+ #!/bin/bash
+ set -o errexit
+ set -o nounset
+ set -o pipefail
+
+ shellcheck -f tty tests/*.sh
diff --git a/jjb/cperf/cperf-ci-jobs.yml b/jjb/cperf/cperf-ci-jobs.yml
index 4ffc3b013..125937e80 100644
--- a/jjb/cperf/cperf-ci-jobs.yml
+++ b/jjb/cperf/cperf-ci-jobs.yml
@@ -42,6 +42,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-per-node: 1
@@ -57,7 +58,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{pod}-defaults'
- '{installer}-defaults'
diff --git a/jjb/daisy4nfv/daisy-deploy.sh b/jjb/daisy4nfv/daisy-deploy.sh
new file mode 100755
index 000000000..b303c2c05
--- /dev/null
+++ b/jjb/daisy4nfv/daisy-deploy.sh
@@ -0,0 +1,60 @@
+#!/bin/bash
+set -o nounset
+set -o pipefail
+
+echo "--------------------------------------------------------"
+echo "This is $INSTALLER_TYPE deploy job!"
+echo "--------------------------------------------------------"
+
+DEPLOY_SCENARIO=${DEPLOY_SCENARIO:-"os-nosdn-nofeature-ha"}
+BRIDGE=${BRIDGE:-pxebr}
+LAB_NAME=${NODE_NAME/-*}
+POD_NAME=${NODE_NAME/*-}
+deploy_ret=0
+
+if [[ ! "$NODE_NAME" =~ "-virtual" ]] && [[ ! "$LAB_NAME" =~ (zte) ]]; then
+ echo "Unsupported lab $LAB_NAME for now, Cannot continue!"
+ exit $deploy_ret
+fi
+
+# clone the securedlab repo
+cd $WORKSPACE
+BASE_DIR=$(cd ./;pwd)
+
+echo "Cloning securedlab repo $BRANCH"
+git clone ssh://jenkins-ericsson@gerrit.opnfv.org:29418/securedlab --quiet \
+ --branch $BRANCH
+
+DEPLOY_COMMAND="sudo ./ci/deploy/deploy.sh -b $BASE_DIR \
+ -l $LAB_NAME -p $POD_NAME -B $BRIDGE"
+
+# log info to console
+echo """
+Deployment parameters
+--------------------------------------------------------
+Scenario: $DEPLOY_SCENARIO
+LAB: $LAB_NAME
+POD: $POD_NAME
+BRIDGE: $BRIDGE
+BASE_DIR: $BASE_DIR
+
+Starting the deployment using $INSTALLER_TYPE. This could take some time...
+--------------------------------------------------------
+Issuing command
+$DEPLOY_COMMAND
+"""
+
+# start the deployment
+#$DEPLOY_COMMAND
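+# NOTE: while the command above stays commented out, "$?" below reflects the
+# preceding echo, so the failure branch can never fire.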
+
+if [ $? -ne 0 ]; then
+ echo
+ echo "Depolyment failed!"
+ deploy_ret=1
+else
+ echo
+ echo "--------------------------------------------------------"
+ echo "Deployment done!"
+fi
+
+exit $deploy_ret
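Once the deploy step is re-enabled, testing the command directly avoids the
stale-$? pitfall noted above -- a sketch:

    if ! $DEPLOY_COMMAND; then
        echo "Deployment failed!"
        deploy_ret=1
    else
        echo "Deployment done!"
    fi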
diff --git a/jjb/daisy4nfv/daisy-project-jobs.yml b/jjb/daisy4nfv/daisy-project-jobs.yml
new file mode 100644
index 000000000..0127ed094
--- /dev/null
+++ b/jjb/daisy4nfv/daisy-project-jobs.yml
@@ -0,0 +1,232 @@
+######################################################################
+# Add daily jobs for building, deploying and testing
+# TODO:
+# - [ ] Add yardstick and functest for test stage
+# - [x] Use daisy-baremetal-defaults for choosing baremetal deployment
+######################################################################
+
+#############################
+# Job configuration for daisy
+#############################
+- project:
+ name: daisy-project-jobs
+
+ project: 'daisy'
+
+ installer: 'daisy'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+ - danube:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: true
+
+ phase:
+ - 'build':
+ slave-label: 'opnfv-build-centos'
+ - 'deploy':
+ slave-label: 'daisy-baremetal'
+ - 'test':
+ slave-label: 'opnfv-build-centos'
+ jobs:
+ - '{installer}-daily-{stream}'
+ - '{installer}-{phase}-daily-{stream}'
+
+########################
+# job templates
+########################
+- job-template:
+ name: '{installer}-daily-{stream}'
+
+ project-type: multijob
+
+ disabled: false
+
+ concurrent: true
+
+ properties:
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 4
+ option: 'project'
+
+ scm:
+ - git-scm
+
+ triggers:
+ - timed: '0 H/8 * * *'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-centos-defaults'
+ - 'daisy-defaults'
+ - '{installer}-project-parameter':
+ gs-pathname: '{gs-pathname}'
+
+ wrappers:
+ - ssh-agent-wrapper
+ - timeout:
+ timeout: 360
+ fail: true
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - multijob:
+ name: build
+ condition: SUCCESSFUL
+ projects:
+ - name: '{installer}-build-daily-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ BRANCH=$BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: deploy
+ condition: SUCCESSFUL
+ projects:
+ - name: '{installer}-deploy-daily-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ BRANCH=$BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: test
+ condition: SUCCESSFUL
+ projects:
+ - name: '{installer}-test-daily-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ BRANCH=$BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: true
+
+ publishers:
+ - '{installer}-recipients'
+
+- job-template:
+ name: '{installer}-{phase}-daily-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ concurrent: true
+
+ properties:
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-total: 6
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - '{installer}-.*deploy-.*'
+ block-level: 'NODE'
+
+ scm:
+ - git-scm
+
+ wrappers:
+ - ssh-agent-wrapper
+ - timeout:
+ timeout: 360
+ fail: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'daisy-defaults'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-nosdn-nofeature-ha'
+ - '{slave-label}-defaults'
+ - '{installer}-project-parameter':
+ gs-pathname: '{gs-pathname}'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - '{installer}-{phase}-daily-macro'
+
+#####################################
+# builder macros
+#####################################
+- builder:
+ name: 'daisy-build-daily-macro'
+ builders:
+ - shell:
+ !include-raw: ./daisy4nfv-basic.sh
+ - shell:
+ !include-raw: ./daisy4nfv-build.sh
+ - shell:
+ !include-raw: ./daisy4nfv-upload-artifact.sh
+ - shell:
+ !include-raw: ./daisy4nfv-workspace-cleanup.sh
+
+- builder:
+ name: 'daisy-deploy-daily-macro'
+ builders:
+ - shell:
+ !include-raw: ./daisy4nfv-download-artifact.sh
+ - shell:
+ !include-raw: ./daisy-deploy.sh
+
+- builder:
+ name: 'daisy-test-daily-macro'
+ builders:
+ - shell: |
+ #!/bin/bash
+
+ echo "Not activated!"
+
+#####################################
+# parameter macros
+#####################################
+- publisher:
+ name: 'daisy-recipients'
+ publishers:
+ - email:
+ recipients: hu.zhijiang@zte.com.cn lu.yao135@zte.com.cn zhou.ya@zte.com.cn yangyang1@zte.com.cn julienjut@gmail.com
+
+- parameter:
+ name: 'daisy-project-parameter'
+ parameters:
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
+ - string:
+ name: CACHE_DIRECTORY
+ default: $HOME/opnfv/cache/$INSTALLER_TYPE
+ description: "Directory where the cache to be used during the build is located."
+ - string:
+ name: GS_URL
+ default: artifacts.opnfv.org/$PROJECT{gs-pathname}
+ description: "URL to Google Storage."
diff --git a/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml b/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml
index b7a5fec92..95d851cca 100644
--- a/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml
+++ b/jjb/daisy4nfv/daisy4nfv-merge-jobs.yml
@@ -2,6 +2,14 @@
name: 'daisy4nfv-merge-jobs'
project: 'daisy'
+
+ installer: 'daisy'
+
+###########################################################
+# use an alias to keep the existing job names unchanged
+###########################################################
+ alias: 'daisy4nfv'
+
#####################################
# branch definitions
#####################################
@@ -10,31 +18,29 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
+ - danube:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: true
#####################################
# patch merge phases
#####################################
phase:
- - 'basic':
- slave-label: 'opnfv-build-centos'
- 'build':
slave-label: 'opnfv-build-centos'
- 'deploy-virtual':
slave-label: 'opnfv-build-centos'
- - 'smoke-test':
- slave-label: 'opnfv-build-centos'
- - 'promote':
- slave-label: 'opnfv-build-centos'
#####################################
# jobs
#####################################
jobs:
- - 'daisy4nfv-merge-{stream}'
- - 'daisy4nfv-merge-{phase}-{stream}'
+ - '{alias}-merge-{stream}'
+ - '{alias}-merge-{phase}-{stream}'
#####################################
# job templates
#####################################
- job-template:
- name: 'daisy4nfv-merge-{stream}'
+ name: '{alias}-merge-{stream}'
project-type: multijob
@@ -43,13 +49,14 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 4
option: 'project'
scm:
- - git-scm-gerrit
+ - git-scm
wrappers:
- ssh-agent-wrapper
@@ -65,50 +72,44 @@
- comment-added-contains-event:
comment-contains-value: 'remerge'
projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: ANT
+ pattern: 'ci/**'
+ - compare-type: ANT
+ pattern: 'code/**'
+ - compare-type: ANT
+ pattern: 'deploy/**'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**'
+ - compare-type: ANT
+ pattern: '.gitignore'
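+ # (patterns split out from the combined 'docs/**|.gitignore' form,
+ # since ANT globs do not support '|' alternation)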
readable-message: true
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
- - 'daisy4nfv-merge-defaults':
+ - 'opnfv-build-centos-defaults'
+ - '{alias}-merge-defaults':
gs-pathname: '{gs-pathname}'
builders:
- description-setter:
description: "Built on $NODE_NAME"
- multijob:
- name: basic
- condition: SUCCESSFUL
- projects:
- - name: 'daisy4nfv-merge-basic-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
name: build
condition: SUCCESSFUL
projects:
- - name: 'daisy4nfv-merge-build-{stream}'
+ - name: '{alias}-merge-build-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -119,38 +120,10 @@
name: deploy-virtual
condition: SUCCESSFUL
projects:
- - name: 'daisy4nfv-merge-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: smoke-test
- condition: SUCCESSFUL
- projects:
- - name: 'daisy4nfv-merge-smoke-test-{stream}'
+ - name: '{alias}-merge-deploy-virtual-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: promote
- condition: SUCCESSFUL
- projects:
- - name: 'daisy4nfv-merge-promote-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -159,26 +132,26 @@
abort-all-job: true
- job-template:
- name: 'daisy4nfv-merge-{phase}-{stream}'
+ name: '{alias}-merge-{phase}-{stream}'
disabled: '{obj:disabled}'
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
- max-total: 6
+ max-total: 4
option: 'project'
- build-blocker:
use-build-blocker: true
blocking-jobs:
- - 'daisy4nfv-merge-deploy-.*'
- - 'daisy4nfv-merge-test-.*'
+ - '{alias}-merge-deploy-.*'
block-level: 'NODE'
scm:
- - git-scm-gerrit
+ - git-scm
wrappers:
- ssh-agent-wrapper
@@ -189,58 +162,41 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{slave-label}-defaults'
- - 'daisy4nfv-merge-defaults':
+ - '{alias}-merge-defaults':
gs-pathname: '{gs-pathname}'
builders:
- description-setter:
description: "Built on $NODE_NAME"
- '{project}-merge-{phase}-macro'
+
#####################################
# builder macros
#####################################
- builder:
- name: 'daisy-merge-basic-macro'
+ name: 'daisy-merge-build-macro'
builders:
- shell:
!include-raw: ./daisy4nfv-basic.sh
-
-- builder:
- name: 'daisy-merge-build-macro'
- builders:
- shell:
- !include-raw:
- - ./daisy4nfv-build.sh
- - ./daisy4nfv-upload-artifact.sh
- - ./daisy4nfv-workspace-cleanup.sh
+ !include-raw: ./daisy4nfv-build.sh
+ - shell:
+ !include-raw: ./daisy4nfv-upload-artifact.sh
+ - shell:
+ !include-raw: ./daisy4nfv-workspace-cleanup.sh
- builder:
name: 'daisy-merge-deploy-virtual-macro'
builders:
- shell:
- !include-raw:
- - ./daisy4nfv-download-artifact.sh
- - ./daisy4nfv-virtual-deploy.sh
- - ./daisy4nfv-workspace-cleanup.sh
-
-- builder:
- name: 'daisy-merge-smoke-test-macro'
- builders:
- - shell: |
- #!/bin/bash
-
- echo "Not activated!"
-
-- builder:
- name: 'daisy-merge-promote-macro'
- builders:
- - shell: |
- #!/bin/bash
+ !include-raw: ./daisy4nfv-download-artifact.sh
+ - shell:
+ !include-raw: ./daisy-deploy.sh
+ - shell:
+ !include-raw: ./daisy4nfv-workspace-cleanup.sh
- echo "Not activated!"
#####################################
# parameter macros
#####################################
diff --git a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
index cba22643c..ee82c14b2 100644
--- a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
+++ b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
@@ -2,6 +2,14 @@
name: 'daisy4nfv-verify-jobs'
project: 'daisy'
+
+ installer: 'daisy'
+
+##########################################################
+# use an alias to keep the existing job names unchanged
+##########################################################
+ alias: 'daisy4nfv'
+
#####################################
# branch definitions
#####################################
@@ -10,29 +18,27 @@
branch: '{stream}'
gs-pathname: ''
disabled: false
+ - danube:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: true
#####################################
# patch verification phases
#####################################
phase:
- - 'basic':
- slave-label: 'opnfv-build-centos'
- 'build':
slave-label: 'opnfv-build-centos'
- - 'deploy-virtual':
- slave-label: 'opnfv-build'
- - 'smoke-test':
- slave-label: 'opnfv-build'
#####################################
# jobs
#####################################
jobs:
- - 'daisy4nfv-verify-{stream}'
- - 'daisy4nfv-verify-{phase}-{stream}'
+ - '{alias}-verify-{stream}'
+ - '{alias}-verify-{phase}-{stream}'
#####################################
# job templates
#####################################
- job-template:
- name: 'daisy4nfv-verify-{stream}'
+ name: '{alias}-verify-{stream}'
project-type: multijob
@@ -41,13 +47,14 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 4
option: 'project'
scm:
- - git-scm-gerrit
+ - git-scm
wrappers:
- ssh-agent-wrapper
@@ -69,78 +76,44 @@
- comment-added-contains-event:
comment-contains-value: 'reverify'
projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: ANT
+ pattern: 'ci/**'
+ - compare-type: ANT
+ pattern: 'code/**'
+ - compare-type: ANT
+ pattern: 'deploy/**'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**'
+ - compare-type: ANT
+ pattern: '.gitignore'
readable-message: true
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- - 'opnfv-build-defaults'
- - 'daisy4nfv-verify-defaults':
+ - 'opnfv-build-centos-defaults'
+ - '{alias}-verify-defaults':
gs-pathname: '{gs-pathname}'
builders:
- description-setter:
description: "Built on $NODE_NAME"
- multijob:
- name: basic
- condition: SUCCESSFUL
- projects:
- - name: 'daisy4nfv-verify-basic-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
name: build
condition: SUCCESSFUL
projects:
- - name: 'daisy4nfv-verify-build-{stream}'
+ - name: '{alias}-verify-build-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: deploy-virtual
- condition: SUCCESSFUL
- projects:
- - name: 'daisy4nfv-verify-deploy-virtual-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
- GERRIT_REFSPEC=$GERRIT_REFSPEC
- GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
- GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
- node-parameters: false
- kill-phase-on: FAILURE
- abort-all-job: true
- - multijob:
- name: smoke-test
- condition: SUCCESSFUL
- projects:
- - name: 'daisy4nfv-verify-smoke-test-{stream}'
- current-parameters: false
- predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -149,13 +122,14 @@
abort-all-job: true
- job-template:
- name: 'daisy4nfv-verify-{phase}-{stream}'
+ name: '{alias}-verify-{phase}-{stream}'
disabled: '{obj:disabled}'
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 6
@@ -163,12 +137,11 @@
- build-blocker:
use-build-blocker: true
blocking-jobs:
- - 'daisy4nfv-verify-deploy-.*'
- - 'daisy4nfv-verify-test-.*'
+ - '{alias}-verify-deploy-.*'
block-level: 'NODE'
scm:
- - git-scm-gerrit
+ - git-scm
wrappers:
- ssh-agent-wrapper
@@ -179,44 +152,29 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{slave-label}-defaults'
- - 'daisy4nfv-verify-defaults':
+ - '{alias}-verify-defaults':
gs-pathname: '{gs-pathname}'
builders:
- description-setter:
description: "Built on $NODE_NAME"
- '{project}-verify-{phase}-macro'
+
#####################################
# builder macros
#####################################
- builder:
- name: 'daisy-verify-basic-macro'
+ name: 'daisy-verify-build-macro'
builders:
- shell:
!include-raw: ./daisy4nfv-basic.sh
-
-- builder:
- name: 'daisy-verify-build-macro'
- builders:
- shell:
!include-raw: ./daisy4nfv-build.sh
-
-- builder:
- name: 'daisy-verify-deploy-virtual-macro'
- builders:
- shell:
- !include-raw: ./daisy4nfv-virtual-deploy.sh
-
-- builder:
- name: 'daisy-verify-smoke-test-macro'
- builders:
- - shell: |
- #!/bin/bash
+ !include-raw: ./daisy4nfv-workspace-cleanup.sh
- echo "Not activated!"
#####################################
# parameter macros
#####################################
diff --git a/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh b/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh
deleted file mode 100755
index 4aa7b0bd5..000000000
--- a/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-
-echo "--------------------------------------------------------"
-echo "This is diasy4nfv virtual deploy job!"
-echo "--------------------------------------------------------"
-
-cd $WORKSPACE
-
-if [[ "$NODE_NAME" =~ "-virtual" ]]; then
- export NETWORK_CONF=./deploy/config/vm_environment/$NODE_NAME/network.yml
- export DHA_CONF=./deploy/config/vm_environment/$NODE_NAME/deploy.yml
-else
- # TODO: For the time being, we need to pass this script to let contributors merge their work.
- echo "No support for non-virtual node"
- exit 0
-fi
-
-./ci/deploy/deploy.sh ${DHA_CONF} ${NETWORK_CONF}
-
-if [ $? -ne 0 ]; then
- echo "depolyment failed!"
- deploy_ret=1
-fi
-
-echo
-echo "--------------------------------------------------------"
-echo "Done!"
-
-exit $deploy_ret
diff --git a/jjb/doctor/doctor.yml b/jjb/doctor/doctor.yml
index 62f89e686..28888d673 100644
--- a/jjb/doctor/doctor.yml
+++ b/jjb/doctor/doctor.yml
@@ -7,7 +7,7 @@
- master:
branch: '{stream}'
gs-pathname: ''
- docker-tag: 'master'
+ docker-tag: 'latest'
disabled: false
- danube:
branch: 'stable/{stream}'
@@ -19,6 +19,12 @@
- apex:
slave-label: 'ool-virtual1'
pod: 'ool-virtual1'
+ - fuel:
+ slave-label: 'ool-virtual2'
+ pod: 'ool-virtual2'
+ #- joid:
+ # slave-label: 'ool-virtual3'
+ # pod: 'ool-virtual3'
inspector:
- 'sample'
@@ -34,7 +40,7 @@
jobs:
- 'doctor-verify-{stream}'
- - 'doctor-{task}-{installer}-{inspector}-{stream}':
+ - 'doctor-{task}-{installer}-{inspector}-{stream}'
- job-template:
name: 'doctor-verify-{stream}'
@@ -44,7 +50,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
@@ -85,17 +90,13 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- string:
name: OS_CREDS
default: /home/jenkins/openstack.creds
description: 'OpenStack credentials'
- '{slave-label}-defaults'
- - string:
- name: INSTALLER_TYPE
- default: '{installer}'
- description: 'Installer used for deploying OPNFV on this POD'
+ - '{installer}-defaults'
- string:
name: DOCKER_TAG
default: '{docker-tag}'
@@ -143,6 +144,7 @@
branch: '{branch}'
builders:
+ - 'clean-workspace-log'
- 'functest-suite-builder'
- shell: |
functest_log="$HOME/opnfv/functest/results/{stream}/{project}.log"
diff --git a/jjb/domino/domino.yml b/jjb/domino/domino.yml
index 532fce687..5fd9db3f1 100644
--- a/jjb/domino/domino.yml
+++ b/jjb/domino/domino.yml
@@ -24,7 +24,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
diff --git a/jjb/dovetail/dovetail-artifacts-upload.yml b/jjb/dovetail/dovetail-artifacts-upload.yml
index dc2ae5aa2..3d9af5ed7 100644
--- a/jjb/dovetail/dovetail-artifacts-upload.yml
+++ b/jjb/dovetail/dovetail-artifacts-upload.yml
@@ -33,6 +33,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 1
@@ -42,7 +43,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
- dovetail-parameter:
diff --git a/jjb/dovetail/dovetail-ci-jobs.yml b/jjb/dovetail/dovetail-ci-jobs.yml
index 08eb5a5f1..e2a334d40 100644
--- a/jjb/dovetail/dovetail-ci-jobs.yml
+++ b/jjb/dovetail/dovetail-ci-jobs.yml
@@ -126,8 +126,8 @@
#--------------------------------
# None-CI PODs
#--------------------------------
- - huawei-pod5:
- slave-label: '{pod}'
+ - baremetal-centos:
+ slave-label: 'intel-pod8'
SUT: compass
auto-trigger-name: 'daily-trigger-disabled'
<<: *master
@@ -161,6 +161,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-per-node: 1
@@ -179,8 +180,7 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
- branch: '{branch}'
+ branch: '{dovetail-branch}'
- '{SUT}-defaults'
- '{slave-label}-defaults'
- string:
diff --git a/jjb/dovetail/dovetail-project-jobs.yml b/jjb/dovetail/dovetail-project-jobs.yml
index 904841396..9dc4808b4 100644
--- a/jjb/dovetail/dovetail-project-jobs.yml
+++ b/jjb/dovetail/dovetail-project-jobs.yml
@@ -28,7 +28,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
@@ -65,7 +64,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
diff --git a/jjb/dovetail/dovetail-weekly-jobs.yml b/jjb/dovetail/dovetail-weekly-jobs.yml
new file mode 100644
index 000000000..8edce4246
--- /dev/null
+++ b/jjb/dovetail/dovetail-weekly-jobs.yml
@@ -0,0 +1,135 @@
+- project:
+ name: dovetail-weekly-jobs
+ project: dovetail
+#--------------------------------
+# BRANCH ANCHORS
+#--------------------------------
+ master: &master
+ stream: master
+ branch: '{stream}'
+ dovetail-branch: '{stream}'
+ gs-pathname: ''
+ docker-tag: 'latest'
+ colorado: &colorado
+ stream: colorado
+ branch: 'stable/{stream}'
+ dovetail-branch: master
+ gs-pathname: '/{stream}'
+ docker-tag: 'latest'
+
+#--------------------------------
+# POD, INSTALLER, AND BRANCH MAPPING
+#--------------------------------
+# Installers using labels
+# CI PODs
+# This section should only contain the installers
+# that have been switched using labels for slaves
+#--------------------------------
+ pod:
+# - baremetal:
+# slave-label: apex-baremetal
+# sut: apex
+# <<: *colorado
+ - baremetal:
+ slave-label: compass-baremetal
+ sut: compass
+ <<: *colorado
+# - baremetal:
+# slave-label: fuel-baremetal
+# sut: fuel
+# <<: *master
+# - baremetal:
+# slave-label: joid-baremetal
+# sut: joid
+# <<: *colorado
+
+ testsuite:
+ - 'debug'
+ - 'proposed_tests'
+ - 'compliance_set'
+
+ loop:
+ - 'weekly':
+ job-timeout: 60
+
+ jobs:
+ - 'dovetail-{sut}-{pod}-{testsuite}-{loop}-{stream}'
+
+################################
+# job template
+################################
+- job-template:
+ name: 'dovetail-{sut}-{pod}-{testsuite}-{loop}-{stream}'
+
+ disabled: false
+
+ concurrent: true
+
+ properties:
+ - logrotate-default
+ - throttle:
+ enabled: true
+ max-per-node: 1
+ option: 'project'
+
+ wrappers:
+ - build-name:
+ name: '$BUILD_NUMBER Scenario: $DEPLOY_SCENARIO'
+ - timeout:
+ timeout: '{job-timeout}'
+ abort: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{dovetail-branch}'
+ - '{sut}-defaults'
+ - '{slave-label}-defaults'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-nosdn-nofeature-ha'
+ - string:
+ name: DOCKER_TAG
+ default: '{docker-tag}'
+ description: 'Tag to pull dovetail docker image'
+ - string:
+ name: CI_DEBUG
+ default: 'true'
+ description: "Show debug output information"
+ - string:
+ name: TESTSUITE
+ default: '{testsuite}'
+ description: "dovetail testsuite to run"
+ - string:
+ name: DOVETAIL_REPO_DIR
+ default: "/home/opnfv/dovetail"
+ description: "Directory where the dovetail repository is cloned"
+
+ scm:
+ - git-scm
+
+ builders:
+ - description-setter:
+ description: "POD: $NODE_NAME"
+ - 'dovetail-cleanup'
+ - 'dovetail-run'
+
+ publishers:
+ - archive:
+ artifacts: 'results/**/*'
+ allow-empty: true
+ fingerprint: true
+
+########################
+# builder macros
+########################
+- builder:
+ name: dovetail-run-weekly
+ builders:
+ - shell:
+ !include-raw: ./dovetail-run.sh
+- builder:
+ name: dovetail-cleanup-weekly
+ builders:
+ - shell:
+ !include-raw: ./dovetail-cleanup.sh
diff --git a/jjb/dpacc/dpacc.yml b/jjb/dpacc/dpacc.yml
index 27e663507..bc61d7447 100644
--- a/jjb/dpacc/dpacc.yml
+++ b/jjb/dpacc/dpacc.yml
@@ -28,7 +28,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
diff --git a/jjb/escalator/escalator.yml b/jjb/escalator/escalator.yml
index 4149ee93c..2265dafce 100644
--- a/jjb/escalator/escalator.yml
+++ b/jjb/escalator/escalator.yml
@@ -39,6 +39,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 4
@@ -80,7 +81,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-defaults'
- 'escalator-defaults':
@@ -96,7 +96,7 @@
- name: 'escalator-verify-basic-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -110,7 +110,7 @@
- name: 'escalator-verify-build-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -137,7 +137,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{slave-label}-defaults'
- 'escalator-defaults':
@@ -158,6 +157,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 4
@@ -193,7 +193,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-defaults'
- 'escalator-defaults':
@@ -209,7 +208,7 @@
- name: 'escalator-merge-basic-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -223,7 +222,7 @@
- name: 'escalator-merge-build-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -239,7 +238,7 @@
concurrent: true
scm:
- - git-scm-gerrit
+ - git-scm
wrappers:
- ssh-agent-wrapper
@@ -250,7 +249,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{slave-label}-defaults'
- 'escalator-defaults':
diff --git a/jjb/fuel/fuel-daily-jobs.yml b/jjb/fuel/fuel-daily-jobs.yml
index cd22c1ac2..237855236 100644
--- a/jjb/fuel/fuel-daily-jobs.yml
+++ b/jjb/fuel/fuel-daily-jobs.yml
@@ -81,6 +81,10 @@
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-nosdn-kvm_ovs-ha':
auto-trigger-name: 'daily-trigger-disabled'
+ - 'os-nosdn-kvm_ovs_dpdk-ha':
+ auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
+ - 'os-nosdn-kvm_ovs_dpdk_bar-ha':
+ auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
# NOHA scenarios
- 'os-nosdn-nofeature-noha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
@@ -100,6 +104,10 @@
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
- 'os-nosdn-ovs-noha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
+ - 'os-nosdn-kvm_ovs_dpdk-noha':
+ auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
+ - 'os-nosdn-kvm_ovs_dpdk_bar-noha':
+ auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
jobs:
- 'fuel-{scenario}-{pod}-daily-{stream}'
@@ -116,6 +124,7 @@
concurrent: false
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 4
@@ -137,6 +146,7 @@
parameters:
- project-parameter:
project: '{project}'
+ branch: '{branch}'
- '{installer}-defaults'
- '{slave-label}-defaults':
installer: '{installer}'
@@ -191,6 +201,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 4
@@ -206,7 +217,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{installer}-defaults'
- '{slave-label}-defaults':
@@ -238,7 +248,7 @@
publishers:
- email:
- recipients: jonas.bjurel@ericsson.com stefan.k.berg@ericsson.com peter.barabas@ericsson.com fzhadaev@mirantis.com
+ recipients: peter.barabas@ericsson.com fzhadaev@mirantis.com
########################
# parameter macros
@@ -272,11 +282,15 @@
- trigger:
name: 'fuel-os-odl_l2-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '' # '5 23 * * *'
+ - timed: '5 23 * * *'
- trigger:
name: 'fuel-os-odl_l3-nofeature-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '' # '5 2 * * *'
+ - timed: '5 2 * * *'
+- trigger:
+ name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-master-trigger'
+ triggers:
+ - timed: '5 5 * * *'
- trigger:
name: 'fuel-os-onos-sfc-ha-baremetal-daily-master-trigger'
triggers:
@@ -288,20 +302,23 @@
- trigger:
name: 'fuel-os-odl_l2-sfc-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '' # '5 11 * * *'
+ - timed: '5 11 * * *'
- trigger:
name: 'fuel-os-odl_l2-bgpvpn-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '' # '5 14 * * *'
+ - timed: '5 14 * * *'
- trigger:
name: 'fuel-os-nosdn-kvm-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '' # '5 17 * * *'
+ - timed: '5 17 * * *'
- trigger:
- name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-master-trigger'
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-baremetal-daily-master-trigger'
triggers:
- - timed: '5 20 * * *'
-
+ - timed: '30 12 * * *'
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-baremetal-daily-master-trigger'
+ triggers:
+ - timed: '30 8 * * *'
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
@@ -339,6 +356,14 @@
name: 'fuel-os-nosdn-ovs-noha-baremetal-daily-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-baremetal-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-baremetal-daily-master-trigger'
+ triggers:
+ - timed: ''
#-----------------------------------------------
# Triggers for job running on fuel-baremetal against danube branch
#-----------------------------------------------
@@ -379,7 +404,14 @@
name: 'fuel-os-nosdn-ovs-ha-baremetal-daily-danube-trigger'
triggers:
- timed: '0 20 * * *'
-
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-baremetal-daily-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-baremetal-daily-danube-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-danube-trigger'
@@ -417,6 +449,14 @@
name: 'fuel-os-nosdn-ovs-noha-baremetal-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-baremetal-daily-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-baremetal-daily-danube-trigger'
+ triggers:
+ - timed: ''
#-----------------------------------------------
# Triggers for job running on fuel-virtual against master branch
#-----------------------------------------------
@@ -456,6 +496,14 @@
name: 'fuel-os-nosdn-ovs-ha-virtual-daily-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-virtual-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-virtual-daily-master-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-master-trigger'
@@ -472,11 +520,11 @@
- trigger:
name: 'fuel-os-onos-sfc-noha-virtual-daily-master-trigger'
triggers:
- - timed: '35 20 * * *'
+ - timed: '' # '35 20 * * *'
- trigger:
name: 'fuel-os-onos-nofeature-noha-virtual-daily-master-trigger'
triggers:
- - timed: '5 23 * * *'
+ - timed: '' # '5 23 * * *'
- trigger:
name: 'fuel-os-odl_l2-sfc-noha-virtual-daily-master-trigger'
triggers:
@@ -493,6 +541,14 @@
name: 'fuel-os-nosdn-ovs-noha-virtual-daily-master-trigger'
triggers:
- timed: '5 9 * * *'
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-master-trigger'
+ triggers:
+ - timed: '30 16 * * *'
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-virtual-daily-master-trigger'
+ triggers:
+ - timed: '30 20 * * *'
#-----------------------------------------------
# Triggers for job running on fuel-virtual against danube branch
#-----------------------------------------------
@@ -532,6 +588,14 @@
name: 'fuel-os-nosdn-ovs-ha-virtual-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-virtual-daily-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-virtual-daily-danube-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-danube-trigger'
@@ -569,6 +633,14 @@
name: 'fuel-os-nosdn-ovs-noha-virtual-daily-danube-trigger'
triggers:
- timed: '0 9 * * *'
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-virtual-daily-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-virtual-daily-danube-trigger'
+ triggers:
+ - timed: ''
#-----------------------------------------------
# ZTE POD1 Triggers running against master branch
#-----------------------------------------------
@@ -608,6 +680,14 @@
name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod1-daily-master-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-master-trigger'
@@ -645,6 +725,14 @@
name: 'fuel-os-nosdn-ovs-noha-zte-pod1-daily-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod1-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-master-trigger'
+ triggers:
+ - timed: ''
#-----------------------------------------------
# ZTE POD2 Triggers running against master branch
@@ -685,6 +773,14 @@
name: 'fuel-os-nosdn-ovs-ha-zte-pod2-daily-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-zte-pod2-daily-master-trigger'
@@ -722,6 +818,14 @@
name: 'fuel-os-nosdn-ovs-noha-zte-pod2-daily-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
#-----------------------------------------------
# ZTE POD3 Triggers running against master branch
#-----------------------------------------------
@@ -761,6 +865,14 @@
name: 'fuel-os-nosdn-ovs-ha-zte-pod3-daily-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod3-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod3-daily-master-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-zte-pod3-daily-master-trigger'
@@ -798,6 +910,14 @@
name: 'fuel-os-nosdn-ovs-noha-zte-pod3-daily-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod3-daily-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod3-daily-master-trigger'
+ triggers:
+ - timed: ''
#-----------------------------------------------
# ZTE POD1 Triggers running against danube branch
#-----------------------------------------------
@@ -837,6 +957,14 @@
name: 'fuel-os-nosdn-ovs-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod1-daily-danube-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-danube-trigger'
@@ -874,6 +1002,14 @@
name: 'fuel-os-nosdn-ovs-noha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod1-daily-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod1-daily-danube-trigger'
+ triggers:
+ - timed: ''
#-----------------------------------------------
# ZTE POD2 Triggers running against danube branch
@@ -914,6 +1050,14 @@
name: 'fuel-os-nosdn-ovs-ha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod2-daily-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod2-daily-danube-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-zte-pod2-daily-danube-trigger'
@@ -951,6 +1095,14 @@
name: 'fuel-os-nosdn-ovs-noha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod2-daily-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod2-daily-danube-trigger'
+ triggers:
+ - timed: ''
#-----------------------------------------------
# ZTE POD3 Triggers running against danube branch
#-----------------------------------------------
@@ -990,6 +1142,14 @@
name: 'fuel-os-nosdn-ovs-ha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod3-daily-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod3-daily-danube-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-zte-pod3-daily-danube-trigger'
@@ -1027,3 +1187,11 @@
name: 'fuel-os-nosdn-ovs-noha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk-noha-zte-pod3-daily-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-noha-zte-pod3-daily-danube-trigger'
+ triggers:
+ - timed: ''
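
The timed fields above use Jenkins cron syntax (minute hour day-of-month month day-of-week), and an empty string disables a trigger. The newly enabled kvm_ovs_dpdk schedules are staggered so scenarios never compete for the same baremetal POD; a small sketch (hypothetical helper, not part of this patch) that decodes the start times:

    #!/bin/bash
    set -f   # keep the '*' fields from glob-expanding
    for spec in '5 23 * * *' '30 12 * * *' '30 8 * * *'; do
        set -- $spec
        printf "'%s' fires daily at %02d:%02d\n" "$spec" "$2" "$1"
    done
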
diff --git a/jjb/fuel/fuel-deploy.sh b/jjb/fuel/fuel-deploy.sh
index 48b1dac2f..4efccd611 100755
--- a/jjb/fuel/fuel-deploy.sh
+++ b/jjb/fuel/fuel-deploy.sh
@@ -57,9 +57,9 @@ chmod a+x $TMPDIR
# clone the securedlab repo
cd $WORKSPACE
-echo "Cloning securedlab repo ${GIT_BRANCH##origin/}"
+echo "Cloning securedlab repo $BRANCH"
git clone ssh://jenkins-ericsson@gerrit.opnfv.org:29418/securedlab --quiet \
- --branch ${GIT_BRANCH##origin/}
+ --branch $BRANCH
# log file name
FUEL_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}.log.tar.gz"
diff --git a/jjb/fuel/fuel-download-artifact.sh b/jjb/fuel/fuel-download-artifact.sh
index 2a0f09a3f..8cc552e8d 100755
--- a/jjb/fuel/fuel-download-artifact.sh
+++ b/jjb/fuel/fuel-download-artifact.sh
@@ -36,7 +36,7 @@ echo "Using $OPNFV_ARTIFACT for deployment"
# using ISOs for verify & merge jobs from local storage will be enabled later
if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
# check if we already have the ISO to avoid redownload
- ISOSTORE="/iso_mount/opnfv_ci/${GIT_BRANCH##*/}"
+ ISOSTORE="/iso_mount/opnfv_ci/${BRANCH##*/}"
if [[ -f "$ISOSTORE/$OPNFV_ARTIFACT" ]]; then
echo "ISO exists locally. Skipping the download and using the file from ISO store"
ln -s $ISOSTORE/$OPNFV_ARTIFACT $WORKSPACE/opnfv.iso
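
The ${BRANCH##*/} expansion used for ISOSTORE strips everything up to the last '/', so stable branches and master both map onto a flat per-release directory name; for example:

    #!/bin/bash
    BRANCH=stable/danube
    echo "/iso_mount/opnfv_ci/${BRANCH##*/}"  # /iso_mount/opnfv_ci/danube
    BRANCH=master
    echo "/iso_mount/opnfv_ci/${BRANCH##*/}"  # /iso_mount/opnfv_ci/master
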
diff --git a/jjb/fuel/fuel-project-jobs.yml b/jjb/fuel/fuel-project-jobs.yml
index 731b9bd0d..32ad8907e 100644
--- a/jjb/fuel/fuel-project-jobs.yml
+++ b/jjb/fuel/fuel-project-jobs.yml
@@ -35,6 +35,7 @@
concurrent: false
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 1
@@ -44,7 +45,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
- '{installer}-defaults'
@@ -78,7 +78,7 @@
publishers:
- email:
- recipients: jonas.bjurel@ericsson.com stefan.k.berg@ericsson.com fzhadaev@mirantis.com
+ recipients: fzhadaev@mirantis.com
- job-template:
name: 'fuel-merge-build-{stream}'
@@ -90,7 +90,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
- '{installer}-defaults'
@@ -146,6 +145,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 2
@@ -161,7 +161,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'fuel-virtual-defaults':
installer: '{installer}'
@@ -209,7 +208,7 @@
publishers:
- email:
- recipients: jonas.bjurel@ericsson.com stefan.k.berg@ericsson.com fzhadaev@mirantis.com
+ recipients: fzhadaev@mirantis.com
- job-template:
name: 'fuel-deploy-generic-daily-{stream}'
@@ -219,6 +218,7 @@
disabled: '{obj:disabled}'
properties:
+ - logrotate-default
- throttle:
enabled: true
max-per-node: 1
@@ -233,7 +233,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{installer}-defaults'
- string:
diff --git a/jjb/fuel/fuel-upload-artifact.sh b/jjb/fuel/fuel-upload-artifact.sh
index ca4ba00b0..d1ac3509b 100755
--- a/jjb/fuel/fuel-upload-artifact.sh
+++ b/jjb/fuel/fuel-upload-artifact.sh
@@ -23,7 +23,7 @@ nfsstore () {
# storing ISOs for verify & merge jobs will be done once we get the disk array
if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
# store ISO locally on NFS first
- ISOSTORE="/iso_mount/opnfv_ci/${GIT_BRANCH##*/}"
+ ISOSTORE="/iso_mount/opnfv_ci/${BRANCH##*/}"
if [[ -d "$ISOSTORE" ]]; then
# remove all but most recent 5 ISOs first to keep iso_mount clean & tidy
cd $ISOSTORE
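
The "remove all but most recent 5 ISOs" step referenced in the context above is typically an ls/tail pipeline; a sketch, assuming ISO filenames contain no whitespace (true for OPNFV artifacts):

    #!/bin/bash
    cd "/iso_mount/opnfv_ci/${BRANCH##*/}"
    # List ISOs newest first, skip the five most recent, delete the rest;
    # xargs -r avoids running rm when nothing matches.
    ls -t -- *.iso | tail -n +6 | xargs -r rm -f --
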
diff --git a/jjb/fuel/fuel-verify-jobs-experimental.yml b/jjb/fuel/fuel-verify-jobs-experimental.yml
index 3aa85b22d..ae6458021 100644
--- a/jjb/fuel/fuel-verify-jobs-experimental.yml
+++ b/jjb/fuel/fuel-verify-jobs-experimental.yml
@@ -52,6 +52,7 @@
concurrent: false
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 4
@@ -100,7 +101,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-defaults'
- 'fuel-verify-defaults-exp':
@@ -116,7 +116,7 @@
- name: 'fuel-verify-basic-{stream-exp}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -130,7 +130,7 @@
- name: 'fuel-verify-build-{stream-exp}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -144,7 +144,7 @@
- name: 'fuel-verify-deploy-virtual-{stream-exp}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -158,7 +158,7 @@
- name: 'fuel-verify-smoke-test-{stream-exp}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -174,6 +174,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 6
@@ -196,7 +197,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{slave-label}-defaults'
- '{installer}-defaults'
diff --git a/jjb/fuel/fuel-verify-jobs.yml b/jjb/fuel/fuel-verify-jobs.yml
index c5f913e49..7f9eff04d 100644
--- a/jjb/fuel/fuel-verify-jobs.yml
+++ b/jjb/fuel/fuel-verify-jobs.yml
@@ -47,6 +47,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 4
@@ -95,7 +96,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
- 'fuel-verify-defaults':
@@ -111,7 +111,7 @@
- name: 'fuel-verify-basic-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -125,7 +125,7 @@
- name: 'fuel-verify-build-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -139,7 +139,7 @@
- name: 'fuel-verify-deploy-virtual-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -153,7 +153,7 @@
- name: 'fuel-verify-smoke-test-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -169,6 +169,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 6
@@ -191,7 +192,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{slave-label}-defaults'
- '{installer}-defaults'
diff --git a/jjb/functest/functest-ci-jobs.yml b/jjb/functest/functest-ci-jobs.yml
index 4920bffea..49901bea2 100644
--- a/jjb/functest/functest-ci-jobs.yml
+++ b/jjb/functest/functest-ci-jobs.yml
@@ -133,8 +133,8 @@
slave-label: '{pod}'
installer: joid
<<: *master
- - huawei-pod5:
- slave-label: '{pod}'
+ - baremetal-centos:
+ slave-label: 'intel-pod8'
installer: compass
<<: *master
- nokia-pod1:
@@ -204,6 +204,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-per-node: 1
@@ -219,7 +220,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{installer}-defaults'
- '{slave-label}-defaults'
@@ -234,7 +234,7 @@
- string:
name: CLEAN_DOCKER_IMAGES
default: 'false'
- description: 'Remove downloaded docker images (opnfv/functest:*)'
+ description: 'Remove downloaded docker images (opnfv/functest*:*)'
- functest-parameter:
gs-pathname: '{gs-pathname}'
@@ -347,6 +347,8 @@
- 'functest-cleanup'
- 'set-functest-env'
- 'functest-suite'
+ - 'functest-store-results'
+ - 'functest-exit'
- builder:
name: functest-daily
diff --git a/jjb/functest/functest-cleanup.sh b/jjb/functest/functest-cleanup.sh
index b03d4778d..fc277b9ed 100755
--- a/jjb/functest/functest-cleanup.sh
+++ b/jjb/functest/functest-cleanup.sh
@@ -3,14 +3,22 @@
[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
echo "Cleaning up docker containers/images..."
+HOST_ARCH=$(uname -m)
FUNCTEST_IMAGE=opnfv/functest
-# Remove containers along with image opnfv/functest:<none>
+if [ "$HOST_ARCH" = "aarch64" ]; then
+ FUNCTEST_IMAGE="${FUNCTEST_IMAGE}_${HOST_ARCH}"
+fi
+
+# Remove containers along with image opnfv/functest*:<none>
dangling_images=($(docker images -f "dangling=true" | grep $FUNCTEST_IMAGE | awk '{print $3}'))
if [[ -n ${dangling_images} ]]; then
echo " Removing $FUNCTEST_IMAGE:<none> images and their containers..."
for image_id in "${dangling_images[@]}"; do
echo " Removing image_id: $image_id and its containers"
- docker ps -a | grep $image_id | awk '{print $1}'| xargs docker rm -f >${redirect}
+ containers=$(docker ps -a | grep $image_id | awk '{print $1}')
+ if [[ -n "$containers" ]];then
+ docker rm -f $containers >${redirect}
+ fi
docker rmi $image_id >${redirect}
done
fi
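
The guarded docker rm above fixes a real failure mode: when a dangling image had no containers, the old pipeline invoked docker rm with no arguments. The same cleanup can also lean on docker's own filters; a sketch:

    #!/bin/bash
    FUNCTEST_IMAGE=opnfv/functest
    for image_id in $(docker images -f "dangling=true" | grep "$FUNCTEST_IMAGE" | awk '{print $3}'); do
        # Remove any containers created from this image layer first.
        containers=$(docker ps -a -q --filter "ancestor=$image_id")
        [ -n "$containers" ] && docker rm -f $containers
        docker rmi "$image_id"
    done
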
diff --git a/jjb/functest/functest-exit.sh b/jjb/functest/functest-exit.sh
index 10edab005..925a3cfbb 100644
--- a/jjb/functest/functest-exit.sh
+++ b/jjb/functest/functest-exit.sh
@@ -1,7 +1,6 @@
#!/bin/bash
-branch=${GIT_BRANCH##*/}
-ret_val_file="${HOME}/opnfv/functest/results/${branch}/return_value"
+ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
if [ ! -f ${ret_val_file} ]; then
echo "Return value not found!"
exit -1
@@ -9,4 +8,4 @@ fi
ret_val=`cat ${ret_val_file}`
-exit ${ret_val}
\ No newline at end of file
+exit ${ret_val}
diff --git a/jjb/functest/functest-loop.sh b/jjb/functest/functest-loop.sh
index 4528c00d1..893c428a2 100755
--- a/jjb/functest/functest-loop.sh
+++ b/jjb/functest/functest-loop.sh
@@ -3,9 +3,9 @@ set +e
branch=${GIT_BRANCH##*/}
[[ "$PUSH_RESULTS_TO_DB" == "true" ]] && flags+="-r"
-if [[ ${branch} == *"brahmaputra"* ]]; then
+if [[ "$BRANCH" =~ 'brahmaputra' ]]; then
cmd="${FUNCTEST_REPO_DIR}/docker/run_tests.sh -s ${flags}"
-elif [[ ${branch} == *"colorado"* ]]; then
+elif [[ "$BRANCH" =~ 'colorado' ]]; then
cmd="python ${FUNCTEST_REPO_DIR}/ci/run_tests.py -t all ${flags}"
else
cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/run_tests.py -t all ${flags}"
@@ -14,7 +14,7 @@ container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1)
docker exec $container_id $cmd
ret_value=$?
-ret_val_file="${HOME}/opnfv/functest/results/${branch}/return_value"
+ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
echo ${ret_value}>${ret_val_file}
exit 0
diff --git a/jjb/functest/functest-project-jobs.yml b/jjb/functest/functest-project-jobs.yml
index 6a0768c9c..42c19a777 100644
--- a/jjb/functest/functest-project-jobs.yml
+++ b/jjb/functest/functest-project-jobs.yml
@@ -28,7 +28,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
diff --git a/jjb/functest/functest-suite.sh b/jjb/functest/functest-suite.sh
index 9dc8deca0..f28d3d037 100755
--- a/jjb/functest/functest-suite.sh
+++ b/jjb/functest/functest-suite.sh
@@ -1,11 +1,10 @@
#!/bin/bash
set -e
-branch=${GIT_BRANCH##*/}
-echo "Functest: run $FUNCTEST_SUITE_NAME on branch ${branch}"
-if [[ ${branch} == *"brahmaputra"* ]]; then
+echo "Functest: run $FUNCTEST_SUITE_NAME on branch $BRANCH"
+if [[ "$BRANCH" =~ 'brahmaputra' ]]; then
cmd="${FUNCTEST_REPO_DIR}/docker/run_tests.sh --test $FUNCTEST_SUITE_NAME"
-elif [[ ${branch} == *"colorado"* ]]; then
+elif [[ "$BRANCH" =~ 'colorado' ]]; then
cmd="python ${FUNCTEST_REPO_DIR}/ci/run_tests.py -t $FUNCTEST_SUITE_NAME"
else
cmd="functest testcase run $FUNCTEST_SUITE_NAME"
@@ -14,5 +13,7 @@ container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1)
docker exec $container_id $cmd
ret_value=$?
+ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
+echo ${ret_value}>${ret_val_file}
-exit $ret_value
+exit 0
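
With this change the suite step always exits 0 and records the real verdict on disk; the 'functest-store-results' and 'functest-exit' builders appended in functest-ci-jobs.yml then publish the results and turn the stored value back into the job's exit code, roughly:

    #!/bin/bash
    # What the follow-up functest-exit step does (see functest-exit.sh
    # earlier in this patch): replay the stored suite verdict.
    ret_val_file="${HOME}/opnfv/functest/results/${BRANCH##*/}/return_value"
    exit "$(cat "${ret_val_file}")"
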
diff --git a/jjb/functest/set-functest-env.sh b/jjb/functest/set-functest-env.sh
index 583ce8041..05e3d5792 100755
--- a/jjb/functest/set-functest-env.sh
+++ b/jjb/functest/set-functest-env.sh
@@ -14,34 +14,37 @@ if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
fi
if [[ ${RC_FILE_PATH} != '' ]] && [[ -f ${RC_FILE_PATH} ]] ; then
+ echo "Credentials file detected: ${RC_FILE_PATH}"
# volume if credentials file path is given to Functest
- rc_file_vol="-v $RC_FILE_PATH:/home/opnfv/functest/conf/openstack.creds"
+ rc_file_vol="-v ${RC_FILE_PATH}:/home/opnfv/functest/conf/openstack.creds"
+ RC_FLAG=1
fi
if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no"
- if sudo virsh list | grep instack; then
- instack_mac=$(sudo virsh domiflist instack | grep default | \
- grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
- elif sudo virsh list | grep undercloud; then
- instack_mac=$(sudo virsh domiflist undercloud | grep default | \
+ if sudo virsh list | grep undercloud; then
+ echo "Installer VM detected"
+ undercloud_mac=$(sudo virsh domiflist undercloud | grep default | \
grep -Eo "[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+:[0-9a-f]+")
+      INSTALLER_IP=$(/usr/sbin/arp -e | grep ${undercloud_mac} | awk '{print $1}')
+ sshkey_vol="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
+ sudo scp $ssh_options root@${INSTALLER_IP}:/home/stack/stackrc ${HOME}/stackrc
+ stackrc_vol="-v ${HOME}/stackrc:/home/opnfv/functest/conf/stackrc"
+
+ if sudo iptables -C FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
+ sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
+ fi
+ if sudo iptables -C FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
+ sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
+ fi
+ elif [[ "$RC_FLAG" == 1 ]]; then
+ echo "No available installer VM, but credentials provided...continuing"
else
- echo "No available installer VM exists...exiting"
+ echo "No available installer VM exists and no credentials provided...exiting"
exit 1
fi
- INSTALLER_IP=$(/usr/sbin/arp -e | grep ${instack_mac} | awk {'print $1'})
- sshkey_vol="-v /root/.ssh/id_rsa:/root/.ssh/id_rsa"
- sudo scp $ssh_options root@${INSTALLER_IP}:/home/stack/stackrc ${HOME}/stackrc
- stackrc_vol="-v ${HOME}/stackrc:/home/opnfv/functest/conf/stackrc"
- if sudo iptables -C FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
- sudo iptables -D FORWARD -o virbr0 -j REJECT --reject-with icmp-port-unreachable
- fi
- if sudo iptables -C FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable 2> ${redirect}; then
- sudo iptables -D FORWARD -i virbr0 -j REJECT --reject-with icmp-port-unreachable
- fi
fi
@@ -56,8 +59,7 @@ DEPLOY_TYPE=baremetal
echo "Functest: Start Docker and prepare environment"
-branch=${GIT_BRANCH##*/}
-dir_result="${HOME}/opnfv/functest/results/${branch}"
+dir_result="${HOME}/opnfv/functest/results/${BRANCH##*/}"
mkdir -p ${dir_result}
sudo rm -rf ${dir_result}/*
results_vol="-v ${dir_result}:/home/opnfv/functest/results"
@@ -71,17 +73,22 @@ envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
volumes="${results_vol} ${sshkey_vol} ${stackrc_vol} ${rc_file_vol}"
+HOST_ARCH=$(uname -m)
+FUNCTEST_IMAGE="opnfv/functest"
+if [ "$HOST_ARCH" = "aarch64" ]; then
+ FUNCTEST_IMAGE="${FUNCTEST_IMAGE}_${HOST_ARCH}"
+fi
-echo "Functest: Pulling image opnfv/functest:${DOCKER_TAG}"
-docker pull opnfv/functest:$DOCKER_TAG >/dev/null
+echo "Functest: Pulling image ${FUNCTEST_IMAGE}:${DOCKER_TAG}"
+docker pull ${FUNCTEST_IMAGE}:$DOCKER_TAG >/dev/null
cmd="sudo docker run --privileged=true -id ${envs} ${volumes} \
${custom_params} ${TESTCASE_OPTIONS} \
- opnfv/functest:${DOCKER_TAG} /bin/bash"
+ ${FUNCTEST_IMAGE}:${DOCKER_TAG} /bin/bash"
echo "Functest: Running docker run command: ${cmd}"
${cmd} >${redirect}
sleep 5
-container_id=$(docker ps | grep "opnfv/functest:${DOCKER_TAG}" | awk '{print $1}' | head -1)
+container_id=$(docker ps | grep "${FUNCTEST_IMAGE}:${DOCKER_TAG}" | awk '{print $1}' | head -1)
echo "Container ID=${container_id}"
if [ -z ${container_id} ]; then
echo "Cannot find opnfv/functest container ID ${container_id}. Please check if it is existing."
@@ -92,13 +99,13 @@ echo "Starting the container: docker start ${container_id}"
docker start ${container_id}
sleep 5
docker ps >${redirect}
-if [ $(docker ps | grep "opnfv/functest:${DOCKER_TAG}" | wc -l) == 0 ]; then
- echo "The container opnfv/functest with ID=${container_id} has not been properly started. Exiting..."
+if [ $(docker ps | grep "${FUNCTEST_IMAGE}:${DOCKER_TAG}" | wc -l) == 0 ]; then
+ echo "The container ${FUNCTEST_IMAGE} with ID=${container_id} has not been properly started. Exiting..."
exit 1
fi
-if [[ ${branch} == *"brahmaputra"* ]]; then
+if [[ "$BRANCH" =~ 'brahmaputra' ]]; then
cmd="${FUNCTEST_REPO_DIR}/docker/prepare_env.sh"
-elif [[ ${branch} == *"colorado"* ]]; then
+elif [[ "$BRANCH" =~ 'colorado' ]]; then
cmd="python ${FUNCTEST_REPO_DIR}/ci/prepare_env.py start"
else
cmd="python ${FUNCTEST_REPO_DIR}/functest/ci/prepare_env.py start"
diff --git a/jjb/global/installer-params.yml b/jjb/global/installer-params.yml
index 4a50a5906..fc9f34a48 100644
--- a/jjb/global/installer-params.yml
+++ b/jjb/global/installer-params.yml
@@ -10,10 +10,6 @@
default: apex
description: 'Installer used for deploying OPNFV on this POD'
- string:
- name: DEPLOY_SCENARIO
- default: 'none'
- description: 'Scenario to deploy and test'
- - string:
name: EXTERNAL_NETWORK
default: 'external'
description: 'external network for test'
@@ -66,6 +62,10 @@
default: joid
description: 'Installer used for deploying OPNFV on this POD'
- string:
+ name: MODEL
+ default: 'os'
+ description: 'Model to deploy (os|k8)'
+ - string:
name: OS_RELEASE
default: 'newton'
description: 'OpenStack release (mitaka|newton)'
@@ -89,6 +89,19 @@
name: CPU_ARCHITECTURE
default: 'amd64'
description: "CPU Architecture to use for Ubuntu distro "
+
+- parameter:
+ name: 'daisy-defaults'
+ parameters:
+ - string:
+ name: INSTALLER_IP
+ default: '10.20.0.2'
+ description: 'IP of the installer'
+ - string:
+ name: INSTALLER_TYPE
+ default: daisy
+ description: 'Installer used for deploying OPNFV on this POD'
+
- parameter:
name: 'infra-defaults'
parameters:
@@ -109,13 +122,9 @@
description: 'IP of the installer'
- string:
name: INSTALLER_TYPE
- default: netvirt
+ default: apex
description: 'Installer used for deploying OPNFV on this POD'
- string:
- name: DEPLOY_SCENARIO
- default: 'os-odl_l2-bgpvpn-noha'
- description: 'Scenario to deploy and test'
- - string:
name: EXTERNAL_NETWORK
default: 'external'
description: 'external network for test'
diff --git a/jjb/global/releng-defaults.yml b/jjb/global/releng-defaults.yml
index 5003a8f48..283888603 100644
--- a/jjb/global/releng-defaults.yml
+++ b/jjb/global/releng-defaults.yml
@@ -3,15 +3,12 @@
- defaults:
name: global
- logrotate:
- daysToKeep: 60
- numToKeep: 200
- artifactDaysToKeep: 30
- artifactNumToKeep: 100
-
wrappers:
- ssh-agent-wrapper
project-type: freestyle
node: master
+
+ properties:
+ - logrotate-default
diff --git a/jjb/global/releng-macros.yml b/jjb/global/releng-macros.yml
index 404c3dd69..d5eb0c974 100644
--- a/jjb/global/releng-macros.yml
+++ b/jjb/global/releng-macros.yml
@@ -26,14 +26,23 @@
name: GS_BASE_PROXY
default: build.opnfv.org/artifacts.opnfv.org/$PROJECT
description: "URL to Google Storage proxy"
-
-- parameter:
- name: gerrit-parameter
- parameters:
+ - string:
+ name: BRANCH
+ default: '{branch}'
+ description: "JJB configured BRANCH parameter (e.g. master, stable/danube)"
- string:
name: GERRIT_BRANCH
default: '{branch}'
- description: "JJB configured GERRIT_BRANCH parameter"
+ description: "JJB configured GERRIT_BRANCH parameter (deprecated)"
+
+- property:
+ name: logrotate-default
+ properties:
+ - build-discarder:
+ days-to-keep: 60
+ num-to-keep: 200
+ artifact-days-to-keep: 60
+ artifact-num-to-keep: 200
- scm:
name: git-scm
@@ -42,7 +51,7 @@
credentials-id: '$SSH_CREDENTIAL_ID'
url: '$GIT_BASE'
branches:
- - 'origin/$GERRIT_BRANCH'
+ - 'origin/$BRANCH'
timeout: 15
- scm:
@@ -52,15 +61,33 @@
choosing-strategy: 'gerrit'
refspec: '$GERRIT_REFSPEC'
<<: *git-scm-defaults
-
+- scm:
+ name: git-scm-with-submodules
+ scm:
+ - git:
+ credentials-id: '$SSH_CREDENTIAL_ID'
+ url: '$GIT_BASE'
+ refspec: ''
+ branches:
+ - 'refs/heads/{branch}'
+ skip-tag: true
+ wipe-workspace: true
+ submodule:
+ recursive: true
+ timeout: 20
+ shallow-clone: true
- trigger:
name: 'daily-trigger-disabled'
triggers:
- timed: ''
-# NOTE: unused macro, but we may use this for some jobs.
- trigger:
- name: gerrit-trigger-patch-submitted
+ name: 'weekly-trigger-disabled'
+ triggers:
+ - timed: ''
+
+- trigger:
+ name: gerrit-trigger-patchset-created
triggers:
- gerrit:
server-name: 'gerrit.opnfv.org'
@@ -72,16 +99,25 @@
- draft-published-event
- comment-added-contains-event:
comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
projects:
- project-compare-type: 'ANT'
- project-pattern: '{name}'
+ project-pattern: '{project}'
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: 'ANT'
+ pattern: '{files}'
+ skip-vote:
+ successful: true
+ failed: true
+ unstable: true
+ notbuilt: true
-# NOTE: unused macro, but we may use this for some jobs.
- trigger:
- name: gerrit-trigger-patch-merged
+ name: gerrit-trigger-change-merged
triggers:
- gerrit:
server-name: 'gerrit.opnfv.org'
@@ -91,7 +127,7 @@
comment-contains-value: 'remerge'
projects:
- project-compare-type: 'ANT'
- project-pattern: '{name}'
+ project-pattern: '{project}'
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
@@ -118,7 +154,6 @@
failed: true
unstable: true
notbuilt: true
- silent-start: true
- wrapper:
name: ssh-agent-wrapper
@@ -410,6 +445,12 @@
sed -r -i '4,$s/^/ /g' lint.log
fi
+- builder:
+ name: clean-workspace-log
+ builders:
+ - shell: |
+ find $WORKSPACE -type f -name '*.log' | xargs rm -f
+
- publisher:
name: archive-artifacts
publishers:
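
One caveat on the new clean-workspace-log builder: an unquoted find | xargs pipeline breaks on paths containing whitespace. A more defensive variant (a sketch, not what the macro currently does) would be:

    #!/bin/bash
    # NUL-delimited so filenames with spaces survive the pipeline;
    # xargs -r skips rm entirely when no logs match.
    find "$WORKSPACE" -type f -name '*.log' -print0 | xargs -0 -r rm -f
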
diff --git a/jjb/global/slave-params.yml b/jjb/global/slave-params.yml
index 9d08487a3..4b3eaaabf 100644
--- a/jjb/global/slave-params.yml
+++ b/jjb/global/slave-params.yml
@@ -178,6 +178,23 @@
name: EXTERNAL_NETWORK
default: ext-net
description: "External network floating ips"
+- parameter:
+ name: 'daisy-baremetal-defaults'
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - zte-pod2
+ default-slaves:
+ - zte-pod2
+ - label:
+ name: SLAVE_LABEL
+ default: 'daisy-baremetal'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
#####################################################
# Parameters for CI virtual PODs
#####################################################
@@ -239,15 +256,33 @@
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+ name: 'daisy-virtual-defaults'
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - zte-virtual1
+ - zte-virtual2
+ default-slaves:
+ - zte-virtual1
+ - label:
+ name: SLAVE_LABEL
+ default: 'daisy-virtual'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
#####################################################
# Parameters for build slaves
#####################################################
- parameter:
- name: 'opnfv-build-arm-defaults'
+ name: 'opnfv-build-enea-defaults'
parameters:
- label:
name: SLAVE_LABEL
- default: 'opnfv-build-arm'
+ default: 'opnfv-build-enea'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -314,6 +349,21 @@
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+ name: 'opnfv-build-ubuntu-arm-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'opnfv-build-ubuntu-arm'
+ description: 'Slave label on Jenkins'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
#####################################################
# Parameters for none-CI PODs
#####################################################
@@ -332,6 +382,20 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- parameter:
+ name: 'cengn-pod1-defaults'
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - cengn-pod1
+ default-slaves:
+ - cengn-pod1
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+- parameter:
name: 'intel-pod1-defaults'
parameters:
- node:
@@ -378,15 +442,29 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- parameter:
- name: 'intel-pod3-defaults'
+ name: 'intel-pod10-defaults'
parameters:
- node:
name: SLAVE_NAME
description: 'Slave name on Jenkins'
allowed-slaves:
- - intel-pod3
+ - intel-pod10
default-slaves:
- - intel-pod3
+ - intel-pod10
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+ name: 'intel-pod12-defaults'
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - intel-pod12
+ default-slaves:
+ - intel-pod12
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -426,15 +504,28 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- parameter:
- name: 'huawei-pod5-defaults'
+ name: 'intel-pod8-defaults'
+ parameters:
+ - node:
+ name: SLAVE_NAME
+ description: 'Slave name on Jenkins'
+ allowed-slaves:
+ - intel-pod8
+ default-slaves:
+ - intel-pod8
+ - string:
+ name: GIT_BASE
+        default: https://gerrit.opnfv.org/gerrit/$PROJECT
+        description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+ name: 'huawei-virtual7-defaults'
parameters:
- node:
name: SLAVE_NAME
description: 'Slave name on Jenkins'
allowed-slaves:
- - huawei-pod5
+ - huawei-virtual7
default-slaves:
- - huawei-pod5
+ - huawei-virtual7
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -670,15 +761,17 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- parameter:
- name: 'ool-virtual1-defaults'
+ name: 'ool-defaults'
parameters:
- node:
name: SLAVE_NAME
description: 'Slave name on Jenkins'
allowed-slaves:
- ool-virtual1
+ - ool-virtual2
+ - ool-virtual3
default-slaves:
- - ool-virtual1
+ - '{default-slave}'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -688,6 +781,21 @@
default: /root/.ssh/id_rsa
description: 'SSH key to be used'
- parameter:
+ name: 'ool-virtual1-defaults'
+ parameters:
+ - 'ool-defaults':
+ default-slave: 'ool-virtual1'
+- parameter:
+ name: 'ool-virtual2-defaults'
+ parameters:
+ - 'ool-defaults':
+ default-slave: 'ool-virtual2'
+- parameter:
+ name: 'ool-virtual3-defaults'
+ parameters:
+ - 'ool-defaults':
+ default-slave: 'ool-virtual3'
+- parameter:
name: 'multisite-virtual-defaults'
parameters:
- label:
@@ -708,6 +816,26 @@
default: https://git.opendaylight.org/gerrit/p/$PROJECT.git
description: 'Git URL to use on this Jenkins Slave'
- parameter:
+ name: 'ericsson-virtual12-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'ericsson-virtual12'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+- parameter:
+ name: 'ericsson-virtual13-defaults'
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'ericsson-virtual13'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/$PROJECT
+ description: 'Git URL to use on this Jenkins Slave'
+- parameter:
name: 'odl-netvirt-virtual-defaults'
parameters:
- label:
diff --git a/jjb/infra/bifrost-verify-jobs.yml b/jjb/infra/bifrost-verify-jobs.yml
index 8184b6343..d595d4bef 100644
--- a/jjb/infra/bifrost-verify-jobs.yml
+++ b/jjb/infra/bifrost-verify-jobs.yml
@@ -38,9 +38,6 @@
dib-os-element: 'opensuse-minimal'
dib-os-packages: 'vim,less,bridge-utils,iputils,rsyslog,curl'
extra-dib-elements: 'openssh-server'
- vm-disk: '30'
- vm-memory: '4096'
- vm-cpu: '2'
#--------------------------------
# type
#--------------------------------
@@ -57,9 +54,10 @@
#--------------------------------
- defaults:
name: vm_defaults
- vm-disk: '100'
- vm-memory: '8192'
- vm-cpu: '4'
+ vm-disk: '30'
+ vm-disk-cache: 'unsafe'
+ vm-memory: '4096'
+ vm-cpu: '2'
#--------------------------------
# job templates
@@ -74,6 +72,7 @@
concurrent: false
properties:
+ - logrotate-default
- build-blocker:
use-build-blocker: true
blocking-jobs:
@@ -109,6 +108,9 @@
name: VM_DISK
default: '{vm-disk}'
- string:
+ name: VM_DISK_CACHE
+ default: '{vm-disk-cache}'
+ - string:
name: VM_MEMORY
default: '{vm-memory}'
- string:
@@ -129,7 +131,7 @@
url: '$PROJECT_REPO'
refspec: '$GERRIT_REFSPEC'
branches:
- - 'origin/$GERRIT_BRANCH'
+ - 'origin/$BRANCH'
skip-tag: true
choosing-strategy: 'gerrit'
timeout: 10
@@ -145,7 +147,7 @@
publishers:
- email:
- recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com zhang.jun3g@zte.com.cn
+ recipients: fatih.degirmenci@ericsson.com yroblamo@redhat.com mchandras@suse.de jack.morgan@intel.com julienjut@gmail.com
#--------------------------------
# trigger macros
#--------------------------------
@@ -201,8 +203,6 @@
file-paths:
- compare-type: ANT
pattern: 'prototypes/bifrost/**'
- - compare-type: ANT
- pattern: 'jjb/infra/**'
readable-message: true
#---------------------------
diff --git a/jjb/infra/bifrost-verify.sh b/jjb/infra/bifrost-verify.sh
index dbe39762c..4115ffcc4 100755
--- a/jjb/infra/bifrost-verify.sh
+++ b/jjb/infra/bifrost-verify.sh
@@ -17,44 +17,56 @@ function upload_logs() {
BIFROST_CONSOLE_LOG="${BUILD_URL}/consoleText"
BIFROST_GS_URL=${BIFROST_LOG_URL/http:/gs:}
- echo "Uploading build logs to ${BIFROST_LOG_URL}"
-
- echo "Uploading console output"
- curl -s -L ${BIFROST_CONSOLE_LOG} > ${WORKSPACE}/build_log.txt
- gsutil -q cp -Z ${WORKSPACE}/build_log.txt ${BIFROST_GS_URL}/build_log.txt
- rm ${WORKSPACE}/build_log.txt
-
- [[ ! -d ${WORKSPACE}/logs ]] && exit 0
-
- pushd ${WORKSPACE}/logs/ &> /dev/null
- for x in *.log; do
- echo "Compressing and uploading $x"
- gsutil -q cp -Z ${x} ${BIFROST_GS_URL}/${x}
- done
+ # Make sure the old landing page is gone in case
+ # we break later on. We don't want to publish
+ # stale information.
+ # TODO: Maybe cleanup the entire $BIFROST_GS_URL directory
+ # before we upload the new data.
+ gsutil -q rm ${BIFROST_GS_URL}/index.html || true
+
+ echo "Uploading collected bifrost build logs to ${BIFROST_LOG_URL}"
+
+ if [[ -d ${WORKSPACE}/logs ]]; then
+ pushd ${WORKSPACE}/logs &> /dev/null
+ for x in *.log; do
+ echo "Compressing and uploading $x"
+ gsutil -q cp -Z ${x} ${BIFROST_GS_URL}/${x}
+ done
+ popd &> /dev/null
+ fi
- echo "Generating the landing page"
- cat > index.html <<EOF
+ echo "Generating the ${BIFROST_LOG_URL}/index.html landing page"
+ cat > ${WORKSPACE}/index.html <<EOF
<html>
<h1>Build results for <a href=https://$GERRIT_NAME/#/c/$GERRIT_CHANGE_NUMBER/$GERRIT_PATCHSET_NUMBER>$GERRIT_NAME/$GERRIT_CHANGE_NUMBER/$GERRIT_PATCHSET_NUMBER</a></h1>
-<h2>Job: $JOB_NAME</h2>
+<h2>Job: <a href=${BUILD_URL}>$JOB_NAME</a></h2>
<ul>
<li><a href=${BIFROST_LOG_URL}/build_log.txt>build_log.txt</a></li>
EOF
- for x in *.log; do
- echo "<li><a href=${BIFROST_LOG_URL}/${x}>${x}</a></li>" >> index.html
- done
+ if [[ -d ${WORKSPACE}/logs ]]; then
+ pushd ${WORKSPACE}/logs &> /dev/null
+ for x in *.log; do
+ echo "<li><a href=${BIFROST_LOG_URL}/${x}>${x}</a></li>" >> ${WORKSPACE}/index.html
+ done
+ popd &> /dev/null
+ fi
- cat >> index.html << EOF
+ cat >> ${WORKSPACE}/index.html << EOF
</ul>
</html>
EOF
- gsutil -q cp index.html ${BIFROST_GS_URL}/index.html
-
- rm index.html
+ # Finally, download and upload the entire build log so we can retain
+ # as much build information as possible
+ echo "Uploading the final console output"
+ curl -s -L ${BIFROST_CONSOLE_LOG} > ${WORKSPACE}/build_log.txt
+ gsutil -q cp -Z ${WORKSPACE}/build_log.txt ${BIFROST_GS_URL}/build_log.txt
+ rm ${WORKSPACE}/build_log.txt
- popd &> /dev/null
+ # Upload landing page
+ gsutil -q cp ${WORKSPACE}/index.html ${BIFROST_GS_URL}/index.html
+ rm ${WORKSPACE}/index.html
}
function fix_ownership() {
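
A note on the reworked upload_logs: gsutil cp -Z gzip-compresses each object on upload and serves it with Content-Encoding: gzip, and deleting index.html up front guarantees a stale landing page is never left behind if a later step fails. The delete-then-republish idiom reduces to:

    #!/bin/bash
    # Remove the old landing page (ignore failure if it is absent),
    # then upload the compressed console log and a fresh page.
    gsutil -q rm "${BIFROST_GS_URL}/index.html" || true
    gsutil -q cp -Z "${WORKSPACE}/build_log.txt" "${BIFROST_GS_URL}/build_log.txt"
    gsutil -q cp "${WORKSPACE}/index.html" "${BIFROST_GS_URL}/index.html"
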
diff --git a/jjb/ipv6/ipv6.yml b/jjb/ipv6/ipv6.yml
index da54c521e..a6745cd99 100644
--- a/jjb/ipv6/ipv6.yml
+++ b/jjb/ipv6/ipv6.yml
@@ -28,7 +28,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
diff --git a/jjb/joid/joid-daily-jobs.yml b/jjb/joid/joid-daily-jobs.yml
index 91fcc8814..88269d3c5 100644
--- a/jjb/joid/joid-daily-jobs.yml
+++ b/jjb/joid/joid-daily-jobs.yml
@@ -46,6 +46,9 @@
- orange-pod1:
slave-label: orange-pod1
<<: *master
+ - cengn-pod1:
+ slave-label: cengn-pod1
+ <<: *master
#--------------------------------
# scenarios
#--------------------------------
@@ -72,6 +75,10 @@
auto-trigger-name: 'daily-trigger-disabled'
- 'os-ocl-nofeature-noha':
auto-trigger-name: 'daily-trigger-disabled'
+ - 'k8-nosdn-nofeature-noha':
+ auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+ - 'k8-nosdn-lb-noha':
+ auto-trigger-name: 'daily-trigger-disabled'
jobs:
- 'joid-{scenario}-{pod}-daily-{stream}'
@@ -88,6 +95,7 @@
concurrent: false
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 4
@@ -109,6 +117,7 @@
parameters:
- project-parameter:
project: '{project}'
+ branch: '{branch}'
- '{installer}-defaults'
- '{slave-label}-defaults':
installer: '{installer}'
@@ -157,6 +166,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 4
@@ -178,7 +188,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{installer}-defaults'
- '{slave-label}-defaults':
@@ -226,6 +235,10 @@
name: 'joid-os-nosdn-nofeature-ha-orange-pod1-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-nosdn-nofeature-ha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: '5 2 * * *'
# os-nosdn-nofeature-ha trigger - branch: danube
- trigger:
name: 'joid-os-nosdn-nofeature-ha-baremetal-danube-trigger'
@@ -239,6 +252,10 @@
name: 'joid-os-nosdn-nofeature-ha-orange-pod1-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-nosdn-nofeature-ha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
# os-odl_l2-nofeature-ha trigger - branch: master
- trigger:
name: 'joid-os-odl_l2-nofeature-ha-baremetal-master-trigger'
@@ -252,6 +269,10 @@
name: 'joid-os-odl_l2-nofeature-ha-orange-pod1-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-odl_l2-nofeature-ha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: '5 7 * * *'
# os-odl_l2-nofeature-ha trigger - branch: danube
- trigger:
name: 'joid-os-odl_l2-nofeature-ha-baremetal-danube-trigger'
@@ -265,6 +286,10 @@
name: 'joid-os-odl_l2-nofeature-ha-orange-pod1-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-odl_l2-nofeature-ha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
# os-onos-nofeature-ha trigger - branch: master
- trigger:
name: 'joid-os-onos-nofeature-ha-baremetal-master-trigger'
@@ -278,6 +303,10 @@
name: 'joid-os-onos-nofeature-ha-orange-pod1-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-onos-nofeature-ha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: '5 12 * * *'
# os-onos-nofeature-ha trigger - branch: danube
- trigger:
name: 'joid-os-onos-nofeature-ha-baremetal-danube-trigger'
@@ -291,6 +320,10 @@
name: 'joid-os-onos-nofeature-ha-orange-pod1-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-onos-nofeature-ha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
# os-onos-sfc-ha trigger - branch: master
- trigger:
name: 'joid-os-onos-sfc-ha-baremetal-master-trigger'
@@ -304,6 +337,10 @@
name: 'joid-os-onos-sfc-ha-orange-pod1-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-onos-sfc-ha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: '5 17 * * *'
# os-onos-sfc-ha trigger - branch: danube
- trigger:
name: 'joid-os-onos-sfc-ha-baremetal-danube-trigger'
@@ -317,6 +354,10 @@
name: 'joid-os-onos-sfc-ha-orange-pod1-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-onos-sfc-ha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
# os-nosdn-lxd-noha trigger - branch: master
- trigger:
name: 'joid-os-nosdn-lxd-noha-baremetal-master-trigger'
@@ -330,6 +371,10 @@
name: 'joid-os-nosdn-lxd-noha-orange-pod1-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-noha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: '5 22 * * *'
# os-nosdn-lxd-noha trigger - branch: danube
- trigger:
name: 'joid-os-nosdn-lxd-noha-baremetal-danube-trigger'
@@ -343,6 +388,10 @@
name: 'joid-os-nosdn-lxd-noha-orange-pod1-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-noha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
# os-nosdn-lxd-ha trigger - branch: master
- trigger:
name: 'joid-os-nosdn-lxd-ha-baremetal-master-trigger'
@@ -356,6 +405,10 @@
name: 'joid-os-nosdn-lxd-ha-orange-pod1-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-ha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: '5 10 * * *'
# os-nosdn-lxd-ha trigger - branch: danube
- trigger:
name: 'joid-os-nosdn-lxd-ha-baremetal-danube-trigger'
@@ -369,6 +422,10 @@
name: 'joid-os-nosdn-lxd-ha-orange-pod1-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-nosdn-lxd-ha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
# os-nosdn-nofeature-noha trigger - branch: master
- trigger:
name: 'joid-os-nosdn-nofeature-noha-baremetal-master-trigger'
@@ -382,6 +439,10 @@
name: 'joid-os-nosdn-nofeature-noha-orange-pod1-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-nosdn-nofeature-noha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: '5 4 * * *'
# os-nosdn-nofeature-noha trigger - branch: danube
- trigger:
name: 'joid-os-nosdn-nofeature-noha-baremetal-danube-trigger'
@@ -395,3 +456,75 @@
name: 'joid-os-nosdn-nofeature-noha-orange-pod1-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'joid-os-nosdn-nofeature-noha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
+# k8-nosdn-nofeature-noha trigger - branch: master
+- trigger:
+ name: 'joid-k8-nosdn-nofeature-noha-baremetal-master-trigger'
+ triggers:
+ - timed: '5 15 * * *'
+- trigger:
+ name: 'joid-k8-nosdn-nofeature-noha-virtual-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-k8-nosdn-nofeature-noha-orange-pod1-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-k8-nosdn-nofeature-noha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: '5 15 * * *'
+# k8-nosdn-nofeature-noha trigger - branch: danube
+- trigger:
+ name: 'joid-k8-nosdn-nofeature-noha-baremetal-danube-trigger'
+ triggers:
+ - timed: '0 15 * * *'
+- trigger:
+ name: 'joid-k8-nosdn-nofeature-noha-virtual-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-k8-nosdn-nofeature-noha-orange-pod1-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-k8-nosdn-nofeature-noha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
+# k8-nosdn-lb-noha trigger - branch: master
+- trigger:
+ name: 'joid-k8-nosdn-lb-noha-baremetal-master-trigger'
+ triggers:
+ - timed: '5 20 * * *'
+- trigger:
+ name: 'joid-k8-nosdn-lb-noha-virtual-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-k8-nosdn-lb-noha-orange-pod1-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-k8-nosdn-lb-noha-cengn-pod1-master-trigger'
+ triggers:
+ - timed: '5 20 * * *'
+# k8-nosdn-lb-noha trigger - branch: danube
+- trigger:
+ name: 'joid-k8-nosdn-lb-noha-baremetal-danube-trigger'
+ triggers:
+ - timed: '0 20 * * *'
+- trigger:
+ name: 'joid-k8-nosdn-lb-noha-virtual-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-k8-nosdn-lb-noha-orange-pod1-danube-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'joid-k8-nosdn-lb-noha-cengn-pod1-danube-trigger'
+ triggers:
+ - timed: ''
diff --git a/jjb/joid/joid-deploy.sh b/jjb/joid/joid-deploy.sh
index 05c2de1fc..e197dbd8c 100644
--- a/jjb/joid/joid-deploy.sh
+++ b/jjb/joid/joid-deploy.sh
@@ -45,17 +45,24 @@ export POD_NAME=${POD/-}
##
cd $WORKSPACE/ci
-if [ -e "$LAB_CONFIG/environments.yaml" ] && [ "$MAAS_REINSTALL" == "false" ]; then
+
+if [ -e "$LAB_CONFIG/deployconfig.yaml" ] && [ "$MAAS_REINSTALL" == "false" ]; then
echo "------ Recover Juju environment to use MAAS ------"
- cp $LAB_CONFIG/environments.yaml .
- cp $LAB_CONFIG/deployment.yaml .
- if [ -e $LAB_CONFIG/deployconfig.yaml ]; then
+ if [ ! -e deployconfig.yaml ]; then
cp $LAB_CONFIG/deployconfig.yaml .
+ cp $LAB_CONFIG/deployment.yaml .
+ cp $LAB_CONFIG/labconfig.yaml .
fi
else
- echo "------ Redeploy MAAS ------"
- ./00-maasdeploy.sh $POD_NAME
- exit_on_error $? "MAAS Deploy FAILED"
+    if [ "$NODE_NAME" == "default" ]; then
+ echo "------ Redeploy MAAS ------"
+ ./03-maasdeploy.sh default
+ exit_on_error $? "MAAS Deploy FAILED"
+ else
+ echo "------ Redeploy MAAS ------"
+ ./03-maasdeploy.sh custom $LAB_CONFIG/labconfig.yaml
+ exit_on_error $? "MAAS Deploy FAILED"
+ fi
fi
##
@@ -64,8 +71,9 @@ fi
# Based on scenario naming we can get joid options
# naming convention:
-# os-<controller>-<nfvfeature>-<mode>[-<extrastuff>]
+# <model>-<controller>-<nfvfeature>-<mode>[-<extrastuff>]
# With parameters:
+# model=(os|k8)
# controller=(nosdn|odl_l3|odl_l2|onos|ocl)
# No odl_l3 today
# nfvfeature=(kvm|ovs|dpdk|nofeature)
@@ -77,6 +85,7 @@ fi
IFS='-' read -r -a DEPLOY_OPTIONS <<< "${DEPLOY_SCENARIO}--"
# the trailing -- is needed to avoid a nounset error
+JOID_MODEL=${DEPLOY_OPTIONS[0]}
SDN_CONTROLLER=${DEPLOY_OPTIONS[1]}
NFV_FEATURES=${DEPLOY_OPTIONS[2]}
HA_MODE=${DEPLOY_OPTIONS[3]}
@@ -103,49 +112,47 @@ fi
## Configure Joid deployment
##
-echo "------ Deploy with juju ------"
-echo "Execute: ./deploy.sh -t $HA_MODE -o $OS_RELEASE -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO -f $NFV_FEATURES"
+if [ "$JOID_MODEL" == 'k8' ]; then
+ echo "------ Deploy with juju ------"
+    echo "Execute: ./deploy.sh -m kubernetes -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO -f $NFV_FEATURES"
-./deploy.sh -t $HA_MODE -o $OS_RELEASE -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO -f $NFV_FEATURES
-exit_on_error $? "Main deploy FAILED"
+ ./deploy.sh -m kubernetes -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO -f $NFV_FEATURES
+ exit_on_error $? "Main deploy FAILED"
+fi
##
## Set Admin RC
##
-JOID_ADMIN_OPENRC=$LAB_CONFIG/admin-openrc
-echo "------ Create OpenRC file [$JOID_ADMIN_OPENRC] ------"
-
-# get controller IP
-case "$SDN_CONTROLLER" in
- "odl")
- SDN_CONTROLLER_IP=$(juju status odl-controller/0 |grep public-address|sed -- 's/.*\: //')
- ;;
- "onos")
- SDN_CONTROLLER_IP=$(juju status onos-controller/0 |grep public-address|sed -- 's/.*\: //')
- ;;
- *)
- SDN_CONTROLLER_IP='none'
- ;;
-esac
-SDN_PASSWORD='admin'
-
-# export the openrc file by getting the one generated by joid and add SDN
-# controller for Functest
-cp ./cloud/admin-openrc $JOID_ADMIN_OPENRC
-cat << EOF >> $JOID_ADMIN_OPENRC
-export SDN_CONTROLLER=$SDN_CONTROLLER_IP
-export SDN_PASSWORD=$SDN_PASSWORD
-EOF
-
-##
-## Backup local juju env
-##
+if [ "$JOID_MODEL" == 'os' ]; then
+ echo "------ Deploy with juju ------"
+    echo "Execute: ./deploy.sh -m openstack -t $HA_MODE -o $OS_RELEASE -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO -f $NFV_FEATURES"
+
+ ./deploy.sh -m openstack -t $HA_MODE -o $OS_RELEASE -s $SDN_CONTROLLER -l $POD_NAME -d $UBUNTU_DISTRO -f $NFV_FEATURES
+ exit_on_error $? "Main deploy FAILED"
+
+ JOID_ADMIN_OPENRC=$LAB_CONFIG/admin-openrc
+ echo "------ Create OpenRC file [$JOID_ADMIN_OPENRC] ------"
+
+ # get controller IP
+ case "$SDN_CONTROLLER" in
+ "odl")
+ SDN_CONTROLLER_IP=$(juju status odl-controller/0 |grep public-address|sed -- 's/.*\: //')
+ ;;
+ "onos")
+ SDN_CONTROLLER_IP=$(juju status onos-controller/0 |grep public-address|sed -- 's/.*\: //')
+ ;;
+ *)
+ SDN_CONTROLLER_IP='none'
+ ;;
+ esac
+ SDN_PASSWORD='admin'
+
+ # export the openrc file by getting the one generated by joid and add SDN
+ # controller for Functest
+ # cp ./cloud/admin-openrc $JOID_ADMIN_OPENRC
+ echo export SDN_CONTROLLER=$SDN_CONTROLLER_IP >> $JOID_ADMIN_OPENRC
+ echo export SDN_PASSWORD=$SDN_PASSWORD >> $JOID_ADMIN_OPENRC
-echo "------ Backup Juju environment ------"
-cp environments.yaml $LAB_CONFIG/
-cp deployment.yaml $LAB_CONFIG/
-if [ -e deployconfig.yaml ]; then
- cp deployconfig.yaml $LAB_CONFIG
fi
##
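
The scenario parser relies on appending '--' so the options array always has at least four entries even for short scenario names, which avoids nounset errors; a worked example:

    #!/bin/bash
    set -u
    DEPLOY_SCENARIO=k8-nosdn-lb-noha
    IFS='-' read -r -a DEPLOY_OPTIONS <<< "${DEPLOY_SCENARIO}--"
    echo "model=${DEPLOY_OPTIONS[0]} controller=${DEPLOY_OPTIONS[1]}"
    echo "feature=${DEPLOY_OPTIONS[2]} mode=${DEPLOY_OPTIONS[3]}"
    # model=k8 controller=nosdn feature=lb mode=noha
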
diff --git a/jjb/joid/joid-verify-jobs.yml b/jjb/joid/joid-verify-jobs.yml
index 6e821a502..7b8ce7701 100644
--- a/jjb/joid/joid-verify-jobs.yml
+++ b/jjb/joid/joid-verify-jobs.yml
@@ -45,6 +45,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 4
@@ -93,7 +94,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'joid-virtual-defaults'
@@ -107,7 +107,7 @@
- name: 'joid-verify-basic-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -120,7 +120,7 @@
- name: 'joid-verify-deploy-virtual-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -134,7 +134,7 @@
- name: 'joid-verify-smoke-test-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
@@ -150,6 +150,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 4
@@ -174,7 +175,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{installer}-defaults'
- '{slave-label}-defaults'
diff --git a/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh b/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
index 6f8fff3ff..56fb4f9c1 100755
--- a/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
+++ b/jjb/kvmfornfv/kvmfornfv-upload-artifact.sh
@@ -11,16 +11,17 @@ fi
case "$JOB_TYPE" in
verify)
- OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
- GS_UPLOAD_LOCATION="gs://artifacts.opnfv.org/$PROJECT/review/$GERRIT_CHANGE_NUMBER"
- echo "Removing outdated artifacts produced for the previous patch for the change $GERRIT_CHANGE_NUMBER"
- gsutil ls $GS_UPLOAD_LOCATION > /dev/null 2>&1 && gsutil rm -r $GS_UPLOAD_LOCATION
- echo "Uploading artifacts for the change $GERRIT_CHANGE_NUMBER. This could take some time..."
- ;;
+ OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
+ GS_UPLOAD_LOCATION="gs://artifacts.opnfv.org/$PROJECT/review/$GERRIT_CHANGE_NUMBER"
+ echo "Removing outdated artifacts produced for the previous patch for the change $GERRIT_CHANGE_NUMBER"
+ gsutil ls $GS_UPLOAD_LOCATION > /dev/null 2>&1 && gsutil rm -r $GS_UPLOAD_LOCATION
+ echo "Uploading artifacts for the change $GERRIT_CHANGE_NUMBER. This could take some time..."
+ ;;
daily)
echo "Uploading daily artifacts This could take some time..."
OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
GS_UPLOAD_LOCATION="gs://$GS_URL/$OPNFV_ARTIFACT_VERSION"
+ GS_LOG_LOCATION="gs://$GS_URL/logs-$(date -u +"%Y-%m-%d")"/
;;
*)
echo "Artifact upload is not enabled for $JOB_TYPE jobs"
@@ -38,10 +39,23 @@ esac
source $WORKSPACE/opnfv.properties
# upload artifacts
-gsutil cp -r $WORKSPACE/build_output/* $GS_UPLOAD_LOCATION > $WORKSPACE/gsutil.log 2>&1
-gsutil -m setmeta -r \
- -h "Cache-Control:private, max-age=0, no-transform" \
- $GS_UPLOAD_LOCATION > /dev/null 2>&1
+if [[ "$PHASE" == "build" ]]; then
+ gsutil cp -r $WORKSPACE/build_output/* $GS_UPLOAD_LOCATION > $WORKSPACE/gsutil.log 2>&1
+ gsutil -m setmeta -r \
+ -h "Cache-Control:private, max-age=0, no-transform" \
+ $GS_UPLOAD_LOCATION > /dev/null 2>&1
+else
+ if [[ "$JOB_TYPE" == "daily" ]]; then
+ log_dir=$WORKSPACE/build_output/log
+ if [[ -d "$log_dir" ]]; then
+ # Uploading logs to artifacts
+ echo "Uploading artifacts for future debugging needs..."
+ gsutil cp -r $WORKSPACE/build_output/log-*.tar.gz $GS_LOG_LOCATION > $WORKSPACE/gsutil.log 2>&1
+ else
+ echo "No test logs/artifacts available for uploading"
+ fi
+ fi
+fi
# upload metadata file for the artifacts built by daily job
if [[ "$JOB_TYPE" == "daily" ]]; then
diff --git a/jjb/kvmfornfv/kvmfornfv.yml b/jjb/kvmfornfv/kvmfornfv.yml
index 60f8de8c4..a782ee0fa 100644
--- a/jjb/kvmfornfv/kvmfornfv.yml
+++ b/jjb/kvmfornfv/kvmfornfv.yml
@@ -19,7 +19,7 @@
- 'build':
slave-label: 'opnfv-build-ubuntu'
- 'test':
- slave-label: 'intel-pod1'
+ slave-label: 'intel-pod10'
#####################################
# patch verification phases
#####################################
@@ -48,6 +48,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 3
@@ -56,7 +57,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
@@ -90,25 +90,25 @@
- name: 'kvmfornfv-verify-build-{stream}'
current-parameters: false
predefined-parameters: |
- GERRIT_BRANCH=$GERRIT_BRANCH
+ BRANCH=$BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: test
+ condition: SUCCESSFUL
+ projects:
+ - name: 'kvmfornfv-verify-test-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ BRANCH=$BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
node-parameters: false
kill-phase-on: FAILURE
abort-all-job: true
-# - multijob:
-# name: test
-# condition: SUCCESSFUL
-# projects:
-# - name: 'kvmfornfv-verify-test-{stream}'
-# current-parameters: false
-# predefined-parameters: |
-# GERRIT_BRANCH=$GERRIT_BRANCH
-# GERRIT_REFSPEC=$GERRIT_REFSPEC
-# GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
-# node-parameters: false
-# kill-phase-on: FAILURE
-# abort-all-job: true
- job-template:
name: 'kvmfornfv-verify-{phase}-{stream}'
@@ -127,11 +127,14 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{slave-label}-defaults'
- 'kvmfornfv-defaults':
gs-pathname: '{gs-pathname}'
+ - string:
+ name: PHASE
+ default: '{phase}'
+ description: "Execution of kvmfornfv daily '{phase}' job ."
builders:
- description-setter:
@@ -146,7 +149,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
- 'kvmfornfv-defaults':
@@ -185,7 +187,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
- 'kvmfornfv-defaults':
@@ -194,8 +195,8 @@
scm:
- git-scm
-# triggers:
-# - timed: '@midnight'
+ triggers:
+ - timed: '@midnight'
builders:
- description-setter:
@@ -259,7 +260,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{slave-label}-defaults'
- 'kvmfornfv-defaults':
@@ -268,6 +268,10 @@
name: TEST_NAME
default: '{testname}'
description: "Daily job to execute kvmfornfv '{testname}' testcase."
+ - string:
+ name: PHASE
+ default: '{phase}'
+ description: "Execution of kvmfornfv daily '{phase}' job ."
builders:
- description-setter:
@@ -304,6 +308,8 @@
!include-raw: ./kvmfornfv-download-artifact.sh
- shell:
!include-raw: ./kvmfornfv-test.sh
+ - shell:
+ !include-raw: ./kvmfornfv-upload-artifact.sh
- builder:
name: 'kvmfornfv-packet_forward-daily-build-macro'
builders:
diff --git a/jjb/models/models.yml b/jjb/models/models.yml
new file mode 100644
index 000000000..f419c8821
--- /dev/null
+++ b/jjb/models/models.yml
@@ -0,0 +1,67 @@
+###################################################
+# All the jobs except verify have been removed!
+# They will only be enabled on request by projects!
+###################################################
+- project:
+ name: models
+
+ project: '{name}'
+
+ jobs:
+ - 'models-verify-{stream}'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+ - danube:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: false
+
+- job-template:
+ name: 'models-verify-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**|.gitignore'
+
+ builders:
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o nounset
+ set -o pipefail
+
+ shellcheck -f tty tests/*.sh
diff --git a/jjb/moon/moon.yml b/jjb/moon/moon.yml
index 15c3ddec8..a318bc54d 100644
--- a/jjb/moon/moon.yml
+++ b/jjb/moon/moon.yml
@@ -17,7 +17,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
diff --git a/jjb/multisite/fuel-deploy-for-multisite.sh b/jjb/multisite/fuel-deploy-for-multisite.sh
new file mode 100755
index 000000000..d8b40517c
--- /dev/null
+++ b/jjb/multisite/fuel-deploy-for-multisite.sh
@@ -0,0 +1,121 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 Ericsson AB and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+set -o nounset
+set -o pipefail
+
+# do not continue with the deployment if FRESH_INSTALL is not requested
+if [[ "$FRESH_INSTALL" == "true" ]]; then
+ echo "Fresh install requested. Proceeding with the installation."
+else
+ echo "Fresh install is not requested. Skipping the installation."
+ exit 0
+fi
+
+export TERM="vt220"
+
+# get the latest successful job console log and extract the properties filename
+FUEL_DEPLOY_BUILD_URL="https://build.opnfv.org/ci/job/fuel-deploy-virtual-daily-master/lastSuccessfulBuild/consoleText"
+FUEL_PROPERTIES_FILE=$(curl -s -L ${FUEL_DEPLOY_BUILD_URL} | grep 'ISO:' | awk '{print $2}' | sed 's/iso/properties/g')
+if [[ -z "FUEL_PROPERTIES_FILE" ]]; then
+ echo "Unable to extract the url to Fuel ISO properties from ${FUEL_DEPLOY_URL}"
+ exit 1
+fi
+curl -L -s -o $WORKSPACE/latest.properties http://artifacts.opnfv.org/fuel/$FUEL_PROPERTIES_FILE
+
+# source the file so we get OPNFV vars
+source latest.properties
+
+# echo the info about artifact that is used during the deployment
+echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
+
+# download the iso
+echo "Downloading the ISO using the link http://$OPNFV_ARTIFACT_URL"
+curl -L -s -o $WORKSPACE/opnfv.iso http://$OPNFV_ARTIFACT_URL > gsutil.iso.log 2>&1
+
+
+# set deployment parameters
+DEPLOY_SCENARIO="os-nosdn-nofeature-noha"
+BRIDGE=${BRIDGE:-pxebr}
+LAB_NAME=${NODE_NAME/-*}
+POD_NAME=${NODE_NAME/*-}
+
+if [[ "$NODE_NAME" =~ "virtual" ]]; then
+ POD_NAME="virtual_kvm"
+fi
+
+# we currently support ericsson, intel, lf and zte labs
+if [[ ! "$LAB_NAME" =~ (ericsson|intel|lf|zte) ]]; then
+ echo "Unsupported/unidentified lab $LAB_NAME. Cannot continue!"
+ exit 1
+else
+ echo "Using configuration for $LAB_NAME"
+fi
+
+# create TMPDIR if it doesn't exist
+export TMPDIR=$HOME/tmpdir
+mkdir -p $TMPDIR
+
+# change permissions down to TMPDIR
+chmod a+x $HOME
+chmod a+x $TMPDIR
+
+# clone fuel repo and checkout the sha1 that corresponds to the ISO
+echo "Cloning fuel repo"
+git clone https://gerrit.opnfv.org/gerrit/p/fuel.git fuel
+cd $WORKSPACE/fuel
+echo "Checking out $OPNFV_GIT_SHA1"
+git checkout $OPNFV_GIT_SHA1 --quiet
+
+# clone the securedlab repo
+cd $WORKSPACE
+echo "Cloning securedlab repo ${GIT_BRANCH##origin/}"
+git clone ssh://jenkins-ericsson@gerrit.opnfv.org:29418/securedlab --quiet \
+ --branch ${GIT_BRANCH##origin/}
+
+# log file name
+FUEL_LOG_FILENAME="${JOB_NAME}_${BUILD_NUMBER}.log.tar.gz"
+
+# construct the command
+DEPLOY_COMMAND="sudo $WORKSPACE/fuel/ci/deploy.sh -b file://$WORKSPACE/securedlab \
+ -l $LAB_NAME -p $POD_NAME -s $DEPLOY_SCENARIO -i file://$WORKSPACE/opnfv.iso \
+ -H -B $BRIDGE -S $TMPDIR -L $WORKSPACE/$FUEL_LOG_FILENAME"
+
+# log info to console
+echo "Deployment parameters"
+echo "--------------------------------------------------------"
+echo "Scenario: $DEPLOY_SCENARIO"
+echo "Lab: $LAB_NAME"
+echo "POD: $POD_NAME"
+echo "ISO: ${OPNFV_ARTIFACT_URL/*\/}"
+echo
+echo "Starting the deployment using $INSTALLER_TYPE. This could take some time..."
+echo "--------------------------------------------------------"
+echo
+
+# start the deployment
+echo "Issuing command"
+echo "$DEPLOY_COMMAND"
+echo
+
+$DEPLOY_COMMAND
+exit_code=$?
+
+echo
+echo "--------------------------------------------------------"
+echo "Deployment is done!"
+
+if [[ $exit_code -ne 0 ]]; then
+ echo "Deployment failed!"
+ exit $exit_code
+else
+ echo "Deployment is successful!"
+ exit 0
+fi
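A standalone illustration of the console-log scrape at the top of this script, assuming the Fuel job log contains a line of the form "ISO: opnfv-<timestamp>.iso" (the sample value below is made up):

#!/bin/bash
# Hypothetical sample of the 'ISO:' line scraped from the console log.
sample_line="ISO: opnfv-2017-02-01_12-00-00.iso"
# Same pipeline as in the script: take the second field and swap the iso
# suffix for properties. Note that 's/iso/properties/g' replaces every
# occurrence of "iso", which is safe only while the name contains it once.
props=$(echo "$sample_line" | grep 'ISO:' | awk '{print $2}' | sed 's/iso/properties/g')
echo "$props"   # -> opnfv-2017-02-01_12-00-00.properties
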
diff --git a/jjb/multisite/multisite-daily-jobs.yml b/jjb/multisite/multisite-daily-jobs.yml
index cfb40a1fa..6b022fd75 100644
--- a/jjb/multisite/multisite-daily-jobs.yml
+++ b/jjb/multisite/multisite-daily-jobs.yml
@@ -8,19 +8,23 @@
- 'multisite-{phase}-{stream}'
phase:
- - 'fuel-deploy-regionone-virtual'
- - 'fuel-deploy-regiontwo-virtual'
- - 'register-endpoints'
- - 'update-auth'
- - 'kingbird-deploy-virtual'
- - 'kingbird-functest'
+ - 'fuel-deploy-regionone-virtual':
+ slave-label: ericsson-virtual12
+ - 'fuel-deploy-regiontwo-virtual':
+ slave-label: ericsson-virtual13
+ - 'register-endpoints':
+ slave-label: ericsson-virtual12
+ - 'update-auth':
+ slave-label: ericsson-virtual13
+ - 'kingbird-deploy-virtual':
+ slave-label: ericsson-virtual12
stream:
- master:
branch: '{stream}'
gs-pathname: ''
disabled: false
- timed: '#@midnight'
+ timed: '@midnight'
- job-template:
name: 'multisite-kingbird-virtual-daily-{stream}'
@@ -34,13 +38,16 @@
parameters:
- project-parameter:
project: '{project}'
+ branch: '{branch}'
+ - choice:
+ name: FRESH_INSTALL
+ choices:
+ - 'true'
+ - 'false'
- string:
name: KINGBIRD_LOG_FILE
default: $WORKSPACE/kingbird.log
- - 'multisite-virtual-defaults'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-multisite-noha'
+ - 'opnfv-build-defaults'
triggers:
- timed: '{timed}'
@@ -56,26 +63,28 @@
current-parameters: false
predefined-parameters: |
FUEL_VERSION=latest
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ DEPLOY_SCENARIO=os-nosdn-nofeature-noha
OS_REGION=RegionOne
- REGIONONE_IP=10.2.117.79
- REGIONTWO_IP=10.2.117.181
+ REGIONONE_IP=100.64.209.10
+ REGIONTWO_IP=100.64.209.11
+ FRESH_INSTALL=$FRESH_INSTALL
node-parameters: false
node-label-name: SLAVE_LABEL
- node-label: intel-virtual2
+ node-label: ericsson-virtual12
kill-phase-on: FAILURE
abort-all-job: true
- name: 'multisite-fuel-deploy-regiontwo-virtual-{stream}'
current-parameters: false
predefined-parameters: |
FUEL_VERSION=latest
- DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ DEPLOY_SCENARIO=os-nosdn-nofeature-noha
OS_REGION=RegionTwo
- REGIONONE_IP=10.2.117.79
- REGIONTWO_IP=10.2.117.181
+ REGIONONE_IP=100.64.209.10
+ REGIONTWO_IP=100.64.209.11
+ FRESH_INSTALL=$FRESH_INSTALL
node-parameters: false
node-label-name: SLAVE_LABEL
- node-label: intel-virtual6
+ node-label: ericsson-virtual13
kill-phase-on: FAILURE
abort-all-job: true
- multijob:
@@ -86,22 +95,24 @@
current-parameters: false
predefined-parameters: |
OS_REGION=RegionOne
- REGIONONE_IP=10.2.117.79
- REGIONTWO_IP=10.2.117.181
+ REGIONONE_IP=100.64.209.10
+ REGIONTWO_IP=100.64.209.11
+ FRESH_INSTALL=$FRESH_INSTALL
node-parameters: false
node-label-name: SLAVE_LABEL
- node-label: intel-virtual2
+ node-label: ericsson-virtual12
kill-phase-on: FAILURE
abort-all-job: true
- name: 'multisite-update-auth-{stream}'
current-parameters: false
predefined-parameters: |
OS_REGION=RegionTwo
- REGIONONE_IP=10.2.117.79
- REGIONTWO_IP=10.2.117.181
+ REGIONONE_IP=100.64.209.10
+ REGIONTWO_IP=100.64.209.11
+ FRESH_INSTALL=$FRESH_INSTALL
node-parameters: false
node-label-name: SLAVE_LABEL
- node-label: intel-virtual6
+ node-label: ericsson-virtual13
kill-phase-on: FAILURE
abort-all-job: true
- multijob:
@@ -112,26 +123,30 @@
current-parameters: false
predefined-parameters: |
OS_REGION=RegionOne
- REGIONONE_IP=10.2.117.79
- REGIONTWO_IP=10.2.117.181
+ REGIONONE_IP=100.64.209.10
+ REGIONTWO_IP=100.64.209.11
+ FRESH_INSTALL=$FRESH_INSTALL
node-parameters: false
node-label-name: SLAVE_LABEL
- node-label: intel-virtual2
+ node-label: ericsson-virtual12
kill-phase-on: FAILURE
abort-all-job: true
- multijob:
name: kingbird-functest
condition: SUCCESSFUL
projects:
- - name: 'multisite-kingbird-functest-{stream}'
+ - name: 'functest-fuel-virtual-suite-{stream}'
current-parameters: false
predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-multisite-noha
+ FUNCTEST_SUITE_NAME=multisite
OS_REGION=RegionOne
- REGIONONE_IP=10.2.117.79
- REGIONTWO_IP=10.2.117.181
+ REGIONONE_IP=100.64.209.10
+ REGIONTWO_IP=100.64.209.11
+ FRESH_INSTALL=$FRESH_INSTALL
node-parameters: false
node-label-name: SLAVE_LABEL
- node-label: intel-virtual2
+ node-label: ericsson-virtual12
kill-phase-on: NEVER
abort-all-job: false
@@ -140,6 +155,28 @@
concurrent: false
+ disabled: '{obj:disabled}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: KINGBIRD_LOG_FILE
+ default: $WORKSPACE/kingbird.log
+ - 'fuel-defaults'
+ - '{slave-label}-defaults'
+ - choice:
+ name: FRESH_INSTALL
+ choices:
+ - 'true'
+ - 'false'
+
+ scm:
+ - git-scm
+
builders:
- description-setter:
description: "Built on $NODE_NAME"
@@ -155,39 +192,57 @@
- builder:
name: 'multisite-fuel-deploy-regionone-virtual-builder'
builders:
+ - shell:
+ !include-raw-escape: ./fuel-deploy-for-multisite.sh
- shell: |
#!/bin/bash
echo "This is where we deploy fuel, extract passwords and save into file"
+
+ cd $WORKSPACE/tools/keystone/
+ ./run.sh -t controller -r fetchpass.sh -o servicepass.ini
+
- builder:
name: 'multisite-fuel-deploy-regiontwo-virtual-builder'
builders:
+ - shell:
+ !include-raw-escape: ./fuel-deploy-for-multisite.sh
- shell: |
#!/bin/bash
echo "This is where we deploy fuel, extract publicUrl, privateUrl, and adminUrl and save into file"
+
+ cd $WORKSPACE/tools/keystone/
+ ./run.sh -t controller -r endpoint.sh -o endpoints.ini
- builder:
name: 'multisite-register-endpoints-builder'
builders:
- copyartifact:
project: 'multisite-fuel-deploy-regiontwo-virtual-{stream}'
which-build: multijob-build
- filter: "RegionTwo-Endpoints.txt"
+ filter: "endpoints.ini"
- shell: |
#!/bin/bash
- echo "This is where we register RegionTwo in RegionOne keystone"
+ echo "This is where we register RegionTwo in RegionOne keystone using endpoints.ini"
+
+ cd $WORKSPACE/tools/keystone/
+ ./run.sh -t controller -r region.sh -d $WORKSPACE/endpoints.ini
- builder:
name: 'multisite-update-auth-builder'
builders:
- copyartifact:
project: 'multisite-fuel-deploy-regionone-virtual-{stream}'
which-build: multijob-build
- filter: "RegionOne-Passwords.txt"
+ filter: "servicepass.ini"
- shell: |
#!/bin/bash
- echo "This is where we read passwords from RegionOne-passwords.txt and replace passwords in RegionTwo"
+ echo "This is where we read passwords from servicepass.ini and replace passwords in RegionTwo"
+
+ cd $WORKSPACE/tools/keystone/
+ ./run.sh -t controller -r writepass.sh -d $WORKSPACE/servicepass.ini
+ ./run.sh -t compute -r writepass.sh -d $WORKSPACE/servicepass.ini
- builder:
name: 'multisite-kingbird-deploy-virtual-builder'
builders:
@@ -195,13 +250,8 @@
#!/bin/bash
echo "This is where we install kingbird"
-- builder:
- name: 'multisite-kingbird-functest-builder'
- builders:
- - shell: |
- #!/bin/bash
-
- echo "This is where we run kingbird-functest"
+ cd $WORKSPACE/tools/kingbird
+ ./deploy.sh
########################
# publisher macros
########################
@@ -209,7 +259,7 @@
name: 'multisite-fuel-deploy-regionone-virtual-publisher'
publishers:
- archive:
- artifacts: '/root/servicepass.ini'
+ artifacts: 'servicepass.ini'
allow-empty: false
only-if-success: true
fingerprint: true
@@ -217,7 +267,7 @@
name: 'multisite-fuel-deploy-regiontwo-virtual-publisher'
publishers:
- archive:
- artifacts: '/root/endpoints.ini'
+ artifacts: 'endpoints.ini'
allow-empty: false
only-if-success: true
fingerprint: true
diff --git a/jjb/multisite/multisite-verify-jobs.yml b/jjb/multisite/multisite-verify-jobs.yml
new file mode 100644
index 000000000..5ecfafb55
--- /dev/null
+++ b/jjb/multisite/multisite-verify-jobs.yml
@@ -0,0 +1,68 @@
+###################################################
+# All the jobs except verify have been removed!
+# They will only be enabled on request by projects!
+###################################################
+- project:
+ name: multisite
+
+ project: '{name}'
+
+ jobs:
+ - 'multisite-verify-{stream}'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+ timed: '@midnight'
+ - danube:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: true
+ timed: ''
+
+- job-template:
+ name: 'multisite-verify-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ concurrent: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**|.gitignore'
+
+ builders:
+ - shell: |
+ #!/bin/bash
+
+ echo "Hello World"
diff --git a/jjb/multisite/multisite.yml b/jjb/multisite/multisite.yml
deleted file mode 100644
index 6b6406983..000000000
--- a/jjb/multisite/multisite.yml
+++ /dev/null
@@ -1,149 +0,0 @@
-###################################################
-# All the jobs except verify have been removed!
-# They will only be enabled on request by projects!
-###################################################
-- project:
- name: multisite
-
- project: '{name}'
-
- jobs:
- - 'multisite-verify-{stream}'
- - 'multisite-kingbird-daily-{stream}'
- - 'multisite-kingbird-deploy-{stream}'
-
- stream:
- - master:
- branch: '{stream}'
- gs-pathname: ''
- disabled: false
- timed: '@midnight'
- - danube:
- branch: 'stable/{stream}'
- gs-pathname: '/{stream}'
- disabled: false
- timed: ''
-
-- job-template:
- name: 'multisite-verify-{stream}'
-
- disabled: '{obj:disabled}'
-
- concurrent: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- - gerrit-parameter:
- branch: '{branch}'
- - 'opnfv-build-ubuntu-defaults'
-
- scm:
- - git-scm-gerrit
-
- triggers:
- - gerrit:
- server-name: 'gerrit.opnfv.org'
- trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - draft-published-event
- - comment-added-contains-event:
- comment-contains-value: 'recheck'
- - comment-added-contains-event:
- comment-contains-value: 'reverify'
- projects:
- - project-compare-type: 'ANT'
- project-pattern: '{project}'
- branches:
- - branch-compare-type: 'ANT'
- branch-pattern: '**/{branch}'
- forbidden-file-paths:
- - compare-type: ANT
- pattern: 'docs/**|.gitignore'
-
- builders:
- - shell: |
- #!/bin/bash
-
- echo "Hello World"
-
-- job-template:
- name: 'multisite-kingbird-daily-{stream}'
-
- project-type: freestyle
-
- disabled: '{obj:disabled}'
-
- concurrent: false
-
- parameters:
- - project-parameter:
- project: '{project}'
- - gerrit-parameter:
- branch: '{branch}'
- - string:
- name: KINGBIRD_LOG_FILE
- default: $WORKSPACE/kingbird.log
- - 'intel-virtual6-defaults'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-multisite-ha'
-
- scm:
- - git-scm
-
- triggers:
- - timed: '{timed}'
-
- builders:
- - trigger-builds:
- - project: 'multisite-kingbird-deploy-{stream}'
- current-parameters: true
- same-node: true
- block: true
- - trigger-builds:
- - project: 'functest-fuel-virtual-suite-{stream}'
- current-parameters: true
- predefined-parameters:
- FUNCTEST_SUITE_NAME=multisite
- same-node: true
- block: true
- block-thresholds:
- build-step-failure-threshold: 'never'
- failure-threshold: 'never'
- unstable-threshold: 'FAILURE'
-
-- job-template:
- name: 'multisite-kingbird-deploy-{stream}'
-
- concurrent: false
-
- scm:
- - git-scm-gerrit
-
- builders:
- - 'multisite-kingbird-deploy'
- - 'multisite-kingbird-log-upload'
-
-########################
-# builder macros
-########################
-- builder:
- name: 'multisite-kingbird-deploy'
- builders:
- - shell: |
- #!/bin/bash
-
- $WORKSPACE/tools/kingbird/deploy.sh
-- builder:
- name: 'multisite-kingbird-log-upload'
- builders:
- - shell: |
- #!/bin/bash
-
- echo "Here is where we upload kingbird logs to artifact repo"
- echo "We just check the existence of log file"
- ls -al $KINGBIRD_LOG_FILE
diff --git a/jjb/netready/netready.yml b/jjb/netready/netready.yml
index 3d043f9e3..382434ae6 100644
--- a/jjb/netready/netready.yml
+++ b/jjb/netready/netready.yml
@@ -21,7 +21,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
scm:
- git-scm-gerrit
@@ -65,7 +64,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
- 'netready-parameter':
diff --git a/jjb/octopus/octopus.yml b/jjb/octopus/octopus.yml
index b820ecb0e..cb66112fe 100644
--- a/jjb/octopus/octopus.yml
+++ b/jjb/octopus/octopus.yml
@@ -27,7 +27,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
diff --git a/jjb/onosfw/onosfw.yml b/jjb/onosfw/onosfw.yml
index d3a845403..13c96718c 100644
--- a/jjb/onosfw/onosfw.yml
+++ b/jjb/onosfw/onosfw.yml
@@ -31,7 +31,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
@@ -72,7 +71,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
@@ -96,7 +94,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
- string:
diff --git a/jjb/openretriever/openretriever-project.yml b/jjb/openretriever/openretriever-project.yml
new file mode 100644
index 000000000..3d53f9b2e
--- /dev/null
+++ b/jjb/openretriever/openretriever-project.yml
@@ -0,0 +1,62 @@
+###################################################
+# All the jobs except verify have been removed!
+# They will only be enabled on request by projects!
+###################################################
+- project:
+ name: openretriever
+
+ project: '{name}'
+
+ jobs:
+ - 'openretriever-verify-{stream}'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+ - danube:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: false
+
+- job-template:
+ name: 'openretriever-verify-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**|.gitignore'
+
+ builders:
+ - shell: |
+ echo "Nothing to verify!"
diff --git a/jjb/opera/opera-daily-jobs.yml b/jjb/opera/opera-daily-jobs.yml
index 556d59fcb..d49caf1a6 100644
--- a/jjb/opera/opera-daily-jobs.yml
+++ b/jjb/opera/opera-daily-jobs.yml
@@ -38,6 +38,7 @@
concurrent: false
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 1
@@ -51,7 +52,7 @@
- ssh-agent-wrapper
- timeout:
- timeout: 120
+ timeout: 240
fail: true
triggers:
@@ -60,9 +61,8 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- - 'huawei-pod7-defaults'
+ - 'huawei-virtual7-defaults'
builders:
- description-setter:
@@ -80,8 +80,11 @@
name: deploy
condition: SUCCESSFUL
projects:
- - name: 'opera-daily-deploy-{stream}'
- current-parameters: true
+ - name: 'compass-deploy-virtual-daily-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-openo-noha
+ COMPASS_OS_VERSION=xenial
node-parameters: true
kill-phase-on: FAILURE
abort-all-job: true
@@ -90,7 +93,7 @@
# condition: SUCCESSFUL
# projects:
# - name: 'functest-compass-baremetal-suite-{stream}'
-# current-parameters: true
+# current-parameters: false
# predefined-parameters:
# FUNCTEST_SUITE_NAME=opera
# node-parameters: true
@@ -105,6 +108,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-per-node: 1
diff --git a/jjb/opera/opera-project-jobs.yml b/jjb/opera/opera-project-jobs.yml
index 19f066b5f..38efbc159 100644
--- a/jjb/opera/opera-project-jobs.yml
+++ b/jjb/opera/opera-project-jobs.yml
@@ -21,6 +21,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 1
@@ -30,9 +31,8 @@
parameters:
- project-parameter:
project: '{project}'
- - 'opnfv-build-ubuntu-defaults'
- - gerrit-parameter:
branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
scm:
- git-scm
diff --git a/jjb/opera/opera-verify-jobs.yml b/jjb/opera/opera-verify-jobs.yml
index 0e9dba01d..b7b5cb3c9 100644
--- a/jjb/opera/opera-verify-jobs.yml
+++ b/jjb/opera/opera-verify-jobs.yml
@@ -38,6 +38,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-total: 1
@@ -83,7 +84,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'huawei-pod7-defaults'
@@ -117,6 +117,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-per-node: 1
diff --git a/jjb/opnfvdocs/docs-post-rtd.sh b/jjb/opnfvdocs/docs-post-rtd.sh
new file mode 100644
index 000000000..7faa26f38
--- /dev/null
+++ b/jjb/opnfvdocs/docs-post-rtd.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+if [ $GERRIT_BRANCH == "master" ]; then
+ RTD_BUILD_VERSION=latest
+else
+ RTD_BUILD_VERSION=${{GERRIT_BRANCH/\//-}}
+fi
+curl -X POST --data "version_slug=$RTD_BUILD_VERSION" https://readthedocs.org/build/{rtdproject}
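The double braces in ${{GERRIT_BRANCH/\//-}} are JJB template escaping; after templating they collapse to a plain bash substitution that turns a branch name into a Read the Docs version slug. A standalone sketch with an assumed branch name:

#!/bin/bash
# Post-templating form of the slug derivation; the branch is an example.
GERRIT_BRANCH="stable/danube"
RTD_BUILD_VERSION=${GERRIT_BRANCH/\//-}   # first '/' becomes '-'
echo "$RTD_BUILD_VERSION"                 # -> stable-danube
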
diff --git a/jjb/opnfvdocs/docs-rtd.yaml b/jjb/opnfvdocs/docs-rtd.yaml
new file mode 100644
index 000000000..151b53550
--- /dev/null
+++ b/jjb/opnfvdocs/docs-rtd.yaml
@@ -0,0 +1,85 @@
+- project:
+ name: docs-rtd
+ jobs:
+ - 'docs-merge-rtd-{stream}'
+ - 'docs-verify-rtd-{stream}'
+
+ stream:
+ - master:
+ branch: 'master'
+
+ project: 'opnfvdocs'
+ rtdproject: 'opnfv'
+ # TODO: Archive Artifacts
+
+- job-template:
+ name: 'docs-merge-rtd-{stream}'
+
+ project-type: freestyle
+
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'lf-build1'
+ description: 'Slave label on Jenkins'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/releng
+ description: 'Git URL to use on this Jenkins Slave'
+ scm:
+ - git-scm
+
+ triggers:
+ - gerrit-trigger-change-merged
+
+ builders:
+ - shell: !include-raw: docs-post-rtd.sh
+
+- job-template:
+ name: 'docs-verify-rtd-{stream}'
+
+ project-type: freestyle
+
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'lf-build1'
+ description: 'Slave label on Jenkins'
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/opnfvdocs
+ description: 'Git URL to use on this Jenkins Slave'
+ scm:
+ - git-scm-with-submodules:
+ branch: '{branch}'
+
+ triggers:
+ - gerrit-trigger-patchset-created:
+ server: 'gerrit.opnfv.org'
+ project: '**'
+ branch: '{branch}'
+ files: 'docs/**/*.rst'
+ - timed: 'H H * * *'
+
+ builders:
+ - shell: |
+ if [ "$GERRIT_PROJECT" != "opnfvdocs" ]; then
+ cd docs/submodules/$GERRIT_PROJECT
+ git fetch origin $GERRIT_REFSPEC && git checkout FETCH_HEAD
+ else
+ git fetch origin $GERRIT_REFSPEC && git checkout FETCH_HEAD
+ fi
+ - shell: |
+ sudo pip install virtualenv
+ virtualenv $WORKSPACE/venv
+ source $WORKSPACE/venv/bin/activate
+ pip install --upgrade pip
+ pip freeze
+ pip install tox
+ tox -edocs
diff --git a/jjb/opnfvdocs/opnfvdocs.yml b/jjb/opnfvdocs/opnfvdocs.yml
index 0d4c46199..12950338d 100644
--- a/jjb/opnfvdocs/opnfvdocs.yml
+++ b/jjb/opnfvdocs/opnfvdocs.yml
@@ -34,7 +34,6 @@
parameters:
- project-parameter:
project: $GERRIT_PROJECT
- - gerrit-parameter:
branch: '{branch}'
- string:
name: GIT_CLONE_BASE
@@ -75,7 +74,6 @@
parameters:
- project-parameter:
project: $GERRIT_PROJECT
- - gerrit-parameter:
branch: '{branch}'
- string:
name: GIT_CLONE_BASE
@@ -87,7 +85,7 @@
description: "Directory where the build artifact will be located upon the completion of the build."
scm:
- - git-scm-gerrit
+ - git-scm
triggers:
- gerrit:
@@ -114,6 +112,7 @@
parameters:
- project-parameter:
project: '{project}'
+ branch: '{branch}'
- string:
name: GS_URL
default: '$GS_BASE{gs-pathname}'
@@ -122,10 +121,6 @@
name: GIT_CLONE_BASE
default: ssh://gerrit.opnfv.org:29418
description: "Used for overriding the GIT URL coming from parameters macro."
- - string:
- name: GERRIT_BRANCH
- default: '{branch}'
- description: 'Specify the branch in this way in order to be able to use build-opnfv-composite-docs builder.'
scm:
- git-scm
@@ -136,4 +131,3 @@
builders:
- build-html-and-pdf-docs-output
# - upload-generated-docs-to-opnfv-artifacts
-
diff --git a/jjb/opnfvdocs/project.cfg b/jjb/opnfvdocs/project.cfg
index 186e0ea74..1ea05c1d4 100644
--- a/jjb/opnfvdocs/project.cfg
+++ b/jjb/opnfvdocs/project.cfg
@@ -24,6 +24,7 @@ movie
multisite
octopus
onosfw
+openretriever
ovno
ovsnfv
parser
diff --git a/jjb/ovsnfv/ovsnfv.yml b/jjb/ovsnfv/ovsnfv.yml
index 9b2adf3a8..937a367fb 100644
--- a/jjb/ovsnfv/ovsnfv.yml
+++ b/jjb/ovsnfv/ovsnfv.yml
@@ -26,7 +26,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-centos-defaults'
- string:
@@ -73,7 +72,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-centos-defaults'
- string:
@@ -119,7 +117,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-centos-defaults'
- string:
diff --git a/jjb/parser/parser.yml b/jjb/parser/parser.yml
index de5587ed8..69fcefc20 100644
--- a/jjb/parser/parser.yml
+++ b/jjb/parser/parser.yml
@@ -28,7 +28,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
diff --git a/jjb/pharos/pharos.yml b/jjb/pharos/pharos.yml
index dbf1b92a4..6dae9f33c 100644
--- a/jjb/pharos/pharos.yml
+++ b/jjb/pharos/pharos.yml
@@ -28,7 +28,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
diff --git a/jjb/prediction/prediction.yml b/jjb/prediction/prediction.yml
index fba5741ac..b380d8c86 100644
--- a/jjb/prediction/prediction.yml
+++ b/jjb/prediction/prediction.yml
@@ -28,7 +28,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
diff --git a/jjb/promise/promise.yml b/jjb/promise/promise.yml
index a0af6f41e..a5aa302c7 100644
--- a/jjb/promise/promise.yml
+++ b/jjb/promise/promise.yml
@@ -28,7 +28,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
diff --git a/jjb/qtip/qtip-cleanup.sh b/jjb/qtip/helpers/cleanup-deploy.sh
index 95babb318..95babb318 100644
--- a/jjb/qtip/qtip-cleanup.sh
+++ b/jjb/qtip/helpers/cleanup-deploy.sh
diff --git a/jjb/qtip/qtip-daily-ci.sh b/jjb/qtip/helpers/validate-deploy.sh
index 4fdc04345..16455371f 100644
--- a/jjb/qtip/qtip-daily-ci.sh
+++ b/jjb/qtip/helpers/validate-deploy.sh
@@ -27,12 +27,7 @@ if [ $(docker ps | grep 'opnfv/qtip' | wc -l) == 0 ]; then
else
echo "The container ID is: ${container_id}"
QTIP_REPO=/home/opnfv/repos/qtip
-
- echo "Run Qtip test"
- docker exec -t ${container_id} $QTIP_REPO/docker/run_qtip.sh
-
- echo "Pushing available results to DB"
- docker exec -t ${container_id} $QTIP_REPO/docker/push_db.sh
+# TODO(yujunz): execute benchmark plan for compute-qpi
fi
echo "Qtip done!"
diff --git a/jjb/qtip/helpers/validate-setup.sh b/jjb/qtip/helpers/validate-setup.sh
new file mode 100644
index 000000000..8d84e120c
--- /dev/null
+++ b/jjb/qtip/helpers/validate-setup.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+##############################################################################
+# Copyright (c) 2017 ZTE and others.
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+set -e
+
+# setup virtualenv
+sudo pip install -U virtualenv virtualenvwrapper
+export WORKON_HOME=$HOME/.virtualenvs
+source /usr/local/bin/virtualenvwrapper.sh
+mkvirtualenv qtip
+workon qtip
+
+# setup qtip
+sudo pip install $HOME/repos/qtip
+
+# testing
+qtip --version
+qtip --help
diff --git a/jjb/qtip/qtip-ci-jobs.yml b/jjb/qtip/qtip-ci-jobs.yml
deleted file mode 100644
index 69cb32b41..000000000
--- a/jjb/qtip/qtip-ci-jobs.yml
+++ /dev/null
@@ -1,101 +0,0 @@
-####################################
-# job configuration for qtip
-####################################
-- project:
- name: qtip
-
- project: 'qtip'
-
-#--------------------------------
-# BRANCH ANCHORS
-#--------------------------------
- master: &master
- stream: master
- branch: '{stream}'
- gs-pathname: ''
- docker-tag: 'latest'
-#--------------------------------
-# POD, INSTALLER, AND BRANCH MAPPING
-#--------------------------------
-# master
-#--------------------------------
- pod:
- - zte-pod2:
- installer: fuel
- auto-trigger-name: 'qtip-daily-zte-pod2-trigger'
- <<: *master
- - zte-pod3:
- installer: fuel
- auto-trigger-name: 'qtip-daily-zte-pod3-trigger'
- <<: *master
-
-#--------------------------------
- jobs:
- - 'qtip-{installer}-{pod}-daily-{stream}'
-
-################################
-# job templates
-################################
-- job-template:
- name: 'qtip-{installer}-{pod}-daily-{stream}'
-
- disabled: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- - gerrit-parameter:
- branch: '{branch}'
- - '{installer}-defaults'
- - '{pod}-defaults'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-nofeature-ha'
- - string:
- name: DOCKER_TAG
- default: '{docker-tag}'
- description: 'Tag to pull docker image'
-
- scm:
- - git-scm
-
- triggers:
- - '{auto-trigger-name}'
-
- builders:
- - description-setter:
- description: "POD: $NODE_NAME"
- - 'qtip-cleanup'
- - 'qtip-daily-ci'
-
- publishers:
- - email:
- recipients: wu.zhihui1@zte.com.cn, zhang.yujunz@zte.com.cn
-
-###########################
-#biuilder macros
-###########################
-- builder:
- name: qtip-daily-ci
- builders:
- - shell:
- !include-raw: ./qtip-daily-ci.sh
-
-- builder:
- name: qtip-cleanup
- builders:
- - shell:
- !include-raw: ./qtip-cleanup.sh
-
-#################
-#trigger macros
-#################
-- trigger:
- name: 'qtip-daily-zte-pod2-trigger'
- triggers:
- - timed: '0 7 * * *'
-
-- trigger:
- name: 'qtip-daily-zte-pod3-trigger'
- triggers:
- - timed: '0 1 * * *'
diff --git a/jjb/qtip/qtip-validate-jobs.yml b/jjb/qtip/qtip-validate-jobs.yml
new file mode 100644
index 000000000..98f7ab90a
--- /dev/null
+++ b/jjb/qtip/qtip-validate-jobs.yml
@@ -0,0 +1,141 @@
+#######################
+# validate after MERGE
+#######################
+- project:
+ name: qtip
+ project: qtip
+
+#--------------------------------
+# BRANCH ANCHORS
+#--------------------------------
+ master: &master
+ stream: master
+ branch: '{stream}'
+ gs-pathname: ''
+ docker-tag: latest
+
+#--------------------------------
+# JOB VARIABLES
+#--------------------------------
+ pod:
+ - zte-pod2:
+ installer: fuel
+ <<: *master
+ - zte-pod3:
+ installer: fuel
+ <<: *master
+ task:
+ - daily:
+ auto-builder-name: qtip-validate-deploy
+ auto-trigger-name: 'qtip-daily-{pod}-trigger'
+ - validate:
+ auto-builder-name: qtip-validate-setup
+ auto-trigger-name: qtip-validate-trigger
+ - experimental:
+ auto-builder-name: qtip-validate-setup
+ auto-trigger-name: experimental
+
+#--------------------------------
+# JOB LIST
+#--------------------------------
+ jobs:
+ - 'qtip-{task}-{installer}-{pod}-{stream}'
+
+################################
+# job templates
+################################
+- job-template:
+ name: 'qtip-{task}-{installer}-{pod}-{stream}'
+ disabled: false
+ parameters:
+ - qtip-common-parameters:
+ project: '{project}'
+ <<: *master
+ - '{installer}-defaults'
+ - '{pod}-defaults'
+ scm:
+ - git-scm
+ triggers:
+ - '{auto-trigger-name}'
+ builders:
+ - qtip-common-builders
+ - '{auto-builder-name}'
+ publishers:
+ - qtip-common-publishers
+
+################
+# MACROS
+################
+
+#---------
+# builder
+#---------
+
+- builder:
+ name: qtip-common-builders
+ builders:
+ - description-setter:
+ description: "POD: $NODE_NAME"
+
+- builder:
+ name: qtip-validate-deploy
+ builders:
+ - shell:
+ !include-raw: ./helpers/validate-deploy.sh
+ - shell:
+ !include-raw: ./helpers/cleanup-deploy.sh
+
+- builder:
+ name: qtip-validate-setup
+ builders:
+ - shell:
+ !include-raw: ./helpers/validate-setup.sh
+
+#-----------
+# parameter
+#-----------
+
+- parameter:
+ name: qtip-common-parameters
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: DEPLOY_SCENARIO
+ default: 'os-nosdn-nofeature-ha'
+ - string:
+ name: DOCKER_TAG
+ default: '{docker-tag}'
+ description: 'Tag to pull docker image'
+
+#-----------
+# publisher
+#-----------
+
+- publisher:
+ name: qtip-common-publishers
+ publishers:
+ - email:
+ recipients: wu.zhihui1@zte.com.cn, zhang.yujunz@zte.com.cn
+
+#---------
+# trigger
+#---------
+
+- trigger:
+ name: qtip-daily-zte-pod2-trigger
+ triggers:
+ - timed: '0 7 * * *'
+
+- trigger:
+ name: qtip-daily-zte-pod3-trigger
+ triggers:
+ - timed: '0 1 * * *'
+
+- trigger:
+ name: qtip-validate-trigger
+ triggers:
+ - gerrit-trigger-change-merged:
+ project: '{project}'
+ branch: '{branch}'
diff --git a/jjb/qtip/qtip-project-jobs.yml b/jjb/qtip/qtip-verify-jobs.yml
index a9c8251fc..d1fc34d11 100644
--- a/jjb/qtip/qtip-project-jobs.yml
+++ b/jjb/qtip/qtip-verify-jobs.yml
@@ -1,11 +1,12 @@
-- project:
- name: qtip-project-jobs
-
- project: 'qtip'
+######################
+# verify before MERGE
+######################
+- project:
+ name: qtip-verify-jobs
+ project: qtip
jobs:
- 'qtip-verify-{stream}'
-
stream:
- master:
branch: '{stream}'
@@ -23,7 +24,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
diff --git a/jjb/releng/artifact-cleanup.yml b/jjb/releng/artifact-cleanup.yml
index e10d5defb..2d0205660 100644
--- a/jjb/releng/artifact-cleanup.yml
+++ b/jjb/releng/artifact-cleanup.yml
@@ -27,7 +27,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
scm:
diff --git a/jjb/releng/opnfv-docker-arm.yml b/jjb/releng/opnfv-docker-arm.yml
new file mode 100644
index 000000000..09c9f335e
--- /dev/null
+++ b/jjb/releng/opnfv-docker-arm.yml
@@ -0,0 +1,77 @@
+##############################################
+# job configuration for docker build and push
+##############################################
+
+- project:
+
+ name: opnfv-docker-arm
+
+ master: &master
+ stream: master
+ branch: '{stream}'
+ disabled: false
+ danube: &danube
+ stream: danube
+ branch: 'stable/{stream}'
+ disabled: true
+ functest-arm-receivers: &functest-arm-receivers
+ receivers: >
+ cristina.pauna@enea.com
+ alexandru.avadanii@enea.com
+ other-receivers: &other-receivers
+ receivers: ''
+
+ project:
+ # projects with jobs for master
+ - 'functest':
+ <<: *master
+ <<: *functest-arm-receivers
+ # projects with jobs for stable
+
+ jobs:
+ - '{project}-docker-build-arm-push-{stream}'
+
+########################
+# job templates
+########################
+- job-template:
+ name: '{project}-docker-build-arm-push-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ parameters: &parameters
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-arm-defaults'
+ - string:
+ name: PUSH_IMAGE
+ default: "true"
+ description: "To enable/disable pushing the image to Dockerhub."
+ - string:
+ name: DOCKER_REPO_NAME
+ default: "opnfv/{project}_aarch64"
+ description: "Dockerhub repo to be pushed to."
+ - string:
+ name: RELEASE_VERSION
+ default: ""
+ description: "Release version, e.g. 1.0, 2.0, 3.0"
+ - string:
+ name: DOCKERFILE
+ default: "Dockerfile.aarch64"
+ description: "Dockerfile to use for creating the image."
+
+ scm:
+ - git-scm
+
+ builders: &builders
+ - shell:
+ !include-raw-escape: ./opnfv-docker.sh
+
+ triggers:
+ - pollscm:
+ cron: "*/30 * * * *"
+
+ publishers:
+ - email:
+ recipients: '{receivers}'
diff --git a/jjb/releng/opnfv-docker.sh b/jjb/releng/opnfv-docker.sh
index e26727abf..7b93d50bf 100644
--- a/jjb/releng/opnfv-docker.sh
+++ b/jjb/releng/opnfv-docker.sh
@@ -12,6 +12,7 @@ set -o nounset
set -o pipefail
+
echo "Starting opnfv-docker for $DOCKER_REPO_NAME ..."
echo "--------------------------------------------------------"
echo
@@ -42,32 +43,29 @@ fi
if [[ -n "$(docker images | grep $DOCKER_REPO_NAME)" ]]; then
echo "Docker images to remove:"
docker images | head -1 && docker images | grep $DOCKER_REPO_NAME
- image_tags=($(docker images | grep $DOCKER_REPO_NAME | awk '{print $2}'))
- for tag in "${image_tags[@]}"; do
- if [[ -n "$(docker images|grep $DOCKER_REPO_NAME|grep $tag)" ]]; then
- echo "Removing docker image $DOCKER_REPO_NAME:$tag..."
- docker rmi -f $DOCKER_REPO_NAME:$tag
+ image_ids=($(docker images | grep $DOCKER_REPO_NAME | awk '{print $3}'))
+ for id in "${image_ids[@]}"; do
+ if [[ -n "$(docker images|grep $DOCKER_REPO_NAME|grep $id)" ]]; then
+ echo "Removing docker image $DOCKER_REPO_NAME:$id..."
+ docker rmi -f $id
fi
done
fi
-
-# cd to directory where Dockerfile is located
cd $WORKSPACE/docker
-if [ ! -f ./Dockerfile ]; then
+if [ ! -f ${DOCKERFILE} ]; then
echo "ERROR: Dockerfile not found."
exit 1
fi
# Get tag version
-branch="${GIT_BRANCH##origin/}"
-echo "Current branch: $branch"
+echo "Current branch: $BRANCH"
-if [[ "$branch" == "master" ]]; then
+if [[ "$BRANCH" == "master" ]]; then
DOCKER_TAG="latest"
else
if [[ "$RELEASE_VERSION" != "" ]]; then
- release=$(echo $branch|sed 's/.*\///')
+ release=${BRANCH##*/}
DOCKER_TAG=${release}.${RELEASE_VERSION}
# e.g. colorado.1.0, colorado.2.0, colorado.3.0
else
@@ -79,7 +77,8 @@ fi
echo "Building docker image: $DOCKER_REPO_NAME:$DOCKER_TAG"
echo "--------------------------------------------------------"
echo
-cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG --build-arg BRANCH=$branch ."
+cmd="docker build --no-cache -t $DOCKER_REPO_NAME:$DOCKER_TAG --build-arg BRANCH=$BRANCH
+ -f $DOCKERFILE ."
echo ${cmd}
${cmd}
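The cleanup change above iterates image IDs instead of tags: docker rmi -f <id> removes the image together with every tag that points at it, whereas removing by tag only untags a single reference and can leave siblings behind. A minimal sketch of the ID-based loop, with a hypothetical repo name:

#!/bin/bash
# Sketch of ID-based image cleanup; the repo name is an example.
DOCKER_REPO_NAME="opnfv/example"
image_ids=($(docker images | grep "$DOCKER_REPO_NAME" | awk '{print $3}'))
for id in "${image_ids[@]}"; do
    # -f forces removal even when several tags reference the same ID
    docker rmi -f "$id"
done
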
diff --git a/jjb/releng/opnfv-docker.yml b/jjb/releng/opnfv-docker.yml
index 02dfb7560..90a91f802 100644
--- a/jjb/releng/opnfv-docker.yml
+++ b/jjb/releng/opnfv-docker.yml
@@ -14,32 +14,51 @@
stream: danube
branch: 'stable/{stream}'
disabled: true
+ functest-receivers: &functest-receivers
+ receivers: >
+ jose.lausuch@ericsson.com morgan.richomme@orange.com
+ cedric.ollivier@orange.com feng.xiaowei@zte.com.cn
+ yaohelan@huawei.com helanyao@gmail.com
+ juha.kosonen@nokia.com
+ other-receivers: &other-receivers
+ receivers: ''
project:
# projects with jobs for master
- 'bottlenecks':
<<: *master
+ <<: *other-receivers
- 'cperf':
<<: *master
+ <<: *other-receivers
- 'dovetail':
<<: *master
+ <<: *other-receivers
- 'functest':
<<: *master
+ <<: *functest-receivers
- 'qtip':
<<: *master
+ <<: *other-receivers
- 'storperf':
<<: *master
+ <<: *other-receivers
- 'yardstick':
<<: *master
+ <<: *other-receivers
# projects with jobs for stable
- 'bottlenecks':
<<: *danube
+ <<: *other-receivers
- 'functest':
<<: *danube
+ <<: *functest-receivers
- 'storperf':
<<: *danube
+ <<: *other-receivers
- 'yardstick':
<<: *danube
+ <<: *other-receivers
jobs:
- '{project}-docker-build-push-{stream}'
@@ -53,6 +72,8 @@
# projects with jobs for master
- 'daisy':
<<: *master
+ - 'escalator':
+ <<: *master
jobs:
- '{project}-docker-build-push-monitor-{stream}'
@@ -68,7 +89,6 @@
parameters: &parameters
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
- string:
@@ -83,6 +103,10 @@
name: RELEASE_VERSION
default: ""
description: "Release version, e.g. 1.0, 2.0, 3.0"
+ - string:
+ name: DOCKERFILE
+ default: "Dockerfile"
+ description: "Dockerfile to use for creating the image."
scm:
- git-scm
@@ -95,6 +119,10 @@
- pollscm:
cron: "*/30 * * * *"
+ publishers:
+ - email:
+ recipients: '{receivers}'
+
- job-template:
name: '{project}-docker-build-push-monitor-{stream}'
disabled: '{obj:disabled}'
diff --git a/jjb/releng/opnfv-docs.yml b/jjb/releng/opnfv-docs.yml
index f6092eee0..f4b25017e 100644
--- a/jjb/releng/opnfv-docs.yml
+++ b/jjb/releng/opnfv-docs.yml
@@ -35,7 +35,6 @@
parameters:
- project-parameter:
project: $GERRIT_PROJECT
- - gerrit-parameter:
branch: '{branch}'
scm:
@@ -77,7 +76,6 @@
parameters:
- project-parameter:
project: $GERRIT_PROJECT
- - gerrit-parameter:
branch: '{branch}'
- string:
name: GS_URL
@@ -89,7 +87,7 @@
description: "JJB configured GERRIT_REFSPEC parameter"
scm:
- - git-scm-gerrit
+ - git-scm
triggers:
- gerrit:
diff --git a/jjb/releng/opnfv-lint.yml b/jjb/releng/opnfv-lint.yml
index 590790f89..37cdef28f 100644
--- a/jjb/releng/opnfv-lint.yml
+++ b/jjb/releng/opnfv-lint.yml
@@ -33,7 +33,6 @@
parameters:
- project-parameter:
project: $GERRIT_PROJECT
- - gerrit-parameter:
branch: '{branch}'
scm:
@@ -54,7 +53,7 @@
comment-contains-value: 'reverify'
projects:
- project-compare-type: 'REG_EXP'
- project-pattern: 'functest|sdnvpn|qtip|daisy|sfc|escalator'
+ project-pattern: 'functest|sdnvpn|qtip|daisy|sfc|escalator|releng'
branches:
- branch-compare-type: 'ANT'
branch-pattern: '**/{branch}'
@@ -76,7 +75,6 @@
parameters:
- project-parameter:
project: $GERRIT_PROJECT
- - gerrit-parameter:
branch: '{branch}'
- node:
name: SLAVE_NAME
diff --git a/jjb/releng/releng-ci-jobs.yml b/jjb/releng/releng-ci-jobs.yml
index 626daffbd..ecc87303f 100644
--- a/jjb/releng/releng-ci-jobs.yml
+++ b/jjb/releng/releng-ci-jobs.yml
@@ -13,7 +13,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: 'master'
scm:
- git-scm-gerrit
@@ -64,7 +63,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: 'master'
scm:
@@ -104,7 +102,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: 'master'
scm:
diff --git a/jjb/releng/testapi-automate.yml b/jjb/releng/testapi-automate.yml
index c03835279..dd76538a3 100644
--- a/jjb/releng/testapi-automate.yml
+++ b/jjb/releng/testapi-automate.yml
@@ -4,24 +4,56 @@
- master:
branch: '{stream}'
gs-pathname: ''
+
+ phase:
+ - 'docker-update'
+ - 'docker-deploy':
+ slave-label: 'testresults'
+ - 'generate-doc'
+
jobs:
- 'testapi-automate-{stream}'
+ - 'testapi-automate-{phase}-{stream}'
- 'testapi-verify-{stream}'
project: 'releng'
+- job:
+ name: 'testapi-mongodb-backup'
+
+ parameters:
+ - label:
+ name: SLAVE_LABEL
+ default: 'testresults'
+ description: 'Slave label on Jenkins'
+ - project-parameter:
+ project: 'releng'
+ branch: 'master'
+ - string:
+ name: GIT_BASE
+ default: https://gerrit.opnfv.org/gerrit/releng
+ description: 'Git URL to use on this Jenkins Slave'
+
+ scm:
+ - git-scm
+
+ triggers:
+ - timed: '@weekly'
+
+ builders:
+ - mongodb-backup
+
- job-template:
name: 'testapi-verify-{stream}'
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
scm:
- - git-scm
+ - git-scm-gerrit
triggers:
- gerrit:
@@ -44,18 +76,45 @@
branch-pattern: '**/{branch}'
file-paths:
- compare-type: 'ANT'
- pattern: 'utils/**'
+ pattern: 'utils/test/testapi/**'
builders:
- run-unit-tests
+ publishers:
+ - junit:
+ results: nosetests.xml
+ - cobertura:
+ report-file: "coverage.xml"
+ only-stable: "true"
+ health-auto-update: "false"
+ stability-auto-update: "false"
+ zoom-coverage-chart: "true"
+ targets:
+ - files:
+ healthy: 10
+ unhealthy: 20
+ failing: 30
+ - method:
+ healthy: 50
+ unhealthy: 40
+ failing: 30
+
- job-template:
name: 'testapi-automate-{stream}'
+ project-type: multijob
+
+ properties:
+ - throttle:
+ enabled: true
+ max-total: 1
+ max-per-node: 1
+ option: 'project'
+
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- string:
name: DOCKER_TAG
@@ -66,6 +125,12 @@
scm:
- git-scm
+ wrappers:
+ - ssh-agent-wrapper
+ - timeout:
+ timeout: 360
+ fail: true
+
triggers:
- gerrit:
server-name: 'gerrit.opnfv.org'
@@ -81,51 +146,128 @@
branch-pattern: '**/{branch}'
file-paths:
- compare-type: 'ANT'
- pattern: 'utils/**'
+ pattern: 'utils/test/testapi/**'
builders:
- - docker-update
- - start-testapi-server
- - testapi-doc-build
- - upload-doc-artifact
- - clean-testapi-server
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - multijob:
+ name: docker-update
+ condition: SUCCESSFUL
+ projects:
+ - name: 'testapi-automate-docker-update-{stream}'
+ current-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: docker-deploy
+ condition: SUCCESSFUL
+ projects:
+ - name: 'testapi-automate-docker-deploy-{stream}'
+ current-parameters: false
+ predefined-parameters: |
+ GIT_BASE=$GIT_BASE
+ node-label-name: SLAVE_LABEL
+ node-label: testresults
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: generate-doc
+ condition: SUCCESSFUL
+ projects:
+ - name: 'testapi-automate-generate-doc-{stream}'
+ current-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+
+ publishers:
+ - 'email-publisher'
+
+- job-template:
+ name: 'testapi-automate-{phase}-{stream}'
+
+ properties:
+ - throttle:
+ enabled: true
+ max-per-node: 1
+ option: 'project'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: DOCKER_TAG
+ default: "latest"
+ description: "Tag name for testapi docker image"
+
+ wrappers:
+ - ssh-agent-wrapper
+ - timeout:
+ timeout: 120
+ fail: true
+
+ scm:
+ - git-scm
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - 'testapi-automate-{phase}-macro'
################################
# job builders
################################
+- builder:
+ name: mongodb-backup
+ builders:
+ - shell: |
+ bash ./jjb/releng/testapi-backup-mongodb.sh
- builder:
- name: run-unit-tests
+ name: 'run-unit-tests'
builders:
- shell: |
- bash ./jjb/releng/testapi-run-tests.sh
+ bash ./utils/test/testapi/run_test.sh
- builder:
- name: docker-update
+ name: 'testapi-automate-docker-update-macro'
builders:
- shell: |
bash ./jjb/releng/testapi-docker-update.sh
- builder:
- name: start-testapi-server
+ name: 'testapi-automate-generate-doc-macro'
builders:
- - shell: |
- bash ./utils/test/testapi/htmlize/prepare.sh
+ - 'testapi-doc-build'
+ - 'upload-doc-artifact'
- builder:
- name: testapi-doc-build
+ name: 'testapi-doc-build'
builders:
- shell: |
bash ./utils/test/testapi/htmlize/doc-build.sh
- builder:
- name: upload-doc-artifact
+ name: 'upload-doc-artifact'
builders:
- shell: |
bash ./utils/test/testapi/htmlize/push-doc-artifact.sh
- builder:
- name: clean-testapi-server
+ name: 'testapi-automate-docker-deploy-macro'
builders:
- shell: |
- bash ./utils/test/testapi/htmlize/finish.sh
+ bash ./jjb/releng/testapi-docker-deploy.sh
+
+################################
+# job publishers
+################################
+
+- publisher:
+ name: 'email-publisher'
+ publishers:
+ - email:
+ recipients: rohitsakala@gmail.com feng.xiaowei@zte.com.cn
+ notify-every-unstable-build: false
+ send-to-individuals: true
diff --git a/jjb/releng/testapi-backup-mongodb.sh b/jjb/releng/testapi-backup-mongodb.sh
new file mode 100644
index 000000000..795e479d9
--- /dev/null
+++ b/jjb/releng/testapi-backup-mongodb.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+
+set -e
+
+# Run MongoDB backup
+python $WORKSPACE/utils/test/testapi/update/templates/backup_mongodb.py -o $WORKSPACE/
+
+# Compressing the dump
+now=$(date +"%m_%d_%Y_%H_%M_%S")
+echo $now
+
+file_name="testapi_mongodb_"$now".tar.gz"
+echo $file_name
+
+tar cvfz "$file_name" test_results_collection*
+
+rm -rf test_results_collection*
+
+artifact_dir="testapibackup"
+workspace="$WORKSPACE"
+
+set +e
+if ! command -v /usr/local/bin/gsutil &>/dev/null; then
+    echo "Not possible to push the backup to artifacts: gsutil is not installed"
+    exit 1
+else
+    echo "Uploading mongodump to artifact $artifact_dir"
+    /usr/local/bin/gsutil cp -r "$workspace"/"$file_name" gs://artifacts.opnfv.org/"$artifact_dir"/
+    echo "MongoDump can be found at http://artifacts.opnfv.org/$artifact_dir.html"
+fi
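
For reference, the compress-and-name step above can also be sketched with the
Python standard library; a minimal illustration, assuming the dump directories
match the test_results_collection* glob used by the script:

    import datetime
    import glob
    import tarfile

    # Timestamped archive name, mirroring the shell logic above
    now = datetime.datetime.now().strftime("%m_%d_%Y_%H_%M_%S")
    file_name = "testapi_mongodb_%s.tar.gz" % now

    # Compress every dumped collection into a single gzipped tarball
    with tarfile.open(file_name, "w:gz") as tar:
        for dump_dir in glob.glob("test_results_collection*"):
            tar.add(dump_dir)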
diff --git a/jjb/releng/testapi-docker-deploy.sh b/jjb/releng/testapi-docker-deploy.sh
new file mode 100644
index 000000000..b4e60b09a
--- /dev/null
+++ b/jjb/releng/testapi-docker-deploy.sh
@@ -0,0 +1,81 @@
+#!/bin/bash
+
+function check() {
+
+    # Give the new container a few seconds to start, then verify
+    # that the swagger spec is served with HTTP 200
+    sleep 5
+    if curl -s --head --request GET http://testresults.opnfv.org/test/swagger/spec | grep -q '200 OK'; then
+        return 0
+    else
+        return 1
+    fi
+
+}
+
+echo "Getting contianer Id of the currently running one"
+contId=$(sudo docker ps | grep "opnfv/testapi:latest" | awk '{print $1}')
+
+echo "Pulling the latest image"
+sudo docker pull opnfv/testapi:latest
+
+echo "Deleting old containers of opnfv/testapi:old"
+sudo docker ps -a | grep "opnfv/testapi" | grep "old" | awk '{print $1}' | xargs -r sudo docker rm -f
+
+echo "Deleting old images of opnfv/testapi:latest"
+sudo docker images | grep "opnfv/testapi" | grep "old" | awk '{print $3}' | xargs -r sudo docker rmi -f
+
+
+if [[ -z "$contId" ]]
+then
+ echo "No running testapi container"
+
+ echo "Removing stopped testapi containers in the previous iterations"
+ sudo docker ps -f status=exited | grep "opnfv_testapi" | awk '{print $1}' | xargs -r sudo docker rm -f
+else
+ echo $contId
+
+ echo "Get the image id of the currently running conatiner"
+ currImgId=$(sudo docker ps | grep "$contId" | awk '{print $2}')
+ echo $currImgId
+
+ if [[ -z "$currImgId" ]]
+ then
+ echo "No image id found for the container id"
+ exit 1
+ fi
+
+ echo "Changing current image tag to old"
+ sudo docker tag "$currImgId" opnfv/testapi:old
+
+ echo "Removing stopped testapi containers in the previous iteration"
+ sudo docker ps -f status=exited | grep "opnfv_testapi" | awk '{print $1}' | xargs -r sudo docker rm -f
+
+ echo "Renaming the running container name to opnfv_testapi as to identify it."
+ sudo docker rename $contId opnfv_testapi
+
+ echo "Stop the currently running container"
+ sudo docker stop $contId
+fi
+
+echo "Running a container with the new image"
+sudo docker run -dti -p "8082:8000" -e "mongodb_url=mongodb://172.17.0.1:27017" -e "swagger_url=http://testresults.opnfv.org/test" opnfv/testapi:latest
+
+if check; then
+    echo "TestResults hosted successfully"
+else
+    echo "TestResults hosting failed; rolling back to the old image"
+    if [[ $(sudo docker images | grep "opnfv/testapi" | grep "old" | awk '{print $3}') ]]; then
+        echo "Running the old image"
+        sudo docker run -dti -p "8082:8000" -e "mongodb_url=mongodb://172.17.0.1:27017" -e "swagger_url=http://testresults.opnfv.org/test" opnfv/testapi:old
+        exit 1
+    fi
+fi
+
+# Show the resulting images and containers
+sudo docker images
+sudo docker ps -a
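
The check() helper above shells out to curl; the same health probe can be
expressed in pure Python, which is handy when testing the rollout logic
outside Jenkins. The retry count and delay below are illustrative, not values
taken from the script:

    import time
    import urllib2  # Python 2, consistent with the modules in this repo

    def check(url='http://testresults.opnfv.org/test/swagger/spec',
              retries=5, delay=5):
        """Poll the swagger spec until it answers HTTP 200 or retries end."""
        for _ in range(retries):
            time.sleep(delay)
            try:
                if urllib2.urlopen(url).getcode() == 200:
                    return True
            except urllib2.URLError:
                pass
        return False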
diff --git a/jjb/releng/testapi-run-tests.sh b/jjb/releng/testapi-run-tests.sh
deleted file mode 100644
index 47691ed04..000000000
--- a/jjb/releng/testapi-run-tests.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash
-set -o errexit
-
-echo "Running unit tests..."
-
-# Creating virtual environment
-virtualenv $WORKSPACE/testapi_test_venv
-source $WORKSPACE/testapi_test_venv/bin/activate
-
-cd $WORKSPACE/utils/test/testapi/
-
-# Install requirements
-pip install -r requirements.txt
-pip install -r test-requirements.txt
-
-# Run unit tests
-bash run_test.sh
diff --git a/jjb/securityaudit/opnfv-security-audit.yml b/jjb/securityaudit/opnfv-security-audit.yml
index 680be20d2..732df8925 100644
--- a/jjb/securityaudit/opnfv-security-audit.yml
+++ b/jjb/securityaudit/opnfv-security-audit.yml
@@ -27,7 +27,6 @@
parameters:
- project-parameter:
project: $GERRIT_PROJECT
- - gerrit-parameter:
branch: '{branch}'
scm:
diff --git a/jjb/storperf/storperf.yml b/jjb/storperf/storperf.yml
index 8f42f8c06..a04a9f4b4 100644
--- a/jjb/storperf/storperf.yml
+++ b/jjb/storperf/storperf.yml
@@ -28,7 +28,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- string:
name: GIT_BASE
@@ -89,8 +88,7 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
- branch: 'master'
+ branch: '{branch}'
- string:
name: GIT_BASE
default: https://gerrit.opnfv.org/gerrit/$PROJECT
@@ -149,7 +147,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'intel-pod9-defaults'
diff --git a/jjb/ves/ves.yml b/jjb/ves/ves.yml
new file mode 100644
index 000000000..5f0da3320
--- /dev/null
+++ b/jjb/ves/ves.yml
@@ -0,0 +1,68 @@
+###################################################
+# All the jobs except verify have been removed!
+# They will only be enabled on request by projects!
+###################################################
+- project:
+ name: ves
+
+ project: '{name}'
+
+ jobs:
+ - 'ves-verify-{stream}'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ disabled: false
+ - danube:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+ disabled: false
+
+- job-template:
+ name: 'ves-verify-{stream}'
+
+ disabled: '{obj:disabled}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - 'opnfv-build-ubuntu-defaults'
+
+ scm:
+ - git-scm-gerrit
+
+ triggers:
+ - gerrit:
+ server-name: 'gerrit.opnfv.org'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**|.gitignore'
+
+ builders:
+ - shell: |
+ #!/bin/bash
+ set -o errexit
+ set -o nounset
+ set -o pipefail
+
+ shellcheck -f tty tests/*.sh
+ shellcheck -f tty utils/*.sh
diff --git a/jjb/vnf_forwarding_graph/vnf_forwarding_graph.yml b/jjb/vnf_forwarding_graph/vnf_forwarding_graph.yml
index c129f4937..450599eaf 100644
--- a/jjb/vnf_forwarding_graph/vnf_forwarding_graph.yml
+++ b/jjb/vnf_forwarding_graph/vnf_forwarding_graph.yml
@@ -24,7 +24,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
scm:
- git-scm-gerrit
diff --git a/jjb/vswitchperf/vswitchperf.yml b/jjb/vswitchperf/vswitchperf.yml
index 4cfe5d98f..ef0e90a76 100644
--- a/jjb/vswitchperf/vswitchperf.yml
+++ b/jjb/vswitchperf/vswitchperf.yml
@@ -19,7 +19,7 @@
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
disabled: true
- slave-label: 'intel-pod3'
+ slave-label: 'intel-pod12'
- job-template:
@@ -30,9 +30,8 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- - 'intel-pod3-defaults'
+ - 'intel-pod12-defaults'
scm:
- git-scm
@@ -62,6 +61,7 @@
concurrent: true
properties:
+ - logrotate-default
- build-blocker:
use-build-blocker: true
blocking-jobs:
@@ -72,7 +72,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{slave-label}-defaults'
@@ -123,6 +122,7 @@
concurrent: true
properties:
+ - logrotate-default
- build-blocker:
use-build-blocker: true
blocking-jobs:
@@ -133,7 +133,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{slave-label}-defaults'
diff --git a/jjb/yardstick/yardstick-ci-jobs.yml b/jjb/yardstick/yardstick-ci-jobs.yml
index ed2dccab8..604eaed25 100644
--- a/jjb/yardstick/yardstick-ci-jobs.yml
+++ b/jjb/yardstick/yardstick-ci-jobs.yml
@@ -197,8 +197,8 @@
installer: compass
auto-trigger-name: 'yardstick-daily-huawei-pod4-trigger'
<<: *master
- - huawei-pod5:
- slave-label: '{pod}'
+ - baremetal-centos:
+ slave-label: 'intel-pod8'
installer: compass
auto-trigger-name: 'daily-trigger-disabled'
<<: *master
@@ -220,6 +220,7 @@
concurrent: true
properties:
+ - logrotate-default
- throttle:
enabled: true
max-per-node: 1
@@ -238,7 +239,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- '{installer}-defaults'
- '{slave-label}-defaults'
@@ -381,15 +381,6 @@
name: YARDSTICK_DB_BACKEND
default: '-i 104.197.68.199:8086'
description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
- name: 'yardstick-params-huawei-pod5'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
- parameter:
name: 'yardstick-params-zte-pod1'
parameters:
diff --git a/jjb/yardstick/yardstick-daily.sh b/jjb/yardstick/yardstick-daily.sh
index da9042bbc..f769e9cdd 100755
--- a/jjb/yardstick/yardstick-daily.sh
+++ b/jjb/yardstick/yardstick-daily.sh
@@ -31,14 +31,14 @@ fi
opts="--privileged=true --rm"
envs="-e INSTALLER_TYPE=${INSTALLER_TYPE} -e INSTALLER_IP=${INSTALLER_IP} \
-e NODE_NAME=${NODE_NAME} -e EXTERNAL_NETWORK=${EXTERNAL_NETWORK} \
- -e YARDSTICK_BRANCH=${GIT_BRANCH##origin/} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO}"
+ -e YARDSTICK_BRANCH=${BRANCH} -e DEPLOY_SCENARIO=${DEPLOY_SCENARIO}"
# Pull the image with correct tag
echo "Yardstick: Pulling image opnfv/yardstick:${DOCKER_TAG}"
docker pull opnfv/yardstick:$DOCKER_TAG >$redirect
# map log directory
-branch=${GIT_BRANCH##*/}
+branch=${BRANCH##*/}
dir_result="${HOME}/opnfv/yardstick/results/${branch}"
mkdir -p ${dir_result}
sudo rm -rf ${dir_result}/*
diff --git a/jjb/yardstick/yardstick-project-jobs.yml b/jjb/yardstick/yardstick-project-jobs.yml
index a54750ef7..bbfa152a2 100644
--- a/jjb/yardstick/yardstick-project-jobs.yml
+++ b/jjb/yardstick/yardstick-project-jobs.yml
@@ -33,7 +33,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
@@ -70,7 +69,6 @@
parameters:
- project-parameter:
project: '{project}'
- - gerrit-parameter:
branch: '{branch}'
- 'opnfv-build-ubuntu-defaults'
- string:
@@ -110,18 +108,8 @@
set -o errexit
set -o pipefail
+ sudo apt-get install -y build-essential python-dev python3-dev
+
echo "Running unit tests..."
cd $WORKSPACE
- virtualenv $WORKSPACE/yardstick_venv
- source $WORKSPACE/yardstick_venv/bin/activate
-
- # install python packages
- easy_install -U setuptools
- easy_install -U pip
- pip install -r requirements.txt || pip install -r tests/ci/requirements.txt
- pip install -e .
-
- # unit tests
- ./run_tests.sh
-
- deactivate
+ tox
diff --git a/modules/opnfv/installer_adapters/__init__.py b/modules/opnfv/deployment/__init__.py
index e69de29bb..e69de29bb 100644
--- a/modules/opnfv/installer_adapters/__init__.py
+++ b/modules/opnfv/deployment/__init__.py
diff --git a/modules/opnfv/installer_adapters/apex/__init__.py b/modules/opnfv/deployment/apex/__init__.py
index e69de29bb..e69de29bb 100644
--- a/modules/opnfv/installer_adapters/apex/__init__.py
+++ b/modules/opnfv/deployment/apex/__init__.py
diff --git a/modules/opnfv/deployment/apex/adapter.py b/modules/opnfv/deployment/apex/adapter.py
new file mode 100644
index 000000000..225e17438
--- /dev/null
+++ b/modules/opnfv/deployment/apex/adapter.py
@@ -0,0 +1,100 @@
+##############################################################################
+# Copyright (c) 2017 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import re
+
+from opnfv.deployment import manager
+from opnfv.utils import opnfv_logger as logger
+from opnfv.utils import ssh_utils
+
+logger = logger.Logger(__name__).getLogger()
+
+
+class ApexAdapter(manager.DeploymentHandler):
+
+ def __init__(self, installer_ip, installer_user, pkey_file):
+ super(ApexAdapter, self).__init__(installer='apex',
+ installer_ip=installer_ip,
+ installer_user=installer_user,
+ installer_pwd=None,
+ pkey_file=pkey_file)
+
+ def get_nodes(self):
+ nodes = []
+ cmd = "source /home/stack/stackrc;openstack server list"
+ output = self.installer_node.run_cmd(cmd)
+ lines = output.rsplit('\n')
+ if len(lines) < 4:
+ logger.info("No nodes found in the deployment.")
+            return nodes
+
+ for line in lines:
+ roles = []
+ if any(x in line for x in ['-----', 'Networks']):
+ continue
+ if 'controller' in line:
+ roles.append(manager.Role.CONTROLLER)
+ if 'compute' in line:
+ roles.append(manager.Role.COMPUTE)
+ if 'opendaylight' in line.lower():
+ roles.append(manager.Role.ODL)
+
+ fields = line.split('|')
+ id = re.sub('[!| ]', '', fields[1]).encode()
+ name = re.sub('[!| ]', '', fields[2]).encode()
+ status_node = re.sub('[!| ]', '', fields[3]).encode().lower()
+ ip = re.sub('[!| ctlplane=]', '', fields[4]).encode()
+
+ ssh_client = None
+ if 'active' in status_node:
+ status = manager.NodeStatus.STATUS_OK
+ ssh_client = ssh_utils.get_ssh_client(hostname=ip,
+ username='heat-admin',
+ pkey_file=self.pkey_file)
+ elif 'error' in status_node:
+ status = manager.NodeStatus.STATUS_ERROR
+ elif 'off' in status_node:
+ status = manager.NodeStatus.STATUS_OFFLINE
+ else:
+ status = manager.NodeStatus.STATUS_INACTIVE
+
+ node = manager.Node(id, ip, name, status, roles, ssh_client)
+ nodes.append(node)
+
+ return nodes
+
+ def get_openstack_version(self):
+ cmd = 'source overcloudrc;sudo nova-manage version'
+ result = self.installer_node.run_cmd(cmd)
+ return result
+
+ def get_sdn_version(self):
+ cmd_descr = ("sudo yum info opendaylight 2>/dev/null|"
+ "grep Description|sed 's/^.*\: //'")
+ cmd_ver = ("sudo yum info opendaylight 2>/dev/null|"
+ "grep Version|sed 's/^.*\: //'")
+ description = None
+ for node in self.nodes:
+ if node.is_controller():
+ description = node.run_cmd(cmd_descr)
+ version = node.run_cmd(cmd_ver)
+ break
+
+ if description is None:
+ return None
+ else:
+ return description + ':' + version
+
+ def get_deployment_status(self):
+ cmd = 'source stackrc;openstack stack list|grep CREATE_COMPLETE'
+ result = self.installer_node.run_cmd(cmd)
+ if result is None or len(result) == 0:
+ return 'failed'
+ else:
+ return 'active'
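
To see how get_nodes() digests the CLI output, here is a small sketch of the
same field extraction on a hypothetical `openstack server list` row (the id,
name and IP below are made up):

    import re

    line = ("| 1a2b3c | overcloud-controller-0 | ACTIVE "
            "| ctlplane=192.0.2.10 |")

    fields = line.split('|')
    node_id = re.sub('[!| ]', '', fields[1])
    name = re.sub('[!| ]', '', fields[2])
    status = re.sub('[!| ]', '', fields[3]).lower()
    ip = re.sub('[!| ctlplane=]', '', fields[4])

    # 1a2b3c overcloud-controller-0 active 192.0.2.10
    print("%s %s %s %s" % (node_id, name, status, ip))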
diff --git a/modules/opnfv/deployment/example.py b/modules/opnfv/deployment/example.py
new file mode 100644
index 000000000..3999a11c6
--- /dev/null
+++ b/modules/opnfv/deployment/example.py
@@ -0,0 +1,36 @@
+# Example usage of the deployment module
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+
+from opnfv.deployment import factory
+
+print("########## APEX ##########")
+handler = factory.Factory.get_handler('apex',
+ '192.168.122.135',
+ 'stack',
+ pkey_file='/root/.ssh/id_rsa')
+
+
+installer_node = handler.get_installer_node()
+print("Hello, I am node '%s'" % installer_node.run_cmd('hostname'))
+installer_node.get_file('/home/stack/overcloudrc', './overcloudrc')
+
+nodes = handler.get_nodes()
+for node in nodes:
+ print("Hello, I am node '%s' and my ip is %s." %
+ (node.run_cmd('hostname'), node.ip))
+
+print(handler.get_deployment_info())
+
+
+print("########## FUEL ##########")
+handler = factory.Factory.get_handler('fuel',
+ '10.20.0.2',
+ 'root',
+ installer_pwd='r00tme')
+
+print(handler.get_deployment_info())
+
+print("List of nodes in cluster 4:")
+nodes = handler.get_nodes({'cluster': '4'})
+for node in nodes:
+ print(node)
diff --git a/modules/opnfv/deployment/factory.py b/modules/opnfv/deployment/factory.py
new file mode 100644
index 000000000..e48a751ad
--- /dev/null
+++ b/modules/opnfv/deployment/factory.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2017 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+from opnfv.deployment.apex import adapter as apex_adapter
+from opnfv.deployment.fuel import adapter as fuel_adapter
+from opnfv.utils import opnfv_logger as logger
+
+logger = logger.Logger(__name__).getLogger()
+
+
+class Factory(object):
+
+ INSTALLERS = ["fuel", "apex", "compass", "joid", "daisy"]
+
+ def __init__(self):
+ pass
+
+ @staticmethod
+ def get_handler(installer,
+ installer_ip,
+ installer_user,
+ installer_pwd=None,
+ pkey_file=None):
+
+        if installer.lower() not in Factory.INSTALLERS:
+            raise Exception("%s is not a supported OPNFV installer."
+                            % installer)
+
+ if installer.lower() == "apex":
+ return apex_adapter.ApexAdapter(installer_ip=installer_ip,
+ installer_user=installer_user,
+ pkey_file=pkey_file)
+ elif installer.lower() == "fuel":
+ return fuel_adapter.FuelAdapter(installer_ip=installer_ip,
+ installer_user=installer_user,
+ installer_pwd=installer_pwd)
+ else:
+ raise Exception("Installer adapter is not implemented.")
diff --git a/modules/opnfv/installer_adapters/compass/__init__.py b/modules/opnfv/deployment/fuel/__init__.py
index e69de29bb..e69de29bb 100644
--- a/modules/opnfv/installer_adapters/compass/__init__.py
+++ b/modules/opnfv/deployment/fuel/__init__.py
diff --git a/modules/opnfv/deployment/fuel/adapter.py b/modules/opnfv/deployment/fuel/adapter.py
new file mode 100644
index 000000000..a71d6cbf9
--- /dev/null
+++ b/modules/opnfv/deployment/fuel/adapter.py
@@ -0,0 +1,199 @@
+##############################################################################
+# Copyright (c) 2017 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# George Paraskevopoulos (geopar@intracom-telecom.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+from opnfv.deployment import manager
+from opnfv.utils import opnfv_logger as logger
+from opnfv.utils import ssh_utils
+
+logger = logger.Logger(__name__).getLogger()
+
+
+class FuelAdapter(manager.DeploymentHandler):
+
+ def __init__(self, installer_ip, installer_user, installer_pwd):
+ super(FuelAdapter, self).__init__(installer='fuel',
+ installer_ip=installer_ip,
+ installer_user=installer_user,
+ installer_pwd=installer_pwd,
+ pkey_file=None)
+
+ def _get_clusters(self):
+ environments = []
+ output = self.runcmd_fuel_env()
+ lines = output.rsplit('\n')
+ if len(lines) < 2:
+ logger.info("No environments found in the deployment.")
+ return None
+ else:
+ fields = lines[0].rsplit(' | ')
+
+ index_id = -1
+ index_status = -1
+ index_name = -1
+ index_release_id = -1
+
+ for i in range(len(fields)):
+ if "id" in fields[i]:
+ index_id = i
+ elif "status" in fields[i]:
+ index_status = i
+ elif "name" in fields[i]:
+ index_name = i
+ elif "release_id" in fields[i]:
+ index_release_id = i
+
+ # order env info
+ for i in range(2, len(lines)):
+ fields = lines[i].rsplit(' | ')
+ dict = {"id": fields[index_id].strip(),
+ "status": fields[index_status].strip(),
+ "name": fields[index_name].strip(),
+ "release_id": fields[index_release_id].strip()}
+ environments.append(dict)
+
+ return environments
+
+ def get_nodes(self, options=None):
+
+        if options and options.get('cluster') and len(self.nodes) > 0:
+ n = []
+ for node in self.nodes:
+ if str(node.info['cluster']) == str(options['cluster']):
+ n.append(node)
+ return n
+
+        try:
+            # If all the nodes have been retrieved before, don't do it again.
+            # The first time the constructor calls this method, self.nodes
+            # does not exist yet, hence the AttributeError guard.
+            if len(self.nodes) > 0:
+                return self.nodes
+        except AttributeError:
+            pass
+
+ nodes = []
+ cmd = 'fuel node'
+ output = self.installer_node.run_cmd(cmd)
+ lines = output.rsplit('\n')
+ if len(lines) < 2:
+ logger.info("No nodes found in the deployment.")
+ return nodes
+
+ # get fields indexes
+ fields = lines[0].rsplit(' | ')
+
+ index_id = -1
+ index_status = -1
+ index_name = -1
+ index_cluster = -1
+ index_ip = -1
+ index_mac = -1
+ index_roles = -1
+ index_online = -1
+
+ for i in range(len(fields)):
+ if "group_id" in fields[i]:
+ break
+ elif "id" in fields[i]:
+ index_id = i
+ elif "status" in fields[i]:
+ index_status = i
+ elif "name" in fields[i]:
+ index_name = i
+ elif "cluster" in fields[i]:
+ index_cluster = i
+ elif "ip" in fields[i]:
+ index_ip = i
+ elif "mac" in fields[i]:
+ index_mac = i
+ elif "roles " in fields[i] and "pending_roles" not in fields[i]:
+ index_roles = i
+ elif "online" in fields[i]:
+ index_online = i
+
+ # order nodes info
+ for i in range(2, len(lines)):
+ fields = lines[i].rsplit(' | ')
+ id = fields[index_id].strip().encode()
+ ip = fields[index_ip].strip().encode()
+ status_node = fields[index_status].strip().encode().lower()
+ name = fields[index_name].strip().encode()
+ roles_all = fields[index_roles].strip().encode().lower()
+
+ roles = [x for x in [manager.Role.CONTROLLER,
+ manager.Role.COMPUTE,
+ manager.Role.ODL] if x in roles_all]
+
+ dict = {"cluster": fields[index_cluster].strip().encode(),
+ "mac": fields[index_mac].strip().encode(),
+ "status_node": status_node,
+ "online": fields[index_online].strip().encode()}
+
+ ssh_client = None
+ if status_node == 'ready':
+ status = manager.NodeStatus.STATUS_OK
+ proxy = {'ip': self.installer_ip,
+ 'username': self.installer_user,
+ 'password': self.installer_pwd}
+ ssh_client = ssh_utils.get_ssh_client(hostname=ip,
+ username='root',
+ proxy=proxy)
+ elif 'error' in status_node:
+ status = manager.NodeStatus.STATUS_ERROR
+ elif 'off' in status_node:
+ status = manager.NodeStatus.STATUS_OFFLINE
+ elif 'discover' in status_node:
+ status = manager.NodeStatus.STATUS_UNUSED
+ else:
+ status = manager.NodeStatus.STATUS_INACTIVE
+
+            node = manager.Node(
+                id, ip, name, status, roles, ssh_client, node_info)
+            if options and options.get('cluster'):
+ if fields[index_cluster].strip() == options['cluster']:
+ nodes.append(node)
+ else:
+ nodes.append(node)
+
+ self.get_nodes_called = True
+ return nodes
+
+ def get_openstack_version(self):
+ cmd = 'source openrc;nova-manage version 2>/dev/null'
+ version = None
+ for node in self.nodes:
+ if node.is_controller() and node.is_active():
+ version = node.run_cmd(cmd)
+ break
+ return version
+
+ def get_sdn_version(self):
+ cmd = "apt-cache show opendaylight|grep Version"
+ version = None
+ for node in self.nodes:
+ if manager.Role.ODL in node.roles and node.is_active():
+ odl_version = node.run_cmd(cmd)
+ if odl_version:
+ version = 'OpenDaylight ' + odl_version.split(' ')[-1]
+ break
+ return version
+
+ def get_deployment_status(self):
+ cmd = "fuel env|tail -1|awk '{print $3}'"
+ result = self.installer_node.run_cmd(cmd)
+ if result is None or len(result) == 0:
+ return 'unknown'
+ elif 'operational' in result:
+ return 'active'
+ elif 'deploy' in result:
+ return 'deploying'
+ else:
+ return 'active'
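
The index discovery in get_nodes() keys off the header row of the `fuel node`
table; a standalone sketch on a hypothetical header shows the idea, including
why "pending_roles" must be excluded before matching "roles":

    header = ("id | status | name | cluster | ip | mac "
              "| roles | pending_roles | online | group_id")

    fields = header.rsplit(' | ')
    indexes = {}
    for i, field in enumerate(fields):
        if "group_id" in field:
            break
        elif "roles" in field and "pending_roles" not in field:
            indexes['roles'] = i
        elif "ip" in field:
            indexes['ip'] = i

    print(indexes)  # {'ip': 4, 'roles': 6}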
diff --git a/modules/opnfv/deployment/manager.py b/modules/opnfv/deployment/manager.py
new file mode 100644
index 000000000..7047a4dd3
--- /dev/null
+++ b/modules/opnfv/deployment/manager.py
@@ -0,0 +1,385 @@
+##############################################################################
+# Copyright (c) 2017 Ericsson AB and others.
+# Author: Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from abc import abstractmethod
+import os
+
+
+from opnfv.utils import opnfv_logger as logger
+from opnfv.utils import ssh_utils
+
+logger = logger.Logger(__name__).getLogger()
+
+
+class Deployment(object):
+
+ def __init__(self,
+ installer,
+ installer_ip,
+ scenario,
+ pod,
+ status,
+ openstack_version,
+ sdn_controller,
+ nodes=None):
+
+ self.deployment_info = {
+ 'installer': installer,
+ 'installer_ip': installer_ip,
+ 'scenario': scenario,
+ 'pod': pod,
+ 'status': status,
+ 'openstack_version': openstack_version,
+ 'sdn_controller': sdn_controller,
+ 'nodes': nodes
+ }
+
+ def _get_openstack_release(self):
+ '''
+ Translates an openstack version into the release name
+ '''
+ os_versions = {
+ '12': 'Liberty',
+ '13': 'Mitaka',
+ '14': 'Newton',
+ '15': 'Ocata',
+ '16': 'Pike',
+ '17': 'Queens'
+ }
+ try:
+ version = self.deployment_info['openstack_version'].split('.')[0]
+ name = os_versions[version]
+ return name
+ except Exception:
+ return 'Unknown release'
+
+ def get_dict(self):
+ '''
+        Returns a dictionary with all the attributes
+ '''
+ return self.deployment_info
+
+ def __str__(self):
+ '''
+ Override of the str method
+ '''
+ s = '''
+ INSTALLER: {installer}
+ SCENARIO: {scenario}
+ INSTALLER IP: {installer_ip}
+ POD: {pod}
+ STATUS: {status}
+ OPENSTACK: {openstack_version} ({openstack_release})
+ SDN: {sdn_controller}
+ NODES:
+ '''.format(installer=self.deployment_info['installer'],
+ scenario=self.deployment_info['scenario'],
+ installer_ip=self.deployment_info['installer_ip'],
+ pod=self.deployment_info['pod'],
+ status=self.deployment_info['status'],
+ openstack_version=self.deployment_info[
+ 'openstack_version'],
+ openstack_release=self._get_openstack_release(),
+ sdn_controller=self.deployment_info['sdn_controller'])
+
+ for node in self.deployment_info['nodes']:
+ s += '{node_object}\n'.format(node_object=node)
+
+ return s
+
+
+class Role():
+ CONTROLLER = 'controller'
+ COMPUTE = 'compute'
+ ODL = 'opendaylight'
+ ONOS = 'onos'
+
+
+class NodeStatus():
+ STATUS_OK = 'active'
+ STATUS_INACTIVE = 'inactive'
+ STATUS_OFFLINE = 'offline'
+ STATUS_ERROR = 'error'
+ STATUS_UNUSED = 'unused'
+
+
+class Node(object):
+
+ def __init__(self,
+ id,
+ ip,
+ name,
+ status,
+ roles=None,
+ ssh_client=None,
+ info=None):
+ self.id = id
+ self.ip = ip
+ self.name = name
+ self.status = status
+ self.ssh_client = ssh_client
+ self.roles = roles
+ self.info = info
+
+ self.cpu_info = 'unknown'
+ self.memory = 'unknown'
+ self.ovs = 'unknown'
+
+ if ssh_client:
+ sys_info = self.get_system_info()
+ self.cpu_info = sys_info['cpu_info']
+ self.memory = sys_info['memory']
+ self.ovs = self.get_ovs_info()
+
+ def get_file(self, src, dest):
+ '''
+ SCP file from a node
+ '''
+        if self.status != NodeStatus.STATUS_OK:
+ logger.info("The node %s is not active" % self.ip)
+ return 1
+ logger.info("Fetching %s from %s" % (src, self.ip))
+ get_file_result = ssh_utils.get_file(self.ssh_client, src, dest)
+ if get_file_result is None:
+ logger.error("SFTP failed to retrieve the file.")
+ else:
+ logger.info("Successfully copied %s:%s to %s" %
+ (self.ip, src, dest))
+ return get_file_result
+
+ def put_file(self, src, dest):
+ '''
+ SCP file to a node
+ '''
+        if self.status != NodeStatus.STATUS_OK:
+ logger.info("The node %s is not active" % self.ip)
+ return 1
+ logger.info("Copying %s to %s" % (src, self.ip))
+ put_file_result = ssh_utils.put_file(self.ssh_client, src, dest)
+ if put_file_result is None:
+ logger.error("SFTP failed to retrieve the file.")
+ else:
+ logger.info("Successfully copied %s to %s:%s" %
+ (src, dest, self.ip))
+ return put_file_result
+
+ def run_cmd(self, cmd):
+ '''
+ Run command remotely on a node
+ '''
+        if self.status != NodeStatus.STATUS_OK:
+ logger.error(
+ "Error running command %s. The node %s is not active"
+ % (cmd, self.ip))
+ return None
+ _, stdout, stderr = (self.ssh_client.exec_command(cmd))
+ error = stderr.readlines()
+ if len(error) > 0:
+ logger.error("error %s" % ''.join(error))
+ return None
+ output = ''.join(stdout.readlines()).rstrip()
+ return output
+
+ def get_dict(self):
+ '''
+ Returns a dictionary with all the attributes
+ '''
+ return {
+ 'id': self.id,
+ 'ip': self.ip,
+ 'name': self.name,
+ 'status': self.status,
+ 'roles': self.roles,
+ 'cpu_info': self.cpu_info,
+ 'memory': self.memory,
+ 'ovs': self.ovs,
+ 'info': self.info
+ }
+
+ def is_active(self):
+        '''
+        Returns True if the node is active
+        '''
+        return self.status == NodeStatus.STATUS_OK
+
+ def is_controller(self):
+ '''
+        Returns True if the node is a controller
+ '''
+ return Role.CONTROLLER in self.roles
+
+ def is_compute(self):
+ '''
+        Returns True if the node is a compute node
+ '''
+ return Role.COMPUTE in self.roles
+
+ def is_odl(self):
+ '''
+        Returns True if the node runs OpenDaylight
+ '''
+ return Role.ODL in self.roles
+
+ def get_ovs_info(self):
+ '''
+ Returns the ovs version installed
+ '''
+ if self.is_active():
+ cmd = "ovs-vsctl --version|head -1| sed 's/^.*) //'"
+ return self.run_cmd(cmd)
+ return None
+
+ def get_system_info(self):
+ '''
+        Returns the memory and cpu information of the node
+ '''
+ cmd = 'grep MemTotal /proc/meminfo'
+ memory = self.run_cmd(cmd).partition('MemTotal:')[-1].strip().encode()
+
+ cpu_info = {}
+ cmd = 'lscpu'
+ result = self.run_cmd(cmd)
+ for line in result.splitlines():
+ if line.startswith('CPU(s)'):
+ cpu_info['num_cpus'] = line.split(' ')[-1].encode()
+ elif line.startswith('Thread(s) per core'):
+ cpu_info['threads/core'] = line.split(' ')[-1].encode()
+ elif line.startswith('Core(s) per socket'):
+ cpu_info['cores/socket'] = line.split(' ')[-1].encode()
+ elif line.startswith('Model name'):
+ cpu_info['model'] = line.partition(
+ 'Model name:')[-1].strip().encode()
+ elif line.startswith('Architecture'):
+ cpu_info['arch'] = line.split(' ')[-1].encode()
+
+ return {'memory': memory, 'cpu_info': cpu_info}
+
+ def __str__(self):
+ return '''
+ name: {name}
+ id: {id}
+ ip: {ip}
+ status: {status}
+ roles: {roles}
+ cpu: {cpu_info}
+ memory: {memory}
+ ovs: {ovs}
+ info: {info}'''.format(name=self.name,
+ id=self.id,
+ ip=self.ip,
+ status=self.status,
+ roles=self.roles,
+ cpu_info=self.cpu_info,
+ memory=self.memory,
+ ovs=self.ovs,
+ info=self.info)
+
+
+class DeploymentHandler(object):
+
+ EX_OK = os.EX_OK
+ EX_ERROR = os.EX_SOFTWARE
+ FUNCTION_NOT_IMPLEMENTED = "Function not implemented by adapter!"
+
+ def __init__(self,
+ installer,
+ installer_ip,
+ installer_user,
+ installer_pwd=None,
+ pkey_file=None):
+
+ self.installer = installer.lower()
+ self.installer_ip = installer_ip
+ self.installer_user = installer_user
+ self.installer_pwd = installer_pwd
+ self.pkey_file = pkey_file
+
+ if pkey_file is not None and not os.path.isfile(pkey_file):
+ raise Exception(
+ 'The private key file %s does not exist!' % pkey_file)
+
+ self.installer_connection = ssh_utils.get_ssh_client(
+ hostname=self.installer_ip,
+ username=self.installer_user,
+ password=self.installer_pwd,
+ pkey_file=self.pkey_file)
+
+ if self.installer_connection:
+ self.installer_node = Node(id='',
+ ip=installer_ip,
+ name=installer,
+ status=NodeStatus.STATUS_OK,
+ ssh_client=self.installer_connection,
+ roles='installer node')
+ else:
+ raise Exception(
+ 'Cannot establish connection to the installer node!')
+
+ self.nodes = self.get_nodes()
+
+ @abstractmethod
+ def get_openstack_version(self):
+ '''
+ Returns a string of the openstack version (nova-compute)
+ '''
+ raise Exception(DeploymentHandler.FUNCTION_NOT_IMPLEMENTED)
+
+ @abstractmethod
+ def get_sdn_version(self):
+ '''
+        Returns a string with the sdn controller and its version, if one exists
+ '''
+ raise Exception(DeploymentHandler.FUNCTION_NOT_IMPLEMENTED)
+
+ @abstractmethod
+ def get_deployment_status(self):
+ '''
+ Returns a string of the status of the deployment
+ '''
+ raise Exception(DeploymentHandler.FUNCTION_NOT_IMPLEMENTED)
+
+ @abstractmethod
+ def get_nodes(self, options=None):
+ '''
+ Generates a list of all the nodes in the deployment
+ '''
+ raise Exception(DeploymentHandler.FUNCTION_NOT_IMPLEMENTED)
+
+ def get_installer_node(self):
+ '''
+ Returns the installer node object
+ '''
+ return self.installer_node
+
+ def get_arch(self):
+ '''
+ Returns the architecture of the first compute node found
+ '''
+ arch = None
+ for node in self.nodes:
+ if node.is_compute():
+ arch = node.cpu_info.get('arch', None)
+ if arch:
+ break
+ return arch
+
+ def get_deployment_info(self):
+ '''
+ Returns an object of type Deployment
+ '''
+ return Deployment(installer=self.installer,
+ installer_ip=self.installer_ip,
+ scenario=os.getenv('DEPLOY_SCENARIO', 'Unknown'),
+ status=self.get_deployment_status(),
+ pod=os.getenv('NODE_NAME', 'Unknown'),
+ openstack_version=self.get_openstack_version(),
+ sdn_controller=self.get_sdn_version(),
+ nodes=self.get_nodes())
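
Because Node only opens an SSH session when a client is supplied, its data
model can be exercised offline; a minimal sketch with placeholder values:

    from opnfv.deployment import manager

    node = manager.Node(id='1',
                        ip='192.0.2.5',
                        name='controller-0',
                        status=manager.NodeStatus.STATUS_OFFLINE,
                        roles=[manager.Role.CONTROLLER])

    print(node.is_controller())        # True
    print(node.is_active())            # False
    print(node.get_dict()['status'])   # 'offline'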
diff --git a/modules/opnfv/installer_adapters/InstallerHandler.py b/modules/opnfv/installer_adapters/InstallerHandler.py
deleted file mode 100644
index dc5bdb9d6..000000000
--- a/modules/opnfv/installer_adapters/InstallerHandler.py
+++ /dev/null
@@ -1,81 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 Ericsson AB and others.
-# Author: Jose Lausuch (jose.lausuch@ericsson.com)
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-from opnfv.installer_adapters.fuel.FuelAdapter import FuelAdapter
-from opnfv.installer_adapters.apex.ApexAdapter import ApexAdapter
-from opnfv.installer_adapters.compass.CompassAdapter import CompassAdapter
-from opnfv.installer_adapters.joid.JoidAdapter import JoidAdapter
-from opnfv.installer_adapters.daisy.DaisyAdapter import DaisyAdapter
-
-
-INSTALLERS = ["fuel", "apex", "compass", "joid", "daisy"]
-
-
-class InstallerHandler:
-
- def __init__(self,
- installer,
- installer_ip,
- installer_user,
- installer_pwd=None):
- self.installer = installer.lower()
- self.installer_ip = installer_ip
- self.installer_user = installer_user
- self.installer_pwd = installer_pwd
-
- if self.installer == INSTALLERS[0]:
- self.InstallerAdapter = FuelAdapter(self.installer_ip,
- self.installer_user,
- self.installer_pwd)
- elif self.installer == INSTALLERS[1]:
- self.InstallerAdapter = ApexAdapter(self.installer_ip)
- elif self.installer == INSTALLERS[2]:
- self.InstallerAdapter = CompassAdapter(self.installer_ip)
- elif self.installer == INSTALLERS[3]:
- self.InstallerAdapter = JoidAdapter(self.installer_ip)
- elif self.installer == INSTALLERS[4]:
- self.InstallerAdapter = DaisyAdapter(self.installer_ip)
- else:
- print("Installer %s is not valid. "
- "Please use one of the followings: %s"
- % (self.installer, INSTALLERS))
- exit(1)
-
- def get_deployment_info(self):
- return self.InstallerAdapter.get_deployment_info()
-
- def get_nodes(self, options=None):
- return self.InstallerAdapter.get_nodes(options=options)
-
- def get_controller_ips(self, options=None):
- return self.InstallerAdapter.get_controller_ips(options=options)
-
- def get_compute_ips(self, options=None):
- return self.InstallerAdapter.get_compute_ips(options=options)
-
- def get_file_from_installer(self,
- remote_path,
- local_path,
- options=None):
- return self.InstallerAdapter.get_file_from_installer(remote_path,
- local_path,
- options=options)
-
- def get_file_from_controller(self,
- remote_path,
- local_path,
- ip=None,
- options=None):
- return self.InstallerAdapter.get_file_from_controller(remote_path,
- local_path,
- ip=ip,
- options=options)
-
- def get_all(self):
- pass
diff --git a/modules/opnfv/installer_adapters/apex/ApexAdapter.py b/modules/opnfv/installer_adapters/apex/ApexAdapter.py
deleted file mode 100644
index 17a27b10a..000000000
--- a/modules/opnfv/installer_adapters/apex/ApexAdapter.py
+++ /dev/null
@@ -1,32 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# Author: Jose Lausuch (jose.lausuch@ericsson.com)
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-class ApexAdapter:
-
- def __init__(self, installer_ip):
- self.installer_ip = installer_ip
-
- def get_deployment_info(self):
- pass
-
- def get_nodes(self):
- pass
-
- def get_controller_ips(self):
- pass
-
- def get_compute_ips(self):
- pass
-
- def get_file_from_installer(self, origin, target, options=None):
- pass
-
- def get_file_from_controller(self, origin, target, ip=None, options=None):
- pass
diff --git a/modules/opnfv/installer_adapters/compass/CompassAdapter.py b/modules/opnfv/installer_adapters/compass/CompassAdapter.py
deleted file mode 100644
index 47cbc646d..000000000
--- a/modules/opnfv/installer_adapters/compass/CompassAdapter.py
+++ /dev/null
@@ -1,32 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# Author: Jose Lausuch (jose.lausuch@ericsson.com)
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-class CompassAdapter:
-
- def __init__(self, installer_ip):
- self.installer_ip = installer_ip
-
- def get_deployment_info(self):
- pass
-
- def get_nodes(self):
- pass
-
- def get_controller_ips(self):
- pass
-
- def get_compute_ips(self):
- pass
-
- def get_file_from_installer(self, origin, target, options=None):
- pass
-
- def get_file_from_controller(self, origin, target, ip=None, options=None):
- pass
diff --git a/modules/opnfv/installer_adapters/daisy/DaisyAdapter.py b/modules/opnfv/installer_adapters/daisy/DaisyAdapter.py
deleted file mode 100644
index 9b06f4c3c..000000000
--- a/modules/opnfv/installer_adapters/daisy/DaisyAdapter.py
+++ /dev/null
@@ -1,32 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# Author: Jose Lausuch (jose.lausuch@ericsson.com)
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-class DaisyAdapter:
-
- def __init__(self, installer_ip):
- self.installer_ip = installer_ip
-
- def get_deployment_info(self):
- pass
-
- def get_nodes(self):
- pass
-
- def get_controller_ips(self):
- pass
-
- def get_compute_ips(self):
- pass
-
- def get_file_from_installer(self, origin, target, options=None):
- pass
-
- def get_file_from_controller(self, origin, target, ip=None, options=None):
- pass
diff --git a/modules/opnfv/installer_adapters/daisy/__init__.py b/modules/opnfv/installer_adapters/daisy/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/modules/opnfv/installer_adapters/daisy/__init__.py
+++ /dev/null
diff --git a/modules/opnfv/installer_adapters/fuel/FuelAdapter.py b/modules/opnfv/installer_adapters/fuel/FuelAdapter.py
deleted file mode 100644
index 8ed8f8937..000000000
--- a/modules/opnfv/installer_adapters/fuel/FuelAdapter.py
+++ /dev/null
@@ -1,236 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# Author: Jose Lausuch (jose.lausuch@ericsson.com)
-# George Paraskevopoulos (geopar@intracom-telecom.com)
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import opnfv.utils.SSHUtils as ssh_utils
-import opnfv.utils.OPNFVLogger as logger
-
-
-class FuelAdapter:
-
- def __init__(self, installer_ip, user="root", password="r00tme"):
- self.installer_ip = installer_ip
- self.installer_user = user
- self.installer_password = password
- self.installer_connection = ssh_utils.get_ssh_client(
- installer_ip,
- self.installer_user,
- password=self.installer_password)
- self.logger = logger.Logger("FuelHandler").getLogger()
-
- def runcmd_fuel_installer(self, cmd):
- _, stdout, stderr = (self
- .installer_connection
- .exec_command(cmd))
- error = stderr.readlines()
- if len(error) > 0:
- self.logger.error("error %s" % ''.join(error))
- return error
- output = ''.join(stdout.readlines())
- return output
-
- def runcmd_fuel_nodes(self):
- return self.runcmd_fuel_installer('fuel nodes')
-
- def runcmd_fuel_env(self):
- return self.runcmd_fuel_installer('fuel env')
-
- def get_clusters(self):
- environments = []
- output = self.runcmd_fuel_env()
- lines = output.rsplit('\n')
- if len(lines) < 2:
- self.logger.infp("No environments found in the deployment.")
- return None
- else:
- fields = lines[0].rsplit(' | ')
-
- index_id = -1
- index_status = -1
- index_name = -1
- index_release_id = -1
-
- for i in range(0, len(fields) - 1):
- if "id" in fields[i]:
- index_id = i
- elif "status" in fields[i]:
- index_status = i
- elif "name" in fields[i]:
- index_name = i
- elif "release_id" in fields[i]:
- index_release_id = i
-
- # order env info
- for i in range(2, len(lines) - 1):
- fields = lines[i].rsplit(' | ')
- dict = {"id": fields[index_id].strip(),
- "status": fields[index_status].strip(),
- "name": fields[index_name].strip(),
- "release_id": fields[index_release_id].strip()}
- environments.append(dict)
-
- return environments
-
- def get_nodes(self, options=None):
- nodes = []
- output = self.runcmd_fuel_nodes()
- lines = output.rsplit('\n')
- if len(lines) < 2:
- self.logger.info("No nodes found in the deployment.")
- return None
- else:
- # get fields indexes
- fields = lines[0].rsplit(' | ')
-
- index_id = -1
- index_status = -1
- index_name = -1
- index_cluster = -1
- index_ip = -1
- index_mac = -1
- index_roles = -1
- index_online = -1
-
- for i in range(0, len(fields) - 1):
- if "id" in fields[i]:
- index_id = i
- elif "status" in fields[i]:
- index_status = i
- elif "name" in fields[i]:
- index_name = i
- elif "cluster" in fields[i]:
- index_cluster = i
- elif "ip" in fields[i]:
- index_ip = i
- elif "mac" in fields[i]:
- index_mac = i
- elif "roles " in fields[i]:
- index_roles = i
- elif "online" in fields[i]:
- index_online = i
-
- # order nodes info
- for i in range(2, len(lines) - 1):
- fields = lines[i].rsplit(' | ')
- dict = {"id": fields[index_id].strip(),
- "status": fields[index_status].strip(),
- "name": fields[index_name].strip(),
- "cluster": fields[index_cluster].strip(),
- "ip": fields[index_ip].strip(),
- "mac": fields[index_mac].strip(),
- "roles": fields[index_roles].strip(),
- "online": fields[index_online].strip()}
- if options and options['cluster']:
- if fields[index_cluster].strip() == options['cluster']:
- nodes.append(dict)
- else:
- nodes.append(dict)
-
- return nodes
-
- def get_controller_ips(self, options):
- nodes = self.get_nodes(options=options)
- controllers = []
- for node in nodes:
- if "controller" in node["roles"]:
- controllers.append(node['ip'])
- return controllers
-
- def get_compute_ips(self, options=None):
- nodes = self.get_nodes(options=options)
- computes = []
- for node in nodes:
- if "compute" in node["roles"]:
- computes.append(node['ip'])
- return computes
-
- def get_deployment_info(self):
- str = "Deployment details:\n"
- str += "\tInstaller: Fuel\n"
- str += "\tScenario: Unknown\n"
- sdn = "None"
- clusters = self.get_clusters()
- str += "\tN.Clusters: %s\n" % len(clusters)
- for cluster in clusters:
- cluster_dic = {'cluster': cluster['id']}
- str += "\tCluster info:\n"
- str += "\t ID: %s\n" % cluster['id']
- str += "\t NAME: %s\n" % cluster['name']
- str += "\t STATUS: %s\n" % cluster['status']
- nodes = self.get_nodes(options=cluster_dic)
- num_nodes = len(nodes)
- for node in nodes:
- if "opendaylight" in node['roles']:
- sdn = "OpenDaylight"
- elif "onos" in node['roles']:
- sdn = "ONOS"
- num_controllers = len(
- self.get_controller_ips(options=cluster_dic))
- num_computes = len(self.get_compute_ips(options=cluster_dic))
- ha = False
- if num_controllers > 1:
- ha = True
-
- str += "\t HA: %s\n" % ha
- str += "\t NUM.NODES: %s\n" % num_nodes
- str += "\t CONTROLLERS: %s\n" % num_controllers
- str += "\t COMPUTES: %s\n" % num_computes
- str += "\t SDN CONTR.: %s\n\n" % sdn
- str += self.runcmd_fuel_nodes()
- return str
-
- def get_file_from_installer(self, remote_path, local_path, options=None):
- self.logger.debug("Fetching %s from %s" %
- (remote_path, self.installer_ip))
- get_file_result = ssh_utils.get_file(self.installer_connection,
- remote_path,
- local_path)
- if get_file_result is None:
- self.logger.error("SFTP failed to retrieve the file.")
- return 1
- self.logger.info("%s successfully copied from Fuel to %s" %
- (remote_path, local_path))
-
- def get_file_from_controller(self,
- remote_path,
- local_path,
- ip=None,
- user='root',
- options=None):
- if ip is None:
- controllers = self.get_controller_ips(options=options)
- if len(controllers) == 0:
- self.logger.info("No controllers found in the deployment.")
- return 1
- else:
- target_ip = controllers[0]
- else:
- target_ip = ip
-
- installer_proxy = {
- 'ip': self.installer_ip,
- 'username': self.installer_user,
- 'password': self.installer_password
- }
- controller_conn = ssh_utils.get_ssh_client(
- target_ip,
- user,
- proxy=installer_proxy)
-
- self.logger.debug("Fetching %s from %s" %
- (remote_path, target_ip))
-
- get_file_result = ssh_utils.get_file(controller_conn,
- remote_path,
- local_path)
- if get_file_result is None:
- self.logger.error("SFTP failed to retrieve the file.")
- return 1
- self.logger.info("%s successfully copied from %s to %s" %
- (remote_path, target_ip, local_path))
diff --git a/modules/opnfv/installer_adapters/fuel/__init__.py b/modules/opnfv/installer_adapters/fuel/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/modules/opnfv/installer_adapters/fuel/__init__.py
+++ /dev/null
diff --git a/modules/opnfv/installer_adapters/fuel/example.py b/modules/opnfv/installer_adapters/fuel/example.py
deleted file mode 100644
index 7fea4dfd7..000000000
--- a/modules/opnfv/installer_adapters/fuel/example.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# This is an example of usage of this Tool
-# Author: Jose Lausuch (jose.lausuch@ericsson.com)
-
-import opnfv.installer_adapters.InstallerHandler as ins_handler
-
-fuel_handler = ins_handler.InstallerHandler(installer='fuel',
- installer_ip='10.20.0.2',
- installer_user='root',
- installer_pwd='r00tme')
-print("Nodes in cluster 1:\n%s\n" %
- fuel_handler.get_nodes(options={'cluster': '1'}))
-print("Nodes in cluster 2:\n%s\n" %
- fuel_handler.get_nodes(options={'cluster': '2'}))
-print("Nodes:\n%s\n" % fuel_handler.get_nodes())
-print("Controller nodes:\n%s\n" % fuel_handler.get_controller_ips())
-print("Compute nodes:\n%s\n" % fuel_handler.get_compute_ips())
-print("\n%s\n" % fuel_handler.get_deployment_info())
-fuel_handler.get_file_from_installer('/root/deploy/dea.yaml', './dea.yaml')
-fuel_handler.get_file_from_controller(
- '/etc/neutron/neutron.conf', './neutron.conf')
-fuel_handler.get_file_from_controller(
- '/root/openrc', './openrc')
diff --git a/modules/opnfv/installer_adapters/joid/JoidAdapter.py b/modules/opnfv/installer_adapters/joid/JoidAdapter.py
deleted file mode 100644
index be8c2ebac..000000000
--- a/modules/opnfv/installer_adapters/joid/JoidAdapter.py
+++ /dev/null
@@ -1,32 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Ericsson AB and others.
-# Author: Jose Lausuch (jose.lausuch@ericsson.com)
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-
-class JoidAdapter:
-
- def __init__(self, installer_ip):
- self.installer_ip = installer_ip
-
- def get_deployment_info(self):
- pass
-
- def get_nodes(self):
- pass
-
- def get_controller_ips(self):
- pass
-
- def get_compute_ips(self):
- pass
-
- def get_file_from_installer(self, origin, target, options=None):
- pass
-
- def get_file_from_controller(self, origin, target, ip=None, options=None):
- pass
diff --git a/modules/opnfv/installer_adapters/joid/__init__.py b/modules/opnfv/installer_adapters/joid/__init__.py
deleted file mode 100644
index e69de29bb..000000000
--- a/modules/opnfv/installer_adapters/joid/__init__.py
+++ /dev/null
diff --git a/modules/opnfv/utils/Credentials.py b/modules/opnfv/utils/Credentials.py
index 6441b841c..141ecbd93 100644
--- a/modules/opnfv/utils/Credentials.py
+++ b/modules/opnfv/utils/Credentials.py
@@ -77,7 +77,7 @@ class Credentials(object):
creds_file = '/root/openrc'
try:
self.handler.get_file_from_controller(creds_file, target_path)
- except Exception, e:
+ except Exception as e:
self.logger.error(
"Cannot get %s from controller. %e" % (creds_file, e))
pass
diff --git a/modules/opnfv/utils/constants.py b/modules/opnfv/utils/constants.py
index ed83488d4..56008c37f 100644
--- a/modules/opnfv/utils/constants.py
+++ b/modules/opnfv/utils/constants.py
@@ -14,6 +14,7 @@ EXIT_OK = 0
EXIT_RUN_ERROR = -1
EXIT_PUSH_TO_TEST_DB_ERROR = -2
+
class Constants(object):
INSTALLERS = ['apex', 'fuel', 'compass', 'joid', "daisy"]
VERSIONS = ['arno', 'brahmaputra', 'colorado', 'danube']
diff --git a/modules/opnfv/utils/OPNFVLogger.py b/modules/opnfv/utils/opnfv_logger.py
index 6fa4ef2e2..6fa4ef2e2 100644
--- a/modules/opnfv/utils/OPNFVLogger.py
+++ b/modules/opnfv/utils/opnfv_logger.py
diff --git a/modules/opnfv/utils/ovs_logger.py b/modules/opnfv/utils/ovs_logger.py
index 3159609f1..7777a9a16 100644
--- a/modules/opnfv/utils/ovs_logger.py
+++ b/modules/opnfv/utils/ovs_logger.py
@@ -7,7 +7,7 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-import opnfv.utils.OPNFVLogger as OPNFVLogger
+import opnfv.utils.opnfv_logger as OPNFVLogger
import os
import time
import shutil
@@ -16,6 +16,7 @@ logger = OPNFVLogger.Logger('ovs_logger').getLogger()
class OVSLogger(object):
+
def __init__(self, basedir, ft_resdir):
self.ovs_dir = basedir
self.ft_resdir = ft_resdir
@@ -32,7 +33,7 @@ class OVSLogger(object):
hosts = stdout.readline().strip().split(' ')
found_host = [h for h in hosts if h.startswith(host_prefix)][0]
return found_host
- except Exception, e:
+ except Exception as e:
logger.error(e)
def __dump_to_file(self, operation, host, text, timestamp=None):
@@ -55,7 +56,7 @@ class OVSLogger(object):
.format(cmd, host))
output = ''.join(stdout.readlines())
return output
- except Exception, e:
+ except Exception as e:
logger.error('[__remote_command(ssh_client, {0})]: {1}'
.format(cmd, e))
return None
@@ -78,7 +79,7 @@ class OVSLogger(object):
host = self.__ssh_host(ssh_conn)
self.__dump_to_file(operation, host, output, timestamp=timestamp)
return output
- except Exception, e:
+ except Exception as e:
logger.error('[ofctl_dump_flows(ssh_client, {0}, {1})]: {2}'
.format(br, choose_table, e))
return None
@@ -91,7 +92,7 @@ class OVSLogger(object):
host = self.__ssh_host(ssh_conn)
self.__dump_to_file(operation, host, output, timestamp=timestamp)
return output
- except Exception, e:
+ except Exception as e:
logger.error('[vsctl_show(ssh_client)]: {0}'.format(e))
return None
@@ -100,19 +101,13 @@ class OVSLogger(object):
if timestamp is None:
timestamp = time.strftime("%Y%m%d-%H%M%S")
- for controller_client in controller_clients:
- self.ofctl_dump_flows(controller_client,
- timestamp=timestamp)
- self.vsctl_show(controller_client,
- timestamp=timestamp)
-
- for compute_client in compute_clients:
- self.ofctl_dump_flows(compute_client,
- timestamp=timestamp)
- self.vsctl_show(compute_client,
- timestamp=timestamp)
+ clients = controller_clients + compute_clients
+ for client in clients:
+ self.ofctl_dump_flows(client, timestamp=timestamp)
+ self.vsctl_show(client, timestamp=timestamp)
if related_error is not None:
dumpdir = os.path.join(self.ovs_dir, timestamp)
+ self.__mkdir_p(dumpdir)
with open(os.path.join(dumpdir, 'error'), 'w') as f:
f.write(related_error)
diff --git a/modules/opnfv/utils/SSHUtils.py b/modules/opnfv/utils/ssh_utils.py
index 16e34c3e5..d17f5ae81 100644
--- a/modules/opnfv/utils/SSHUtils.py
+++ b/modules/opnfv/utils/ssh_utils.py
@@ -9,14 +9,19 @@
##############################################################################
-import paramiko
-import opnfv.utils.OPNFVLogger as OPNFVLogger
import os
+import paramiko
-logger = OPNFVLogger.Logger('SSHUtils').getLogger()
+from opnfv.utils import opnfv_logger as logger
+logger = logger.Logger("SSH utils").getLogger()
-def get_ssh_client(hostname, username, password=None, proxy=None):
+
+def get_ssh_client(hostname,
+ username,
+ password=None,
+ proxy=None,
+ pkey_file=None):
client = None
try:
if proxy is None:
@@ -26,16 +31,23 @@ def get_ssh_client(hostname, username, password=None, proxy=None):
client.configure_jump_host(proxy['ip'],
proxy['username'],
proxy['password'])
-
if client is None:
raise Exception('Could not connect to client')
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
- client.connect(hostname,
- username=username,
- password=password)
+ if pkey_file is not None:
+ key = paramiko.RSAKey.from_private_key_file(pkey_file)
+ client.load_system_host_keys()
+ client.connect(hostname,
+ username=username,
+ pkey=key)
+ else:
+ client.connect(hostname,
+ username=username,
+ password=password)
+
return client
- except Exception, e:
+ except Exception as e:
logger.error(e)
return None
@@ -45,7 +57,7 @@ def get_file(ssh_conn, src, dest):
sftp = ssh_conn.open_sftp()
sftp.get(src, dest)
return True
- except Exception, e:
+ except Exception as e:
logger.error("Error [get_file(ssh_conn, '%s', '%s']: %s" %
(src, dest, e))
return None
@@ -56,7 +68,7 @@ def put_file(ssh_conn, src, dest):
sftp = ssh_conn.open_sftp()
sftp.put(src, dest)
return True
- except Exception, e:
+ except Exception as e:
logger.error("Error [put_file(ssh_conn, '%s', '%s']: %s" %
(src, dest, e))
return None
@@ -66,8 +78,8 @@ class ProxyHopClient(paramiko.SSHClient):
'''
Connect to a remote server using a proxy hop
'''
+
def __init__(self, *args, **kwargs):
- self.logger = OPNFVLogger.Logger("ProxyHopClient").getLogger()
self.proxy_ssh = None
self.proxy_transport = None
self.proxy_channel = None
@@ -116,5 +128,5 @@ class ProxyHopClient(paramiko.SSHClient):
pkey=proxy_key,
sock=self.proxy_channel)
os.remove(self.local_ssh_key)
- except Exception, e:
- self.logger.error(e)
+ except Exception as e:
+ logger.error(e)
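
A short sketch of the new pkey_file path through get_ssh_client (hostname,
user and key path are placeholders; note the call returns None on failure
rather than raising):

    from opnfv.utils import ssh_utils

    client = ssh_utils.get_ssh_client(hostname='192.0.2.1',
                                      username='stack',
                                      pkey_file='/home/user/.ssh/id_rsa')
    if client:
        _, stdout, _ = client.exec_command('hostname')
        print(''.join(stdout.readlines()).rstrip())
        ssh_utils.get_file(client, '/home/stack/overcloudrc', './overcloudrc')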
diff --git a/prototypes/bifrost/scripts/destroy-env.sh b/prototypes/bifrost/scripts/destroy-env.sh
index cdc55df1b..b73092b0f 100755
--- a/prototypes/bifrost/scripts/destroy-env.sh
+++ b/prototypes/bifrost/scripts/destroy-env.sh
@@ -14,24 +14,23 @@ if [[ $(whoami) != "root" ]]; then
exit 1
fi
-virsh destroy jumphost.opnfvlocal || true
-virsh destroy controller00.opnfvlocal || true
-virsh destroy compute00.opnfvlocal || true
-virsh undefine jumphost.opnfvlocal || true
-virsh undefine controller00.opnfvlocal || true
-virsh undefine compute00.opnfvlocal || true
-
-service ironic-conductor stop
-
-echo "removing from database"
-mysql -u root ironic --execute "truncate table ports;"
-mysql -u root ironic --execute "delete from node_tags;"
-mysql -u root ironic --execute "delete from nodes;"
-mysql -u root ironic --execute "delete from conductors;"
+# Delete all VMs on the slave since proposed patchsets
+# may leave undesired VM leftovers
+for vm in $(virsh list --all --name); do
+ virsh destroy $vm || true
+ virsh undefine $vm || true
+done
+
+service ironic-conductor stop || true
+
+echo "removing ironic database"
+if which mysql &> /dev/null; then
+ mysql -u root ironic --execute "drop database ironic;"
+fi
echo "removing leases"
[[ -e /var/lib/misc/dnsmasq/dnsmasq.leases ]] && > /var/lib/misc/dnsmasq/dnsmasq.leases
echo "removing logs"
-rm -rf /var/log/libvirt/baremetal_logs/*.log
+rm -rf /var/log/libvirt/baremetal_logs/*
# clean up dib images only if requested explicitly
CLEAN_DIB_IMAGES=${CLEAN_DIB_IMAGES:-false}
@@ -48,6 +47,6 @@ rm -rf /var/lib/libvirt/images/*.qcow2
echo "restarting services"
service dnsmasq restart || true
service libvirtd restart
-service ironic-api restart
-service ironic-conductor start
-service ironic-inspector restart
+service ironic-api restart || true
+service ironic-conductor start || true
+service ironic-inspector restart || true
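
Sweeping virsh list --all --name instead of naming the three nodes means stale VMs left behind by abandoned patchsets get cleaned up as well. The same sweep via the libvirt Python bindings, as a sketch (assumes the libvirt-python package and a local qemu:///system hypervisor):

import libvirt

conn = libvirt.open('qemu:///system')
for dom in conn.listAllDomains():
    # A running domain must be forced off before it can be undefined.
    if dom.isActive():
        dom.destroy()
    dom.undefine()
conn.close()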
diff --git a/prototypes/bifrost/scripts/test-bifrost-deployment.sh b/prototypes/bifrost/scripts/test-bifrost-deployment.sh
index 90f014c74..3e2381fea 100755
--- a/prototypes/bifrost/scripts/test-bifrost-deployment.sh
+++ b/prototypes/bifrost/scripts/test-bifrost-deployment.sh
@@ -36,6 +36,7 @@ export TEST_VM_NODE_NAMES="jumphost.opnfvlocal controller00.opnfvlocal compute00
export VM_DOMAIN_TYPE="kvm"
export VM_CPU=${VM_CPU:-4}
export VM_DISK=${VM_DISK:-100}
+export VM_DISK_CACHE=${VM_DISK_CACHE:-unsafe}
TEST_PLAYBOOK="test-bifrost-infracloud.yaml"
USE_INSPECTOR=true
USE_CIRROS=false
@@ -78,6 +79,11 @@ source ${ANSIBLE_INSTALL_ROOT}/ansible/hacking/env-setup
ANSIBLE=$(which ansible-playbook)
set -x -o nounset
+logs_on_exit() {
+ $SCRIPT_HOME/collect-test-info.sh
+}
+trap logs_on_exit EXIT
+
# Change working directory
cd $BIFROST_HOME/playbooks
@@ -128,6 +134,4 @@ if [ $EXITCODE != 0 ]; then
echo "****************************"
fi
-$SCRIPT_HOME/collect-test-info.sh
-
exit $EXITCODE
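
Registering collect-test-info.sh in an EXIT trap guarantees the logs are gathered even when the playbook aborts mid-run, instead of only on the happy path. The same idea in Python, for comparison (a sketch; note atexit does not fire on signals, unlike a shell EXIT trap):

import atexit
import subprocess

def logs_on_exit():
    # Runs on normal interpreter exit, including after sys.exit()
    # and unhandled exceptions.
    subprocess.call(['./collect-test-info.sh'])

atexit.register(logs_on_exit)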
diff --git a/utils/fetch_os_creds.sh b/utils/fetch_os_creds.sh
index 856f69a27..f00e022f9 100755
--- a/utils/fetch_os_creds.sh
+++ b/utils/fetch_os_creds.sh
@@ -121,6 +121,14 @@ if [ "$installer_type" == "fuel" ]; then
# but sometimes the output of endpoint-list is like this: http://172.30.9.70:8004/v1/%(tenant_id)s
# Fuel virtual need a fix
+ # Convert to v3 URL
+ auth_url=$(cat $dest_path|grep AUTH_URL)
+ if [[ -z `echo $auth_url |grep v3` ]]; then
+ auth_url=$(echo $auth_url |sed "s|'$|v3&|")
+ fi
+ sed -i '/AUTH_URL/d' $dest_path
+ echo $auth_url >> $dest_path
+
elif [ "$installer_type" == "apex" ]; then
verify_connectivity $installer_ip
@@ -136,7 +144,7 @@ elif [ "$installer_type" == "compass" ]; then
verify_connectivity $installer_ip
controller_ip=$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${installer_ip} \
'mysql -ucompass -pcompass -Dcompass -e"select * from cluster;"' \
- | awk -F"," '{for(i=1;i<NF;i++)if($i~/\"host[1-5]\"/) {print $(i+1);break;}}' \
+ | awk -F"," '{for(i=1;i<NF;i++)if($i~/\"127.0.0.1\"/) {print $(i+2);break;}}' \
| grep -oP "\d+.\d+.\d+.\d+")
if [ -z $controller_ip ]; then
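
The new Fuel block rewrites the fetched credentials so OS_AUTH_URL ends in /v3, matching the Keystone v3 API. The sed expression s|'$|v3&| inserts "v3" just before the closing quote; restated as a Python sketch (the openrc file name is hypothetical):

import re

def to_v3(line):
    # e.g. export OS_AUTH_URL='http://172.30.9.70:5000/' -> .../v3'
    if 'AUTH_URL' in line and 'v3' not in line:
        line = re.sub(r"'$", "v3'", line.rstrip()) + '\n'
    return line

with open('openrc') as creds:
    lines = [to_v3(line) for line in creds]
with open('openrc', 'w') as creds:
    creds.writelines(lines)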
diff --git a/utils/jenkins-jnlp-connect.sh b/utils/jenkins-jnlp-connect.sh
index be9fe184d..8fce2e021 100755
--- a/utils/jenkins-jnlp-connect.sh
+++ b/utils/jenkins-jnlp-connect.sh
@@ -92,13 +92,16 @@ main () {
exit 1
fi
+ chown=$(type -p chown)
+ mkdir=$(type -p mkdir)
+
makemonit () {
echo "Writing the following as monit config:"
cat << EOF | tee $monitconfdir/jenkins
check directory jenkins_piddir path /var/run/$jenkinsuser
-if does not exist then exec "/usr/bin/mkdir -p /var/run/$jenkinsuser"
-if failed uid $jenkinsuser then exec "/usr/bin/chown $jenkinsuser /var/run/$jenkinsuser"
-if failed gid $jenkinsuser then exec "/usr/bin/chown :$jenkinsuser /var/run/$jenkinsuser"
+if does not exist then exec "$mkdir -p /var/run/$jenkinsuser"
+if failed uid $jenkinsuser then exec "$chown $jenkinsuser /var/run/$jenkinsuser"
+if failed gid $jenkinsuser then exec "$chown :$jenkinsuser /var/run/$jenkinsuser"
check process jenkins with pidfile /var/run/$jenkinsuser/jenkins_jnlp_pid
start program = "/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $jenkinshome; export started_monit=true; $0 $@' with timeout 60 seconds"
@@ -111,9 +114,9 @@ EOF
#test for diff
if [[ "$(diff $monitconfdir/jenkins <(echo "\
check directory jenkins_piddir path /var/run/$jenkinsuser
-if does not exist then exec \"/usr/bin/mkdir -p /var/run/$jenkinsuser\"
-if failed uid $jenkinsuser then exec \"/usr/bin/chown $jenkinsuser /var/run/$jenkinsuser\"
-if failed gid $jenkinsuser then exec \"/usr/bin/chown :$jenkinsuser /var/run/$jenkinsuser\"
+if does not exist then exec \"$mkdir -p /var/run/$jenkinsuser\"
+if failed uid $jenkinsuser then exec \"$chown $jenkinsuser /var/run/$jenkinsuser\"
+if failed gid $jenkinsuser then exec \"$chown :$jenkinsuser /var/run/$jenkinsuser\"
check process jenkins with pidfile /var/run/$jenkinsuser/jenkins_jnlp_pid
start program = \"/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $jenkinshome; export started_monit=true; $0 $@' with timeout 60 seconds\"
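
Resolving chown and mkdir with type -p removes the hard-coded /usr/bin paths, which break on distributions that ship coreutils elsewhere. A Python 3 sketch of the same idea using shutil.which (the jenkins user is hard-coded here for brevity; the real script derives it from its arguments):

import shutil

chown = shutil.which('chown')
mkdir = shutil.which('mkdir')

monit_stanza = '\n'.join([
    'check directory jenkins_piddir path /var/run/jenkins',
    'if does not exist then exec "%s -p /var/run/jenkins"' % mkdir,
    'if failed uid jenkins then exec "%s jenkins /var/run/jenkins"' % chown,
])
print(monit_stanza)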
diff --git a/utils/lab-reconfiguration/reconfigUcsNet.py b/utils/lab-reconfiguration/reconfigUcsNet.py
index 4c08f3dc9..0dd902f6d 100755
--- a/utils/lab-reconfiguration/reconfigUcsNet.py
+++ b/utils/lab-reconfiguration/reconfigUcsNet.py
@@ -22,8 +22,10 @@
# -p PASSWORD, --password=PASSWORD
# [Mandatory] Account Password for UCSM Login
# -f FILE, --file=FILE
-# [Optional] Yaml file with network config you want to set for POD
-# If not present only current network config will be printed
+# [Optional] Yaml file with network config you
+# want to set for POD
+# If not present, only the current network
+# config will be printed
#
import getpass
@@ -32,12 +34,14 @@ import platform
import yaml
import time
import sys
-from UcsSdk import *
-from collections import defaultdict
+from UcsSdk import LsmaintAck, LsPower, LsServer, OrgOrg
+from UcsSdk import UcsHandle, VnicEther, VnicEtherIf, YesOrNo
+
POD_PREFIX = "POD-2"
INSTALLER = "POD-21"
+
def getpassword(prompt):
if platform.system() == "Linux":
return getpass.unix_getpass(prompt=prompt)
@@ -51,7 +55,8 @@ def get_servers(handle=None):
"""
Return list of servers
"""
- orgObj = handle.GetManagedObject(None, OrgOrg.ClassId(), {OrgOrg.DN : "org-root"})[0]
+ orgObj = handle.GetManagedObject(
+ None, OrgOrg.ClassId(), {OrgOrg.DN: "org-root"})[0]
servers = handle.GetManagedObject(orgObj, LsServer.ClassId())
for server in servers:
if server.Type == 'instance' and POD_PREFIX in server.Dn:
@@ -63,10 +68,10 @@ def set_boot_policy(handle=None, server=None, policy=None):
Modify Boot policy of server
"""
obj = handle.GetManagedObject(None, LsServer.ClassId(), {
- LsServer.DN: server.Dn})
+ LsServer.DN: server.Dn})
handle.SetManagedObject(obj, LsServer.ClassId(), {
- LsServer.BOOT_POLICY_NAME: policy} )
- print " Configured boot policy: {}".format(policy)
+ LsServer.BOOT_POLICY_NAME: policy})
+ print(" Configured boot policy: {}".format(policy))
def ack_pending(handle=None, server=None):
@@ -74,30 +79,32 @@ def ack_pending(handle=None, server=None):
Acknowledge pending state of server
"""
handle.AddManagedObject(server, LsmaintAck.ClassId(), {
- LsmaintAck.DN: server.Dn + "/ack",
- LsmaintAck.DESCR:"",
- LsmaintAck.ADMIN_STATE:"trigger-immediate",
- LsmaintAck.SCHEDULER:"",
- LsmaintAck.POLICY_OWNER:"local"}, True)
- print " Pending-reboot -> Acknowledged."
+ LsmaintAck.DN: server.Dn + "/ack",
+ LsmaintAck.DESCR: "",
+ LsmaintAck.ADMIN_STATE: "trigger-immediate",
+ LsmaintAck.SCHEDULER: "",
+ LsmaintAck.POLICY_OWNER: "local"}, True)
+ print(" Pending-reboot -> Acknowledged.")
def boot_server(handle=None, server=None):
"""
Boot server (when it is in power-off state)
"""
- obj = handle.GetManagedObject(None, LsServer.ClassId(), {LsServer.DN: server.Dn})
+ obj = handle.GetManagedObject(
+ None, LsServer.ClassId(), {LsServer.DN: server.Dn})
handle.AddManagedObject(obj, LsPower.ClassId(), {
- LsPower.DN: server.Dn + "/power",
- LsPower.STATE:"admin-up"}, True)
- print " Booting."
+ LsPower.DN: server.Dn + "/power",
+ LsPower.STATE: "admin-up"}, True)
+ print(" Booting.")
def get_vnics(handle=None, server=None):
"""
Return list of vnics for given server
"""
- vnics = handle.ConfigResolveChildren(VnicEther.ClassId(), server.Dn, None, YesOrNo.TRUE)
+ vnics = handle.ConfigResolveChildren(
+ VnicEther.ClassId(), server.Dn, None, YesOrNo.TRUE)
return vnics.OutConfigs.GetChild()
@@ -105,28 +112,36 @@ def get_network_config(handle=None):
"""
Print current network config
"""
- print "\nCURRENT NETWORK CONFIG:"
- print " d - default, t - tagged"
+ print("\nCURRENT NETWORK CONFIG:")
+ print(" d - default, t - tagged")
for server in get_servers(handle):
- print ' {}'.format(server.Name)
- print ' Boot policy: {}'.format(server.OperBootPolicyName)
+ print(' {}'.format(server.Name))
+ print(' Boot policy: {}'.format(server.OperBootPolicyName))
for vnic in get_vnics(handle, server):
- print ' {}'.format(vnic.Name)
- print ' {}'.format(vnic.Addr)
- vnicIfs = handle.ConfigResolveChildren(VnicEtherIf.ClassId(), vnic.Dn, None, YesOrNo.TRUE)
+ print(' {}'.format(vnic.Name))
+ print(' {}'.format(vnic.Addr))
+ vnicIfs = handle.ConfigResolveChildren(
+ VnicEtherIf.ClassId(), vnic.Dn, None, YesOrNo.TRUE)
for vnicIf in vnicIfs.OutConfigs.GetChild():
if vnicIf.DefaultNet == 'yes':
- print ' Vlan: {}d'.format(vnicIf.Vnet)
+ print(' Vlan: {}d'.format(vnicIf.Vnet))
else:
- print ' Vlan: {}t'.format(vnicIf.Vnet)
+ print(' Vlan: {}t'.format(vnicIf.Vnet))
-def add_interface(handle=None, lsServerDn=None, vnicEther=None, templName=None, order=None, macAddr=None):
+def add_interface(handle=None,
+ lsServerDn=None,
+ vnicEther=None,
+ templName=None,
+ order=None,
+ macAddr=None):
"""
Add interface to server specified by server.DN name
"""
- print " Adding interface: {}, template: {}, server.Dn: {}".format(vnicEther, templName, lsServerDn)
- obj = handle.GetManagedObject(None, LsServer.ClassId(), {LsServer.DN:lsServerDn})
+ print(" Adding interface: {}, template: {}, server.Dn: {}".format(
+ vnicEther, templName, lsServerDn))
+ obj = handle.GetManagedObject(
+ None, LsServer.ClassId(), {LsServer.DN: lsServerDn})
vnicEtherDn = lsServerDn + "/ether-" + vnicEther
params = {
VnicEther.STATS_POLICY_NAME: "default",
@@ -146,8 +161,9 @@ def remove_interface(handle=None, vnicEtherDn=None):
"""
Remove interface specified by Distinguished Name (vnicEtherDn)
"""
- print " Removing interface: {}".format(vnicEtherDn)
- obj = handle.GetManagedObject(None, VnicEther.ClassId(), {VnicEther.DN:vnicEtherDn})
+ print(" Removing interface: {}".format(vnicEtherDn))
+ obj = handle.GetManagedObject(
+ None, VnicEther.ClassId(), {VnicEther.DN: vnicEtherDn})
handle.RemoveManagedObject(obj)
@@ -165,32 +181,37 @@ def set_network(handle=None, yamlFile=None):
Configure VLANs on POD according to the specified network
"""
# add interfaces and bind them with vNIC templates
- print "\nRECONFIGURING VNICs..."
+ print("\nRECONFIGURING VNICs...")
pod_data = read_yaml_file(yamlFile)
network = pod_data['network']
for index, server in enumerate(get_servers(handle)):
# Assign template to interface
for iface, data in network.iteritems():
- add_interface(handle, server.Dn, iface, data['template'], data['order'], data['mac-list'][index])
+ add_interface(handle, server.Dn, iface, data['template'], data[
+ 'order'], data['mac-list'][index])
- # Remove other interfaces which have not assigned required vnic template
+ # Remove other interfaces which have not been assigned the
+ # required vnic template
vnics = get_vnics(handle, server)
for vnic in vnics:
- if not any(data['template'] in vnic.OperNwTemplName for iface, data in network.iteritems()):
+ if not any(data['template'] in vnic.OperNwTemplName for
+ iface, data in network.iteritems()):
remove_interface(handle, vnic.Dn)
- print " {} removed, template: {}".format(vnic.Name, vnic.OperNwTemplName)
+ print(" {} removed, template: {}".format(
+ vnic.Name, vnic.OperNwTemplName))
# Set boot policy template
- if not INSTALLER in server.Dn:
+ if INSTALLER not in server.Dn:
set_boot_policy(handle, server, pod_data['boot-policy'])
if __name__ == "__main__":
- print "\n*** SKIPING RECONFIGURATION.***\n"
+ print("\n*** SKIPING RECONFIGURATION.***\n")
sys.exit(0)
# Latest urllib2 validate certs by default
- # The process wide "revert to the old behaviour" hook is to monkeypatch the ssl module
+ # The process wide "revert to the old behaviour" hook is to monkeypatch
+ # the ssl module
# https://bugs.python.org/issue22417
import ssl
if hasattr(ssl, '_create_unverified_context'):
@@ -198,14 +219,15 @@ if __name__ == "__main__":
try:
handle = UcsHandle()
parser = optparse.OptionParser()
- parser.add_option('-i', '--ip',dest="ip",
- help="[Mandatory] UCSM IP Address")
- parser.add_option('-u', '--username',dest="userName",
- help="[Mandatory] Account Username for UCSM Login")
- parser.add_option('-p', '--password',dest="password",
- help="[Mandatory] Account Password for UCSM Login")
- parser.add_option('-f', '--file',dest="yamlFile",
- help="[Optional] Yaml file contains network config you want to set on UCS POD1")
+ parser.add_option('-i', '--ip', dest="ip",
+ help="[Mandatory] UCSM IP Address")
+ parser.add_option('-u', '--username', dest="userName",
+ help="[Mandatory] Account Username for UCSM Login")
+ parser.add_option('-p', '--password', dest="password",
+ help="[Mandatory] Account Password for UCSM Login")
+ parser.add_option('-f', '--file', dest="yamlFile",
+ help=("[Optional] Yaml file contains network "
+ "config you want to set on UCS POD1"))
(options, args) = parser.parse_args()
if not options.ip:
@@ -215,26 +237,27 @@ if __name__ == "__main__":
parser.print_help()
parser.error("Provide UCSM UserName")
if not options.password:
- options.password=getpassword("UCSM Password:")
+ options.password = getpassword("UCSM Password:")
handle.Login(options.ip, options.userName, options.password)
# Change vnic template if specified in cli option
- if (options.yamlFile != None):
+ if (options.yamlFile is not None):
set_network(handle, options.yamlFile)
time.sleep(5)
- print "\nWait until Overall Status of all nodes is OK..."
- timeout = time.time() + 60*10 #10 minutes timeout
+ print("\nWait until Overall Status of all nodes is OK...")
+ timeout = time.time() + 60 * 10 # 10 minutes timeout
while True:
list_of_states = []
for server in get_servers(handle):
if server.OperState == "power-off":
- boot_server(handle,server)
+ boot_server(handle, server)
if server.OperState == "pending-reboot":
- ack_pending(handle,server)
+ ack_pending(handle, server)
list_of_states.append(server.OperState)
- print " {}, {} seconds remains.".format(list_of_states, round(timeout-time.time()))
+ print(" {}, {} seconds remains.".format(
+ list_of_states, round(timeout - time.time())))
if all(state == "ok" for state in list_of_states):
break
if time.time() > timeout:
@@ -246,11 +269,12 @@ if __name__ == "__main__":
handle.Logout()
- except Exception, err:
+ except Exception as err:
handle.Logout()
- print "Exception:", str(err)
- import traceback, sys
- print '-'*60
+ print("Exception:", str(err))
+ import traceback
+ import sys
+ print('-' * 60)
traceback.print_exc(file=sys.stdout)
- print '-'*60
+ print('-' * 60)
sys.exit(1)
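
Besides the print-function and layout cleanups, the loop above is the operative part of the script: boot powered-off servers, acknowledge pending reboots, and poll OperState until every node reports "ok" or ten minutes elapse. The polling pattern in isolation, as a generic sketch (get_states stands in for the UcsSdk calls):

import time

def wait_until_ok(get_states, timeout_s=600, interval_s=5):
    # Poll until every reported state is "ok" or the deadline expires.
    deadline = time.time() + timeout_s
    while True:
        states = get_states()
        if all(state == 'ok' for state in states):
            return True
        if time.time() > deadline:
            return False
        time.sleep(interval_s)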
diff --git a/utils/opnfv-artifacts.py b/utils/opnfv-artifacts.py
index 876efedba..2f2cc41ba 100644
--- a/utils/opnfv-artifacts.py
+++ b/utils/opnfv-artifacts.py
@@ -28,56 +28,55 @@ from apiclient.errors import HttpError
import argparse
import json
-import os
import sys
api = {
- 'projects': {},
- 'docs': {},
- 'releases': {},
+ 'projects': {},
+ 'docs': {},
+ 'releases': {},
}
releases = [
- 'arno.2015.1.0',
- 'arno.2015.2.0',
- 'brahmaputra.1.0',
+ 'arno.2015.1.0',
+ 'arno.2015.2.0',
+ 'brahmaputra.1.0',
]
# List of file extensions to filter out
ignore_extensions = [
- '.buildinfo',
- '.woff',
- '.ttf',
- '.svg',
- '.eot',
- '.pickle',
- '.doctree',
- '.js',
- '.png',
- '.css',
- '.gif',
- '.jpeg',
- '.jpg',
- '.bmp',
+ '.buildinfo',
+ '.woff',
+ '.ttf',
+ '.svg',
+ '.eot',
+ '.pickle',
+ '.doctree',
+ '.js',
+ '.png',
+ '.css',
+ '.gif',
+ '.jpeg',
+ '.jpg',
+ '.bmp',
]
parser = argparse.ArgumentParser(
- description='OPNFV Artifacts JSON Generator')
+ description='OPNFV Artifacts JSON Generator')
parser.add_argument(
- '-k',
- dest='key',
- default='',
- help='API Key for Google Cloud Storage')
+ '-k',
+ dest='key',
+ default='',
+ help='API Key for Google Cloud Storage')
parser.add_argument(
- '-p',
- default=None,
- dest='pretty',
- action='store_const',
- const=2,
- help='pretty print the output')
+ '-p',
+ default=None,
+ dest='pretty',
+ action='store_const',
+ const=2,
+ help='pretty print the output')
# Parse and assign arguments
args = parser.parse_args()
@@ -130,7 +129,6 @@ def has_logs(gerrit_review):
return False
-
def has_ignorable_extension(filename):
for extension in ignore_extensions:
if filename.lower().endswith(extension):
@@ -148,11 +146,11 @@ def get_results(key):
files = storage.objects().list(bucket='artifacts.opnfv.org',
fields='nextPageToken,'
'items('
- 'name,'
- 'mediaLink,'
- 'updated,'
- 'contentType,'
- 'size'
+ 'name,'
+ 'mediaLink,'
+ 'updated,'
+ 'contentType,'
+ 'size'
')')
while (files is not None):
sites = files.execute()
@@ -173,7 +171,8 @@ def get_results(key):
project = site_split[0]
name = '/'.join(site_split[1:])
- proxy = "http://build.opnfv.org/artifacts.opnfv.org/%s" % site['name']
+ proxy = "http://build.opnfv.org/artifacts.opnfv.org/%s" % site[
+ 'name']
if name.endswith('.html'):
href = "http://artifacts.opnfv.org/%s" % site['name']
href_type = 'view'
@@ -183,7 +182,7 @@ def get_results(key):
gerrit = has_gerrit_review(site_split)
logs = False # has_logs(gerrit)
- documentation = has_documentation(site_split)
+ # documentation = has_documentation(site_split)
release = has_release(site_split)
category = 'project'
diff --git a/utils/push-test-logs.sh b/utils/push-test-logs.sh
index 265ca6120..9099657c8 100644
--- a/utils/push-test-logs.sh
+++ b/utils/push-test-logs.sh
@@ -15,20 +15,20 @@ export PATH=$PATH:/usr/local/bin/
git_sha1="$(git rev-parse HEAD)"
res_build_date=${1:-$(date -u +"%Y-%m-%d_%H-%M-%S")}
project=$PROJECT
-branch=${GIT_BRANCH##*/}
+branch=${BRANCH##*/}
testbed=$NODE_NAME
dir_result="${HOME}/opnfv/$project/results/${branch}"
# src: https://wiki.opnfv.org/display/INF/Hardware+Infrastructure
-# + intel-pod3 (vsperf)
+# + intel-pod12 (vsperf)
node_list=(\
-'lf-pod1' 'lf-pod2' 'intel-pod2' 'intel-pod3' \
+'lf-pod1' 'lf-pod2' 'intel-pod2' 'intel-pod12' \
'intel-pod5' 'intel-pod6' 'intel-pod7' 'intel-pod8' \
'ericsson-pod1' 'ericsson-pod2' \
'ericsson-virtual1' 'ericsson-virtual2' 'ericsson-virtual3' \
-'ericsson-virtual4' 'ericsson-virtual5' \
+'ericsson-virtual4' 'ericsson-virtual5' 'ericsson-virtual12' \
'arm-pod1' 'arm-pod3' \
'huawei-pod1' 'huawei-pod2' 'huawei-pod3' 'huawei-pod4' 'huawei-pod5' \
-'huawei-pod6' 'huawei-pod7' \
+'huawei-pod6' 'huawei-pod7' 'huawei-pod12' \
'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4')
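
Switching from GIT_BRANCH to BRANCH keeps the ${BRANCH##*/} expansion, which strips any remote or namespace prefix so only the short branch name lands in the results path. Its effect, restated as a sketch:

def short_branch(branch):
    # Mirrors ${BRANCH##*/}: drop everything up to the last '/'.
    return branch.rsplit('/', 1)[-1]

assert short_branch('origin/stable/danube') == 'danube'
assert short_branch('master') == 'master'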
diff --git a/utils/test/dashboard/dashboard/common/elastic_access.py b/utils/test/dashboard/dashboard/common/elastic_access.py
index aaf776f7a..eb29ce879 100644
--- a/utils/test/dashboard/dashboard/common/elastic_access.py
+++ b/utils/test/dashboard/dashboard/common/elastic_access.py
@@ -30,7 +30,7 @@ def publish_docs(url, creds=None, body=None):
def _get_docs_nr(url, creds=None, body=None):
res_data = _get('{}/_search?size=0'.format(url), creds=creds, body=body)
- print type(res_data), res_data
+ print(type(res_data), res_data)
return res_data['hits']['total']
diff --git a/utils/test/dashboard/dashboard/conf/testcases.py b/utils/test/dashboard/dashboard/conf/testcases.py
index ff801b4c9..98ce20984 100644
--- a/utils/test/dashboard/dashboard/conf/testcases.py
+++ b/utils/test/dashboard/dashboard/conf/testcases.py
@@ -21,4 +21,4 @@ def get_format(project, case):
if __name__ == '__main__':
fmt = get_format('functest', 'vping_ssh')
- print fmt
+ print(fmt)
diff --git a/utils/test/dashboard/dashboard/elastic2kibana/utility.py b/utils/test/dashboard/dashboard/elastic2kibana/utility.py
index 55578bd8c..40d9202a6 100644
--- a/utils/test/dashboard/dashboard/elastic2kibana/utility.py
+++ b/utils/test/dashboard/dashboard/elastic2kibana/utility.py
@@ -2,7 +2,8 @@ import json
from jinja2 import Environment, PackageLoader
-env = Environment(loader=PackageLoader('dashboard', 'elastic2kibana/templates'))
+env = Environment(loader=PackageLoader('dashboard',
+ 'elastic2kibana/templates'))
env.filters['jsonify'] = json.dumps
diff --git a/utils/test/dashboard/dashboard/functest/format.py b/utils/test/dashboard/dashboard/functest/format.py
index ef485bae0..75d361ff8 100644
--- a/utils/test/dashboard/dashboard/functest/format.py
+++ b/utils/test/dashboard/dashboard/functest/format.py
@@ -6,7 +6,8 @@ def _convert_value(value):
def _convert_duration(duration):
- if (isinstance(duration, str) or isinstance(duration, unicode)) and ':' in duration:
+ if ((isinstance(duration, str) or
+ isinstance(duration, unicode)) and ':' in duration):
hours, minutes, seconds = duration.split(":")
hours = _convert_value(hours)
minutes = _convert_value(minutes)
@@ -42,11 +43,11 @@ def format_normal(testcase):
testcase_tests = float(testcase_details['tests'])
testcase_failures = float(testcase_details['failures'])
if testcase_tests != 0:
- testcase_details['success_percentage'] = 100 * (testcase_tests - testcase_failures) / testcase_tests
+ testcase_details['success_percentage'] = 100 * \
+ (testcase_tests - testcase_failures) / testcase_tests
else:
testcase_details['success_percentage'] = 0
-
return found
@@ -115,28 +116,33 @@ def format_onos(testcase):
"""
testcase_details = testcase['details']
- if 'FUNCvirNet' not in testcase_details or 'FUNCvirNetL3' not in testcase_details:
+ if ('FUNCvirNet' not in testcase_details or
+ 'FUNCvirNetL3' not in testcase_details):
return False
funcvirnet_details = testcase_details['FUNCvirNet']['status']
- funcvirnet_stats = _get_statistics(funcvirnet_details, ('Case result',), ('PASS', 'FAIL'))
+ funcvirnet_stats = _get_statistics(
+ funcvirnet_details, ('Case result',), ('PASS', 'FAIL'))
funcvirnet_passed = funcvirnet_stats['PASS']
funcvirnet_failed = funcvirnet_stats['FAIL']
funcvirnet_all = funcvirnet_passed + funcvirnet_failed
funcvirnetl3_details = testcase_details['FUNCvirNetL3']['status']
- funcvirnetl3_stats = _get_statistics(funcvirnetl3_details, ('Case result',), ('PASS', 'FAIL'))
+ funcvirnetl3_stats = _get_statistics(
+ funcvirnetl3_details, ('Case result',), ('PASS', 'FAIL'))
funcvirnetl3_passed = funcvirnetl3_stats['PASS']
funcvirnetl3_failed = funcvirnetl3_stats['FAIL']
funcvirnetl3_all = funcvirnetl3_passed + funcvirnetl3_failed
testcase_details['FUNCvirNet'] = {
- 'duration': _convert_duration(testcase_details['FUNCvirNet']['duration']),
+ 'duration':
+ _convert_duration(testcase_details['FUNCvirNet']['duration']),
'tests': funcvirnet_all,
'failures': funcvirnet_failed
}
testcase_details['FUNCvirNetL3'] = {
- 'duration': _convert_duration(testcase_details['FUNCvirNetL3']['duration']),
+ 'duration':
+ _convert_duration(testcase_details['FUNCvirNetL3']['duration']),
'tests': funcvirnetl3_all,
'failures': funcvirnetl3_failed
}
diff --git a/utils/test/dashboard/dashboard/mongo2elastic/main.py b/utils/test/dashboard/dashboard/mongo2elastic/main.py
index 688f55f7d..e33252df2 100644
--- a/utils/test/dashboard/dashboard/mongo2elastic/main.py
+++ b/utils/test/dashboard/dashboard/mongo2elastic/main.py
@@ -27,7 +27,8 @@ parser.add_argument('-ld', '--latest-days',
metavar='N',
help='get entries at most N days old from mongodb and'
' parse those that are not already in elasticsearch.'
- ' If not present, will get everything from mongodb, which is the default')
+ ' If not present, will get everything from mongodb,'
+ ' which is the default')
args = parser.parse_args()
CONF = APIConfig().parse(args.config_file)
@@ -37,6 +38,7 @@ tmp_docs_file = './mongo-{}.json'.format(uuid.uuid4())
class DocumentVerification(object):
+
def __init__(self, doc):
super(DocumentVerification, self).__init__()
self.doc = doc
@@ -55,8 +57,8 @@ class DocumentVerification(object):
for key, value in self.doc.items():
if key in mandatory_fields:
if value is None:
- logger.info("Skip testcase '%s' because field '%s' missing" %
- (self.doc_id, key))
+ logger.info("Skip testcase '%s' because field "
+ "'%s' missing" % (self.doc_id, key))
self.skip = True
else:
mandatory_fields.remove(key)
@@ -131,10 +133,12 @@ class DocumentPublisher(object):
self._publish()
def _publish(self):
- status, data = elastic_access.publish_docs(self.elastic_url, self.creds, self.doc)
+ status, data = elastic_access.publish_docs(
+ self.elastic_url, self.creds, self.doc)
if status > 300:
logger.error('Publish record[{}] failed, due to [{}]'
- .format(self.doc, json.loads(data)['error']['reason']))
+ .format(self.doc,
+ json.loads(data)['error']['reason']))
def _fix_date(self, date_string):
if isinstance(date_string, dict):
@@ -163,7 +167,8 @@ class DocumentsPublisher(object):
def export(self):
if self.days > 0:
- past_time = datetime.datetime.today() - datetime.timedelta(days=self.days)
+ past_time = datetime.datetime.today(
+ ) - datetime.timedelta(days=self.days)
query = '''{{
"project_name": "{}",
"case_name": "{}",
@@ -182,7 +187,7 @@ class DocumentsPublisher(object):
try:
subprocess.check_call(cmd)
return self
- except Exception, err:
+ except Exception as err:
logger.error("export mongodb failed: %s" % err)
self._remove()
exit(-1)
@@ -217,7 +222,8 @@ class DocumentsPublisher(object):
}}'''.format(self.project, self.case, self.days)
else:
raise Exception('Update days must be non-negative')
- self.existed_docs = elastic_access.get_docs(self.elastic_url, self.creds, body)
+ self.existed_docs = elastic_access.get_docs(
+ self.elastic_url, self.creds, body)
return self
def publish(self):
diff --git a/utils/test/dashboard/kibana_cleanup.py b/utils/test/dashboard/kibana_cleanup.py
index ee0190049..7e3662c29 100644
--- a/utils/test/dashboard/kibana_cleanup.py
+++ b/utils/test/dashboard/kibana_cleanup.py
@@ -9,7 +9,8 @@ from dashboard.common import elastic_access
logger = logging.getLogger('clear_kibana')
logger.setLevel(logging.DEBUG)
file_handler = logging.FileHandler('/var/log/{}.log'.format('clear_kibana'))
-file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
+file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: '
+ '%(message)s'))
logger.addHandler(file_handler)
@@ -21,12 +22,17 @@ def delete_all(url, es_creds):
if __name__ == '__main__':
- parser = argparse.ArgumentParser(description='Delete saved kibana searches, visualizations and dashboards')
- parser.add_argument('-e', '--elasticsearch-url', default='http://localhost:9200',
- help='the url of elasticsearch, defaults to http://localhost:9200')
+ parser = argparse.ArgumentParser(
+ description=('Delete saved kibana searches, '
+ 'visualizations and dashboards'))
+ parser.add_argument('-e', '--elasticsearch-url',
+ default='http://localhost:9200',
+ help=('the url of elasticsearch, '
+ 'defaults to http://localhost:9200'))
parser.add_argument('-u', '--elasticsearch-username', default=None,
- help='The username with password for elasticsearch in format username:password')
+ help=('The username with password for elasticsearch '
+ 'in format username:password'))
args = parser.parse_args()
base_elastic_url = args.elasticsearch_url
@@ -38,4 +44,3 @@ if __name__ == '__main__':
for url in urls:
delete_all(url, es_creds)
-
diff --git a/utils/test/reporting/functest/reporting-status.py b/utils/test/reporting/functest/reporting-status.py
index 66bdd57c1..df5632335 100755
--- a/utils/test/reporting/functest/reporting-status.py
+++ b/utils/test/reporting/functest/reporting-status.py
@@ -40,6 +40,9 @@ versions = rp_utils.get_config('general.versions')
installers = rp_utils.get_config('general.installers')
blacklist = rp_utils.get_config('functest.blacklist')
log_level = rp_utils.get_config('general.log.log_level')
+exclude_noha = rp_utils.get_config('functest.exclude_noha')
+exclude_virtual = rp_utils.get_config('functest.exclude_virtual')
+
response = requests.get(cf)
functest_yaml_config = yaml.safe_load(response.text)
@@ -48,20 +51,23 @@ logger.info("*******************************************")
logger.info("* *")
logger.info("* Generating reporting scenario status *")
logger.info("* Data retention: %s days *" % period)
-logger.info("* Log level: %s *" % log_level)
+logger.info("* Log level: %s *" % log_level)
+logger.info("* *")
+logger.info("* Virtual PODs exluded: %s *" % exclude_virtual)
+logger.info("* NOHA scenarios excluded: %s *" % exclude_noha)
logger.info("* *")
logger.info("*******************************************")
# Retrieve test cases of Tier 1 (smoke)
config_tiers = functest_yaml_config.get("tiers")
-# we consider Tier 1 (smoke),2 (features)
+# we consider Tier 0 (Healthcheck), Tier 1 (smoke), Tier 2 (features)
# to validate scenarios
-# Tier > 4 are not used to validate scenarios but we display the results anyway
+# Tier > 2 are not used to validate scenarios but we display the results anyway
# tricky thing for the API as some tests are Functest tests
# other tests are declared directly in the feature projects
for tier in config_tiers:
- if tier['order'] > 0 and tier['order'] < 2:
+ if tier['order'] >= 0 and tier['order'] < 2:
for case in tier['testcases']:
if case['name'] not in blacklist:
testValid.append(tc.TestCase(case['name'],
@@ -90,7 +96,6 @@ for version in versions:
scenario_stats = rp_utils.getScenarioStats(scenario_results)
items = {}
scenario_result_criteria = {}
-
scenario_file_name = ("./display/" + version +
"/functest/scenario_history.txt")
# initiate scenario file if it does not exist
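
With the widened lower bound, tier 0 (healthcheck) now counts toward scenario validation alongside tier 1 (smoke); higher tiers are still only displayed. The selection logic in isolation, with made-up tier data:

config_tiers = [
    {'order': 0, 'testcases': [{'name': 'connection_check'}]},
    {'order': 1, 'testcases': [{'name': 'vping_ssh'}]},
    {'order': 2, 'testcases': [{'name': 'tempest_smoke_serial'}]},
]
blacklist = ['ovno', 'security_scan']

valid = [case['name']
         for tier in config_tiers
         if 0 <= tier['order'] < 2
         for case in tier['testcases']
         if case['name'] not in blacklist]
# -> ['connection_check', 'vping_ssh']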
diff --git a/utils/test/reporting/functest/reporting-tempest.py b/utils/test/reporting/functest/reporting-tempest.py
index 5d6bcc062..6e6585a32 100755
--- a/utils/test/reporting/functest/reporting-tempest.py
+++ b/utils/test/reporting/functest/reporting-tempest.py
@@ -44,7 +44,7 @@ for version in rp_utils.get_config('general.versions'):
response = urlopen(request)
k = response.read()
results = json.loads(k)
- except URLError, e:
+ except URLError as e:
logger.error("Error code: %s" % e)
test_results = results['results']
@@ -73,9 +73,9 @@ for version in rp_utils.get_config('general.versions'):
nb_tests_run = result['details']['tests']
nb_tests_failed = result['details']['failures']
if nb_tests_run != 0:
- success_rate = 100*((int(nb_tests_run) -
- int(nb_tests_failed)) /
- int(nb_tests_run))
+ success_rate = 100 * ((int(nb_tests_run) -
+ int(nb_tests_failed)) /
+ int(nb_tests_run))
else:
success_rate = 0
diff --git a/utils/test/reporting/functest/reporting-vims.py b/utils/test/reporting/functest/reporting-vims.py
index 2077d2a4a..b236b8963 100755
--- a/utils/test/reporting/functest/reporting-vims.py
+++ b/utils/test/reporting/functest/reporting-vims.py
@@ -51,7 +51,7 @@ for version in versions:
response = urlopen(request)
k = response.read()
results = json.loads(k)
- except URLError, e:
+ except URLError as e:
logger.error("Error code: %s" % e)
test_results = results['results']
@@ -91,7 +91,7 @@ for version in versions:
result['pr_step_ok'] = 0
if nb_step != 0:
- result['pr_step_ok'] = (float(nb_step_ok)/nb_step)*100
+ result['pr_step_ok'] = (float(nb_step_ok) / nb_step) * 100
try:
logger.debug("Scenario %s, Installer %s"
% (s_result[1]['scenario'], installer))
diff --git a/utils/test/reporting/functest/testCase.py b/utils/test/reporting/functest/testCase.py
index 8d90fc861..22196c86b 100644
--- a/utils/test/reporting/functest/testCase.py
+++ b/utils/test/reporting/functest/testCase.py
@@ -36,14 +36,15 @@ class TestCase(object):
'moon': 'Moon',
'copper': 'Copper',
'security_scan': 'Security',
- 'multisite':'Multisite',
- 'domino':'Domino',
- 'odl-sfc':'SFC',
- 'onos_sfc':'SFC',
- 'parser':'Parser',
- 'connection_check':'Health (connection)',
- 'api_check':'Health (api)',
- 'snaps_smoke':'SNAPS' }
+ 'multisite': 'Multisite',
+ 'domino': 'Domino',
+ 'odl-sfc': 'SFC',
+ 'onos_sfc': 'SFC',
+ 'parser': 'Parser',
+ 'connection_check': 'Health (connection)',
+ 'api_check': 'Health (api)',
+ 'snaps_smoke': 'SNAPS',
+ 'snaps_health_check': 'Health (dhcp)'}
try:
self.displayName = display_name_matrix[self.name]
except:
@@ -131,14 +132,15 @@ class TestCase(object):
'moon': 'moon_authentication',
'copper': 'copper-notification',
'security_scan': 'security',
- 'multisite':'multisite',
- 'domino':'domino-multinode',
- 'odl-sfc':'functest-odl-sfc',
- 'onos_sfc':'onos_sfc',
- 'parser':'parser-basics',
- 'connection_check':'connection_check',
- 'api_check':'api_check',
- 'snaps_smoke':'snaps_smoke'
+ 'multisite': 'multisite',
+ 'domino': 'domino-multinode',
+ 'odl-sfc': 'functest-odl-sfc',
+ 'onos_sfc': 'onos_sfc',
+ 'parser': 'parser-basics',
+ 'connection_check': 'connection_check',
+ 'api_check': 'api_check',
+ 'snaps_smoke': 'snaps_smoke',
+ 'snaps_health_check': 'snaps_health_check'
}
try:
return test_match_matrix[self.name]
@@ -147,4 +149,3 @@ class TestCase(object):
def getDisplayName(self):
return self.displayName
-
diff --git a/utils/test/reporting/reporting.yaml b/utils/test/reporting/reporting.yaml
index fa9862615..2fb6b7831 100644
--- a/utils/test/reporting/reporting.yaml
+++ b/utils/test/reporting/reporting.yaml
@@ -2,13 +2,13 @@ general:
installers:
- apex
- compass
+ - daisy
- fuel
- joid
- - daisy
versions:
- master
- - colorado
+
log:
log_file: reporting.log
log_level: ERROR
@@ -30,17 +30,27 @@ general:
testapi:
url: testresults.opnfv.org/test/api/v1/results
-
+
functest:
blacklist:
- ovno
- security_scan
+ - rally_sanity
+ - healthcheck
+ - odl_netvirt
+ - aaa
+ - cloudify_ims
+ - orchestra_ims
+ - juju_epc
+ - orchestra
+ - promise
max_scenario_criteria: 50
test_conf: https://git.opnfv.org/cgit/functest/plain/functest/ci/testcases.yaml
log_level: ERROR
- jenkins_url: https://build.opnfv.org/ci/view/functest/job
-
-
+ jenkins_url: https://build.opnfv.org/ci/view/functest/job/
+ exclude_noha: False
+ exclude_virtual: False
+
yardstick:
test_conf: https://git.opnfv.org/cgit/yardstick/plain/tests/ci/report_config.yaml
log_level: ERROR
diff --git a/utils/test/reporting/utils/reporting_utils.py b/utils/test/reporting/utils/reporting_utils.py
index 0af60c78a..1879fb628 100644
--- a/utils/test/reporting/utils/reporting_utils.py
+++ b/utils/test/reporting/utils/reporting_utils.py
@@ -93,8 +93,8 @@ def getApiResults(case, installer, scenario, version):
response = urlopen(request)
k = response.read()
results = json.loads(k)
- except URLError, e:
- print 'No kittez. Got an error code:', e
+ except URLError as e:
+ print('No kittez. Got an error code:', e)
return results
@@ -115,8 +115,8 @@ def getScenarios(case, installer, version):
k = response.read()
results = json.loads(k)
test_results = results['results']
- except URLError, e:
- print 'Got an error code:', e
+ except URLError as e:
+ print('Got an error code:', e)
if test_results is not None:
test_results.reverse()
@@ -127,7 +127,15 @@ def getScenarios(case, installer, version):
# Retrieve all the scenarios per installer
if not r['scenario'] in scenario_results.keys():
scenario_results[r['scenario']] = []
- scenario_results[r['scenario']].append(r)
+ # Do we consider results from virtual pods ...
+ # Do we consider results for non HA scenarios...
+ exclude_virtual_pod = get_config('functest.exclude_virtual')
+ exclude_noha = get_config('functest.exclude_noha')
+ if ((exclude_virtual_pod and "virtual" in r['pod_name']) or
+ (exclude_noha and "noha" in r['scenario'])):
+ print("exclude virtual pod results...")
+ else:
+ scenario_results[r['scenario']].append(r)
return scenario_results
@@ -156,8 +164,8 @@ def getScenarioStatus(installer, version):
response.close()
results = json.loads(k)
test_results = results['results']
- except URLError, e:
- print 'Got an error code:', e
+ except URLError as e:
+ print('Got an error code:', e)
scenario_results = {}
result_dict = {}
@@ -190,7 +198,7 @@ def getNbtestOk(results):
if "PASS" in v:
nb_test_ok += 1
except:
- print "Cannot retrieve test status"
+ print("Cannot retrieve test status")
return nb_test_ok
@@ -254,16 +262,18 @@ def getResult(testCase, installer, scenario, version):
def getJenkinsUrl(build_tag):
# e.g. jenkins-functest-apex-apex-daily-colorado-daily-colorado-246
# id = 246
+ # jenkins-functest-compass-huawei-pod5-daily-master-136
+ # id = 136
# note it is linked to jenkins format
# if this format changes...function to be adapted....
url_base = get_config('functest.jenkins_url')
try:
build_id = [int(s) for s in build_tag.split("-") if s.isdigit()]
- jenkins_path = filter(lambda c: not c.isdigit(), build_tag)
- url_id = jenkins_path[8:-1] + "/" + str(build_id[0])
+ url_id = (build_tag[8:-(len(str(build_id[0])) + 1)] +
+ "/" + str(build_id[0]))
jenkins_url = url_base + url_id + "/console"
except:
- print 'Impossible to get jenkins url:'
+ print('Impossible to get jenkins url:')
return jenkins_url
@@ -273,7 +283,7 @@ def getScenarioPercent(scenario_score, scenario_criteria):
try:
score = float(scenario_score) / float(scenario_criteria) * 100
except:
- print 'Impossible to calculate the percentage score'
+ print('Impossible to calculate the percentage score')
return score
@@ -321,8 +331,8 @@ def get_percent(four_list, ten_list):
def _test():
status = getScenarioStatus("compass", "master")
- print "status:++++++++++++++++++++++++"
- print json.dumps(status, indent=4)
+ print("status:++++++++++++++++++++++++")
+ print(json.dumps(status, indent=4))
# ----------------------------------------------------------
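
The new filter in getScenarios honours the functest.exclude_virtual and functest.exclude_noha flags, dropping results from virtual PODs and no-HA scenarios before they are aggregated. The predicate in isolation, as a sketch:

def keep_result(result, exclude_virtual, exclude_noha):
    # Drop virtual-POD or no-HA results when the reporting config says so.
    if exclude_virtual and 'virtual' in result['pod_name']:
        return False
    if exclude_noha and 'noha' in result['scenario']:
        return False
    return True

r = {'pod_name': 'ericsson-virtual1', 'scenario': 'os-nosdn-nofeature-noha'}
assert not keep_result(r, exclude_virtual=True, exclude_noha=False)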
diff --git a/utils/test/testapi/etc/config.ini b/utils/test/testapi/etc/config.ini
index 0edb73a3f..77cc6c6ee 100644
--- a/utils/test/testapi/etc/config.ini
+++ b/utils/test/testapi/etc/config.ini
@@ -11,6 +11,7 @@ dbname = test_results_collection
port = 8000
# With debug_on set to true, error traces will be shown in HTTP responses
debug = True
+authenticate = False
[swagger]
base_url = http://localhost:8000
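
The new authenticate switch is consumed as api_authenticate_on in opnfv_testapi/common/config.py (see the config.py diff below) and gates the token check in the handlers. How such a flag is read with the stdlib parser, sketched in the Python 2 spelling the module uses:

import ConfigParser

parser = ConfigParser.SafeConfigParser()
parser.read('config.ini')
# getboolean accepts true/false, yes/no, on/off and 1/0.
authenticate_on = parser.getboolean('api', 'authenticate')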
diff --git a/utils/test/testapi/htmlize/doc-build.sh b/utils/test/testapi/htmlize/doc-build.sh
index 427b4378b..33560ceea 100644
--- a/utils/test/testapi/htmlize/doc-build.sh
+++ b/utils/test/testapi/htmlize/doc-build.sh
@@ -3,8 +3,18 @@
set -o errexit
# Create virtual environment
+virtualenv $WORKSPACE/testapi_venv
source $WORKSPACE/testapi_venv/bin/activate
+# Swagger Codegen Tool
+url="http://repo1.maven.org/maven2/io/swagger/swagger-codegen-cli/2.2.1/swagger-codegen-cli-2.2.1.jar"
+
+# Check for jar file locally and in the repo
+if [ ! -f swagger-codegen-cli.jar ];
+then
+ wget $url -O swagger-codegen-cli.jar
+fi
+
# Install Pre-requisites
pip install requests
diff --git a/utils/test/testapi/htmlize/finish.sh b/utils/test/testapi/htmlize/finish.sh
deleted file mode 100644
index dc3aa868b..000000000
--- a/utils/test/testapi/htmlize/finish.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-
-# Stop opnfv-testapi server
-proc_number=`ps -ef | grep opnfv-testapi | grep -v grep | wc -l`
-
-if [ $proc_number -gt 0 ]; then
- procs=`ps -ef | grep opnfv-testapi | grep -v grep`
- echo "Kill opnfv-testapi server $procs"
- ps -ef | grep opnfv-testapi | grep -v grep | awk '{print $2}' | xargs kill -kill &>/dev/null
-fi
diff --git a/utils/test/testapi/htmlize/htmlize.py b/utils/test/testapi/htmlize/htmlize.py
index c07f98ecf..b8c4fb43f 100644
--- a/utils/test/testapi/htmlize/htmlize.py
+++ b/utils/test/testapi/htmlize/htmlize.py
@@ -39,12 +39,14 @@ if __name__ == '__main__':
parser.add_argument('-ru', '--resource-listing-url',
type=str,
required=False,
- default='http://localhost:8000/swagger/spec.json',
+ default=('http://testresults.opnfv.org'
+ '/test/swagger/spec.json'),
help='Resource Listing Spec File')
parser.add_argument('-au', '--api-declaration-url',
type=str,
required=False,
- default='http://localhost:8000/swagger/spec',
+ default=('http://testresults.opnfv.org'
+ '/test/swagger/spec'),
help='API Declaration Spec File')
parser.add_argument('-o', '--output-directory',
required=True,
diff --git a/utils/test/testapi/htmlize/prepare.sh b/utils/test/testapi/htmlize/prepare.sh
deleted file mode 100644
index e79ac5693..000000000
--- a/utils/test/testapi/htmlize/prepare.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/bash
-
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-
-#Creating virtual environment
-virtualenv testapi_venv
-source testapi_venv/bin/activate
-
-# Swgger Codegen Tool
-url="http://repo1.maven.org/maven2/io/swagger/swagger-codegen-cli/2.2.1/swagger-codegen-cli-2.2.1.jar"
-
-#Check for jar file locally and in the repo
-if [ ! -f swagger-codegen-cli.jar ];
-then
- wget http://repo1.maven.org/maven2/io/swagger/swagger-codegen-cli/2.2.1/swagger-codegen-cli-2.2.1.jar -O swagger-codegen-cli.jar
-fi
-
-# Start OPNFV Test API Server
-cd utils/test/testapi/
-pip install -r requirements.txt
-./install.sh
-opnfv-testapi -c ../../../testapi_venv/etc/opnfv_testapi/config.ini &
diff --git a/utils/test/testapi/opnfv_testapi/cmd/server.py b/utils/test/testapi/opnfv_testapi/cmd/server.py
index c3d734607..013ee6642 100644
--- a/utils/test/testapi/opnfv_testapi/cmd/server.py
+++ b/utils/test/testapi/opnfv_testapi/cmd/server.py
@@ -31,19 +31,19 @@ TODOs :
import argparse
-import tornado.ioloop
import motor
+import tornado.ioloop
-from opnfv_testapi.common.config import APIConfig
-from opnfv_testapi.tornado_swagger import swagger
+from opnfv_testapi.common import config
from opnfv_testapi.router import url_mappings
+from opnfv_testapi.tornado_swagger import swagger
# optionally get config file from command line
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config-file", dest='config_file',
help="Config file location")
args = parser.parse_args()
-CONF = APIConfig().parse(args.config_file)
+CONF = config.APIConfig().parse(args.config_file)
# connecting to MongoDB server, and choosing database
client = motor.MotorClient(CONF.mongo_url)
@@ -57,6 +57,7 @@ def make_app():
url_mappings.mappings,
db=db,
debug=CONF.api_debug_on,
+ auth=CONF.api_authenticate_on
)
diff --git a/utils/test/testapi/opnfv_testapi/common/config.py b/utils/test/testapi/opnfv_testapi/common/config.py
index ecab88ae3..84a127391 100644
--- a/utils/test/testapi/opnfv_testapi/common/config.py
+++ b/utils/test/testapi/opnfv_testapi/common/config.py
@@ -7,9 +7,7 @@
# http://www.apache.org/licenses/LICENSE-2.0
# feng.xiaowei@zte.com.cn remove prepare_put_request 5-30-2016
##############################################################################
-
-
-from ConfigParser import SafeConfigParser, NoOptionError
+import ConfigParser
class ParseError(Exception):
@@ -36,13 +34,14 @@ class APIConfig:
self.mongo_dbname = None
self.api_port = None
self.api_debug_on = None
+ self.api_authenticate_on = None
self._parser = None
self.swagger_base_url = None
def _get_parameter(self, section, param):
try:
return self._parser.get(section, param)
- except NoOptionError:
+ except ConfigParser.NoOptionError:
raise ParseError("[%s.%s] parameter not found" % (section, param))
def _get_int_parameter(self, section, param):
@@ -68,7 +67,7 @@ class APIConfig:
if config_location is None:
config_location = obj._default_config_location
- obj._parser = SafeConfigParser()
+ obj._parser = ConfigParser.SafeConfigParser()
obj._parser.read(config_location)
if not obj._parser:
raise ParseError("%s not found" % config_location)
@@ -79,6 +78,9 @@ class APIConfig:
obj.api_port = obj._get_int_parameter("api", "port")
obj.api_debug_on = obj._get_bool_parameter("api", "debug")
+ obj.api_authenticate_on = obj._get_bool_parameter("api",
+ "authenticate")
+
obj.swagger_base_url = obj._get_parameter("swagger", "base_url")
return obj
@@ -92,4 +94,5 @@ class APIConfig:
self.mongo_dbname,
self.api_port,
self.api_debug_on,
+ self.api_authenticate_on,
self.swagger_base_url)
diff --git a/utils/test/testapi/opnfv_testapi/common/constants.py b/utils/test/testapi/opnfv_testapi/common/constants.py
index 4d39a142d..71bd95216 100644
--- a/utils/test/testapi/opnfv_testapi/common/constants.py
+++ b/utils/test/testapi/opnfv_testapi/common/constants.py
@@ -10,6 +10,7 @@
DEFAULT_REPRESENTATION = "application/json"
HTTP_BAD_REQUEST = 400
+HTTP_UNAUTHORIZED = 401
HTTP_FORBIDDEN = 403
HTTP_NOT_FOUND = 404
HTTP_OK = 200
diff --git a/utils/test/testapi/opnfv_testapi/resources/handlers.py b/utils/test/testapi/opnfv_testapi/resources/handlers.py
index 5f6c3df57..8255b526a 100644
--- a/utils/test/testapi/opnfv_testapi/resources/handlers.py
+++ b/utils/test/testapi/opnfv_testapi/resources/handlers.py
@@ -20,19 +20,19 @@
# feng.xiaowei@zte.com.cn remove DashboardHandler 5-30-2016
##############################################################################
-import json
from datetime import datetime
+import functools
+import json
from tornado import gen
-from tornado.web import RequestHandler, asynchronous, HTTPError
+from tornado import web
-from models import CreateResponse
-from opnfv_testapi.common.constants import DEFAULT_REPRESENTATION, \
- HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_FORBIDDEN
+import models
+from opnfv_testapi.common import constants
from opnfv_testapi.tornado_swagger import swagger
-class GenericApiHandler(RequestHandler):
+class GenericApiHandler(web.RequestHandler):
def __init__(self, application, request, **kwargs):
super(GenericApiHandler, self).__init__(application, request, **kwargs)
self.db = self.settings["db"]
@@ -44,49 +44,71 @@ class GenericApiHandler(RequestHandler):
self.db_testcases = 'testcases'
self.db_results = 'results'
self.db_scenarios = 'scenarios'
+ self.auth = self.settings["auth"]
def prepare(self):
if self.request.method != "GET" and self.request.method != "DELETE":
if self.request.headers.get("Content-Type") is not None:
if self.request.headers["Content-Type"].startswith(
- DEFAULT_REPRESENTATION):
+ constants.DEFAULT_REPRESENTATION):
try:
self.json_args = json.loads(self.request.body)
except (ValueError, KeyError, TypeError) as error:
- raise HTTPError(HTTP_BAD_REQUEST,
- "Bad Json format [{}]".
- format(error))
+ raise web.HTTPError(constants.HTTP_BAD_REQUEST,
+ "Bad Json format [{}]".
+ format(error))
def finish_request(self, json_object=None):
if json_object:
self.write(json.dumps(json_object))
- self.set_header("Content-Type", DEFAULT_REPRESENTATION)
+ self.set_header("Content-Type", constants.DEFAULT_REPRESENTATION)
self.finish()
def _create_response(self, resource):
href = self.request.full_url() + '/' + str(resource)
- return CreateResponse(href=href).format()
+ return models.CreateResponse(href=href).format()
def format_data(self, data):
cls_data = self.table_cls.from_dict(data)
return cls_data.format_http()
- @asynchronous
+ def authenticate(method):
+ @web.asynchronous
+ @gen.coroutine
+ @functools.wraps(method)
+ def wrapper(self, *args, **kwargs):
+ if self.auth:
+ try:
+ token = self.request.headers['X-Auth-Token']
+ except KeyError:
+ raise web.HTTPError(constants.HTTP_UNAUTHORIZED,
+ "No Authentication Header.")
+ query = {'access_token': token}
+ check = yield self._eval_db_find_one(query, 'tokens')
+ if not check:
+ raise web.HTTPError(constants.HTTP_FORBIDDEN,
+ "Invalid Token.")
+ ret = yield gen.coroutine(method)(self, *args, **kwargs)
+ raise gen.Return(ret)
+ return wrapper
+
+ @web.asynchronous
@gen.coroutine
+ @authenticate
def _create(self, miss_checks, db_checks, **kwargs):
"""
:param miss_checks: [miss1, miss2]
:param db_checks: [(table, exist, query, error)]
"""
if self.json_args is None:
- raise HTTPError(HTTP_BAD_REQUEST, "no body")
+ raise web.HTTPError(constants.HTTP_BAD_REQUEST, "no body")
data = self.table_cls.from_dict(self.json_args)
for miss in miss_checks:
miss_data = data.__getattribute__(miss)
if miss_data is None or miss_data == '':
- raise HTTPError(HTTP_BAD_REQUEST,
- '{} missing'.format(miss))
+ raise web.HTTPError(constants.HTTP_BAD_REQUEST,
+ '{} missing'.format(miss))
for k, v in kwargs.iteritems():
data.__setattr__(k, v)
@@ -95,7 +117,7 @@ class GenericApiHandler(RequestHandler):
check = yield self._eval_db_find_one(query(data), table)
if (exist and not check) or (not exist and check):
code, message = error(data)
- raise HTTPError(code, message)
+ raise web.HTTPError(code, message)
if self.table != 'results':
data.creation_date = datetime.now()
@@ -107,7 +129,7 @@ class GenericApiHandler(RequestHandler):
resource = _id
self.finish_request(self._create_response(resource))
- @asynchronous
+ @web.asynchronous
@gen.coroutine
def _list(self, query=None, res_op=None, *args, **kwargs):
if query is None:
@@ -126,40 +148,42 @@ class GenericApiHandler(RequestHandler):
res = res_op(data, *args)
self.finish_request(res)
- @asynchronous
+ @web.asynchronous
@gen.coroutine
def _get_one(self, query):
data = yield self._eval_db_find_one(query)
if data is None:
- raise HTTPError(HTTP_NOT_FOUND,
- "[{}] not exist in table [{}]"
- .format(query, self.table))
+ raise web.HTTPError(constants.HTTP_NOT_FOUND,
+ "[{}] not exist in table [{}]"
+ .format(query, self.table))
self.finish_request(self.format_data(data))
- @asynchronous
+ @web.asynchronous
@gen.coroutine
+ @authenticate
def _delete(self, query):
data = yield self._eval_db_find_one(query)
if data is None:
- raise HTTPError(HTTP_NOT_FOUND,
- "[{}] not exit in table [{}]"
- .format(query, self.table))
+ raise web.HTTPError(constants.HTTP_NOT_FOUND,
+ "[{}] not exit in table [{}]"
+ .format(query, self.table))
yield self._eval_db(self.table, 'remove', query)
self.finish_request()
- @asynchronous
+ @web.asynchronous
@gen.coroutine
+ @authenticate
def _update(self, query, db_keys):
if self.json_args is None:
- raise HTTPError(HTTP_BAD_REQUEST, "No payload")
+ raise web.HTTPError(constants.HTTP_BAD_REQUEST, "No payload")
# check old data exist
from_data = yield self._eval_db_find_one(query)
if from_data is None:
- raise HTTPError(HTTP_NOT_FOUND,
- "{} could not be found in table [{}]"
- .format(query, self.table))
+ raise web.HTTPError(constants.HTTP_NOT_FOUND,
+ "{} could not be found in table [{}]"
+ .format(query, self.table))
data = self.table_cls.from_dict(from_data)
# check new data exist
@@ -167,13 +191,12 @@ class GenericApiHandler(RequestHandler):
if not equal:
to_data = yield self._eval_db_find_one(new_query)
if to_data is not None:
- raise HTTPError(HTTP_FORBIDDEN,
- "{} already exists in table [{}]"
- .format(new_query, self.table))
+ raise web.HTTPError(constants.HTTP_FORBIDDEN,
+ "{} already exists in table [{}]"
+ .format(new_query, self.table))
# we merge the whole document """
- edit_request = data.format()
- edit_request.update(self._update_requests(data))
+ edit_request = self._update_requests(data)
""" Updating the DB """
yield self._eval_db(self.table, 'update', query, edit_request,
@@ -187,8 +210,11 @@ class GenericApiHandler(RequestHandler):
request = self._update_request(request, k, v,
data.__getattribute__(k))
if not request:
- raise HTTPError(HTTP_FORBIDDEN, "Nothing to update")
- return request
+ raise web.HTTPError(constants.HTTP_FORBIDDEN, "Nothing to update")
+
+ edit_request = data.format()
+ edit_request.update(request)
+ return edit_request
@staticmethod
def _update_request(edit_request, key, new_value, old_value):
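
The authenticate decorator above is the core of this change: with auth enabled, _create, _delete and _update now demand an X-Auth-Token header and look it up in the tokens collection, answering 401 when the header is missing and 403 when the token is unknown. A stripped-down synchronous version of the pattern (a sketch without the Tornado coroutine plumbing; request_headers and token_exists are hypothetical stand-ins for the real handler attributes):

import functools

class AuthError(Exception):
    def __init__(self, code, message):
        super(AuthError, self).__init__(message)
        self.code = code

def authenticate(method):
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        if self.auth:  # only enforced when the authenticate flag is on
            token = self.request_headers.get('X-Auth-Token')
            if token is None:
                raise AuthError(401, 'No Authentication Header.')
            if not self.token_exists(token):
                raise AuthError(403, 'Invalid Token.')
        return method(self, *args, **kwargs)
    return wrapper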
diff --git a/utils/test/testapi/opnfv_testapi/resources/pod_handlers.py b/utils/test/testapi/opnfv_testapi/resources/pod_handlers.py
index e1bd9d359..65c27f60a 100644
--- a/utils/test/testapi/opnfv_testapi/resources/pod_handlers.py
+++ b/utils/test/testapi/opnfv_testapi/resources/pod_handlers.py
@@ -6,17 +6,17 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
+import handlers
+from opnfv_testapi.common import constants
from opnfv_testapi.tornado_swagger import swagger
-from handlers import GenericApiHandler
-from pod_models import Pod
-from opnfv_testapi.common.constants import HTTP_FORBIDDEN
+import pod_models
-class GenericPodHandler(GenericApiHandler):
+class GenericPodHandler(handlers.GenericApiHandler):
def __init__(self, application, request, **kwargs):
super(GenericPodHandler, self).__init__(application, request, **kwargs)
self.table = 'pods'
- self.table_cls = Pod
+ self.table_cls = pod_models.Pod
class PodCLHandler(GenericPodHandler):
@@ -46,7 +46,7 @@ class PodCLHandler(GenericPodHandler):
def error(data):
message = '{} already exists as a pod'.format(data.name)
- return HTTP_FORBIDDEN, message
+ return constants.HTTP_FORBIDDEN, message
miss_checks = ['name']
db_checks = [(self.table, False, query, error)]
diff --git a/utils/test/testapi/opnfv_testapi/resources/project_handlers.py b/utils/test/testapi/opnfv_testapi/resources/project_handlers.py
index 94c65b722..f3521961d 100644
--- a/utils/test/testapi/opnfv_testapi/resources/project_handlers.py
+++ b/utils/test/testapi/opnfv_testapi/resources/project_handlers.py
@@ -6,19 +6,19 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
+import handlers
+from opnfv_testapi.common import constants
from opnfv_testapi.tornado_swagger import swagger
-from handlers import GenericApiHandler
-from opnfv_testapi.common.constants import HTTP_FORBIDDEN
-from project_models import Project
+import project_models
-class GenericProjectHandler(GenericApiHandler):
+class GenericProjectHandler(handlers.GenericApiHandler):
def __init__(self, application, request, **kwargs):
super(GenericProjectHandler, self).__init__(application,
request,
**kwargs)
self.table = 'projects'
- self.table_cls = Project
+ self.table_cls = project_models.Project
class ProjectCLHandler(GenericProjectHandler):
@@ -48,7 +48,7 @@ class ProjectCLHandler(GenericProjectHandler):
def error(data):
message = '{} already exists as a project'.format(data.name)
- return HTTP_FORBIDDEN, message
+ return constants.HTTP_FORBIDDEN, message
miss_checks = ['name']
db_checks = [(self.table, False, query, error)]
diff --git a/utils/test/testapi/opnfv_testapi/resources/result_handlers.py b/utils/test/testapi/opnfv_testapi/resources/result_handlers.py
index 2a1ed56ee..d41ba4820 100644
--- a/utils/test/testapi/opnfv_testapi/resources/result_handlers.py
+++ b/utils/test/testapi/opnfv_testapi/resources/result_handlers.py
@@ -6,30 +6,32 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-from datetime import datetime, timedelta
+from datetime import datetime
+from datetime import timedelta
-from bson.objectid import ObjectId
-from tornado.web import HTTPError
+from bson import objectid
+from tornado import web
-from opnfv_testapi.common.constants import HTTP_BAD_REQUEST, HTTP_NOT_FOUND
-from opnfv_testapi.resources.handlers import GenericApiHandler
-from opnfv_testapi.resources.result_models import TestResult
+from opnfv_testapi.common import constants
+from opnfv_testapi.resources import handlers
+from opnfv_testapi.resources import result_models
from opnfv_testapi.tornado_swagger import swagger
-class GenericResultHandler(GenericApiHandler):
+class GenericResultHandler(handlers.GenericApiHandler):
def __init__(self, application, request, **kwargs):
super(GenericResultHandler, self).__init__(application,
request,
**kwargs)
self.table = self.db_results
- self.table_cls = TestResult
+ self.table_cls = result_models.TestResult
def get_int(self, key, value):
try:
value = int(value)
except:
- raise HTTPError(HTTP_BAD_REQUEST, '{} must be int'.format(key))
+ raise web.HTTPError(constants.HTTP_BAD_REQUEST,
+ '{} must be int'.format(key))
return value
def set_query(self):
@@ -144,14 +146,14 @@ class ResultsCLHandler(GenericResultHandler):
def pod_error(data):
message = 'Could not find pod [{}]'.format(data.pod_name)
- return HTTP_NOT_FOUND, message
+ return constants.HTTP_NOT_FOUND, message
def project_query(data):
return {'name': data.project_name}
def project_error(data):
message = 'Could not find project [{}]'.format(data.project_name)
- return HTTP_NOT_FOUND, message
+ return constants.HTTP_NOT_FOUND, message
def testcase_query(data):
return {'project_name': data.project_name, 'name': data.case_name}
@@ -159,7 +161,7 @@ class ResultsCLHandler(GenericResultHandler):
def testcase_error(data):
message = 'Could not find testcase [{}] in project [{}]'\
.format(data.case_name, data.project_name)
- return HTTP_NOT_FOUND, message
+ return constants.HTTP_NOT_FOUND, message
miss_checks = ['pod_name', 'project_name', 'case_name']
db_checks = [('pods', True, pod_query, pod_error),
@@ -178,7 +180,7 @@ class ResultsGURHandler(GenericResultHandler):
@raise 404: test result not exist
"""
query = dict()
- query["_id"] = ObjectId(result_id)
+ query["_id"] = objectid.ObjectId(result_id)
self._get_one(query)
@swagger.operation(nickname="updateTestResultById")
@@ -193,6 +195,6 @@ class ResultsGURHandler(GenericResultHandler):
@raise 404: result not exist
@raise 403: nothing to update
"""
- query = {'_id': ObjectId(result_id)}
+ query = {'_id': objectid.ObjectId(result_id)}
db_keys = []
self._update(query, db_keys)
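
Aside: the ObjectId round-trip these result handlers rely on, as a standalone sketch (needs pymongo's bson package):

    from bson import objectid

    oid = objectid.ObjectId()                # generated when a result is stored
    as_str = str(oid)                        # the form seen in /api/v1/results/<id>
    assert objectid.ObjectId(as_str) == oid  # handlers rebuild it from the URL path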
diff --git a/utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py b/utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py
index a9b89eb89..083bf59fc 100644
--- a/utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py
+++ b/utils/test/testapi/opnfv_testapi/resources/scenario_handlers.py
@@ -1,16 +1,16 @@
-from opnfv_testapi.common.constants import HTTP_FORBIDDEN
-from opnfv_testapi.resources.handlers import GenericApiHandler
-from opnfv_testapi.resources.scenario_models import Scenario
+from opnfv_testapi.common import constants
+from opnfv_testapi.resources import handlers
+import opnfv_testapi.resources.scenario_models as models
from opnfv_testapi.tornado_swagger import swagger
-class GenericScenarioHandler(GenericApiHandler):
+class GenericScenarioHandler(handlers.GenericApiHandler):
def __init__(self, application, request, **kwargs):
super(GenericScenarioHandler, self).__init__(application,
request,
**kwargs)
self.table = self.db_scenarios
- self.table_cls = Scenario
+ self.table_cls = models.Scenario
class ScenariosCLHandler(GenericScenarioHandler):
@@ -80,7 +80,7 @@ class ScenariosCLHandler(GenericScenarioHandler):
def error(data):
message = '{} already exists as a scenario'.format(data.name)
- return HTTP_FORBIDDEN, message
+ return constants.HTTP_FORBIDDEN, message
miss_checks = ['name']
db_checks = [(self.table, False, query, error)]
@@ -104,11 +104,180 @@ class ScenarioGURHandler(GenericScenarioHandler):
"""
@description: update a single scenario by name
@param body: fields to be updated
- @type body: L{ScenarioCreateRequest}
+ @type body: L{ScenarioUpdateRequest}
@in body: body
@rtype: L{Scenario}
@return 200: update success
@raise 404: scenario not exist
@raise 403: nothing to update
"""
- pass
+ query = {'name': name}
+ db_keys = ['name']
+ self._update(query, db_keys)
+
+ @swagger.operation(nickname="deleteScenarioByName")
+ def delete(self, name):
+ """
+ @description: delete a scenario by name
+ @return 200: delete success
+        @raise 404: scenario not exist
+ """
+
+ query = {'name': name}
+ self._delete(query)
+
+ def _update_query(self, keys, data):
+ query = dict()
+ equal = True
+ if self._is_rename():
+ new = self._term.get('name')
+ if data.name != new:
+ equal = False
+ query['name'] = new
+
+ return equal, query
+
+ def _update_requests(self, data):
+ updates = {
+ ('name', 'update'): self._update_requests_rename,
+ ('installer', 'add'): self._update_requests_add_installer,
+ ('installer', 'delete'): self._update_requests_delete_installer,
+ ('version', 'add'): self._update_requests_add_version,
+ ('version', 'delete'): self._update_requests_delete_version,
+ ('owner', 'update'): self._update_requests_change_owner,
+ ('project', 'add'): self._update_requests_add_project,
+ ('project', 'delete'): self._update_requests_delete_project,
+ ('customs', 'add'): self._update_requests_add_customs,
+ ('customs', 'delete'): self._update_requests_delete_customs,
+ ('score', 'add'): self._update_requests_add_score,
+ ('trust_indicator', 'add'): self._update_requests_add_ti,
+ }
+
+ updates[(self._field, self._op)](data)
+
+ return data.format()
+
+ def _iter_installers(xstep):
+ def magic(self, data):
+ [xstep(self, installer)
+ for installer in self._filter_installers(data.installers)]
+ return magic
+
+ def _iter_versions(xstep):
+ def magic(self, installer):
+ [xstep(self, version)
+ for version in (self._filter_versions(installer.versions))]
+ return magic
+
+ def _iter_projects(xstep):
+ def magic(self, version):
+ [xstep(self, project)
+ for project in (self._filter_projects(version.projects))]
+ return magic
+
+ def _update_requests_rename(self, data):
+ data.name = self._term.get('name')
+
+ def _update_requests_add_installer(self, data):
+ data.installers.append(models.ScenarioInstaller.from_dict(self._term))
+
+ def _update_requests_delete_installer(self, data):
+ data.installers = self._remove_installers(data.installers)
+
+ @_iter_installers
+ def _update_requests_add_version(self, installer):
+ installer.versions.append(models.ScenarioVersion.from_dict(self._term))
+
+ @_iter_installers
+ def _update_requests_delete_version(self, installer):
+ installer.versions = self._remove_versions(installer.versions)
+
+ @_iter_installers
+ @_iter_versions
+ def _update_requests_change_owner(self, version):
+ version.owner = self._term.get('owner')
+
+ @_iter_installers
+ @_iter_versions
+ def _update_requests_add_project(self, version):
+ version.projects.append(models.ScenarioProject.from_dict(self._term))
+
+ @_iter_installers
+ @_iter_versions
+ def _update_requests_delete_project(self, version):
+ version.projects = self._remove_projects(version.projects)
+
+ @_iter_installers
+ @_iter_versions
+ @_iter_projects
+ def _update_requests_add_customs(self, project):
+ project.customs = list(set(project.customs + self._term))
+
+ @_iter_installers
+ @_iter_versions
+ @_iter_projects
+ def _update_requests_delete_customs(self, project):
+ project.customs = filter(
+ lambda f: f not in self._term,
+ project.customs)
+
+ @_iter_installers
+ @_iter_versions
+ @_iter_projects
+ def _update_requests_add_score(self, project):
+ project.scores.append(
+ models.ScenarioScore.from_dict(self._term))
+
+ @_iter_installers
+ @_iter_versions
+ @_iter_projects
+ def _update_requests_add_ti(self, project):
+ project.trust_indicators.append(
+ models.ScenarioTI.from_dict(self._term))
+
+ def _is_rename(self):
+ return self._field == 'name' and self._op == 'update'
+
+ def _remove_installers(self, installers):
+ return self._remove('installer', installers)
+
+ def _filter_installers(self, installers):
+ return self._filter('installer', installers)
+
+ def _remove_versions(self, versions):
+ return self._remove('version', versions)
+
+ def _filter_versions(self, versions):
+ return self._filter('version', versions)
+
+ def _remove_projects(self, projects):
+ return self._remove('project', projects)
+
+ def _filter_projects(self, projects):
+ return self._filter('project', projects)
+
+ def _remove(self, field, fields):
+ return filter(
+ lambda f: getattr(f, field) != self._locate.get(field),
+ fields)
+
+ def _filter(self, field, fields):
+ return filter(
+ lambda f: getattr(f, field) == self._locate.get(field),
+ fields)
+
+ @property
+ def _field(self):
+ return self.json_args.get('field')
+
+ @property
+ def _op(self):
+ return self.json_args.get('op')
+
+ @property
+ def _locate(self):
+ return self.json_args.get('locate')
+
+ @property
+ def _term(self):
+ return self.json_args.get('term')
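
The (field, op) dispatch table above defines the whole update surface of PUT /api/v1/scenarios/<name>. A minimal client-side sketch — host, port, and the scenario name are assumptions, not part of this patch:

    import json
    import urllib2  # Python 2, matching the codebase

    # Adds project 'qtip' under installer 'apex' / version 'master'; routed
    # to _update_requests_add_project via the ('project', 'add') key above.
    body = {'field': 'project',
            'op': 'add',
            'locate': {'installer': 'apex', 'version': 'master'},
            'term': {'project': 'qtip'}}
    req = urllib2.Request('http://localhost:8000/api/v1/scenarios/odl-l2',
                          data=json.dumps(body),
                          headers={'Content-Type': 'application/json'})
    req.get_method = lambda: 'PUT'
    print(urllib2.urlopen(req).read())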
diff --git a/utils/test/testapi/opnfv_testapi/resources/scenario_models.py b/utils/test/testapi/opnfv_testapi/resources/scenario_models.py
index f89a12428..73bcbe99e 100644
--- a/utils/test/testapi/opnfv_testapi/resources/scenario_models.py
+++ b/utils/test/testapi/opnfv_testapi/resources/scenario_models.py
@@ -2,6 +2,14 @@ import models
from opnfv_testapi.tornado_swagger import swagger
+def list_default(value):
+ return value if value else list()
+
+
+def dict_default(value):
+ return value if value else dict()
+
+
@swagger.model()
class ScenarioTI(models.ModelBase):
def __init__(self, date=None, status='silver'):
@@ -32,9 +40,9 @@ class ScenarioProject(models.ModelBase):
scores=None,
trust_indicators=None):
self.project = project
- self.customs = customs
- self.scores = scores
- self.trust_indicators = trust_indicators
+ self.customs = list_default(customs)
+ self.scores = list_default(scores)
+ self.trust_indicators = list_default(trust_indicators)
@staticmethod
def attr_parser():
@@ -50,7 +58,7 @@ class ScenarioVersion(models.ModelBase):
"""
def __init__(self, version=None, projects=None):
self.version = version
- self.projects = projects
+ self.projects = list_default(projects)
@staticmethod
def attr_parser():
@@ -65,7 +73,7 @@ class ScenarioInstaller(models.ModelBase):
"""
def __init__(self, installer=None, versions=None):
self.installer = installer
- self.versions = versions if versions else list()
+ self.versions = list_default(versions)
@staticmethod
def attr_parser():
@@ -80,7 +88,7 @@ class ScenarioCreateRequest(models.ModelBase):
"""
def __init__(self, name='', installers=None):
self.name = name
- self.installers = installers if installers else list()
+ self.installers = list_default(installers)
@staticmethod
def attr_parser():
@@ -88,6 +96,21 @@ class ScenarioCreateRequest(models.ModelBase):
@swagger.model()
+class ScenarioUpdateRequest(models.ModelBase):
+ """
+ @property field: update field
+ @property op: add/delete/update
+ @property locate: information used to locate the field
+ @property term: new value
+ """
+ def __init__(self, field=None, op=None, locate=None, term=None):
+ self.field = field
+ self.op = op
+ self.locate = dict_default(locate)
+ self.term = dict_default(term)
+
+
+@swagger.model()
class Scenario(models.ModelBase):
"""
@property installers:
@@ -97,7 +120,7 @@ class Scenario(models.ModelBase):
self.name = name
self._id = _id
self.creation_date = create_date
- self.installers = installers if installers else list()
+ self.installers = list_default(installers)
@staticmethod
def attr_parser():
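
What the two new helpers buy, in isolation: model attributes are always real containers, so downstream code can append or iterate without None checks. A minimal sketch:

    def list_default(value):
        return value if value else list()

    customs = list_default(None)     # None (or any falsy value) becomes []
    customs.append('healthcheck')    # safe: no AttributeError on None
    assert customs == ['healthcheck']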
diff --git a/utils/test/testapi/opnfv_testapi/resources/testcase_handlers.py b/utils/test/testapi/opnfv_testapi/resources/testcase_handlers.py
index 100a4fd91..3debd6918 100644
--- a/utils/test/testapi/opnfv_testapi/resources/testcase_handlers.py
+++ b/utils/test/testapi/opnfv_testapi/resources/testcase_handlers.py
@@ -6,19 +6,19 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-from opnfv_testapi.common.constants import HTTP_FORBIDDEN
-from opnfv_testapi.resources.handlers import GenericApiHandler
-from opnfv_testapi.resources.testcase_models import Testcase
+from opnfv_testapi.common import constants
+from opnfv_testapi.resources import handlers
+from opnfv_testapi.resources import testcase_models
from opnfv_testapi.tornado_swagger import swagger
-class GenericTestcaseHandler(GenericApiHandler):
+class GenericTestcaseHandler(handlers.GenericApiHandler):
def __init__(self, application, request, **kwargs):
super(GenericTestcaseHandler, self).__init__(application,
request,
**kwargs)
self.table = self.db_testcases
- self.table_cls = Testcase
+ self.table_cls = testcase_models.Testcase
class TestcaseCLHandler(GenericTestcaseHandler):
@@ -58,12 +58,12 @@ class TestcaseCLHandler(GenericTestcaseHandler):
def p_error(data):
message = 'Could not find project [{}]'.format(data.project_name)
- return HTTP_FORBIDDEN, message
+ return constants.HTTP_FORBIDDEN, message
def tc_error(data):
message = '{} already exists as a testcase in project {}'\
.format(data.name, data.project_name)
- return HTTP_FORBIDDEN, message
+ return constants.HTTP_FORBIDDEN, message
miss_checks = ['name']
db_checks = [(self.db_projects, True, p_query, p_error),
diff --git a/utils/test/testapi/opnfv_testapi/router/url_mappings.py b/utils/test/testapi/opnfv_testapi/router/url_mappings.py
index 0ae3c31c3..39cf006af 100644
--- a/utils/test/testapi/opnfv_testapi/router/url_mappings.py
+++ b/utils/test/testapi/opnfv_testapi/router/url_mappings.py
@@ -6,37 +6,34 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-from opnfv_testapi.resources.handlers import VersionHandler
-from opnfv_testapi.resources.testcase_handlers import TestcaseCLHandler, \
- TestcaseGURHandler
-from opnfv_testapi.resources.pod_handlers import PodCLHandler, PodGURHandler
-from opnfv_testapi.resources.project_handlers import ProjectCLHandler, \
- ProjectGURHandler
-from opnfv_testapi.resources.result_handlers import ResultsCLHandler, \
- ResultsGURHandler
-from opnfv_testapi.resources.scenario_handlers import ScenariosCLHandler
-from opnfv_testapi.resources.scenario_handlers import ScenarioGURHandler
+from opnfv_testapi.resources import handlers
+from opnfv_testapi.resources import pod_handlers
+from opnfv_testapi.resources import project_handlers
+from opnfv_testapi.resources import result_handlers
+from opnfv_testapi.resources import scenario_handlers
+from opnfv_testapi.resources import testcase_handlers
mappings = [
# GET /versions => GET API version
- (r"/versions", VersionHandler),
+ (r"/versions", handlers.VersionHandler),
# few examples:
# GET /api/v1/pods => Get all pods
# GET /api/v1/pods/1 => Get details on POD 1
- (r"/api/v1/pods", PodCLHandler),
- (r"/api/v1/pods/([^/]+)", PodGURHandler),
+ (r"/api/v1/pods", pod_handlers.PodCLHandler),
+ (r"/api/v1/pods/([^/]+)", pod_handlers.PodGURHandler),
# few examples:
# GET /projects
# GET /projects/yardstick
- (r"/api/v1/projects", ProjectCLHandler),
- (r"/api/v1/projects/([^/]+)", ProjectGURHandler),
+ (r"/api/v1/projects", project_handlers.ProjectCLHandler),
+ (r"/api/v1/projects/([^/]+)", project_handlers.ProjectGURHandler),
# few examples
# GET /projects/qtip/cases => Get cases for qtip
- (r"/api/v1/projects/([^/]+)/cases", TestcaseCLHandler),
- (r"/api/v1/projects/([^/]+)/cases/([^/]+)", TestcaseGURHandler),
+ (r"/api/v1/projects/([^/]+)/cases", testcase_handlers.TestcaseCLHandler),
+ (r"/api/v1/projects/([^/]+)/cases/([^/]+)",
+ testcase_handlers.TestcaseGURHandler),
# new path to avoid a long depth
# GET /results?project=functest&case=keystone.catalog&pod=1
@@ -44,10 +41,10 @@ mappings = [
# POST /results =>
# Push results with mandatory request payload parameters
# (project, case, and pod)
- (r"/api/v1/results", ResultsCLHandler),
- (r"/api/v1/results/([^/]+)", ResultsGURHandler),
+ (r"/api/v1/results", result_handlers.ResultsCLHandler),
+ (r"/api/v1/results/([^/]+)", result_handlers.ResultsGURHandler),
# scenarios
- (r"/api/v1/scenarios", ScenariosCLHandler),
- (r"/api/v1/scenarios/([^/]+)", ScenarioGURHandler),
+ (r"/api/v1/scenarios", scenario_handlers.ScenariosCLHandler),
+ (r"/api/v1/scenarios/([^/]+)", scenario_handlers.ScenarioGURHandler),
]
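
For context, a minimal sketch of how a mappings list like this becomes a running service (the port is an assumption; the real entry point lives elsewhere in the tree). Settings passed to the Application — db, debug, and the new auth flag — are what the handlers read back, as the updated unit tests below demonstrate:

    import tornado.ioloop
    import tornado.web

    from opnfv_testapi.router import url_mappings

    def main():
        app = tornado.web.Application(url_mappings.mappings, auth=False)
        app.listen(8888)  # port is an assumption
        tornado.ioloop.IOLoop.current().start()

    if __name__ == '__main__':
        main()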
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py b/utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py
index 3c4fd01a3..ef74a0857 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py
@@ -242,3 +242,4 @@ projects = MemDb('projects')
testcases = MemDb('testcases')
results = MemDb('results')
scenarios = MemDb('scenarios')
+tokens = MemDb('tokens')
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_base.py b/utils/test/testapi/opnfv_testapi/tests/unit/test_base.py
index fc780e44c..b2be8d593 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/test_base.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/test_base.py
@@ -8,20 +8,20 @@
##############################################################################
import json
-from tornado.web import Application
-from tornado.testing import AsyncHTTPTestCase
+from tornado import testing
+from tornado import web
-from opnfv_testapi.router import url_mappings
-from opnfv_testapi.resources.models import CreateResponse
import fake_pymongo
+from opnfv_testapi.resources import models
+from opnfv_testapi.router import url_mappings
-class TestBase(AsyncHTTPTestCase):
+class TestBase(testing.AsyncHTTPTestCase):
headers = {'Content-Type': 'application/json; charset=UTF-8'}
def setUp(self):
self.basePath = ''
- self.create_res = CreateResponse
+ self.create_res = models.CreateResponse
self.get_res = None
self.list_res = None
self.update_res = None
@@ -31,10 +31,11 @@ class TestBase(AsyncHTTPTestCase):
super(TestBase, self).setUp()
def get_app(self):
- return Application(
+ return web.Application(
url_mappings.mappings,
db=fake_pymongo,
debug=True,
+ auth=False
)
def create_d(self, *args):
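
The new auth=False setting keeps the existing tests token-free. A hedged sketch of how a handler can consume such a flag — the actual check is not part of this hunk:

    from tornado import web

    class AuthSketchHandler(web.RequestHandler):
        def prepare(self):
            # Tornado exposes Application settings to every handler; with
            # the auth flag on, write requests must carry a token header.
            if self.settings.get('auth'):
                token = self.request.headers.get('X-Auth-Token')
                if token is None:
                    raise web.HTTPError(401, 'No Authentication Header.')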
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_fake_pymongo.py b/utils/test/testapi/opnfv_testapi/tests/unit/test_fake_pymongo.py
index 5f50ba867..7c43fca62 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/test_fake_pymongo.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/test_fake_pymongo.py
@@ -9,13 +9,13 @@
import unittest
from tornado import gen
-from tornado.testing import AsyncHTTPTestCase, gen_test
-from tornado.web import Application
+from tornado import testing
+from tornado import web
import fake_pymongo
-class MyTest(AsyncHTTPTestCase):
+class MyTest(testing.AsyncHTTPTestCase):
def setUp(self):
super(MyTest, self).setUp()
self.db = fake_pymongo
@@ -23,7 +23,7 @@ class MyTest(AsyncHTTPTestCase):
self.io_loop.run_sync(self.fixture_setup)
def get_app(self):
- return Application()
+ return web.Application()
@gen.coroutine
def fixture_setup(self):
@@ -32,13 +32,13 @@ class MyTest(AsyncHTTPTestCase):
yield self.db.pods.insert({'_id': '1', 'name': 'test1'})
yield self.db.pods.insert({'name': 'test2'})
- @gen_test
+ @testing.gen_test
def test_find_one(self):
user = yield self.db.pods.find_one({'name': 'test1'})
self.assertEqual(user, self.test1)
self.db.pods.remove()
- @gen_test
+ @testing.gen_test
def test_find(self):
cursor = self.db.pods.find()
names = []
@@ -47,7 +47,7 @@ class MyTest(AsyncHTTPTestCase):
names.append(ob.get('name'))
self.assertItemsEqual(names, ['test1', 'test2'])
- @gen_test
+ @testing.gen_test
def test_update(self):
yield self.db.pods.update({'_id': '1'}, {'name': 'new_test1'})
user = yield self.db.pods.find_one({'_id': '1'})
@@ -71,7 +71,7 @@ class MyTest(AsyncHTTPTestCase):
None,
check_keys=False)
- @gen_test
+ @testing.gen_test
def test_remove(self):
yield self.db.pods.remove({'_id': '1'})
user = yield self.db.pods.find_one({'_id': '1'})
@@ -104,7 +104,7 @@ class MyTest(AsyncHTTPTestCase):
def _insert_assert(self, docs, error=None, **kwargs):
self._db_assert('insert', error, docs, **kwargs)
- @gen_test
+ @testing.gen_test
def _db_assert(self, method, error, *args, **kwargs):
name_error = None
try:
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_pod.py b/utils/test/testapi/opnfv_testapi/tests/unit/test_pod.py
index a1184d554..922bd46e2 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/test_pod.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/test_pod.py
@@ -8,20 +8,19 @@
##############################################################################
import unittest
-from test_base import TestBase
-from opnfv_testapi.resources.pod_models import PodCreateRequest, Pod, Pods
-from opnfv_testapi.common.constants import HTTP_OK, HTTP_BAD_REQUEST, \
- HTTP_FORBIDDEN, HTTP_NOT_FOUND
+from opnfv_testapi.common import constants
+from opnfv_testapi.resources import pod_models
+import test_base as base
-class TestPodBase(TestBase):
+class TestPodBase(base.TestBase):
def setUp(self):
super(TestPodBase, self).setUp()
- self.req_d = PodCreateRequest('zte-1', 'virtual',
- 'zte pod 1', 'ci-pod')
- self.req_e = PodCreateRequest('zte-2', 'metal', 'zte pod 2')
- self.get_res = Pod
- self.list_res = Pods
+ self.req_d = pod_models.PodCreateRequest('zte-1', 'virtual',
+ 'zte pod 1', 'ci-pod')
+ self.req_e = pod_models.PodCreateRequest('zte-2', 'metal', 'zte pod 2')
+ self.get_res = pod_models.Pod
+ self.list_res = pod_models.Pods
self.basePath = '/api/v1/pods'
def assert_get_body(self, pod, req=None):
@@ -38,36 +37,36 @@ class TestPodBase(TestBase):
class TestPodCreate(TestPodBase):
def test_withoutBody(self):
(code, body) = self.create()
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
def test_emptyName(self):
- req_empty = PodCreateRequest('')
+ req_empty = pod_models.PodCreateRequest('')
(code, body) = self.create(req_empty)
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
self.assertIn('name missing', body)
def test_noneName(self):
- req_none = PodCreateRequest(None)
+ req_none = pod_models.PodCreateRequest(None)
(code, body) = self.create(req_none)
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
self.assertIn('name missing', body)
def test_success(self):
code, body = self.create_d()
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
self.assert_create_body(body)
def test_alreadyExist(self):
self.create_d()
code, body = self.create_d()
- self.assertEqual(code, HTTP_FORBIDDEN)
+ self.assertEqual(code, constants.HTTP_FORBIDDEN)
self.assertIn('already exists', body)
class TestPodGet(TestPodBase):
def test_notExist(self):
code, body = self.get('notExist')
- self.assertEqual(code, HTTP_NOT_FOUND)
+ self.assertEqual(code, constants.HTTP_NOT_FOUND)
def test_getOne(self):
self.create_d()
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_project.py b/utils/test/testapi/opnfv_testapi/tests/unit/test_project.py
index 327ddf7b2..afd4a6601 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/test_project.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/test_project.py
@@ -8,21 +8,21 @@
##############################################################################
import unittest
-from test_base import TestBase
-from opnfv_testapi.resources.project_models import ProjectCreateRequest, \
- Project, Projects, ProjectUpdateRequest
-from opnfv_testapi.common.constants import HTTP_OK, HTTP_BAD_REQUEST, \
- HTTP_FORBIDDEN, HTTP_NOT_FOUND
+from opnfv_testapi.common import constants
+from opnfv_testapi.resources import project_models
+import test_base as base
-class TestProjectBase(TestBase):
+class TestProjectBase(base.TestBase):
def setUp(self):
super(TestProjectBase, self).setUp()
- self.req_d = ProjectCreateRequest('vping', 'vping-ssh test')
- self.req_e = ProjectCreateRequest('doctor', 'doctor test')
- self.get_res = Project
- self.list_res = Projects
- self.update_res = Project
+ self.req_d = project_models.ProjectCreateRequest('vping',
+ 'vping-ssh test')
+ self.req_e = project_models.ProjectCreateRequest('doctor',
+ 'doctor test')
+ self.get_res = project_models.Project
+ self.list_res = project_models.Projects
+ self.update_res = project_models.Project
self.basePath = '/api/v1/projects'
def assert_body(self, project, req=None):
@@ -37,41 +37,41 @@ class TestProjectBase(TestBase):
class TestProjectCreate(TestProjectBase):
def test_withoutBody(self):
(code, body) = self.create()
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
def test_emptyName(self):
- req_empty = ProjectCreateRequest('')
+ req_empty = project_models.ProjectCreateRequest('')
(code, body) = self.create(req_empty)
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
self.assertIn('name missing', body)
def test_noneName(self):
- req_none = ProjectCreateRequest(None)
+ req_none = project_models.ProjectCreateRequest(None)
(code, body) = self.create(req_none)
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
self.assertIn('name missing', body)
def test_success(self):
(code, body) = self.create_d()
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
self.assert_create_body(body)
def test_alreadyExist(self):
self.create_d()
(code, body) = self.create_d()
- self.assertEqual(code, HTTP_FORBIDDEN)
+ self.assertEqual(code, constants.HTTP_FORBIDDEN)
self.assertIn('already exists', body)
class TestProjectGet(TestProjectBase):
def test_notExist(self):
code, body = self.get('notExist')
- self.assertEqual(code, HTTP_NOT_FOUND)
+ self.assertEqual(code, constants.HTTP_NOT_FOUND)
def test_getOne(self):
self.create_d()
code, body = self.get(self.req_d.name)
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
self.assert_body(body)
def test_list(self):
@@ -88,23 +88,23 @@ class TestProjectGet(TestProjectBase):
class TestProjectUpdate(TestProjectBase):
def test_withoutBody(self):
code, _ = self.update(None, 'noBody')
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
def test_notFound(self):
code, _ = self.update(self.req_e, 'notFound')
- self.assertEqual(code, HTTP_NOT_FOUND)
+ self.assertEqual(code, constants.HTTP_NOT_FOUND)
def test_newNameExist(self):
self.create_d()
self.create_e()
code, body = self.update(self.req_e, self.req_d.name)
- self.assertEqual(code, HTTP_FORBIDDEN)
+ self.assertEqual(code, constants.HTTP_FORBIDDEN)
self.assertIn("already exists", body)
def test_noUpdate(self):
self.create_d()
code, body = self.update(self.req_d, self.req_d.name)
- self.assertEqual(code, HTTP_FORBIDDEN)
+ self.assertEqual(code, constants.HTTP_FORBIDDEN)
self.assertIn("Nothing to update", body)
def test_success(self):
@@ -112,9 +112,9 @@ class TestProjectUpdate(TestProjectBase):
code, body = self.get(self.req_d.name)
_id = body._id
- req = ProjectUpdateRequest('newName', 'new description')
+ req = project_models.ProjectUpdateRequest('newName', 'new description')
code, body = self.update(req, self.req_d.name)
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
self.assertEqual(_id, body._id)
self.assert_body(body, req)
@@ -126,16 +126,16 @@ class TestProjectUpdate(TestProjectBase):
class TestProjectDelete(TestProjectBase):
def test_notFound(self):
code, body = self.delete('notFound')
- self.assertEqual(code, HTTP_NOT_FOUND)
+ self.assertEqual(code, constants.HTTP_NOT_FOUND)
def test_success(self):
self.create_d()
code, body = self.delete(self.req_d.name)
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
self.assertEqual(body, '')
code, body = self.get(self.req_d.name)
- self.assertEqual(code, HTTP_NOT_FOUND)
+ self.assertEqual(code, constants.HTTP_NOT_FOUND)
if __name__ == '__main__':
unittest.main()
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_result.py b/utils/test/testapi/opnfv_testapi/tests/unit/test_result.py
index 10575a9f5..2c7268eb6 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/test_result.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/test_result.py
@@ -7,17 +7,15 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
import copy
-import unittest
from datetime import datetime, timedelta
+import unittest
-from opnfv_testapi.common.constants import HTTP_OK, HTTP_BAD_REQUEST, \
- HTTP_NOT_FOUND
-from opnfv_testapi.resources.pod_models import PodCreateRequest
-from opnfv_testapi.resources.project_models import ProjectCreateRequest
-from opnfv_testapi.resources.result_models import ResultCreateRequest, \
- TestResult, TestResults, ResultUpdateRequest, TI, TIHistory
-from opnfv_testapi.resources.testcase_models import TestcaseCreateRequest
-from test_base import TestBase
+from opnfv_testapi.common import constants
+from opnfv_testapi.resources import pod_models
+from opnfv_testapi.resources import project_models
+from opnfv_testapi.resources import result_models
+from opnfv_testapi.resources import testcase_models
+import test_base as base
class Details(object):
@@ -49,7 +47,7 @@ class Details(object):
return t
-class TestResultBase(TestBase):
+class TestResultBase(base.TestBase):
def setUp(self):
self.pod = 'zte-pod1'
self.project = 'functest'
@@ -59,34 +57,41 @@ class TestResultBase(TestBase):
self.build_tag = 'v3.0'
self.scenario = 'odl-l2'
self.criteria = 'passed'
- self.trust_indicator = TI(0.7)
+ self.trust_indicator = result_models.TI(0.7)
self.start_date = "2016-05-23 07:16:09.477097"
self.stop_date = "2016-05-23 07:16:19.477097"
self.update_date = "2016-05-24 07:16:19.477097"
self.update_step = -0.05
super(TestResultBase, self).setUp()
self.details = Details(timestart='0', duration='9s', status='OK')
- self.req_d = ResultCreateRequest(pod_name=self.pod,
- project_name=self.project,
- case_name=self.case,
- installer=self.installer,
- version=self.version,
- start_date=self.start_date,
- stop_date=self.stop_date,
- details=self.details.format(),
- build_tag=self.build_tag,
- scenario=self.scenario,
- criteria=self.criteria,
- trust_indicator=self.trust_indicator)
- self.get_res = TestResult
- self.list_res = TestResults
- self.update_res = TestResult
+ self.req_d = result_models.ResultCreateRequest(
+ pod_name=self.pod,
+ project_name=self.project,
+ case_name=self.case,
+ installer=self.installer,
+ version=self.version,
+ start_date=self.start_date,
+ stop_date=self.stop_date,
+ details=self.details.format(),
+ build_tag=self.build_tag,
+ scenario=self.scenario,
+ criteria=self.criteria,
+ trust_indicator=self.trust_indicator)
+ self.get_res = result_models.TestResult
+ self.list_res = result_models.TestResults
+ self.update_res = result_models.TestResult
self.basePath = '/api/v1/results'
- self.req_pod = PodCreateRequest(self.pod, 'metal', 'zte pod 1')
- self.req_project = ProjectCreateRequest(self.project, 'vping test')
- self.req_testcase = TestcaseCreateRequest(self.case,
- '/cases/vping',
- 'vping-ssh test')
+ self.req_pod = pod_models.PodCreateRequest(
+ self.pod,
+ 'metal',
+ 'zte pod 1')
+ self.req_project = project_models.ProjectCreateRequest(
+ self.project,
+ 'vping test')
+ self.req_testcase = testcase_models.TestcaseCreateRequest(
+ self.case,
+ '/cases/vping',
+ 'vping-ssh test')
self.create_help('/api/v1/pods', self.req_pod)
self.create_help('/api/v1/projects', self.req_project)
self.create_help('/api/v1/projects/%s/cases',
@@ -94,7 +99,7 @@ class TestResultBase(TestBase):
self.project)
def assert_res(self, code, result, req=None):
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
if req is None:
req = self.req_d
self.assertEqual(result.pod_name, req.pod_name)
@@ -129,78 +134,78 @@ class TestResultBase(TestBase):
class TestResultCreate(TestResultBase):
def test_nobody(self):
(code, body) = self.create(None)
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
self.assertIn('no body', body)
def test_podNotProvided(self):
req = self.req_d
req.pod_name = None
(code, body) = self.create(req)
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
self.assertIn('pod_name missing', body)
def test_projectNotProvided(self):
req = self.req_d
req.project_name = None
(code, body) = self.create(req)
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
self.assertIn('project_name missing', body)
def test_testcaseNotProvided(self):
req = self.req_d
req.case_name = None
(code, body) = self.create(req)
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
self.assertIn('case_name missing', body)
def test_noPod(self):
req = self.req_d
req.pod_name = 'notExistPod'
(code, body) = self.create(req)
- self.assertEqual(code, HTTP_NOT_FOUND)
+ self.assertEqual(code, constants.HTTP_NOT_FOUND)
self.assertIn('Could not find pod', body)
def test_noProject(self):
req = self.req_d
req.project_name = 'notExistProject'
(code, body) = self.create(req)
- self.assertEqual(code, HTTP_NOT_FOUND)
+ self.assertEqual(code, constants.HTTP_NOT_FOUND)
self.assertIn('Could not find project', body)
def test_noTestcase(self):
req = self.req_d
req.case_name = 'notExistTestcase'
(code, body) = self.create(req)
- self.assertEqual(code, HTTP_NOT_FOUND)
+ self.assertEqual(code, constants.HTTP_NOT_FOUND)
self.assertIn('Could not find testcase', body)
def test_success(self):
(code, body) = self.create_d()
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
self.assert_href(body)
def test_key_with_doc(self):
req = copy.deepcopy(self.req_d)
req.details = {'1.name': 'dot_name'}
(code, body) = self.create(req)
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
self.assert_href(body)
def test_no_ti(self):
- req = ResultCreateRequest(pod_name=self.pod,
- project_name=self.project,
- case_name=self.case,
- installer=self.installer,
- version=self.version,
- start_date=self.start_date,
- stop_date=self.stop_date,
- details=self.details.format(),
- build_tag=self.build_tag,
- scenario=self.scenario,
- criteria=self.criteria)
+ req = result_models.ResultCreateRequest(pod_name=self.pod,
+ project_name=self.project,
+ case_name=self.case,
+ installer=self.installer,
+ version=self.version,
+ start_date=self.start_date,
+ stop_date=self.stop_date,
+ details=self.details.format(),
+ build_tag=self.build_tag,
+ scenario=self.scenario,
+ criteria=self.criteria)
(code, res) = self.create(req)
_id = res.href.split('/')[-1]
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
code, body = self.get(_id)
self.assert_res(code, body, req)
@@ -240,7 +245,7 @@ class TestResultGet(TestResultBase):
def test_queryPeriodNotInt(self):
code, body = self.query(self._set_query('period=a'))
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
self.assertIn('period must be int', body)
def test_queryPeriodFail(self):
@@ -253,7 +258,7 @@ class TestResultGet(TestResultBase):
def test_queryLastNotInt(self):
code, body = self.query(self._set_query('last=a'))
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
self.assertIn('last must be int', body)
def test_queryLast(self):
@@ -292,7 +297,7 @@ class TestResultGet(TestResultBase):
req = self._create_changed_date(**kwargs)
code, body = self.query(query)
if not found:
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
self.assertEqual(0, len(body.results))
else:
self.assertEqual(1, len(body.results))
@@ -326,10 +331,11 @@ class TestResultUpdate(TestResultBase):
new_ti = copy.deepcopy(self.trust_indicator)
new_ti.current += self.update_step
- new_ti.histories.append(TIHistory(self.update_date, self.update_step))
+ new_ti.histories.append(
+ result_models.TIHistory(self.update_date, self.update_step))
new_data = copy.deepcopy(self.req_d)
new_data.trust_indicator = new_ti
- update = ResultUpdateRequest(trust_indicator=new_ti)
+ update = result_models.ResultUpdateRequest(trust_indicator=new_ti)
code, body = self.update(update, _id)
self.assertEqual(_id, body._id)
self.assert_res(code, body, new_data)
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_scenario.py b/utils/test/testapi/opnfv_testapi/tests/unit/test_scenario.py
index ff5979524..f604c5750 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/test_scenario.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/test_scenario.py
@@ -1,20 +1,18 @@
+from copy import deepcopy
+from datetime import datetime
import json
import os
-from opnfv_testapi.common.constants import HTTP_BAD_REQUEST
-from opnfv_testapi.common.constants import HTTP_FORBIDDEN
-from opnfv_testapi.common.constants import HTTP_OK
-from opnfv_testapi.resources.scenario_models import Scenario
-from opnfv_testapi.resources.scenario_models import ScenarioCreateRequest
-from opnfv_testapi.resources.scenario_models import Scenarios
-from test_testcase import TestBase
+from opnfv_testapi.common import constants
+import opnfv_testapi.resources.scenario_models as models
+import test_base as base
-class TestScenarioBase(TestBase):
+class TestScenarioBase(base.TestBase):
def setUp(self):
super(TestScenarioBase, self).setUp()
- self.get_res = Scenario
- self.list_res = Scenarios
+ self.get_res = models.Scenario
+ self.list_res = models.Scenarios
self.basePath = '/api/v1/scenarios'
self.req_d = self._load_request('scenario-c1.json')
self.req_2 = self._load_request('scenario-c2.json')
@@ -38,7 +36,7 @@ class TestScenarioBase(TestBase):
return res.href.split('/')[-1]
def assert_res(self, code, scenario, req=None):
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
if req is None:
req = self.req_d
scenario_dict = scenario.format_http()
@@ -46,33 +44,44 @@ class TestScenarioBase(TestBase):
self.assertIsNotNone(scenario_dict['creation_date'])
self.assertDictContainsSubset(req, scenario_dict)
+ @staticmethod
+ def _set_query(*args):
+ uri = ''
+ for arg in args:
+ uri += arg + '&'
+ return uri[0: -1]
+
+ def _get_and_assert(self, name, req=None):
+ code, body = self.get(name)
+ self.assert_res(code, body, req)
+
class TestScenarioCreate(TestScenarioBase):
def test_withoutBody(self):
(code, body) = self.create()
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
def test_emptyName(self):
- req_empty = ScenarioCreateRequest('')
+ req_empty = models.ScenarioCreateRequest('')
(code, body) = self.create(req_empty)
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
self.assertIn('name missing', body)
def test_noneName(self):
- req_none = ScenarioCreateRequest(None)
+ req_none = models.ScenarioCreateRequest(None)
(code, body) = self.create(req_none)
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
self.assertIn('name missing', body)
def test_success(self):
(code, body) = self.create_d()
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
self.assert_create_body(body)
def test_alreadyExist(self):
self.create_d()
(code, body) = self.create_d()
- self.assertEqual(code, HTTP_FORBIDDEN)
+ self.assertEqual(code, constants.HTTP_FORBIDDEN)
self.assertIn('already exists', body)
@@ -83,8 +92,7 @@ class TestScenarioGet(TestScenarioBase):
self.scenario_2 = self.create_return_name(self.req_2)
def test_getByName(self):
- code, body = self.get(self.scenario_1)
- self.assert_res(code, body, req=self.req_d)
+ self._get_and_assert(self.scenario_1, self.req_d)
def test_getAll(self):
self._query_and_assert(query=None, reqs=[self.req_d, self.req_2])
@@ -113,17 +121,10 @@ class TestScenarioGet(TestScenarioBase):
self._query_and_assert(query, reqs=[self.req_d])
- @staticmethod
- def _set_query(*args):
- uri = ''
- for arg in args:
- uri += arg + '&'
- return uri[0: -1]
-
def _query_and_assert(self, query, found=True, reqs=None):
code, body = self.query(query)
if not found:
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
self.assertEqual(0, len(body.scenarios))
else:
self.assertEqual(len(reqs), len(body.scenarios))
@@ -131,3 +132,185 @@ class TestScenarioGet(TestScenarioBase):
for scenario in body.scenarios:
if req['name'] == scenario.name:
self.assert_res(code, scenario, req)
+
+
+class TestScenarioUpdate(TestScenarioBase):
+ def setUp(self):
+ super(TestScenarioUpdate, self).setUp()
+ self.scenario = self.create_return_name(self.req_d)
+
+ def _execute(set_update):
+ def magic(self):
+ update, scenario = set_update(self, deepcopy(self.req_d))
+ self._update_and_assert(update, scenario)
+ return magic
+
+ def test_renameScenario(self):
+ new_name = 'nosdn-nofeature-noha'
+ new_scenario = deepcopy(self.req_d)
+ new_scenario['name'] = new_name
+ update_req = models.ScenarioUpdateRequest(field='name',
+ op='update',
+ locate={},
+ term={'name': new_name})
+ self._update_and_assert(update_req, new_scenario, new_name)
+
+ @_execute
+ def test_addInstaller(self, scenario):
+ add = models.ScenarioInstaller(installer='daisy', versions=list())
+ scenario['installers'].append(add.format())
+ update = models.ScenarioUpdateRequest(field='installer',
+ op='add',
+ locate={},
+ term=add.format())
+ return update, scenario
+
+ @_execute
+ def test_deleteInstaller(self, scenario):
+ scenario['installers'] = filter(lambda f: f['installer'] != 'apex',
+ scenario['installers'])
+
+ update = models.ScenarioUpdateRequest(field='installer',
+ op='delete',
+ locate={'installer': 'apex'})
+ return update, scenario
+
+ @_execute
+ def test_addVersion(self, scenario):
+ add = models.ScenarioVersion(version='danube', projects=list())
+ scenario['installers'][0]['versions'].append(add.format())
+ update = models.ScenarioUpdateRequest(field='version',
+ op='add',
+ locate={'installer': 'apex'},
+ term=add.format())
+ return update, scenario
+
+ @_execute
+ def test_deleteVersion(self, scenario):
+ scenario['installers'][0]['versions'] = filter(
+ lambda f: f['version'] != 'master',
+ scenario['installers'][0]['versions'])
+
+ update = models.ScenarioUpdateRequest(field='version',
+ op='delete',
+ locate={'installer': 'apex',
+ 'version': 'master'})
+ return update, scenario
+
+ @_execute
+ def test_changeOwner(self, scenario):
+ scenario['installers'][0]['versions'][0]['owner'] = 'lucy'
+
+ update = models.ScenarioUpdateRequest(field='owner',
+ op='update',
+ locate={'installer': 'apex',
+ 'version': 'master'},
+ term={'owner': 'lucy'})
+ return update, scenario
+
+ @_execute
+ def test_addProject(self, scenario):
+ add = models.ScenarioProject(project='qtip').format()
+ scenario['installers'][0]['versions'][0]['projects'].append(add)
+ update = models.ScenarioUpdateRequest(field='project',
+ op='add',
+ locate={'installer': 'apex',
+ 'version': 'master'},
+ term=add)
+ return update, scenario
+
+ @_execute
+ def test_deleteProject(self, scenario):
+ scenario['installers'][0]['versions'][0]['projects'] = filter(
+ lambda f: f['project'] != 'functest',
+ scenario['installers'][0]['versions'][0]['projects'])
+
+ update = models.ScenarioUpdateRequest(field='project',
+ op='delete',
+ locate={
+ 'installer': 'apex',
+ 'version': 'master',
+ 'project': 'functest'})
+ return update, scenario
+
+ @_execute
+ def test_addCustoms(self, scenario):
+ add = ['odl', 'parser', 'vping_ssh']
+ projects = scenario['installers'][0]['versions'][0]['projects']
+ functest = filter(lambda f: f['project'] == 'functest', projects)[0]
+ functest['customs'] = ['healthcheck', 'odl', 'parser', 'vping_ssh']
+ update = models.ScenarioUpdateRequest(field='customs',
+ op='add',
+ locate={
+ 'installer': 'apex',
+ 'version': 'master',
+ 'project': 'functest'},
+ term=add)
+ return update, scenario
+
+ @_execute
+ def test_deleteCustoms(self, scenario):
+ projects = scenario['installers'][0]['versions'][0]['projects']
+ functest = filter(lambda f: f['project'] == 'functest', projects)[0]
+ functest['customs'] = ['healthcheck']
+ update = models.ScenarioUpdateRequest(field='customs',
+ op='delete',
+ locate={
+ 'installer': 'apex',
+ 'version': 'master',
+ 'project': 'functest'},
+ term=['vping_ssh'])
+ return update, scenario
+
+ @_execute
+ def test_addScore(self, scenario):
+ add = models.ScenarioScore(date=str(datetime.now()), score='11/12')
+ projects = scenario['installers'][0]['versions'][0]['projects']
+ functest = filter(lambda f: f['project'] == 'functest', projects)[0]
+ functest['scores'].append(add.format())
+ update = models.ScenarioUpdateRequest(field='score',
+ op='add',
+ locate={
+ 'installer': 'apex',
+ 'version': 'master',
+ 'project': 'functest'},
+ term=add.format())
+ return update, scenario
+
+ @_execute
+ def test_addTi(self, scenario):
+ add = models.ScenarioTI(date=str(datetime.now()), status='gold')
+ projects = scenario['installers'][0]['versions'][0]['projects']
+ functest = filter(lambda f: f['project'] == 'functest', projects)[0]
+ functest['trust_indicators'].append(add.format())
+ update = models.ScenarioUpdateRequest(field='trust_indicator',
+ op='add',
+ locate={
+ 'installer': 'apex',
+ 'version': 'master',
+ 'project': 'functest'},
+ term=add.format())
+ return update, scenario
+
+ def _update_and_assert(self, update_req, new_scenario, name=None):
+ code, _ = self.update(update_req, self.scenario)
+ self.assertEqual(code, constants.HTTP_OK)
+ self._get_and_assert(self._none_default(name, self.scenario),
+ new_scenario)
+
+ @staticmethod
+ def _none_default(check, default):
+ return check if check else default
+
+
+class TestScenarioDelete(TestScenarioBase):
+ def test_notFound(self):
+ code, body = self.delete('notFound')
+ self.assertEqual(code, constants.HTTP_NOT_FOUND)
+
+ def test_success(self):
+ scenario = self.create_return_name(self.req_d)
+ code, _ = self.delete(scenario)
+ self.assertEqual(code, constants.HTTP_OK)
+ code, _ = self.get(scenario)
+ self.assertEqual(code, constants.HTTP_NOT_FOUND)
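
Taken together, the update tests pin down the nested document shape a scenario carries. An illustrative instance (not the actual scenario-c1.json fixture):

    scenario = {
        'name': 'odl-l2',
        'installers': [{
            'installer': 'apex',
            'versions': [{
                'version': 'master',
                'owner': 'lucy',
                'projects': [{
                    'project': 'functest',
                    'customs': ['healthcheck'],   # ('customs', 'add'/'delete')
                    'scores': [],                 # ('score', 'add')
                    'trust_indicators': [],       # ('trust_indicator', 'add')
                }],
            }],
        }],
    }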
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_testcase.py b/utils/test/testapi/opnfv_testapi/tests/unit/test_testcase.py
index cb767844a..c0494db5d 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/test_testcase.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/test_testcase.py
@@ -6,35 +6,33 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-import unittest
import copy
+import unittest
-from test_base import TestBase
-from opnfv_testapi.resources.testcase_models import TestcaseCreateRequest, \
- Testcase, Testcases, TestcaseUpdateRequest
-from opnfv_testapi.resources.project_models import ProjectCreateRequest
-from opnfv_testapi.common.constants import HTTP_OK, HTTP_BAD_REQUEST, \
- HTTP_FORBIDDEN, HTTP_NOT_FOUND
+from opnfv_testapi.common import constants
+from opnfv_testapi.resources import project_models
+from opnfv_testapi.resources import testcase_models
+import test_base as base
-class TestCaseBase(TestBase):
+class TestCaseBase(base.TestBase):
def setUp(self):
super(TestCaseBase, self).setUp()
- self.req_d = TestcaseCreateRequest('vping_1',
- '/cases/vping_1',
- 'vping-ssh test')
- self.req_e = TestcaseCreateRequest('doctor_1',
- '/cases/doctor_1',
- 'create doctor')
- self.update_d = TestcaseUpdateRequest('vping_1',
- 'vping-ssh test',
- 'functest')
- self.update_e = TestcaseUpdateRequest('doctor_1',
- 'create doctor',
- 'functest')
- self.get_res = Testcase
- self.list_res = Testcases
- self.update_res = Testcase
+ self.req_d = testcase_models.TestcaseCreateRequest('vping_1',
+ '/cases/vping_1',
+ 'vping-ssh test')
+ self.req_e = testcase_models.TestcaseCreateRequest('doctor_1',
+ '/cases/doctor_1',
+ 'create doctor')
+ self.update_d = testcase_models.TestcaseUpdateRequest('vping_1',
+ 'vping-ssh test',
+ 'functest')
+ self.update_e = testcase_models.TestcaseUpdateRequest('doctor_1',
+ 'create doctor',
+ 'functest')
+ self.get_res = testcase_models.Testcase
+ self.list_res = testcase_models.Testcases
+ self.update_res = testcase_models.Testcase
self.basePath = '/api/v1/projects/%s/cases'
self.create_project()
@@ -57,7 +55,8 @@ class TestCaseBase(TestBase):
self.assertIsNotNone(new.creation_date)
def create_project(self):
- req_p = ProjectCreateRequest('functest', 'vping-ssh test')
+ req_p = project_models.ProjectCreateRequest('functest',
+ 'vping-ssh test')
self.create_help('/api/v1/projects', req_p)
self.project = req_p.name
@@ -80,46 +79,46 @@ class TestCaseBase(TestBase):
class TestCaseCreate(TestCaseBase):
def test_noBody(self):
(code, body) = self.create(None, 'vping')
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
def test_noProject(self):
code, body = self.create(self.req_d, 'noProject')
- self.assertEqual(code, HTTP_FORBIDDEN)
+ self.assertEqual(code, constants.HTTP_FORBIDDEN)
self.assertIn('Could not find project', body)
def test_emptyName(self):
- req_empty = TestcaseCreateRequest('')
+ req_empty = testcase_models.TestcaseCreateRequest('')
(code, body) = self.create(req_empty, self.project)
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
self.assertIn('name missing', body)
def test_noneName(self):
- req_none = TestcaseCreateRequest(None)
+ req_none = testcase_models.TestcaseCreateRequest(None)
(code, body) = self.create(req_none, self.project)
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
self.assertIn('name missing', body)
def test_success(self):
code, body = self.create_d()
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
self.assert_create_body(body, None, self.project)
def test_alreadyExist(self):
self.create_d()
code, body = self.create_d()
- self.assertEqual(code, HTTP_FORBIDDEN)
+ self.assertEqual(code, constants.HTTP_FORBIDDEN)
self.assertIn('already exists', body)
class TestCaseGet(TestCaseBase):
def test_notExist(self):
code, body = self.get('notExist')
- self.assertEqual(code, HTTP_NOT_FOUND)
+ self.assertEqual(code, constants.HTTP_NOT_FOUND)
def test_getOne(self):
self.create_d()
code, body = self.get(self.req_d.name)
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
self.assert_body(body)
def test_list(self):
@@ -136,23 +135,23 @@ class TestCaseGet(TestCaseBase):
class TestCaseUpdate(TestCaseBase):
def test_noBody(self):
code, _ = self.update(case='noBody')
- self.assertEqual(code, HTTP_BAD_REQUEST)
+ self.assertEqual(code, constants.HTTP_BAD_REQUEST)
def test_notFound(self):
code, _ = self.update(self.update_e, 'notFound')
- self.assertEqual(code, HTTP_NOT_FOUND)
+ self.assertEqual(code, constants.HTTP_NOT_FOUND)
def test_newNameExist(self):
self.create_d()
self.create_e()
code, body = self.update(self.update_e, self.req_d.name)
- self.assertEqual(code, HTTP_FORBIDDEN)
+ self.assertEqual(code, constants.HTTP_FORBIDDEN)
self.assertIn("already exists", body)
def test_noUpdate(self):
self.create_d()
code, body = self.update(self.update_d, self.req_d.name)
- self.assertEqual(code, HTTP_FORBIDDEN)
+ self.assertEqual(code, constants.HTTP_FORBIDDEN)
self.assertIn("Nothing to update", body)
def test_success(self):
@@ -161,7 +160,7 @@ class TestCaseUpdate(TestCaseBase):
_id = body._id
code, body = self.update(self.update_e, self.req_d.name)
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
self.assertEqual(_id, body._id)
self.assert_update_body(self.req_d, body, self.update_e)
@@ -174,22 +173,22 @@ class TestCaseUpdate(TestCaseBase):
update = copy.deepcopy(self.update_d)
update.description = {'2. change': 'dollar change'}
code, body = self.update(update, self.req_d.name)
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
class TestCaseDelete(TestCaseBase):
def test_notFound(self):
code, body = self.delete('notFound')
- self.assertEqual(code, HTTP_NOT_FOUND)
+ self.assertEqual(code, constants.HTTP_NOT_FOUND)
def test_success(self):
self.create_d()
code, body = self.delete(self.req_d.name)
- self.assertEqual(code, HTTP_OK)
+ self.assertEqual(code, constants.HTTP_OK)
self.assertEqual(body, '')
code, body = self.get(self.req_d.name)
- self.assertEqual(code, HTTP_NOT_FOUND)
+ self.assertEqual(code, constants.HTTP_NOT_FOUND)
if __name__ == '__main__':
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_token.py b/utils/test/testapi/opnfv_testapi/tests/unit/test_token.py
new file mode 100644
index 000000000..19b9e3e07
--- /dev/null
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/test_token.py
@@ -0,0 +1,118 @@
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+import unittest
+
+from tornado import web
+
+import fake_pymongo
+from opnfv_testapi.common import constants
+from opnfv_testapi.resources import project_models
+from opnfv_testapi.router import url_mappings
+import test_base as base
+
+
+class TestToken(base.TestBase):
+ def get_app(self):
+ return web.Application(
+ url_mappings.mappings,
+ db=fake_pymongo,
+ debug=True,
+ auth=True
+ )
+
+
+class TestTokenCreateProject(TestToken):
+ def setUp(self):
+ super(TestTokenCreateProject, self).setUp()
+ self.req_d = project_models.ProjectCreateRequest('vping')
+ fake_pymongo.tokens.insert({"access_token": "12345"})
+ self.basePath = '/api/v1/projects'
+
+ def test_projectCreateTokenInvalid(self):
+ self.headers['X-Auth-Token'] = '1234'
+ code, body = self.create_d()
+ self.assertEqual(code, constants.HTTP_FORBIDDEN)
+ self.assertIn('Invalid Token.', body)
+
+ def test_projectCreateTokenUnauthorized(self):
+ self.headers.pop('X-Auth-Token')
+ code, body = self.create_d()
+ self.assertEqual(code, constants.HTTP_UNAUTHORIZED)
+ self.assertIn('No Authentication Header.', body)
+
+ def test_projectCreateTokenSuccess(self):
+ self.headers['X-Auth-Token'] = '12345'
+ code, body = self.create_d()
+ self.assertEqual(code, constants.HTTP_OK)
+
+
+class TestTokenDeleteProject(TestToken):
+ def setUp(self):
+ super(TestTokenDeleteProject, self).setUp()
+ self.req_d = project_models.ProjectCreateRequest('vping')
+ fake_pymongo.tokens.insert({"access_token": "12345"})
+ self.basePath = '/api/v1/projects'
+
+    def test_projectDeleteTokenInvalid(self):
+ self.headers['X-Auth-Token'] = '12345'
+ self.create_d()
+ self.headers['X-Auth-Token'] = '1234'
+ code, body = self.delete(self.req_d.name)
+ self.assertEqual(code, constants.HTTP_FORBIDDEN)
+ self.assertIn('Invalid Token.', body)
+
+ def test_projectDeleteTokenUnauthorized(self):
+ self.headers['X-Auth-Token'] = '12345'
+ self.create_d()
+ self.headers.pop('X-Auth-Token')
+ code, body = self.delete(self.req_d.name)
+ self.assertEqual(code, constants.HTTP_UNAUTHORIZED)
+ self.assertIn('No Authentication Header.', body)
+
+ def test_projectDeleteTokenSuccess(self):
+ self.headers['X-Auth-Token'] = '12345'
+ self.create_d()
+ code, body = self.delete(self.req_d.name)
+ self.assertEqual(code, constants.HTTP_OK)
+
+
+class TestTokenUpdateProject(TestToken):
+ def setUp(self):
+ super(TestTokenUpdateProject, self).setUp()
+ self.req_d = project_models.ProjectCreateRequest('vping')
+ fake_pymongo.tokens.insert({"access_token": "12345"})
+ self.basePath = '/api/v1/projects'
+
+    def test_projectUpdateTokenInvalid(self):
+ self.headers['X-Auth-Token'] = '12345'
+ self.create_d()
+ code, body = self.get(self.req_d.name)
+ self.headers['X-Auth-Token'] = '1234'
+ req = project_models.ProjectUpdateRequest('newName', 'new description')
+ code, body = self.update(req, self.req_d.name)
+ self.assertEqual(code, constants.HTTP_FORBIDDEN)
+ self.assertIn('Invalid Token.', body)
+
+ def test_projectUpdateTokenUnauthorized(self):
+ self.headers['X-Auth-Token'] = '12345'
+ self.create_d()
+ code, body = self.get(self.req_d.name)
+ self.headers.pop('X-Auth-Token')
+ req = project_models.ProjectUpdateRequest('newName', 'new description')
+ code, body = self.update(req, self.req_d.name)
+ self.assertEqual(code, constants.HTTP_UNAUTHORIZED)
+ self.assertIn('No Authentication Header.', body)
+
+ def test_projectUpdateTokenSuccess(self):
+ self.headers['X-Auth-Token'] = '12345'
+ self.create_d()
+ code, body = self.get(self.req_d.name)
+ req = project_models.ProjectUpdateRequest('newName', 'new description')
+ code, body = self.update(req, self.req_d.name)
+ self.assertEqual(code, constants.HTTP_OK)
+
+if __name__ == '__main__':
+ unittest.main()
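
The client-side contract these tests establish, as a sketch (host, port, and token value are assumptions): a write to a protected resource must carry an X-Auth-Token matching a document in the tokens collection.

    import json
    import urllib2  # Python 2, matching the codebase

    req = urllib2.Request('http://localhost:8000/api/v1/projects',
                          data=json.dumps({'name': 'vping'}),
                          headers={'Content-Type': 'application/json',
                                   'X-Auth-Token': '12345'})
    # Succeeds on a matching token; urlopen raises urllib2.HTTPError for
    # the 403 'Invalid Token.' and 401 'No Authentication Header.' cases
    # asserted above.
    print(urllib2.urlopen(req).read())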
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_version.py b/utils/test/testapi/opnfv_testapi/tests/unit/test_version.py
index b6fbf45dc..c8f3f5062 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/test_version.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/test_version.py
@@ -8,14 +8,14 @@
##############################################################################
import unittest
-from test_base import TestBase
-from opnfv_testapi.resources.models import Versions
+from opnfv_testapi.resources import models
+import test_base as base
-class TestVersionBase(TestBase):
+class TestVersionBase(base.TestBase):
def setUp(self):
super(TestVersionBase, self).setUp()
- self.list_res = Versions
+ self.list_res = models.Versions
self.basePath = '/versions'
diff --git a/utils/test/testapi/run_test.sh b/utils/test/testapi/run_test.sh
index 9b25f8ffc..51db09f65 100755
--- a/utils/test/testapi/run_test.sh
+++ b/utils/test/testapi/run_test.sh
@@ -1,10 +1,36 @@
-#! /bin/bash
+#!/bin/bash
-# Before run this script, make sure that testtools and discover
-# had been installed in your env
-# or else using pip to install them as follows:
-# pip install testtools, discover
+set -o errexit
+
+# Get script directory
+SCRIPTDIR=`dirname $0`
+
+echo "Running unit tests..."
+
+# Creating virtual environment
+virtualenv $SCRIPTDIR/testapi_venv
+source $SCRIPTDIR/testapi_venv/bin/activate
+
+# Install requirements
+pip install -r $SCRIPTDIR/requirements.txt
+pip install coverage
+pip install "nose>=1.3.1"
find . -type f -name "*.pyc" -delete
-testrargs="discover ./opnfv_testapi/tests/unit"
-python -m testtools.run $testrargs
+
+nosetests --with-xunit \
+ --with-coverage \
+ --cover-erase \
+    --cover-package=opnfv_testapi.cmd \
+    --cover-package=opnfv_testapi.common \
+    --cover-package=opnfv_testapi.resources \
+    --cover-package=opnfv_testapi.router \
+ --cover-xml \
+ --cover-html \
+    $SCRIPTDIR/opnfv_testapi/tests || exit_code=$?
+
+# errexit does not trigger inside a || list, so a test failure is captured
+# here instead of aborting the script before the cleanup below.
+deactivate
+
+exit ${exit_code:-0}
diff --git a/utils/test/testapi/test-requirements.txt b/utils/test/testapi/test-requirements.txt
deleted file mode 100644
index ddbdefcfd..000000000
--- a/utils/test/testapi/test-requirements.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
-
-testtools>=1.4.0
-discover
-futures
\ No newline at end of file
diff --git a/utils/test/testapi/update/templates/utils.py b/utils/test/testapi/update/templates/utils.py
index a18ff0389..4254fee34 100644
--- a/utils/test/testapi/update/templates/utils.py
+++ b/utils/test/testapi/update/templates/utils.py
@@ -44,5 +44,5 @@ def main(method, parser):
args = parser.parse_args()
try:
method(args)
- except AssertionError, msg:
+ except AssertionError as msg:
print(msg)
diff --git a/utils/test/vnfcatalogue/VNF_Catalogue/README.md b/utils/test/vnfcatalogue/VNF_Catalogue/README.md
new file mode 100644
index 000000000..32ad65416
--- /dev/null
+++ b/utils/test/vnfcatalogue/VNF_Catalogue/README.md
@@ -0,0 +1,12 @@
+# VNF_Catalogue Node.js + Jade + MySQL server
+
+
+## Quickstart
+
+First, install the dependencies:
+
+ ```npm install```
+
+Then start the server:
+
+ ```npm start```
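+
+The server listens on port 3000 by default; to use another port, set the
+PORT environment variable before starting (bin/www reads process.env.PORT):
+
+ ```PORT=8080 npm start```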
diff --git a/utils/test/vnfcatalogue/VNF_Catalogue/app.js b/utils/test/vnfcatalogue/VNF_Catalogue/app.js
new file mode 100644
index 000000000..0f842b62d
--- /dev/null
+++ b/utils/test/vnfcatalogue/VNF_Catalogue/app.js
@@ -0,0 +1,79 @@
+/*******************************************************************************
+ * Copyright (c) 2017 Kumar Rishabh and others.
+ *
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Apache License, Version 2.0
+ * which accompanies this distribution, and is available at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *******************************************************************************/
+
+var express = require('express');
+var path = require('path');
+var favicon = require('serve-favicon');
+var logger = require('morgan');
+var cookieParser = require('cookie-parser');
+var bodyParser = require('body-parser');
+
+var routes = require('./routes/index');
+var search_projects = require('./routes/search_projects');
+
+var app = express();
+
+// view engine setup
+app.set('views', path.join(__dirname, 'views'));
+app.set('view engine', 'jade');
+
+// Database driver; the mysql2 module is attached to each request below
+var db = require('mysql2');
+
+// uncomment after placing your favicon in /public
+//app.use(favicon(__dirname + '/public/favicon.ico'));
+app.use(logger('dev'));
+app.use(bodyParser.json());
+app.use(bodyParser.urlencoded({ extended: false }));
+app.use(cookieParser());
+app.use(express.static(path.join(__dirname, 'public')));
+
+// Make our db accessible to our router
+app.use(function(req,res,next){
+ req.db = db;
+ next();
+});
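+
+// Example (sketch, hypothetical values): a route can open a connection
+// on demand with the mysql2 API, e.g.
+//   var conn = req.db.createConnection({host: 'localhost', user: 'me'});
+//   conn.query('SELECT 1', function(err, rows) { /* ... */ });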
+
+app.use('/', routes);
+app.use('/search_projects', search_projects);
+
+
+// Basic error handling for now. TODO: remove
+
+// Catch 404 and forward to the error handler
+app.use(function(req, res, next) {
+ var err = new Error('Not Found');
+ err.status = 404;
+ next(err);
+});
+
+
+// development error handler
+// will print stacktrace
+if (app.get('env') === 'development') {
+ app.use(function(err, req, res, next) {
+ res.status(err.status || 500);
+ res.render('error', {
+ message: err.message,
+ error: err
+ });
+ });
+}
+
+// production error handler
+// no stacktraces leaked to user
+app.use(function(err, req, res, next) {
+ res.status(err.status || 500);
+ res.render('error', {
+ message: err.message,
+ error: {}
+ });
+});
+
+module.exports = app;
diff --git a/utils/test/vnfcatalogue/VNF_Catalogue/bin/www b/utils/test/vnfcatalogue/VNF_Catalogue/bin/www
new file mode 100644
index 000000000..3cfbf7796
--- /dev/null
+++ b/utils/test/vnfcatalogue/VNF_Catalogue/bin/www
@@ -0,0 +1,9 @@
+#!/usr/bin/env node
+var debug = require('debug')('my-application');
+var app = require('../app');
+
+app.set('port', process.env.PORT || 3000);
+
+var server = app.listen(app.get('port'), function() {
+ debug('Express server listening on port ' + server.address().port);
+});
diff --git a/utils/test/vnfcatalogue/VNF_Catalogue/package.json b/utils/test/vnfcatalogue/VNF_Catalogue/package.json
new file mode 100644
index 000000000..7c6a86730
--- /dev/null
+++ b/utils/test/vnfcatalogue/VNF_Catalogue/package.json
@@ -0,0 +1,18 @@
+{
+ "name": "VNF_Catalogue",
+ "version": "0.0.1",
+ "private": true,
+ "scripts": {
+ "start": "node ./bin/www"
+ },
+ "dependencies": {
+ "body-parser": "~1.15.1",
+ "cookie-parser": "~1.4.3",
+ "debug": "~2.2.0",
+ "express": "~4.13.4",
+ "jade": "~1.11.0",
+ "morgan": "~1.7.0",
+ "serve-favicon": "~2.3.0",
+ "mysql2": "*"
+ }
+}
\ No newline at end of file
diff --git a/utils/test/vnfcatalogue/VNF_Catalogue/public/images/3rd_party/commits.png b/utils/test/vnfcatalogue/VNF_Catalogue/public/images/3rd_party/commits.png
new file mode 100644
index 000000000..1247621a7
--- /dev/null
+++ b/utils/test/vnfcatalogue/VNF_Catalogue/public/images/3rd_party/commits.png
Binary files differ
diff --git a/utils/test/vnfcatalogue/VNF_Catalogue/public/images/logo.png b/utils/test/vnfcatalogue/VNF_Catalogue/public/images/logo.png
new file mode 100644
index 000000000..fe18194ec
--- /dev/null
+++ b/utils/test/vnfcatalogue/VNF_Catalogue/public/images/logo.png
Binary files differ
diff --git a/utils/test/vnfcatalogue/VNF_Catalogue/public/javascripts/global.js b/utils/test/vnfcatalogue/VNF_Catalogue/public/javascripts/global.js
new file mode 100644
index 000000000..73f16b67d
--- /dev/null
+++ b/utils/test/vnfcatalogue/VNF_Catalogue/public/javascripts/global.js
@@ -0,0 +1,16 @@
+/*******************************************************************************
+ * Copyright (c) 2017 Kumar Rishabh and others.
+ *
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Apache License, Version 2.0
+ * which accompanies this distribution, and is available at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *******************************************************************************/
+
+$(document).ready( function() {
+ $('#Search').click(function() {
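+ // Normalize the query, e.g. "Firewall, NAT" -> ['firewall','nat'];
+ // the array is comma-joined into the 'tags' query parameter below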
+ var tags = $('#Tags').val().toLowerCase().split(/[ ,]+/);
+ window.location.href = '/search_projects?tags=' + tags;
+ return false;
+ });
+});
diff --git a/utils/test/vnfcatalogue/VNF_Catalogue/public/stylesheets/3rd_party/bootstrap.css b/utils/test/vnfcatalogue/VNF_Catalogue/public/stylesheets/3rd_party/bootstrap.css
new file mode 100755
index 000000000..b9c239621
--- /dev/null
+++ b/utils/test/vnfcatalogue/VNF_Catalogue/public/stylesheets/3rd_party/bootstrap.css
@@ -0,0 +1,1299 @@
+/*!
+ * Bootstrap v3.3.7 (http://getbootstrap.com)
+ * Copyright 2011-2017 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ */
+
+/*!
+ * Generated using the Bootstrap Customizer (http://getbootstrap.com/customize/?id=73eb1273dd80c57866adeff88f30374f)
+ * Config saved to config.json and https://gist.github.com/73eb1273dd80c57866adeff88f30374f
+ */
+/*!
+ * Bootstrap v3.3.7 (http://getbootstrap.com)
+ * Copyright 2011-2016 Twitter, Inc.
+ * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
+ */
+/*! normalize.css v3.0.3 | MIT License | github.com/necolas/normalize.css */
+html {
+ font-family: sans-serif;
+ -ms-text-size-adjust: 100%;
+ -webkit-text-size-adjust: 100%;
+}
+body {
+ margin: 0;
+}
+article,
+aside,
+details,
+figcaption,
+figure,
+footer,
+header,
+hgroup,
+main,
+menu,
+nav,
+section,
+summary {
+ display: block;
+}
+audio,
+canvas,
+progress,
+video {
+ display: inline-block;
+ vertical-align: baseline;
+}
+audio:not([controls]) {
+ display: none;
+ height: 0;
+}
+[hidden],
+template {
+ display: none;
+}
+a {
+ background-color: transparent;
+}
+a:active,
+a:hover {
+ outline: 0;
+}
+abbr[title] {
+ border-bottom: 1px dotted;
+}
+b,
+strong {
+ font-weight: bold;
+}
+dfn {
+ font-style: italic;
+}
+h1 {
+ font-size: 2em;
+ margin: 0.67em 0;
+}
+mark {
+ background: #ff0;
+ color: #000;
+}
+small {
+ font-size: 80%;
+}
+sub,
+sup {
+ font-size: 75%;
+ line-height: 0;
+ position: relative;
+ vertical-align: baseline;
+}
+sup {
+ top: -0.5em;
+}
+sub {
+ bottom: -0.25em;
+}
+img {
+ border: 0;
+}
+svg:not(:root) {
+ overflow: hidden;
+}
+figure {
+ margin: 1em 40px;
+}
+hr {
+ -webkit-box-sizing: content-box;
+ -moz-box-sizing: content-box;
+ box-sizing: content-box;
+ height: 0;
+}
+pre {
+ overflow: auto;
+}
+code,
+kbd,
+pre,
+samp {
+ font-family: monospace, monospace;
+ font-size: 1em;
+}
+button,
+input,
+optgroup,
+select,
+textarea {
+ color: inherit;
+ font: inherit;
+ margin: 0;
+}
+button {
+ overflow: visible;
+}
+button,
+select {
+ text-transform: none;
+}
+button,
+html input[type="button"],
+input[type="reset"],
+input[type="submit"] {
+ -webkit-appearance: button;
+ cursor: pointer;
+}
+button[disabled],
+html input[disabled] {
+ cursor: default;
+}
+button::-moz-focus-inner,
+input::-moz-focus-inner {
+ border: 0;
+ padding: 0;
+}
+input {
+ line-height: normal;
+}
+input[type="checkbox"],
+input[type="radio"] {
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box;
+ padding: 0;
+}
+input[type="number"]::-webkit-inner-spin-button,
+input[type="number"]::-webkit-outer-spin-button {
+ height: auto;
+}
+input[type="search"] {
+ -webkit-appearance: textfield;
+ -webkit-box-sizing: content-box;
+ -moz-box-sizing: content-box;
+ box-sizing: content-box;
+}
+input[type="search"]::-webkit-search-cancel-button,
+input[type="search"]::-webkit-search-decoration {
+ -webkit-appearance: none;
+}
+fieldset {
+ border: 1px solid #c0c0c0;
+ margin: 0 2px;
+ padding: 0.35em 0.625em 0.75em;
+}
+legend {
+ border: 0;
+ padding: 0;
+}
+textarea {
+ overflow: auto;
+}
+optgroup {
+ font-weight: bold;
+}
+table {
+ border-collapse: collapse;
+ border-spacing: 0;
+}
+td,
+th {
+ padding: 0;
+}
+/*! Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */
+@media print {
+ *,
+ *:before,
+ *:after {
+ background: transparent !important;
+ color: #000 !important;
+ -webkit-box-shadow: none !important;
+ box-shadow: none !important;
+ text-shadow: none !important;
+ }
+ a,
+ a:visited {
+ text-decoration: underline;
+ }
+ a[href]:after {
+ content: " (" attr(href) ")";
+ }
+ abbr[title]:after {
+ content: " (" attr(title) ")";
+ }
+ a[href^="#"]:after,
+ a[href^="javascript:"]:after {
+ content: "";
+ }
+ pre,
+ blockquote {
+ border: 1px solid #999;
+ page-break-inside: avoid;
+ }
+ thead {
+ display: table-header-group;
+ }
+ tr,
+ img {
+ page-break-inside: avoid;
+ }
+ img {
+ max-width: 100% !important;
+ }
+ p,
+ h2,
+ h3 {
+ orphans: 3;
+ widows: 3;
+ }
+ h2,
+ h3 {
+ page-break-after: avoid;
+ }
+ .navbar {
+ display: none;
+ }
+ .btn > .caret,
+ .dropup > .btn > .caret {
+ border-top-color: #000 !important;
+ }
+ .label {
+ border: 1px solid #000;
+ }
+ .table {
+ border-collapse: collapse !important;
+ }
+ .table td,
+ .table th {
+ background-color: #fff !important;
+ }
+ .table-bordered th,
+ .table-bordered td {
+ border: 1px solid #ddd !important;
+ }
+}
+* {
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box;
+}
+*:before,
+*:after {
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box;
+}
+html {
+ font-size: 10px;
+ -webkit-tap-highlight-color: rgba(0, 0, 0, 0);
+}
+body {
+ font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
+ font-size: 14px;
+ line-height: 1.42857143;
+ color: #333333;
+ background-color: #ffffff;
+}
+input,
+button,
+select,
+textarea {
+ font-family: inherit;
+ font-size: inherit;
+ line-height: inherit;
+}
+a {
+ color: #337ab7;
+ text-decoration: none;
+}
+a:hover,
+a:focus {
+ color: #23527c;
+ text-decoration: underline;
+}
+a:focus {
+ outline: 5px auto -webkit-focus-ring-color;
+ outline-offset: -2px;
+}
+figure {
+ margin: 0;
+}
+img {
+ vertical-align: middle;
+}
+.img-responsive {
+ display: block;
+ max-width: 100%;
+ height: auto;
+}
+.img-rounded {
+ border-radius: 6px;
+}
+.img-thumbnail {
+ padding: 4px;
+ line-height: 1.42857143;
+ background-color: #ffffff;
+ border: 1px solid #dddddd;
+ border-radius: 4px;
+ -webkit-transition: all 0.2s ease-in-out;
+ -o-transition: all 0.2s ease-in-out;
+ transition: all 0.2s ease-in-out;
+ display: inline-block;
+ max-width: 100%;
+ height: auto;
+}
+.img-circle {
+ border-radius: 50%;
+}
+hr {
+ margin-top: 20px;
+ margin-bottom: 20px;
+ border: 0;
+ border-top: 1px solid #eeeeee;
+}
+.sr-only {
+ position: absolute;
+ width: 1px;
+ height: 1px;
+ margin: -1px;
+ padding: 0;
+ overflow: hidden;
+ clip: rect(0, 0, 0, 0);
+ border: 0;
+}
+.sr-only-focusable:active,
+.sr-only-focusable:focus {
+ position: static;
+ width: auto;
+ height: auto;
+ margin: 0;
+ overflow: visible;
+ clip: auto;
+}
+[role="button"] {
+ cursor: pointer;
+}
+.container {
+ margin-right: auto;
+ margin-left: auto;
+ padding-left: 15px;
+ padding-right: 15px;
+}
+@media (min-width: 768px) {
+ .container {
+ width: 750px;
+ }
+}
+@media (min-width: 992px) {
+ .container {
+ width: 970px;
+ }
+}
+@media (min-width: 1200px) {
+ .container {
+ width: 1170px;
+ }
+}
+.container-fluid {
+ margin-right: auto;
+ margin-left: auto;
+ padding-left: 15px;
+ padding-right: 15px;
+}
+.row {
+ margin-left: -15px;
+ margin-right: -15px;
+}
+.col-xs-1, .col-sm-1, .col-md-1, .col-lg-1, .col-xs-2, .col-sm-2, .col-md-2, .col-lg-2, .col-xs-3, .col-sm-3, .col-md-3, .col-lg-3, .col-xs-4, .col-sm-4, .col-md-4, .col-lg-4, .col-xs-5, .col-sm-5, .col-md-5, .col-lg-5, .col-xs-6, .col-sm-6, .col-md-6, .col-lg-6, .col-xs-7, .col-sm-7, .col-md-7, .col-lg-7, .col-xs-8, .col-sm-8, .col-md-8, .col-lg-8, .col-xs-9, .col-sm-9, .col-md-9, .col-lg-9, .col-xs-10, .col-sm-10, .col-md-10, .col-lg-10, .col-xs-11, .col-sm-11, .col-md-11, .col-lg-11, .col-xs-12, .col-sm-12, .col-md-12, .col-lg-12 {
+ position: relative;
+ min-height: 1px;
+ padding-left: 15px;
+ padding-right: 15px;
+}
+.col-xs-1, .col-xs-2, .col-xs-3, .col-xs-4, .col-xs-5, .col-xs-6, .col-xs-7, .col-xs-8, .col-xs-9, .col-xs-10, .col-xs-11, .col-xs-12 {
+ float: left;
+}
+.col-xs-12 {
+ width: 100%;
+}
+.col-xs-11 {
+ width: 91.66666667%;
+}
+.col-xs-10 {
+ width: 83.33333333%;
+}
+.col-xs-9 {
+ width: 75%;
+}
+.col-xs-8 {
+ width: 66.66666667%;
+}
+.col-xs-7 {
+ width: 58.33333333%;
+}
+.col-xs-6 {
+ width: 50%;
+}
+.col-xs-5 {
+ width: 41.66666667%;
+}
+.col-xs-4 {
+ width: 33.33333333%;
+}
+.col-xs-3 {
+ width: 25%;
+}
+.col-xs-2 {
+ width: 16.66666667%;
+}
+.col-xs-1 {
+ width: 8.33333333%;
+}
+.col-xs-pull-12 {
+ right: 100%;
+}
+.col-xs-pull-11 {
+ right: 91.66666667%;
+}
+.col-xs-pull-10 {
+ right: 83.33333333%;
+}
+.col-xs-pull-9 {
+ right: 75%;
+}
+.col-xs-pull-8 {
+ right: 66.66666667%;
+}
+.col-xs-pull-7 {
+ right: 58.33333333%;
+}
+.col-xs-pull-6 {
+ right: 50%;
+}
+.col-xs-pull-5 {
+ right: 41.66666667%;
+}
+.col-xs-pull-4 {
+ right: 33.33333333%;
+}
+.col-xs-pull-3 {
+ right: 25%;
+}
+.col-xs-pull-2 {
+ right: 16.66666667%;
+}
+.col-xs-pull-1 {
+ right: 8.33333333%;
+}
+.col-xs-pull-0 {
+ right: auto;
+}
+.col-xs-push-12 {
+ left: 100%;
+}
+.col-xs-push-11 {
+ left: 91.66666667%;
+}
+.col-xs-push-10 {
+ left: 83.33333333%;
+}
+.col-xs-push-9 {
+ left: 75%;
+}
+.col-xs-push-8 {
+ left: 66.66666667%;
+}
+.col-xs-push-7 {
+ left: 58.33333333%;
+}
+.col-xs-push-6 {
+ left: 50%;
+}
+.col-xs-push-5 {
+ left: 41.66666667%;
+}
+.col-xs-push-4 {
+ left: 33.33333333%;
+}
+.col-xs-push-3 {
+ left: 25%;
+}
+.col-xs-push-2 {
+ left: 16.66666667%;
+}
+.col-xs-push-1 {
+ left: 8.33333333%;
+}
+.col-xs-push-0 {
+ left: auto;
+}
+.col-xs-offset-12 {
+ margin-left: 100%;
+}
+.col-xs-offset-11 {
+ margin-left: 91.66666667%;
+}
+.col-xs-offset-10 {
+ margin-left: 83.33333333%;
+}
+.col-xs-offset-9 {
+ margin-left: 75%;
+}
+.col-xs-offset-8 {
+ margin-left: 66.66666667%;
+}
+.col-xs-offset-7 {
+ margin-left: 58.33333333%;
+}
+.col-xs-offset-6 {
+ margin-left: 50%;
+}
+.col-xs-offset-5 {
+ margin-left: 41.66666667%;
+}
+.col-xs-offset-4 {
+ margin-left: 33.33333333%;
+}
+.col-xs-offset-3 {
+ margin-left: 25%;
+}
+.col-xs-offset-2 {
+ margin-left: 16.66666667%;
+}
+.col-xs-offset-1 {
+ margin-left: 8.33333333%;
+}
+.col-xs-offset-0 {
+ margin-left: 0%;
+}
+@media (min-width: 768px) {
+ .col-sm-1, .col-sm-2, .col-sm-3, .col-sm-4, .col-sm-5, .col-sm-6, .col-sm-7, .col-sm-8, .col-sm-9, .col-sm-10, .col-sm-11, .col-sm-12 {
+ float: left;
+ }
+ .col-sm-12 {
+ width: 100%;
+ }
+ .col-sm-11 {
+ width: 91.66666667%;
+ }
+ .col-sm-10 {
+ width: 83.33333333%;
+ }
+ .col-sm-9 {
+ width: 75%;
+ }
+ .col-sm-8 {
+ width: 66.66666667%;
+ }
+ .col-sm-7 {
+ width: 58.33333333%;
+ }
+ .col-sm-6 {
+ width: 50%;
+ }
+ .col-sm-5 {
+ width: 41.66666667%;
+ }
+ .col-sm-4 {
+ width: 33.33333333%;
+ }
+ .col-sm-3 {
+ width: 25%;
+ }
+ .col-sm-2 {
+ width: 16.66666667%;
+ }
+ .col-sm-1 {
+ width: 8.33333333%;
+ }
+ .col-sm-pull-12 {
+ right: 100%;
+ }
+ .col-sm-pull-11 {
+ right: 91.66666667%;
+ }
+ .col-sm-pull-10 {
+ right: 83.33333333%;
+ }
+ .col-sm-pull-9 {
+ right: 75%;
+ }
+ .col-sm-pull-8 {
+ right: 66.66666667%;
+ }
+ .col-sm-pull-7 {
+ right: 58.33333333%;
+ }
+ .col-sm-pull-6 {
+ right: 50%;
+ }
+ .col-sm-pull-5 {
+ right: 41.66666667%;
+ }
+ .col-sm-pull-4 {
+ right: 33.33333333%;
+ }
+ .col-sm-pull-3 {
+ right: 25%;
+ }
+ .col-sm-pull-2 {
+ right: 16.66666667%;
+ }
+ .col-sm-pull-1 {
+ right: 8.33333333%;
+ }
+ .col-sm-pull-0 {
+ right: auto;
+ }
+ .col-sm-push-12 {
+ left: 100%;
+ }
+ .col-sm-push-11 {
+ left: 91.66666667%;
+ }
+ .col-sm-push-10 {
+ left: 83.33333333%;
+ }
+ .col-sm-push-9 {
+ left: 75%;
+ }
+ .col-sm-push-8 {
+ left: 66.66666667%;
+ }
+ .col-sm-push-7 {
+ left: 58.33333333%;
+ }
+ .col-sm-push-6 {
+ left: 50%;
+ }
+ .col-sm-push-5 {
+ left: 41.66666667%;
+ }
+ .col-sm-push-4 {
+ left: 33.33333333%;
+ }
+ .col-sm-push-3 {
+ left: 25%;
+ }
+ .col-sm-push-2 {
+ left: 16.66666667%;
+ }
+ .col-sm-push-1 {
+ left: 8.33333333%;
+ }
+ .col-sm-push-0 {
+ left: auto;
+ }
+ .col-sm-offset-12 {
+ margin-left: 100%;
+ }
+ .col-sm-offset-11 {
+ margin-left: 91.66666667%;
+ }
+ .col-sm-offset-10 {
+ margin-left: 83.33333333%;
+ }
+ .col-sm-offset-9 {
+ margin-left: 75%;
+ }
+ .col-sm-offset-8 {
+ margin-left: 66.66666667%;
+ }
+ .col-sm-offset-7 {
+ margin-left: 58.33333333%;
+ }
+ .col-sm-offset-6 {
+ margin-left: 50%;
+ }
+ .col-sm-offset-5 {
+ margin-left: 41.66666667%;
+ }
+ .col-sm-offset-4 {
+ margin-left: 33.33333333%;
+ }
+ .col-sm-offset-3 {
+ margin-left: 25%;
+ }
+ .col-sm-offset-2 {
+ margin-left: 16.66666667%;
+ }
+ .col-sm-offset-1 {
+ margin-left: 8.33333333%;
+ }
+ .col-sm-offset-0 {
+ margin-left: 0%;
+ }
+}
+@media (min-width: 992px) {
+ .col-md-1, .col-md-2, .col-md-3, .col-md-4, .col-md-5, .col-md-6, .col-md-7, .col-md-8, .col-md-9, .col-md-10, .col-md-11, .col-md-12 {
+ float: left;
+ }
+ .col-md-12 {
+ width: 100%;
+ }
+ .col-md-11 {
+ width: 91.66666667%;
+ }
+ .col-md-10 {
+ width: 83.33333333%;
+ }
+ .col-md-9 {
+ width: 75%;
+ }
+ .col-md-8 {
+ width: 66.66666667%;
+ }
+ .col-md-7 {
+ width: 58.33333333%;
+ }
+ .col-md-6 {
+ width: 50%;
+ }
+ .col-md-5 {
+ width: 41.66666667%;
+ }
+ .col-md-4 {
+ width: 33.33333333%;
+ }
+ .col-md-3 {
+ width: 25%;
+ }
+ .col-md-2 {
+ width: 16.66666667%;
+ }
+ .col-md-1 {
+ width: 8.33333333%;
+ }
+ .col-md-pull-12 {
+ right: 100%;
+ }
+ .col-md-pull-11 {
+ right: 91.66666667%;
+ }
+ .col-md-pull-10 {
+ right: 83.33333333%;
+ }
+ .col-md-pull-9 {
+ right: 75%;
+ }
+ .col-md-pull-8 {
+ right: 66.66666667%;
+ }
+ .col-md-pull-7 {
+ right: 58.33333333%;
+ }
+ .col-md-pull-6 {
+ right: 50%;
+ }
+ .col-md-pull-5 {
+ right: 41.66666667%;
+ }
+ .col-md-pull-4 {
+ right: 33.33333333%;
+ }
+ .col-md-pull-3 {
+ right: 25%;
+ }
+ .col-md-pull-2 {
+ right: 16.66666667%;
+ }
+ .col-md-pull-1 {
+ right: 8.33333333%;
+ }
+ .col-md-pull-0 {
+ right: auto;
+ }
+ .col-md-push-12 {
+ left: 100%;
+ }
+ .col-md-push-11 {
+ left: 91.66666667%;
+ }
+ .col-md-push-10 {
+ left: 83.33333333%;
+ }
+ .col-md-push-9 {
+ left: 75%;
+ }
+ .col-md-push-8 {
+ left: 66.66666667%;
+ }
+ .col-md-push-7 {
+ left: 58.33333333%;
+ }
+ .col-md-push-6 {
+ left: 50%;
+ }
+ .col-md-push-5 {
+ left: 41.66666667%;
+ }
+ .col-md-push-4 {
+ left: 33.33333333%;
+ }
+ .col-md-push-3 {
+ left: 25%;
+ }
+ .col-md-push-2 {
+ left: 16.66666667%;
+ }
+ .col-md-push-1 {
+ left: 8.33333333%;
+ }
+ .col-md-push-0 {
+ left: auto;
+ }
+ .col-md-offset-12 {
+ margin-left: 100%;
+ }
+ .col-md-offset-11 {
+ margin-left: 91.66666667%;
+ }
+ .col-md-offset-10 {
+ margin-left: 83.33333333%;
+ }
+ .col-md-offset-9 {
+ margin-left: 75%;
+ }
+ .col-md-offset-8 {
+ margin-left: 66.66666667%;
+ }
+ .col-md-offset-7 {
+ margin-left: 58.33333333%;
+ }
+ .col-md-offset-6 {
+ margin-left: 50%;
+ }
+ .col-md-offset-5 {
+ margin-left: 41.66666667%;
+ }
+ .col-md-offset-4 {
+ margin-left: 33.33333333%;
+ }
+ .col-md-offset-3 {
+ margin-left: 25%;
+ }
+ .col-md-offset-2 {
+ margin-left: 16.66666667%;
+ }
+ .col-md-offset-1 {
+ margin-left: 8.33333333%;
+ }
+ .col-md-offset-0 {
+ margin-left: 0%;
+ }
+}
+@media (min-width: 1200px) {
+ .col-lg-1, .col-lg-2, .col-lg-3, .col-lg-4, .col-lg-5, .col-lg-6, .col-lg-7, .col-lg-8, .col-lg-9, .col-lg-10, .col-lg-11, .col-lg-12 {
+ float: left;
+ }
+ .col-lg-12 {
+ width: 100%;
+ }
+ .col-lg-11 {
+ width: 91.66666667%;
+ }
+ .col-lg-10 {
+ width: 83.33333333%;
+ }
+ .col-lg-9 {
+ width: 75%;
+ }
+ .col-lg-8 {
+ width: 66.66666667%;
+ }
+ .col-lg-7 {
+ width: 58.33333333%;
+ }
+ .col-lg-6 {
+ width: 50%;
+ }
+ .col-lg-5 {
+ width: 41.66666667%;
+ }
+ .col-lg-4 {
+ width: 33.33333333%;
+ }
+ .col-lg-3 {
+ width: 25%;
+ }
+ .col-lg-2 {
+ width: 16.66666667%;
+ }
+ .col-lg-1 {
+ width: 8.33333333%;
+ }
+ .col-lg-pull-12 {
+ right: 100%;
+ }
+ .col-lg-pull-11 {
+ right: 91.66666667%;
+ }
+ .col-lg-pull-10 {
+ right: 83.33333333%;
+ }
+ .col-lg-pull-9 {
+ right: 75%;
+ }
+ .col-lg-pull-8 {
+ right: 66.66666667%;
+ }
+ .col-lg-pull-7 {
+ right: 58.33333333%;
+ }
+ .col-lg-pull-6 {
+ right: 50%;
+ }
+ .col-lg-pull-5 {
+ right: 41.66666667%;
+ }
+ .col-lg-pull-4 {
+ right: 33.33333333%;
+ }
+ .col-lg-pull-3 {
+ right: 25%;
+ }
+ .col-lg-pull-2 {
+ right: 16.66666667%;
+ }
+ .col-lg-pull-1 {
+ right: 8.33333333%;
+ }
+ .col-lg-pull-0 {
+ right: auto;
+ }
+ .col-lg-push-12 {
+ left: 100%;
+ }
+ .col-lg-push-11 {
+ left: 91.66666667%;
+ }
+ .col-lg-push-10 {
+ left: 83.33333333%;
+ }
+ .col-lg-push-9 {
+ left: 75%;
+ }
+ .col-lg-push-8 {
+ left: 66.66666667%;
+ }
+ .col-lg-push-7 {
+ left: 58.33333333%;
+ }
+ .col-lg-push-6 {
+ left: 50%;
+ }
+ .col-lg-push-5 {
+ left: 41.66666667%;
+ }
+ .col-lg-push-4 {
+ left: 33.33333333%;
+ }
+ .col-lg-push-3 {
+ left: 25%;
+ }
+ .col-lg-push-2 {
+ left: 16.66666667%;
+ }
+ .col-lg-push-1 {
+ left: 8.33333333%;
+ }
+ .col-lg-push-0 {
+ left: auto;
+ }
+ .col-lg-offset-12 {
+ margin-left: 100%;
+ }
+ .col-lg-offset-11 {
+ margin-left: 91.66666667%;
+ }
+ .col-lg-offset-10 {
+ margin-left: 83.33333333%;
+ }
+ .col-lg-offset-9 {
+ margin-left: 75%;
+ }
+ .col-lg-offset-8 {
+ margin-left: 66.66666667%;
+ }
+ .col-lg-offset-7 {
+ margin-left: 58.33333333%;
+ }
+ .col-lg-offset-6 {
+ margin-left: 50%;
+ }
+ .col-lg-offset-5 {
+ margin-left: 41.66666667%;
+ }
+ .col-lg-offset-4 {
+ margin-left: 33.33333333%;
+ }
+ .col-lg-offset-3 {
+ margin-left: 25%;
+ }
+ .col-lg-offset-2 {
+ margin-left: 16.66666667%;
+ }
+ .col-lg-offset-1 {
+ margin-left: 8.33333333%;
+ }
+ .col-lg-offset-0 {
+ margin-left: 0%;
+ }
+}
+.clearfix:before,
+.clearfix:after,
+.container:before,
+.container:after,
+.container-fluid:before,
+.container-fluid:after,
+.row:before,
+.row:after {
+ content: " ";
+ display: table;
+}
+.clearfix:after,
+.container:after,
+.container-fluid:after,
+.row:after {
+ clear: both;
+}
+.center-block {
+ display: block;
+ margin-left: auto;
+ margin-right: auto;
+}
+.pull-right {
+ float: right !important;
+}
+.pull-left {
+ float: left !important;
+}
+.hide {
+ display: none !important;
+}
+.show {
+ display: block !important;
+}
+.invisible {
+ visibility: hidden;
+}
+.text-hide {
+ font: 0/0 a;
+ color: transparent;
+ text-shadow: none;
+ background-color: transparent;
+ border: 0;
+}
+.hidden {
+ display: none !important;
+}
+.affix {
+ position: fixed;
+}
+@-ms-viewport {
+ width: device-width;
+}
+.visible-xs,
+.visible-sm,
+.visible-md,
+.visible-lg {
+ display: none !important;
+}
+.visible-xs-block,
+.visible-xs-inline,
+.visible-xs-inline-block,
+.visible-sm-block,
+.visible-sm-inline,
+.visible-sm-inline-block,
+.visible-md-block,
+.visible-md-inline,
+.visible-md-inline-block,
+.visible-lg-block,
+.visible-lg-inline,
+.visible-lg-inline-block {
+ display: none !important;
+}
+@media (max-width: 767px) {
+ .visible-xs {
+ display: block !important;
+ }
+ table.visible-xs {
+ display: table !important;
+ }
+ tr.visible-xs {
+ display: table-row !important;
+ }
+ th.visible-xs,
+ td.visible-xs {
+ display: table-cell !important;
+ }
+}
+@media (max-width: 767px) {
+ .visible-xs-block {
+ display: block !important;
+ }
+}
+@media (max-width: 767px) {
+ .visible-xs-inline {
+ display: inline !important;
+ }
+}
+@media (max-width: 767px) {
+ .visible-xs-inline-block {
+ display: inline-block !important;
+ }
+}
+@media (min-width: 768px) and (max-width: 991px) {
+ .visible-sm {
+ display: block !important;
+ }
+ table.visible-sm {
+ display: table !important;
+ }
+ tr.visible-sm {
+ display: table-row !important;
+ }
+ th.visible-sm,
+ td.visible-sm {
+ display: table-cell !important;
+ }
+}
+@media (min-width: 768px) and (max-width: 991px) {
+ .visible-sm-block {
+ display: block !important;
+ }
+}
+@media (min-width: 768px) and (max-width: 991px) {
+ .visible-sm-inline {
+ display: inline !important;
+ }
+}
+@media (min-width: 768px) and (max-width: 991px) {
+ .visible-sm-inline-block {
+ display: inline-block !important;
+ }
+}
+@media (min-width: 992px) and (max-width: 1199px) {
+ .visible-md {
+ display: block !important;
+ }
+ table.visible-md {
+ display: table !important;
+ }
+ tr.visible-md {
+ display: table-row !important;
+ }
+ th.visible-md,
+ td.visible-md {
+ display: table-cell !important;
+ }
+}
+@media (min-width: 992px) and (max-width: 1199px) {
+ .visible-md-block {
+ display: block !important;
+ }
+}
+@media (min-width: 992px) and (max-width: 1199px) {
+ .visible-md-inline {
+ display: inline !important;
+ }
+}
+@media (min-width: 992px) and (max-width: 1199px) {
+ .visible-md-inline-block {
+ display: inline-block !important;
+ }
+}
+@media (min-width: 1200px) {
+ .visible-lg {
+ display: block !important;
+ }
+ table.visible-lg {
+ display: table !important;
+ }
+ tr.visible-lg {
+ display: table-row !important;
+ }
+ th.visible-lg,
+ td.visible-lg {
+ display: table-cell !important;
+ }
+}
+@media (min-width: 1200px) {
+ .visible-lg-block {
+ display: block !important;
+ }
+}
+@media (min-width: 1200px) {
+ .visible-lg-inline {
+ display: inline !important;
+ }
+}
+@media (min-width: 1200px) {
+ .visible-lg-inline-block {
+ display: inline-block !important;
+ }
+}
+@media (max-width: 767px) {
+ .hidden-xs {
+ display: none !important;
+ }
+}
+@media (min-width: 768px) and (max-width: 991px) {
+ .hidden-sm {
+ display: none !important;
+ }
+}
+@media (min-width: 992px) and (max-width: 1199px) {
+ .hidden-md {
+ display: none !important;
+ }
+}
+@media (min-width: 1200px) {
+ .hidden-lg {
+ display: none !important;
+ }
+}
+.visible-print {
+ display: none !important;
+}
+@media print {
+ .visible-print {
+ display: block !important;
+ }
+ table.visible-print {
+ display: table !important;
+ }
+ tr.visible-print {
+ display: table-row !important;
+ }
+ th.visible-print,
+ td.visible-print {
+ display: table-cell !important;
+ }
+}
+.visible-print-block {
+ display: none !important;
+}
+@media print {
+ .visible-print-block {
+ display: block !important;
+ }
+}
+.visible-print-inline {
+ display: none !important;
+}
+@media print {
+ .visible-print-inline {
+ display: inline !important;
+ }
+}
+.visible-print-inline-block {
+ display: none !important;
+}
+@media print {
+ .visible-print-inline-block {
+ display: inline-block !important;
+ }
+}
+@media print {
+ .hidden-print {
+ display: none !important;
+ }
+}
diff --git a/utils/test/vnfcatalogue/VNF_Catalogue/public/stylesheets/style.css b/utils/test/vnfcatalogue/VNF_Catalogue/public/stylesheets/style.css
new file mode 100644
index 000000000..e9b3c2d58
--- /dev/null
+++ b/utils/test/vnfcatalogue/VNF_Catalogue/public/stylesheets/style.css
@@ -0,0 +1,252 @@
+/*******************************************************************************
+ * Copyright (c) 2017 Kumar Rishabh and others.
+ *
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Apache License, Version 2.0
+ * which accompanies this distribution, and is available at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *******************************************************************************/
+@import url('https://fonts.googleapis.com/css?family=Muli:300,400,600,700,800');
+*
+{
+ color: #333333;
+ font-family: 'Muli', sans-serif;
+}
+*:focus
+{
+ outline: none;
+}
+html,
+body
+{
+ margin: 0;
+ padding: 0;
+ background: #ffffff;
+ font-family: 'Muli', sans-serif;
+}
+header
+{
+ padding: 10px 35px 0 0px;
+}
+header ul
+{
+ list-style: none;
+ display: inline-block;
+}
+header ul li
+{
+ display: inline-block;
+}
+header .logo
+{
+ background: url(../../images/logo.png) no-repeat;
+ background-size: cover;
+ width: 155px;
+ height: 34px;
+ display: inline-block;
+ margin-right: 20px;
+ margin-left: 0;
+ float: left;
+}
+header ul li.links
+{
+ margin: 7px 10px 0 0;
+}
+header ul li > a,
+.content ul.most-menu li.items a
+{
+ color: #333333;
+ font-weight: 800;
+ font-size: 14px;
+ letter-spacing: 0.6px;
+ font-family: 'Muli', sans-serif;
+}
+header ul.navigation-right
+{
+ float: right;
+ padding-top: 8px;
+}
+header ul.navigation-right li.signup > a
+{
+ border: 2px solid #333333;
+ border-radius: 4px;
+ font-size: 13px;
+ font-weight: 700;
+ padding: 9px 15px;
+}
+header ul.navigation-right li.signin > a
+{
+ border-bottom: 2px solid #333333;
+ font-size: 13px;
+ font-weight: 700;
+ padding: 9px 2px;
+}
+header ul.navigation-right li.option
+{
+ font-weight: 800;
+ padding: 0 10px;
+}
+header ul li > a:hover,
+header ul.navigation-right li.signin > a:hover,
+header ul.navigation-right li.signup > a:hover,
+header ul li > a:focus,
+header ul.navigation-right li.signin > a:focus,
+header ul.navigation-right li.signup > a:focus,
+.content ul.most-menu li a:hover,
+.content ul.most-menu li a:focus
+{
+ text-decoration: none;
+ cursor: pointer;
+ color: #333333;
+}
+header ul.navigation-right li.signup > a:hover
+{
+ background: #333333;
+ color: #ffffff;
+}
+.search-box
+{
+ text-align: center;
+ padding: 100px 0;
+}
+.search-box h1
+{
+ font-size: 30px;
+ letter-spacing: 2px;
+ color: #333333;
+ font-weight: 600;
+}
+form.search-form
+{
+ padding: 10px 20px;
+}
+form.search-form input.search-input
+{
+ font-weight: 400;
+ margin: 30px 0;
+ height: 80px;
+ padding: 10px 30px;
+ max-width: 800px;
+ width: 70%;
+ border-radius: 5px;
+ border: 2px solid #333333;
+ box-shadow: 0 0 15px 1px rgba(0,0,0,0.50);
+ color: #333333;
+ font-size: 22px;
+}
+form.search-form button.search-button
+{
+ padding: 18px 35px;
+ background: #FFF572;
+ border: 0;
+ box-shadow: 0 0 15px 1px #958F40;
+ font-size: 20px;
+ color: #393E41;
+ letter-spacing: 1px;
+ border-radius: 5px;
+ font-weight: 600;
+}
+form.search-form input:focus
+{
+ outline: none;
+}
+form.search-form input::-webkit-input-placeholder
+{
+ font-weight: 400;
+ letter-spacing: 1px;
+ color: #333333;
+}
+form.search-form input::-moz-placeholder
+{
+ font-weight: 400;
+ letter-spacing: 1px;
+ color: #333333;
+}
+form.search-form input:-moz-placeholder
+{
+ font-weight: 400;
+ letter-spacing: 5px;
+ color: #333333;
+}
+form.search-form input:-ms-input-placeholder
+{
+ font-weight: 400;
+ letter-spacing: 1px;
+ color: #333333;
+}
+.content
+{
+ height: 500px;
+ background: #f9f9f9;
+ padding: 10px 0;
+}
+.content ul.most-menu
+{
+ list-style: none;
+ text-align: center;
+ padding-bottom: 10px;
+}
+.content ul.most-menu li.items
+{
+ display: inline-block;
+ margin-right: 5px;
+ padding: 15px 25px;
+}
+.content ul.most-menu li.active
+{
+ background: #FFF572;
+}
+.content-box
+{
+ overflow: hidden;
+ padding: 20px 0 50px 0;
+ display: flex;
+ justify-content: center;
+ background: #FFFFFF;
+ box-shadow: 0 2px 3px 0 rgba(0,0,0,0.50);
+ border-bottom: 2px solid #8B19A2;
+ margin-bottom: 30px;
+}
+.content-data
+{
+ align-self: center;
+}
+.content-data h1.content-title
+{
+ font-size: 25px;
+ color: #000000;
+ letter-spacing: 1.2px;
+}
+.content-data .box
+{
+ padding: 10px 0;
+ height: 90px;
+ text-align: center;
+ border: 2px solid #4D4D4D;
+ border-radius: 2px;
+}
+.content-data .commit-icon
+{
+ width: 23px;
+ height: 16px;
+}
+.content-data .box h3.commits
+{
+ text-align: center;
+ font-size: 12px;
+ color: #333333;
+ letter-spacing: 0.03px;
+}
+footer
+{
+ font-size: 12px;
+ font-weight: 800;
+ color: #333333;
+ text-align: center;
+ padding: 20px;
+}
+.space-10
+{
+ height: 10px;
+}
diff --git a/utils/test/vnfcatalogue/VNF_Catalogue/routes/index.js b/utils/test/vnfcatalogue/VNF_Catalogue/routes/index.js
new file mode 100644
index 000000000..950fcd57e
--- /dev/null
+++ b/utils/test/vnfcatalogue/VNF_Catalogue/routes/index.js
@@ -0,0 +1,18 @@
+/*******************************************************************************
+ * Copyright (c) 2017 Kumar Rishabh and others.
+ *
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Apache License, Version 2.0
+ * which accompanies this distribution, and is available at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *******************************************************************************/
+
+var express = require('express');
+var router = express.Router();
+
+/* GET VNF_Catalogue Home Page. */
+router.get('/', function(req, res) {
+ res.render('index', { title: 'Express' });
+});
+
+module.exports = router;
diff --git a/utils/test/vnfcatalogue/VNF_Catalogue/routes/search_projects.js b/utils/test/vnfcatalogue/VNF_Catalogue/routes/search_projects.js
new file mode 100644
index 000000000..49fceeb3c
--- /dev/null
+++ b/utils/test/vnfcatalogue/VNF_Catalogue/routes/search_projects.js
@@ -0,0 +1,19 @@
+/*******************************************************************************
+ * Copyright (c) 2017 Kumar Rishabh and others.
+ *
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Apache License, Version 2.0
+ * which accompanies this distribution, and is available at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *******************************************************************************/
+
+var express = require('express');
+var router = express.Router();
+
+router.get('/', function(req, res) {
+ // req.param() is deprecated in Express 4; read the query string directly
+ var tags = req.query.tags;
+ console.log(tags);
+ res.render('search_projects', { title: 'Express' });
+});
+
+module.exports = router;
diff --git a/utils/test/vnfcatalogue/VNF_Catalogue/views/error.jade b/utils/test/vnfcatalogue/VNF_Catalogue/views/error.jade
new file mode 100644
index 000000000..4f7fbcaeb
--- /dev/null
+++ b/utils/test/vnfcatalogue/VNF_Catalogue/views/error.jade
@@ -0,0 +1,12 @@
+//
+ Copyright (c) 2017 Kumar Rishabh and others.
+ All rights reserved. This program and the accompanying materials
+ are made available under the terms of the Apache License, Version 2.0
+ which accompanies this distribution, and is available at
+ http://www.apache.org/licenses/LICENSE-2.0
+extends layout
+
+block content
+ h1= message
+ h2= error.status
+ pre #{error.stack}
diff --git a/utils/test/vnfcatalogue/VNF_Catalogue/views/index.jade b/utils/test/vnfcatalogue/VNF_Catalogue/views/index.jade
new file mode 100644
index 000000000..b183f360f
--- /dev/null
+++ b/utils/test/vnfcatalogue/VNF_Catalogue/views/index.jade
@@ -0,0 +1,131 @@
+doctype html
+//
+ Copyright (c) 2017 Kumar Rishabh and others.
+ All rights reserved. This program and the accompanying materials
+ are made available under the terms of the Apache License, Version 2.0
+ which accompanies this distribution, and is available at
+ http://www.apache.org/licenses/LICENSE-2.0
+html(lang='en')
+ head
+ meta(charset='UTF-8')
+ title Document
+ link(rel='stylesheet', href='/stylesheets/3rd_party/bootstrap.css')
+ link(rel='stylesheet', href='/stylesheets/style.css')
+ body
+ script(type='text/javascript' src='http://code.jquery.com/jquery.min.js')
+ script(src='/javascripts/global.js')
+ header
+ ul.navigation
+ li.logo
+ li.links
+ a(href='#') Projects
+ li.links
+ a(href='#') People
+ li.links
+ a(href='#') About
+ ul.navigation-right
+ li.signup
+ a(href='#') Sign up
+ li.option or
+ li.signin
+ a(href='#') Sign in
+ .search-box
+ h1 VNF Catalogue
+ form.search-form
+ input.search-input(type='text', placeholder='Search...', id='Tags')
+ .space-10
+ button.search-button(type='submit', value='Search', id='Search') Search
+ .content
+ ul.most-menu
+ li.items.active
+ a(href='#') Most Popular
+ li.items
+ a(href='#') Most Active
+ li.items
+ a(href='#') Most Active Contributions
+ .container
+ .row
+ .box-container
+ .col-md-3
+ .content-box
+ .content-data
+ h1.content-title AAA
+ .box
+ img.commit-icon(src='/images/3rd_party/commits.png')
+ h3.commits
+ | 4,845
+ br
+ | commits
+ .col-md-3
+ .content-box
+ .content-data
+ h1.content-title AAA
+ .box
+ img.commit-icon(src='/images/3rd_party/commits.png')
+ h3.commits
+ | 4,845
+ br
+ | commits
+ .col-md-3
+ .content-box
+ .content-data
+ h1.content-title AAA
+ .box
+ img.commit-icon(src='/images/3rd_party/commits.png')
+ h3.commits
+ | 4,845
+ br
+ | commits
+ .col-md-3
+ .content-box
+ .content-data
+ h1.content-title AAA
+ .box
+ img.commit-icon(src='/images/3rd_party/commits.png')
+ h3.commits
+ | 4,845
+ br
+ | commits
+ .col-md-3
+ .content-box
+ .content-data
+ h1.content-title AAA
+ .box
+ img.commit-icon(src='/images/3rd_party/commits.png')
+ h3.commits
+ | 4,845
+ br
+ | commits
+ .col-md-3
+ .content-box
+ .content-data
+ h1.content-title AAA
+ .box
+ img.commit-icon(src='/images/3rd_party/commits.png')
+ h3.commits
+ | 4,845
+ br
+ | commits
+ .col-md-3
+ .content-box
+ .content-data
+ h1.content-title AAA
+ .box
+ img.commit-icon(src='/images/3rd_party/commits.png')
+ h3.commits
+ | 4,845
+ br
+ | commits
+ .col-md-3
+ .content-box
+ .content-data
+ h1.content-title AAA
+ .box
+ img.commit-icon(src='/images/3rd_party/commits.png')
+ h3.commits
+ | 4,845
+ br
+ | commits
+ footer
+ | © 2017 OPNFV
+script.
diff --git a/utils/test/vnfcatalogue/VNF_Catalogue/views/layout.jade b/utils/test/vnfcatalogue/VNF_Catalogue/views/layout.jade
new file mode 100644
index 000000000..7cc7dfc92
--- /dev/null
+++ b/utils/test/vnfcatalogue/VNF_Catalogue/views/layout.jade
@@ -0,0 +1,15 @@
+doctype html
+//
+ Copyright (c) 2017 Kumar Rishabh and others.
+ All rights reserved. This program and the accompanying materials
+ are made available under the terms of the Apache License, Version 2.0
+ which accompanies this distribution, and is available at
+ http://www.apache.org/licenses/LICENSE-2.0
+html
+ head
+ title= title
+ link(rel='stylesheet', href='/stylesheets/style.css')
+ body
+ block content
+ script(src='http://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js')
+ script(src='/javascripts/global.js')
diff --git a/utils/test/vnfcatalogue/VNF_Catalogue/views/search_projects.jade b/utils/test/vnfcatalogue/VNF_Catalogue/views/search_projects.jade
new file mode 100644
index 000000000..3076543af
--- /dev/null
+++ b/utils/test/vnfcatalogue/VNF_Catalogue/views/search_projects.jade
@@ -0,0 +1,128 @@
+doctype html
+//
+ Copyright (c) 2017 Kumar Rishabh and others.
+ All rights reserved. This program and the accompanying materials
+ are made available under the terms of the Apache License, Version 2.0
+ which accompanies this distribution, and is available at
+ http://www.apache.org/licenses/LICENSE-2.0
+html(lang='en')
+ head
+ meta(charset='UTF-8')
+ title Document
+ link(rel='stylesheet', href='/stylesheets/3rd_party/bootstrap.css')
+ link(rel='stylesheet', href='/stylesheets/style.css')
+ body
+ header
+ ul.navigation
+ li.logo
+ li.links
+ a(href='#') Projects
+ li.links
+ a(href='#') People
+ li.links
+ a(href='#') About
+ ul.navigation-right
+ li.signup
+ a(href='#') Sign up
+ li.option or
+ li.signin
+ a(href='#') Sign in
+ .search-box
+ h1 VNF Catalogue
+ form.search-form
+ input.search-input(type='text', placeholder='Search...')
+ .space-10
+ button.search-button(type='submit', value='Search') Search
+ .content
+ ul.most-menu
+ li.items.active
+ a(href='#') Most Popular
+ li.items
+ a(href='#') Most Active
+ li.items
+ a(href='#') Most Active Contributions
+ .container
+ .row
+ .box-container
+ .col-md-3
+ .content-box
+ .content-data
+ h1.content-title AAA
+ .box
+ img.commit-icon(src='/images/3rd_party/commits.png')
+ h3.commits
+ | 4,845
+ br
+ | commits
+ .col-md-3
+ .content-box
+ .content-data
+ h1.content-title AAA
+ .box
+ img.commit-icon(src='/images/3rd_party/commits.png')
+ h3.commits
+ | 4,845
+ br
+ | commits
+ .col-md-3
+ .content-box
+ .content-data
+ h1.content-title AAA
+ .box
+ img.commit-icon(src='/images/3rd_party/commits.png')
+ h3.commits
+ | 4,845
+ br
+ | commits
+ .col-md-3
+ .content-box
+ .content-data
+ h1.content-title AAA
+ .box
+ img.commit-icon(src='/images/3rd_party/commits.png')
+ h3.commits
+ | 4,845
+ br
+ | commits
+ .col-md-3
+ .content-box
+ .content-data
+ h1.content-title AAA
+ .box
+ img.commit-icon(src='/images/3rd_party/commits.png')
+ h3.commits
+ | 4,845
+ br
+ | commits
+ .col-md-3
+ .content-box
+ .content-data
+ h1.content-title AAA
+ .box
+ img.commit-icon(src='/images/3rd_party/commits.png')
+ h3.commits
+ | 4,845
+ br
+ | commits
+ .col-md-3
+ .content-box
+ .content-data
+ h1.content-title AAA
+ .box
+ img.commit-icon(src='/images/3rd_party/commits.png')
+ h3.commits
+ | 4,845
+ br
+ | commits
+ .col-md-3
+ .content-box
+ .content-data
+ h1.content-title AAA
+ .box
+ img.commit-icon(src='/images/3rd_party/commits.png')
+ h3.commits
+ | 4,845
+ br
+ | commits
+ footer
+ | © 2017 OPNFV
diff --git a/utils/test/vnfcatalogue/helpers/README.md b/utils/test/vnfcatalogue/helpers/README.md
new file mode 100644
index 000000000..6c0ca78c3
--- /dev/null
+++ b/utils/test/vnfcatalogue/helpers/README.md
@@ -0,0 +1,22 @@
+# Helper Directory
+
+## Helper to migrate database
+
+First make sure Node.js and MySQL are installed. Then use
+
+```bash
+npm install bookshelf mysql knex when lodash --save
+```
+
+Create a database named **vnf_catalogue**.
+Enter the mysql credentials in migrate.js.
+
+Then use
+
+```bash
+node migrate
+```
+
+If successful, the script prints a success message. The script is
+idempotent in nature: running it twice just returns an error and writes
+nothing.
+
diff --git a/utils/test/vnfcatalogue/helpers/migrate.js b/utils/test/vnfcatalogue/helpers/migrate.js
new file mode 100644
index 000000000..ec209053c
--- /dev/null
+++ b/utils/test/vnfcatalogue/helpers/migrate.js
@@ -0,0 +1,78 @@
+/*******************************************************************************
+ * Copyright (c) 2017 Kumar Rishabh(penguinRaider) and others.
+ *
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Apache License, Version 2.0
+ * which accompanies this distribution, and is available at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *******************************************************************************/
+
+var knex = require('knex')({
+ client: 'mysql',
+ connection: {
+ host : 'localhost',
+ user : '*',
+ password : '*',
+ database : 'vnf_catalogue',
+ charset : 'utf8'
+ }
+});
+var Schema = require('./schema');
+var sequence = require('when/sequence');
+var _ = require('lodash');
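+
+// Translate one Schema entry into a knex createTable call, mapping each
+// declarative column descriptor onto knex column builder methods.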
+function createTable(tableName) {
+ return knex.schema.createTable(tableName, function (table) {
+ var column;
+ var columnKeys = _.keys(Schema[tableName]);
+ _.each(columnKeys, function (key) {
+ if (Schema[tableName][key].type === 'text' && Schema[tableName][key].hasOwnProperty('fieldtype')) {
+ column = table[Schema[tableName][key].type](key, Schema[tableName][key].fieldtype);
+ }
+ else if (Schema[tableName][key].type === 'string' && Schema[tableName][key].hasOwnProperty('maxlength')) {
+ column = table[Schema[tableName][key].type](key, Schema[tableName][key].maxlength);
+ }
+ else {
+ column = table[Schema[tableName][key].type](key);
+ }
+ if (Schema[tableName][key].hasOwnProperty('nullable') && Schema[tableName][key].nullable === true) {
+ column.nullable();
+ }
+ else {
+ column.notNullable();
+ }
+ if (Schema[tableName][key].hasOwnProperty('primary') && Schema[tableName][key].primary === true) {
+ column.primary();
+ }
+ if (Schema[tableName][key].hasOwnProperty('unique') && Schema[tableName][key].unique) {
+ column.unique();
+ }
+ if (Schema[tableName][key].hasOwnProperty('unsigned') && Schema[tableName][key].unsigned) {
+ column.unsigned();
+ }
+ if (Schema[tableName][key].hasOwnProperty('references')) {
+ column.references(Schema[tableName][key].references);
+ }
+ if (Schema[tableName][key].hasOwnProperty('defaultTo')) {
+ column.defaultTo(Schema[tableName][key].defaultTo);
+ }
+ });
+ });
+}
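+
+// Create the tables one at a time (when/sequence) so that columns with
+// 'references' always point at tables that already exist.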
+function createTables () {
+ var tables = [];
+ var tableNames = _.keys(Schema);
+ tables = _.map(tableNames, function (tableName) {
+ return function () {
+ return createTable(tableName);
+ };
+ });
+ return sequence(tables);
+}
+createTables()
+.then(function() {
+ console.log('Tables created!!');
+ process.exit(0);
+})
+.catch(function (error) {
+ throw error;
+});
diff --git a/utils/test/vnfcatalogue/helpers/schema.js b/utils/test/vnfcatalogue/helpers/schema.js
new file mode 100644
index 000000000..2aaf99ae2
--- /dev/null
+++ b/utils/test/vnfcatalogue/helpers/schema.js
@@ -0,0 +1,51 @@
+/*******************************************************************************
+ * Copyright (c) 2017 Kumar Rishabh(penguinRaider) and others.
+ *
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Apache License, Version 2.0
+ * which accompanies this distribution, and is available at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *******************************************************************************/
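+// Each column descriptor below is consumed by helpers/migrate.js: 'type'
+// names the knex column builder to call, and the optional keys (maxlength,
+// fieldtype, nullable, primary, unique, unsigned, references, defaultTo)
+// map onto the matching knex column modifiers.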
+var Schema = {
+ photo: {
+ photo_id: {type: 'increments', nullable: false, primary: true},
+ photo_url: {type: 'string', maxlength: 254, nullable: false}
+ },
+ user: {
+ user_id: {type: 'increments', nullable: false, primary: true},
+ user_name: {type: 'string', maxlength: 254, nullable: false},
+ password: {type: 'string', maxlength: 150, nullable: false},
+ email_id: {type: 'string', maxlength: 254, nullable: false, unique: true, validations: {isEmail: true}},
+ photo_id: {type: 'integer', nullable: true, unsigned: true, references: 'photo.photo_id'},
+ company: {type: 'string', maxlength: 254, nullable: false},
+ introduction: {type: 'string', maxlength: 510, nullable: false},
+ last_login: {type: 'dateTime', nullable: true},
+ created_at: {type: 'dateTime', nullable: false},
+ },
+ vnf: {
+ vnf_id: {type: 'increments', nullable: false, primary: true},
+ vnf_name: {type: 'string', maxlength: 254, nullable: false},
+ repo_url: {type: 'string', maxlength: 254, nullable: false},
+ photo_id: {type: 'integer', nullable: true, unsigned: true, references: 'photo.photo_id'},
+ submitter_id: {type: 'integer', nullable: false, unsigned: true, references: 'user.user_id'},
+ lines_of_code: {type: 'integer', nullable: true, unsigned: true},
+ versions: {type: 'integer', nullable: true, unsigned: true},
+ no_of_developers: {type: 'integer', nullable: true, unsigned: true},
+ },
+ tag: {
+ tag_id: {type: 'increments', nullable: false, primary: true},
+ name: {type: 'string', maxlength: 150, nullable: false}
+ },
+ vnf_tags: {
+ vnf_tag_id: {type: 'increments', nullable: false, primary: true},
+ tag_id: {type: 'integer', nullable: false, unsigned: true, references: 'tag.tag_id'},
+ vnf_id: {type: 'integer', nullable: false, unsigned: true, references: 'vnf.vnf_id'},
+ },
+ vnf_contributors: {
+ vnf_contributors_id: {type: 'increments', nullable: false, primary: true},
+ user_id: {type: 'integer', nullable: false, unsigned: true, references: 'user.user_id'},
+ vnf_id: {type: 'integer', nullable: false, unsigned: true, references: 'vnf.vnf_id'},
+ created_at: {type: 'dateTime', nullable: false},
+ }
+};
+module.exports = Schema;