Diffstat (limited to 'jjb/apex')
-rwxr-xr-x  jjb/apex/apex-build.sh             |  6
-rwxr-xr-x  jjb/apex/apex-deploy.sh            | 35
-rwxr-xr-x  jjb/apex/apex-download-artifact.sh | 18
-rwxr-xr-x  jjb/apex/apex-unit-test.sh         | 22
-rw-r--r--  jjb/apex/apex.yml                  | 35
-rw-r--r--  jjb/apex/apex.yml.j2               | 35
6 files changed, 91 insertions(+), 60 deletions(-)
diff --git a/jjb/apex/apex-build.sh b/jjb/apex/apex-build.sh
index 58d9f1a40..ad94ba3d7 100755
--- a/jjb/apex/apex-build.sh
+++ b/jjb/apex/apex-build.sh
@@ -28,8 +28,10 @@ fi
BUILD_DIRECTORY=${WORKSPACE}/build
# start the build
-cd $WORKSPACE/ci
-./build.sh $BUILD_ARGS
+pushd ${BUILD_DIRECTORY}
+make clean
+popd
+python3 apex/build.py $BUILD_ARGS
RPM_VERSION=$(grep Version: $WORKSPACE/build/rpm_specs/opnfv-apex.spec | awk '{ print $2 }')-$(echo $OPNFV_ARTIFACT_VERSION | tr -d '_-')
# list the contents of BUILD_OUTPUT directory
echo "Build Directory is ${BUILD_DIRECTORY}/../.build"
diff --git a/jjb/apex/apex-deploy.sh b/jjb/apex/apex-deploy.sh
index 4244f4427..b3bc1416e 100755
--- a/jjb/apex/apex-deploy.sh
+++ b/jjb/apex/apex-deploy.sh
@@ -10,6 +10,8 @@ echo "Starting the Apex deployment."
echo "--------------------------------------------------------"
echo
+sudo rm -rf /tmp/tmp*
+
if [ -z "$DEPLOY_SCENARIO" ]; then
echo "Deploy scenario not set!"
exit 1
@@ -35,7 +37,7 @@ if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
# Settings for deploying from git workspace
DEPLOY_SETTINGS_DIR="${WORKSPACE}/config/deploy"
NETWORK_SETTINGS_DIR="${WORKSPACE}/config/network"
- DEPLOY_CMD="${WORKSPACE}/ci/deploy.sh"
+ DEPLOY_CMD="opnfv-deploy"
CLEAN_CMD="${WORKSPACE}/ci/clean.sh"
RESOURCES="${WORKSPACE}/.build/"
CONFIG="${WORKSPACE}/build"
@@ -46,6 +48,11 @@ if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
# Ensure artifacts were downloaded and extracted correctly
# TODO(trozet) add verification here
+ # Install dev build
+ mkdir -p ~/tmp
+ mv -f .build ~/tmp/
+ sudo pip3 install --upgrade --force-reinstall .
+ mv -f ~/tmp/.build .
else
DEPLOY_SETTINGS_DIR="/etc/opnfv-apex/"
NETWORK_SETTINGS_DIR="/etc/opnfv-apex/"
@@ -63,7 +70,10 @@ fi
# Install Dependencies
# Make sure python34 dependencies are installed
-for dep_pkg in epel-release python34 python34-PyYAML python34-setuptools; do
+dependencies="epel-release python34 python34-devel libvirt-devel python34-pip \
+ansible python34-PyYAML python34-jinja2 python34-setuptools python-tox ansible"
+
+for dep_pkg in $dependencies; do
if ! rpm -q ${dep_pkg} > /dev/null; then
if ! sudo yum install -y ${dep_pkg}; then
echo "Failed to install ${dep_pkg}"
@@ -72,31 +82,12 @@ for dep_pkg in epel-release python34 python34-PyYAML python34-setuptools; do
fi
done
-# Make sure jinja2 is installed
-for python_pkg in jinja2; do
- if ! python3.4 -c "import $python_pkg"; then
- echo "$python_pkg package not found for python3.4, attempting to install..."
- if ! sudo easy_install-3.4 $python_pkg; then
- echo -e "Failed to install $python_pkg package for python3.4"
- exit 1
- fi
- fi
-done
-
if [[ "$JOB_NAME" =~ "virtual" ]]; then
# Make sure ipxe-roms-qemu package is updated to latest.
# This package is needed for multi virtio nic PXE boot in virtual environment.
sudo yum update -y ipxe-roms-qemu
- if [ -z ${PYTHONPATH:-} ]; then
- export PYTHONPATH=${WORKSPACE}/lib/python
- else
- export PYTHONPATH=$PYTHONPATH:${WORKSPACE}/lib/python
- fi
fi
-# set env vars to deploy cmd
-DEPLOY_CMD="BASE=${BASE} IMAGES=${IMAGES} LIB=${LIB} ${DEPLOY_CMD}"
-
if [ "$OPNFV_CLEAN" == 'yes' ]; then
if sudo test -e '/root/inventory/pod_settings.yaml'; then
clean_opts='-i /root/inventory/pod_settings.yaml'
@@ -104,7 +95,7 @@ if [ "$OPNFV_CLEAN" == 'yes' ]; then
clean_opts=''
fi
- sudo BASE=${BASE} LIB=${LIB} ${CLEAN_CMD} ${clean_opts}
+ sudo ${CLEAN_CMD} ${clean_opts}
fi
if echo ${DEPLOY_SCENARIO} | grep ipv6; then
diff --git a/jjb/apex/apex-download-artifact.sh b/jjb/apex/apex-download-artifact.sh
index 860cd60a5..a11fb65b8 100755
--- a/jjb/apex/apex-download-artifact.sh
+++ b/jjb/apex/apex-download-artifact.sh
@@ -21,7 +21,7 @@ if [[ "$ARTIFACT_VERSION" =~ dev ]]; then
tar -xvf apex-${OPNFV_ARTIFACT_VERSION}.tar.gz
popd > /dev/null
else
- echo "Will download RPMs..."
+ echo "Will use RPMs..."
# Must be RPMs/ISO
echo "Downloading latest properties file"
@@ -33,13 +33,13 @@ else
source $BUILD_DIRECTORY/opnfv.properties
RPM_INSTALL_PATH=$(echo "http://"$OPNFV_RPM_URL | sed 's/\/'"$(basename $OPNFV_RPM_URL)"'//')
- RPM_LIST=${RPM_INSTALL_PATH}/$(basename $OPNFV_RPM_URL)
+ RPM_LIST=$(basename $OPNFV_RPM_URL)
# find version of RPM
VERSION_EXTENSION=$(echo $(basename $RPM_LIST) | grep -Eo '[0-9]+\.[0-9]+-([0-9]{8}|[a-z]+-[0-9]\.[0-9]+)')
# build RPM List which already includes base Apex RPM
- RPM_LIST+=" ${RPM_INSTALL_PATH}/opnfv-apex-undercloud-${VERSION_EXTENSION}.noarch.rpm"
- RPM_LIST+=" ${RPM_INSTALL_PATH}/python34-opnfv-apex-${VERSION_EXTENSION}.noarch.rpm"
+ RPM_LIST+=" opnfv-apex-undercloud-${VERSION_EXTENSION}.noarch.rpm"
+ RPM_LIST+=" python34-opnfv-apex-${VERSION_EXTENSION}.noarch.rpm"
# remove old / install new RPMs
if rpm -q opnfv-apex > /dev/null; then
@@ -48,10 +48,20 @@ else
sudo yum remove -y ${INSTALLED_RPMS}
fi
fi
+ # Create an rpms dir on slave
+ mkdir -p ~/apex_rpms
+ pushd ~/apex_rpms
+ # Remove older rpms which do not match this version
+ find . ! -name "*${VERSION_EXTENSION}.noarch.rpm" -type f -exec rm -f {} +
+ # Download RPM only if changed on server
+ for rpm in $RPM_LIST; do
+ wget -N ${RPM_INSTALL_PATH}/${rpm}
+ done
if ! sudo yum install -y $RPM_LIST; then
echo "Unable to install new RPMs: $RPM_LIST"
exit 1
fi
+ popd
fi
# TODO: Uncomment these lines to verify SHA512SUMs once the sums are
diff --git a/jjb/apex/apex-unit-test.sh b/jjb/apex/apex-unit-test.sh
index 12cb862b0..3112c9d36 100755
--- a/jjb/apex/apex-unit-test.sh
+++ b/jjb/apex/apex-unit-test.sh
@@ -8,10 +8,24 @@ echo "--------------------------------------------------------------------------
echo
-pushd ci/ > /dev/null
-sudo BASE="${WORKSPACE}/build" LIB="${WORKSPACE}/lib" ./clean.sh
-./test.sh
-popd
+pushd build/ > /dev/null
+for pkg in yamllint rpmlint iproute epel-release python34-devel python34-nose python34-PyYAML python-pep8 python34-mock python34-pip; do
+ if ! rpm -q ${pkg} > /dev/null; then
+ if ! sudo yum install -y ${pkg}; then
+ echo "Failed to install ${pkg} package..."
+ exit 1
+ fi
+ fi
+done
+
+# Make sure coverage is installed
+if ! python3 -c "import coverage" &> /dev/null; then sudo pip3 install coverage; fi
+
+make rpmlint
+make python-pep8-check
+make yamllint
+make python-tests
+popd > /dev/null
echo "--------------------------------------------------------"
echo "Unit Tests Done!"
diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index 4070e2099..bdce67a44 100644
--- a/jjb/apex/apex.yml
+++ b/jjb/apex/apex.yml
@@ -373,6 +373,8 @@
pattern: 'lib/**'
- compare-type: ANT
pattern: 'config/**'
+ - compare-type: ANT
+ pattern: 'apex/**'
properties:
- logrotate-default
@@ -406,6 +408,10 @@
kill-phase-on: FAILURE
abort-all-job: true
git-revision: true
+ - shell: |
+ echo DEPLOY_SCENARIO=$(echo $GERRIT_EVENT_COMMENT_TEXT | grep start-gate-scenario | grep -Eo 'os-.*') > detected_scenario
+ - inject:
+ properties-file: detected_scenario
- multijob:
name: functest-smoke
condition: SUCCESSFUL
@@ -413,7 +419,7 @@
- name: 'functest-apex-virtual-suite-{stream}'
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO={verify-scenario}
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
FUNCTEST_SUITE_NAME=healthcheck
GERRIT_BRANCH=$GERRIT_BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
@@ -505,7 +511,7 @@
wrappers:
- timeout:
- timeout: 120
+ timeout: 140
fail: true
parameters:
@@ -696,18 +702,19 @@
enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
abort-all-job: false
git-revision: false
-# - multijob:
-# name: StorPerf
-# condition: ALWAYS
-# projects:
-# - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
-# node-parameters: true
-# current-parameters: false
-# predefined-parameters:
-# DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-# kill-phase-on: NEVER
-# abort-all-job: false
-# git-revision: false
+ - multijob:
+ name: StorPerf
+ condition: ALWAYS
+ projects:
+ - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
+ node-parameters: true
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-nosdn-nofeature-ha/"
+ kill-phase-on: NEVER
+ abort-all-job: false
+ git-revision: false
# Build status is always success due to the conditional plugin prefetching
# build status before multijob phases execute
# - conditional-step:
diff --git a/jjb/apex/apex.yml.j2 b/jjb/apex/apex.yml.j2
index 28b83e806..514dd49bf 100644
--- a/jjb/apex/apex.yml.j2
+++ b/jjb/apex/apex.yml.j2
@@ -285,6 +285,8 @@
pattern: 'lib/**'
- compare-type: ANT
pattern: 'config/**'
+ - compare-type: ANT
+ pattern: 'apex/**'
properties:
- logrotate-default
@@ -318,6 +320,10 @@
kill-phase-on: FAILURE
abort-all-job: true
git-revision: true
+ - shell: |
+ echo DEPLOY_SCENARIO=$(echo $GERRIT_EVENT_COMMENT_TEXT | grep start-gate-scenario | grep -Eo 'os-.*') > detected_scenario
+ - inject:
+ properties-file: detected_scenario
- multijob:
name: functest-smoke
condition: SUCCESSFUL
@@ -325,7 +331,7 @@
- name: 'functest-apex-virtual-suite-{stream}'
current-parameters: false
predefined-parameters: |
- DEPLOY_SCENARIO={verify-scenario}
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
FUNCTEST_SUITE_NAME=healthcheck
GERRIT_BRANCH=$GERRIT_BRANCH
GERRIT_REFSPEC=$GERRIT_REFSPEC
@@ -417,7 +423,7 @@
wrappers:
- timeout:
- timeout: 120
+ timeout: 140
fail: true
parameters:
@@ -608,18 +614,19 @@
enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
abort-all-job: false
git-revision: false
-# - multijob:
-# name: StorPerf
-# condition: ALWAYS
-# projects:
-# - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
-# node-parameters: true
-# current-parameters: false
-# predefined-parameters:
-# DEPLOY_SCENARIO=$DEPLOY_SCENARIO
-# kill-phase-on: NEVER
-# abort-all-job: false
-# git-revision: false
+ - multijob:
+ name: StorPerf
+ condition: ALWAYS
+ projects:
+ - name: 'storperf-apex-baremetal-daily-{scenario_stream}'
+ node-parameters: true
+ current-parameters: false
+ predefined-parameters:
+ DEPLOY_SCENARIO=$DEPLOY_SCENARIO
+ enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-nosdn-nofeature-ha/"
+ kill-phase-on: NEVER
+ abort-all-job: false
+ git-revision: false
# Build status is always success due to the conditional plugin prefetching
# build status before multijob phases execute
# - conditional-step: