summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--.gitignore1
-rw-r--r--build/Makefile2
-rw-r--r--ci/build.py234
-rwxr-xr-xci/build.sh204
-rw-r--r--config/build/build_settings.yaml29
-rw-r--r--lib/ansible/playbooks/build_dependencies.yml21
6 files changed, 263 insertions, 228 deletions
diff --git a/.gitignore b/.gitignore
index 89f2a286..511a0de1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,3 +7,4 @@
/releng/
.build/
.cache/
+ci/apex_build.log
diff --git a/build/Makefile b/build/Makefile
index 1d329fb5..5ed9d0ae 100644
--- a/build/Makefile
+++ b/build/Makefile
@@ -52,7 +52,7 @@ clean-cache:
images: undercloud overcloud-full overcloud-opendaylight overcloud-onos
.PHONY: rpms
-rpms: common-rpm undercloud-rpm opendaylight-rpm onos-rpm
+rpms: images common-rpm undercloud-rpm opendaylight-rpm onos-rpm
.PHONY: rpms-check
rpms-check: release-rpm-check common-rpm-check undercloud-rpm-check opendaylight-rpm-check onos-rpm-check
diff --git a/ci/build.py b/ci/build.py
new file mode 100644
index 00000000..a17b21bd
--- /dev/null
+++ b/ci/build.py
@@ -0,0 +1,234 @@
+##############################################################################
+# Copyright (c) 2017 Tim Rozet (trozet@redhat.com) and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import argparse
+import logging
+import os
+import subprocess
+import sys
+import uuid
+import yaml
+
# Name of the YAML journal file that records the history of cache tarballs
CACHE_JOURNAL = 'cache_journal.yaml'
# Directory (relative to the Apex repo root) where the cache is unpacked
TMP_CACHE = '.cache'
# Name of the build directory inside the Apex source tree
BUILD_ROOT = 'build'
# Default build log location (relative to the current working directory)
BUILD_LOG_FILE = './apex_build.log'
+
class ApexBuildException(Exception):
    """Raised when the Apex build cannot proceed (e.g. invalid repo root)."""
    pass
+
+
def create_build_parser():
    """Create the command-line parser for the Apex build script.

    :return: configured argparse.ArgumentParser instance
    """
    parser = argparse.ArgumentParser()
    # Logging options
    parser.add_argument('--debug', action='store_true', default=False,
                        help="Turn on debug messages")
    parser.add_argument('-l', '--log-file', default=BUILD_LOG_FILE,
                        dest='log_file', help="Log file to log to")
    # Cache handling
    parser.add_argument('-c', '--cache-dir', dest='cache_dir', default=None,
                        help='Directory to store cache')
    # Artifact selection
    parser.add_argument('--iso', action='store_true', default=False,
                        help='Build ISO image')
    parser.add_argument('--rpms', action='store_true', default=False,
                        help='Build RPMs')
    parser.add_argument('-r', '--release', dest='build_version',
                        help='Version to apply to build '
                             'artifact label')
    return parser
+
+
def get_journal(cache_dir):
    """
    Search for the journal file and return its contents.

    :param cache_dir: cache storage directory where journal file is
    :return: list of cache entries from the journal, or None when no
             journal file exists
    :raises ApexBuildException: if the journal content is not a list
    """
    journal_file = os.path.join(cache_dir, CACHE_JOURNAL)
    if not os.path.isfile(journal_file):
        logging.info("Journal file not found {}, skipping cache search".format(
            journal_file))
        return None
    with open(journal_file, 'r') as fh:
        cache_journal = yaml.safe_load(fh)
    # An assert would be stripped under 'python -O'; validate explicitly
    # so a corrupt journal always fails loudly.
    if not isinstance(cache_journal, list):
        raise ApexBuildException("Invalid cache journal format (expected a "
                                 "list): {}".format(journal_file))
    return cache_journal
+
+
def get_cache_file(cache_dir):
    """
    Search for a valid cache entry in the cache journal.

    The journal stores tarball basenames (see build_cache), so entries
    are resolved against cache_dir before checking existence; the bare
    name is also tried for backward compatibility with cwd-relative
    entries.

    :param cache_dir: directory where cache and journal are located
    :return: path of a valid (existing) cache file, or None
    """
    cache_journal = get_journal(cache_dir)
    if cache_journal is not None:
        # The most recent entry is the last one appended to the journal
        candidate = cache_journal[-1]
        for path in (os.path.join(cache_dir, candidate), candidate):
            if os.path.isfile(path):
                return path
+
+
def unpack_cache(cache_dest, cache_dir=None):
    """
    Unpack the most recent valid cache tarball into cache_dest.

    :param cache_dest: directory to unpack cache contents into (created
                       if missing)
    :param cache_dir: cache storage location holding tarballs/journal;
                      None disables cache handling
    :return: None
    """
    if cache_dir is None:
        logging.info("Cache directory not provided, skipping cache unpack")
        return
    elif os.path.isdir(cache_dir) is False:
        logging.info("Cache Directory does not exist, skipping cache unpack")
        return
    else:
        logging.info("Cache Directory Found: {}".format(cache_dir))
        cache_file = get_cache_file(cache_dir)
        if cache_file is None:
            logging.info("No cache file detected, skipping cache unpack")
            return
        logging.info("Unpacking Cache {}".format(cache_file))
        if not os.path.exists(cache_dest):
            os.makedirs(cache_dest)
        try:
            subprocess.check_call(["tar", "xvf", cache_file, "-C", cache_dest])
        except subprocess.CalledProcessError:
            logging.warning("Cache unpack failed")
            return
        # logging uses lazy %-style args, so the original '{}' placeholder
        # plus comma-passed arg never interpolated; use str.format like the
        # rest of this module
        logging.info("Cache unpacked, contents are: {}".format(
            os.listdir(cache_dest)))
+
+
def build(build_root, version, iso=False, rpms=False):
    """
    Invoke make on the requested Apex build targets.

    :param build_root: directory containing the Apex Makefile
    :param version: release label passed to make as RELEASE=..., or None
    :param iso: build the ISO image (takes precedence over rpms)
    :param rpms: build only the RPMs
    :raises subprocess.CalledProcessError: when the make invocation fails
    """
    # Select targets: iso wins over rpms; default builds images and
    # runs the rpm spec checks.
    if iso:
        targets = ['iso']
    elif rpms:
        targets = ['rpms']
    else:
        targets = ['images', 'rpms-check']
    release_args = [] if version is None else ['RELEASE={}'.format(version)]
    logging.info('Building targets: {}'.format(targets))
    cmd = ["make"] + release_args + ["-C", build_root] + targets
    try:
        output = subprocess.check_output(cmd)
    except subprocess.CalledProcessError as e:
        logging.error("Failed to build Apex artifacts")
        logging.error(e.output)
        raise e
    logging.info(output)
+
+
def build_cache(cache_source, cache_dir):
    """
    Tar up new cache with unique name and store it in cache storage
    directory. Also update journal file with new cache entry.

    :param cache_source: source files to tar up when building cache file
    :param cache_dir: cache storage location
    :return: None
    """
    if cache_dir is None:
        logging.info("No cache dir specified, will not build cache")
        return
    cache_name = 'apex-cache-{}.tgz'.format(str(uuid.uuid4()))
    cache_full_path = os.path.join(cache_dir, cache_name)
    os.makedirs(cache_dir, exist_ok=True)
    try:
        subprocess.check_call(['tar', '--atime-preserve', '--dereference',
                               '-caf', cache_full_path, '-C', cache_source,
                               '.'])
    except BaseException as e:
        # Remove any partial tarball before propagating (catch includes
        # KeyboardInterrupt) so a broken cache is never left behind.
        logging.error("Unable to build new cache tarball")
        if os.path.isfile(cache_full_path):
            os.remove(cache_full_path)
        raise e
    if not os.path.isfile(cache_full_path):
        logging.warning("Cache file did not build correctly")
        return
    logging.info("Cache Build Complete")
    # Append this cache to the journal (creating it on first run)
    entries = get_journal(cache_dir)
    entries = [cache_name] if entries is None else entries + [cache_name]
    journal_file = os.path.join(cache_dir, CACHE_JOURNAL)
    with open(journal_file, 'w') as fh:
        yaml.safe_dump(entries, fh, default_flow_style=False)
    logging.info("Journal updated with new entry: {}".format(cache_name))
+
+
def prune_cache(cache_dir):
    """
    Remove older cache entries if there are more than 2.

    :param cache_dir: Cache storage directory
    :return: None
    """
    if cache_dir is None:
        return
    cache_modified_flag = False
    cache_entries = get_journal(cache_dir)
    if cache_entries is None:
        # No journal file -> nothing to prune
        return
    while len(cache_entries) > 2:
        logging.debug("Will remove older cache entries")
        cache_to_rm = cache_entries[0]
        cache_full_path = os.path.join(cache_dir, cache_to_rm)
        if os.path.isfile(cache_full_path):
            try:
                os.remove(cache_full_path)
                cache_entries.pop(0)
                cache_modified_flag = True
            # os.EX_OSERR is an exit-status int, not an exception class;
            # catching it would itself raise TypeError, so catch OSError
            except OSError:
                logging.warning("Failed to remove cache file: {}".format(
                    cache_full_path))
                break
        else:
            # Oldest entry is already gone from disk; without a break the
            # loop would spin forever since the journal never shrinks
            logging.debug("No more cache cleanup necessary")
            break

    if cache_modified_flag:
        logging.debug("Updating cache journal")
        journal_file = os.path.join(cache_dir, CACHE_JOURNAL)
        with open(journal_file, 'w') as fh:
            yaml.safe_dump(cache_entries, fh, default_flow_style=False)
+
if __name__ == '__main__':
    parser = create_build_parser()
    args = parser.parse_args(sys.argv[1:])
    if args.debug:
        log_level = logging.DEBUG
    else:
        log_level = logging.INFO
    # dirname('') happens for a bare filename and makedirs('') raises,
    # so only create a log directory when one is actually given
    log_dir = os.path.dirname(args.log_file)
    if log_dir:
        os.makedirs(log_dir, exist_ok=True)
    formatter = '%(asctime)s %(levelname)s: %(message)s'
    logging.basicConfig(filename=args.log_file,
                        format=formatter,
                        datefmt='%m/%d/%Y %I:%M:%S %p',
                        level=log_level)
    # Mirror log output to the console in addition to the log file
    console = logging.StreamHandler()
    console.setLevel(log_level)
    console.setFormatter(logging.Formatter(formatter))
    logging.getLogger('').addHandler(console)
    # Locate the Apex repo root: start from the parent of the cwd and
    # walk it; the deepest/last directory containing 'build' wins
    apex_root = os.path.split(os.getcwd())[0]
    for root, dirs, files in os.walk(apex_root):
        if BUILD_ROOT in dirs:
            apex_root = root
    apex_build_root = os.path.join(apex_root, BUILD_ROOT)
    if os.path.isdir(apex_build_root):
        cache_tmp_dir = os.path.join(apex_root, TMP_CACHE)
    else:
        logging.error("You must execute this script inside of the Apex "
                      "local code repository")
        raise ApexBuildException("Invalid path for apex root: {}. Must be "
                                 "invoked from within Apex code directory.".
                                 format(apex_root))
    # Full build pipeline: restore cache, build, then save + prune cache
    unpack_cache(cache_tmp_dir, args.cache_dir)
    build(apex_build_root, args.build_version, args.iso, args.rpms)
    build_cache(cache_tmp_dir, args.cache_dir)
    prune_cache(args.cache_dir)
diff --git a/ci/build.sh b/ci/build.sh
index 31d7ba62..5cd2c28d 100755
--- a/ci/build.sh
+++ b/ci/build.sh
@@ -8,201 +8,9 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-set -xe
-
-display_usage ()
-{
-cat << EOF
-$0 Builds the Apex OPNFV Deployment Toolchain
-
-usage: $0 [ -c cache_dest_dir ] -r release_name [ --iso | --rpms ]
-
-OPTIONS:
- -c cache destination - destination to save tarball of cache
- -r release name/version of the build result
- --iso build the iso (implies RPMs too)
- --rpms build the rpms
- --debug enable debug
- -h help, prints this help text
-
-Example:
-build -c file:///tmp/cache -r dev123
-EOF
-}
-
-APEX_ROOT=$(dirname $(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd))
-CACHE_DEST=""
-CACHE_DIR="${APEX_ROOT}/.cache"
-CACHE_NAME="apex-cache"
-MAKE_TARGETS="images"
-REQUIRED_PKGS="rpm-build python-docutils python2-virtualbmc"
-RELEASE_RPM=""
-
-parse_cmdline() {
- while [ "${1:0:1}" = "-" ]
- do
- case "$1" in
- -h|--help)
- display_usage
- exit 0
- ;;
- -c|--cache-dest)
- CACHE_DEST=${2}
- shift 2
- ;;
- -r|--release)
- RELEASE=${2}
- shift 2
- ;;
- --iso )
- MAKE_TARGETS="iso"
- echo "Building opnfv-apex RPMs and ISO"
- shift 1
- ;;
- --rpms )
- MAKE_TARGETS="rpms"
- echo "Buiding opnfv-apex RPMs"
- shift 1
- ;;
- --release-rpm )
- RELEASE_RPM=" release-rpm"
- echo "Buiding opnfv-apex RPMs"
- shift 1
- ;;
- --debug )
- debug="TRUE"
- echo "Enable debug output"
- shift 1
- ;;
- --build-cache )
- MAKE_TARGETS=""
- echo "Building Cache"
- shift 1
- ;;
- *)
- display_usage
- exit 1
- ;;
- esac
- done
-
-}
-
-run_make() {
- make $MAKE_ARGS -C ${BUILD_DIRECTORY} $1
-}
-
-parse_cmdline "$@"
-
-if [ -z "$BUILD_DIRECTORY" ]; then
- if [ -d "${APEX_ROOT}/build" ]; then
- BUILD_DIRECTORY="${APEX_ROOT}/build"
- else
- echo "Cannot find build directory, please provide BUILD_DIRECTORY environment variable...exiting"
- exit 1
- fi
-elif [ ! -d "$BUILD_DIRECTORY" ]; then
- echo "Provided build directory is invalid: ${BUILD_DIRECTORY} ...exiting"
- exit 1
-fi
-
-# Add release rpm to make targets if defined
-MAKE_TARGETS+=$RELEASE_RPM
-
-# Install build dependencies
-for pkg in $REQUIRED_PKGS; do
- if ! rpm -q $pkg > /dev/null; then
- if ! sudo yum -y install $pkg > /dev/null; then
- echo "Required package $pkg missing and installation failed."
- exit 1
- fi
- fi
-done
-
-if [ -n "$RELEASE" ]; then MAKE_ARGS+="RELEASE=$RELEASE "; fi
-
-# Get the Old Cache and build new cache history file
-if [[ -n "$CACHE_DEST" && -n "$MAKE_TARGETS" ]]; then
- echo "Retrieving Cache"
- if [ -f $CACHE_DEST/${CACHE_NAME}.tgz ]; then
- echo "Cache found at ${CACHE_DEST}/${CACHE_NAME}.tgz"
- rm -rf $CACHE_DIR
- mkdir $CACHE_DIR
- echo "Unpacking Cache to ${CACHE_DIR}"
- tar -xvzf ${CACHE_DEST}/${CACHE_NAME}.tgz -C ${CACHE_DIR} || {
- rm ${CACHE_DEST}/${CACHE_NAME}.tgz
- echo "Cache unpack failed, Will rebuild."
- }
- echo "Cache contents after unpack:"
- ls -al ${CACHE_DIR}
- else
- echo "No Cache Found"
- fi
-fi
-
-# Ensure the build cache dir exists
-if [ ! -d "$CACHE_DIR" ]; then
- rm -rf ${CACHE_DIR}
- echo "Creating Build Cache Directory"
- mkdir ${CACHE_DIR}
-fi
-
-# Conditionally execute RPM build checks if the specs change and target is not rpm or iso
-if [[ "$MAKE_TARGETS" == "images" ]]; then
- commit_file_list=$(git show --pretty="format:" --name-status)
- if git show -s | grep "force-build-rpms"; then
- MAKE_TARGETS+=" rpms"
- elif [[ $commit_file_list == *"A$(printf '\t')"* || $commit_file_list == *build/Makefile* ]]; then
- # Makefile forces all rpms to be checked
- MAKE_TARGETS+=" rpms-check"
- else
- # Spec files are selective
- if [[ $commit_file_list == *build/rpm_specs/opnfv-apex-undercloud.spec* ]]; then
- MAKE_TARGETS+=" undercloud-rpm-check"
- fi
- if [[ $commit_file_list == *build/rpm_specs/opnfv-apex-release.spec* ]]; then
- MAKE_TARGETS+=" release-rpm-check"
- fi
- if [[ $commit_file_list == *build/rpm_specs/opnfv-apex-common.spec* ]]; then
- MAKE_TARGETS+=" common-rpm-check"
- fi
- if [[ $commit_file_list == *build/rpm_specs/opnfv-apex.spec* ]]; then
- MAKE_TARGETS+=" opendaylight-rpm-check"
- fi
- if [[ $commit_file_list == *build/rpm_specs/opnfv-apex-onos.spec* ]]; then
- MAKE_TARGETS+=" onos-rpm-check"
- fi
- fi
-fi
-
-# Make sure python is installed
-if ! rpm -q python34-devel > /dev/null; then
- sudo yum install -y epel-release
- if ! sudo yum install -y python34-devel; then
- echo "Failed to install python34-devel package..."
- exit 1
- fi
-fi
-
-# Execute make against targets
-for t in $MAKE_TARGETS; do
- run_make $t
-done
-
-echo "Build Complete"
-
-# Build new Cache
-if [ -n "$CACHE_DEST" ]; then
- echo "Building Cache"
- ls -lah ${CACHE_DIR}
- # ensure the destination exists
- mkdir -p ${CACHE_DEST}
- # roll the cache tarball
- tar --atime-preserve --dereference -caf ${CACHE_DEST}/${CACHE_NAME}.tgz -C ${CACHE_DIR} .
- if [ -f "${CACHE_DEST}/${CACHE_NAME}.tgz" ]; then
- echo "Cache Build Complete"
- else
- echo "WARN: Cache file did not build correctly"
- fi
-fi
-echo "Complete"
# Resolve the absolute directory of this script so it works from any cwd
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
set -e
# Ensure ansible is present, then install all build dependencies
rpm -q ansible || sudo yum -y install ansible
ansible-playbook --become -i "localhost," -c local "$DIR/../lib/ansible/playbooks/build_dependencies.yml" -vvv
# Clean previous artifacts, then hand off to the python build script;
# "$@" (quoted) preserves arguments containing whitespace, unlike $@
make -C "$DIR/../build" clean
python3 "$DIR/build.py" "$@"
diff --git a/config/build/build_settings.yaml b/config/build/build_settings.yaml
deleted file mode 100644
index 5fd7a4eb..00000000
--- a/config/build/build_settings.yaml
+++ /dev/null
@@ -1,29 +0,0 @@
----
-build_params:
- opendaylight: true
- onos: false
- ovs: true
- odl_puppet: true
-
-opendaylight_config:
- name: "OpenDaylight SFC"
- method: zip
- location: >-
- https://www.dropbox.com/s/6w76eo7loltvvb5/
- openstack.net-virt-sfc-karaf-1.2.1-SNAPSHOT.zip
-
-onos_config:
- name: "ONOS Base Controller"
- method: rpm
- location: https://www.example.com/onos.rpm
-
-ovs_config:
- name: "OVS NSH Build"
- method: rpm
- location: https://www.example.com/ovs_nsh_noarch.rpm
-
-odl_puppet_config:
- name: "Puppet to install ODL SFC"
- method: git
- location: https://github.com/puppet-opendaylight.git
- version: "stable/liberty"
diff --git a/lib/ansible/playbooks/build_dependencies.yml b/lib/ansible/playbooks/build_dependencies.yml
new file mode 100644
index 00000000..dcf2ed94
--- /dev/null
+++ b/lib/ansible/playbooks/build_dependencies.yml
@@ -0,0 +1,21 @@
---
# Installs everything needed to build Apex artifacts on the local host.
# Invoked by ci/build.sh via: ansible-playbook --become -c local ...
- hosts: localhost
  tasks:
    - name: Add rdo-release repo
      yum:
        name: 'https://www.rdoproject.org/repos/rdo-release.rpm'
    - name: Add dependency packages
      yum:
        name: python34,python34-devel,python34-jinja2,
        python34-markupsafe, python2-virtualbmc,
        libguestfs-tools,bsdtar,libvirt,
        python2-oslo-config,python2-debtcollector,
        make, python-pip, python-virtualenv
    - name: Install Virtualization group
      yum:
        name: "@Virtualization Host"
    - name: Install python ipmi from OPNFV artifacts
      yum:
        name: 'http://artifacts.opnfv.org/apex/dependencies/python3-ipmi-0.3.0-1.noarch.rpm'
    # tox drives the python virtualenv test/build environments
    - pip:
        name: tox