diff options
author | Tim Rozet <trozet@redhat.com> | 2016-12-06 15:41:43 +0000 |
---|---|---|
committer | Gerrit Code Review <gerrit@opnfv.org> | 2016-12-06 15:41:43 +0000 |
commit | 9131b0a81c22e713b4d4798d7ff32254be3b99e3 (patch) | |
tree | 2e23b97ec3c37544f424859b31ec6368870c997d | |
parent | 844066d9de6f6b8d2968a9abb68ea37cbcb1c661 (diff) | |
parent | f5208cf13b61b41ecd339546c2d076d0d098d1e5 (diff) |
Merge "Updates to Caching"
-rw-r--r-- | build/cache.sh | 29 | ||||
-rwxr-xr-x | build/overcloud-opendaylight.sh | 3 | ||||
-rwxr-xr-x | build/undercloud.sh | 4 | ||||
-rwxr-xr-x | ci/build.sh | 41 |
4 files changed, 60 insertions, 17 deletions
diff --git a/build/cache.sh b/build/cache.sh index 5c7e5e73..0c2da399 100644 --- a/build/cache.sh +++ b/build/cache.sh @@ -8,12 +8,14 @@ # http://www.apache.org/licenses/LICENSE-2.0 ############################################################################## -CACHE_DIR="$(pwd)/cache" +CACHE_DIR="$(pwd)/.cache" +CACHE_HISTORY=".cache_history" # Make sure the cache dir exists function cache_dir { + if [ -f $CACHE_DIR ]; then rm -rf $CACHE_DIR; fi if [ ! -d $CACHE_DIR/ ]; then mkdir $CACHE_DIR/; fi - if [ ! -f $CACHE_DIR/.cache ]; then touch $CACHE_DIR/.cache; fi + if [ ! -f $CACHE_DIR/$CACHE_HISTORY ]; then touch $CACHE_DIR/$CACHE_HISTORY; fi echo "Cache Dir: $CACHE_DIR" } @@ -33,8 +35,8 @@ function curl_file { until curl -C- -L -o $CACHE_DIR/$2 $1 || (( count++ >= 20 )); do echo -n '' #do nothing, we just want to loop done - sed -i "/$2/d" $CACHE_DIR/.cache - echo "$(md5sum $CACHE_DIR/$2) $2" >> $CACHE_DIR/.cache + sed -i "/$2/d" $CACHE_DIR/$CACHE_HISTORY + echo "$(md5sum $CACHE_DIR/$2) $2" >> $CACHE_DIR/$CACHE_HISTORY } # $1 = download url @@ -63,9 +65,14 @@ function populate_cache { echo "Got empty MD5 from remote for $filename, skipping MD5 check" curl_file $1 $filename else - my_md5=$(grep ${filename} $CACHE_DIR/.cache | awk {'print $1'}) + my_md5=$(grep ${filename} $CACHE_HISTORY | awk {'print $1'}) + if [ -z "$my_md5" ]; then + echo "${filename} missing in $CACHE_HISTORY file. Calculating md5..." + my_md5=$(md5sum ${CACHE_DIR}/${filename} | awk {'print $1'}) + fi if [ "$remote_md5" != "$my_md5" ]; then - echo "MD5 mismatch, cache file MD5 is ${my_md5}" + echo "MD5 mismatch, local cache file MD5 is ${my_md5}" + echo " remote cache file MD5 is ${remote_md5}" echo "Downloading $filename" curl_file $1 $filename else @@ -76,6 +83,14 @@ } # $1 = filename to get from cache +# $2 = destination function get_cached_file { - cp -f $CACHE_DIR/$1 . + if [ ! -f $CACHE_DIR/$1 ]; then + echo "Cache file: ${CACHE_DIR}/$1 is not in cache." 
+ else + echo "Cache file: Using cached file ${CACHE_DIR}/$1." + dest='.' + if [ -n $2 ]; then dest=$2; fi + cp -f $CACHE_DIR/$1 $dest + fi } diff --git a/build/overcloud-opendaylight.sh b/build/overcloud-opendaylight.sh index f7f8958d..4f81a300 100755 --- a/build/overcloud-opendaylight.sh +++ b/build/overcloud-opendaylight.sh @@ -64,8 +64,7 @@ priority=1 EOF # SDNVPN - Copy tunnel setup script -wget https://raw.githubusercontent.com/openstack/fuel-plugin-opendaylight/brahmaputra-sr2/deployment_scripts/puppet/modules/opendaylight/templates/setup_TEPs.py - +curl -fO https://raw.githubusercontent.com/openstack/fuel-plugin-opendaylight/brahmaputra-sr2/deployment_scripts/puppet/modules/opendaylight/templates/setup_TEPs.py # install ODL packages # install Jolokia for ODL HA diff --git a/build/undercloud.sh b/build/undercloud.sh index 39885faf..d0bab527 100755 --- a/build/undercloud.sh +++ b/build/undercloud.sh @@ -40,7 +40,7 @@ LIBGUESTFS_BACKEND=direct virt-customize \ --run-command "cd /usr/share && rm -rf openstack-tripleo-heat-templates && tar xzf opnfv-tht.tar.gz" \ --run-command "sed -i '/ControllerEnableCephStorage/c\\ ControllerEnableCephStorage: true' /usr/share/openstack-tripleo-heat-templates/environments/storage-environment.yaml" \ --run-command "sed -i '/ComputeEnableCephStorage/c\\ ComputeEnableCephStorage: true' /usr/share/openstack-tripleo-heat-templates/environments/storage-environment.yaml" \ - --run-command "curl http://download.opensuse.org/repositories/Openwsman/CentOS_CentOS-7/Openwsman.repo > /etc/yum.repos.d/wsman.repo" \ + --run-command "curl -f http://download.opensuse.org/repositories/Openwsman/CentOS_CentOS-7/Openwsman.repo > /etc/yum.repos.d/wsman.repo" \ --run-command "yum update -y openwsman*" \ --run-command "cp /usr/share/instack-undercloud/undercloud.conf.sample /home/stack/undercloud.conf && chown stack:stack /home/stack/undercloud.conf" \ --upload ../opnfv-environment.yaml:/home/stack/ \ @@ -63,7 +63,7 @@ LIBGUESTFS_BACKEND=direct 
virt-customize \ -a undercloud_build.qcow2 # Add custom IPA to allow kernel params -wget https://raw.githubusercontent.com/trozet/ironic-python-agent/opnfv_kernel/ironic_python_agent/extensions/image.py +curl -fO https://raw.githubusercontent.com/trozet/ironic-python-agent/opnfv_kernel/ironic_python_agent/extensions/image.py python3 -c 'import py_compile; py_compile.compile("image.py", cfile="image.pyc")' # Add performance image scripts diff --git a/ci/build.sh b/ci/build.sh index 1b66d557..0536cf91 100755 --- a/ci/build.sh +++ b/ci/build.sh @@ -18,7 +18,7 @@ $0 Builds the Apex OPNFV Deployment Toolchain usage: $0 [ -c cache_dir ] -r release_name [ --iso | --rpms ] OPTIONS: - -c cache destination - directory of cached files, defaults to ./cache + -c cache destination - destination to save tarball of cache -r release name/version of the build result --iso build the iso (implies RPMs too) --rpms build the rpms @@ -46,7 +46,7 @@ parse_cmdline() { display_usage exit 0 ;; - -c|--cache-dir) + -c|--cache-dest) CACHE_DEST=${2} shift 2 ;; @@ -74,6 +74,11 @@ parse_cmdline() { echo "Enable debug output" shift 1 ;; + --build-cache ) + MAKE_TARGETS="" + echo "Building Cache" + shift 1 + ;; *) display_usage exit 1 @@ -105,20 +110,35 @@ done if [ -n "$RELEASE" ]; then MAKE_ARGS+="RELEASE=$RELEASE "; fi # Get the Old Cache -if [ -n "$CACHE_DEST" ]; then +if [[ -n "$CACHE_DEST" && -n "$MAKE_TARGETS" ]]; then echo "Retrieving Cache" if [ -f $CACHE_DEST/${CACHE_NAME}.tgz ]; then echo "Cache found at ${CACHE_DEST}/${CACHE_NAME}.tgz" rm -rf $BUILD_BASE/$CACHE_DIR echo "Unpacking Cache to $BUILD_BASE" tar -xvzf $CACHE_DEST/${CACHE_NAME}.tgz -C ${BUILD_BASE} + if [ -f $BUILD_BASE/.cache ]; then + echo "Rebuilding .cache file" + if [ ! 
-d $BUILD_BASE/$CACHE_DIR ]; then + mkdir $BUILD_BASE/$CACHE_DIR + fi + for i in $(ls $BUILD_BASE/$CACHE_DIR); do + grep $i $BUILD_BASE/.cache >> $BUILD_BASE/$CACHE_DIR/.cache + done + fi echo "Cache contents after unpack:" ls -l $BUILD_BASE/$CACHE_DIR - elif [ ! -d $BUILD_BASE/$CACHE_DIR ]; then - mkdir $BUILD_BASE/$CACHE_DIR + else + echo "No Cache Found" fi fi +# Ensure the build cache dir exists +if [ ! -d "$BUILD_BASE/$CACHE_DIR" ]; then + echo "Creating Build Cache Directory" + mkdir $BUILD_BASE/$CACHE_DIR +fi + # Conditionally execute RPM build checks if the specs change and target is not rpm or iso if [[ "$MAKE_TARGETS" == "images" ]]; then commit_file_list=$(git show --pretty="format:" --name-status) @@ -166,8 +186,17 @@ echo "Build Complete" # Build new Cache if [ -n "$CACHE_DEST" ]; then echo "Building Cache" + ls -lh $BUILD_BASE/$CACHE_DIR/ + # ensure the destination exists if [ ! -d $CACHE_DEST ]; then mkdir -p $CACHE_DEST; fi - tar --atime-preserve --dereference -C $BUILD_BASE -caf $BUILD_BASE/${CACHE_NAME}.tgz $CACHE_DIR -C ${CACHE_DEST}/ + # ensure a sub cache dir exists to mirror the build base for extraction + if [ ! -d $BUILD_BASE/$CACHE_DIR/$CACHE_DIR/ ]; then mkdir -p $BUILD_BASE/$CACHE_DIR/$CACHE_DIR/; fi + # move directly cached files to cache dir for future extraction + for i in $(cat $BUILD_BASE/$CACHE_DIR/.cache | awk '{ print $2 }'); do + if [ -f $i ]; then mv $i $BUILD_BASE/$CACHE_DIR/$CACHE_DIR/; fi + done + # roll the cache tarball + tar --atime-preserve --dereference -C ${BUILD_BASE}/$CACHE_DIR -caf $CACHE_DEST/${CACHE_NAME}.tgz . if [ -f "${CACHE_DEST}/${CACHE_NAME}.tgz" ]; then echo "Cache Build Complete" else |