path: root/jjb/fuel/fuel-build.sh
blob: c66dc3d8d83fe0b71627511f5cece0ffe22266a9
#!/bin/bash
# SPDX-license-identifier: Apache-2.0
##############################################################################
# Copyright (c) 2016 Ericsson AB and others.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
set -o errexit
set -o nounset
set -o pipefail

export TERM="vt220"
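# TERM is exported for tools run later in the build that expect a terminal
# type to be set (assumption; the script itself only exports it here).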

cd $WORKSPACE

# remove expired items from the cache
test -f $WORKSPACE/ci/clean_cache.sh && $WORKSPACE/ci/clean_cache.sh $CACHE_DIRECTORY

LATEST_ISO_PROPERTIES=$WORKSPACE/latest.iso.properties
if [[ "$JOB_NAME" =~ "daily" ]]; then
    # check to see if we already have an artifact on artifacts.opnfv.org
    # for this commit during daily builds
    echo "Checking to see if we already built and stored Fuel ISO for this commit"

    curl -s -o $LATEST_ISO_PROPERTIES http://$GS_URL/latest.properties 2>/dev/null
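    # latest.properties is expected to hold key=value pairs mirroring the
    # opnfv.properties file written at the end of this script, e.g.
    # OPNFV_GIT_SHA1=<sha1> and OPNFV_ARTIFACT_URL=<iso location>.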

    # get metadata of latest ISO
    LATEST_ISO_SHA1=$(grep OPNFV_GIT_SHA1 $LATEST_ISO_PROPERTIES | cut -d'=' -f2)
    LATEST_ISO_URL=$(grep OPNFV_ARTIFACT_URL $LATEST_ISO_PROPERTIES | cut -d'=' -f2)
else
    LATEST_ISO_SHA1=none
fi

# get current SHA1
CURRENT_SHA1=$(git rev-parse HEAD)

# default FORCE_BUILD to false when the job does not set it (e.g. non-daily builds)
FORCE_BUILD=${FORCE_BUILD:-false}

if [[ "$CURRENT_SHA1" == "$LATEST_ISO_SHA1" && "$FORCE_BUILD" == "false" ]]; then
    echo "***************************************************"
    echo "   An ISO has already been built for this commit"
    echo "   $LATEST_ISO_URL"
    echo "***************************************************"
#    echo "Nothing new to build. Exiting."
#    touch $WORKSPACE/.noupload
#    exit 0
else
    echo "This commit has not been built yet or forced build! Proceeding with the build."
    /bin/rm -f $LATEST_ISO_PROPERTIES
    echo
fi

# log info to console
echo "Starting the build of $INSTALLER_TYPE. This could take some time..."
echo "--------------------------------------------------------"
echo

# create the cache directory if it doesn't exist
mkdir -p $CACHE_DIRECTORY

# set OPNFV_ARTIFACT_VERSION
if [[ "$JOB_NAME" =~ "merge" ]]; then
    echo "Building Fuel ISO for a merged change"
    export OPNFV_ARTIFACT_VERSION="gerrit-$GERRIT_CHANGE_NUMBER"
else
    export OPNFV_ARTIFACT_VERSION=$(date -u +"%Y-%m-%d_%H-%M-%S")
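    # e.g. OPNFV_ARTIFACT_VERSION=2017-01-31_12-00-00 (UTC build timestamp)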
fi

NOCACHE_PATTERN="verify: no-cache"
if [[ "$JOB_NAME" =~ "verify" && "$GERRIT_CHANGE_COMMIT_MESSAGE" =~ "$NOCACHE_PATTERN" ]]; then
    echo "The cache will not be used for this build!"
    NOCACHE_ARG="-f P"
fi
NOCACHE_ARG=${NOCACHE_ARG:-}
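# When set, NOCACHE_ARG is passed through to ci/build.sh below so that the
# local cache is not used for this build.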

# start the build
cd $WORKSPACE/ci
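# Assumed build.sh interface, inferred from the arguments passed below:
#   -v <version>    artifact version string
#   -c <cache URI>  location of the build cache
#   <build dir>     output directory for the ISO and other build artifacts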
./build.sh -v $OPNFV_ARTIFACT_VERSION $NOCACHE_ARG -c file://$CACHE_DIRECTORY $BUILD_DIRECTORY

# list the build artifacts
ls -al $BUILD_DIRECTORY

# save information about the artifact to a properties file
(
    echo "OPNFV_ARTIFACT_VERSION=$OPNFV_ARTIFACT_VERSION"
    echo "OPNFV_GIT_URL=$(git config --get remote.origin.url)"
    echo "OPNFV_GIT_SHA1=$(git rev-parse HEAD)"
    echo "OPNFV_ARTIFACT_URL=$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso"
    echo "OPNFV_ARTIFACT_SHA512SUM=$(sha512sum $BUILD_DIRECTORY/opnfv-$OPNFV_ARTIFACT_VERSION.iso | cut -d' ' -f1)"
    echo "OPNFV_BUILD_URL=$BUILD_URL"
) > $WORKSPACE/opnfv.properties

echo
echo "--------------------------------------------------------"
echo "Done!"

    def format_substitution(self, string):
        if isinstance(string, basestring):
            return string
        else:
            return yaml.dump(string, default_flow_style=False)

    def parse_interface_tag(self, tag):
        # Remove 'interface(' prefix, trailing ')' and split arguments
        args = tag[len('interface('):].rstrip(')').split(',')
        if len(args) == 1 and not args[0]:
            err('No arguments for interface().')
        elif len(args) == 2 and (not args[0] or not args[1]):
            err('Empty argument for interface().')
        elif len(args) > 2:
            err('Too many arguments for interface().')
        else:
            return args

    def get_interface_from_network(self, interfaces, network):
        nics = self.base[interfaces]
        for nic in nics:
            if network in nics[nic]:
                return nic
        err('Network not found: %s' % network)

    def get_role_interfaces(self, role):
        nodes = self.base['nodes']
        for node in nodes:
            if role in node['role']:
                return node['interfaces']
        err('Role not found: %s' % role)

    def lookup_interface(self, args):
        nodes = self.base['nodes']
        if len(args) == 1:
            interfaces = nodes[0]['interfaces']
        if len(args) == 2:
            interfaces = self.get_role_interfaces(args[1])
        return self.get_interface_from_network(interfaces, args[0])

    def parse_include_tag(self, tag):
        # Remove 'include(' prefix and trailing ')'
        filename = tag[len('include('):].rstrip(')')
        if not filename:
            err('No argument for include().')
        return filename

    def include_file(self, filename):
        fragment = self.load_yaml(filename)
        return yaml.dump(fragment, default_flow_style=False)

    def parse_tag(self, tag, indent):
        fragment = ''
        if 'interface(' in tag:
            args = self.parse_interface_tag(tag)
            fragment = self.lookup_interface(args)
        elif 'include(' in tag:
            filename = self.parse_include_tag(tag)
            fragment = self.include_file(filename)
        else:
            path = tag.split(DELIMITER)
            fragment = self.base
            for i in path:
                if i in fragment:
                    fragment = fragment.get(i)
                else:
                    err('Error: key "%s" does not exist in base YAML file' % i)
            fragment = self.format_substitution(fragment)
        return self.format_fragment(fragment, indent)

    def run(self):
        result = ''
        regex = re.compile(re.escape(TAG_START) + r'([a-z].+)' + re.escape(TAG_END),
                           flags=re.IGNORECASE)
        with io.open(self.template_file) as f:
            for line in f:
                indent = self.get_indent(line)
                result += re.sub(regex,
                                 lambda match: self.parse_tag(match.group(1), indent),
                                 line)
        self.save_yaml(self.output_file, result)


def parse_arguments():
    description = '''Process 'template_file' using 'base_file' as source for template variable substitution and write the results to 'output_file'.'''
    parser = ArgParser(prog='python %s' % __file__,
                       description=description)
    parser.add_argument('base_file', help='Base YAML filename')
    parser.add_argument('template_file', help='Fragment filename')
    parser.add_argument('output_file', help='Output filename')
    args = parser.parse_args()
    return (args.base_file, args.template_file, args.output_file)


def main():
    base_file, template_file, output_file = parse_arguments()
    templater = Templater(base_file, template_file, output_file)
    templater.run()


if __name__ == '__main__':
    main()
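A minimal invocation sketch for the templater fragment above: the three positional
arguments follow parse_arguments(); the script name templater.py and the YAML file
names are placeholders, not taken from the fragment.

    # Hypothetical command line; file names are assumptions for illustration.
    python templater.py base.yaml fragment_template.yaml output.yaml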