-rw-r--r--  jjb/apex/apex.yml              |  11
-rw-r--r--  jjb/releng-jobs.yaml           |  17
-rw-r--r--  jjb/releng-macros.yaml         |  22
-rwxr-xr-x  utils/fetch_os_creds.sh        |  14
-rwxr-xr-x  utils/jenkins-jnlp-connect.sh  |   4
-rw-r--r--  utils/opnfv-artifacts.py       | 244
6 files changed, 303 insertions, 9 deletions
diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index b304f2270..6f2f441af 100644
--- a/jjb/apex/apex.yml
+++ b/jjb/apex/apex.yml
@@ -15,7 +15,7 @@
             branch: 'master'
             gs-pathname: ''
             block-stream: 'brahmaputra'
-            slave: 'opnfv-jump-1'
+            slave: 'intel-pod7'
         - brahmaputra:
             branch: 'stable/brahmaputra'
             gs-pathname: '/brahmaputra'
@@ -167,7 +167,7 @@
         - trigger-builds:
            - project: 'apex-deploy-{platform}-{scenario}-{stream}'
              predefined-parameters:
                OPNFV_CLEAN='yes'
-               OPNFV_CLEAN='yes'
+               OPNFV_CLEAN=yes
              git-revision: false
              block: true
         - trigger-builds:
@@ -229,8 +229,9 @@
         - 'apex-build'
         - trigger-builds:
            - project: 'apex-deploy-virtual-os-odl_l2-nofeature-ha-{stream}'
-             predefined-parameters:
+             predefined-parameters: |
               BUILD_DIRECTORY=apex-build-{stream}/build_output
+              OPNFV_CLEAN=yes
             git-revision: false
             block: true
         - 'apex-upload-artifact'
@@ -502,9 +503,11 @@
 else
     export OPNFV_ARTIFACT_VERSION=${ARTIFACT_VERSION}
 fi
+# clean for stable but doesn't matter for master
+sudo opnfv-clean || echo ""
 # start the build
 cd $WORKSPACE/ci
-./build.sh -v $OPNFV_ARTIFACT_VERSION $BUILD_ARGS
+./build.sh $BUILD_ARGS
 RPM_VERSION=$(grep Version $BUILD_DIRECTORY/../build/opnfv-apex.spec | awk '{ print $2 }')-$(echo $OPNFV_ARTIFACT_VERSION | tr -d '_-')
 # list the contents of BUILD_OUTPUT directory
 ls -al $BUILD_DIRECTORY
diff --git a/jjb/releng-jobs.yaml b/jjb/releng-jobs.yaml
index 0b36f0775..655660bb9 100644
--- a/jjb/releng-jobs.yaml
+++ b/jjb/releng-jobs.yaml
@@ -3,6 +3,7 @@
     jobs:
         - 'builder-verify-jjb'
         - 'builder-merge'
+        - 'artifacts-api'

     project: 'releng'

@@ -99,3 +100,19 @@
             git pull
             jenkins-jobs update --delete-old jjb/

+- job-template:
+    name: 'artifacts-api'
+
+    # Generate and upload the JSON file to used for artifacts site
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: 'master'
+
+    triggers:
+        - timed: '@hourly'
+
+    builders:
+        - build-and-upload-artifacts-json-api
diff --git a/jjb/releng-macros.yaml b/jjb/releng-macros.yaml
index b43a76d5f..625e10a4d 100644
--- a/jjb/releng-macros.yaml
+++ b/jjb/releng-macros.yaml
@@ -299,6 +299,28 @@
             fi

 - builder:
+    name: build-and-upload-artifacts-json-api
+    builders:
+        - shell: |
+            #!/bin/bash
+            set -o errexit
+            set -o pipefail
+
+            echo "Generating Artifacts API ..."
+            virtualenv $WORKSPACE/releng_artifacts
+            source $WORKSPACE/releng_artifacts/bin/activate
+
+            # install python packages
+            easy_install -U setuptools
+            pip install google-api-python-client
+
+            # generate and upload index file
+            python ./releng/utils/opnfv-artifacts.py > index.json
+            gsutil cp index.json gs://artifacts.opnfv.org/index.json
+
+            deactivate
+
+- builder:
     name: upload-review-docs
     builders:
         - build-html-and-pdf-docs-output
diff --git a/utils/fetch_os_creds.sh b/utils/fetch_os_creds.sh
index a5c601b82..88e6b4a69 100755
--- a/utils/fetch_os_creds.sh
+++ b/utils/fetch_os_creds.sh
@@ -10,7 +10,8 @@

 usage() {
-    echo "usage: $0 -d <destination> -i <installer_type> -a <installer_ip>" >&2
+    echo "usage: $0 [-v] -d <destination> -i <installer_type> -a <installer_ip>" >&2
+    echo "[-v] Virtualized deployment" >&2
 }

 info () {
@@ -37,14 +38,15 @@ verify_connectivity() {
     error "Can not talk to $ip."
 }
-
+: ${DEPLOY_TYPE:=''}
 #Get options
-while getopts ":d:i:a:h:" optchar; do
+while getopts ":d:i:a:h:v" optchar; do
     case "${optchar}" in
         d) dest_path=${OPTARG} ;;
         i) installer_type=${OPTARG} ;;
         a) installer_ip=${OPTARG} ;;
+        v) DEPLOY_TYPE="virt" ;;
         *) echo "Non-option argument: '-${OPTARG}'" >&2
            usage
            exit 2
@@ -99,6 +101,12 @@ if [ "$installer_type" == "fuel" ]; then
      #| grep http | head -1 | cut -d '|' -f 4 | sed 's/v1\/.*/v1\//' | sed 's/ //g') &> /dev/null
      #NOTE: this is super ugly sed 's/v1\/.*/v1\//'OS_AUTH_URL
      # but sometimes the output of endpoint-list is like this: http://172.30.9.70:8004/v1/%(tenant_id)s
+    # Fuel virtual need a fix
+
+    if [ "$DEPLOY_TYPE" == "virt" ]; then
+        echo "INFO: Changing: internalURL -> publicURL in openrc"
+        sed -i 's/internalURL/publicURL/' $dest_path
+    fi

 elif [ "$installer_type" == "apex" ]; then

     verify_connectivity $installer_ip
diff --git a/utils/jenkins-jnlp-connect.sh b/utils/jenkins-jnlp-connect.sh
index ec31a4aa7..10ac50b0a 100755
--- a/utils/jenkins-jnlp-connect.sh
+++ b/utils/jenkins-jnlp-connect.sh
@@ -100,8 +100,8 @@ if [[ -f $monitconfdir/jenkins ]]; then
 #test for diff
 if [[ "$(diff $monitconfdir/jenkins <(echo "\
 check process jenkins with pidfile /var/run/$jenkinsuser/jenkins_jnlp_pid
-start program = \"usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'\"
-stop program = \" /bin/bash -c '/bin/kill \$(/bin/cat /var/run/$jenkinsuser/jenkins_jnlp_pid)'\"\
+start program = \"/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'\"
+stop program = \"/bin/bash -c '/bin/kill \$(/bin/cat /var/run/$jenkinsuser/jenkins_jnlp_pid)'\"\
 ") )" ]]; then
 echo "Updating monit config..."
 makemonit $@
diff --git a/utils/opnfv-artifacts.py b/utils/opnfv-artifacts.py
new file mode 100644
index 000000000..f826d5ce6
--- /dev/null
+++ b/utils/opnfv-artifacts.py
@@ -0,0 +1,244 @@
+#!/usr/bin/python
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 The Linux Foundation and others
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##############################################################################
+
+"""
+Generate JSON listing of OPNFV Artifacts
+
+This produces a slimmed down version of metadata provided by Google
+Storage for each artifact. Also excludes a large number of uninteresting
+files.
+"""
+
+from apiclient import discovery
+from apiclient.errors import HttpError
+
+import argparse
+import json
+import os
+import sys
+
+api = {
+    'projects': {},
+    'docs': {},
+    'releases': {},
+}
+
+releases = [
+    'arno.2015.1.0',
+    'arno.2015.2.0',
+    'brahmaputra.1.0',
+]
+
+# List of file extensions to filter out
+ignore_extensions = [
+    '.buildinfo',
+    '.woff',
+    '.ttf',
+    '.svg',
+    '.eot',
+    '.pickle',
+    '.doctree',
+    '.js',
+    '.png',
+    '.css',
+    '.gif',
+    '.jpeg',
+    '.jpg',
+    '.bmp',
+]
+
+
+parser = argparse.ArgumentParser(
+    description='OPNFV Artifacts JSON Generator')
+
+parser.add_argument(
+    '-k',
+    dest='key',
+    default='',
+    help='API Key for Google Cloud Storage')
+
+parser.add_argument(
+    '-p',
+    default=None,
+    dest='pretty',
+    action='store_const',
+    const=2,
+    help='pretty print the output')
+
+# Parse and assign arguments
+args = parser.parse_args()
+key = args.key
+pretty_print = args.pretty
+
+
+def output(item, indent=2):
+    print(json.dumps(item, sort_keys=True, indent=indent))
+
+
+def has_gerrit_review(dir_list):
+    """
+    If a directory contains an integer, it is assumed to be a gerrit
+    review number
+    """
+    for d in dir_list:
+        if d.isdigit():
+            return int(d)
+    return False
+
+
+def has_release(dir_list):
+    """
+    Checks if any directory contains a release name
+    """
+    for d in dir_list:
+        if d in releases:
+            return d
+    return False
+
+
+def has_documentation(dir_list):
+    """
+    Checks for a directory specifically named 'docs'
+    """
+    for d in dir_list:
+        if d == 'docs':
+            return True
+    return False
+
+
+# Rename this or modify how gerrit review are handled
+def has_logs(gerrit_review):
+    """
+    If a gerrit review exists, create a link to the review
+    """
+    if gerrit_review:
+        return "https://gerrit.opnfv.org/gerrit/#/c/%s" % gerrit_review
+    return False
+
+
+def has_md5hash(item):
+    """
+    If a file has an md5hash available, grab it
+    """
+    if 'md5Hash' in item:
+        return item['md5Hash']
+    return False
+
+
+def has_ignorable_extension(filename):
+    for extension in ignore_extensions:
+        if filename.lower().endswith(extension):
+            return True
+    return False
+
+
+def get_results(key):
+    """
+    Pull down all metadata from artifacts.opnfv.org
+    and store it in projects as:
+    { 'PROJECT': [file ...], }
+    """
+    storage = discovery.build('storage', 'v1', developerKey=key)
+    files = storage.objects().list(bucket='artifacts.opnfv.org',
+                                   fields='nextPageToken,'
+                                          'items('
+                                              'name,'
+                                              'mediaLink,'
+                                              'md5Hash,'
+                                              'updated,'
+                                              'contentType,'
+                                              'size'
+                                          ')')
+    while (files is not None):
+        sites = files.execute()
+
+        for site in sites['items']:
+            # Filter out unneeded files (js, images, css, buildinfo, etc)
+            if has_ignorable_extension(site['name']):
+                continue
+
+            # Split /foo/bar/ into ['foo', 'bar'] and remove any extra
+            # slashes (ex. /foo//bar/)
+            site_split = filter(None, site['name'].split('/'))
+
+            # Don't do anything if we aren't given files multiple
+            # directories deep
+            if len(site_split) < 2:
+                continue
+
+            project = site_split[0]
+            name = '/'.join(site_split[1:])
+            proxy = "http://build.opnfv.org/artifacts.opnfv.org/%s" % site['name']
+            if name.endswith('.html'):
+                href = "http://artifacts.opnfv.org/%s" % site['name']
+                href_type = 'view'
+            else:
+                href = site['mediaLink']
+                href_type = 'download'
+            md5 = has_md5hash(site)
+
+            gerrit = has_gerrit_review(site_split)
+            logs = False  # has_logs(gerrit)
+            documentation = has_documentation(site_split)
+            release = has_release(site_split)
+
+            category = 'project'
+            if gerrit:
+                category = 'gerrit'
+            elif release:
+                category = 'release'
+            elif logs:
+                category = 'logs'
+
+            metadata = {
+                'category': category,
+                'gerritreview': gerrit,
+                'release': release,
+                'name': name,
+                'size': site['size'],
+                'time': site['updated'],
+                'contentType': site['contentType'],
+                'href': href,
+                'href_type': href_type,
+                'proxy_href': proxy,
+                'md5hash': md5,
+            }
+
+            if project in releases:
+                if project not in api['releases']:
+                    api['releases'][project] = [metadata]
+                else:
+                    api['releases'][project].append(metadata)
+            else:
+                if project not in api['projects']:
+                    api['projects'][project] = [metadata]
+                else:
+                    api['projects'][project].append(metadata)
+
+        files = storage.objects().list_next(files, sites)
+
+    return api
+
+
+# Fail if there is an invalid response from GCE
+try:
+    js = get_results(key)
+except HttpError as e:
+    print >> sys.stderr, e
+    exit(1)
+
+output(js, indent=pretty_print)
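
For local testing, the steps the new build-and-upload-artifacts-json-api macro runs on a Jenkins slave can be reproduced by hand. The sketch below is illustrative only: it assumes a Python 2 virtualenv and an exported GCS_API_KEY variable (a hypothetical name for the Google Cloud Storage API key the -k option expects), and it writes index.json locally instead of uploading to gs://artifacts.opnfv.org.

#!/bin/bash
# Local dry run of the artifacts-api builder (sketch, not the production job).
set -o errexit
set -o pipefail

virtualenv releng_artifacts              # isolated environment, as in the macro
source releng_artifacts/bin/activate
pip install google-api-python-client     # client library imported by opnfv-artifacts.py

# -k passes the Google Cloud Storage API key, -p pretty-prints the JSON
python utils/opnfv-artifacts.py -k "$GCS_API_KEY" -p > index.json

head index.json                          # inspect locally; the Jenkins job uploads with gsutil instead
deactivate

Similarly, the new -v switch in fetch_os_creds.sh targets virtual deployments, e.g. ./utils/fetch_os_creds.sh -v -d /tmp/openrc -i fuel -a 10.20.0.2 (destination path and installer IP are placeholders); with -v the fetched openrc is rewritten to use publicURL endpoints.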