summaryrefslogtreecommitdiffstats
path: root/utils
diff options
context:
space:
mode:
Diffstat (limited to 'utils')
-rwxr-xr-xutils/fetch_os_creds.sh14
-rwxr-xr-xutils/jenkins-jnlp-connect.sh4
-rw-r--r--utils/opnfv-artifacts.py244
3 files changed, 257 insertions, 5 deletions
diff --git a/utils/fetch_os_creds.sh b/utils/fetch_os_creds.sh
index a5c601b82..88e6b4a69 100755
--- a/utils/fetch_os_creds.sh
+++ b/utils/fetch_os_creds.sh
@@ -10,7 +10,8 @@
usage() {
- echo "usage: $0 -d <destination> -i <installer_type> -a <installer_ip>" >&2
+ echo "usage: $0 [-v] -d <destination> -i <installer_type> -a <installer_ip>" >&2
+ echo "[-v] Virtualized deployment" >&2
}
info () {
@@ -37,14 +38,15 @@ verify_connectivity() {
error "Can not talk to $ip."
}
-
+: ${DEPLOY_TYPE:=''}
#Get options
-while getopts ":d:i:a:h:" optchar; do
+while getopts ":d:i:a:h:v" optchar; do
case "${optchar}" in
d) dest_path=${OPTARG} ;;
i) installer_type=${OPTARG} ;;
a) installer_ip=${OPTARG} ;;
+ v) DEPLOY_TYPE="virt" ;;
*) echo "Non-option argument: '-${OPTARG}'" >&2
usage
exit 2
@@ -99,6 +101,12 @@ if [ "$installer_type" == "fuel" ]; then
#| grep http | head -1 | cut -d '|' -f 4 | sed 's/v1\/.*/v1\//' | sed 's/ //g') &> /dev/null
#NOTE: this is super ugly sed 's/v1\/.*/v1\//'OS_AUTH_URL
# but sometimes the output of endpoint-list is like this: http://172.30.9.70:8004/v1/%(tenant_id)s
+ # Fuel virtual deployments need a fix
+
+ if [ "$DEPLOY_TYPE" == "virt" ]; then
+ echo "INFO: Changing: internalURL -> publicURL in openrc"
+ sed -i 's/internalURL/publicURL/' $dest_path
+ fi
elif [ "$installer_type" == "apex" ]; then
verify_connectivity $installer_ip
diff --git a/utils/jenkins-jnlp-connect.sh b/utils/jenkins-jnlp-connect.sh
index ec31a4aa7..10ac50b0a 100755
--- a/utils/jenkins-jnlp-connect.sh
+++ b/utils/jenkins-jnlp-connect.sh
@@ -100,8 +100,8 @@ if [[ -f $monitconfdir/jenkins ]]; then
#test for diff
if [[ "$(diff $monitconfdir/jenkins <(echo "\
check process jenkins with pidfile /var/run/$jenkinsuser/jenkins_jnlp_pid
-start program = \"usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'\"
-stop program = \" /bin/bash -c '/bin/kill \$(/bin/cat /var/run/$jenkinsuser/jenkins_jnlp_pid)'\"\
+start program = \"/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'\"
+stop program = \"/bin/bash -c '/bin/kill \$(/bin/cat /var/run/$jenkinsuser/jenkins_jnlp_pid)'\"\
") )" ]]; then
echo "Updating monit config..."
makemonit $@
diff --git a/utils/opnfv-artifacts.py b/utils/opnfv-artifacts.py
new file mode 100644
index 000000000..f826d5ce6
--- /dev/null
+++ b/utils/opnfv-artifacts.py
@@ -0,0 +1,244 @@
+#!/usr/bin/python
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 The Linux Foundation and others
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##############################################################################
+
+"""
+Generate JSON listing of OPNFV Artifacts
+
+This produces a slimmed down version of metadata provided by Google
+Storage for each artifact. Also excludes a large number of uninteresting
+files.
+"""
+
+from apiclient import discovery
+from apiclient.errors import HttpError
+
+import argparse
+import json
+import os
+import sys
+
+api = {
+ 'projects': {},
+ 'docs': {},
+ 'releases': {},
+}
+
+releases = [
+ 'arno.2015.1.0',
+ 'arno.2015.2.0',
+ 'brahmaputra.1.0',
+]
+
+# List of file extensions to filter out
+ignore_extensions = [
+ '.buildinfo',
+ '.woff',
+ '.ttf',
+ '.svg',
+ '.eot',
+ '.pickle',
+ '.doctree',
+ '.js',
+ '.png',
+ '.css',
+ '.gif',
+ '.jpeg',
+ '.jpg',
+ '.bmp',
+]
+
+
+parser = argparse.ArgumentParser(
+ description='OPNFV Artifacts JSON Generator')
+
+parser.add_argument(
+ '-k',
+ dest='key',
+ default='',
+ help='API Key for Google Cloud Storage')
+
+parser.add_argument(
+ '-p',
+ default=None,
+ dest='pretty',
+ action='store_const',
+ const=2,
+ help='pretty print the output')
+
+# Parse and assign arguments
+args = parser.parse_args()
+key = args.key
+pretty_print = args.pretty
+
+
+def output(item, indent=2):
+ print(json.dumps(item, sort_keys=True, indent=indent))
+
+
+def has_gerrit_review(dir_list):
+ """
+ If any directory name in the list is all digits, it is assumed to
+ be a gerrit review number
+ """
+ for d in dir_list:
+ if d.isdigit():
+ return int(d)
+ return False
+
+
+def has_release(dir_list):
+ """
+ Checks if any directory contains a release name
+ """
+ for d in dir_list:
+ if d in releases:
+ return d
+ return False
+
+
+def has_documentation(dir_list):
+ """
+ Checks for a directory specifically named 'docs'
+ """
+ for d in dir_list:
+ if d == 'docs':
+ return True
+ return False
+
+
+# Rename this or modify how gerrit reviews are handled
+def has_logs(gerrit_review):
+ """
+ If a gerrit review exists, create a link to the review
+ """
+ if gerrit_review:
+ return "https://gerrit.opnfv.org/gerrit/#/c/%s" % gerrit_review
+ return False
+
+
+def has_md5hash(item):
+ """
+ If a file has an md5hash available, grab it
+ """
+ if 'md5Hash' in item:
+ return item['md5Hash']
+ return False
+
+
+def has_ignorable_extension(filename):
+ for extension in ignore_extensions:
+ if filename.lower().endswith(extension):
+ return True
+ return False
+
+
+def get_results(key):
+ """
+ Pull down all metadata from artifacts.opnfv.org
+ and store it in projects as:
+ { 'PROJECT': [file ...], }
+ """
+ storage = discovery.build('storage', 'v1', developerKey=key)
+ files = storage.objects().list(bucket='artifacts.opnfv.org',
+ fields='nextPageToken,'
+ 'items('
+ 'name,'
+ 'mediaLink,'
+ 'md5Hash,'
+ 'updated,'
+ 'contentType,'
+ 'size'
+ ')')
+ while (files is not None):
+ sites = files.execute()
+
+ for site in sites['items']:
+ # Filter out unneeded files (js, images, css, buildinfo, etc)
+ if has_ignorable_extension(site['name']):
+ continue
+
+ # Split /foo/bar/ into ['foo', 'bar'] and remove any extra
+ # slashes (ex. /foo//bar/)
+ site_split = filter(None, site['name'].split('/'))
+
+ # Don't do anything if we aren't given files multiple
+ # directories deep
+ if len(site_split) < 2:
+ continue
+
+ project = site_split[0]
+ name = '/'.join(site_split[1:])
+ proxy = "http://build.opnfv.org/artifacts.opnfv.org/%s" % site['name']
+ if name.endswith('.html'):
+ href = "http://artifacts.opnfv.org/%s" % site['name']
+ href_type = 'view'
+ else:
+ href = site['mediaLink']
+ href_type = 'download'
+ md5 = has_md5hash(site)
+
+ gerrit = has_gerrit_review(site_split)
+ logs = False # has_logs(gerrit)
+ documentation = has_documentation(site_split)
+ release = has_release(site_split)
+
+ category = 'project'
+ if gerrit:
+ category = 'gerrit'
+ elif release:
+ category = 'release'
+ elif logs:
+ category = 'logs'
+
+ metadata = {
+ 'category': category,
+ 'gerritreview': gerrit,
+ 'release': release,
+ 'name': name,
+ 'size': site['size'],
+ 'time': site['updated'],
+ 'contentType': site['contentType'],
+ 'href': href,
+ 'href_type': href_type,
+ 'proxy_href': proxy,
+ 'md5hash': md5,
+ }
+
+ if project in releases:
+ if project not in api['releases']:
+ api['releases'][project] = [metadata]
+ else:
+ api['releases'][project].append(metadata)
+ else:
+ if project not in api['projects']:
+ api['projects'][project] = [metadata]
+ else:
+ api['projects'][project].append(metadata)
+
+ files = storage.objects().list_next(files, sites)
+
+ return api
+
+
+# Fail if there is an invalid response from GCE
+try:
+ js = get_results(key)
+except HttpError as e:
+ print >> sys.stderr, e
+ exit(1)
+
+output(js, indent=pretty_print)