Diffstat (limited to 'utils')
-rwxr-xr-x                 utils/docs-build.sh                                              | 234
-rwxr-xr-x [-rw-r--r--]    utils/gpg_import_key.sh                                          |  32
-rwxr-xr-x                 utils/jenkins-jnlp-connect.sh                                    |  12
-rwxr-xr-x                 utils/retention_script.sh                                        |  39
-rw-r--r--                 utils/test/reporting/functest/reporting-status.py                |  68
-rw-r--r--                 utils/test/reporting/functest/reporting-tempest.py               |  71
-rw-r--r--                 utils/test/reporting/functest/reporting-vims.py                  |  68
-rw-r--r--                 utils/test/reporting/functest/reportingConf.py                   |   8
-rw-r--r--                 utils/test/reporting/functest/reportingUtils.py                  |  38
-rw-r--r--                 utils/test/reporting/functest/template/index-status-tmpl.html    |   2
-rw-r--r--                 utils/test/reporting/functest/testCase.py                        |  58
-rw-r--r--                 utils/test/result_collection_api/update/README.md                | 158
-rw-r--r--                 utils/test/result_collection_api/update/templates/__init__.py    |   0
-rw-r--r--                 utils/test/result_collection_api/update/templates/backup_mongodb.py (renamed from utils/test/result_collection_api/update/backup.py)      |   2
-rw-r--r--                 utils/test/result_collection_api/update/templates/changes_in_mongodb.py (renamed from utils/test/result_collection_api/update/changes.py) |   0
-rw-r--r--                 utils/test/result_collection_api/update/templates/restore_mongodb.py (renamed from utils/test/result_collection_api/update/restore.py)    |   0
-rw-r--r--                 utils/test/result_collection_api/update/templates/rm_olds.sh     |  15
-rw-r--r--                 utils/test/result_collection_api/update/templates/update_mongodb.py (renamed from utils/test/result_collection_api/update/update.py)      |   2
-rw-r--r--                 utils/test/result_collection_api/update/templates/utils.py (renamed from utils/test/result_collection_api/update/utils.py)                |   0
-rw-r--r--                 utils/test/result_collection_api/update/update.yml               |  41
20 files changed, 431 insertions, 417 deletions
diff --git a/utils/docs-build.sh b/utils/docs-build.sh
deleted file mode 100755
index 48037db2e..000000000
--- a/utils/docs-build.sh
+++ /dev/null
@@ -1,234 +0,0 @@
-#!/bin/bash -e
-# SPDX-license-identifier: Apache-2.0
-##############################################################################
-# Copyright (c) 2016 NEC and others.
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-export PATH=$PATH:/usr/local/bin/
-
-DOCS_DIR=${DOCS_DIR:-docs}
-INDEX_RST=${INDEX_RST:-index.rst}
-BUILD_DIR=${BUILD_DIR:-docs_build}
-OUTPUT_DIR=${OUTPUT_DIR:-docs_output}
-SRC_DIR=${SRC_DIR:-$BUILD_DIR/_src}
-VENV_DIR=${VENV_DIR:-$BUILD_DIR/_venv}
-RELENG_DIR=${RELENG_DIR:-releng}
-GERRIT_COMMENT=${GERRIT_COMMENT:-}
-
-get_title_script="
-import os
-from docutils import core, nodes
-
-try:
- with open('index.rst', 'r') as file:
- data = file.read()
- doctree = core.publish_doctree(data,
- settings_overrides={'report_level': 5,
- 'halt_level': 5})
- if isinstance(doctree[0], nodes.title):
- title = doctree[0]
- else:
- for c in doctree.children:
- if isinstance(c, nodes.section):
- title = c[0]
- break
- print title.astext()
-except:
- print 'None'"
-revision="$(git rev-parse --short HEAD)"
-rev_full="$(git rev-parse HEAD)"
-version="$(git describe --abbrev=0 2> /dev/null || echo draft) ($revision)"
-project="$(basename $(git rev-parse --show-toplevel))"
-html_notes=" Revision: $rev_full\n Build date: $(date -u +'%Y-%m-%d')"
-default_conf='releng/docs/etc/conf.py'
-opnfv_logo='releng/docs/etc/opnfv-logo.png'
-
-function check_rst_doc() {
- _src="$1"
-
- # Note: This check may fail in many jobs for building project docs, since
- # the old sample has lines more than 120. We ignore failures on this
- # check right now, but these have to be fixed before OPNFV B release.
- _out=$(doc8 --max-line-length 240 --ignore D000 "$_src") || {
- _msg='Warning: rst validation (doc8) has failed, please fix the following error(s).'
- _errs=$(echo "$_out" | sed -n -e "/^$_src/s/^/ /p")
- echo
- echo -e "$_msg\n$_errs"
- echo
- [[ -n "$GERRIT_COMMENT" ]] && echo -e "$_msg\n$_errs" >> "$GERRIT_COMMENT"
- }
-}
-
-function add_html_notes() {
- _src="$1"
-
- find "$_src" -name '*.rst' | while read file
- do
- if grep -q -e ' _sha1_' "$file" ; then
- # TODO: remove this, once old templates were removed from all repos.
- echo
- echo "Warn: '_sha1_' was found in [$file], use the latest document template."
- echo " See https://wiki.opnfv.org/documentation/tools ."
- echo
- sed -i "s/ _sha1_/ $git_sha1/g" "$file"
- fi
- sed -i -e "\$a\\\n..\n$html_notes" "$file"
- done
-}
-
-function prepare_src_files() {
- mkdir -p "$(dirname $SRC_DIR)"
-
- [[ -e "$SRC_DIR" ]] && rm -rf "$SRC_DIR"
- cp -r "$DOCS_DIR" "$SRC_DIR"
- add_html_notes "$SRC_DIR"
-}
-
-function add_config() {
- _conf="$1"
- _param="$2"
- _val="$3"
-
- if ! grep -q -e "^$_param = " "$_conf" ; then
- echo "Adding '$_param' into $_conf ..."
- echo "$_param = $_val" >> "$_conf"
- fi
-}
-
-function is_top_dir() {
- [[ "$1" == "$DOCS_DIR" ]]
-}
-
-function generate_name_for_top_dir() {
- for suffix in '' '.top' '.all' '.master' '_' '__' '___'
- do
- _name="$(basename $DOCS_DIR)$suffix"
- [[ -e "$DOCS_DIR/$_name" ]] && continue
- echo "$_name"
- return
- done
-
- echo "Error: cannot find name for top directory [$DOCS_DIR]"
- exit 1
-}
-
-function generate_name() {
- _dir=$1
-
- if is_top_dir "$_dir" ; then
- _name=$(generate_name_for_top_dir $DOCS_DIR)
- else
- _name="${_dir#$DOCS_DIR/}"
- fi
- # Replace '/' by '_'
- echo "${_name////_}"
-}
-
-
-check_rst_doc $DOCS_DIR
-
-if [[ ! -d "$RELENG_DIR" ]] ; then
- echo "Error: $RELENG_DIR dir not found. See https://wiki.opnfv.org/documentation/tools ."
- exit 1
-fi
-
-prepare_src_files
-
-if ! which virtualenv > /dev/null ; then
- echo "Error: 'virtualenv' not found. Exec 'sudo pip install virtualenv' first."
- exit 1
-fi
-
-virtualenv "$VENV_DIR"
-source "$VENV_DIR/bin/activate"
-pip install -r "$RELENG_DIR/docs/etc/requirements.txt"
-
-find $DOCS_DIR -name $INDEX_RST -printf '%h\n' | while read dir
-do
- name=$(generate_name $dir)
- if is_top_dir "$dir" ; then
- src="$SRC_DIR"
- else
- src="$SRC_DIR/${dir#$DOCS_DIR/}"
- fi
- build="$BUILD_DIR/$name"
- output="$OUTPUT_DIR/$name"
- conf="$src/conf.py"
-
- echo
- echo "#################${dir//?/#}"
- echo "Building DOCS in $dir"
- echo "#################${dir//?/#}"
- echo
-
- [[ ! -f "$conf" ]] && cp "$default_conf" "$conf"
- title=$(cd $src; python -c "$get_title_script")
- latex_conf="[('index', '$name.tex', \"$title\", 'OPNFV', 'manual'),]"
- add_config "$conf" 'latex_documents' "$latex_conf"
- add_config "$conf" 'release' "u'$version'"
- add_config "$conf" 'version' "u'$version'"
- add_config "$conf" 'project' "u'$project'"
- add_config "$conf" 'copyright' "u'$(date +%Y), OPNFV'"
- cp -f $opnfv_logo "$src/opnfv-logo.png"
-
- mkdir -p "$output"
-
- sphinx-build -b html -t html -E "$src" "$output"
-
- # Note: PDF creation may fail in project doc builds.
- # We allow this build job to be marked as succeeded with
- # failure in PDF creation, but leave message to fix it.
- # Any failure has to be fixed before OPNFV B release.
- {
- sphinx-build -b latex -t pdf -E "$src" "$build" && \
- make -C "$build" LATEXOPTS='--interaction=nonstopmode' all-pdf
- } && {
- mv "$build/$name.pdf" "$output"
- } || {
- msg="Error: PDF creation for $dir has failed, please fix source rst file(s)."
- echo
- echo "$msg"
- echo
- [[ -n "$GERRIT_COMMENT" ]] && echo "$msg" >> "$GERRIT_COMMENT"
- }
-
- # TODO: failures in ODT creation should be handled error and
- # cause 'exit 1' before OPNFV B release.
- tex=$(find $build -name '*.tex' | head -1)
- odt="${tex%.tex}.odt"
- if [[ -e $tex ]] && which pandoc > /dev/null ; then
- (
- cd $(dirname $tex)
- pandoc $(basename $tex) -o $(basename $odt)
- ) && {
- mv $odt $output/
- }|| {
- msg="Error: ODT creation for $dir has failed."
- echo
- echo "$msg"
- echo
- }
- else
- echo "Warn: tex file and/or 'pandoc' are not found, skip ODT creation."
- fi
-
- if is_top_dir "$dir" ; then
- # NOTE: Having top level document (docs/index.rst) is not recommended.
- # It may cause conflicts with other docs (mostly with HTML
- # folders for contents in top level docs and other document
- # folders). But, let's try merge of those contents into the top
- # docs directory.
- (
- cd $output
- find . -type d -print | xargs -I d mkdir -p ../d
- find . -type f -print | xargs -I f mv -b f ../f
- )
- rm -rf "$output"
- fi
-
-done
-
-deactivate
diff --git a/utils/gpg_import_key.sh b/utils/gpg_import_key.sh
index 3afeda839..bb11f0d45 100644..100755
--- a/utils/gpg_import_key.sh
+++ b/utils/gpg_import_key.sh
@@ -7,15 +7,32 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-
function isinstalled {
-if rpm -q "$@" >/dev/null 2>&1; then
- true
- else
- echo installing "$1"
- sudo yum install "$1"
- false
+
+source /etc/os-release; echo ${ID/*, /}
+
+if [[ ${ID/*, /} =~ "centos" ]]; then
+ if rpm -q "$@" >/dev/null 2>&1; then
+ true
+ else
+ echo installing "$1"
+ sudo yum install "$1"
+ false
+ fi
+
+elif [[ ${ID/*, /} =~ "ubuntu" ]]; then
+ if dpkg-query -W -f'${Status}' "$@" 2>/dev/null | grep -q "ok installed"; then
+ true
+ else
+ echo installing "$1"
+ sudo apt-get install -y "$1"
+ false
+ fi
+else
+ echo "Distro not supported"
+ exit 0
fi
+
}
if ! isinstalled gnupg2; then
@@ -40,3 +57,4 @@ else
rm -f "$NODE_NAME"-subkey
fi
fi
+
diff --git a/utils/jenkins-jnlp-connect.sh b/utils/jenkins-jnlp-connect.sh
index 8c41620d6..d268a28de 100755
--- a/utils/jenkins-jnlp-connect.sh
+++ b/utils/jenkins-jnlp-connect.sh
@@ -48,6 +48,14 @@ main () {
exit 1
fi
+ if [[ $(whoami) != "root" ]]; then
+ if grep "^Defaults requiretty" /etc/sudoers
+ then echo "please comment out Defaults requiretty from /etc/sudoers"
+ exit 1
+ fi
+ fi
+
+
if [ -d /etc/monit/conf.d ]; then
monitconfdir="/etc/monit/conf.d/"
elif [ -d /etc/monit.d ]; then
@@ -87,7 +95,7 @@ main () {
echo "Writing the following as monit config:"
cat << EOF | tee $monitconfdir/jenkins
check process jenkins with pidfile /var/run/$jenkinsuser/jenkins_jnlp_pid
-start program = "/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'"
+start program = "/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@' with timeout 60 seconds"
stop program = "/bin/bash -c '/bin/kill \$(/bin/cat /var/run/$jenkinsuser/jenkins_jnlp_pid)'"
EOF
}
@@ -96,7 +104,7 @@ EOF
#test for diff
if [[ "$(diff $monitconfdir/jenkins <(echo "\
check process jenkins with pidfile /var/run/$jenkinsuser/jenkins_jnlp_pid
-start program = \"/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@'\"
+start program = \"/usr/bin/sudo -u $jenkinsuser /bin/bash -c 'cd $dir; export started_monit=true; $0 $@' with timeout 60 seconds\"
stop program = \"/bin/bash -c '/bin/kill \$(/bin/cat /var/run/$jenkinsuser/jenkins_jnlp_pid)'\"\
") )" ]]; then
echo "Updating monit config..."
diff --git a/utils/retention_script.sh b/utils/retention_script.sh
new file mode 100755
index 000000000..7e50623ca
--- /dev/null
+++ b/utils/retention_script.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+# SPDX-license-identifier: Apache-2.0
+##############################################################################
+# Copyright (c) 2016 The Linux Foundation and others
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##############################################################################
+
+PATH=$PATH:/usr/local/bin/
+
+#These are the only projects that generate artifacts
+for x in armband ovsnfv fuel apex compass4nfv
+do
+
+ echo "Looking at artifacts for project $x"
+
+ while IFS= read -r artifact; do
+
+ artifact_date="$(gsutil ls -L $artifact | grep "Creation time:" | awk '{print $4,$5,$6}')"
+ age=$(($(date +%s)-$(date -d"$artifact_date" +%s)))
+ daysold=$(($age/86400))
+
+ if [[ "$daysold" -gt "10" ]]; then
+ echo "$daysold Days old deleting: $(basename $artifact)"
+ else
+ echo "$daysold Days old retaining: $(basename $artifact)"
+ fi
+
+ done < <(gsutil ls gs://artifacts.opnfv.org/"$x" |grep -v "/$")
+done
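For reference, a minimal Python sketch of the age check the retention script above performs: compute whole days since the artifact's creation time and compare against the 10-day threshold. The function names and the use of datetime objects are illustrative only, not part of the script.

```python
from datetime import datetime, timezone

def days_old(created: datetime) -> int:
    """Whole days elapsed since the artifact's creation time (UTC)."""
    return (datetime.now(timezone.utc) - created).days

def retention_decision(created: datetime, max_days: int = 10) -> str:
    """Mirror of the script's threshold: strictly older than max_days means delete."""
    return "delete" if days_old(created) > max_days else "retain"

# e.g. retention_decision(datetime(2016, 5, 1, tzinfo=timezone.utc)) -> "delete"
```

Like the shell script at this point, the sketch only reports the decision; no artifact is actually removed.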
diff --git a/utils/test/reporting/functest/reporting-status.py b/utils/test/reporting/functest/reporting-status.py
index adbee36aa..622c375cc 100644
--- a/utils/test/reporting/functest/reporting-status.py
+++ b/utils/test/reporting/functest/reporting-status.py
@@ -8,8 +8,6 @@
#
import datetime
import jinja2
-import logging
-import os
import requests
import sys
import time
@@ -21,17 +19,7 @@ import testCase as tc
import scenarioResult as sr
# Logger
-logFormatter = logging.Formatter("%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s] %(message)s")
-logger = logging.getLogger()
-
-fileHandler = logging.FileHandler("{0}/{1}".format('.', conf.LOG_FILE))
-fileHandler.setFormatter(logFormatter)
-logger.addHandler(fileHandler)
-
-consoleHandler = logging.StreamHandler()
-consoleHandler.setFormatter(logFormatter)
-logger.addHandler(consoleHandler)
-logger.setLevel(conf.LOG_LEVEL)
+logger = utils.getLogger("Status")
# Initialization
testValid = []
@@ -48,11 +36,11 @@ response = requests.get(cf)
functest_yaml_config = yaml.load(response.text)
-logger.info("****************************************")
-logger.info("* Generating reporting..... *")
-logger.info("* Data retention = %s days *" % conf.PERIOD)
-logger.info("* *")
-logger.info("****************************************")
+logger.info("*******************************************")
+logger.info("* Generating reporting scenario status *")
+logger.info("* Data retention = %s days *" % conf.PERIOD)
+logger.info("* *")
+logger.info("*******************************************")
# Retrieve test cases of Tier 1 (smoke)
config_tiers = functest_yaml_config.get("tiers")
@@ -111,17 +99,22 @@ for version in conf.versions:
for test_case in testValid:
test_case.checkRunnable(installer, s,
test_case.getConstraints())
- logger.debug("testcase %s is %s" % (test_case.getName(),
- test_case.isRunnable))
+ logger.debug("testcase %s is %s" %
+ (test_case.getDisplayName(),
+ test_case.isRunnable))
time.sleep(1)
if test_case.isRunnable:
dbName = test_case.getDbName()
name = test_case.getName()
+ displayName = test_case.getDisplayName()
project = test_case.getProject()
nb_test_runnable_for_this_scenario += 1
logger.info(" Searching results for case %s " %
- (dbName))
+ (displayName))
result = utils.getResult(dbName, installer, s, version)
+ # if no result set the value to 0
+ if result < 0:
+ result = 0
logger.info(" >>>> Test score = " + str(result))
test_case.setCriteria(result)
test_case.setIsRunnable(True)
@@ -144,18 +137,23 @@ for version in conf.versions:
if test_case.isRunnable:
dbName = test_case.getDbName()
name = test_case.getName()
+ displayName = test_case.getDisplayName()
project = test_case.getProject()
logger.info(" Searching results for case %s " %
- (dbName))
+ (displayName))
result = utils.getResult(dbName, installer, s, version)
- test_case.setCriteria(result)
- test_case.setIsRunnable(True)
- testCases2BeDisplayed.append(tc.TestCase(name,
- project,
- "",
- result,
- True,
- 4))
+ # at least 1 result for the test
+ if result > -1:
+ test_case.setCriteria(result)
+ test_case.setIsRunnable(True)
+ testCases2BeDisplayed.append(tc.TestCase(name,
+ project,
+ "",
+ result,
+ True,
+ 4))
+ else:
+ logger.debug("No results found")
items[s] = testCases2BeDisplayed
except:
@@ -182,7 +180,7 @@ for version in conf.versions:
else:
logger.info(">>>>> scenario OK, save the information")
s_status = "OK"
- path_validation_file = ("./release/" + version +
+ path_validation_file = (conf.REPORTING_PATH + "/release/" + version +
"/validated_scenario_history.txt")
with open(path_validation_file, "a") as f:
time_format = "%Y-%m-%d %H:%M"
@@ -193,12 +191,10 @@ for version in conf.versions:
scenario_result_criteria[s] = sr.ScenarioResult(s_status, s_score)
logger.info("--------------------------")
- templateLoader = jinja2.FileSystemLoader(os.path.dirname
- (os.path.abspath
- (__file__)))
+ templateLoader = jinja2.FileSystemLoader(conf.REPORTING_PATH)
templateEnv = jinja2.Environment(loader=templateLoader)
- TEMPLATE_FILE = "./template/index-status-tmpl.html"
+ TEMPLATE_FILE = "/template/index-status-tmpl.html"
template = templateEnv.get_template(TEMPLATE_FILE)
outputText = template.render(scenario_stats=scenario_stats,
@@ -208,6 +204,6 @@ for version in conf.versions:
period=conf.PERIOD,
version=version)
- with open("./release/" + version +
+ with open(conf.REPORTING_PATH + "/release/" + version +
"/index-status-" + installer + ".html", "wb") as fh:
fh.write(outputText)
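A minimal sketch of what the relocated Jinja2 lookup above amounts to: templates are now resolved relative to conf.REPORTING_PATH instead of the script's own directory. It assumes the template tree exists under reporting_path; the reporting_path value and the empty render arguments are placeholders.

```python
import jinja2

reporting_path = "."  # stands in for conf.REPORTING_PATH
env = jinja2.Environment(loader=jinja2.FileSystemLoader(reporting_path))
template = env.get_template("template/index-status-tmpl.html")
html = template.render(scenario_stats={}, items={}, installer="apex",
                       period=10, version="master")
# reporting-status.py writes this into <REPORTING_PATH>/release/<version>/
print(html[:200])
```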
diff --git a/utils/test/reporting/functest/reporting-tempest.py b/utils/test/reporting/functest/reporting-tempest.py
index a065ef442..e3f4e3306 100644
--- a/utils/test/reporting/functest/reporting-tempest.py
+++ b/utils/test/reporting/functest/reporting-tempest.py
@@ -1,28 +1,44 @@
from urllib2 import Request, urlopen, URLError
import json
import jinja2
-import os
+import reportingConf as conf
+import reportingUtils as utils
-installers = ["apex", "compass", "fuel", "joid"]
+installers = conf.installers
items = ["tests", "Success rate", "duration"]
-PERIOD = 7
-print "Generate Tempest automatic reporting"
+PERIOD = conf.PERIOD
+criteria_nb_test = 165
+criteria_duration = 1800
+criteria_success_rate = 90
+
+logger = utils.getLogger("Tempest")
+logger.info("************************************************")
+logger.info("* Generating reporting Tempest_smoke_serial *")
+logger.info("* Data retention = %s days *" % PERIOD)
+logger.info("* *")
+logger.info("************************************************")
+
+logger.info("Success criteria:")
+logger.info("nb tests executed > %s s " % criteria_nb_test)
+logger.info("test duration < %s s " % criteria_duration)
+logger.info("success rate > %s " % criteria_success_rate)
+
for installer in installers:
# we consider the Tempest results of the last PERIOD days
- url = "http://testresults.opnfv.org/test/api/v1/results?case=tempest_smoke_serial"
- request = Request(url + '&period=' + str(PERIOD)
- + '&installer=' + installer + '&version=master')
-
+ url = conf.URL_BASE + "?case=tempest_smoke_serial"
+ request = Request(url + '&period=' + str(PERIOD) +
+ '&installer=' + installer + '&version=master')
+ logger.info("Search tempest_smoke_serial results for installer %s"
+ % installer)
try:
response = urlopen(request)
k = response.read()
results = json.loads(k)
except URLError, e:
- print 'No kittez. Got an error code:', e
+ logger.error("Error code: %s" % e)
test_results = results['results']
- test_results.reverse()
scenario_results = {}
criteria = {}
@@ -48,8 +64,8 @@ for installer in installers:
nb_tests_run = result['details']['tests']
nb_tests_failed = result['details']['failures']
if nb_tests_run != 0:
- success_rate = 100*(int(nb_tests_run)
- - int(nb_tests_failed))/int(nb_tests_run)
+ success_rate = 100*(int(nb_tests_run) -
+ int(nb_tests_failed)) / int(nb_tests_run)
else:
success_rate = 0
@@ -63,40 +79,49 @@ for installer in installers:
crit_time = False
# Expect that at least 165 tests are run
- if nb_tests_run >= 165:
+ if nb_tests_run >= criteria_nb_test:
crit_tests = True
# Expect that at least 90% of success
- if success_rate >= 90:
+ if success_rate >= criteria_success_rate:
crit_rate = True
# Expect that the suite duration is inferior to 30m
- if result['details']['duration'] < 1800:
+ if result['details']['duration'] < criteria_duration:
crit_time = True
result['criteria'] = {'tests': crit_tests,
'Success rate': crit_rate,
'duration': crit_time}
- # error management
+ try:
+ logger.debug("Scenario %s, Installer %s"
+ % (s_result[1]['scenario'], installer))
+ logger.debug("Nb Test run: %s" % nb_tests_run)
+ logger.debug("Test duration: %s"
+ % result['details']['duration'])
+ logger.debug("Success rate: %s" % success_rate)
+ except:
+ logger.error("Data format error")
+
+ # Error management
# ****************
try:
errors = result['details']['errors']
result['errors'] = errors.replace('{0}', '')
except:
- print "Error field not present (Brahamputra runs?)"
+ logger.error("Error field not present (Brahamputra runs?)")
- mypath = os.path.abspath(__file__)
- tplLoader = jinja2.FileSystemLoader(os.path.dirname(mypath))
- templateEnv = jinja2.Environment(loader=tplLoader)
+ templateLoader = jinja2.FileSystemLoader(conf.REPORTING_PATH)
+ templateEnv = jinja2.Environment(loader=templateLoader)
- TEMPLATE_FILE = "./template/index-tempest-tmpl.html"
+ TEMPLATE_FILE = "/template/index-tempest-tmpl.html"
template = templateEnv.get_template(TEMPLATE_FILE)
outputText = template.render(scenario_results=scenario_results,
items=items,
installer=installer)
- with open("./release/master/index-tempest-" +
+ with open(conf.REPORTING_PATH + "/release/master/index-tempest-" +
installer + ".html", "wb") as fh:
fh.write(outputText)
-print "Tempest automatic reporting Done"
+logger.info("Tempest automatic reporting succesfully generated.")
diff --git a/utils/test/reporting/functest/reporting-vims.py b/utils/test/reporting/functest/reporting-vims.py
index 4033687e8..d0436ed14 100644
--- a/utils/test/reporting/functest/reporting-vims.py
+++ b/utils/test/reporting/functest/reporting-vims.py
@@ -1,7 +1,11 @@
from urllib2 import Request, urlopen, URLError
import json
import jinja2
-import os
+import reportingConf as conf
+import reportingUtils as utils
+
+logger = utils.getLogger("vIMS")
+
def sig_test_format(sig_test):
nbPassed = 0
@@ -9,7 +13,7 @@ def sig_test_format(sig_test):
nbSkipped = 0
for data_test in sig_test:
if data_test['result'] == "Passed":
- nbPassed+= 1
+ nbPassed += 1
elif data_test['result'] == "Failed":
nbFailures += 1
elif data_test['result'] == "Skipped":
@@ -20,21 +24,29 @@ def sig_test_format(sig_test):
total_sig_test_result['skipped'] = nbSkipped
return total_sig_test_result
-installers = ["fuel", "compass", "joid", "apex"]
-step_order = ["initialisation", "orchestrator", "vIMS", "sig_test"]
+logger.info("****************************************")
+logger.info("* Generating reporting vIMS *")
+logger.info("* Data retention = %s days *" % conf.PERIOD)
+logger.info("* *")
+logger.info("****************************************")
+installers = conf.installers
+step_order = ["initialisation", "orchestrator", "vIMS", "sig_test"]
+logger.info("Start processing....")
for installer in installers:
- request = Request('http://testresults.opnfv.org/test/api/v1/results?case=vims&installer=' + installer)
+ logger.info("Search vIMS results for installer %s" % installer)
+ request = Request(conf.URL_BASE + '?case=vims&installer=' + installer)
try:
response = urlopen(request)
k = response.read()
results = json.loads(k)
except URLError, e:
- print 'No kittez. Got an error code:', e
+ logger.error("Error code: %s" % e)
test_results = results['results']
- test_results.reverse()
+
+ logger.debug("Results found: %s" % test_results)
scenario_results = {}
for r in test_results:
@@ -44,6 +56,7 @@ for installer in installers:
for s, s_result in scenario_results.items():
scenario_results[s] = s_result[0:5]
+ logger.debug("Search for success criteria")
for result in scenario_results[s]:
result["start_date"] = result["start_date"].split(".")[0]
sig_test = result['details']['sig_test']['result']
@@ -67,17 +80,34 @@ for installer in installers:
result['pr_step_ok'] = 0
if nb_step != 0:
result['pr_step_ok'] = (float(nb_step_ok)/nb_step)*100
-
-
- templateLoader = jinja2.FileSystemLoader(os.path.dirname(os.path.abspath(__file__)))
- templateEnv = jinja2.Environment( loader=templateLoader )
-
- TEMPLATE_FILE = "./template/index-vims-tmpl.html"
- template = templateEnv.get_template( TEMPLATE_FILE )
-
- outputText = template.render( scenario_results = scenario_results, step_order = step_order, installer = installer)
-
- with open("./release/master/index-vims-" + installer + ".html", "wb") as fh:
+ try:
+ logger.debug("Scenario %s, Installer %s"
+ % (s_result[1]['scenario'], installer))
+ logger.debug("Orchestrator deployment: %s s"
+ % result['details']['orchestrator']['duration'])
+ logger.debug("vIMS deployment: %s s"
+ % result['details']['vIMS']['duration'])
+ logger.debug("Signaling testing: %s s"
+ % result['details']['sig_test']['duration'])
+ logger.debug("Signaling testing results: %s"
+ % format_result)
+ except:
+ logger.error("Data badly formatted")
+ logger.debug("------------------------------------------------")
+
+ templateLoader = jinja2.FileSystemLoader(conf.REPORTING_PATH)
+ templateEnv = jinja2.Environment(loader=templateLoader)
+
+ TEMPLATE_FILE = "/template/index-vims-tmpl.html"
+ template = templateEnv.get_template(TEMPLATE_FILE)
+
+ outputText = template.render(scenario_results=scenario_results,
+ step_order=step_order,
+ installer=installer)
+
+ with open(conf.REPORTING_PATH +
+ "/release/master/index-vims-" +
+ installer + ".html", "wb") as fh:
fh.write(outputText)
-
+logger.info("vIMS report succesfully generated")
diff --git a/utils/test/reporting/functest/reportingConf.py b/utils/test/reporting/functest/reportingConf.py
index 61410b414..a58eeecc9 100644
--- a/utils/test/reporting/functest/reportingConf.py
+++ b/utils/test/reporting/functest/reportingConf.py
@@ -13,14 +13,16 @@ installers = ["apex", "compass", "fuel", "joid"]
# installers = ["apex"]
# list of test cases declared in testcases.yaml but that must not be
# taken into account for the scoring
-blacklist = ["odl", "ovno", "security_scan"]
+blacklist = ["odl", "ovno", "security_scan", "copper", "moon"]
# versions = ["brahmaputra", "master"]
versions = ["master"]
PERIOD = 10
MAX_SCENARIO_CRITERIA = 18
# get the last 5 test results to determinate the success criteria
NB_TESTS = 5
+# REPORTING_PATH = "/usr/share/nginx/html/reporting/functest"
+REPORTING_PATH = "."
URL_BASE = 'http://testresults.opnfv.org/test/api/v1/results'
TEST_CONF = "https://git.opnfv.org/cgit/functest/plain/ci/testcases.yaml"
-LOG_LEVEL = "INFO"
-LOG_FILE = "reporting.log"
+LOG_LEVEL = "ERROR"
+LOG_FILE = REPORTING_PATH + "/reporting.log"
diff --git a/utils/test/reporting/functest/reportingUtils.py b/utils/test/reporting/functest/reportingUtils.py
index 2f06b8449..5051ffa95 100644
--- a/utils/test/reporting/functest/reportingUtils.py
+++ b/utils/test/reporting/functest/reportingUtils.py
@@ -7,8 +7,26 @@
# http://www.apache.org/licenses/LICENSE-2.0
#
from urllib2 import Request, urlopen, URLError
+import logging
import json
-import reportingConf
+import reportingConf as conf
+
+
+def getLogger(module):
+ logFormatter = logging.Formatter("%(asctime)s [" +
+ module +
+ "] [%(levelname)-5.5s] %(message)s")
+ logger = logging.getLogger()
+
+ fileHandler = logging.FileHandler("{0}/{1}".format('.', conf.LOG_FILE))
+ fileHandler.setFormatter(logFormatter)
+ logger.addHandler(fileHandler)
+
+ consoleHandler = logging.StreamHandler()
+ consoleHandler.setFormatter(logFormatter)
+ logger.addHandler(consoleHandler)
+ logger.setLevel(conf.LOG_LEVEL)
+ return logger
def getApiResults(case, installer, scenario, version):
@@ -19,10 +37,10 @@ def getApiResults(case, installer, scenario, version):
# urllib2.install_opener(opener)
# url = "http://127.0.0.1:8000/results?case=" + case + \
# "&period=30&installer=" + installer
- url = (reportingConf.URL_BASE + "?case=" + case +
- "&period=" + str(reportingConf.PERIOD) + "&installer=" + installer +
+ url = (conf.URL_BASE + "?case=" + case +
+ "&period=" + str(conf.PERIOD) + "&installer=" + installer +
"&scenario=" + scenario + "&version=" + version +
- "&last=" + str(reportingConf.NB_TESTS))
+ "&last=" + str(conf.NB_TESTS))
request = Request(url)
try:
@@ -38,9 +56,8 @@ def getApiResults(case, installer, scenario, version):
def getScenarios(case, installer, version):
case = case.getName()
- print case
- url = (reportingConf.URL_BASE + "?case=" + case +
- "&period=" + str(reportingConf.PERIOD) + "&installer=" + installer +
+ url = (conf.URL_BASE + "?case=" + case +
+ "&period=" + str(conf.PERIOD) + "&installer=" + installer +
"&version=" + version)
request = Request(url)
@@ -115,11 +132,16 @@ def getResult(testCase, installer, scenario, version):
# 2: <4 successful consecutive runs but passing the criteria
# 1: close to pass the success criteria
# 0: 0% success, not passing
+ # -1: no run available
test_result_indicator = 0
nbTestOk = getNbtestOk(scenario_results)
+
# print "Nb test OK (last 10 days):"+ str(nbTestOk)
# check that we have at least 4 runs
- if nbTestOk < 1:
+ if len(scenario_results) < 1:
+ # No results available
+ test_result_indicator = -1
+ elif nbTestOk < 1:
test_result_indicator = 0
elif nbTestOk < 2:
test_result_indicator = 1
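For clarity, a sketch of the indicator scale getResult() returns after this change, including the new -1 "no run available" case. Only the lower branches appear in the hunk; the cut-off between 2 and 3 is an assumption based on the comments above.

```python
def result_indicator(nb_results, nb_ok):
    """Score a test case from its last NB_TESTS runs (see the getResult comments)."""
    if nb_results < 1:
        return -1      # no run available
    if nb_ok < 1:
        return 0       # 0% success, not passing
    if nb_ok < 2:
        return 1       # close to passing the success criteria
    if nb_ok < 4:
        return 2       # passing, but fewer than 4 successful runs (assumed cut-off)
    return 3           # 4 or more successful consecutive runs

# reporting-status.py clamps -1 back to 0 before using the value as a score.
```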
diff --git a/utils/test/reporting/functest/template/index-status-tmpl.html b/utils/test/reporting/functest/template/index-status-tmpl.html
index 89a1d1527..0c3fa9426 100644
--- a/utils/test/reporting/functest/template/index-status-tmpl.html
+++ b/utils/test/reporting/functest/template/index-status-tmpl.html
@@ -76,7 +76,7 @@
{% for test in items[scenario] -%}
<th>
{% if test.getCriteria() > -1 -%}
- {{test.getDbName() }}
+ {{test.getDisplayName() }}
{%- endif %}
{% if test.getTier() > 3 -%}
*
diff --git a/utils/test/reporting/functest/testCase.py b/utils/test/reporting/functest/testCase.py
index f0e8f5995..e19853a09 100644
--- a/utils/test/reporting/functest/testCase.py
+++ b/utils/test/reporting/functest/testCase.py
@@ -19,6 +19,28 @@ class TestCase(object):
self.criteria = criteria
self.isRunnable = isRunnable
self.tier = tier
+ display_name_matrix = {'healthcheck': 'healthcheck',
+ 'vping_ssh': 'vPing (ssh)',
+ 'vping_userdata': 'vPing (userdata)',
+ 'odl': 'ODL',
+ 'onos': 'ONOS',
+ 'ocl': 'OCL',
+ 'tempest_smoke_serial': 'Tempest (smoke)',
+ 'tempest_full_parallel': 'Tempest (full)',
+ 'rally_sanity': 'Rally (smoke)',
+ 'bgpvpn': 'bgpvpn',
+ 'rally_full': 'Rally (full)',
+ 'vims': 'vIMS',
+ 'doctor': 'Doctor',
+ 'promise': 'Promise',
+ 'moon': 'moon',
+ 'copper': 'copper',
+ 'security_scan': 'security'
+ }
+ try:
+ self.displayName = display_name_matrix[self.name]
+ except:
+ self.displayName = "unknown"
def getName(self):
return self.name
@@ -74,10 +96,10 @@ class TestCase(object):
self.isRunnable = is_runnable
def toString(self):
- testcase = ("Name=" + self.name + ";Criteria=" + str(self.criteria)
- + ";Project=" + self.project + ";Constraints="
- + str(self.constraints) + ";IsRunnable"
- + str(self.isRunnable))
+ testcase = ("Name=" + self.name + ";Criteria=" +
+ str(self.criteria) + ";Project=" + self.project +
+ ";Constraints=" + str(self.constraints) +
+ ";IsRunnable" + str(self.isRunnable))
return testcase
def getDbName(self):
@@ -98,31 +120,15 @@ class TestCase(object):
'rally_full': 'rally_full',
'vims': 'vims',
'doctor': 'doctor-notification',
- 'promise': 'promise'
+ 'promise': 'promise',
+ 'moon': 'moon',
+ 'copper': 'copper',
+ 'security_scan': 'security'
}
try:
return test_match_matrix[self.name]
except:
return "unknown"
- def getTestDisplayName(self):
- # Correspondance name of the test case / name in the DB
- test_match_matrix = {'healthcheck': 'healthcheck',
- 'vping_ssh': 'vPing (ssh)',
- 'vping_userdata': 'vPing (userdata)',
- 'odl': 'ODL',
- 'onos': 'ONOS',
- 'ocl': 'OCL',
- 'tempest_smoke_serial': 'Tempest (smoke)',
- 'tempest_full_parallel': 'Tempest (full)',
- 'rally_sanity': 'Rally (smoke)',
- 'bgpvpn': 'bgpvpn',
- 'rally_full': 'Rally (full)',
- 'vims': 'vIMS',
- 'doctor': 'Doctor',
- 'promise': 'Promise'
- }
- try:
- return test_match_matrix[self.name]
- except:
- return "unknown"
+ def getDisplayName(self):
+ return self.displayName
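In effect the refactoring above replaces the per-call getTestDisplayName() lookup with a label resolved once in the constructor. A condensed sketch, with an abbreviated mapping and a standalone function name of my choosing:

```python
display_name_matrix = {'tempest_smoke_serial': 'Tempest (smoke)',
                       'vping_ssh': 'vPing (ssh)',
                       'vims': 'vIMS'}  # abbreviated; the full map lives in testCase.py

def display_name(case_name):
    """Human-readable label for a test case, 'unknown' if not mapped."""
    return display_name_matrix.get(case_name, 'unknown')

# display_name('tempest_smoke_serial') -> 'Tempest (smoke)'
# display_name('some_new_case')        -> 'unknown'
```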
diff --git a/utils/test/result_collection_api/update/README.md b/utils/test/result_collection_api/update/README.md
index 41b7fffa3..d3aef7efe 100644
--- a/utils/test/result_collection_api/update/README.md
+++ b/utils/test/result_collection_api/update/README.md
@@ -1,59 +1,105 @@
-# opnfv-testapi update
-
-## How to use:
-
-# backup mongodb,
-# arguments:
-# -u/--url: Mongo DB URL, default = mongodb://127.0.0.1:27017/
-# -o/--output_dir: Output directory for the backup, default = ./
-# the backup output will be put under dir/db__XXXX_XX_XX_XXXXXX/db
-# -d/--db: database for the backup, default = test_results_collection
-```
-python backup.py
-```
-
-# restore mongodb
-# arguments:
-# -u/--url: Mongo DB URL, default = mongodb://127.0.0.1:27017/
-# -i/--input_dir: Input directory for the Restore, must be specified
-# the restore input must be specified to dir/db__XXXX_XX_XX_XXXXXX/db
-# -d/--db: database name after the restore, default = basename of input_dir
-```
-python restore.py
-```
-
-# update mongodb
-# arguments:
-# -u/--url: Mongo DB URL, default = mongodb://127.0.0.1:27017/
-# -d/--db: database name to be updated, default = test_results_collection
-# changes need to be done:
-# change collection name, modify changes.collections_old2New
-# collections_old2New = {
-# 'old_collection': 'new_collection',
-# }
-# change field name, modify changes.fields_old2New
-# fields_old2New = {
-# 'collection': [(query, {'old_field': 'new_field'})]
-# }
-# change the doc, modify changes.docs_old2New
-# docs_old2New = {
-# 'test_results': [
-# ({'field': 'old_value'}, {'field': 'new_value'}),
-# (query, {'field': 'new_value'}),
-# ]
-# }
-```
-python update.py
-```
-
-# update opnfv-testapi process
-# this script must be run right in this directory
-# and remember to change ../etc/config.ini before running this script
-# operations includes:
-# kill running test_collection_api & opnfv-testapi
-# install or update dependencies according to ../requirements.txt
-# install opnfv-testapi
-# run opnfv-testapi
+Welcome to TESTAPI Update!
+========================
+
+
+This file is used to describe how testapi update works
+
+----------
+How to use
+---------------
+
+#### <i class="icon-file"></i> backup mongodb
+
+arguments:
+: -u/--url: Mongo DB URL, default = mongodb://127.0.0.1:27017/
+the backup output will be put under dir/db__XXXX_XX_XX_XXXXXX/db
+-d/--db: database for the backup, default = test_results_collection
+
+usage:
+```
+python backup_mongodb.py
+```
+
+#### <i class="icon-file"></i> restore mongodb
+
+arguments:
+: -u/--url: Mongo DB URL, default = mongodb://127.0.0.1:27017/
+ -i/--input_dir: Input directory for the Restore, must be specified,
+ the restore input must be specified to dir/db__XXXX_XX_XX_XXXXXX/db
+ -d/--db: database name after the restore, default = basename of input_dir
+
+usage:
+```
+python restore_mongodb.py
+```
+#### <i class="icon-file"></i> update mongodb
+
+ arguments:
+: -u/--url: Mongo DB URL, default = mongodb://127.0.0.1:27017/
+ -d/--db: database name to be updated, default = test_results_collection
+
+changes need to be done:
+change collection name, modify changes.collections_old2New
+ > collections_old2New = {
+ 'old_collection': 'new_collection',
+ }
+
+ change field name, modify changes.fields_old2New
+ > fields_old2New = {
+ 'collection': [(query, {'old_field': 'new_field'})]
+ }
+
+ change the doc, modify changes.docs_old2New
+ > docs_old2New = {
+ 'test_results': [
+ ({'field': 'old_value'}, {'field': 'new_value'}),
+ (query, {'field': 'new_value'}),
+ ]
+ }
+
+#### <i class="icon-file"></i> update opnfv-testapi process
+This script must be run right in this directory and remember to
+change ../etc/config.ini before running this script.
+
+operations includes:
+: kill running test_collection_api & opnfv-testapi
+install or update dependencies according to ../requirements.txt
+install opnfv-testapi
+run opnfv-testapi
+
+usage:
```
python update_api.py
```
+#### <i class="icon-file"></i> update opnfv/testapi container
+Here ansible-playbook is used to implement auto update.
+Please make sure that the remote server is accessible via ssh.
+
+install ansible, please refer:
+```
+http://docs.ansible.com/ansible/intro_installation.html
+```
+run update.yml
+arguments:
+: host: remote server, must provide
+user: user used to access to remote server, default to root
+port: exposed port used to access to testapi, default to 8000
+image: testapi's docker image, default to opnfv/testapi:latest
+update_path: templates directory in remote server, default to /tmp/testapi
+mongodb_url: url of mongodb, default to 172.17.0.1, docker0 ip
+swagger_url: swagger access url, default to http://host:port
+
+usage:
+```
+ansible-playbook update.yml --extra-vars "
+host=10.63.243.17
+user=zte
+port=8000
+image=opnfv/testapi
+update_path=/tmp/testapi
+mongodb_url=mongodb://172.17.0.1:27017
+swagger_url=http://10.63.243.17:8000"```
+> **Note:**
+
+> - If documents need to be changed, please modify file
+templates/changes_in_mongodb.py, and refer section **update mongodb**
diff --git a/utils/test/result_collection_api/update/templates/__init__.py b/utils/test/result_collection_api/update/templates/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/utils/test/result_collection_api/update/templates/__init__.py
diff --git a/utils/test/result_collection_api/update/backup.py b/utils/test/result_collection_api/update/templates/backup_mongodb.py
index faa4fd410..7e0dd5545 100644
--- a/utils/test/result_collection_api/update/backup.py
+++ b/utils/test/result_collection_api/update/templates/backup_mongodb.py
@@ -6,9 +6,9 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-import os
import argparse
import datetime
+import os
from utils import execute, main, get_abspath
diff --git a/utils/test/result_collection_api/update/changes.py b/utils/test/result_collection_api/update/templates/changes_in_mongodb.py
index b3e23803d..b3e23803d 100644
--- a/utils/test/result_collection_api/update/changes.py
+++ b/utils/test/result_collection_api/update/templates/changes_in_mongodb.py
diff --git a/utils/test/result_collection_api/update/restore.py b/utils/test/result_collection_api/update/templates/restore_mongodb.py
index c45a0e621..c45a0e621 100644
--- a/utils/test/result_collection_api/update/restore.py
+++ b/utils/test/result_collection_api/update/templates/restore_mongodb.py
diff --git a/utils/test/result_collection_api/update/templates/rm_olds.sh b/utils/test/result_collection_api/update/templates/rm_olds.sh
new file mode 100644
index 000000000..c6bca1867
--- /dev/null
+++ b/utils/test/result_collection_api/update/templates/rm_olds.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+proc_number=`ps -ef | grep opnfv-testapi | grep -v grep | wc -l`
+if [ $proc_number -gt 0 ]; then
+ procs=`ps -ef | grep opnfv-testapi | grep -v grep`
+ echo "begin to kill opnfv-testapi $procs"
+ ps -ef | grep opnfv-testapi | grep -v grep | awk '{print $2}' | xargs kill -kill &>/dev/null
+fi
+
+number=`docker ps -a | awk 'NR != 1' | grep testapi | wc -l`
+if [ $number -gt 0 ]; then
+    containers=`docker ps -a | awk 'NR != 1' | grep testapi`
+ echo "begin to rm containers $containers"
+ docker ps -a | awk 'NR != 1' | grep testapi | awk '{print $1}' | xargs docker rm -f &>/dev/null
+fi
diff --git a/utils/test/result_collection_api/update/update.py b/utils/test/result_collection_api/update/templates/update_mongodb.py
index 8b385a0b3..b1e378dd7 100644
--- a/utils/test/result_collection_api/update/update.py
+++ b/utils/test/result_collection_api/update/templates/update_mongodb.py
@@ -10,8 +10,8 @@ import argparse
from pymongo import MongoClient
+from changes_in_mongodb import collections_old2New, fields_old2New, docs_old2New
from utils import main, parse_mongodb_url
-from changes import collections_old2New, fields_old2New, docs_old2New
parser = argparse.ArgumentParser(description='Update MongoDBs')
diff --git a/utils/test/result_collection_api/update/utils.py b/utils/test/result_collection_api/update/templates/utils.py
index a18ff0389..a18ff0389 100644
--- a/utils/test/result_collection_api/update/utils.py
+++ b/utils/test/result_collection_api/update/templates/utils.py
diff --git a/utils/test/result_collection_api/update/update.yml b/utils/test/result_collection_api/update/update.yml
new file mode 100644
index 000000000..08839564a
--- /dev/null
+++ b/utils/test/result_collection_api/update/update.yml
@@ -0,0 +1,41 @@
+---
+- hosts: "{{ host }}"
+ remote_user: "{{ user }}"
+ become: yes
+ become_method: sudo
+ vars:
+ user: "root"
+ port: "8000"
+ update_path: "/tmp/testapi"
+ image: "opnfv/testapi"
+ mongodb_url: "mongodb://172.17.0.1:27017"
+ swagger_url: "http://{{ host }}:{{ port }}"
+ tasks:
+ - name: create temporary update directory
+ file:
+ path: "{{ update_path }}"
+ state: directory
+ - name: transfer files in templates
+ copy:
+ src: templates/
+ dest: "{{ update_path }}"
+ - name: backup mongodb database
+ command: "python {{ update_path }}/backup_mongodb.py -u {{ mongodb_url }} -o {{ update_path }}"
+ - name: stop and remove old versions
+ command: bash "{{ update_path }}/rm_olds.sh"
+ register: rm_result
+ - debug: msg="{{ rm_result.stderr }}"
+ - name: delete old docker images
+ command: docker rmi "{{ image }}"
+ ignore_errors: true
+ - name: update mongodb
+ command: "python {{ update_path }}/update_mongodb.py -u {{ mongodb_url }}"
+ - name: docker start testapi server
+ command: docker run -dti -p "{{ port }}:8000"
+ -e "mongodb_url={{ mongodb_url }}"
+ -e "swagger_url={{ swagger_url }}"
+ "{{ image }}"
+ - name: remove temporary update directory
+ file:
+ path: "{{ update_path }}"
+      state: absent
\ No newline at end of file