Diffstat (limited to 'utils')
48 files changed, 1171 insertions, 534 deletions
diff --git a/utils/fetch_os_creds.sh b/utils/fetch_os_creds.sh index 3d5d8a5fe..3bc66f942 100755 --- a/utils/fetch_os_creds.sh +++ b/utils/fetch_os_creds.sh @@ -95,6 +95,7 @@ fi ssh_options="-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no" # Start fetching the files +info "Fetching rc file..." if [ "$installer_type" == "fuel" ]; then verify_connectivity $installer_ip if [ "${BRANCH}" == "master" ]; then @@ -110,8 +111,12 @@ if [ "$installer_type" == "fuel" ]; then "sudo salt --out txt 'ctl*' pillar.get _param:openstack_control_address | awk '{print \$2; exit}'" | \ sed 's/ //g') &> /dev/null - info "Fetching rc file from controller $controller_ip..." + info "... from controller $controller_ip..." ssh ${ssh_options} ubuntu@${controller_ip} "sudo cat /root/keystonercv3" > $dest_path + + if [[ $BUILD_TAG =~ "baremetal" ]]; then + ssh ${ssh_options} ubuntu@${installer_ip} "cat /etc/ssl/certs/os_cacert" > $os_cacert + fi else #ip_fuel="10.20.0.2" env=$(sshpass -p r00tme ssh 2>/dev/null ${ssh_options} root@${installer_ip} \ @@ -130,7 +135,7 @@ if [ "$installer_type" == "fuel" ]; then error "The controller $controller_ip is not up. Please check that the POD is correctly deployed." fi - info "Fetching rc file from controller $controller_ip..." + info "... from controller $controller_ip..." sshpass -p r00tme ssh 2>/dev/null ${ssh_options} root@${installer_ip} \ "scp ${ssh_options} ${controller_ip}:/root/openrc ." &> /dev/null sshpass -p r00tme scp 2>/dev/null ${ssh_options} root@${installer_ip}:~/openrc $dest_path &> /dev/null @@ -144,11 +149,18 @@ if [ "$installer_type" == "fuel" ]; then echo $auth_url >> $dest_path elif [ "$installer_type" == "apex" ]; then + if ! ipcalc -c $installer_ip; then + installer_ip=$(sudo virsh domifaddr undercloud | grep -Eo '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}') + if [ -z "$installer_ip" ] || ! $(ipcalc -c $installer_ip); then + echo "Unable to find valid IP for Apex undercloud: ${installer_ip}" + exit 1 + fi + fi verify_connectivity $installer_ip # The credentials file is located in the Instack VM (192.0.2.1) # NOTE: This might change for bare metal deployments - info "Fetching rc file from Instack VM $installer_ip..." + info "... from Instack VM $installer_ip..." if [ -f /root/.ssh/id_rsa ]; then chmod 600 /root/.ssh/id_rsa fi @@ -158,7 +170,7 @@ elif [ "$installer_type" == "compass" ]; then if [ "${BRANCH}" == "master" ]; then sudo docker cp compass-tasks:/opt/openrc $dest_path &> /dev/null sudo chown $(whoami):$(whoami) $dest_path - sudo docker cp compass-tasks:/opt/os_cacert $os_cacert &> /dev/null + sudo docker cp compass-tasks:/opt/os_cacert $os_cacert else verify_connectivity $installer_ip controller_ip=$(sshpass -p'root' ssh 2>/dev/null $ssh_options root@${installer_ip} \ @@ -170,7 +182,7 @@ elif [ "$installer_type" == "compass" ]; then error "The controller $controller_ip is not up. Please check that the POD is correctly deployed." fi - info "Fetching rc file from controller $controller_ip..." + info "... from controller $controller_ip..." sshpass -p root ssh 2>/dev/null $ssh_options root@${installer_ip} \ "scp $ssh_options ${controller_ip}:/opt/admin-openrc.sh ." 
&> /dev/null sshpass -p root scp 2>/dev/null $ssh_options root@${installer_ip}:~/admin-openrc.sh $dest_path &> /dev/null @@ -194,7 +206,7 @@ elif [ "$installer_type" == "compass" ]; then elif [ "$installer_type" == "joid" ]; then # do nothing...for the moment # we can either do a scp from the jumphost or use the -v option to transmit the param to the docker file - echo "Do nothing, creds will be provided through volume option at docker creation for joid" + info "Do nothing, creds will be provided through volume option at docker creation for joid" elif [ "$installer_type" == "foreman" ]; then #ip_foreman="172.30.10.73" diff --git a/utils/slave-monitor-0.1.sh b/utils/slave-monitor-0.1.sh new file mode 100644 index 000000000..161aaef21 --- /dev/null +++ b/utils/slave-monitor-0.1.sh @@ -0,0 +1,98 @@ +#!/bin/bash +# SPDX-license-identifier: Apache-2.0 +############################################################################## +# Copyright (c) 2016 Linux Foundation and others. +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## + +#This will put a bunch of files in the pwd. you have been warned. +#Counts how long slaves have been online or offline + + +#Yes I know about jq +curlcommand() { +curl -s "https://build.opnfv.org/ci/computer/api/json?tree=computer\[displayName,offline\]" \ + | awk -v k=":" '{n=split($0,a,","); for (i=1; i<=n; i++) print a[i]}' \ + | grep -v "_class" \ + | awk 'NR%2{printf "%s ",$0;next;}1' \ + | awk -F":" '{print $2,$3}' \ + | awk '{print $1,$3}' \ + | sed s,\},,g \ + | sed s,],,g \ + | sed s,\",,g +} + +if [ -f podoutput-current ]; then + cp podoutput-current podoutput-lastiteration +fi + +curlcommand > podoutput-current + +declare -A slavescurrent slaveslastiteration + +while read -r name status ; do + slavescurrent["$name"]="$status" +done < <(cat podoutput-current) + +while read -r name status ; do + slaveslastiteration["$name"]=$status +done < <(cat podoutput-lastiteration) + +main () { +for slavename in "${!slavescurrent[@]}"; do + #Slave is online. Mark it down. + if [ "${slavescurrent[$slavename]}" == "false" ]; then + + if [ -f "$slavename"-offline ]; then + echo "removing offline status from $slavename slave was offline for $(cat "$slavename"-offline ) iterations" + rm "$slavename"-offline + fi + + if ! [ -f "$slavename"-online ]; then + echo "1" > "$slavename"-online + elif [ -f "$slavename"-online ]; then + #read and increment slavename + read -r -d $'\x04' var < "$slavename"-online + ((var++)) + echo -n "ONLINE $slavename " + echo "for $var iterations" + echo "$var" > "$slavename"-online + fi + fi + + #went offline since last iteration. + if [ "${slavescurrent[$slavename]}" == "false" ] && [ "${slaveslastiteration[$slavename]}" == "true" ]; then + echo "JUST WENT OFFLINE $slavename " + if [ -f "$slavename"-online ]; then + echo "removing online status from $slavename. slave was online for $(cat "$slavename"-online ) iterations" + rm "$slavename"-online + fi + + fi + + #slave is offline + if [ "${slavescurrent[$slavename]}" == "true" ]; then + if ! 
[ -f "$slavename"-offline ]; then + echo "1" > "$slavename"-offline + fi + + if [ -f "$slavename"-offline ]; then + #read and increment slavename + read -r -d $'\x04' var < "$slavename"-offline + ((var++)) + echo "$var" > "$slavename"-offline + if [ "$var" -gt "30" ]; then + echo "OFFLINE FOR $var ITERATIONS REMOVE $slavename " + else + echo "OFFLINE $slavename FOR $var ITERATIONS " + fi + fi + fi + +done +} + +main diff --git a/utils/test/reporting/docker/reporting.sh b/utils/test/reporting/docker/reporting.sh index 076dc4719..6cc7a7c9e 100755 --- a/utils/test/reporting/docker/reporting.sh +++ b/utils/test/reporting/docker/reporting.sh @@ -4,7 +4,7 @@ export PYTHONPATH="${PYTHONPATH}:./reporting" export CONFIG_REPORTING_YAML=./reporting/reporting.yaml declare -a versions=(danube master) -declare -a projects=(functest storperf yardstick qtip) +declare -a projects=(functest storperf yardstick qtip vsperf bottlenecks) project=$1 reporting_type=$2 @@ -32,6 +32,7 @@ cp -Rf js display # yardstick | status # storperf | status # qtip | status +# vsperf | status function report_project() { diff --git a/utils/test/reporting/reporting/bottlenecks/__init__.py b/utils/test/reporting/reporting/bottlenecks/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/utils/test/reporting/reporting/bottlenecks/__init__.py diff --git a/utils/test/reporting/reporting/bottlenecks/reporting-status.py b/utils/test/reporting/reporting/bottlenecks/reporting-status.py new file mode 100644 index 000000000..8966d0690 --- /dev/null +++ b/utils/test/reporting/reporting/bottlenecks/reporting-status.py @@ -0,0 +1,145 @@ +#!/usr/bin/python +# +# This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +import datetime +import os + +import jinja2 + +import reporting.utils.reporting_utils as rp_utils +import reporting.utils.scenarioResult as sr + +INSTALLERS = rp_utils.get_config('general.installers') +VERSIONS = rp_utils.get_config('general.versions') +PERIOD = rp_utils.get_config('general.period') + +# Logger +LOGGER = rp_utils.getLogger("Bottlenecks-Status") +reportingDate = datetime.datetime.now().strftime("%Y-%m-%d %H:%M") + +LOGGER.info("*******************************************") +LOGGER.info("* Generating reporting scenario status *") +LOGGER.info("* Data retention = %s days *", PERIOD) +LOGGER.info("* *") +LOGGER.info("*******************************************") + +# retrieve the list of bottlenecks tests +BOTTLENECKS_TESTS = rp_utils.get_config('bottlenecks.test_list') +LOGGER.info("Bottlenecks tests: %s", BOTTLENECKS_TESTS) + +# For all the versions +for version in VERSIONS: + # For all the installers + for installer in INSTALLERS: + # get scenarios results data + scenario_results = rp_utils.getScenarios("bottlenecks", + "posca_factor_ping", + installer, + version) + LOGGER.info("scenario_results: %s", scenario_results) + + scenario_stats = rp_utils.getScenarioStats(scenario_results) + LOGGER.info("scenario_stats: %s", scenario_stats) + items = {} + scenario_result_criteria = {} + + # From each scenarios get results list + for s, s_result in scenario_results.items(): + LOGGER.info("---------------------------------") + LOGGER.info("installer %s, version %s, scenario %s", installer, + version, s) + ten_criteria = len(s_result) + + ten_score = 0 + for v in s_result: + if "PASS" in v['criteria']: + ten_score += 1 + + 
LOGGER.info("ten_score: %s / %s", (ten_score, ten_criteria)) + + four_score = 0 + try: + LASTEST_TESTS = rp_utils.get_config( + 'general.nb_iteration_tests_success_criteria') + s_result.sort(key=lambda x: x['start_date']) + four_result = s_result[-LASTEST_TESTS:] + LOGGER.debug("four_result: {}".format(four_result)) + LOGGER.debug("LASTEST_TESTS: {}".format(LASTEST_TESTS)) + # logger.debug("four_result: {}".format(four_result)) + four_criteria = len(four_result) + for v in four_result: + if "PASS" in v['criteria']: + four_score += 1 + LOGGER.info("4 Score: %s / %s ", (four_score, + four_criteria)) + except Exception: + LOGGER.error("Impossible to retrieve the four_score") + + try: + s_status = (four_score * 100) / four_criteria + except Exception: + s_status = 0 + LOGGER.info("Score percent = %s", str(s_status)) + s_four_score = str(four_score) + '/' + str(four_criteria) + s_ten_score = str(ten_score) + '/' + str(ten_criteria) + s_score_percent = str(s_status) + + LOGGER.debug(" s_status: %s", s_status) + if s_status == 100: + LOGGER.info(">>>>> scenario OK, save the information") + else: + LOGGER.info(">>>> scenario not OK, last 4 iterations = %s, \ + last 10 days = %s", (s_four_score, s_ten_score)) + + s_url = "" + if len(s_result) > 0: + build_tag = s_result[len(s_result)-1]['build_tag'] + LOGGER.debug("Build tag: %s", build_tag) + s_url = s_url = rp_utils.getJenkinsUrl(build_tag) + LOGGER.info("last jenkins url: %s", s_url) + + # Save daily results in a file + path_validation_file = ("./display/" + version + + "/bottlenecks/scenario_history.txt") + + if not os.path.exists(path_validation_file): + with open(path_validation_file, 'w') as f: + info = 'date,scenario,installer,details,score\n' + f.write(info) + + with open(path_validation_file, "a") as f: + info = (reportingDate + "," + s + "," + installer + + "," + s_ten_score + "," + + str(s_score_percent) + "\n") + f.write(info) + + scenario_result_criteria[s] = sr.ScenarioResult(s_status, + s_four_score, + s_ten_score, + s_score_percent, + s_url) + + LOGGER.info("--------------------------") + + templateLoader = jinja2.FileSystemLoader(".") + templateEnv = jinja2.Environment(loader=templateLoader, + autoescape=True) + + TEMPLATE_FILE = ("./reporting/bottlenecks/template" + "/index-status-tmpl.html") + template = templateEnv.get_template(TEMPLATE_FILE) + + outputText = template.render(scenario_results=scenario_result_criteria, + installer=installer, + period=PERIOD, + version=version, + date=reportingDate) + + with open("./display/" + version + + "/bottlenecks/status-" + installer + ".html", "wb") as fh: + fh.write(outputText) diff --git a/utils/test/reporting/reporting/bottlenecks/template/index-status-tmpl.html b/utils/test/reporting/reporting/bottlenecks/template/index-status-tmpl.html new file mode 100644 index 000000000..c4497ac1b --- /dev/null +++ b/utils/test/reporting/reporting/bottlenecks/template/index-status-tmpl.html @@ -0,0 +1,114 @@ + <html> + <head> + <meta charset="utf-8"> + <!-- Bootstrap core CSS --> + <link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css" rel="stylesheet"> + <link href="../../css/default.css" rel="stylesheet"> + <script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js"></script> + <script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script> + <script type="text/javascript" src="http://d3js.org/d3.v2.min.js"></script> + <script type="text/javascript" src="../../js/gauge.js"></script> 
+ <script type="text/javascript" src="../../js/trend.js"></script> + <script> + function onDocumentReady() { + // Gauge management + {% for scenario in scenario_results.keys() -%} + var gaugeScenario{{loop.index}} = gauge('#gaugeScenario{{loop.index}}'); + {%- endfor %} + // assign success rate to the gauge + function updateReadings() { + {% for scenario in scenario_results.keys() -%} + gaugeScenario{{loop.index}}.update({{scenario_results[scenario].getScorePercent()}}); + {%- endfor %} + } + updateReadings(); + } + + // trend line management + d3.csv("./scenario_history.txt", function(data) { + // *************************************** + // Create the trend line + {% for scenario in scenario_results.keys() -%} + // for scenario {{scenario}} + // Filter results + var trend{{loop.index}} = data.filter(function(row) { + return row["scenario"]=="{{scenario}}" && row["installer"]=="{{installer}}"; + }) + // Parse the date + trend{{loop.index}}.forEach(function(d) { + d.date = parseDate(d.date); + d.score = +d.score + }); + // Draw the trend line + var mytrend = trend("#trend_svg{{loop.index}}",trend{{loop.index}}) + // **************************************** + {%- endfor %} + }); + if ( !window.isLoaded ) { + window.addEventListener("load", function() { + onDocumentReady(); + }, false); + } else { + onDocumentReady(); + } + </script> + <script type="text/javascript"> + $(document).ready(function (){ + $(".btn-more").click(function() { + $(this).hide(); + $(this).parent().find(".panel-default").show(); + }); + }) + </script> + </head> + <body> + <div class="container"> + <div class="masthead"> + <h3 class="text-muted">Bottlenecks status page ({{version}}, {{date}})</h3> + <nav> + <ul class="nav nav-justified"> + <li class="active"><a href="http://testresults.opnfv.org/reporting/index.html">Home</a></li> + <li><a href="status-apex.html">Apex</a></li> + <li><a href="status-compass.html">Compass</a></li> + <li><a href="status-fuel.html">Fuel</a></li> + <li><a href="status-joid.html">Joid</a></li> + </ul> + </nav> + </div> +<div class="row"> + <div class="col-md-1"></div> + <div class="col-md-10"> + <div class="page-header"> + <h2>{{installer}}</h2> + </div> + <div><h1>Reported values represent the percentage of completed + + CI tests (posca_factor_ping) during the reporting period, where results + + were communicated to the Test Database.</h1></div> + <div class="scenario-overview"> + <div class="panel-heading"><h4><b>List of last scenarios ({{version}}) run over the last {{period}} days </b></h4></div> + <table class="table"> + <tr> + <th width="40%">Scenario</th> + <th width="20%">Status</th> + <th width="20%">Trend</th> + <th width="10%">Last 4 Iterations</th> + <th width="10%">Last 10 Days</th> + </tr> + {% for scenario,result in scenario_results.iteritems() -%} + <tr class="tr-ok"> + <td><a href="{{scenario_results[scenario].getLastUrl()}}">{{scenario}}</a></td> + <td><div id="gaugeScenario{{loop.index}}"></div></td> + <td><div id="trend_svg{{loop.index}}"></div></td> + <td>{{scenario_results[scenario].getFourDaysScore()}}</td> + <td>{{scenario_results[scenario].getTenDaysScore()}}</td> + </tr> + {%- endfor %} + </table> + </div> + + + </div> + <div class="col-md-1"></div> +</div> diff --git a/utils/test/reporting/reporting/functest/reporting-status.py b/utils/test/reporting/reporting/functest/reporting-status.py index c7c2051a3..02bf67d0e 100755 --- a/utils/test/reporting/reporting/functest/reporting-status.py +++ b/utils/test/reporting/reporting/functest/reporting-status.py @@ -7,18 +7,19 
@@ # http://www.apache.org/licenses/LICENSE-2.0 # import datetime -import jinja2 import os import sys import time +import jinja2 + import testCase as tc import scenarioResult as sr +import reporting.utils.reporting_utils as rp_utils -# manage conf -import utils.reporting_utils as rp_utils - -"""Functest reporting status""" +""" +Functest reporting status +""" # Logger logger = rp_utils.getLogger("Functest-Status") @@ -106,7 +107,8 @@ for version in versions: for installer in installers: # get scenarios - scenario_results = rp_utils.getScenarios(healthcheck, + scenario_results = rp_utils.getScenarios("functest", + "connection_check", installer, version) # get nb of supported architecture (x86, aarch64) @@ -219,7 +221,7 @@ for version in versions: logger.debug("No results found") items[s] = testCases2BeDisplayed - except: + except Exception: logger.error("Error: installer %s, version %s, scenario %s" % (installer, version, s)) logger.error("No data available: %s" % (sys.exc_info()[0])) @@ -279,13 +281,13 @@ for version in versions: template = templateEnv.get_template(TEMPLATE_FILE) outputText = template.render( - scenario_stats=scenario_stats, - scenario_results=scenario_result_criteria, - items=items, - installer=installer_display, - period=period, - version=version, - date=reportingDate) + scenario_stats=scenario_stats, + scenario_results=scenario_result_criteria, + items=items, + installer=installer_display, + period=period, + version=version, + date=reportingDate) with open("./display/" + version + "/functest/status-" + @@ -298,8 +300,6 @@ for version in versions: # Generate outputs for export # pdf - # TODO Change once web site updated...use the current one - # to test pdf production url_pdf = rp_utils.get_config('general.url') pdf_path = ("./display/" + version + "/functest/status-" + installer_display + ".html") diff --git a/utils/test/reporting/reporting/functest/reporting-tempest.py b/utils/test/reporting/reporting/functest/reporting-tempest.py index bc2885639..d78d9a19d 100755 --- a/utils/test/reporting/reporting/functest/reporting-tempest.py +++ b/utils/test/reporting/reporting/functest/reporting-tempest.py @@ -8,58 +8,57 @@ # http://www.apache.org/licenses/LICENSE-2.0 # SPDX-license-identifier: Apache-2.0 -from urllib2 import Request, urlopen, URLError from datetime import datetime import json -import jinja2 import os -# manage conf -import utils.reporting_utils as rp_utils +from urllib2 import Request, urlopen, URLError +import jinja2 + +import reporting.utils.reporting_utils as rp_utils -installers = rp_utils.get_config('general.installers') -items = ["tests", "Success rate", "duration"] +INSTALLERS = rp_utils.get_config('general.installers') +ITEMS = ["tests", "Success rate", "duration"] CURRENT_DIR = os.getcwd() PERIOD = rp_utils.get_config('general.period') -criteria_nb_test = 165 -criteria_duration = 1800 -criteria_success_rate = 90 +CRITERIA_NB_TEST = 100 +CRITERIA_DURATION = 1800 +CRITERIA_SUCCESS_RATE = 100 logger = rp_utils.getLogger("Tempest") logger.info("************************************************") logger.info("* Generating reporting Tempest_smoke_serial *") -logger.info("* Data retention = %s days *" % PERIOD) +logger.info("* Data retention = %s days *", PERIOD) logger.info("* *") logger.info("************************************************") logger.info("Success criteria:") -logger.info("nb tests executed > %s s " % criteria_nb_test) -logger.info("test duration < %s s " % criteria_duration) -logger.info("success rate > %s " % criteria_success_rate) 
+logger.info("nb tests executed > %s s ", CRITERIA_NB_TEST) +logger.info("test duration < %s s ", CRITERIA_DURATION) +logger.info("success rate > %s ", CRITERIA_SUCCESS_RATE) # For all the versions for version in rp_utils.get_config('general.versions'): - for installer in installers: + for installer in INSTALLERS: # we consider the Tempest results of the last PERIOD days url = ("http://" + rp_utils.get_config('testapi.url') + - "?case=tempest_smoke_serial") - request = Request(url + '&period=' + str(PERIOD) + - '&installer=' + installer + - '&version=' + version) - logger.info("Search tempest_smoke_serial results for installer %s" - " for version %s" - % (installer, version)) + "?case=tempest_smoke_serial&period=" + str(PERIOD) + + "&installer=" + installer + "&version=" + version) + request = Request(url) + logger.info(("Search tempest_smoke_serial results for installer %s" + " for version %s"), installer, version) try: response = urlopen(request) k = response.read() results = json.loads(k) - except URLError as e: - logger.error("Error code: %s" % e) - + except URLError as err: + logger.error("Error code: %s", err) + logger.debug("request sent: %s", url) + logger.debug("Results from API: %s", results) test_results = results['results'] - + logger.debug("Test results: %s", test_results) scenario_results = {} criteria = {} errors = {} @@ -72,27 +71,37 @@ for version in rp_utils.get_config('general.versions'): scenario_results[r['scenario']] = [] scenario_results[r['scenario']].append(r) + logger.debug("Scenario results: %s", scenario_results) + for s, s_result in scenario_results.items(): scenario_results[s] = s_result[0:5] # For each scenario, we build a result object to deal with # results, criteria and error handling for result in scenario_results[s]: result["start_date"] = result["start_date"].split(".")[0] + logger.debug("start_date= %s", result["start_date"]) # retrieve results # **************** nb_tests_run = result['details']['tests'] nb_tests_failed = result['details']['failures'] - if nb_tests_run != 0: - success_rate = 100 * ((int(nb_tests_run) - + logger.debug("nb_tests_run= %s", nb_tests_run) + logger.debug("nb_tests_failed= %s", nb_tests_failed) + + try: + success_rate = (100 * (int(nb_tests_run) - int(nb_tests_failed)) / - int(nb_tests_run)) - else: + int(nb_tests_run)) + except ZeroDivisionError: success_rate = 0 result['details']["tests"] = nb_tests_run result['details']["Success rate"] = str(success_rate) + "%" + logger.info("nb_tests_run= %s", result['details']["tests"]) + logger.info("test rate = %s", + result['details']["Success rate"]) + # Criteria management # ******************* crit_tests = False @@ -100,11 +109,11 @@ for version in rp_utils.get_config('general.versions'): crit_time = False # Expect that at least 165 tests are run - if nb_tests_run >= criteria_nb_test: + if nb_tests_run >= CRITERIA_NB_TEST: crit_tests = True # Expect that at least 90% of success - if success_rate >= criteria_success_rate: + if success_rate >= CRITERIA_SUCCESS_RATE: crit_rate = True # Expect that the suite duration is inferior to 30m @@ -114,28 +123,27 @@ for version in rp_utils.get_config('general.versions'): '%Y-%m-%d %H:%M:%S') delta = stop_date - start_date - if (delta.total_seconds() < criteria_duration): + + if delta.total_seconds() < CRITERIA_DURATION: crit_time = True result['criteria'] = {'tests': crit_tests, 'Success rate': crit_rate, 'duration': crit_time} try: - logger.debug("Scenario %s, Installer %s" - % (s_result[1]['scenario'], installer)) - logger.debug("Nb Test run: 
%s" % nb_tests_run) - logger.debug("Test duration: %s" - % result['details']['duration']) - logger.debug("Success rate: %s" % success_rate) - except: + logger.debug("Nb Test run: %s", nb_tests_run) + logger.debug("Test duration: %s", delta) + logger.debug("Success rate: %s", success_rate) + except Exception: # pylint: disable=broad-except logger.error("Data format error") # Error management # **************** try: errors = result['details']['errors'] - result['errors'] = errors.replace('{0}', '') - except: + logger.info("errors: %s", errors) + result['errors'] = errors + except Exception: # pylint: disable=broad-except logger.error("Error field not present (Brahamputra runs?)") templateLoader = jinja2.FileSystemLoader(".") @@ -146,7 +154,7 @@ for version in rp_utils.get_config('general.versions'): template = templateEnv.get_template(TEMPLATE_FILE) outputText = template.render(scenario_results=scenario_results, - items=items, + items=ITEMS, installer=installer) with open("./display/" + version + diff --git a/utils/test/reporting/reporting/qtip/reporting-status.py b/utils/test/reporting/reporting/qtip/reporting-status.py index f0127b50f..56f9e0aee 100644 --- a/utils/test/reporting/reporting/qtip/reporting-status.py +++ b/utils/test/reporting/reporting/qtip/reporting-status.py @@ -33,8 +33,7 @@ def prepare_profile_file(version): if not os.path.exists(profile_dir): os.makedirs(profile_dir) - profile_file = "{}/{}/scenario_history.txt".format(profile_dir, - version) + profile_file = "{}/scenario_history.txt".format(profile_dir) if not os.path.exists(profile_file): with open(profile_file, 'w') as f: info = 'date,scenario,installer,details,score\n' diff --git a/utils/test/reporting/reporting/qtip/template/index-status-tmpl.html b/utils/test/reporting/reporting/qtip/template/index-status-tmpl.html index 26da36ceb..92f3395dc 100644 --- a/utils/test/reporting/reporting/qtip/template/index-status-tmpl.html +++ b/utils/test/reporting/reporting/qtip/template/index-status-tmpl.html @@ -46,10 +46,11 @@ <nav> <ul class="nav nav-justified"> <li class="active"><a href="http://testresults.opnfv.org/reporting/index.html">Home</a></li> - <li><a href="index-status-apex.html">Apex</a></li> - <li><a href="index-status-compass.html">Compass</a></li> - <li><a href="index-status-fuel.html">Fuel</a></li> - <li><a href="index-status-joid.html">Joid</a></li> + <li><a href="status-apex.html">Apex</a></li> + <li><a href="status-compass.html">Compass</a></li> + <li><a href="status-daisy.html">Daisy</a></li> + <li><a href="status-fuel.html">Fuel</a></li> + <li><a href="status-joid.html">Joid</a></li> </ul> </nav> </div> diff --git a/utils/test/reporting/reporting/reporting.yaml b/utils/test/reporting/reporting/reporting.yaml index 26feb31d3..9bb90b806 100644 --- a/utils/test/reporting/reporting/reporting.yaml +++ b/utils/test/reporting/reporting/reporting.yaml @@ -64,6 +64,10 @@ qtip: log_level: ERROR period: 1 -bottleneck: +bottlenecks: + test_list: + - posca_factor_ping + - posca_factor_system_bandwidth + log_level: ERROR vsperf: diff --git a/utils/test/reporting/reporting/storperf/reporting-status.py b/utils/test/reporting/reporting/storperf/reporting-status.py index 0c188a338..103b80fd9 100644 --- a/utils/test/reporting/reporting/storperf/reporting-status.py +++ b/utils/test/reporting/reporting/storperf/reporting-status.py @@ -7,13 +7,12 @@ # http://www.apache.org/licenses/LICENSE-2.0 # import datetime -import jinja2 import os -# manage conf -import utils.reporting_utils as rp_utils +import jinja2 -import 
utils.scenarioResult as sr +import reporting.utils.reporting_utils as rp_utils +import reporting.utils.scenarioResult as sr installers = rp_utils.get_config('general.installers') versions = rp_utils.get_config('general.versions') @@ -39,7 +38,8 @@ for version in versions: for installer in installers: # get scenarios results data # for the moment we consider only 1 case snia_steady_state - scenario_results = rp_utils.getScenarios("snia_steady_state", + scenario_results = rp_utils.getScenarios("storperf", + "snia_steady_state", installer, version) # logger.info("scenario_results: %s" % scenario_results) diff --git a/utils/test/reporting/reporting/utils/reporting_utils.py b/utils/test/reporting/reporting/utils/reporting_utils.py index 62820914a..235bd6ef9 100644 --- a/utils/test/reporting/reporting/utils/reporting_utils.py +++ b/utils/test/reporting/reporting/utils/reporting_utils.py @@ -20,15 +20,15 @@ import yaml # YAML UTILS # # ----------------------------------------------------------- -def get_parameter_from_yaml(parameter, file): +def get_parameter_from_yaml(parameter, config_file): """ Returns the value of a given parameter in file.yaml parameter must be given in string format with dots Example: general.openstack.image_name """ - with open(file) as f: - file_yaml = yaml.safe_load(f) - f.close() + with open(config_file) as my_file: + file_yaml = yaml.safe_load(my_file) + my_file.close() value = file_yaml for element in parameter.split("."): value = value.get(element) @@ -39,6 +39,9 @@ def get_parameter_from_yaml(parameter, file): def get_config(parameter): + """ + Get configuration parameter from yaml configuration file + """ yaml_ = os.environ["CONFIG_REPORTING_YAML"] return get_parameter_from_yaml(parameter, yaml_) @@ -49,20 +52,23 @@ def get_config(parameter): # # ----------------------------------------------------------- def getLogger(module): - logFormatter = logging.Formatter("%(asctime)s [" + - module + - "] [%(levelname)-5.5s] %(message)s") + """ + Get Logger + """ + log_formatter = logging.Formatter("%(asctime)s [" + + module + + "] [%(levelname)-5.5s] %(message)s") logger = logging.getLogger() log_file = get_config('general.log.log_file') log_level = get_config('general.log.log_level') - fileHandler = logging.FileHandler("{0}/{1}".format('.', log_file)) - fileHandler.setFormatter(logFormatter) - logger.addHandler(fileHandler) + file_handler = logging.FileHandler("{0}/{1}".format('.', log_file)) + file_handler.setFormatter(log_formatter) + logger.addHandler(file_handler) - consoleHandler = logging.StreamHandler() - consoleHandler.setFormatter(logFormatter) - logger.addHandler(consoleHandler) + console_handler = logging.StreamHandler() + console_handler.setFormatter(log_formatter) + logger.addHandler(console_handler) logger.setLevel(log_level) return logger @@ -73,6 +79,9 @@ def getLogger(module): # # ----------------------------------------------------------- def getApiResults(case, installer, scenario, version): + """ + Get Results by calling the API + """ results = json.dumps([]) # to remove proxy (to be removed at the end for local test only) # proxy_handler = urllib2.ProxyHandler({}) @@ -94,29 +103,32 @@ def getApiResults(case, installer, scenario, version): response = urlopen(request) k = response.read() results = json.loads(k) - except URLError as e: - print 'No kittez. 
Got an error code:'.format(e) + except URLError: + print "Error when retrieving results form API" return results -def getScenarios(case, installer, version): - - try: - case = case.getName() - except: - # if case is not an object test case, try the string - if type(case) == str: - case = case - else: - raise ValueError("Case cannot be evaluated") +def getScenarios(project, case, installer, version): + """ + Get the list of Scenarios + """ period = get_config('general.period') url_base = get_config('testapi.url') - url = ("http://" + url_base + "?case=" + case + - "&period=" + str(period) + "&installer=" + installer + - "&version=" + version) + url = ("http://" + url_base + + "?installer=" + installer + + "&period=" + str(period)) + + if version is not None: + url += "&version=" + version + + if project is not None: + url += "&project=" + project + + if case is not None: + url += "&case=" + case try: request = Request(url) @@ -136,7 +148,7 @@ def getScenarios(case, installer, version): results = json.loads(k) test_results += results['results'] except KeyError: - print ('No pagination detected') + print "No pagination detected" except URLError as err: print 'Got an error code: {}'.format(err) @@ -144,32 +156,38 @@ def getScenarios(case, installer, version): test_results.reverse() scenario_results = {} - for r in test_results: + for my_result in test_results: # Retrieve all the scenarios per installer - if not r['scenario'] in scenario_results.keys(): - scenario_results[r['scenario']] = [] + if not my_result['scenario'] in scenario_results.keys(): + scenario_results[my_result['scenario']] = [] # Do we consider results from virtual pods ... # Do we consider results for non HA scenarios... exclude_virtual_pod = get_config('functest.exclude_virtual') exclude_noha = get_config('functest.exclude_noha') - if ((exclude_virtual_pod and "virtual" in r['pod_name']) or - (exclude_noha and "noha" in r['scenario'])): + if ((exclude_virtual_pod and "virtual" in my_result['pod_name']) or + (exclude_noha and "noha" in my_result['scenario'])): print "exclude virtual pod results..." 
else: - scenario_results[r['scenario']].append(r) + scenario_results[my_result['scenario']].append(my_result) return scenario_results def getScenarioStats(scenario_results): + """ + Get the number of occurence of scenarios over the defined PERIOD + """ scenario_stats = {} - for k, v in scenario_results.iteritems(): - scenario_stats[k] = len(v) - + for res_k, res_v in scenario_results.iteritems(): + scenario_stats[res_k] = len(res_v) return scenario_stats def getScenarioStatus(installer, version): + """ + Get the status of a scenariofor Yardstick + they used criteria SUCCESS (default: PASS) + """ period = get_config('general.period') url_base = get_config('testapi.url') @@ -184,33 +202,37 @@ def getScenarioStatus(installer, version): response.close() results = json.loads(k) test_results = results['results'] - except URLError as e: - print 'Got an error code: {}'.format(e) + except URLError: + print "GetScenarioStatus: error when calling the API" scenario_results = {} result_dict = {} if test_results is not None: - for r in test_results: - if r['stop_date'] != 'None' and r['criteria'] is not None: - if not r['scenario'] in scenario_results.keys(): - scenario_results[r['scenario']] = [] - scenario_results[r['scenario']].append(r) - - for k, v in scenario_results.items(): + for test_r in test_results: + if (test_r['stop_date'] != 'None' and + test_r['criteria'] is not None): + if not test_r['scenario'] in scenario_results.keys(): + scenario_results[test_r['scenario']] = [] + scenario_results[test_r['scenario']].append(test_r) + + for scen_k, scen_v in scenario_results.items(): # scenario_results[k] = v[:LASTEST_TESTS] s_list = [] - for element in v: + for element in scen_v: if element['criteria'] == 'SUCCESS': s_list.append(1) else: s_list.append(0) - result_dict[k] = s_list + result_dict[scen_k] = s_list # return scenario_results return result_dict def getQtipResults(version, installer): + """ + Get QTIP results + """ period = get_config('qtip.period') url_base = get_config('testapi.url') @@ -240,19 +262,24 @@ def getQtipResults(version, installer): def getNbtestOk(results): + """ + based on default value (PASS) count the number of test OK + """ nb_test_ok = 0 - for r in results: - for k, v in r.iteritems(): + for my_result in results: + for res_k, res_v in my_result.iteritems(): try: - if "PASS" in v: + if "PASS" in res_v: nb_test_ok += 1 - except: + except Exception: print "Cannot retrieve test status" return nb_test_ok def getResult(testCase, installer, scenario, version): - + """ + Get Result for a given Functest Testcase + """ # retrieve raw results results = getApiResults(testCase, installer, scenario, version) # let's concentrate on test results only @@ -269,10 +296,10 @@ def getResult(testCase, installer, scenario, version): # print " ---------------- " # print "nb of results:" + str(len(test_results)) - for r in test_results: + for res_r in test_results: # print r["start_date"] # print r["criteria"] - scenario_results.append({r["start_date"]: r["criteria"]}) + scenario_results.append({res_r["start_date"]: res_r["criteria"]}) # sort results scenario_results.sort() # 4 levels for the results @@ -295,7 +322,7 @@ def getResult(testCase, installer, scenario, version): test_result_indicator = 1 else: # Test the last 4 run - if (len(scenario_results) > 3): + if len(scenario_results) > 3: last4runResults = scenario_results[-4:] nbTestOkLast4 = getNbtestOk(last4runResults) # print "Nb test OK (last 4 run):"+ str(nbTestOkLast4) @@ -309,19 +336,22 @@ def getResult(testCase, installer, scenario, 
version): def getJenkinsUrl(build_tag): - # e.g. jenkins-functest-apex-apex-daily-colorado-daily-colorado-246 - # id = 246 - # jenkins-functest-compass-huawei-pod5-daily-master-136 - # id = 136 - # note it is linked to jenkins format - # if this format changes...function to be adapted.... + """ + Get Jenkins url_base corespoding to the last test CI run + e.g. jenkins-functest-apex-apex-daily-colorado-daily-colorado-246 + id = 246 + jenkins-functest-compass-huawei-pod5-daily-master-136 + id = 136 + note it is linked to jenkins format + if this format changes...function to be adapted.... + """ url_base = get_config('functest.jenkins_url') try: build_id = [int(s) for s in build_tag.split("-") if s.isdigit()] url_id = (build_tag[8:-(len(str(build_id[0])) + 1)] + "/" + str(build_id[0])) jenkins_url = url_base + url_id + "/console" - except: + except Exception: print 'Impossible to get jenkins url:' if "jenkins-" not in build_tag: @@ -331,10 +361,13 @@ def getJenkinsUrl(build_tag): def getScenarioPercent(scenario_score, scenario_criteria): + """ + Get success rate of the scenario (in %) + """ score = 0.0 try: score = float(scenario_score) / float(scenario_criteria) * 100 - except: + except Exception: print 'Impossible to calculate the percentage score' return score @@ -343,32 +376,41 @@ def getScenarioPercent(scenario_score, scenario_criteria): # Functest # ********* def getFunctestConfig(version=""): + """ + Get Functest configuration + """ config_file = get_config('functest.test_conf') + version response = requests.get(config_file) return yaml.safe_load(response.text) def getArchitectures(scenario_results): + """ + Get software architecture (x86 or Aarch64) + """ supported_arch = ['x86'] - if (len(scenario_results) > 0): + if len(scenario_results) > 0: for scenario_result in scenario_results.values(): for value in scenario_result: - if ("armband" in value['build_tag']): + if "armband" in value['build_tag']: supported_arch.append('aarch64') return supported_arch return supported_arch def filterArchitecture(results, architecture): + """ + Restrict the list of results based on given architecture + """ filtered_results = {} - for name, results in results.items(): + for name, res in results.items(): filtered_values = [] - for value in results: - if (architecture is "x86"): + for value in res: + if architecture is "x86": # drop aarch64 results if ("armband" not in value['build_tag']): filtered_values.append(value) - elif(architecture is "aarch64"): + elif architecture is "aarch64": # drop x86 results if ("armband" in value['build_tag']): filtered_values.append(value) @@ -381,6 +423,9 @@ def filterArchitecture(results, architecture): # Yardstick # ********* def subfind(given_list, pattern_list): + """ + Yardstick util function + """ LASTEST_TESTS = get_config('general.nb_iteration_tests_success_criteria') for i in range(len(given_list)): if given_list[i] == pattern_list[0] and \ @@ -390,7 +435,9 @@ def subfind(given_list, pattern_list): def _get_percent(status): - + """ + Yardstick util function to calculate success rate + """ if status * 100 % 6: return round(float(status) * 100 / 6, 1) else: @@ -398,13 +445,16 @@ def _get_percent(status): def get_percent(four_list, ten_list): + """ + Yardstick util function to calculate success rate + """ four_score = 0 ten_score = 0 - for v in four_list: - four_score += v - for v in ten_list: - ten_score += v + for res_v in four_list: + four_score += res_v + for res_v in ten_list: + ten_score += res_v LASTEST_TESTS = 
get_config('general.nb_iteration_tests_success_criteria') if four_score == LASTEST_TESTS: @@ -420,9 +470,12 @@ def get_percent(four_list, ten_list): def _test(): + """ + Yardstick util function (test) + """ status = getScenarioStatus("compass", "master") print "status:++++++++++++++++++++++++" - print(json.dumps(status, indent=4)) + print json.dumps(status, indent=4) # ---------------------------------------------------------- @@ -432,8 +485,9 @@ def _test(): # ----------------------------------------------------------- def export_csv(scenario_file_name, installer, version): - # csv - # generate sub files based on scenario_history.txt + """ + Generate sub files based on scenario_history.txt + """ scenario_installer_file_name = ("./display/" + version + "/functest/scenario_history_" + installer + ".csv") @@ -443,21 +497,25 @@ def export_csv(scenario_file_name, installer, version): for line in scenario_file: if installer in line: scenario_installer_file.write(line) - scenario_installer_file.close + scenario_installer_file.close def generate_csv(scenario_file): + """ + Generate sub files based on scenario_history.txt + """ import shutil - # csv - # generate sub files based on scenario_history.txt csv_file = scenario_file.replace('txt', 'csv') shutil.copy2(scenario_file, csv_file) def export_pdf(pdf_path, pdf_doc_name): + """ + Export results to pdf + """ try: pdfkit.from_file(pdf_path, pdf_doc_name) except IOError: print "Error but pdf generated anyway..." - except: + except Exception: print "impossible to generate PDF" diff --git a/utils/test/reporting/reporting/vsperf/__init__.py b/utils/test/reporting/reporting/vsperf/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/utils/test/reporting/reporting/vsperf/__init__.py diff --git a/utils/test/reporting/reporting/vsperf/reporting-status.py b/utils/test/reporting/reporting/vsperf/reporting-status.py new file mode 100644 index 000000000..fc4cc677d --- /dev/null +++ b/utils/test/reporting/reporting/vsperf/reporting-status.py @@ -0,0 +1,138 @@ +#!/usr/bin/python +# +# This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +import datetime +import os + +import jinja2 + +import reporting.utils.reporting_utils as rp_utils +import reporting.utils.scenarioResult as sr + +installers = rp_utils.get_config('general.installers') +PERIOD = rp_utils.get_config('general.period') + +# Logger +logger = rp_utils.getLogger("Storperf-Status") +reportingDate = datetime.datetime.now().strftime("%Y-%m-%d %H:%M") + +logger.info("*******************************************") +logger.info("* Generating reporting scenario status *") +logger.info("* Data retention = %s days *" % PERIOD) +logger.info("* *") +logger.info("*******************************************") + +# retrieve the list of storperf tests +versions = {'master'} + +# For all the versions +for version in versions: + # For all the installers + for installer in installers: + scenario_results = rp_utils.getScenarios("vsperf", + None, + installer, + None) + items = {} + scenario_result_criteria = {} + logger.info("installer %s, version %s, scenario ", installer, version) + + # From each scenarios get results list + for s, s_result in scenario_results.items(): + logger.info("---------------------------------") + logger.info("installer %s, version %s, scenario %s", installer, + version, s) + ten_criteria = 
len(s_result) + + ten_score = 0 + for v in s_result: + if "PASS" in v['criteria']: + ten_score += 1 + + logger.info("ten_score: %s / %s" % (ten_score, ten_criteria)) + + four_score = 0 + try: + LASTEST_TESTS = rp_utils.get_config( + 'general.nb_iteration_tests_success_criteria') + s_result.sort(key=lambda x: x['start_date']) + four_result = s_result[-LASTEST_TESTS:] + logger.debug("four_result: {}".format(four_result)) + logger.debug("LASTEST_TESTS: {}".format(LASTEST_TESTS)) + # logger.debug("four_result: {}".format(four_result)) + four_criteria = len(four_result) + for v in four_result: + if "PASS" in v['criteria']: + four_score += 1 + logger.info("4 Score: %s / %s " % (four_score, + four_criteria)) + except Exception: + logger.error("Impossible to retrieve the four_score") + + try: + s_status = (four_score * 100) / four_criteria + except ZeroDivisionError: + s_status = 0 + logger.info("Score percent = %s" % str(s_status)) + s_four_score = str(four_score) + '/' + str(four_criteria) + s_ten_score = str(ten_score) + '/' + str(ten_criteria) + s_score_percent = str(s_status) + + logger.debug(" s_status: {}".format(s_status)) + if s_status == 100: + logger.info(">>>>> scenario OK, save the information") + else: + logger.info(">>>> scenario not OK, last 4 iterations = %s, \ + last 10 days = %s" % (s_four_score, s_ten_score)) + + s_url = "" + if len(s_result) > 0: + build_tag = s_result[len(s_result)-1]['build_tag'] + logger.debug("Build tag: %s" % build_tag) + s_url = s_url = rp_utils.getJenkinsUrl(build_tag) + logger.info("last jenkins url: %s" % s_url) + + # Save daily results in a file + path_validation_file = ("./display/" + version + + "/vsperf/scenario_history.txt") + + if not os.path.exists(path_validation_file): + with open(path_validation_file, 'w') as f: + info = 'date,scenario,installer,details,score\n' + f.write(info) + + with open(path_validation_file, "a") as f: + info = (reportingDate + "," + s + "," + installer + + "," + s_ten_score + "," + + str(s_score_percent) + "\n") + f.write(info) + + scenario_result_criteria[s] = sr.ScenarioResult(s_status, + s_four_score, + s_ten_score, + s_score_percent, + s_url) + + logger.info("--------------------------") + + templateLoader = jinja2.FileSystemLoader(".") + templateEnv = jinja2.Environment(loader=templateLoader, + autoescape=True) + + TEMPLATE_FILE = "./reporting/vsperf/template/index-status-tmpl.html" + template = templateEnv.get_template(TEMPLATE_FILE) + + outputText = template.render(scenario_results=scenario_result_criteria, + installer=installer, + period=PERIOD, + version=version, + date=reportingDate) + + with open("./display/" + version + + "/vsperf/status-" + installer + ".html", "wb") as fh: + fh.write(outputText) diff --git a/utils/test/reporting/reporting/vsperf/template/index-status-tmpl.html b/utils/test/reporting/reporting/vsperf/template/index-status-tmpl.html new file mode 100644 index 000000000..7e06ef66b --- /dev/null +++ b/utils/test/reporting/reporting/vsperf/template/index-status-tmpl.html @@ -0,0 +1,114 @@ + <html> + <head> + <meta charset="utf-8"> + <!-- Bootstrap core CSS --> + <link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css" rel="stylesheet"> + <link href="../../css/default.css" rel="stylesheet"> + <script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js"></script> + <script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script> + <script type="text/javascript" 
src="http://d3js.org/d3.v2.min.js"></script> + <script type="text/javascript" src="../../js/gauge.js"></script> + <script type="text/javascript" src="../../js/trend.js"></script> + <script> + function onDocumentReady() { + // Gauge management + {% for scenario in scenario_results.keys() -%} + var gaugeScenario{{loop.index}} = gauge('#gaugeScenario{{loop.index}}'); + {%- endfor %} + // assign success rate to the gauge + function updateReadings() { + {% for scenario in scenario_results.keys() -%} + gaugeScenario{{loop.index}}.update({{scenario_results[scenario].getScorePercent()}}); + {%- endfor %} + } + updateReadings(); + } + + // trend line management + d3.csv("./scenario_history.txt", function(data) { + // *************************************** + // Create the trend line + {% for scenario in scenario_results.keys() -%} + // for scenario {{scenario}} + // Filter results + var trend{{loop.index}} = data.filter(function(row) { + return row["scenario"]=="{{scenario}}" && row["installer"]=="{{installer}}"; + }) + // Parse the date + trend{{loop.index}}.forEach(function(d) { + d.date = parseDate(d.date); + d.score = +d.score + }); + // Draw the trend line + var mytrend = trend("#trend_svg{{loop.index}}",trend{{loop.index}}) + // **************************************** + {%- endfor %} + }); + if ( !window.isLoaded ) { + window.addEventListener("load", function() { + onDocumentReady(); + }, false); + } else { + onDocumentReady(); + } + </script> + <script type="text/javascript"> + $(document).ready(function (){ + $(".btn-more").click(function() { + $(this).hide(); + $(this).parent().find(".panel-default").show(); + }); + }) + </script> + </head> + <body> + <div class="container"> + <div class="masthead"> + <h3 class="text-muted">Vsperf status page ({{version}}, {{date}})</h3> + <nav> + <ul class="nav nav-justified"> + <li class="active"><a href="http://testresults.opnfv.org/reporting/index.html">Home</a></li> + <li><a href="status-apex.html">Apex</a></li> + <li><a href="status-compass.html">Compass</a></li> + <li><a href="status-fuel.html">Fuel</a></li> + <li><a href="status-joid.html">Joid</a></li> + </ul> + </nav> + </div> +<div class="row"> + <div class="col-md-1"></div> + <div class="col-md-10"> + <div class="page-header"> + <h2>{{installer}}</h2> + </div> + <div><h1>Reported values represent the percentage of completed + + CI tests during the reporting period, where results + + were communicated to the Test Database.</h1></div> + <div class="scenario-overview"> + <div class="panel-heading"><h4><b>List of last scenarios ({{version}}) run over the last {{period}} days </b></h4></div> + <table class="table"> + <tr> + <th width="40%">Scenario</th> + <th width="20%">Status</th> + <th width="20%">Trend</th> + <th width="10%">Last 4 Iterations</th> + <th width="10%">Last 10 Days</th> + </tr> + {% for scenario,result in scenario_results.iteritems() -%} + <tr class="tr-ok"> + <td><a href="{{scenario_results[scenario].getLastUrl()}}">{{scenario}}</a></td> + <td><div id="gaugeScenario{{loop.index}}"></div></td> + <td><div id="trend_svg{{loop.index}}"></div></td> + <td>{{scenario_results[scenario].getFourDaysScore()}}</td> + <td>{{scenario_results[scenario].getTenDaysScore()}}</td> + </tr> + {%- endfor %} + </table> + </div> + + + </div> + <div class="col-md-1"></div> +</div> diff --git a/utils/test/reporting/reporting/yardstick/reporting-status.py b/utils/test/reporting/reporting/yardstick/reporting-status.py index 85c386bf1..6584f4e8d 100644 --- 
a/utils/test/reporting/reporting/yardstick/reporting-status.py +++ b/utils/test/reporting/reporting/yardstick/reporting-status.py @@ -7,14 +7,13 @@ # http://www.apache.org/licenses/LICENSE-2.0 # import datetime -import jinja2 import os -import utils.scenarioResult as sr -from scenarios import config as cf +import jinja2 -# manage conf -import utils.reporting_utils as rp_utils +import reporting.utils.scenarioResult as sr +import reporting.utils.reporting_utils as rp_utils +from scenarios import config as cf installers = rp_utils.get_config('general.installers') versions = rp_utils.get_config('general.versions') diff --git a/utils/test/testapi/.gitignore b/utils/test/testapi/.gitignore index c7b63b5b1..86ec0d2d5 100644 --- a/utils/test/testapi/.gitignore +++ b/utils/test/testapi/.gitignore @@ -1,4 +1,7 @@ AUTHORS ChangeLog setup.cfg-e - +opnfv_testapi/static +build +*.egg-info +3rd_party/static/static diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/pods.html b/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/pods.html index cdfcfaf36..e366670a9 100644 --- a/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/pods.html +++ b/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/pods.html @@ -1,8 +1,12 @@ -<h3>{{ctrl.pageHeader}}</h3> -<p>{{ctrl.pageParagraph}}</p> +<h3>Pods</h3> +<p>This page is used to create or query pods.<br> + Querying pods is open to everybody.<br> + But only login users are granted the privilege to create the new pod. +</p> + <div class="row" style="margin-bottom:24px;"></div> -<div class="pod-create"> +<div class="pod-create" ng-class="{ 'hidden': ! auth.isAuthenticated }"> <h4>Create</h4> <div class="row"> <div ng-repeat="require in ctrl.createRequirements"> @@ -63,7 +67,7 @@ </tbody> </table> </div> - +<br> <div ng-show="ctrl.showError" class="alert alert-danger" role="alert"> <span class="glyphicon glyphicon-exclamation-sign" aria-hidden="true"></span> <span class="sr-only">Error:</span> diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/podsController.js b/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/podsController.js index 53e8b1eff..894fcc152 100644 --- a/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/podsController.js +++ b/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/podsController.js @@ -20,7 +20,7 @@ .controller('PodsController', PodsController); PodsController.$inject = [ - '$scope', '$http', '$filter', '$state', 'testapiApiUrl','raiseAlert' + '$rootScope', '$scope', '$http', '$filter', '$state', 'testapiApiUrl','raiseAlert' ]; /** @@ -31,7 +31,6 @@ function PodsController($scope, $http, $filter, $state, testapiApiUrl, raiseAlert) { var ctrl = this; - ctrl.url = testapiApiUrl + '/pods'; ctrl.create = create; @@ -53,9 +52,6 @@ ctrl.mode = 'metal'; ctrl.details = ''; - ctrl.pageHeader = 'Pods'; - ctrl.pageParagraph = 'This page is used to create or query pods.'; - /** * This is called when the date filter calendar is opened. 
It * does some event handling, and sets a scope variable so the UI @@ -82,21 +78,27 @@ */ function create() { ctrl.showError = false; - var pods_url = ctrl.url; - var body = { - name: ctrl.name, - mode: ctrl.mode, - role: ctrl.role, - details: ctrl.details - }; - ctrl.podsRequest = - $http.post(pods_url, body).error(function (error) { - ctrl.showError = true; - ctrl.error = - 'Error creating the new pod from server: ' + - angular.toJson(error); - }); + if(ctrl.name != ""){ + var pods_url = ctrl.url; + var body = { + name: ctrl.name, + mode: ctrl.mode, + role: ctrl.role, + details: ctrl.details + }; + ctrl.podsRequest = + $http.post(pods_url, body).error(function (error) { + ctrl.showError = true; + ctrl.error = + 'Error creating the new pod from server: ' + + angular.toJson(error); + }); + } + else{ + ctrl.showError = true; + ctrl.error = 'Name is missing.' + } } /** diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/components/profile/profile.html b/utils/test/testapi/3rd_party/static/testapi-ui/components/profile/profile.html index dc97c41e2..763f5d120 100644 --- a/utils/test/testapi/3rd_party/static/testapi-ui/components/profile/profile.html +++ b/utils/test/testapi/3rd_party/static/testapi-ui/components/profile/profile.html @@ -3,9 +3,16 @@ <div> <table class="table table-striped table-hover"> <tbody> - <tr> <td>User name</td> <td>{{auth.currentUser.fullname}}</td> </tr> - <tr> <td>User OpenId</td> <td>{{auth.currentUser.openid}}</td> </tr> + <tr> <td>User</td> <td>{{auth.currentUser.user}}</td> </tr> + <tr> <td>Fullname</td> <td>{{auth.currentUser.fullname}}</td> </tr> <tr> <td>Email</td> <td>{{auth.currentUser.email}}</td> </tr> + <tr> <td>Groups</td> + <td> + <div ng-repeat="group in auth.currentUser.groups"> + {{group}}</br> + </div> + </td> + </tr> </tbody> </table> </div> diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/components/profile/profileController.js b/utils/test/testapi/3rd_party/static/testapi-ui/components/profile/profileController.js index 0660e19f6..5dbdf7b1a 100644 --- a/utils/test/testapi/3rd_party/static/testapi-ui/components/profile/profileController.js +++ b/utils/test/testapi/3rd_party/static/testapi-ui/components/profile/profileController.js @@ -26,7 +26,7 @@ * This is a provider for the user's uploaded public keys. */ function PubKeys($resource, testapiApiUrl) { - return $resource(testapiApiUrl + '/profile/pubkeys/:id', null, null); + return $resource(testapiApiUrl + '/user/pubkeys/:id', null, null); } angular diff --git a/utils/test/testapi/MANIFEST.in b/utils/test/testapi/MANIFEST.in new file mode 100644 index 000000000..0ba1808ba --- /dev/null +++ b/utils/test/testapi/MANIFEST.in @@ -0,0 +1 @@ +recursive-include 3rd_party
\ No newline at end of file
diff --git a/utils/test/testapi/docker/Dockerfile b/utils/test/testapi/docker/Dockerfile
index 5311f35b8..a46fce20a 100644
--- a/utils/test/testapi/docker/Dockerfile
+++ b/utils/test/testapi/docker/Dockerfile
@@ -47,5 +47,5 @@ RUN git clone https://gerrit.opnfv.org/gerrit/releng /home/releng

WORKDIR /home/releng/utils/test/testapi/
RUN pip install -r requirements.txt
-RUN bash install.sh
+RUN python setup.py install
CMD ["bash", "docker/start-server.sh"]
diff --git a/utils/test/testapi/docker/prepare-env.sh b/utils/test/testapi/docker/prepare-env.sh
index b14bc2448..92a0c9fd7 100755
--- a/utils/test/testapi/docker/prepare-env.sh
+++ b/utils/test/testapi/docker/prepare-env.sh
@@ -10,5 +10,5 @@ if [ "$base_url" != "" ]; then
    sudo crudini --set --existing $FILE api url $base_url/api/v1
    sudo crudini --set --existing $FILE ui url $base_url
    sudo echo "{\"testapiApiUrl\": \"$base_url/api/v1\"}" > \
-        /usr/local/lib/python2.7/dist-packages/opnfv_testapi/static/testapi-ui/config.json
+        /usr/local/share/opnfv_testapi/testapi-ui/config.json
fi
diff --git a/utils/test/testapi/etc/config.ini b/utils/test/testapi/etc/config.ini
index 1ec899fcb..8d0bde20b 100644
--- a/utils/test/testapi/etc/config.ini
+++ b/utils/test/testapi/etc/config.ini
@@ -21,48 +21,11 @@ authenticate = False
[ui]
url = http://localhost:8000

-[osid]
-
-# OpenStackID Auth Server URI. (string value)
-openstack_openid_endpoint = https://openstackid.org/accounts/openid2
-
-# OpenStackID logout URI. (string value)
-openid_logout_endpoint = https://openstackid.org/accounts/user/logout
-
-# Interaction mode. Specifies whether Openstack Id IdP may interact
-# with the user to determine the outcome of the request. (string
-# value)
-openid_mode = checkid_setup
-
-# Protocol version. Value identifying the OpenID protocol version
-# being used. This value should be "http://specs.openid.net/auth/2.0".
-# (string value)
-openid_ns = http://specs.openid.net/auth/2.0
-
-# Return endpoint in Refstack's API. Value indicating the endpoint
-# where the user should be returned to after signing in. Openstack Id
-# Idp only supports HTTPS address types. (string value)
-openid_return_to = v1/auth/signin_return
-
-# Claimed identifier. This value must be set to
-# "http://specs.openid.net/auth/2.0/identifier_select". or to user
-# claimed identity (user local identifier or user owned identity [ex:
-# custom html hosted on a owned domain set to html discover]). (string
-# value)
-openid_claimed_id = http://specs.openid.net/auth/2.0/identifier_select
-
-# Alternate identifier. This value must be set to
-# http://specs.openid.net/auth/2.0/identifier_select. (string value)
-openid_identity = http://specs.openid.net/auth/2.0/identifier_select
-
-# Indicates request for user attribute information. This value must be
-# set to "http://openid.net/extensions/sreg/1.1". (string value)
-openid_ns_sreg = http://openid.net/extensions/sreg/1.1
-
-# Comma-separated list of field names which, if absent from the
-# response, will prevent the Consumer from completing the registration
-# without End User interation. The field names are those that are
-# specified in the Response Format, with the "openid.sreg." prefix
-# removed. Valid values include: "country", "email", "firstname",
-# "language", "lastname" (string value)
-openid_sreg_required = email,fullname
+# this path should be the same as the data_files installation path
+static_path = /usr/local/share/opnfv_testapi
+
+[lfid]
+# Linux Foundation CAS URL
+cas_url = https://identity.linuxfoundation.org/cas/
+# Service URL used to authenticate to CAS
+signin_return = api/v1/auth/signin_return
diff --git a/utils/test/testapi/install.sh b/utils/test/testapi/install.sh
deleted file mode 100755
index d470e38c3..000000000
--- a/utils/test/testapi/install.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/bash
-
-usage="
-Script to install opnfv_tesgtapi automatically.
-This script should be run under root.
-
-usage:
-    bash $(basename "$0") [-h|--help] [-t <test_name>]
-
-where:
-    -h|--help      show this help text"
-
-# Ref :- https://openstack.nimeyo.com/87286/openstack-packaging-all-definition-data-files-config-setup
-if [ -z "$VIRTUAL_ENV" ];
-then
-    if [[ $(whoami) != "root" ]];
-    then
-        echo "Error: This script must be run as root!"
-        exit 1
-    fi
-else
-    sed -i -e 's#/etc/opnfv_testapi =#etc/opnfv_testapi =#g' setup.cfg
-fi
-
-cp -fr 3rd_party/static opnfv_testapi/static
-python setup.py install
-rm -fr opnfv_testapi/static
-if [ ! -z "$VIRTUAL_ENV" ]; then
-    sed -i -e 's#etc/opnfv_testapi =#/etc/opnfv_testapi =#g' setup.cfg
-fi
\ No newline at end of file
diff --git a/utils/test/testapi/opnfv_testapi/cmd/server.py b/utils/test/testapi/opnfv_testapi/cmd/server.py
index 50ac049a0..b7d3caa20 100644
--- a/utils/test/testapi/opnfv_testapi/cmd/server.py
+++ b/utils/test/testapi/opnfv_testapi/cmd/server.py
@@ -38,7 +38,7 @@ from opnfv_testapi.tornado_swagger import swagger

def make_app():
    swagger.docs(base_url=CONF.ui_url,
-                 static_path=CONF.static_path)
+                 static_path=CONF.ui_static_path)
    return swagger.Application(
        url_mappings.mappings,
        debug=CONF.api_debug,
diff --git a/utils/test/testapi/opnfv_testapi/common/config.py b/utils/test/testapi/opnfv_testapi/common/config.py
index 4cd53c619..140e49283 100644
--- a/utils/test/testapi/opnfv_testapi/common/config.py
+++ b/utils/test/testapi/opnfv_testapi/common/config.py
@@ -16,14 +16,10 @@ import sys

class Config(object):
    def __init__(self):
-        self.config_file = None
+        self.config_file = '/etc/opnfv_testapi/config.ini'
        self._set_config_file()
        self._parse()
        self._parse_per_page()
-        self.static_path = os.path.join(
-            os.path.dirname(os.path.normpath(__file__)),
-            os.pardir,
-            'static')

    def _parse(self):
        if not os.path.exists(self.config_file):
@@ -56,23 +52,12 @@ class Config(object):
        return value

    def _set_config_file(self):
-        if not self._set_sys_config_file():
-            self._set_default_config_file()
-
-    def _set_sys_config_file(self):
        parser = argparse.ArgumentParser()
        parser.add_argument("-c", "--config-file", dest='config_file',
                            help="Config file location", metavar="FILE")
        args, _ = parser.parse_known_args(sys.argv)
-        try:
+        if hasattr(args, 'config_file') and args.config_file:
            self.config_file = args.config_file
-        finally:
-            return self.config_file is not None
-
-    def _set_default_config_file(self):
-        is_venv = os.getenv('VIRTUAL_ENV')
-        self.config_file = os.path.join('/' if not is_venv else is_venv,
-                                        'etc/opnfv_testapi/config.ini')

CONF = Config()
diff --git a/utils/test/testapi/opnfv_testapi/common/constants.py b/utils/test/testapi/opnfv_testapi/common/constants.py
new file mode 100644
index 000000000..70c922383
--- /dev/null
+++ b/utils/test/testapi/opnfv_testapi/common/constants.py
@@ -0,0 +1,4 @@
+TESTAPI_ID = 'testapi_id'
+CSRF_TOKEN = 'csrf_token'
+ROLE = 'role'
+TESTAPI_USERS = ['opnfv-testapi-users']
diff --git a/utils/test/testapi/opnfv_testapi/resources/result_handlers.py b/utils/test/testapi/opnfv_testapi/resources/result_handlers.py
index 9389d266d..e202f5c2c 100644
--- a/utils/test/testapi/opnfv_testapi/resources/result_handlers.py
+++ b/utils/test/testapi/opnfv_testapi/resources/result_handlers.py
@@ -6,20 +6,20 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-import logging
-from datetime import datetime
-from datetime import timedelta
import json
+import logging

from bson import objectid
+from datetime import datetime
+from datetime import timedelta

-from opnfv_testapi.common.config import CONF
+from opnfv_testapi.common import constants
from opnfv_testapi.common import message
from opnfv_testapi.common import raises
+from opnfv_testapi.common.config import CONF
from opnfv_testapi.resources import handlers
from opnfv_testapi.resources import result_models
from opnfv_testapi.tornado_swagger import swagger
-from opnfv_testapi.ui.auth import constants as auth_const


class GenericResultHandler(handlers.GenericApiHandler):
@@ -59,13 +59,12 @@ class GenericResultHandler(handlers.GenericApiHandler):
            elif k == 'to':
                date_range.update({'$lt': str(v)})
            elif k == 'signed':
-                openid = self.get_secure_cookie(auth_const.OPENID)
-                role = self.get_secure_cookie(auth_const.ROLE)
-                logging.info('role:%s', role)
+                username = self.get_secure_cookie(constants.TESTAPI_ID)
+                role = self.get_secure_cookie(constants.ROLE)
                if role:
                    del query['public']
                    if role != "reviewer":
-                        query['user'] = openid
+                        query['user'] = username
            elif k not in ['last', 'page', 'descend']:
                query[k] = v
        if date_range:
@@ -246,7 +245,7 @@ class ResultsUploadHandler(ResultsCLHandler):
        self.json_args = json.loads(fileinfo['body']).copy()
        self.json_args['public'] = is_public

-        openid = self.get_secure_cookie(auth_const.OPENID)
+        openid = self.get_secure_cookie(constants.TESTAPI_ID)
        if openid:
            self.json_args['user'] = openid
diff --git a/utils/test/testapi/opnfv_testapi/router/url_mappings.py b/utils/test/testapi/opnfv_testapi/router/url_mappings.py
index 3e3ab87aa..ce0a3eeb3 100644
--- a/utils/test/testapi/opnfv_testapi/router/url_mappings.py
+++ b/utils/test/testapi/opnfv_testapi/router/url_mappings.py
@@ -72,12 +72,12 @@ mappings = [
    # static path
    (r'/(.*\.(css|png|gif|js|html|json|map|woff2|woff|ttf))',
     tornado.web.StaticFileHandler,
-     {'path': CONF.static_path}),
+     {'path': CONF.ui_static_path}),

    (r'/', root.RootHandler),
    (r'/api/v1/auth/signin', sign.SigninHandler),
-    (r'/api/v1/auth/signin_return', sign.SigninReturnHandler),
+    (r'/{}'.format(CONF.lfid_signin_return), sign.SigninReturnHandler),
    (r'/api/v1/auth/signout', sign.SignoutHandler),
-    (r'/api/v1/profile', user.ProfileHandler),
+    (r'/api/v1/profile', user.UserHandler),
]
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/noparam.ini b/utils/test/testapi/opnfv_testapi/tests/unit/common/noparam.ini
deleted file mode 100644
index be7f2b9f8..000000000
--- a/utils/test/testapi/opnfv_testapi/tests/unit/common/noparam.ini
+++ /dev/null
@@ -1,16 +0,0 @@
-# to add a new parameter in the config file,
-# the CONF object in config.ini must be updated
-[mongo]
-# URL of the mongo DB
-# Mongo auth url => mongodb://user1:pwd1@host1/?authSource=db1
-url = mongodb://127.0.0.1:27017/
-
-[api]
-# Listening port
-port = 8000
-# With debug_on set to true, error traces will be shown in HTTP responses
-debug = True
-authenticate = False
-
-[ui]
-url = http://localhost:8000
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/normal.ini b/utils/test/testapi/opnfv_testapi/tests/unit/common/normal.ini
deleted file mode 100644
index c81c6c56a..000000000
--- a/utils/test/testapi/opnfv_testapi/tests/unit/common/normal.ini
+++ /dev/null
@@ -1,17 +0,0 @@
-# to add a new parameter in the config file,
-# the CONF object in config.ini must be updated
-[mongo]
-# URL of the mongo DB
-# Mongo auth url => mongodb://user1:pwd1@host1/?authSource=db1
-url = mongodb://127.0.0.1:27017/
-dbname = test_results_collection
-
-[api]
-# Listening port
-port = 8000
-# With debug_on set to true, error traces will be shown in HTTP responses
-debug = True
-authenticate = False
-
-[ui]
-url = http://localhost:8000
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/nosection.ini b/utils/test/testapi/opnfv_testapi/tests/unit/common/nosection.ini
deleted file mode 100644
index a9ed49c5c..000000000
--- a/utils/test/testapi/opnfv_testapi/tests/unit/common/nosection.ini
+++ /dev/null
@@ -1,11 +0,0 @@
-# to add a new parameter in the config file,
-# the CONF object in config.ini must be updated
-[api]
-# Listening port
-port = 8000
-# With debug_on set to true, error traces will be shown in HTTP responses
-debug = True
-authenticate = False
-
-[ui]
-url = http://localhost:8000
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/notboolean.ini b/utils/test/testapi/opnfv_testapi/tests/unit/common/notboolean.ini
deleted file mode 100644
index 3a11f9dd3..000000000
--- a/utils/test/testapi/opnfv_testapi/tests/unit/common/notboolean.ini
+++ /dev/null
@@ -1,17 +0,0 @@
-# to add a new parameter in the config file,
-# the CONF object in config.ini must be updated
-[mongo]
-# URL of the mongo DB
-# Mongo auth url => mongodb://user1:pwd1@host1/?authSource=db1
-url = mongodb://127.0.0.1:27017/
-dbname = test_results_collection
-
-[api]
-# Listening port
-port = 8000
-# With debug_on set to true, error traces will be shown in HTTP responses
-debug = True
-authenticate = notboolean
-
-[ui]
-url = http://localhost:8000
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/notint.ini b/utils/test/testapi/opnfv_testapi/tests/unit/common/notint.ini
deleted file mode 100644
index 8180719b8..000000000
--- a/utils/test/testapi/opnfv_testapi/tests/unit/common/notint.ini
+++ /dev/null
@@ -1,17 +0,0 @@
-# to add a new parameter in the config file,
-# the CONF object in config.ini must be updated
-[mongo]
-# URL of the mongo DB
-# Mongo auth url => mongodb://user1:pwd1@host1/?authSource=db1
-url = mongodb://127.0.0.1:27017/
-dbname = test_results_collection
-
-[api]
-# Listening port
-port = notint
-# With debug_on set to true, error traces will be shown in HTTP responses
-debug = True
-authenticate = False
-
-[ui]
-url = http://localhost:8000
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/conftest.py b/utils/test/testapi/opnfv_testapi/tests/unit/conftest.py
index feff1daaa..75e621d0e 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/conftest.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/conftest.py
@@ -5,4 +5,4 @@ import pytest

@pytest.fixture
def config_normal():
-    return path.join(path.dirname(__file__), 'common/normal.ini')
+    return path.join(path.dirname(__file__), '../../../etc/config.ini')
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_base.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_base.py
index 77a8d18c1..39633e5f5 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_base.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_base.py
@@ -37,7 +37,8 @@ class TestBase(testing.AsyncHTTPTestCase):

    def _patch_server(self):
        import argparse
-        config = path.join(path.dirname(__file__), '../common/normal.ini')
+        config = path.join(path.dirname(__file__),
+                           '../../../../etc/config.ini')
        self.config_patcher = mock.patch(
            'argparse.ArgumentParser.parse_known_args',
            return_value=(argparse.Namespace(config_file=config), None))
@@ -46,9 +47,6 @@ class TestBase(testing.AsyncHTTPTestCase):
        self.config_patcher.start()
        self.db_patcher.start()

-    def set_config_file(self):
-        self.config_file = 'normal.ini'
-
    def get_app(self):
        from opnfv_testapi.cmd import server
        return server.make_app()
diff --git a/utils/test/testapi/opnfv_testapi/ui/auth/base.py b/utils/test/testapi/opnfv_testapi/ui/auth/base.py
deleted file mode 100644
index bea87c4d9..000000000
--- a/utils/test/testapi/opnfv_testapi/ui/auth/base.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import random
-import string
-
-from six.moves.urllib import parse
-
-from opnfv_testapi.resources import handlers
-
-
-class BaseHandler(handlers.GenericApiHandler):
-    def __init__(self, application, request, **kwargs):
-        super(BaseHandler, self).__init__(application, request, **kwargs)
-        self.table = 'users'
-
-    def set_cookies(self, cookies):
-        for cookie_n, cookie_v in cookies:
-            self.set_secure_cookie(cookie_n, cookie_v)
-
-
-def get_token(length=30):
-    """Get random token."""
-    return ''.join(random.choice(string.ascii_lowercase)
-                   for i in range(length))
-
-
-def set_query_params(url, params):
-    """Set params in given query."""
-    url_parts = parse.urlparse(url)
-    url = parse.urlunparse((
-        url_parts.scheme,
-        url_parts.netloc,
-        url_parts.path,
-        url_parts.params,
-        parse.urlencode(params),
-        url_parts.fragment))
-    return url
diff --git a/utils/test/testapi/opnfv_testapi/ui/auth/constants.py b/utils/test/testapi/opnfv_testapi/ui/auth/constants.py
deleted file mode 100644
index 44ccb46d7..000000000
--- a/utils/test/testapi/opnfv_testapi/ui/auth/constants.py
+++ /dev/null
@@ -1,18 +0,0 @@
-OPENID = 'openid'
-ROLE = 'role'
-DEFAULT_ROLE = 'user'
-
-# OpenID parameters
-OPENID_MODE = 'openid.mode'
-OPENID_NS = 'openid.ns'
-OPENID_RETURN_TO = 'openid.return_to'
-OPENID_CLAIMED_ID = 'openid.claimed_id'
-OPENID_IDENTITY = 'openid.identity'
-OPENID_REALM = 'openid.realm'
-OPENID_NS_SREG = 'openid.ns.sreg'
-OPENID_NS_SREG_REQUIRED = 'openid.sreg.required'
-OPENID_NS_SREG_EMAIL = 'openid.sreg.email'
-OPENID_NS_SREG_FULLNAME = 'openid.sreg.fullname'
-OPENID_ERROR = 'openid.error'
-
-CSRF_TOKEN = 'csrf_token'
diff --git a/utils/test/testapi/opnfv_testapi/ui/auth/sign.py b/utils/test/testapi/opnfv_testapi/ui/auth/sign.py
index 462395225..318473ea2 100644
--- a/utils/test/testapi/opnfv_testapi/ui/auth/sign.py
+++ b/utils/test/testapi/opnfv_testapi/ui/auth/sign.py
@@ -1,76 +1,59 @@
-from six.moves.urllib import parse
+from cas import CASClient
from tornado import gen
from tornado import web

+from opnfv_testapi.common import constants
from opnfv_testapi.common.config import CONF
from opnfv_testapi.db import api as dbapi
-from opnfv_testapi.ui.auth import base
-from opnfv_testapi.ui.auth import constants as const


-class SigninHandler(base.BaseHandler):
+class SignBaseHandler(handlers.GenericApiHandler):
+    def __init__(self, application, request, **kwargs):
+        super(SignBaseHandler, self).__init__(application, request, **kwargs)
+        self.table = 'users'
+        self.cas_client = CASClient(version='2',
+                                    server_url=CONF.lfid_cas_url,
+                                    service_url='{}/{}'.format(
+                                        CONF.ui_url,
+                                        CONF.lfid_signin_return))
+
+
+class SigninHandler(SignBaseHandler):
    def get(self):
-        csrf_token = base.get_token()
-        return_endpoint = parse.urljoin(CONF.api_url,
-                                        CONF.osid_openid_return_to)
-        return_to = base.set_query_params(return_endpoint,
-                                          {const.CSRF_TOKEN: csrf_token})
+        self.redirect(url=(self.cas_client.get_login_url()))

-        params = {
-            const.OPENID_MODE: CONF.osid_openid_mode,
-            const.OPENID_NS: CONF.osid_openid_ns,
-            const.OPENID_RETURN_TO: return_to,
-            const.OPENID_CLAIMED_ID: CONF.osid_openid_claimed_id,
-            const.OPENID_IDENTITY: CONF.osid_openid_identity,
-            const.OPENID_REALM: CONF.api_url,
-            const.OPENID_NS_SREG: CONF.osid_openid_ns_sreg,
-            const.OPENID_NS_SREG_REQUIRED: CONF.osid_openid_sreg_required,
-        }
-        url = CONF.osid_openstack_openid_endpoint
-        url = base.set_query_params(url, params)
-        self.redirect(url=url, permanent=False)
+class SigninReturnHandler(SignBaseHandler):

-class SigninReturnHandler(base.BaseHandler):
    @web.asynchronous
    @gen.coroutine
    def get(self):
-        if self.get_query_argument(const.OPENID_MODE) == 'cancel':
-            self._auth_failure('Authentication canceled.')
-
-        openid = self.get_query_argument(const.OPENID_CLAIMED_ID)
-        role = const.DEFAULT_ROLE
-        new_user_info = {
-            'openid': openid,
-            'email': self.get_query_argument(const.OPENID_NS_SREG_EMAIL),
-            'fullname': self.get_query_argument(const.OPENID_NS_SREG_FULLNAME),
-            const.ROLE: role
-        }
-        user = yield dbapi.db_find_one(self.table, {'openid': openid})
-        if not user:
-            dbapi.db_save(self.table, new_user_info)
-        else:
-            role = user.get(const.ROLE)
-
-        self.clear_cookie(const.OPENID)
-        self.clear_cookie(const.ROLE)
-        self.set_secure_cookie(const.OPENID, openid)
-        self.set_secure_cookie(const.ROLE, role)
-        self.redirect(url=CONF.ui_url)
-
-    def _auth_failure(self, message):
-        params = {'message': message}
-        url = parse.urljoin(CONF.ui_url,
-                            '/#/auth_failure?' + parse.urlencode(params))
-        self.redirect(url)
-
-
-class SignoutHandler(base.BaseHandler):
+        ticket = self.get_query_argument('ticket', default=None)
+        if ticket:
+            (user, attrs, _) = self.cas_client.verify_ticket(ticket=ticket)
+            login_user = {
+                'user': user,
+                'email': attrs.get('mail'),
+                'fullname': attrs.get('field_lf_full_name'),
+                'groups': constants.TESTAPI_USERS + attrs.get('group', [])
+            }
+            q_user = {'user': user}
+            db_user = yield dbapi.db_find_one(self.table, q_user)
+            if not db_user:
+                dbapi.db_save(self.table, login_user)
+            else:
+                dbapi.db_update(self.table, q_user, login_user)
+
+            self.clear_cookie(constants.TESTAPI_ID)
+            self.set_secure_cookie(constants.TESTAPI_ID, user)
+
+        self.redirect(url=CONF.ui_url)
+
+
+class SignoutHandler(SignBaseHandler):
    def get(self):
        """Handle signout request."""
-        self.clear_cookie(const.OPENID)
-        self.clear_cookie(const.ROLE)
-        params = {'openid_logout': CONF.osid_openid_logout_endpoint}
-        url = parse.urljoin(CONF.ui_url,
-                            '/#/logout?' + parse.urlencode(params))
-        self.redirect(url)
+        self.clear_cookie(constants.TESTAPI_ID)
+        logout_url = self.cas_client.get_logout_url(redirect_url=CONF.ui_url)
+        self.redirect(url=logout_url)
diff --git a/utils/test/testapi/opnfv_testapi/ui/auth/user.py b/utils/test/testapi/opnfv_testapi/ui/auth/user.py
index 955cdeead..ab86007f1 100644
--- a/utils/test/testapi/opnfv_testapi/ui/auth/user.py
+++ b/utils/test/testapi/opnfv_testapi/ui/auth/user.py
@@ -1,25 +1,26 @@
-from tornado import gen
-from tornado import web
-
+from opnfv_testapi.common import constants
from opnfv_testapi.common import raises
-from opnfv_testapi.db import api as dbapi
-from opnfv_testapi.ui.auth import base
+from opnfv_testapi.resources import handlers
+from opnfv_testapi.resources import models
+
+
+class User(models.ModelBase):
+    def __init__(self, user=None, email=None, fullname=None, groups=None):
+        self.user = user
+        self.email = email
+        self.fullname = fullname
+        self.groups = groups
+
+class UserHandler(handlers.GenericApiHandler):
+    def __init__(self, application, request, **kwargs):
+        super(UserHandler, self).__init__(application, request, **kwargs)
+        self.table = 'users'
+        self.table_cls = User

-class ProfileHandler(base.BaseHandler):
-    @web.asynchronous
-    @gen.coroutine
    def get(self):
-        openid = self.get_secure_cookie('openid')
-        if openid:
-            try:
-                user = yield dbapi.db_find_one(self.table, {'openid': openid})
-                self.finish_request({
-                    "openid": user.get('openid'),
-                    "email": user.get('email'),
-                    "fullname": user.get('fullname'),
-                    "role": user.get('role', 'user')
-                })
-            except Exception:
-                pass
-        raises.Unauthorized('Unauthorized')
+        username = self.get_secure_cookie(constants.TESTAPI_ID)
+        if username:
+            self._get_one(query={'user': username})
+        else:
+            raises.Unauthorized('Unauthorized')
diff --git a/utils/test/testapi/opnfv_testapi/ui/root.py b/utils/test/testapi/opnfv_testapi/ui/root.py
index 5b2c922d7..286a6b097 100644
--- a/utils/test/testapi/opnfv_testapi/ui/root.py
+++ b/utils/test/testapi/opnfv_testapi/ui/root.py
@@ -1,10 +1,10 @@
-from opnfv_testapi.resources.handlers import GenericApiHandler
from opnfv_testapi.common.config import CONF
+from opnfv_testapi.resources import handlers


-class RootHandler(GenericApiHandler):
+class RootHandler(handlers.GenericApiHandler):
    def get_template_path(self):
-        return CONF.static_path
+        return CONF.ui_static_path

    def get(self):
        self.render('testapi-ui/index.html')
diff --git a/utils/test/testapi/requirements.txt b/utils/test/testapi/requirements.txt
index 4b6f75c10..fbd2e0ede 100644
--- a/utils/test/testapi/requirements.txt
+++ b/utils/test/testapi/requirements.txt
@@ -8,3 +8,4 @@ tornado>=3.1,<=4.3 # Apache-2.0
epydoc>=0.3.1
six>=1.9.0 # MIT
motor # Apache-2.0
+python-cas
diff --git a/utils/test/testapi/setup.cfg b/utils/test/testapi/setup.cfg
index ab1ef553e..d9aa6762e 100644
--- a/utils/test/testapi/setup.cfg
+++ b/utils/test/testapi/setup.cfg
@@ -23,18 +23,10 @@ setup-hooks =
[files]
packages =
    opnfv_testapi
-package_data =
-    opnfv_testapi =
-        static/*.*
-        static/*/*.*
-        static/*/*/*.*
-        static/*/*/*/*.*
-        static/*/*/*/*/*.*
-        static/*/*/*/*/*/*.*
-        static/*/*/*/*/*/*/*.*
+
data_files =
-    /etc/opnfv_testapi =
-        etc/config.ini
+    /etc/opnfv_testapi = etc/config.ini
+    /usr/local/share/opnfv_testapi = 3rd_party/static/*

[entry_points]
console_scripts =
@@ -44,4 +36,3 @@
tag_build =
tag_date = 0
tag_svn_revision = 0
-
diff --git a/utils/test/testapi/setup.py b/utils/test/testapi/setup.py
index f689cb30e..f9d95a32d 100644
--- a/utils/test/testapi/setup.py
+++ b/utils/test/testapi/setup.py
@@ -1,6 +1,5 @@
import setuptools
-

__author__ = 'serena'

try:
@@ -8,6 +7,7 @@ try:
except ImportError:
    pass

+
setuptools.setup(
-    setup_requires=['pbr==2.0.0'],
+    setup_requires=['pbr>=2.0.0'],
    pbr=True)
diff --git a/utils/test/testapi/tools/watchdog/docker_watch.sh b/utils/test/testapi/tools/watchdog/docker_watch.sh
new file mode 100644
index 000000000..786fc10b9
--- /dev/null
+++ b/utils/test/testapi/tools/watchdog/docker_watch.sh
@@ -0,0 +1,165 @@
+#!/bin/bash
+
+# *
+# http://www.apache.org/licenses/LICENSE-2.0 *
+# *
+# Unless required by applicable law or agreed to in writing, *
+# software distributed under the License is distributed on an *
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
+# KIND, either express or implied. See the License for the *
+# specific language governing permissions and limitations *
+# under the License. *
+
+# This script checks if deployments are working or not and then
+# starts the specified containers in case one of the containers
+# crashes. The only solution is restarting docker as of now.
+
+## List of modules
+modules=(testapi reporting)
+
+## Ports of the modules
+declare -A ports=( ["testapi"]="8082" ["reporting"]="8084")
+
+## URLs to check if the modules are deployed or not
+declare -A urls=( ["testapi"]="http://testresults.opnfv.org/test/" \
+                  ["reporting"]="http://testresults.opnfv.org/reporting2/reporting/index.html")
+
+### Functions related to checking.
+
+function is_deploying() {
+    xml=$(curl -m10 "https://build.opnfv.org/ci/job/${1}-automate-master/lastBuild/api/xml?depth=1")
+    building=$(grep -oPm1 "(?<=<building>)[^<]+" <<< "$xml")
+    if [[ $building == "false" ]]
+    then
+        return 0
+    else
+        return 1
+    fi
+}
+
+function get_docker_status() {
+    status=$(service docker status | sed -n 3p | cut -d ' ' -f5)
+    echo -e "Docker status: $status"
+    if [ $status = "active" ]
+    then
+        return 1
+    else
+        return 0
+    fi
+}
+
+function check_connectivity() {
+    echo "Checking $1 connection: $2"
+    cmd=`curl --head -m10 --request GET ${2} | grep '200 OK' > /dev/null`
+    rc=$?
+    if [[ $rc == 0 ]]; then
+        return 0
+    else
+        return 1
+    fi
+}
+
+function check_modules() {
+    echo -e "Checking modules"
+    failed_modules=()
+    for module in "${modules[@]}"
+    do
+        if is_deploying $module; then
+            continue
+        fi
+        if ! check_connectivity $module "${urls[$module]}"; then
+            echo -e "$module failed"
+            failed_modules+=($module)
+        fi
+    done
+    if [ ! -z "$failed_modules" ]; then
+        echo -e "Failed Modules: ${failed_modules[@]}"
+        return 1
+    else
+        echo -e "All modules working fine"
+        exit 0
+    fi
+}
+
+### Functions related to fixes.
+
+function restart_docker_fix() {
+    echo -e "Running restart_docker_fix"
+    service docker restart
+    start_containers_fix "${modules[@]}"
+}
+
+function docker_proxy_fix() {
+    echo -e "Running docker_proxy_fix"
+    fix_modules=("${@}")
+    for module in "${fix_modules[@]}"
+    do
+        echo -e "Kill docker proxy and restart containers"
+        pid=$(netstat -nlp | grep :${ports[$module]} | awk '{print $7}' | cut -d'/' -f1)
+        echo $pid
+        if [ ! -z "$pid" ]; then
+            kill $pid
+            start_container_fix $module
+        fi
+    done
+}
+
+function start_containers_fix() {
+    start_modules=("${@}")
+    for module in "${start_modules[@]}"
+    do
+        start_container_fix $module
+    done
+}
+
+function start_container_fix() {
+    echo -e "Starting a container $module"
+    sudo docker stop $module
+    sudo docker start $module
+    sleep 5
+    if ! check_connectivity $module "${urls[$module]}"; then
+        echo -e "Starting an old container ${module}_old"
+        sudo docker stop $module
+        sudo docker start $module"_old"
+        sleep 5
+    fi
+}
+
+### Main Flow
+
+echo -e
+echo -e "WatchDog Started"
+echo -e
+echo -e `date "+%Y-%m-%d %H:%M:%S.%N"`
+echo -e
+
+## If the problem is related to docker daemon
+
+if get_docker_status; then
+    restart_docker_fix
+    if ! check_modules; then
+        echo -e "Watchdog failed while restart_docker_fix"
+    fi
+    exit
+fi
+
+## If the problem is related to docker proxy
+
+if ! check_modules; then
+    docker_proxy_fix "${failed_modules[@]}"
+fi
+
+## If any other problem: restart docker
+
+if ! check_modules; then
+    restart_docker_fix
+fi
+
+## If nothing works out
+
+if ! check_modules; then
+    echo -e "Watchdog failed"
+fi
+
+sudo docker ps
+sudo docker images
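
Note on the config change in opnfv_testapi/common/config.py above: the two-step lookup (CLI flag, then a VIRTUAL_ENV-dependent default) is collapsed into a hard-coded default plus an optional -c/--config-file override, parsed with parse_known_args so that flags owned by other parsers do not abort startup. A minimal standalone sketch of that resolution logic (a toy script, not the real Config class):

    import argparse
    import sys

    # Built-in default, as now hard-coded in Config.__init__.
    config_file = '/etc/opnfv_testapi/config.ini'

    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config-file', dest='config_file',
                        help='Config file location', metavar='FILE')
    # parse_known_args tolerates flags this parser does not know about
    # (e.g. anything tornado adds), instead of raising an error.
    args, _ = parser.parse_known_args(sys.argv)
    if args.config_file:
        config_file = args.config_file

    print(config_file)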
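Note on the 'signed' filter change in result_handlers.py: the role cookie now gates visibility, with reviewers seeing every non-public result and everyone else only their own. A toy restatement of that branch (not the handler itself; query is the MongoDB filter dict the handler builds up):

    def apply_signed_filter(query, username, role):
        # Mimics the cookie-driven branch in GenericResultHandler.
        if role:
            del query['public']            # signed view: drop the public-only constraint
            if role != 'reviewer':
                query['user'] = username   # non-reviewers are limited to their own results
        return query

    print(apply_signed_filter({'public': 'true'}, 'jdoe', 'user'))
    # -> {'user': 'jdoe'}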
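Note on the sign.py rework: the two handlers are a thin wrapper around python-cas, whose API the patch itself exercises (get_login_url, verify_ticket returning (user, attributes, pgtiou), get_logout_url). The round trip can be sketched standalone; the service URL and the ticket string below are illustrative placeholders, not values from a real deployment, and verify_ticket performs an HTTP call that will simply return (None, {}, None) for a made-up ticket:

    from cas import CASClient

    cas_client = CASClient(version='2',
                           server_url='https://identity.linuxfoundation.org/cas/',
                           service_url='http://localhost:8000/api/v1/auth/signin_return')

    # SigninHandler: redirect the browser to the CAS login page.
    print(cas_client.get_login_url())

    # SigninReturnHandler: CAS redirects back with ?ticket=...; validate it.
    user, attrs, _ = cas_client.verify_ticket('ST-placeholder-ticket')
    if user:
        # The handler persists these attributes and sets the testapi_id cookie.
        print(user, attrs.get('mail'), attrs.get('field_lf_full_name'))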
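Finally, a hedged sketch of the health check at the heart of docker_watch.sh, in Python for illustration only (the script itself uses curl and grep; the requests dependency is an assumption of this sketch, not of the script). The escalation order around it stays the same: modules that are mid-deployment on Jenkins are skipped, then the docker-proxy fix is tried, then a full docker restart:

    import requests

    # Same module -> health-URL map as the script's 'urls' array.
    URLS = {
        'testapi': 'http://testresults.opnfv.org/test/',
        'reporting': 'http://testresults.opnfv.org/reporting2/reporting/index.html',
    }

    def failed_modules():
        """Return modules whose health URL does not answer 200, like check_modules."""
        failed = []
        for module, url in URLS.items():
            try:
                ok = requests.head(url, timeout=10).status_code == 200
            except requests.RequestException:
                ok = False
            if not ok:
                failed.append(module)
        return failed

    if __name__ == '__main__':
        print(failed_modules() or 'All modules working fine')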