Diffstat (limited to 'utils')
7 files changed, 266 insertions, 64 deletions
diff --git a/utils/fetch_os_creds.sh b/utils/fetch_os_creds.sh
index 312e1ac5c..0e2a2b93f 100755
--- a/utils/fetch_os_creds.sh
+++ b/utils/fetch_os_creds.sh
@@ -112,6 +112,10 @@ if [ "$installer_type" == "fuel" ]; then
         info "Fetching rc file from controller $controller_ip..."
         ssh ${ssh_options} ubuntu@${controller_ip} "sudo cat /root/keystonercv3" > $dest_path
+
+        if [[ $BUILD_TAG =~ "baremetal" ]]; then
+            ssh ${ssh_options} ubuntu@${installer_ip} "cat /etc/ssl/certs/os_cacert" > $os_cacert
+        fi
     else
         #ip_fuel="10.20.0.2"
         env=$(sshpass -p r00tme ssh 2>/dev/null ${ssh_options} root@${installer_ip} \
@@ -144,6 +148,13 @@ if [ "$installer_type" == "fuel" ]; then
     echo $auth_url >> $dest_path

 elif [ "$installer_type" == "apex" ]; then
+    if ! ipcalc -c $installer_ip; then
+        installer_ip=$(sudo virsh domifaddr undercloud | grep -Eo '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}')
+        if [ -z "$installer_ip" ] || ! $(ipcalc -c $installer_ip); then
+            echo "Unable to find valid IP for Apex undercloud: ${installer_ip}"
+            exit 1
+        fi
+    fi
     verify_connectivity $installer_ip

     # The credentials file is located in the Instack VM (192.0.2.1)
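Note on the Apex hunk above: when the supplied installer address is not a valid IPv4 address, the script now asks libvirt for the undercloud VM's address and validates it before continuing. A minimal standalone sketch of that pattern, assuming the RHEL-style ipcalc (which provides the -c/--check validation flag) and a libvirt domain named "undercloud"; the helper name is illustrative, not part of the patch:

    #!/bin/bash
    # find_undercloud_ip.sh -- hypothetical helper mirroring the fallback above
    installer_ip=$1
    if ! ipcalc -c "$installer_ip"; then
        # Take the first IPv4-looking token from the domain's interface list
        installer_ip=$(sudo virsh domifaddr undercloud | \
                       grep -Eo '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}' | head -1)
        if [ -z "$installer_ip" ] || ! ipcalc -c "$installer_ip"; then
            echo "Unable to find a valid IP for the Apex undercloud: ${installer_ip}"
            exit 1
        fi
    fi
    echo "Using undercloud IP: ${installer_ip}"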
"?case=tempest_smoke_serial&period=" + str(PERIOD) + + "&installer=" + installer + "&version=" + version) + request = Request(url) + logger.info(("Search tempest_smoke_serial results for installer %s" + " for version %s"), installer, version) try: response = urlopen(request) k = response.read() results = json.loads(k) - except URLError as e: - logger.error("Error code: %s" % e) - + except URLError as err: + logger.error("Error code: %s", err) + logger.debug("request sent: %s", url) + logger.debug("Results from API: %s", results) test_results = results['results'] - + logger.debug("Test results: %s", test_results) scenario_results = {} criteria = {} errors = {} @@ -72,27 +71,37 @@ for version in rp_utils.get_config('general.versions'): scenario_results[r['scenario']] = [] scenario_results[r['scenario']].append(r) + logger.debug("Scenario results: %s", scenario_results) + for s, s_result in scenario_results.items(): scenario_results[s] = s_result[0:5] # For each scenario, we build a result object to deal with # results, criteria and error handling for result in scenario_results[s]: result["start_date"] = result["start_date"].split(".")[0] + logger.debug("start_date= %s", result["start_date"]) # retrieve results # **************** nb_tests_run = result['details']['tests'] nb_tests_failed = result['details']['failures'] - if nb_tests_run != 0: - success_rate = 100 * ((int(nb_tests_run) - + logger.debug("nb_tests_run= %s", nb_tests_run) + logger.debug("nb_tests_failed= %s", nb_tests_failed) + + try: + success_rate = (100 * (int(nb_tests_run) - int(nb_tests_failed)) / - int(nb_tests_run)) - else: + int(nb_tests_run)) + except ZeroDivisionError: success_rate = 0 result['details']["tests"] = nb_tests_run result['details']["Success rate"] = str(success_rate) + "%" + logger.info("nb_tests_run= %s", result['details']["tests"]) + logger.info("test rate = %s", + result['details']["Success rate"]) + # Criteria management # ******************* crit_tests = False @@ -100,11 +109,11 @@ for version in rp_utils.get_config('general.versions'): crit_time = False # Expect that at least 165 tests are run - if nb_tests_run >= criteria_nb_test: + if nb_tests_run >= CRITERIA_NB_TEST: crit_tests = True # Expect that at least 90% of success - if success_rate >= criteria_success_rate: + if success_rate >= CRITERIA_SUCCESS_RATE: crit_rate = True # Expect that the suite duration is inferior to 30m @@ -114,28 +123,27 @@ for version in rp_utils.get_config('general.versions'): '%Y-%m-%d %H:%M:%S') delta = stop_date - start_date - if (delta.total_seconds() < criteria_duration): + + if delta.total_seconds() < CRITERIA_DURATION: crit_time = True result['criteria'] = {'tests': crit_tests, 'Success rate': crit_rate, 'duration': crit_time} try: - logger.debug("Scenario %s, Installer %s" - % (s_result[1]['scenario'], installer)) - logger.debug("Nb Test run: %s" % nb_tests_run) - logger.debug("Test duration: %s" - % result['details']['duration']) - logger.debug("Success rate: %s" % success_rate) - except: + logger.debug("Nb Test run: %s", nb_tests_run) + logger.debug("Test duration: %s", delta) + logger.debug("Success rate: %s", success_rate) + except Exception: # pylint: disable=broad-except logger.error("Data format error") # Error management # **************** try: errors = result['details']['errors'] - result['errors'] = errors.replace('{0}', '') - except: + logger.info("errors: %s", errors) + result['errors'] = errors + except Exception: # pylint: disable=broad-except logger.error("Error field not present (Brahamputra runs?)") 
diff --git a/utils/test/reporting/reporting/qtip/reporting-status.py b/utils/test/reporting/reporting/qtip/reporting-status.py
index f0127b50f..56f9e0aee 100644
--- a/utils/test/reporting/reporting/qtip/reporting-status.py
+++ b/utils/test/reporting/reporting/qtip/reporting-status.py
@@ -33,8 +33,7 @@ def prepare_profile_file(version):
     if not os.path.exists(profile_dir):
         os.makedirs(profile_dir)

-    profile_file = "{}/{}/scenario_history.txt".format(profile_dir,
-                                                       version)
+    profile_file = "{}/scenario_history.txt".format(profile_dir)
     if not os.path.exists(profile_file):
         with open(profile_file, 'w') as f:
             info = 'date,scenario,installer,details,score\n'
diff --git a/utils/test/reporting/reporting/qtip/template/index-status-tmpl.html b/utils/test/reporting/reporting/qtip/template/index-status-tmpl.html
index 26da36ceb..92f3395dc 100644
--- a/utils/test/reporting/reporting/qtip/template/index-status-tmpl.html
+++ b/utils/test/reporting/reporting/qtip/template/index-status-tmpl.html
@@ -46,10 +46,11 @@
             <nav>
               <ul class="nav nav-justified">
                 <li class="active"><a href="http://testresults.opnfv.org/reporting/index.html">Home</a></li>
-                <li><a href="index-status-apex.html">Apex</a></li>
-                <li><a href="index-status-compass.html">Compass</a></li>
-                <li><a href="index-status-fuel.html">Fuel</a></li>
-                <li><a href="index-status-joid.html">Joid</a></li>
+                <li><a href="status-apex.html">Apex</a></li>
+                <li><a href="status-compass.html">Compass</a></li>
+                <li><a href="status-daisy.html">Daisy</a></li>
+                <li><a href="status-fuel.html">Fuel</a></li>
+                <li><a href="status-joid.html">Joid</a></li>
               </ul>
             </nav>
           </div>
diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/pods.html b/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/pods.html
index cdfcfaf36..7ce36ca7c 100644
--- a/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/pods.html
+++ b/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/pods.html
@@ -63,7 +63,7 @@
         </tbody>
     </table>
 </div>
-
+<br>
 <div ng-show="ctrl.showError" class="alert alert-danger" role="alert">
     <span class="glyphicon glyphicon-exclamation-sign" aria-hidden="true"></span>
     <span class="sr-only">Error:</span>
diff --git a/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/podsController.js b/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/podsController.js
index 53e8b1eff..201258619 100644
--- a/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/podsController.js
+++ b/utils/test/testapi/3rd_party/static/testapi-ui/components/pods/podsController.js
@@ -31,7 +31,6 @@
     function PodsController($scope, $http, $filter, $state, testapiApiUrl,
         raiseAlert) {
         var ctrl = this;
-
         ctrl.url = testapiApiUrl + '/pods';

         ctrl.create = create;
@@ -82,21 +81,27 @@
          */
        function create() {
            ctrl.showError = false;
-            var pods_url = ctrl.url;
-            var body = {
-                name: ctrl.name,
-                mode: ctrl.mode,
-                role: ctrl.role,
-                details: ctrl.details
-            };
-            ctrl.podsRequest =
-                $http.post(pods_url, body).error(function (error) {
-                    ctrl.showError = true;
-                    ctrl.error =
-                        'Error creating the new pod from server: ' +
-                        angular.toJson(error);
-                });
+            if(ctrl.name != ""){
+                var pods_url = ctrl.url;
+                var body = {
+                    name: ctrl.name,
+                    mode: ctrl.mode,
+                    role: ctrl.role,
+                    details: ctrl.details
+                };
+                ctrl.podsRequest =
+                    $http.post(pods_url, body).error(function (error) {
+                        ctrl.showError = true;
+                        ctrl.error =
+                            'Error creating the new pod from server: ' +
+                            angular.toJson(error);
+                    });
+            }
+            else{
+                ctrl.showError = true;
+                ctrl.error = 'Name is missing.'
+            }
        }

        /**
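Note on the podsController.js hunk above: the change only adds a client-side guard so that the POST to the TestAPI pods resource is skipped when the name field is empty. The request the controller sends (testapiApiUrl + '/pods' with a name/mode/role/details body) can be replayed from the command line roughly as sketched below; the host, API prefix and payload values are illustrative, and a real deployment may additionally require an authentication token:

    # Hypothetical manual equivalent of the controller's create() call
    TESTAPI="http://localhost:8082/api/v1"
    curl -s -X POST "${TESTAPI}/pods" \
         -H "Content-Type: application/json" \
         -d '{"name": "example-pod", "mode": "metal", "role": "community-ci", "details": ""}'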
diff --git a/utils/test/testapi/tools/watchdog/docker_watch.sh b/utils/test/testapi/tools/watchdog/docker_watch.sh
new file mode 100644
index 000000000..d67e4b380
--- /dev/null
+++ b/utils/test/testapi/tools/watchdog/docker_watch.sh
@@ -0,0 +1,178 @@
+#                                                              *
+#    http://www.apache.org/licenses/LICENSE-2.0                *
+#                                                              *
+# Unless required by applicable law or agreed to in writing,   *
+# software distributed under the License is distributed on an  *
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY       *
+# KIND, either express or implied. See the License for the     *
+# specific language governing permissions and limitations      *
+# under the License.                                           *
+
+# This script checks if deployments are working or and then
+# starts the specified containers in case one of the containers
+# crash. The only solution is restarting docker as of now.
+
+#!/bin/bash
+
+## List of modules
+modules=(testapi reporting)
+
+## Ports of the modules
+declare -A ports=( ["testapi"]="8082" ["reporting"]="8084")
+
+## Urls to check if the modules are deployed or not ?
+#declare -A urls=( ["testapi"]="http://testresults.opnfv.org/test/" \
+#                  ["reporting"]="http://testresults.opnfv.org/reporting2/reporting/index.html")
+
+declare -A urls=( ["testapi"]="http://localhost:8082/" \
+                  ["reporting"]="http://testresults.opnfv.org/reporting2/reporting/index.html")
+
+
+### Functions related to checking.
+
+function is_deploying() {
+    echo -e "Checking job statuses"
+    for module in "${modules[@]}"
+    do
+        if get_status $module; then
+            exit 0
+        fi
+    done
+}
+
+function get_status() {
+    xml=$(curl -m10 "https://build.opnfv.org/ci/job/${1}-automate-master/lastBuild/api/xml?depth=1")
+    building=$(grep -oPm1 "(?<=<building>)[^<]+" <<< "$xml")
+    if [[ $building == "false" ]]
+    then
+        return 1
+    else
+        return 0
+    fi
+}
+
+function get_docker_status() {
+    status=$(service docker status | sed -n 3p | cut -d ' ' -f5)
+    echo -e "Docker status: $status"
+    if [ $status = "active" ]
+    then
+        return 1
+    else
+        return 0
+    fi
+}
+
+function check_connectivity() {
+    echo "Checking $1 connection : $2"
+    cmd=`curl --head -m10 --request GET ${2} | grep '200 OK' > /dev/null`
+    rc=$?
+    if [[ $rc == 0 ]]; then
+        return 0
+    else
+        return 1
+    fi
+}
+
+function check_modules() {
+    echo -e "Checking modules"
+    failed_modules=()
+    for module in "${modules[@]}"
+    do
+        if ! check_connectivity $module "${urls[$module]}"; then
+            echo -e "$module failed"
+            failed_modules+=($module)
+        fi
+    done
+    if [ ! -z "$failed_modules" ]; then
+        echo -e "Failed Modules: $failed_modules"
+        return 1
+    else
+        echo -e "All modules working good"
+        exit 0
+    fi
+}
+
+### Functions related fixes.
+
+function restart_docker_fix() {
+    echo -e "Running restart_docker_fix"
+    service docker restart
+    start_containers_fix "${modules[@]}"
+}
+
+function docker_proxy_fix() {
+    echo -e "Running docker_proxy_fix"
+    fix_modules=("${@}")
+    for module in "${fix_modules[@]}"
+    do
+        echo -e "Kill docker proxy and restart containers"
+        pid=$(netstat -nlp | grep :${ports[$module]} | awk '{print $7}' | cut -d'/' -f1)
+        echo $pid
+        if [ ! -z "$pid" ]; then
+            kill $pid
+            start_containers_fix $module
+        fi
+    done
+}
+
+function start_containers_fix() {
+    echo "Runnning start_containers_fix"
+    start_modules=("${@}")
+    for module in "${start_modules[@]}"
+    do
+        echo -e "Starting a container $module"
+        sudo docker stop $module
+        sudo docker start $module
+        sleep 5
+        if ! check_connectivity $module "${urls[$module]}"; then
+            echo -e "Starting an old container $module_old"
+            sudo docker stop $module
+            sudo docker start $module"_old"
+            sleep 5
+        fi
+    done
+}
+
+### Main Flow
+
+echo -e
+echo -e "WatchDog Started"
+echo -e
+echo -e `date "+%Y-%m-%d %H:%M:%S.%N"`
+echo -e
+
+if ! is_deploying; then
+    echo -e "Jenkins Jobs running"
+    exit
+fi
+
+## If the problem is related to docker daemon
+
+if get_docker_status; then
+    restart_docker_fix
+    if ! check_modules; then
+        echo -e "Watchdog failed while restart_docker_fix"
+    fi
+    exit
+fi
+
+## If the problem is related to docker containers
+
+if ! check_modules; then
+    start_containers_fix "${failed_modules[@]}"
+fi
+
+## If the problem is related to docker proxy
+
+if ! check_modules; then
+    docker_proxy_fix "${failed_modules[@]}"
+fi
+
+## If nothing works out
+
+if ! check_modules; then
+    echo -e "Watchdog failed"
+fi
+
+sudo docker ps
+sudo docker images
\ No newline at end of file
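Note on the new docker_watch.sh: it is meant to exit early while the testapi/reporting automate jobs are still building, then restart the docker daemon if it is not active, restart the failing containers, and finally try killing the docker-proxy processes for the affected ports. One way it might be exercised on the host running the containers; the path, log file and schedule below are illustrative and not part of this patch:

    # Run once by hand
    chmod +x utils/test/testapi/tools/watchdog/docker_watch.sh
    sudo utils/test/testapi/tools/watchdog/docker_watch.sh >> /var/log/docker_watch.log 2>&1

    # Or schedule it every 10 minutes from root's crontab
    # */10 * * * * /path/to/utils/test/testapi/tools/watchdog/docker_watch.sh >> /var/log/docker_watch.log 2>&1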