summary refs log tree commit diff stats
path: root/dovetail
diff options
context:
space:
mode:
Diffstat (limited to 'dovetail')
-rwxr-xr-x  dovetail/run.py                                   15
-rw-r--r--  dovetail/tests/unit/cmd_config.yml                 4
-rw-r--r--  dovetail/tests/unit/test_run.py                   40
-rw-r--r--  dovetail/tests/unit/utils/test_dovetail_utils.py  49
-rw-r--r--  dovetail/utils/dovetail_utils.py                  28
5 files changed, 124 insertions, 12 deletions
diff --git a/dovetail/run.py b/dovetail/run.py
index 71a69687..c83c1973 100755
--- a/dovetail/run.py
+++ b/dovetail/run.py
@@ -11,6 +11,7 @@
import copy
+from datetime import datetime
import os
import time
import uuid
@@ -48,10 +49,20 @@ def run_test(testcase_list, report_flag, logger):
testcase = dt_testcase.Testcase.get(testcase_name)
run_testcase = True
+ tc_start_time = datetime.fromtimestamp(
+ time.time()).strftime('%Y-%m-%d %H:%M:%S')
if run_testcase:
testcase.run()
+ tc_stop_time = datetime.fromtimestamp(
+ time.time()).strftime('%Y-%m-%d %H:%M:%S')
result = report.check_tc_result(testcase)
+ if os.getenv('OPNFV_CI') == 'true':
+ dt_utils.push_results_to_db(case_name=testcase_name,
+ start_date=tc_start_time,
+ stop_date=tc_stop_time,
+ details=result,
+ logger=logger)
if dt_cfg.dovetail_config['stop']:
try:
if (not result or result['criteria'] == 'FAIL'):
@@ -248,8 +259,8 @@ def main(*args, **kwargs):
if not get_result_path():
return
clean_results_dir()
- if kwargs['debug']:
- os.environ['DEBUG'] = 'true'
+ os.environ['DEBUG'] = 'true' if kwargs['debug'] else 'false'
+ os.environ['OPNFV_CI'] = 'true' if kwargs['opnfv_ci'] else 'false'
create_logs()
logger = dt_logger.Logger('run').getLogger()
diff --git a/dovetail/tests/unit/cmd_config.yml b/dovetail/tests/unit/cmd_config.yml
index 4a1439f6..98a2a9d8 100644
--- a/dovetail/tests/unit/cmd_config.yml
+++ b/dovetail/tests/unit/cmd_config.yml
@@ -22,3 +22,7 @@ cli:
flags:
- '--report'
is_flag: 'True'
+ opnfvci:
+ flags:
+ - '--opnfv-ci'
+ is_flag: 'True'
diff --git a/dovetail/tests/unit/test_run.py b/dovetail/tests/unit/test_run.py
index 7f36d31f..654d8c9c 100644
--- a/dovetail/tests/unit/test_run.py
+++ b/dovetail/tests/unit/test_run.py
@@ -57,30 +57,45 @@ class RunTesting(unittest.TestCase):
logger.warning.assert_called_once_with(
"No test case will be executed.")
+ @patch('dovetail.run.datetime')
+ @patch('dovetail.run.dt_utils')
@patch('dovetail.run.dt_cfg')
@patch('dovetail.run.dt_report.Report')
@patch('dovetail.run.dt_testcase.Testcase')
@patch('dovetail.run.time')
- def test_run_test(self, mock_time, mock_testcase, mock_report,
- mock_config):
+ @patch('os.getenv')
+ def test_run_test(self, mock_getenv, mock_time, mock_testcase, mock_report,
+ mock_config, mock_utils, mock_datetime):
logger = Mock()
report_obj = Mock()
mock_report.return_value = report_obj
- mock_time.time.side_effect = [42, 84]
+ mock_time.time.side_effect = [42, 43, 83, 84]
+ datetime_obj = Mock()
+ mock_datetime.fromtimestamp.return_value = datetime_obj
+ datetime_obj.strftime.side_effect = ['1969-12-31 19:00:43',
+ '1969-12-31 19:01:23']
testcase_name = 'testcase'
testcase_obj = Mock()
mock_testcase.get.return_value = testcase_obj
mock_config.dovetail_config = {'stop': True}
+ mock_getenv.return_value = 'true'
report_obj.check_tc_result.return_value = {'criteria': 'PASS'}
+ mock_utils.push_results_to_db.return_value = True
dt_run.run_test([testcase_name], True, logger)
- mock_time.time.assert_has_calls([call(), call()])
+ mock_time.time.assert_has_calls([call(), call(), call(), call()])
logger.info.assert_called_once_with(
'>>[testcase]: {}'.format(testcase_name))
mock_testcase.get.assert_called_once_with(testcase_name)
testcase_obj.run.assert_called_once_with()
report_obj.check_tc_result.assert_called_once_with(testcase_obj)
+ mock_utils.push_results_to_db.assert_called_once_with(
+ case_name=testcase_name,
+ start_date='1969-12-31 19:00:43',
+ stop_date='1969-12-31 19:01:23',
+ details={'criteria': 'PASS'},
+ logger=logger)
report_obj.generate.assert_called_once_with([testcase_name], 42)
report_obj.save_logs.assert_called_once_with()
@@ -101,7 +116,8 @@ class RunTesting(unittest.TestCase):
dt_run.run_test([testcase_name], True, logger)
- mock_time.time.assert_called_once_with()
+ mock_time.time.assert_has_calls([call(), call(), call().__float__(),
+ call(), call().__float__()])
logger.info.assert_has_calls([
call('>>[testcase]: {}'.format(testcase_name)),
call('Stop because {} failed'.format(testcase_name))])
@@ -127,7 +143,7 @@ class RunTesting(unittest.TestCase):
dt_run.run_test([testcase_name], True, logger)
- mock_time.time.assert_called_once_with()
+ mock_time.time.assert_has_calls([call(), call(), call()])
logger.info.assert_has_calls([
call('>>[testcase]: {}'.format(testcase_name)),
call('Stop because {} failed'.format(testcase_name))])
@@ -504,14 +520,15 @@ class RunTesting(unittest.TestCase):
mock_get_list.return_value = testcase_list
kwargs_dict = {
'debug': True,
+ 'opnfv_ci': True,
'report': True,
'testsuite': 'testsuite',
'docker_tag': '2.0.0'
}
with self.assertRaises(SystemExit) as cm:
- dt_run.main([
- '--testsuite=testsuite', '--debug', '--report', '2.0.0'])
+ dt_run.main(['--testsuite=testsuite', '--debug', '--report',
+ '2.0.0', '--opnfv-ci'])
expected = cm.exception
logger_temp_obj.getLogger.assert_called_once_with()
@@ -521,7 +538,8 @@ class RunTesting(unittest.TestCase):
mock_config.dovetail_config)
mock_get_result.assert_called_once_with()
mock_clean.assert_called_once_with()
- self.assertEquals({'DEBUG': 'true'}, mock_os.environ)
+ self.assertEquals({'DEBUG': 'true', 'OPNFV_CI': 'true'},
+ mock_os.environ)
mock_create_logs.assert_called_once_with()
logger_obj.info.assert_has_calls([
call('================================================'),
@@ -587,6 +605,7 @@ class RunTesting(unittest.TestCase):
mock_get_list.return_value = None
kwargs_dict = {
'debug': True,
+ 'opnfv_ci': False,
'report': True,
'testsuite': 'testsuite',
'docker_tag': '2.0.0'
@@ -605,7 +624,8 @@ class RunTesting(unittest.TestCase):
mock_config.dovetail_config)
mock_get_result.assert_called_once_with()
mock_clean.assert_called_once_with()
- self.assertEquals({'DEBUG': 'true'}, mock_os.environ)
+ self.assertEquals({'DEBUG': 'true', 'OPNFV_CI': 'false'},
+ mock_os.environ)
mock_create_logs.assert_called_once_with()
logger_obj.info.assert_has_calls([
call('================================================'),
diff --git a/dovetail/tests/unit/utils/test_dovetail_utils.py b/dovetail/tests/unit/utils/test_dovetail_utils.py
index 33fc1eae..2635fb6f 100644
--- a/dovetail/tests/unit/utils/test_dovetail_utils.py
+++ b/dovetail/tests/unit/utils/test_dovetail_utils.py
@@ -1334,3 +1334,52 @@ class DovetailUtilsTesting(unittest.TestCase):
mock_host.assert_called_once()
mock_endpoint.assert_called_once()
mock_hardware.assert_called_once()
+
+ @patch('json.dumps')
+ @patch('dovetail.utils.dovetail_utils.requests')
+ @patch('os.getenv')
+ def test_push_results_to_db(self, mock_getenv, mock_requests, mock_dumps):
+ logger = Mock()
+ case_name = 'case_name'
+ details = {'criteria': 'PASS'}
+ start_date = 'start_date'
+ stop_date = 'stop_date'
+ mock_getenv.side_effect = [
+ 'url', 'installer', 'scenario', 'pod_name', 'build_tag', 'version']
+ post_req = Mock()
+ post_req.raise_for_status.return_value = None
+ mock_requests.post.return_value = post_req
+ mock_dumps.return_value = {"project_name": "dovetail"}
+
+ dovetail_utils.push_results_to_db(
+ case_name, details, start_date, stop_date, logger)
+
+ mock_requests.post.assert_called_once_with(
+ 'url',
+ data={"project_name": "dovetail"},
+ headers={"Content-Type": "application/json"})
+ logger.debug.assert_called_once_with(
+ "The results were successfully pushed to DB.")
+
+ @patch('json.dumps')
+ @patch('dovetail.utils.dovetail_utils.requests')
+ @patch('os.getenv')
+ def test_push_results_to_db_exception(self, mock_getenv, mock_requests,
+ mock_dumps):
+ logger = Mock()
+ case_name = 'case_name'
+ details = {'criteria': 'PASS'}
+ start_date = 'start_date'
+ stop_date = 'stop_date'
+ mock_getenv.side_effect = [
+ 'url', 'installer', 'scenario', 'pod_name', 'build_tag', 'version']
+ post_req = Mock()
+ post_req.raise_for_status.side_effect = Exception()
+ mock_requests.post.return_value = post_req
+ mock_dumps.return_value = {"project_name": "dovetail"}
+ dovetail_utils.push_results_to_db(
+ case_name, details, start_date, stop_date, logger)
+
+ logger.debug.assert_not_called()
+ logger.exception.assert_called_once_with(
+ "The results cannot be pushed to DB.")
diff --git a/dovetail/utils/dovetail_utils.py b/dovetail/utils/dovetail_utils.py
index a3d07824..aee6dc29 100644
--- a/dovetail/utils/dovetail_utils.py
+++ b/dovetail/utils/dovetail_utils.py
@@ -12,6 +12,7 @@ from __future__ import print_function
import sys
import os
import re
+import requests
import subprocess
from collections import Mapping, Set, Sequence
import json
@@ -403,3 +404,30 @@ def get_openstack_info(logger):
get_hosts_info(logger)
get_openstack_endpoint(logger)
get_hardware_info(logger)
+
+
+def push_results_to_db(case_name, details, start_date, stop_date, logger):
+ """
+ Push results to OPNFV TestAPI DB when running with OPNFV CI jobs.
+ All results can be filtered with TestAPI.
+ http://testresults.opnfv.org/test/#/results
+ """
+ try:
+ url = os.getenv('TEST_DB_URL')
+ data = {'project_name': 'dovetail', 'case_name': case_name,
+ 'details': details, 'start_date': start_date,
+ 'stop_date': stop_date}
+ data['criteria'] = details['criteria'] if details else 'FAIL'
+ data['installer'] = os.getenv('INSTALLER_TYPE')
+ data['scenario'] = os.getenv('DEPLOY_SCENARIO')
+ data['pod_name'] = os.getenv('NODE_NAME')
+ data['build_tag'] = os.getenv('BUILD_TAG')
+ data['version'] = os.getenv('VERSION')
+ req = requests.post(url, data=json.dumps(data, sort_keys=True),
+ headers={'Content-Type': 'application/json'})
+ req.raise_for_status()
+ logger.debug('The results were successfully pushed to DB.')
+ return True
+ except Exception:
+ logger.exception('The results cannot be pushed to DB.')
+ return False