author     xudan <xudan16@huawei.com>  2019-04-03 05:33:24 -0400
committer  xudan <xudan16@huawei.com>  2019-04-03 22:25:59 -0400
commit     06a0a8f2bd8ee7ce7db154c54301ec986b93b89f (patch)
tree       89bd74978b9d9c4b107b4a617d557a60c95007a7
parent     eb86745284805f2943d4733bb029948be760d597 (diff)
Push CI results to OPNFV test DB
In order to use the OPNFV test results page to analyze all OVP test cases, the results generated by Dovetail CI jobs need to be pushed to the test DB, so that they can then be checked at http://testresults.opnfv.org/test/#/results. The POST body must contain the following fields:

1. project_name
2. case_name
3. details
4. installer
5. scenario
6. pod_name
7. build_tag
8. criteria
9. start_date
10. stop_date
11. version

JIRA: DOVETAIL-767

Change-Id: I925ae249e24efd7bfb1c68a69150e9c22f0cdf36
Signed-off-by: xudan <xudan16@huawei.com>
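As a reference for the fields listed above, here is a minimal sketch of the JSON payload that gets POSTed to TestAPI. The field names come from this commit; all concrete values (case name, installer, scenario, pod, build tag, dates) are illustrative assumptions.

    import json

    # Minimal sketch of the TestAPI POST body described in the commit message.
    # Field names match the commit; the values below are illustrative only.
    payload = {
        'project_name': 'dovetail',
        'case_name': 'dovetail.example.tc001',          # hypothetical case name
        'details': {'criteria': 'PASS'},
        'installer': 'fuel',                            # CI INSTALLER_TYPE
        'scenario': 'os-nosdn-nofeature-ha',            # CI DEPLOY_SCENARIO
        'pod_name': 'huawei-pod1',                      # CI NODE_NAME
        'build_tag': 'jenkins-dovetail-fuel-daily-1',   # CI BUILD_TAG
        'criteria': 'PASS',
        'start_date': '2019-04-03 05:33:24',
        'stop_date': '2019-04-03 05:35:10',
        'version': 'master',                            # CI VERSION
    }
    print(json.dumps(payload, sort_keys=True, indent=2))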
-rwxr-xr-x  dovetail/run.py                                    15
-rw-r--r--  dovetail/tests/unit/cmd_config.yml                  4
-rw-r--r--  dovetail/tests/unit/test_run.py                    40
-rw-r--r--  dovetail/tests/unit/utils/test_dovetail_utils.py   49
-rw-r--r--  dovetail/utils/dovetail_utils.py                   28
-rw-r--r--  etc/conf/cmd_config.yml                             5
6 files changed, 129 insertions, 12 deletions
diff --git a/dovetail/run.py b/dovetail/run.py
index 71a69687..c83c1973 100755
--- a/dovetail/run.py
+++ b/dovetail/run.py
@@ -11,6 +11,7 @@
import copy
+from datetime import datetime
import os
import time
import uuid
@@ -48,10 +49,20 @@ def run_test(testcase_list, report_flag, logger):
testcase = dt_testcase.Testcase.get(testcase_name)
run_testcase = True
+ tc_start_time = datetime.fromtimestamp(
+ time.time()).strftime('%Y-%m-%d %H:%M:%S')
if run_testcase:
testcase.run()
+ tc_stop_time = datetime.fromtimestamp(
+ time.time()).strftime('%Y-%m-%d %H:%M:%S')
result = report.check_tc_result(testcase)
+ if os.getenv('OPNFV_CI') == 'true':
+ dt_utils.push_results_to_db(case_name=testcase_name,
+ start_date=tc_start_time,
+ stop_date=tc_stop_time,
+ details=result,
+ logger=logger)
if dt_cfg.dovetail_config['stop']:
try:
if (not result or result['criteria'] == 'FAIL'):
@@ -248,8 +259,8 @@ def main(*args, **kwargs):
if not get_result_path():
return
clean_results_dir()
- if kwargs['debug']:
- os.environ['DEBUG'] = 'true'
+ os.environ['DEBUG'] = 'true' if kwargs['debug'] else 'false'
+ os.environ['OPNFV_CI'] = 'true' if kwargs['opnfv_ci'] else 'false'
create_logs()
logger = dt_logger.Logger('run').getLogger()
diff --git a/dovetail/tests/unit/cmd_config.yml b/dovetail/tests/unit/cmd_config.yml
index 4a1439f6..98a2a9d8 100644
--- a/dovetail/tests/unit/cmd_config.yml
+++ b/dovetail/tests/unit/cmd_config.yml
@@ -22,3 +22,7 @@ cli:
flags:
- '--report'
is_flag: 'True'
+ opnfvci:
+ flags:
+ - '--opnfv-ci'
+ is_flag: 'True'
diff --git a/dovetail/tests/unit/test_run.py b/dovetail/tests/unit/test_run.py
index 7f36d31f..654d8c9c 100644
--- a/dovetail/tests/unit/test_run.py
+++ b/dovetail/tests/unit/test_run.py
@@ -57,30 +57,45 @@ class RunTesting(unittest.TestCase):
logger.warning.assert_called_once_with(
"No test case will be executed.")
+ @patch('dovetail.run.datetime')
+ @patch('dovetail.run.dt_utils')
@patch('dovetail.run.dt_cfg')
@patch('dovetail.run.dt_report.Report')
@patch('dovetail.run.dt_testcase.Testcase')
@patch('dovetail.run.time')
- def test_run_test(self, mock_time, mock_testcase, mock_report,
- mock_config):
+ @patch('os.getenv')
+ def test_run_test(self, mock_getenv, mock_time, mock_testcase, mock_report,
+ mock_config, mock_utils, mock_datetime):
logger = Mock()
report_obj = Mock()
mock_report.return_value = report_obj
- mock_time.time.side_effect = [42, 84]
+ mock_time.time.side_effect = [42, 43, 83, 84]
+ datetime_obj = Mock()
+ mock_datetime.fromtimestamp.return_value = datetime_obj
+ datetime_obj.strftime.side_effect = ['1969-12-31 19:00:43',
+ '1969-12-31 19:01:23']
testcase_name = 'testcase'
testcase_obj = Mock()
mock_testcase.get.return_value = testcase_obj
mock_config.dovetail_config = {'stop': True}
+ mock_getenv.return_value = 'true'
report_obj.check_tc_result.return_value = {'criteria': 'PASS'}
+ mock_utils.push_results_to_db.return_value = True
dt_run.run_test([testcase_name], True, logger)
- mock_time.time.assert_has_calls([call(), call()])
+ mock_time.time.assert_has_calls([call(), call(), call(), call()])
logger.info.assert_called_once_with(
'>>[testcase]: {}'.format(testcase_name))
mock_testcase.get.assert_called_once_with(testcase_name)
testcase_obj.run.assert_called_once_with()
report_obj.check_tc_result.assert_called_once_with(testcase_obj)
+ mock_utils.push_results_to_db.assert_called_once_with(
+ case_name=testcase_name,
+ start_date='1969-12-31 19:00:43',
+ stop_date='1969-12-31 19:01:23',
+ details={'criteria': 'PASS'},
+ logger=logger)
report_obj.generate.assert_called_once_with([testcase_name], 42)
report_obj.save_logs.assert_called_once_with()
@@ -101,7 +116,8 @@ class RunTesting(unittest.TestCase):
dt_run.run_test([testcase_name], True, logger)
- mock_time.time.assert_called_once_with()
+ mock_time.time.assert_has_calls([call(), call(), call().__float__(),
+ call(), call().__float__()])
logger.info.assert_has_calls([
call('>>[testcase]: {}'.format(testcase_name)),
call('Stop because {} failed'.format(testcase_name))])
@@ -127,7 +143,7 @@ class RunTesting(unittest.TestCase):
dt_run.run_test([testcase_name], True, logger)
- mock_time.time.assert_called_once_with()
+ mock_time.time.assert_has_calls([call(), call(), call()])
logger.info.assert_has_calls([
call('>>[testcase]: {}'.format(testcase_name)),
call('Stop because {} failed'.format(testcase_name))])
@@ -504,14 +520,15 @@ class RunTesting(unittest.TestCase):
mock_get_list.return_value = testcase_list
kwargs_dict = {
'debug': True,
+ 'opnfv_ci': True,
'report': True,
'testsuite': 'testsuite',
'docker_tag': '2.0.0'
}
with self.assertRaises(SystemExit) as cm:
- dt_run.main([
- '--testsuite=testsuite', '--debug', '--report', '2.0.0'])
+ dt_run.main(['--testsuite=testsuite', '--debug', '--report',
+ '2.0.0', '--opnfv-ci'])
expected = cm.exception
logger_temp_obj.getLogger.assert_called_once_with()
@@ -521,7 +538,8 @@ class RunTesting(unittest.TestCase):
mock_config.dovetail_config)
mock_get_result.assert_called_once_with()
mock_clean.assert_called_once_with()
- self.assertEquals({'DEBUG': 'true'}, mock_os.environ)
+ self.assertEquals({'DEBUG': 'true', 'OPNFV_CI': 'true'},
+ mock_os.environ)
mock_create_logs.assert_called_once_with()
logger_obj.info.assert_has_calls([
call('================================================'),
@@ -587,6 +605,7 @@ class RunTesting(unittest.TestCase):
mock_get_list.return_value = None
kwargs_dict = {
'debug': True,
+ 'opnfv_ci': False,
'report': True,
'testsuite': 'testsuite',
'docker_tag': '2.0.0'
@@ -605,7 +624,8 @@ class RunTesting(unittest.TestCase):
mock_config.dovetail_config)
mock_get_result.assert_called_once_with()
mock_clean.assert_called_once_with()
- self.assertEquals({'DEBUG': 'true'}, mock_os.environ)
+ self.assertEquals({'DEBUG': 'true', 'OPNFV_CI': 'false'},
+ mock_os.environ)
mock_create_logs.assert_called_once_with()
logger_obj.info.assert_has_calls([
call('================================================'),
diff --git a/dovetail/tests/unit/utils/test_dovetail_utils.py b/dovetail/tests/unit/utils/test_dovetail_utils.py
index 33fc1eae..2635fb6f 100644
--- a/dovetail/tests/unit/utils/test_dovetail_utils.py
+++ b/dovetail/tests/unit/utils/test_dovetail_utils.py
@@ -1334,3 +1334,52 @@ class DovetailUtilsTesting(unittest.TestCase):
mock_host.assert_called_once()
mock_endpoint.assert_called_once()
mock_hardware.assert_called_once()
+
+ @patch('json.dumps')
+ @patch('dovetail.utils.dovetail_utils.requests')
+ @patch('os.getenv')
+ def test_push_results_to_db(self, mock_getenv, mock_requests, mock_dumps):
+ logger = Mock()
+ case_name = 'case_name'
+ details = {'criteria': 'PASS'}
+ start_date = 'start_date'
+ stop_date = 'stop_date'
+ mock_getenv.side_effect = [
+ 'url', 'installer', 'scenario', 'pod_name', 'build_tag', 'version']
+ post_req = Mock()
+ post_req.raise_for_status.return_value = None
+ mock_requests.post.return_value = post_req
+ mock_dumps.return_value = {"project_name": "dovetail"}
+
+ dovetail_utils.push_results_to_db(
+ case_name, details, start_date, stop_date, logger)
+
+ mock_requests.post.assert_called_once_with(
+ 'url',
+ data={"project_name": "dovetail"},
+ headers={"Content-Type": "application/json"})
+ logger.debug.assert_called_once_with(
+ "The results were successfully pushed to DB.")
+
+ @patch('json.dumps')
+ @patch('dovetail.utils.dovetail_utils.requests')
+ @patch('os.getenv')
+ def test_push_results_to_db_exception(self, mock_getenv, mock_requests,
+ mock_dumps):
+ logger = Mock()
+ case_name = 'case_name'
+ details = {'criteria': 'PASS'}
+ start_date = 'start_date'
+ stop_date = 'stop_date'
+ mock_getenv.side_effect = [
+ 'url', 'installer', 'scenario', 'pod_name', 'build_tag', 'version']
+ post_req = Mock()
+ post_req.raise_for_status.side_effect = Exception()
+ mock_requests.post.return_value = post_req
+ mock_dumps.return_value = {"project_name": "dovetail"}
+ dovetail_utils.push_results_to_db(
+ case_name, details, start_date, stop_date, logger)
+
+ logger.debug.assert_not_called()
+ logger.exception.assert_called_once_with(
+ "The results cannot be pushed to DB.")
diff --git a/dovetail/utils/dovetail_utils.py b/dovetail/utils/dovetail_utils.py
index a3d07824..aee6dc29 100644
--- a/dovetail/utils/dovetail_utils.py
+++ b/dovetail/utils/dovetail_utils.py
@@ -12,6 +12,7 @@ from __future__ import print_function
import sys
import os
import re
+import requests
import subprocess
from collections import Mapping, Set, Sequence
import json
@@ -403,3 +404,30 @@ def get_openstack_info(logger):
get_hosts_info(logger)
get_openstack_endpoint(logger)
get_hardware_info(logger)
+
+
+def push_results_to_db(case_name, details, start_date, stop_date, logger):
+ """
+ Push results to OPNFV TestAPI DB when running with OPNFV CI jobs.
+ All results can be filtered with TestAPI.
+ http://testresults.opnfv.org/test/#/results
+ """
+ try:
+ url = os.getenv('TEST_DB_URL')
+ data = {'project_name': 'dovetail', 'case_name': case_name,
+ 'details': details, 'start_date': start_date,
+ 'stop_date': stop_date}
+ data['criteria'] = details['criteria'] if details else 'FAIL'
+ data['installer'] = os.getenv('INSTALLER_TYPE')
+ data['scenario'] = os.getenv('DEPLOY_SCENARIO')
+ data['pod_name'] = os.getenv('NODE_NAME')
+ data['build_tag'] = os.getenv('BUILD_TAG')
+ data['version'] = os.getenv('VERSION')
+ req = requests.post(url, data=json.dumps(data, sort_keys=True),
+ headers={'Content-Type': 'application/json'})
+ req.raise_for_status()
+ logger.debug('The results were successfully pushed to DB.')
+ return True
+ except Exception:
+ logger.exception('The results cannot be pushed to DB.')
+ return False
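A usage sketch of the new helper (not part of the patch): push_results_to_db reads its endpoint and metadata from environment variables, so a CI job exports them before invoking Dovetail. The environment variable names below come from the function itself; the URL and the other values are assumptions for illustration.

    import logging
    import os

    from dovetail.utils import dovetail_utils

    # Environment variables read by push_results_to_db; values are illustrative.
    os.environ['TEST_DB_URL'] = 'http://testresults.opnfv.org/test/api/v1/results'  # assumed endpoint
    os.environ['INSTALLER_TYPE'] = 'fuel'
    os.environ['DEPLOY_SCENARIO'] = 'os-nosdn-nofeature-ha'
    os.environ['NODE_NAME'] = 'huawei-pod1'
    os.environ['BUILD_TAG'] = 'jenkins-dovetail-fuel-daily-1'
    os.environ['VERSION'] = 'master'

    logger = logging.getLogger('dovetail')
    pushed = dovetail_utils.push_results_to_db(
        case_name='dovetail.example.tc001',   # hypothetical case name
        details={'criteria': 'PASS'},
        start_date='2019-04-03 05:33:24',
        stop_date='2019-04-03 05:35:10',
        logger=logger)
    # Returns True when the POST succeeds, False when the exception path is logged.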
diff --git a/etc/conf/cmd_config.yml b/etc/conf/cmd_config.yml
index f02c864e..1ea2d0a2 100644
--- a/etc/conf/cmd_config.yml
+++ b/etc/conf/cmd_config.yml
@@ -79,3 +79,8 @@ cli:
- '--optional'
is_flag: 'True'
help: 'Run all optional test cases.'
+ opnfvci:
+ flags:
+ - '--opnfv-ci'
+ is_flag: 'True'
+ help: 'Only enabled when running with OPNFV CI jobs and pushing results to TestAPI DB'