summaryrefslogtreecommitdiffstats
path: root/utils/test/scripts
diff options
context:
space:
mode:
Diffstat (limited to 'utils/test/scripts')
-rw-r--r--utils/test/scripts/config.ini14
-rw-r--r--utils/test/scripts/config.py88
-rw-r--r--utils/test/scripts/create_kibana_dashboards.py40
-rw-r--r--utils/test/scripts/kibana_cleanup.py5
-rw-r--r--utils/test/scripts/mongo2elastic_format.py9
-rw-r--r--utils/test/scripts/mongo_to_elasticsearch.py373
-rw-r--r--utils/test/scripts/shared_utils.py1
-rw-r--r--utils/test/scripts/testcases_parser.py (renamed from utils/test/scripts/conf_utils.py)8
8 files changed, 347 insertions, 191 deletions
diff --git a/utils/test/scripts/config.ini b/utils/test/scripts/config.ini
new file mode 100644
index 000000000..63d283dc8
--- /dev/null
+++ b/utils/test/scripts/config.ini
@@ -0,0 +1,14 @@
+# to add a new parameter in the config file,
+# the APIConfig class in config.py must be updated accordingly
+[elastic]
+url = http://localhost:9200
+creds =
+
+[output]
+# elasticsearch or console
+destination = elasticsearch
+
+[kibana]
+url = http://10.63.243.17/kibana/app/kibana
+js = true
+js_path = /usr/share/nginx/html/kibana_dashboards/conf.js
diff --git a/utils/test/scripts/config.py b/utils/test/scripts/config.py
new file mode 100644
index 000000000..2d447a7ba
--- /dev/null
+++ b/utils/test/scripts/config.py
@@ -0,0 +1,88 @@
+#! /usr/bin/env python
+
+from ConfigParser import SafeConfigParser, NoOptionError
+
+
+class ParseError(Exception):
+ """
+ Custom exception class for config file
+ """
+
+ def __init__(self, message):
+ self.msg = message
+
+ def __str__(self):
+ return 'error parsing config file : %s' % self.msg
+
+
+class APIConfig:
+ """
+ The purpose of this class is to load values correctly from the config file.
+ Each key is declared as an attribute in __init__() and linked in parse()
+ """
+
+ def __init__(self):
+ self._default_config_location = "./config.ini"
+ self.elastic_url = 'http://localhost:9200'
+ self.elastic_creds = None
+ self.destination = 'elasticsearch'
+ self.kibana_url = None
+ self.is_js = True
+ self.js_path = None
+
+ def _get_str_parameter(self, section, param):
+ try:
+ return self._parser.get(section, param)
+ except NoOptionError:
+ raise ParseError("[%s.%s] parameter not found" % (section, param))
+
+ def _get_int_parameter(self, section, param):
+ try:
+ return int(self._get_str_parameter(section, param))
+ except ValueError:
+ raise ParseError("[%s.%s] not an int" % (section, param))
+
+ def _get_bool_parameter(self, section, param):
+ result = self._get_str_parameter(section, param)
+ if str(result).lower() == 'true':
+ return True
+ if str(result).lower() == 'false':
+ return False
+
+ raise ParseError(
+ "[%s.%s : %s] not a boolean" % (section, param, result))
+
+ @staticmethod
+ def parse(config_location=None):
+ obj = APIConfig()
+
+ if config_location is None:
+ config_location = obj._default_config_location
+
+ obj._parser = SafeConfigParser()
+ obj._parser.read(config_location)
+ if not obj._parser:
+ raise ParseError("%s not found" % config_location)
+
+ # Linking attributes to keys from file with their sections
+ obj.elastic_url = obj._get_str_parameter("elastic", "url")
+ obj.elastic_creds = obj._get_str_parameter("elastic", "creds")
+ obj.destination = obj._get_str_parameter("output", "destination")
+ obj.kibana_url = obj._get_str_parameter("kibana", "url")
+ obj.is_js = obj._get_bool_parameter("kibana", "js")
+ obj.js_path = obj._get_str_parameter("kibana", "js_path")
+
+ return obj
+
+ def __str__(self):
+ return "elastic_url = %s \n" \
+ "elastic_creds = %s \n" \
+ "destination = %s \n" \
+ "kibana_url = %s \n" \
+ "is_js = %s \n" \
+ "js_path = %s \n" % (self.elastic_url,
+ self.elastic_creds,
+ self.destination,
+ self.kibana_url,
+ self.is_js,
+ self.js_path)
diff --git a/utils/test/scripts/create_kibana_dashboards.py b/utils/test/scripts/create_kibana_dashboards.py
index efa6e177e..19d5b5e52 100644
--- a/utils/test/scripts/create_kibana_dashboards.py
+++ b/utils/test/scripts/create_kibana_dashboards.py
@@ -4,12 +4,21 @@ import urlparse
import argparse
-import conf_utils
import logger_utils
import shared_utils
+import testcases_parser
+from config import APIConfig
logger = logger_utils.KibanaDashboardLogger('elastic2kibana').get
+parser = argparse.ArgumentParser()
+parser.add_argument("-c", "--config-file",
+ dest='config_file',
+ help="Config file location")
+
+args = parser.parse_args()
+CONF = APIConfig().parse(args.config_file)
+
_installers = {'fuel', 'apex', 'compass', 'joid'}
@@ -303,7 +312,7 @@ def construct_dashboards():
:return: list of KibanaDashboards
"""
kibana_dashboards = []
- for project, case_dicts in conf_utils.testcases_yaml.items():
+ for project, case_dicts in testcases_parser.testcases_yaml.items():
for case in case_dicts:
case_name = case.get('name')
visualizations = case.get('visualizations')
@@ -351,28 +360,11 @@ def generate_js_inputs(js_file_path, kibana_url, dashboards):
if __name__ == '__main__':
- parser = argparse.ArgumentParser(description='Create Kibana dashboards from data in elasticsearch')
- parser.add_argument('-e', '--elasticsearch-url', default='http://localhost:9200',
- help='the url of elasticsearch, defaults to http://localhost:9200')
-
- parser.add_argument('-js', '--generate_js_inputs', action='store_true',
- help='Use this argument to generate javascript inputs for kibana landing page')
-
- parser.add_argument('--js_path', default='/usr/share/nginx/html/kibana_dashboards/conf.js',
- help='Path of javascript file with inputs for kibana landing page')
-
- parser.add_argument('-k', '--kibana_url', default='https://testresults.opnfv.org/kibana/app/kibana',
- help='The url of kibana for javascript inputs')
-
- parser.add_argument('-u', '--elasticsearch-username', default=None,
- help='The username with password for elasticsearch in format username:password')
-
- args = parser.parse_args()
- base_elastic_url = args.elasticsearch_url
- generate_inputs = args.generate_js_inputs
- input_file_path = args.js_path
- kibana_url = args.kibana_url
- es_creds = args.elasticsearch_username
+ base_elastic_url = CONF.elastic_url
+ generate_inputs = CONF.is_js
+ input_file_path = CONF.js_path
+ kibana_url = CONF.kibana_url
+ es_creds = CONF.elastic_creds
dashboards = construct_dashboards()
diff --git a/utils/test/scripts/kibana_cleanup.py b/utils/test/scripts/kibana_cleanup.py
index e699db43c..d87d9a285 100644
--- a/utils/test/scripts/kibana_cleanup.py
+++ b/utils/test/scripts/kibana_cleanup.py
@@ -1,9 +1,10 @@
#! /usr/bin/env python
import logging
+import urlparse
+
import argparse
+
import shared_utils
-import json
-import urlparse
logger = logging.getLogger('clear_kibana')
logger.setLevel(logging.DEBUG)
diff --git a/utils/test/scripts/mongo2elastic_format.py b/utils/test/scripts/mongo2elastic_format.py
index 0b036e3ff..ef485bae0 100644
--- a/utils/test/scripts/mongo2elastic_format.py
+++ b/utils/test/scripts/mongo2elastic_format.py
@@ -62,7 +62,14 @@ def format_rally(testcase):
-> details.tests
-> details.success_percentage
"""
- summary = testcase['details']['summary']
+ details = testcase['details']
+ summary = None
+ for item in details:
+ if 'summary' in item:
+ summary = item['summary']
+
+ if not summary:
+ return False
testcase['details'] = {
'duration': summary['duration'],
diff --git a/utils/test/scripts/mongo_to_elasticsearch.py b/utils/test/scripts/mongo_to_elasticsearch.py
index b722793b3..777eda6ad 100644
--- a/utils/test/scripts/mongo_to_elasticsearch.py
+++ b/utils/test/scripts/mongo_to_elasticsearch.py
@@ -1,4 +1,5 @@
#! /usr/bin/env python
+
import datetime
import json
import os
@@ -9,191 +10,239 @@ import uuid
import argparse
-import conf_utils
import logger_utils
import mongo2elastic_format
import shared_utils
+import testcases_parser
+from config import APIConfig
logger = logger_utils.KibanaDashboardLogger('mongo2elastic').get
-parser = argparse.ArgumentParser(description='Modify and filter mongo json data for elasticsearch')
-parser.add_argument('-od', '--output-destination',
- default='elasticsearch',
- choices=('elasticsearch', 'stdout'),
- help='defaults to elasticsearch')
-
-parser.add_argument('-ml', '--merge-latest', default=0, type=int, metavar='N',
+parser = argparse.ArgumentParser()
+parser.add_argument("-c", "--config-file",
+ dest='config_file',
+ help="Config file location")
+parser.add_argument('-ld', '--latest-days',
+ default=0,
+ type=int,
+ metavar='N',
help='get entries old at most N days from mongodb and'
' parse those that are not already in elasticsearch.'
' If not present, will get everything from mongodb, which is the default')
-parser.add_argument('-e', '--elasticsearch-url', default='http://localhost:9200',
- help='the url of elasticsearch, defaults to http://localhost:9200')
-
-parser.add_argument('-u', '--elasticsearch-username', default=None,
- help='The username with password for elasticsearch in format username:password')
-
args = parser.parse_args()
+CONF = APIConfig().parse(args.config_file)
+
tmp_docs_file = './mongo-{}.json'.format(uuid.uuid4())
-def _fix_date(date_string):
- if isinstance(date_string, dict):
- return date_string['$date']
- else:
- return date_string[:-3].replace(' ', 'T') + 'Z'
-
-
-def verify_document(testcase):
- """
- Mandatory fields:
- installer
- pod_name
- version
- case_name
- date
- project
- details
-
- these fields must be present and must NOT be None
-
- Optional fields:
- description
-
- these fields will be preserved if the are NOT None
- """
- mandatory_fields = ['installer',
- 'pod_name',
- 'version',
- 'case_name',
- 'project_name',
- 'details']
- mandatory_fields_to_modify = {'start_date': _fix_date}
- fields_to_swap_or_add = {'scenario': 'version'}
- if '_id' in testcase:
- mongo_id = testcase['_id']
- else:
- mongo_id = None
- optional_fields = ['description']
- for key, value in testcase.items():
- if key in mandatory_fields:
- if value is None:
- # empty mandatory field, invalid input
- logger.info("Skipping testcase with mongo _id '{}' because the testcase was missing value"
- " for mandatory field '{}'".format(mongo_id, key))
- return False
- else:
- mandatory_fields.remove(key)
- elif key in mandatory_fields_to_modify:
- if value is None:
- # empty mandatory field, invalid input
- logger.info("Skipping testcase with mongo _id '{}' because the testcase was missing value"
- " for mandatory field '{}'".format(mongo_id, key))
- return False
+class DocumentPublisher:
+
+ def __init__(self, doc, fmt, exist_docs, creds, to):
+ self.doc = doc
+ self.fmt = fmt
+ self.creds = creds
+ self.exist_docs = exist_docs
+ self.to = to
+ self.is_formatted = True
+
+ def format(self):
+ try:
+ if self._verify_document() and self.fmt:
+ self.is_formatted = vars(mongo2elastic_format)[self.fmt](self.doc)
else:
- testcase[key] = mandatory_fields_to_modify[key](value)
- del mandatory_fields_to_modify[key]
- elif key in fields_to_swap_or_add:
- if value is None:
- swapped_key = fields_to_swap_or_add[key]
- swapped_value = testcase[swapped_key]
- logger.info("Swapping field '{}' with value None for '{}' with value '{}'.".format(key, swapped_key, swapped_value))
- testcase[key] = swapped_value
- del fields_to_swap_or_add[key]
+ self.is_formatted = False
+ except Exception:
+ logger.error("Fail in format testcase[%s]\nerror message: %s" %
+ (self.doc, traceback.format_exc()))
+ self.is_formatted = False
+ finally:
+ return self
+
+ def publish(self):
+ if self.is_formatted and self.doc not in self.exist_docs:
+ self._publish()
+
+ def _publish(self):
+ status, data = shared_utils.publish_json(self.doc, self.creds, self.to)
+ if status > 300:
+ logger.error('Publish record[{}] failed, due to [{}]'
+ .format(self.doc, json.loads(data)['error']['reason']))
+
+ def _fix_date(self, date_string):
+ if isinstance(date_string, dict):
+ return date_string['$date']
+ else:
+ return date_string[:-3].replace(' ', 'T') + 'Z'
+
+ def _verify_document(self):
+ """
+ Mandatory fields:
+ installer
+ pod_name
+ version
+ case_name
+ date
+ project
+ details
+
+ these fields must be present and must NOT be None
+
+ Optional fields:
+ description
+
+    these fields will be preserved if they are NOT None
+ """
+ mandatory_fields = ['installer',
+ 'pod_name',
+ 'version',
+ 'case_name',
+ 'project_name',
+ 'details']
+ mandatory_fields_to_modify = {'start_date': self._fix_date}
+ fields_to_swap_or_add = {'scenario': 'version'}
+ if '_id' in self.doc:
+ mongo_id = self.doc['_id']
+ else:
+ mongo_id = None
+ optional_fields = ['description']
+ for key, value in self.doc.items():
+ if key in mandatory_fields:
+ if value is None:
+ # empty mandatory field, invalid input
+ logger.info("Skipping testcase with mongo _id '{}' because the testcase was missing value"
+ " for mandatory field '{}'".format(mongo_id, key))
+ return False
+ else:
+ mandatory_fields.remove(key)
+ elif key in mandatory_fields_to_modify:
+ if value is None:
+ # empty mandatory field, invalid input
+ logger.info("Skipping testcase with mongo _id '{}' because the testcase was missing value"
+ " for mandatory field '{}'".format(mongo_id, key))
+ return False
+ else:
+ self.doc[key] = mandatory_fields_to_modify[key](value)
+ del mandatory_fields_to_modify[key]
+ elif key in fields_to_swap_or_add:
+ if value is None:
+ swapped_key = fields_to_swap_or_add[key]
+ swapped_value = self.doc[swapped_key]
+ logger.info("Swapping field '{}' with value None for '{}' with value '{}'.".format(key, swapped_key,
+ swapped_value))
+ self.doc[key] = swapped_value
+ del fields_to_swap_or_add[key]
+ else:
+ del fields_to_swap_or_add[key]
+ elif key in optional_fields:
+ if value is None:
+ # empty optional field, remove
+ del self.doc[key]
+ optional_fields.remove(key)
else:
- del fields_to_swap_or_add[key]
- elif key in optional_fields:
- if value is None:
- # empty optional field, remove
- del testcase[key]
- optional_fields.remove(key)
+ # unknown field
+ del self.doc[key]
+
+ if len(mandatory_fields) > 0:
+ # some mandatory fields are missing
+ logger.info("Skipping testcase with mongo _id '{}' because the testcase was missing"
+ " mandatory field(s) '{}'".format(mongo_id, mandatory_fields))
+ return False
+ elif len(mandatory_fields_to_modify) > 0:
+ # some mandatory fields are missing
+ logger.info("Skipping testcase with mongo _id '{}' because the testcase was missing"
+ " mandatory field(s) '{}'".format(mongo_id, mandatory_fields_to_modify.keys()))
+ return False
else:
- # unknown field
- del testcase[key]
-
- if len(mandatory_fields) > 0:
- # some mandatory fields are missing
- logger.info("Skipping testcase with mongo _id '{}' because the testcase was missing"
- " mandatory field(s) '{}'".format(mongo_id, mandatory_fields))
- return False
- elif len(mandatory_fields_to_modify) > 0:
- # some mandatory fields are missing
- logger.info("Skipping testcase with mongo _id '{}' because the testcase was missing"
- " mandatory field(s) '{}'".format(mongo_id, mandatory_fields_to_modify.keys()))
- return False
- else:
- if len(fields_to_swap_or_add) > 0:
- for key, swap_key in fields_to_swap_or_add.iteritems():
- testcase[key] = testcase[swap_key]
-
- return True
-
-
-def format_document(testcase):
- # 1. verify and identify the testcase
- # 2. if modification is implemented, then use that
- # 3. if not, try to use default
- # 4. if 2 or 3 is successful, return True, otherwise return False
- if verify_document(testcase):
- project = testcase['project_name']
- case_name = testcase['case_name']
- fmt = conf_utils.get_format(project, case_name)
- if fmt:
- try:
- logger.info("Processing %s/%s using format %s" % (project, case_name, fmt))
- return vars(mongo2elastic_format)[fmt](testcase)
- except Exception:
- logger.error("Fail in format testcase[%s]\nerror message: %s" % (testcase, traceback.format_exc()))
- return False
- else:
- return False
-
-
-def export_documents(days):
- cmd = ['mongoexport', '--db', 'test_results_collection', '-c', 'results']
- if days > 0:
- past_time = datetime.datetime.today() - datetime.timedelta(days=days)
- cmd += ['--query', '{{"start_date":{{$gt:"{}"}}}}'.format(past_time)]
- cmd += [ '--out', '{}'.format(tmp_docs_file)]
-
- try:
- subprocess.check_call(cmd)
- except Exception, err:
- logger.error("export mongodb failed: %s" % err)
- exit(-1)
-
-
-def publish_document(document, es_creds, to):
- status, data = shared_utils.publish_json(document, es_creds, to)
- if status > 300:
- logger.error('Publish record[{}] failed, due to [{}]'
- .format(document, json.loads(data)['error']['reason']))
-
-
-def publish_nonexist_documents(elastic_docs, es_creds, to):
- try:
- with open(tmp_docs_file) as fdocs:
- for doc_line in fdocs:
- doc = json.loads(doc_line)
- if format_document(doc) and doc not in elastic_docs:
- publish_document(doc, es_creds, to)
- finally:
- fdocs.close()
+ if len(fields_to_swap_or_add) > 0:
+ for key, swap_key in fields_to_swap_or_add.iteritems():
+ self.doc[key] = self.doc[swap_key]
+
+ return True
+
+
+class DocumentsPublisher:
+
+ def __init__(self, project, case, fmt, days, elastic_url, creds, to):
+ self.project = project
+ self.case = case
+ self.fmt = fmt
+ self.days = days
+ self.elastic_url = elastic_url
+ self.creds = creds
+ self.to = to
+ self.existed_docs = []
+
+ def export(self):
+ if self.days > 0:
+ past_time = datetime.datetime.today() - datetime.timedelta(days=self.days)
+ query = '''{{
+ "project_name": "{}",
+ "case_name": "{}",
+ "start_date": {{"$gt" : "{}"}}
+ }}'''.format(self.project, self.case, past_time)
+ else:
+ query = '''{{
+ "project_name": "{}",
+ "case_name": "{}"
+ }}'''.format(self.project, self.case)
+ cmd = ['mongoexport',
+ '--db', 'test_results_collection',
+ '--collection', 'results',
+ '--query', '{}'.format(query),
+ '--out', '{}'.format(tmp_docs_file)]
+ try:
+ subprocess.check_call(cmd)
+ return self
+ except Exception, err:
+ logger.error("export mongodb failed: %s" % err)
+ self._remove()
+ exit(-1)
+
+ def get_existed_docs(self):
+ self.existed_docs = shared_utils.get_elastic_docs_by_days(self.elastic_url, self.creds, self.days)
+ return self
+
+ def publish(self):
+ try:
+ with open(tmp_docs_file) as fdocs:
+ for doc_line in fdocs:
+ DocumentPublisher(json.loads(doc_line),
+ self.fmt,
+ self.existed_docs,
+ self.creds,
+ self.to).format().publish()
+ finally:
+ fdocs.close()
+ self._remove()
+
+ def _remove(self):
if os.path.exists(tmp_docs_file):
os.remove(tmp_docs_file)
-if __name__ == '__main__':
- base_elastic_url = urlparse.urljoin(args.elasticsearch_url, '/test_results/mongo2elastic')
- to = args.output_destination
- days = args.merge_latest
- es_creds = args.elasticsearch_username
+def main():
+ base_elastic_url = urlparse.urljoin(CONF.elastic_url, '/test_results/mongo2elastic')
+ to = CONF.destination
+ days = args.latest_days
+ es_creds = CONF.elastic_creds
if to == 'elasticsearch':
to = base_elastic_url
- export_documents(days)
- elastic_docs = shared_utils.get_elastic_docs_by_days(base_elastic_url, es_creds, days)
- logger.info('number of hits in elasticsearch for now-{}d: {}'.format(days, len(elastic_docs)))
- publish_nonexist_documents(elastic_docs, es_creds, to)
+ for project, case_dicts in testcases_parser.testcases_yaml.items():
+ for case_dict in case_dicts:
+ case = case_dict.get('name')
+ fmt = testcases_parser.compose_format(case_dict.get('format'))
+ DocumentsPublisher(project,
+ case,
+ fmt,
+ days,
+ base_elastic_url,
+ es_creds,
+ to).export().get_existed_docs().publish()
+
+
+if __name__ == '__main__':
+ main() \ No newline at end of file
diff --git a/utils/test/scripts/shared_utils.py b/utils/test/scripts/shared_utils.py
index aa8a65d79..e90a17fa3 100644
--- a/utils/test/scripts/shared_utils.py
+++ b/utils/test/scripts/shared_utils.py
@@ -41,6 +41,7 @@ def get_elastic_docs(elastic_url, creds, body=None, field = '_source'):
elastic_docs.append(hit[field])
return elastic_docs
+
def get_elastic_docs_by_days(elastic_url, creds, days):
if days == 0:
body = '''{
diff --git a/utils/test/scripts/conf_utils.py b/utils/test/scripts/testcases_parser.py
index e35d5ed10..cf9599858 100644
--- a/utils/test/scripts/conf_utils.py
+++ b/utils/test/scripts/testcases_parser.py
@@ -6,15 +6,19 @@ with open('./testcases.yaml') as f:
f.close()
+def compose_format(fmt):
+ return 'format_' + fmt.strip()
+
+
def get_format(project, case):
testcases = testcases_yaml.get(project)
if isinstance(testcases, list):
for case_dict in testcases:
if case_dict['name'] == case:
- return 'format_' + case_dict['format'].strip()
+ return compose_format(case_dict['format'])
return None
if __name__ == '__main__':
fmt = get_format('functest', 'vping_ssh')
- print fmt \ No newline at end of file
+ print fmt