-rw-r--r--   docker/Dockerfile                1
-rw-r--r--   func/ansible_api.py             11
-rw-r--r--   func/args_handler.py            27
-rw-r--r--   func/driver.py                  38
-rw-r--r--   func/env_setup.py               29
-rw-r--r--   func/spawn_vm.py                 2
-rw-r--r--   restful_server/db.py            41
-rw-r--r--   restful_server/qtip_server.py   54
-rw-r--r--   test_list/compute               26
-rw-r--r--   test_list/network               12
-rw-r--r--   test_list/storage               10
-rw-r--r--   tests/driver_test.py             6
-rw-r--r--   tests/qtip_server_test.py       55
-rw-r--r--   utils/__init__.py                0
-rw-r--r--   utils/logger_utils.py           65
15 files changed, 282 insertions(+), 95 deletions(-)
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 2adeba6e..fc0e57c5 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -43,6 +43,7 @@ RUN apt-get install ansible --force-yes -y
RUN mkdir -p ${REPOS_DIR}
RUN mkdir -p /root/.ssh
+RUN mkdir -p /var/log/qtip
RUN chmod 700 /root/.ssh
#Config ansible
diff --git a/func/ansible_api.py b/func/ansible_api.py
index 57224eb7..2f02a62e 100644
--- a/func/ansible_api.py
+++ b/func/ansible_api.py
@@ -8,12 +8,15 @@
##############################################################################
import os
from collections import namedtuple
-import logging
+from ansible.executor.playbook_executor import PlaybookExecutor
+from ansible.inventory import Inventory
from ansible.parsing.dataloader import DataLoader
from ansible.vars import VariableManager
-from ansible.inventory import Inventory
-from ansible.executor.playbook_executor import PlaybookExecutor
+
+from utils import logger_utils
+
+logger = logger_utils.QtipLogger('ansible_api').get
class AnsibleApi:
@@ -26,7 +29,7 @@ class AnsibleApi:
def _check_path(self, file_path):
if not os.path.exists(file_path):
- logging.error('The playbook %s does not exist' % file_path)
+ logger.error('The playbook %s does not exist' % file_path)
return False
else:
return True
diff --git a/func/args_handler.py b/func/args_handler.py
index 57ecfcbd..50d803eb 100644
--- a/func/args_handler.py
+++ b/func/args_handler.py
@@ -7,19 +7,23 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
import os
+from operator import add
+import simplejson as json
from func.env_setup import Env_setup
from func.spawn_vm import SpawnVM
from func.driver import Driver
-def get_files_in_test_list(suit_name):
- with open('test_list/' + suit_name, 'r') as fin_put:
- benchmark_list = fin_put.readlines()
- return map(lambda x: x.rstrip(), benchmark_list)
+def get_files_in_test_list(suit_name, case_type='all'):
+ benchmark_list = json.load(file('test_list/{0}'.format(suit_name)))
+ return reduce(add, benchmark_list.values()) \
+ if case_type == 'all' else benchmark_list[case_type]
-def get_files_in_test_case(lab, suit_name):
- return os.listdir('./test_cases/{0}/{1}'.format(lab, suit_name))
+def get_files_in_test_case(lab, suit_name, case_type='all'):
+ test_case_all = os.listdir('./test_cases/{0}/{1}'.format(lab, suit_name))
+ return test_case_all if case_type == 'all' else \
+ filter(lambda x: case_type in x, test_case_all)
def get_benchmark_path(lab, suit, benchmark):
@@ -51,12 +55,13 @@ def prepare_ansible_env(benchmark_test_case):
def run_benchmark(installer_type, pwd, benchmark, benchmark_details,
proxy_info, env_setup, benchmark_test_case):
driver = Driver()
- driver.drive_bench(installer_type, pwd, benchmark,
- env_setup.roles_dict.items(), _get_f_name(benchmark_test_case),
- benchmark_details, env_setup.ip_pw_dict.items(), proxy_info)
+ return driver.drive_bench(installer_type, pwd, benchmark,
+ env_setup.roles_dict.items(),
+ _get_f_name(benchmark_test_case),
+ benchmark_details, env_setup.ip_pw_dict.items(), proxy_info)
def prepare_and_run_benchmark(installer_type, pwd, benchmark_test_case):
benchmark, benchmark_details, proxy_info, env_setup = prepare_ansible_env(benchmark_test_case)
- run_benchmark(installer_type, pwd, benchmark, benchmark_details,
- proxy_info, env_setup, benchmark_test_case)
+ return run_benchmark(installer_type, pwd, benchmark, benchmark_details,
+ proxy_info, env_setup, benchmark_test_case)
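
The reworked get_files_in_test_list reads each file under test_list/ as JSON grouped by case type ("bm", "vm") rather than as a flat line list. A minimal standalone sketch of that lookup, written against the JSON layout introduced further down in this change; the function body here is an illustration, not the project's code:

import json
from functools import reduce  # a builtin in Python 2; imported here for Python 3
from operator import add


def get_files_in_test_list(suit_name, case_type='all'):
    # The suite file under test_list/ maps case types ("bm", "vm")
    # to lists of benchmark YAML file names.
    with open('test_list/{0}'.format(suit_name)) as fin:
        benchmark_list = json.load(fin)
    if case_type == 'all':
        return reduce(add, benchmark_list.values())
    return benchmark_list[case_type]


# e.g. get_files_in_test_list('compute', 'bm')
# -> ['dhrystone_bm.yaml', 'whetstone_bm.yaml', 'ramspeed_bm.yaml', ...]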
diff --git a/func/driver.py b/func/driver.py
index 726016a5..bcda0ce1 100644
--- a/func/driver.py
+++ b/func/driver.py
@@ -1,20 +1,24 @@
##############################################################################
-# Copyright (c) 2015 Dell Inc and others.
+# Copyright (c) 2015 Dell Inc, ZTE and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-import logging
-from func.ansible_api import AnsibleApi
+from utils import logger_utils
+from operator import add
+from ansible_api import AnsibleApi
+
+
+logger = logger_utils.QtipLogger('driver').get
class Driver:
def __init__(self):
- logging.info("Class driver initialized\n")
+ logger.info("Class driver initialized\n")
self.installer_username = {'fuel': 'root',
'joid': 'ubuntu',
'apex': 'heat-admin'}
@@ -45,7 +49,7 @@ class Driver:
def get_special_var_json(self, role, roles, benchmark_detail, pip_dict):
special_json = {}
index = roles.index(role) + 1
- private_ip = pip_dict[0][1][0] if pip_dict[0][1][0] else 'NONE'
+ private_ip = pip_dict[0][1] if pip_dict[0][1][0] else 'NONE'
map(lambda x: special_json.update({'ip' + str(index): x}), role[1])\
if benchmark_detail and (role[0] == '1-server') else None
map(lambda x: special_json.update({'privateip' + str(index): private_ip}), role[1])\
@@ -58,12 +62,12 @@ class Driver:
return special_json
def run_ansible_playbook(self, benchmark, extra_vars):
- logging.info(extra_vars)
+ logger.info(extra_vars)
ansible_api = AnsibleApi()
ansible_api.execute_playbook('./data/hosts',
'./benchmarks/playbooks/{0}.yaml'.format(benchmark),
'./data/QtipKey', extra_vars)
- return ansible_api.get_detail_playbook_stats()
+ return self.get_ansible_result(extra_vars['role'], ansible_api.get_detail_playbook_stats())
def drive_bench(self, installer_type, pwd, benchmark, roles, benchmark_fname,
benchmark_detail=None, pip_dict=None, proxy_info=None):
@@ -71,8 +75,18 @@ class Driver:
pip_dict = sorted(pip_dict)
var_json = self.get_common_var_json(installer_type, pwd, benchmark_fname,
benchmark_detail, pip_dict, proxy_info)
- map(lambda role: self.run_ansible_playbook
- (benchmark, self.merge_two_dicts(var_json,
- self.get_special_var_json(role, roles,
- benchmark_detail,
- pip_dict))), roles)
+ result = map(lambda role: self.run_ansible_playbook
+ (benchmark, self.merge_two_dicts(var_json,
+ self.get_special_var_json(role, roles,
+ benchmark_detail,
+ pip_dict))), roles)
+ return reduce(self._merge_ansible_result, result)
+
+ def get_ansible_result(self, role, stats):
+ result = reduce(add, map(lambda x: x[1]['failures'] + x[1]['unreachable'], stats))
+ return {'result': result,
+ 'detail': {role: stats}}
+
+ def _merge_ansible_result(self, result_1, result_2):
+ return {'result': result_1['result'] + result_2['result'],
+ 'detail': self.merge_two_dicts(result_1['detail'], result_2['detail'])}
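
drive_bench now collects one result dict per role and folds them into a single {'result': <failure count>, 'detail': {...}} structure. A self-contained sketch of that aggregation, with hand-made playbook stats standing in for what get_detail_playbook_stats() would return:

from functools import reduce
from operator import add


def get_ansible_result(role, stats):
    # stats: list of (host, counters) pairs from the playbook summary
    failures = reduce(add,
                      map(lambda x: x[1]['failures'] + x[1]['unreachable'], stats))
    return {'result': failures, 'detail': {role: stats}}


def merge_ansible_result(result_1, result_2):
    detail = dict(result_1['detail'])
    detail.update(result_2['detail'])
    return {'result': result_1['result'] + result_2['result'], 'detail': detail}


# Hand-made stats for illustration only:
server = get_ansible_result('1-server', [('10.20.0.13', {'failures': 0, 'unreachable': 0})])
host = get_ansible_result('2-host', [('10.20.0.15', {'failures': 1, 'unreachable': 0})])
print(merge_ansible_result(server, host))
# {'result': 1, 'detail': {'1-server': [...], '2-host': [...]}}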
diff --git a/func/env_setup.py b/func/env_setup.py
index 96f984cb..f10f8620 100644
--- a/func/env_setup.py
+++ b/func/env_setup.py
@@ -8,18 +8,19 @@
##############################################################################
import os
+import random
+import socket
import sys
-from collections import defaultdict
-import yaml
import time
-import paramiko
-import socket
+from collections import defaultdict
from os.path import expanduser
-import random
-import logging
-LOG = logging.getLogger(__name__)
-LOG.setLevel(logging.DEBUG)
+import paramiko
+import yaml
+
+from utils import logger_utils
+
+logger = logger_utils.QtipLogger('env_setup').get
class Env_setup:
@@ -100,7 +101,7 @@ class Env_setup:
@staticmethod
def fetch_compute_ips():
- LOG.info("Fetch compute ips through installer")
+ logger.info("Fetch compute ips through installer")
ips = []
installer_type = str(os.environ['INSTALLER_TYPE'].lower())
@@ -112,18 +113,18 @@ class Env_setup:
cmd = "bash ./func/fetch_compute_ips.sh -i %s -a %s" % \
(installer_type, installer_ip)
- LOG.info(cmd)
+ logger.info(cmd)
os.system(cmd)
home = expanduser("~")
with open(home + "/ips.log", "r") as file:
data = file.read()
if data:
ips.extend(data.rstrip('\n').split('\n'))
- LOG.info("All compute ips: %s" % ips)
+ logger.info("All compute ips: %s" % ips)
return ips
def check_machine_ips(self, host_tag):
- LOG.info("Check machine ips")
+ logger.info("Check machine ips")
ips = self.fetch_compute_ips()
ips_num = len(ips)
num = len(host_tag)
@@ -137,7 +138,7 @@ class Env_setup:
if host_tag[hostlabel]['ip'] in ips:
info = "%s's ip %s is defined by test case yaml file" % \
(hostlabel, host_tag[hostlabel]['ip'])
- LOG.info(info)
+ logger.info(info)
else:
err = "%s is not in %s" % (host_tag[hostlabel]['ip'], ips)
raise RuntimeError(err)
@@ -174,7 +175,7 @@ class Env_setup:
def parse(self, config_file_path):
try:
f_name = open(config_file_path, 'r+')
- doc = yaml.load(f_name)
+ doc = yaml.safe_load(f_name)
f_name.close()
if doc['Scenario']['benchmark']:
self.benchmark = doc['Scenario']['benchmark']
diff --git a/func/spawn_vm.py b/func/spawn_vm.py
index 15c26861..5710308b 100644
--- a/func/spawn_vm.py
+++ b/func/spawn_vm.py
@@ -65,7 +65,7 @@ class SpawnVM(Env_setup):
Heat_Dic = {}
try:
with open('./heat/SampleHeat.yaml', 'r+') as H_temp:
- Heat_Dic = yaml.load(H_temp)
+ Heat_Dic = yaml.safe_load(H_temp)
except yaml.YAMLError as exc:
if hasattr(exc, 'problem_mark'):
mark = exc.problem_mark
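
Both env_setup.py and spawn_vm.py move from yaml.load to yaml.safe_load, which only builds plain Python containers from the YAML stream. A small illustration of the safer call (the file path is taken from the hunk above; the snippet itself is not part of the patch):

import yaml

with open('./heat/SampleHeat.yaml') as f:
    heat_dict = yaml.safe_load(f)  # builds plain dicts, lists and scalars only
# yaml.load() without an explicit Loader can instantiate arbitrary Python
# objects from tags such as !!python/object/apply; safe_load() rejects them.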
diff --git a/restful_server/db.py b/restful_server/db.py
index 42808b80..916fc031 100644
--- a/restful_server/db.py
+++ b/restful_server/db.py
@@ -10,6 +10,7 @@ from datetime import datetime
import uuid
jobs = {}
+threads = {}
def create_job(args):
@@ -23,8 +24,8 @@ def create_job(args):
'suite_name': args["suite_name"],
'max-minutes': args["max-minutes"],
'type': args["type"],
- 'start-time': str(datetime.now()),
- 'end-time': None,
+ 'start_time': str(datetime.now()),
+ 'end_time': None,
'state': 'processing',
'state_detail': [],
'result': []}
@@ -33,7 +34,9 @@ def create_job(args):
def delete_job(job_id):
- if job_id in jobs.keys():
+ if job_id in threads:
+ stop_thread(job_id)
+ if job_id in jobs:
jobs[job_id]['end_time'] = str(datetime.now())
jobs[job_id]['state'] = 'terminated'
return True
@@ -42,23 +45,24 @@ def delete_job(job_id):
def get_job_info(job_id):
- if job_id in jobs.keys():
+ if job_id in jobs:
return jobs[job_id]
else:
return None
-def finish_job(job_id, state):
- jobs[job_id]['end-time'] = str(datetime.now())
- jobs[job_id]['state'] = state
+def finish_job(job_id):
+ jobs[job_id]['end_time'] = str(datetime.now())
+ jobs[job_id]['state'] = 'finished'
+ del threads[job_id]
def update_job_state_detail(job_id, state_detail):
- jobs[job_id][state_detail] = state_detail
+ jobs[job_id]['state_detail'] = state_detail
def update_job_result(job_id, result):
- jobs[job_id][result] = result
+ jobs[job_id]['result'] = result
def is_job_timeout(job_id):
@@ -66,3 +70,22 @@ def is_job_timeout(job_id):
"%Y-%m-%d %H:%M:%S.%f")
return True if jobs[job_id]['max-minutes'] * 60 < period.total_seconds()\
else False
+
+
+def start_thread(job_id, thread, thread_stop):
+ threads[job_id] = {'thread': thread,
+ 'thread_stop': thread_stop}
+ thread.start()
+
+
+def stop_thread(job_id):
+ if threads[job_id]['thread'].isAlive():
+ threads[job_id]['thread_stop'].set()
+ threads[job_id]['thread'].join()
+ if job_id in threads:
+ del threads[job_id]
+
+
+def update_benmark_state_in_state_detail(job_id, benchmark, benchmark_state):
+ filter(lambda x: x["benchmark"] == benchmark,
+ get_job_info(job_id)["state_detail"])[0]['state'] = benchmark_state
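
db.py now tracks one worker thread per job in a threads registry, so deleting a job can signal the thread's stop event and join it before the record is marked terminated. A minimal sketch of that start/stop life cycle under the same Event-based protocol (the worker function is invented for illustration):

import threading
import time

threads = {}


def start_thread(job_id, thread, thread_stop):
    threads[job_id] = {'thread': thread, 'thread_stop': thread_stop}
    thread.start()


def stop_thread(job_id):
    if threads[job_id]['thread'].is_alive():      # isAlive() in Python 2
        threads[job_id]['thread_stop'].set()      # ask the worker to stop
        threads[job_id]['thread'].join()          # wait for it to exit
    threads.pop(job_id, None)


def worker(stop_event):
    while not stop_event.is_set():                # worker polls the shared event
        time.sleep(0.1)


stop = threading.Event()
start_thread('job-1', threading.Thread(target=worker, args=(stop,)), stop)
stop_thread('job-1')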
diff --git a/restful_server/qtip_server.py b/restful_server/qtip_server.py
index 00d598a0..b03c8f16 100644
--- a/restful_server/qtip_server.py
+++ b/restful_server/qtip_server.py
@@ -9,7 +9,10 @@
from flask import Flask, abort
from flask_restful import Api, Resource, fields, reqparse
from flask_restful_swagger import swagger
+import threading
+from copy import copy
import db
+import func.args_handler as args_handler
app = Flask(__name__)
@@ -89,12 +92,12 @@ class JobList(Resource):
"installer_ip": The installer ip of the pod,
"max-minutes": If specified, the maximum duration in minutes
-for any single test iteration, default is '10',
+for any single test iteration, default is '60',
"pod_name": If specified, the Pod name, default is 'default',
-"suite_name": If specified, Test suite name, for example 'compute', 'network', 'storage', 'all',
-default is 'all'
+"suite_name": If specified, Test suite name, for example 'compute', 'network', 'storage',
+default is 'compute'
"type": BM or VM,default is 'BM'
""",
"required": True,
@@ -122,17 +125,52 @@ default is 'all'
parser = reqparse.RequestParser()
parser.add_argument('installer_type', type=str, required=True, help='Installer_type is required')
parser.add_argument('installer_ip', type=str, required=True, help='Installer_ip is required')
- parser.add_argument('max-minutes', type=int, required=False, default=10, help='max-minutes should be integer')
+ parser.add_argument('max-minutes', type=int, required=False, default=60, help='max-minutes should be integer')
parser.add_argument('pod_name', type=str, required=False, default='default', help='pod_name should be string')
- parser.add_argument('suite_name', type=str, required=False, default='all', help='suite_name should be string')
+ parser.add_argument('suite_name', type=str, required=False, default='compute', help='suite_name should be string')
parser.add_argument('type', type=str, required=False, default='BM', help='type should be BM, VM and ALL')
args = parser.parse_args()
- ret = db.create_job(args)
- return {'job_id': str(ret)} if ret else abort(409, 'message:It already has one job running now!')
+ if not args_handler.check_suit_in_test_list(args["suite_name"]):
+ return abort(404, 'message:Test Suit {0} does not exist in test_list'.format(args["suite_name"]))
+ if not args_handler.check_lab_name(args["pod_name"]):
+ return abort(404, 'message: You have specified a lab {0}\
+ that is not present in test_cases'.format(args['pod_name']))
+
+ job_id = db.create_job(args)
+ if not job_id:
+ return abort(409, 'message:It already has one job running now!')
+
+ benchmarks = args_handler.get_files_in_test_list(args["suite_name"],
+ args["type"].lower())
+ test_cases = args_handler.get_files_in_test_case(args["pod_name"],
+ args["suite_name"],
+ args["type"].lower())
+ benchmarks_list = filter(lambda x: x in test_cases, benchmarks)
+ state_detail = map(lambda x: {'benchmark': x, 'state': 'idle'}, benchmarks_list)
+ db.update_job_state_detail(job_id, copy(state_detail))
+ thread_stop = threading.Event()
+ post_thread = threading.Thread(target=self.thread_post, args=(args["installer_type"],
+ benchmarks_list,
+ args["pod_name"],
+ args["suite_name"],
+ job_id,
+ thread_stop))
+ db.start_thread(job_id, post_thread, thread_stop)
+ return {'job_id': str(job_id)}
+
+ def thread_post(self, installer_type, benchmarks_list, pod_name, suite_name, job_id, stop_event):
+ for benchmark in benchmarks_list:
+ if db.is_job_timeout(job_id) or stop_event.is_set():
+ break
+ db.update_benmark_state_in_state_detail(job_id, benchmark, 'processing')
+ args_handler.prepare_and_run_benchmark(installer_type, '/home',
+ args_handler.get_benchmark_path(pod_name, suite_name, benchmark))
+ db.update_benmark_state_in_state_detail(job_id, benchmark, 'finished')
+ db.finish_job(job_id)
api.add_resource(JobList, '/api/v1.0/jobs')
api.add_resource(Job, '/api/v1.0/jobs/<string:id>')
if __name__ == "__main__":
- app.run(debug=True)
+ app.run()
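
With the POST handler spawning a background thread per job, a client submits a job, polls its state until the thread calls db.finish_job(), and may DELETE it to stop the run early. A hedged example against the routes registered above, using the requests library; the host and port assume Flask's defaults from app.run():

import time
import requests

BASE = 'http://127.0.0.1:5000/api/v1.0/jobs'   # assumed Flask default host/port

resp = requests.post(BASE, data={'installer_type': 'fuel',
                                 'installer_ip': '10.20.0.2',
                                 'suite_name': 'compute',
                                 'type': 'BM'})
job_id = resp.json()['job_id']

while True:                                       # poll until the worker thread
    job = requests.get('{0}/{1}'.format(BASE, job_id)).json()   # finishes the job
    if job['state'] == 'finished':
        break
    time.sleep(5)

requests.delete('{0}/{1}'.format(BASE, job_id))   # stops the thread if still running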
diff --git a/test_list/compute b/test_list/compute
index 7fc76145..3bf1b184 100644
--- a/test_list/compute
+++ b/test_list/compute
@@ -1,10 +1,16 @@
-dhrystone_bm.yaml
-dhrystone_vm.yaml
-whetstone_bm.yaml
-whetstone_vm.yaml
-ramspeed_bm.yaml
-ramspeed_vm.yaml
-dpi_bm.yaml
-dpi_vm.yaml
-ssl_bm.yaml
-ssl_vm.yaml
\ No newline at end of file
+{
+ "bm": [
+ "dhrystone_bm.yaml",
+ "whetstone_bm.yaml",
+ "ramspeed_bm.yaml",
+ "dpi_bm.yaml",
+ "ssl_bm.yaml"
+ ],
+ "vm": [
+ "dhrystone_vm.yaml",
+ "whetstone_vm.yaml",
+ "ramspeed_vm.yaml",
+ "dpi_vm.yaml",
+ "ssl_vm.yaml"
+ ]
+}
diff --git a/test_list/network b/test_list/network
index 677f2ba4..58ce5cb9 100644
--- a/test_list/network
+++ b/test_list/network
@@ -1,3 +1,9 @@
-iperf_bm.yaml
-iperf_vm.yaml
-iperf_vm_2.yaml
\ No newline at end of file
+{
+ "bm": [
+ "iperf_bm.yaml"
+ ],
+ "vm": [
+ "iperf_vm.yaml",
+ "iperf_vm_2.yaml"
+ ]
+}
diff --git a/test_list/storage b/test_list/storage
index c7f53402..f3068dd5 100644
--- a/test_list/storage
+++ b/test_list/storage
@@ -1,2 +1,8 @@
-fio_bm.yaml
-fio_vm.yaml
\ No newline at end of file
+{
+ "bm": [
+ "fio_bm.yaml"
+ ],
+ "vm": [
+ "fio_vm.yaml"
+ ]
+}
diff --git a/tests/driver_test.py b/tests/driver_test.py
index 9162ca16..83c69c82 100644
--- a/tests/driver_test.py
+++ b/tests/driver_test.py
@@ -35,13 +35,13 @@ class TestClass:
[('1-server', ['10.20.0.13']), ('2-host', ['10.20.0.15'])],
"iperf_vm.yaml",
[('duration', 20), ('protocol', 'tcp'), ('bandwidthGbps', 0)],
- [("10.20.0.13", [None]), ("10.20.0.15", [None])],
+ [('1-server', '10.10.17.4'), ('2-host', '10.10.17.5')],
{}],
[{'Dest_dir': 'results',
'ip1': '10.20.0.13',
'ip2': '',
'installer': 'joid',
- 'privateip1': 'NONE',
+ 'privateip1': '10.10.17.4',
'workingdir': '/home',
'fname': 'iperf_vm.yaml',
'username': 'ubuntu',
@@ -53,7 +53,7 @@ class TestClass:
'ip1': '10.20.0.13',
'ip2': '',
'installer': 'joid',
- 'privateip1': 'NONE',
+ 'privateip1': '10.10.17.4',
'workingdir': '/home',
'fname': 'iperf_vm.yaml',
'username': 'ubuntu',
diff --git a/tests/qtip_server_test.py b/tests/qtip_server_test.py
index c2b12974..3f70a1f6 100644
--- a/tests/qtip_server_test.py
+++ b/tests/qtip_server_test.py
@@ -1,6 +1,8 @@
import restful_server.qtip_server as server
import pytest
import json
+import mock
+import time
@pytest.fixture
@@ -14,6 +16,14 @@ def app_client(app):
return client
+def side_effect_sleep(sleep_time):
+ time.sleep(sleep_time)
+
+
+def side_effect_pass():
+ pass
+
+
class TestClass:
@pytest.mark.parametrize("body, expected", [
({'installer_type': 'fuel',
@@ -22,11 +32,15 @@ class TestClass:
'installer_type': 'fuel',
'installer_ip': '10.20.0.2',
'pod_name': 'default',
- 'suite_name': 'all',
- 'max-minutes': 10,
+ 'suite_name': 'compute',
+ 'max-minutes': 60,
'type': 'BM',
- 'state': 'processing',
- 'state_detail': [],
+ 'state': 'finished',
+ 'state_detail': [{'state': 'finished', 'benchmark': 'dhrystone_bm.yaml'},
+ {'state': 'finished', 'benchmark': 'whetstone_bm.yaml'},
+ {'state': 'finished', 'benchmark': 'ramspeed_bm.yaml'},
+ {'state': 'finished', 'benchmark': 'dpi_bm.yaml'},
+ {'state': 'finished', 'benchmark': 'ssl_bm.yaml'}],
'result': []}),
({'installer_type': 'fuel',
'installer_ip': '10.20.0.2',
@@ -41,17 +55,26 @@ class TestClass:
'suite_name': 'compute',
'max-minutes': 20,
'type': 'VM',
- 'state': 'processing',
- 'state_detail': [],
+ 'state': 'finished',
+ 'state_detail': [{u'state': u'finished', u'benchmark': u'dhrystone_vm.yaml'},
+ {u'state': u'finished', u'benchmark': u'whetstone_vm.yaml'},
+ {u'state': u'finished', u'benchmark': u'ramspeed_vm.yaml'},
+ {u'state': u'finished', u'benchmark': u'dpi_vm.yaml'},
+ {u'state': u'finished', u'benchmark': u'ssl_vm.yaml'}],
'result': []})
])
- def test_post_get_delete_job_successful(self, app_client, body, expected):
+ @mock.patch('restful_server.qtip_server.args_handler.prepare_and_run_benchmark')
+ def test_post_get_delete_job_successful(self, mock_args_handler, app_client, body, expected):
reply = app_client.post("/api/v1.0/jobs", data=body)
- print reply.data
+ print(reply.data)
id = json.loads(reply.data)['job_id']
expected['job_id'] = id
- get_reply = app_client.get("/api/v1.0/jobs/%s" % id)
- reply_data = json.loads(get_reply.data)
+ post_process = ''
+ while post_process != 'finished':
+ get_reply = app_client.get("/api/v1.0/jobs/%s" % id)
+ reply_data = json.loads(get_reply.data)
+ post_process = reply_data['state']
+ print(reply_data)
assert len(filter(lambda x: reply_data[x] == expected[x], expected.keys())) == len(expected)
delete_reply = app_client.delete("/api/v1.0/jobs/%s" % id)
assert "successful" in delete_reply.data
@@ -62,15 +85,11 @@ class TestClass:
{'installer_type': 'compass',
'installer_ip': '192.168.20.50'}],
['job_id',
- 'It already has one job running now!']),
- ([{'installer_type': 'fuel',
- 'installer_ip': '10.20.0.2'},
- {'installer_type': 'compass',
- 'insta_ip': '192.168.20.50'}],
- ['job_id',
- 'Installer_ip is required'])
+ 'It already has one job running now!'])
])
- def test_post_two_jobs_unsuccessful(self, app_client, body, expected):
+ @mock.patch('restful_server.qtip_server.args_handler.prepare_and_run_benchmark',
+ side_effect=[side_effect_sleep(0.5), side_effect_pass])
+ def test_post_two_jobs_unsuccessful(self, mock_args_hanler, app_client, body, expected):
reply_1 = app_client.post("/api/v1.0/jobs", data=body[0])
reply_2 = app_client.post("/api/v1.0/jobs", data=body[1])
assert expected[0] in json.loads(reply_1.data).keys()
diff --git a/utils/__init__.py b/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/utils/__init__.py
diff --git a/utils/logger_utils.py b/utils/logger_utils.py
new file mode 100644
index 00000000..780696f4
--- /dev/null
+++ b/utils/logger_utils.py
@@ -0,0 +1,65 @@
+##############################################################################
+# Copyright (c) 2016 ZTE Corp and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Logging levels:
+# Level Numeric value
+# CRITICAL 50
+# ERROR 40
+# WARNING 30
+# INFO 20
+# DEBUG 10
+# NOTSET 0
+#
+# Usage:
+# from utils import logger_utils
+# logger = logger_utils.QtipLogger(__file__).get
+# logger.info("message to be shown with - INFO - ")
+# logger.debug("message to be shown with - DEBUG -")
+##############################################################################
+
+import logging
+import os
+
+
+class Logger(object):
+ file_path = '/var/log'
+ formatter = logging.Formatter('%(asctime)s - %(name)s - '
+ '%(levelname)s - %(message)s')
+
+ def __init__(self, logger_name):
+
+ IF_DEBUG = os.getenv('IF_DEBUG')
+
+ self.logger_name = logger_name
+ self.logger = logging.getLogger(logger_name)
+ self.logger.propagate = 0
+ self.logger.setLevel(logging.DEBUG)
+
+ ch = logging.StreamHandler()
+ ch.setFormatter(self.formatter)
+ if IF_DEBUG is not None and IF_DEBUG.lower() == "true":
+ ch.setLevel(logging.DEBUG)
+ else:
+ ch.setLevel(logging.INFO)
+ self.logger.addHandler(ch)
+
+ hdlr = logging.FileHandler('%s/%s.log' % (self.file_path, logger_name))
+ hdlr.setFormatter(self.formatter)
+ hdlr.setLevel(logging.DEBUG)
+ self.logger.addHandler(hdlr)
+
+ @property
+ def get(self):
+ return self.logger
+
+
+class QtipLogger(Logger):
+ file_path = '/var/log/qtip'
+
+ def __init__(self, logger_name):
+ super(QtipLogger, self).__init__(logger_name)
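
A short usage sketch for the new logger. QtipLogger writes to /var/log/qtip/<name>.log, which is why the Dockerfile hunk at the top of this diff creates that directory; outside the container the directory must exist and be writable before the first logger is constructed (the 'example' name below is arbitrary):

import os

from utils import logger_utils

# Mirrors the Dockerfile's `mkdir -p /var/log/qtip`; the FileHandler
# constructor raises an error if the directory is missing or not writable.
if not os.path.isdir('/var/log/qtip'):
    os.makedirs('/var/log/qtip')

logger = logger_utils.QtipLogger('example').get
logger.info('shown on the console and written to /var/log/qtip/example.log')
logger.debug('file only, unless IF_DEBUG=true is exported')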