Diffstat:
 -rw-r--r--  api/__init__.py                                                      |   4
 -rw-r--r--  api/base.py                                                          |   8
 -rw-r--r--  api/database/handler.py                                              |  30
 -rw-r--r--  api/database/v1/__init__.py                                          |   0
 -rw-r--r--  api/database/v1/handlers.py (renamed from api/database/handlers.py)  |  37
 -rw-r--r--  api/database/v1/models.py (renamed from api/database/models.py)      |   2
 -rw-r--r--  api/resources/asynctask.py                                           |  51
 -rw-r--r--  api/resources/case_docs.py                                           |  30
 -rw-r--r--  api/resources/env_action.py                                          | 150
 -rw-r--r--  api/resources/release_action.py                                      |  35
 -rw-r--r--  api/resources/results.py                                             | 114
 -rw-r--r--  api/resources/samples_action.py                                      |  35
 -rw-r--r--  api/resources/testsuites_action.py                                   |  52
 -rw-r--r--  api/server.py                                                        |   2
 -rw-r--r--  api/urls.py                                                          |   1
 -rw-r--r--  api/utils/common.py                                                  |  33
 -rw-r--r--  api/utils/daemonthread.py                                            |  49
 -rw-r--r--  api/utils/thread.py                                                  |  37
 -rw-r--r--  api/views.py                                                         |   6
 -rw-r--r--  tests/unit/apiserver/resources/test_env_action.py                    |   2
 -rw-r--r--  tests/unit/apiserver/utils/test_common.py                            |  25
 -rw-r--r--  tests/unit/cmd/commands/test_env.py                                  |   2
 -rw-r--r--  yardstick/benchmark/core/runner.py                                   |   2
 -rw-r--r--  yardstick/benchmark/core/scenario.py                                 |   2
 -rw-r--r--  yardstick/benchmark/core/task.py                                     |   9
 -rw-r--r--  yardstick/benchmark/scenarios/availability/ha_tools/disk/recovery_disk_io.bash |   2
 -rw-r--r--  yardstick/cmd/cli.py                                                 |   2
 -rw-r--r--  yardstick/cmd/commands/env.py                                        |   6
 -rw-r--r--  yardstick/cmd/commands/plugin.py                                     |   2
 -rw-r--r--  yardstick/cmd/commands/report.py                                     |   2
 -rw-r--r--  yardstick/cmd/commands/runner.py                                     |   2
 -rw-r--r--  yardstick/cmd/commands/scenario.py                                   |   2
 -rw-r--r--  yardstick/cmd/commands/task.py                                       |   2
 -rw-r--r--  yardstick/common/constants.py                                        |   5
 34 files changed, 378 insertions(+), 365 deletions(-)
diff --git a/api/__init__.py b/api/__init__.py
index e69de29bb..c6cbbf104 100644
--- a/api/__init__.py
+++ b/api/__init__.py
@@ -0,0 +1,4 @@
+from yardstick import _init_logging
+
+
+_init_logging()
diff --git a/api/base.py b/api/base.py
index 6fa2777ce..0f1e76a57 100644
--- a/api/base.py
+++ b/api/base.py
@@ -15,6 +15,7 @@ from flask import request
from flask_restful import Resource
from api.utils import common as common_utils
+from yardstick.common import constants as consts
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
@@ -47,8 +48,9 @@ class ApiResource(Resource):
action, args = self._post_args()
return self._dispatch(args, action)
- def _dispatch_get(self):
+ def _dispatch_get(self, **kwargs):
args = self._get_args()
+ args.update(kwargs)
return self._dispatch(args)
def _dispatch(self, args, action='default'):
@@ -59,5 +61,5 @@ class ApiResource(Resource):
resources = importlib.import_module(module_name)
try:
return getattr(resources, action)(args)
- except NameError:
- common_utils.error_handler('Wrong action')
+ except AttributeError:
+ common_utils.result_handler(consts.API_ERROR, 'No such action')
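
The two base-class changes above are what the new docs route relies on: path parameters merged by _dispatch_get() reach the resource module as ordinary args, and a request naming an action the module does not define now gets a clean API error instead of an unhandled exception. A minimal sketch, assuming the Flask-RESTful layout shown in this patch (the body below is illustrative only and mirrors api/views.py and api/resources/case_docs.py further down):

# Illustrative sketch only -- not part of the patch itself.
class CaseDocs(ApiResource):
    def get(self, case_name):
        # '/yardstick/testcases/<case_name>/docs' supplies case_name,
        # which _dispatch_get() merges into the GET args dict.
        return self._dispatch_get(case_name=case_name)

# In api/resources/case_docs.py the action then reads it back:
def default(args):
    case_name = args['case_name']
    ...
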
diff --git a/api/database/handler.py b/api/database/handler.py
deleted file mode 100644
index f6a22578f..000000000
--- a/api/database/handler.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# ############################################################################
-# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-# ############################################################################
-from api.database import db_session
-from api.database.models import AsyncTasks
-
-
-class AsyncTaskHandler(object):
- def insert(self, kwargs):
- task = AsyncTasks(**kwargs)
- db_session.add(task)
- db_session.commit()
- return task
-
- def update_status(self, task, status):
- task.status = status
- db_session.commit()
-
- def update_error(self, task, error):
- task.error = error
- db_session.commit()
-
- def get_task_by_taskid(self, task_id):
- task = AsyncTasks.query.filter_by(task_id=task_id).first()
- return task
diff --git a/api/database/v1/__init__.py b/api/database/v1/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/api/database/v1/__init__.py
diff --git a/api/database/handlers.py b/api/database/v1/handlers.py
index 42979b529..938015d82 100644
--- a/api/database/handlers.py
+++ b/api/database/v1/handlers.py
@@ -7,7 +7,8 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
from api.database import db_session
-from api.database.models import Tasks
+from api.database.v1.models import Tasks
+from api.database.v1.models import AsyncTasks
class TasksHandler(object):
@@ -18,14 +19,38 @@ class TasksHandler(object):
db_session.commit()
return task
- def update_status(self, task, status):
- task.status = status
+ def get_task_by_taskid(self, task_id):
+ task = Tasks.query.filter_by(task_id=task_id).first()
+ if not task:
+ raise ValueError
+
+ return task
+
+ def update_attr(self, task_id, attr):
+ task = self.get_task_by_taskid(task_id)
+
+ for k, v in attr.items():
+ setattr(task, k, v)
db_session.commit()
- def update_error(self, task, error):
- task.error = error
+
+class AsyncTaskHandler(object):
+ def insert(self, kwargs):
+ task = AsyncTasks(**kwargs)
+ db_session.add(task)
db_session.commit()
+ return task
def get_task_by_taskid(self, task_id):
- task = Tasks.query.filter_by(task_id=task_id).first()
+ task = AsyncTasks.query.filter_by(task_id=task_id).first()
+ if not task:
+ raise ValueError
+
return task
+
+ def update_attr(self, task_id, attr):
+ task = self.get_task_by_taskid(task_id)
+
+ for k, v in attr.items():
+ setattr(task, k, v)
+ db_session.commit()
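
A short usage sketch of the reworked handler API introduced above, assuming the api.database.v1 package from this patch (the task id and attribute values are placeholders):

# Placeholder values; mirrors how env_action.py and thread.py use the handlers below.
from api.database.v1.handlers import TasksHandler

handler = TasksHandler()
task_id = 'example-task-id'
handler.insert({'task_id': task_id, 'status': 0})               # TASK_NOT_DONE

try:
    # one generic update_attr() replaces the old update_status()/update_error()
    handler.update_attr(task_id, {'status': 1, 'result': '{}'})  # TASK_DONE
except ValueError:
    # get_task_by_taskid() now raises ValueError for an unknown task_id
    pass
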
diff --git a/api/database/models.py b/api/database/v1/models.py
index 2270de96b..213e77f6e 100644
--- a/api/database/models.py
+++ b/api/database/v1/models.py
@@ -10,6 +10,7 @@ from __future__ import absolute_import
from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import String
+from sqlalchemy import Text
from api.database import Base
@@ -20,6 +21,7 @@ class Tasks(Base):
task_id = Column(String(30))
status = Column(Integer)
error = Column(String(120))
+ result = Column(Text)
details = Column(String(120))
def __repr__(self):
diff --git a/api/resources/asynctask.py b/api/resources/asynctask.py
index dd2a71003..39b47c0ee 100644
--- a/api/resources/asynctask.py
+++ b/api/resources/asynctask.py
@@ -7,9 +7,14 @@
# http://www.apache.org/licenses/LICENSE-2.0
# ############################################################################
import uuid
+import logging
-from api.utils import common as common_utils
-from api.database.models import AsyncTasks
+from api.utils.common import result_handler
+from api.database.v1.handlers import AsyncTaskHandler
+from yardstick.common import constants as consts
+
+LOG = logging.getLogger(__name__)
+LOG.setLevel(logging.DEBUG)
def default(args):
@@ -19,17 +24,41 @@ def default(args):
def _get_status(args):
try:
task_id = args['task_id']
- uuid.UUID(task_id)
except KeyError:
- message = 'measurement and task_id must be provided'
- return common_utils.error_handler(message)
+ return result_handler(consts.API_ERROR, 'task_id must be provided')
- asynctask = AsyncTasks.query.filter_by(task_id=task_id).first()
+ try:
+ uuid.UUID(task_id)
+ except ValueError:
+ return result_handler(consts.API_ERROR, 'invalid task_id')
+ asynctask_handler = AsyncTaskHandler()
try:
- status = asynctask.status
- error = asynctask.error if asynctask.error else []
+ asynctask = asynctask_handler.get_task_by_taskid(task_id)
+ except ValueError:
+ return result_handler(consts.API_ERROR, 'invalid task_id')
+
+ def _unfinished():
+ return result_handler(consts.TASK_NOT_DONE, {})
+
+ def _finished():
+ return result_handler(consts.TASK_DONE, {})
+
+ def _error():
+ return result_handler(consts.TASK_FAILED, asynctask.error)
+
+ status = asynctask.status
+ LOG.debug('Task status is: %s', status)
+
+ if status not in [consts.TASK_NOT_DONE,
+ consts.TASK_DONE,
+ consts.TASK_FAILED]:
+ return result_handler(consts.API_ERROR, 'internal server error')
+
+ switcher = {
+ consts.TASK_NOT_DONE: _unfinished,
+ consts.TASK_DONE: _finished,
+ consts.TASK_FAILED: _error
+ }
- return common_utils.result_handler(status, error)
- except AttributeError:
- return common_utils.error_handler('no such task')
+ return switcher.get(status)()
diff --git a/api/resources/case_docs.py b/api/resources/case_docs.py
new file mode 100644
index 000000000..289410d2d
--- /dev/null
+++ b/api/resources/case_docs.py
@@ -0,0 +1,30 @@
+import os
+import logging
+
+from api.utils.common import result_handler
+from yardstick.common import constants as consts
+
+LOG = logging.getLogger(__name__)
+LOG.setLevel(logging.DEBUG)
+
+
+def default(args):
+ return get_case_docs(args)
+
+
+def get_case_docs(args):
+ try:
+ case_name = args['case_name']
+ except KeyError:
+ return result_handler(consts.API_ERROR, 'case_name must be provided')
+
+ docs_path = os.path.join(consts.DOCS_DIR, '{}.rst'.format(case_name))
+
+ if not os.path.exists(docs_path):
+ return result_handler(consts.API_ERROR, 'case does not exist')
+
+ LOG.info('Reading %s', case_name)
+ with open(docs_path) as f:
+ content = f.read()
+
+ return result_handler(consts.API_SUCCESS, {'docs': content})
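
An illustrative client call for the docs endpoint this resource backs (the route is added in api/urls.py below); the test-case name is only an example, and it assumes the default BASE_URL from yardstick.common.constants and that result_handler() wraps its payload under a 'result' key:

# Example client call; the case name and the 'result' wrapping are assumptions.
import requests

case = 'opnfv_yardstick_tc002'   # any installed test case name
url = 'http://localhost:5000/yardstick/testcases/{}/docs'.format(case)
resp = requests.get(url).json()
print(resp.get('result', {}).get('docs', ''))
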
diff --git a/api/resources/env_action.py b/api/resources/env_action.py
index 3536559b7..2ea64ef1a 100644
--- a/api/resources/env_action.py
+++ b/api/resources/env_action.py
@@ -23,7 +23,7 @@ from six.moves import configparser
from oslo_serialization import jsonutils
from docker import Client
-from api.database.handler import AsyncTaskHandler
+from api.database.v1.handlers import AsyncTaskHandler
from api.utils import influx
from api.utils.common import result_handler
from yardstick.common import constants as consts
@@ -32,17 +32,19 @@ from yardstick.common import openstack_utils
from yardstick.common.httpClient import HttpClient
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.DEBUG)
+LOG = logging.getLogger(__name__)
+LOG.setLevel(logging.DEBUG)
+async_handler = AsyncTaskHandler()
-def createGrafanaContainer(args):
+
+def create_grafana(args):
task_id = str(uuid.uuid4())
thread = threading.Thread(target=_create_grafana, args=(task_id,))
thread.start()
- return result_handler('success', {'task_id': task_id})
+ return result_handler(consts.API_SUCCESS, {'task_id': task_id})
def _create_grafana(task_id):
@@ -51,22 +53,29 @@ def _create_grafana(task_id):
client = Client(base_url=consts.DOCKER_URL)
try:
+ LOG.info('Checking if grafana image exists')
image = '{}:{}'.format(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
if not _check_image_exist(client, image):
+ LOG.info('Grafana image does not exist, start pulling')
client.pull(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
+ LOG.info('Creating grafana container')
_create_grafana_container(client)
+ LOG.info('Grafana container is created')
time.sleep(5)
+ LOG.info('Creating data source for grafana')
_create_data_source()
+ LOG.info('Creating dashboard for grafana')
_create_dashboard()
_update_task_status(task_id)
+ LOG.info('Finished')
except Exception as e:
_update_task_error(task_id, str(e))
- logger.exception('Error: %s', e)
+ LOG.exception('Create grafana failed')
def _create_dashboard():
@@ -76,7 +85,11 @@ def _create_dashboard():
for i in sorted(glob.iglob(path)):
with open(i) as f:
data = jsonutils.load(f)
- HttpClient().post(url, data)
+ try:
+ HttpClient().post(url, data)
+ except Exception:
+ LOG.exception('Create dashboard %s failed', i)
+ raise
def _create_data_source():
@@ -94,7 +107,11 @@ def _create_data_source():
"basicAuthPassword": "admin",
"isDefault": False,
}
- HttpClient().post(url, data)
+ try:
+ HttpClient().post(url, data)
+ except Exception:
+ LOG.exception('Create datasources failed')
+ raise
def _create_grafana_container(client):
@@ -104,12 +121,14 @@ def _create_grafana_container(client):
host_config = client.create_host_config(port_bindings=port_bindings,
restart_policy=restart_policy)
+ LOG.info('Creating container')
container = client.create_container(image='%s:%s' % (consts.GRAFANA_IMAGE,
consts.GRAFANA_TAG),
ports=ports,
detach=True,
tty=True,
host_config=host_config)
+ LOG.info('Starting container')
client.start(container)
@@ -117,13 +136,13 @@ def _check_image_exist(client, t):
return any(t in a['RepoTags'][0] for a in client.images() if a['RepoTags'])
-def createInfluxDBContainer(args):
+def create_influxdb(args):
task_id = str(uuid.uuid4())
thread = threading.Thread(target=_create_influxdb, args=(task_id,))
thread.start()
- return result_handler('success', {'task_id': task_id})
+ return result_handler(consts.API_SUCCESS, {'task_id': task_id})
def _create_influxdb(task_id):
@@ -132,22 +151,30 @@ def _create_influxdb(task_id):
client = Client(base_url=consts.DOCKER_URL)
try:
+ LOG.info('Changing output to influxdb')
_change_output_to_influxdb()
+ LOG.info('Checking if influxdb image exists')
if not _check_image_exist(client, '%s:%s' % (consts.INFLUXDB_IMAGE,
consts.INFLUXDB_TAG)):
+ LOG.info('Influxdb image does not exist, start pulling')
client.pull(consts.INFLUXDB_IMAGE, tag=consts.INFLUXDB_TAG)
+ LOG.info('Creating influxdb container')
_create_influxdb_container(client)
+ LOG.info('Influxdb container is created')
time.sleep(5)
+ LOG.info('Configuring influxdb')
_config_influxdb()
_update_task_status(task_id)
+
+ LOG.info('Finished')
except Exception as e:
_update_task_error(task_id, str(e))
- logger.debug('Error: %s', e)
+ LOG.exception('Creating influxdb failed')
def _create_influxdb_container(client):
@@ -158,12 +185,14 @@ def _create_influxdb_container(client):
host_config = client.create_host_config(port_bindings=port_bindings,
restart_policy=restart_policy)
+ LOG.info('Creating container')
container = client.create_container(image='%s:%s' % (consts.INFLUXDB_IMAGE,
consts.INFLUXDB_TAG),
ports=ports,
detach=True,
tty=True,
host_config=host_config)
+ LOG.info('Starting container')
client.start(container)
@@ -174,32 +203,35 @@ def _config_influxdb():
consts.INFLUXDB_PASS,
consts.INFLUXDB_DB_NAME)
client.create_database(consts.INFLUXDB_DB_NAME)
- logger.info('Success to config influxDB')
- except Exception as e:
- logger.debug('Failed to config influxDB: %s', e)
+ LOG.info('Successfully configured influxDB')
+ except Exception:
+ LOG.exception('Config influxdb failed')
def _change_output_to_influxdb():
common_utils.makedirs(consts.CONF_DIR)
parser = configparser.ConfigParser()
+ LOG.info('Reading output sample configuration')
parser.read(consts.CONF_SAMPLE_FILE)
+ LOG.info('Set dispatcher to influxdb')
parser.set('DEFAULT', 'dispatcher', 'influxdb')
parser.set('dispatcher_influxdb', 'target',
'http://%s:8086' % consts.INFLUXDB_IP)
+ LOG.info('Writing to %s', consts.CONF_FILE)
with open(consts.CONF_FILE, 'w') as f:
parser.write(f)
-def prepareYardstickEnv(args):
+def prepare_env(args):
task_id = str(uuid.uuid4())
thread = threading.Thread(target=_prepare_env_daemon, args=(task_id,))
thread.start()
- return result_handler('success', {'task_id': task_id})
+ return result_handler(consts.API_SUCCESS, {'task_id': task_id})
def _already_source_openrc():
@@ -216,23 +248,33 @@ def _prepare_env_daemon(task_id):
rc_file = consts.OPENRC
+ LOG.info('Checking OpenRC environment variables')
if not _already_source_openrc():
+ LOG.info('OpenRC variables not found in environment')
if not os.path.exists(rc_file):
+ LOG.info('Openrc file not found')
installer_ip = os.environ.get('INSTALLER_IP', '192.168.200.2')
installer_type = os.environ.get('INSTALLER_TYPE', 'compass')
+ LOG.info('Getting openrc file from %s', installer_type)
_get_remote_rc_file(rc_file, installer_ip, installer_type)
+ LOG.info('Sourcing openrc file')
_source_file(rc_file)
+ LOG.info('Appending external network')
_append_external_network(rc_file)
+ LOG.info('Openrc file exists, sourcing it')
_source_file(rc_file)
+ LOG.info('Cleaning images')
_clean_images()
+ LOG.info('Loading images')
_load_images()
_update_task_status(task_id)
+ LOG.info('Finished')
except Exception as e:
_update_task_error(task_id, str(e))
- logger.debug('Error: %s', e)
+ LOG.exception('Prepare env failed')
def _create_directories():
@@ -254,7 +296,7 @@ def _get_remote_rc_file(rc_file, installer_ip, installer_type):
p.communicate()
if p.returncode != 0:
- logger.debug('Failed to fetch credentials from installer')
+ LOG.error('Failed to fetch credentials from installer')
except OSError as e:
if e.errno != errno.EEXIST:
raise
@@ -266,7 +308,7 @@ def _append_external_network(rc_file):
try:
ext_network = next(n['name'] for n in networks if n['router:external'])
except StopIteration:
- logger.warning("Can't find external network")
+ LOG.warning("Can't find external network")
else:
cmd = 'export EXTERNAL_NETWORK=%s' % ext_network
try:
@@ -281,38 +323,26 @@ def _clean_images():
cmd = [consts.CLEAN_IMAGES_SCRIPT]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
output = p.communicate()[0]
- logger.debug('The result is: %s', output)
+ LOG.debug(output)
def _load_images():
cmd = [consts.LOAD_IMAGES_SCRIPT]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR)
output = p.communicate()[0]
- logger.debug('The result is: %s', output)
+ LOG.debug(output)
def _create_task(task_id):
- async_handler = AsyncTaskHandler()
- task_dict = {
- 'task_id': task_id,
- 'status': 0
- }
- async_handler.insert(task_dict)
+ async_handler.insert({'status': 0, 'task_id': task_id})
def _update_task_status(task_id):
- async_handler = AsyncTaskHandler()
-
- task = async_handler.get_task_by_taskid(task_id)
- async_handler.update_status(task, 1)
+ async_handler.update_attr(task_id, {'status': 1})
def _update_task_error(task_id, error):
- async_handler = AsyncTaskHandler()
-
- task = async_handler.get_task_by_taskid(task_id)
- async_handler.update_status(task, 2)
- async_handler.update_error(task, error)
+ async_handler.update_attr(task_id, {'status': 2, 'error': error})
def update_openrc(args):
@@ -325,22 +355,22 @@ def update_openrc(args):
return result_handler(consts.API_ERROR, 'args should be a dict')
lines = ['export {}={}\n'.format(k, v) for k, v in openrc_vars.items()]
- logger.debug('Writing: %s', ''.join(lines))
+ LOG.debug('Writing: %s', ''.join(lines))
- logger.info('Writing openrc: Writing')
+ LOG.info('Writing openrc: Writing')
common_utils.makedirs(consts.CONF_DIR)
with open(consts.OPENRC, 'w') as f:
f.writelines(lines)
- logger.info('Writing openrc: Done')
+ LOG.info('Writing openrc: Done')
- logger.info('Source openrc: Sourcing')
+ LOG.info('Source openrc: Sourcing')
try:
_source_file(consts.OPENRC)
except Exception as e:
- logger.exception('Failed to source openrc')
+ LOG.exception('Failed to source openrc')
return result_handler(consts.API_ERROR, str(e))
- logger.info('Source openrc: Done')
+ LOG.info('Source openrc: Done')
return result_handler(consts.API_SUCCESS, {'openrc': openrc_vars})
@@ -351,14 +381,44 @@ def upload_pod_file(args):
except KeyError:
return result_handler(consts.API_ERROR, 'file must be provided')
- logger.info('Checking file')
+ LOG.info('Checking file')
data = yaml.load(pod_file.read())
if not isinstance(data, collections.Mapping):
return result_handler(consts.API_ERROR, 'invalid yaml file')
- logger.info('Writing file')
+ LOG.info('Writing file')
with open(consts.POD_FILE, 'w') as f:
yaml.dump(data, f, default_flow_style=False)
- logger.info('Writing finished')
+ LOG.info('Writing finished')
return result_handler(consts.API_SUCCESS, {'pod_info': data})
+
+
+def update_pod_file(args):
+ try:
+ pod_dic = args['pod']
+ except KeyError:
+ return result_handler(consts.API_ERROR, 'pod must be provided')
+ else:
+ if not isinstance(pod_dic, collections.Mapping):
+ return result_handler(consts.API_ERROR, 'pod should be a dict')
+
+ LOG.info('Writing file')
+ with open(consts.POD_FILE, 'w') as f:
+ yaml.dump(pod_dic, f, default_flow_style=False)
+ LOG.info('Writing finished')
+
+ return result_handler(consts.API_SUCCESS, {'pod_info': pod_dic})
+
+
+def update_hosts(hosts_ip):
+ if not isinstance(hosts_ip, dict):
+ return result_handler(consts.API_ERROR, 'Error, args should be a dict')
+ LOG.info('Writing hosts: Writing')
+ hosts_list = ['\n{} {}'.format(ip, host_name)
+ for host_name, ip in hosts_ip.items()]
+ LOG.debug('Writing: %s', hosts_list)
+ with open(consts.ETC_HOSTS, 'a') as f:
+ f.writelines(hosts_list)
+ LOG.info('Writing hosts: Done')
+ return result_handler(consts.API_SUCCESS, 'success')
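
For completeness, a hedged sketch of how a client now triggers one of the renamed env actions and receives the asynchronous task id; it assumes the /yardstick/env/action route from api/urls.py, a JSON request body, and the 'result' wrapper used by result_handler():

# Illustrative only: kick off the renamed 'create_influxdb' action.
import requests

resp = requests.post('http://localhost:5000/yardstick/env/action',
                     json={'action': 'create_influxdb'}).json()
task_id = resp['result']['task_id']   # poll this id until TASK_DONE or TASK_FAILED
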
diff --git a/api/resources/release_action.py b/api/resources/release_action.py
index 9016d4aa2..9871c1fc3 100644
--- a/api/resources/release_action.py
+++ b/api/resources/release_action.py
@@ -11,33 +11,34 @@ import uuid
import os
import logging
-from api.utils import common as common_utils
+from api.utils.common import result_handler
+from api.utils.thread import TaskThread
from yardstick.common import constants as consts
+from yardstick.benchmark.core import Param
+from yardstick.benchmark.core.task import Task
logger = logging.getLogger(__name__)
+logger.setLevel(logging.DEBUG)
-def runTestCase(args):
+def run_test_case(args):
try:
- opts = args.get('opts', {})
- testcase = args['testcase']
+ case_name = args['testcase']
except KeyError:
- return common_utils.error_handler('Lack of testcase argument')
+ return result_handler(consts.API_ERROR, 'testcase must be provided')
- testcase_name = consts.TESTCASE_PRE + testcase
- testcase = os.path.join(consts.TESTCASE_DIR, testcase_name + '.yaml')
+ testcase = os.path.join(consts.TESTCASE_DIR, '{}.yaml'.format(case_name))
task_id = str(uuid.uuid4())
- command_list = ['task', 'start']
- command_list = common_utils.get_command_list(command_list, opts, testcase)
- logger.debug('The command_list is: %s', command_list)
-
- logger.debug('Start to execute command list')
- task_dict = {
- 'task_id': task_id,
- 'details': testcase_name
+ task_args = {
+ 'inputfile': [testcase],
+ 'task_id': task_id
}
- common_utils.exec_command_task(command_list, task_dict)
+ task_args.update(args.get('opts', {}))
+
+ param = Param(task_args)
+ task_thread = TaskThread(Task().start, param)
+ task_thread.start()
- return common_utils.result_handler('success', task_id)
+ return result_handler(consts.API_SUCCESS, {'task_id': task_id})
diff --git a/api/resources/results.py b/api/resources/results.py
index a0527ed8c..692e00cc6 100644
--- a/api/resources/results.py
+++ b/api/resources/results.py
@@ -9,12 +9,14 @@
from __future__ import absolute_import
import logging
import uuid
+import json
-from api.utils import influx as influx_utils
-from api.utils import common as common_utils
-from api.database.handlers import TasksHandler
+from api.utils.common import result_handler
+from api.database.v1.handlers import TasksHandler
+from yardstick.common import constants as consts
logger = logging.getLogger(__name__)
+logger.setLevel(logging.DEBUG)
def default(args):
@@ -24,96 +26,44 @@ def default(args):
def getResult(args):
try:
task_id = args['task_id']
+ except KeyError:
+ return result_handler(consts.API_ERROR, 'task_id must be provided')
+ try:
uuid.UUID(task_id)
- except KeyError:
- message = 'task_id must be provided'
- return common_utils.result_handler(2, message)
+ except ValueError:
+ return result_handler(consts.API_ERROR, 'invalid task_id')
- task = TasksHandler().get_task_by_taskid(task_id)
+ task_handler = TasksHandler()
+ try:
+ task = task_handler.get_task_by_taskid(task_id)
+ except ValueError:
+ return result_handler(consts.API_ERROR, 'invalid task_id')
def _unfinished():
- return common_utils.result_handler(0, {})
+ return result_handler(consts.TASK_NOT_DONE, {})
def _finished():
- testcases = task.details.split(',')
-
- def get_data(testcase):
- query_template = "select * from %s where task_id='%s'"
- query_sql = query_template % (testcase, task_id)
- data = common_utils.translate_to_str(influx_utils.query(query_sql))
- return data
-
- result = _format_data({k: get_data(k) for k in testcases})
-
- return common_utils.result_handler(1, result)
+ if task.result:
+ return result_handler(consts.TASK_DONE, json.loads(task.result))
+ else:
+ return result_handler(consts.TASK_DONE, {})
def _error():
- return common_utils.result_handler(2, task.error)
+ return result_handler(consts.TASK_FAILED, task.error)
- try:
- status = task.status
+ status = task.status
+ logger.debug('Task status is: %s', status)
- switcher = {
- 0: _unfinished,
- 1: _finished,
- 2: _error
- }
- return switcher.get(status, lambda: 'nothing')()
- except IndexError:
- return common_utils.result_handler(2, 'no such task')
+ if status not in [consts.TASK_NOT_DONE,
+ consts.TASK_DONE,
+ consts.TASK_FAILED]:
+ return result_handler(consts.API_ERROR, 'internal server error')
-
-def _format_data(data):
- try:
- first_value = data.values()[0][0]
- except IndexError:
- return {'criteria': 'FAIL', 'testcases': {}}
- else:
- info = {
- 'deploy_scenario': first_value.get('deploy_scenario'),
- 'installer': first_value.get('installer'),
- 'pod_name': first_value.get('pod_name'),
- 'version': first_value.get('version')
- }
- task_id = first_value.get('task_id')
- criteria = first_value.get('criteria')
- testcases = {k: _get_case_data(v) for k, v in data.items()}
-
- result = {
- 'criteria': criteria,
- 'info': info,
- 'task_id': task_id,
- 'testcases': testcases
- }
- return result
-
-
-def _get_case_data(data):
- try:
- scenario = data[0]
- except IndexError:
- return {'tc_data': [], 'criteria': 'FAIL'}
- else:
- tc_data = [_get_scenario_data(s) for s in data]
- criteria = scenario.get('criteria')
- return {'tc_data': tc_data, 'criteria': criteria}
-
-
-def _get_scenario_data(data):
- result = {
- 'data': {},
- 'timestamp': ''
+ switcher = {
+ consts.TASK_NOT_DONE: _unfinished,
+ consts.TASK_DONE: _finished,
+ consts.TASK_FAILED: _error
}
- blacklist = {'criteria', 'deploy_scenario', 'host', 'installer',
- 'pod_name', 'runner_id', 'scenarios', 'target',
- 'task_id', 'time', 'version'}
-
- keys = set(data.keys()) - set(blacklist)
- for k in keys:
- result['data'][k] = data[k]
-
- result['timestamp'] = data.get('time')
-
- return result
+ return switcher.get(status)()
diff --git a/api/resources/samples_action.py b/api/resources/samples_action.py
index 3093864e0..10b9980af 100644
--- a/api/resources/samples_action.py
+++ b/api/resources/samples_action.py
@@ -11,32 +11,35 @@ import uuid
import os
import logging
-from api.utils import common as common_utils
+from api.utils.common import result_handler
+from api.utils.thread import TaskThread
from yardstick.common import constants as consts
+from yardstick.benchmark.core import Param
+from yardstick.benchmark.core.task import Task
logger = logging.getLogger(__name__)
+logger.setLevel(logging.DEBUG)
-def runTestCase(args):
+def run_test_case(args):
try:
- opts = args.get('opts', {})
- testcase_name = args['testcase']
+ case_name = args['testcase']
except KeyError:
- return common_utils.error_handler('Lack of testcase argument')
+ return result_handler(consts.API_ERROR, 'testcase must be provided')
- testcase = os.path.join(consts.SAMPLE_CASE_DIR, testcase_name + '.yaml')
+ testcase = os.path.join(consts.SAMPLE_CASE_DIR,
+ '{}.yaml'.format(case_name))
task_id = str(uuid.uuid4())
- command_list = ['task', 'start']
- command_list = common_utils.get_command_list(command_list, opts, testcase)
- logger.debug('The command_list is: %s', command_list)
-
- logger.debug('Start to execute command list')
- task_dict = {
- 'task_id': task_id,
- 'details': testcase_name
+ task_args = {
+ 'inputfile': [testcase],
+ 'task_id': task_id
}
- common_utils.exec_command_task(command_list, task_dict)
+ task_args.update(args.get('opts', {}))
+
+ param = Param(task_args)
+ task_thread = TaskThread(Task().start, param)
+ task_thread.start()
- return common_utils.result_handler('success', task_id)
+ return result_handler(consts.API_SUCCESS, {'task_id': task_id})
diff --git a/api/resources/testsuites_action.py b/api/resources/testsuites_action.py
index a385290d9..e37eacc3e 100644
--- a/api/resources/testsuites_action.py
+++ b/api/resources/testsuites_action.py
@@ -6,57 +6,41 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-
-"""Yardstick test suite api action"""
-
from __future__ import absolute_import
import uuid
import os
import logging
-import yaml
-from api.utils import common as common_utils
+from api.utils.common import result_handler
+from api.utils.thread import TaskThread
from yardstick.common import constants as consts
-from yardstick.common.task_template import TaskTemplate
+from yardstick.benchmark.core import Param
+from yardstick.benchmark.core.task import Task
logger = logging.getLogger(__name__)
+logger.setLevel(logging.DEBUG)
-def runTestSuite(args):
+def run_test_suite(args):
try:
- opts = args.get('opts', {})
- testsuite = args['testsuite']
+ suite_name = args['testsuite']
except KeyError:
- return common_utils.error_handler('Lack of testsuite argument')
+ return result_handler(consts.API_ERROR, 'testsuite must be provided')
- if 'suite' not in opts:
- opts['suite'] = 'true'
-
- testsuite = os.path.join(consts.TESTSUITE_DIR, '{}.yaml'.format(testsuite))
+ testsuite = os.path.join(consts.TESTSUITE_DIR,
+ '{}.yaml'.format(suite_name))
task_id = str(uuid.uuid4())
- command_list = ['task', 'start']
- command_list = common_utils.get_command_list(command_list, opts, testsuite)
- logger.debug('The command_list is: %s', command_list)
-
- logger.debug('Start to execute command list')
- task_dic = {
+ task_args = {
+ 'inputfile': [testsuite],
'task_id': task_id,
- 'details': _get_cases_from_suite_file(testsuite)
+ 'suite': True
}
- common_utils.exec_command_task(command_list, task_dic)
-
- return common_utils.result_handler('success', task_id)
-
-
-def _get_cases_from_suite_file(testsuite):
- def get_name(full_name):
- return os.path.splitext(full_name)[0]
+ task_args.update(args.get('opts', {}))
- with open(testsuite) as f:
- contents = TaskTemplate.render(f.read())
+ param = Param(task_args)
+ task_thread = TaskThread(Task().start, param)
+ task_thread.start()
- suite_dic = yaml.safe_load(contents)
- testcases = (get_name(c['file_name']) for c in suite_dic['test_cases'])
- return ','.join(testcases)
+ return result_handler(consts.API_SUCCESS, {'task_id': task_id})
diff --git a/api/server.py b/api/server.py
index 1d42feffb..d39c44544 100644
--- a/api/server.py
+++ b/api/server.py
@@ -19,7 +19,7 @@ from flask_restful import Api
from api.database import Base
from api.database import db_session
from api.database import engine
-from api.database import models
+from api.database.v1 import models
from api.urls import urlpatterns
from yardstick import _init_logging
diff --git a/api/urls.py b/api/urls.py
index b9ddd4c72..13c6c7675 100644
--- a/api/urls.py
+++ b/api/urls.py
@@ -17,6 +17,7 @@ urlpatterns = [
Url('/yardstick/testcases', views.Testcases, 'testcases'),
Url('/yardstick/testcases/release/action', views.ReleaseAction, 'release'),
Url('/yardstick/testcases/samples/action', views.SamplesAction, 'samples'),
+ Url('/yardstick/testcases/<case_name>/docs', views.CaseDocs, 'casedocs'),
Url('/yardstick/testsuites/action', views.TestsuitesAction, 'testsuites'),
Url('/yardstick/results', views.Results, 'results'),
Url('/yardstick/env/action', views.EnvAction, 'env')
diff --git a/api/utils/common.py b/api/utils/common.py
index f8b0d40ba..eda9c17dd 100644
--- a/api/utils/common.py
+++ b/api/utils/common.py
@@ -13,10 +13,8 @@ import logging
from flask import jsonify
import six
-from api.utils.daemonthread import DaemonThread
-from yardstick.cmd.cli import YardstickCLI
-
-logger = logging.getLogger(__name__)
+LOG = logging.getLogger(__name__)
+LOG.setLevel(logging.DEBUG)
def translate_to_str(obj):
@@ -29,33 +27,6 @@ def translate_to_str(obj):
return obj
-def get_command_list(command_list, opts, args):
-
- command_list.append(args)
-
- command_list.extend(('--{}'.format(k) for k in opts if k != 'task-args'))
-
- task_args = opts.get('task-args', '')
- if task_args:
- command_list.extend(['--task-args', str(task_args)])
-
- return command_list
-
-
-def exec_command_task(command_list, task_dict): # pragma: no cover
- daemonthread = DaemonThread(YardstickCLI().api, (command_list, task_dict))
- daemonthread.start()
-
-
-def error_handler(message):
- logger.debug(message)
- result = {
- 'status': 'error',
- 'message': message
- }
- return jsonify(result)
-
-
def result_handler(status, data):
result = {
'status': status,
diff --git a/api/utils/daemonthread.py b/api/utils/daemonthread.py
deleted file mode 100644
index 3d5625547..000000000
--- a/api/utils/daemonthread.py
+++ /dev/null
@@ -1,49 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-from __future__ import absolute_import
-import threading
-import os
-import errno
-
-from yardstick.common import constants as consts
-from api.database.handlers import TasksHandler
-
-
-class DaemonThread(threading.Thread):
-
- def __init__(self, method, args):
- super(DaemonThread, self).__init__(target=method, args=args)
- self.method = method
- self.command_list = args[0]
- self.task_dict = args[1]
-
- def run(self):
- self.task_dict['status'] = 0
- task_id = self.task_dict['task_id']
-
- try:
- task_handler = TasksHandler()
- task = task_handler.insert(self.task_dict)
-
- self.method(self.command_list, task_id)
-
- task_handler.update_status(task, 1)
- except Exception as e:
- task_handler.update_status(task, 2)
- task_handler.update_error(task, str(e))
- finally:
- _handle_testsuite_file(task_id)
-
-
-def _handle_testsuite_file(task_id):
- try:
- os.remove(os.path.join(consts.TESTSUITE_DIR, task_id + '.yaml'))
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
diff --git a/api/utils/thread.py b/api/utils/thread.py
new file mode 100644
index 000000000..2106548f5
--- /dev/null
+++ b/api/utils/thread.py
@@ -0,0 +1,37 @@
+import threading
+import logging
+
+from oslo_serialization import jsonutils
+
+from api.database.v1.handlers import TasksHandler
+from yardstick.common import constants as consts
+
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.DEBUG)
+
+
+class TaskThread(threading.Thread):
+
+ def __init__(self, target, args):
+ super(TaskThread, self).__init__(target=target, args=args)
+ self.target = target
+ self.args = args
+
+ def run(self):
+ task_handler = TasksHandler()
+ data = {'task_id': self.args.task_id, 'status': consts.TASK_NOT_DONE}
+ task_handler.insert(data)
+
+ logger.info('Starting to run task')
+ try:
+ data = self.target(self.args)
+ except Exception as e:
+ logger.exception('Task Failed')
+ update_data = {'status': consts.TASK_FAILED, 'error': str(e)}
+ task_handler.update_attr(self.args.task_id, update_data)
+ else:
+ logger.info('Task Finished')
+ logger.debug('Result: %s', data)
+
+ data['result'] = jsonutils.dumps(data.get('result', {}))
+ task_handler.update_attr(self.args.task_id, data)
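
The release, samples and testsuites actions above all run test cases through this thread class; a condensed usage sketch (the input file path is a placeholder):

# Condensed usage sketch; mirrors run_test_case() in api/resources/release_action.py.
import uuid

from api.utils.thread import TaskThread
from yardstick.benchmark.core import Param
from yardstick.benchmark.core.task import Task

task_args = {'inputfile': ['/path/to/testcase.yaml'],   # placeholder path
             'task_id': str(uuid.uuid4())}
TaskThread(Task().start, Param(task_args)).start()
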
diff --git a/api/views.py b/api/views.py
index 9fd236fad..9c9ca4ef9 100644
--- a/api/views.py
+++ b/api/views.py
@@ -74,3 +74,9 @@ class EnvAction(ApiResource):
def post(self):
return self._dispatch_post()
+
+
+class CaseDocs(ApiResource):
+
+ def get(self, case_name):
+ return self._dispatch_get(case_name=case_name)
diff --git a/tests/unit/apiserver/resources/test_env_action.py b/tests/unit/apiserver/resources/test_env_action.py
index c7ae10a20..d61092dbc 100644
--- a/tests/unit/apiserver/resources/test_env_action.py
+++ b/tests/unit/apiserver/resources/test_env_action.py
@@ -18,7 +18,7 @@ class EnvTestCase(APITestCase):
def test_create_grafana(self):
url = 'yardstick/env/action'
- data = {'action': 'createGrafanaContainer'}
+ data = {'action': 'create_grafana'}
resp = self._post(url, data)
time.sleep(1)
diff --git a/tests/unit/apiserver/utils/test_common.py b/tests/unit/apiserver/utils/test_common.py
index acf6e41b1..ad81cb76b 100644
--- a/tests/unit/apiserver/utils/test_common.py
+++ b/tests/unit/apiserver/utils/test_common.py
@@ -33,31 +33,6 @@ class TranslateToStrTestCase(unittest.TestCase):
self.assertEqual(result, output_str)
-class GetCommandListTestCase(unittest.TestCase):
-
- def test_get_command_list_no_opts(self):
- command_list = ['a']
- opts = {}
- args = 'b'
- output_list = common.get_command_list(command_list, opts, args)
-
- result_list = ['a', 'b']
- self.assertEqual(result_list, output_list)
-
- def test_get_command_list_with_opts_args(self):
- command_list = ['a']
- opts = {
- 'b': 'c',
- 'task-args': 'd'
- }
- args = 'e'
-
- output_list = common.get_command_list(command_list, opts, args)
-
- result_list = ['a', 'e', '--b', '--task-args', 'd']
- self.assertEqual(result_list, output_list)
-
-
def main():
unittest.main()
diff --git a/tests/unit/cmd/commands/test_env.py b/tests/unit/cmd/commands/test_env.py
index c6e0e1d20..73cd5af47 100644
--- a/tests/unit/cmd/commands/test_env.py
+++ b/tests/unit/cmd/commands/test_env.py
@@ -42,7 +42,7 @@ class EnvCommandTestCase(unittest.TestCase):
@mock.patch('yardstick.cmd.commands.env.HttpClient.post')
def test_start_async_task(self, post_mock):
- data = {'action': 'createGrafanaContainer'}
+ data = {'action': 'create_grafana'}
EnvCommand()._start_async_task(data)
self.assertTrue(post_mock.called)
diff --git a/yardstick/benchmark/core/runner.py b/yardstick/benchmark/core/runner.py
index b9c22cbc9..64acdaa99 100644
--- a/yardstick/benchmark/core/runner.py
+++ b/yardstick/benchmark/core/runner.py
@@ -15,7 +15,7 @@ from yardstick.benchmark.runners.base import Runner
from yardstick.benchmark.core import print_hbar
-class Runners(object):
+class Runners(object): # pragma: no cover
"""Runner commands.
Set of commands to discover and display runner types.
diff --git a/yardstick/benchmark/core/scenario.py b/yardstick/benchmark/core/scenario.py
index a9d933faf..cd119c24c 100644
--- a/yardstick/benchmark/core/scenario.py
+++ b/yardstick/benchmark/core/scenario.py
@@ -15,7 +15,7 @@ from yardstick.benchmark.scenarios.base import Scenario
from yardstick.benchmark.core import print_hbar
-class Scenarios(object):
+class Scenarios(object): # pragma: no cover
"""Scenario commands.
Set of commands to discover and display scenario types.
diff --git a/yardstick/benchmark/core/task.py b/yardstick/benchmark/core/task.py
index 478a51f9d..9c6caf03f 100644
--- a/yardstick/benchmark/core/task.py
+++ b/yardstick/benchmark/core/task.py
@@ -20,6 +20,8 @@ import time
import logging
import uuid
import errno
+import collections
+
from six.moves import filter
from yardstick.benchmark.contexts.base import Context
@@ -51,7 +53,8 @@ class Task(object): # pragma: no cover
atexit.register(self.atexit_handler)
- self.task_id = kwargs.get('task_id', str(uuid.uuid4()))
+ task_id = getattr(args, 'task_id')
+ self.task_id = task_id if task_id else str(uuid.uuid4())
check_environment()
@@ -133,6 +136,7 @@ class Task(object): # pragma: no cover
scenario['task_id'], scenario['tc'])
print("Done, exiting")
+ return result
def _init_output_config(self, output_config):
output_config.setdefault('DEFAULT', {})
@@ -594,6 +598,9 @@ def print_invalid_header(source_name, args):
def parse_task_args(src_name, args):
+ if isinstance(args, collections.Mapping):
+ return args
+
try:
kw = args and yaml.safe_load(args)
kw = {} if kw is None else kw
diff --git a/yardstick/benchmark/scenarios/availability/ha_tools/disk/recovery_disk_io.bash b/yardstick/benchmark/scenarios/availability/ha_tools/disk/recovery_disk_io.bash
index dbe8519ba..d69ebc7d5 100644
--- a/yardstick/benchmark/scenarios/availability/ha_tools/disk/recovery_disk_io.bash
+++ b/yardstick/benchmark/scenarios/availability/ha_tools/disk/recovery_disk_io.bash
@@ -12,4 +12,4 @@
# recover a node from disk io block status
sudo kill `pidof dd`
-
+sudo rm -rf /test.dbf
diff --git a/yardstick/cmd/cli.py b/yardstick/cmd/cli.py
index 79f66e574..d2c49e89b 100644
--- a/yardstick/cmd/cli.py
+++ b/yardstick/cmd/cli.py
@@ -53,7 +53,7 @@ def find_config_files(path_list):
return None
-class YardstickCLI():
+class YardstickCLI(): # pragma: no cover
"""Command-line interface to yardstick"""
# Command categories
diff --git a/yardstick/cmd/commands/env.py b/yardstick/cmd/commands/env.py
index d5aef7faf..dbef303b0 100644
--- a/yardstick/cmd/commands/env.py
+++ b/yardstick/cmd/commands/env.py
@@ -26,21 +26,21 @@ class EnvCommand(object):
"""
def do_influxdb(self, args):
- data = {'action': 'createInfluxDBContainer'}
+ data = {'action': 'create_influxdb'}
task_id = self._start_async_task(data)
start = '* creating influxDB'
self._check_status(task_id, start)
def do_grafana(self, args):
- data = {'action': 'createGrafanaContainer'}
+ data = {'action': 'create_grafana'}
task_id = self._start_async_task(data)
start = '* creating grafana'
self._check_status(task_id, start)
def do_prepare(self, args):
- data = {'action': 'prepareYardstickEnv'}
+ data = {'action': 'prepare_env'}
task_id = self._start_async_task(data)
start = '* preparing yardstick environment'
diff --git a/yardstick/cmd/commands/plugin.py b/yardstick/cmd/commands/plugin.py
index f97c490b7..b90ac15e6 100644
--- a/yardstick/cmd/commands/plugin.py
+++ b/yardstick/cmd/commands/plugin.py
@@ -17,7 +17,7 @@ from yardstick.common.utils import cliargs
from yardstick.cmd.commands import change_osloobj_to_paras
-class PluginCommands(object):
+class PluginCommands(object): # pragma: no cover
"""Plugin commands.
Set of commands to manage plugins.
diff --git a/yardstick/cmd/commands/report.py b/yardstick/cmd/commands/report.py
index 87ae7d5f7..47bf22a1f 100644
--- a/yardstick/cmd/commands/report.py
+++ b/yardstick/cmd/commands/report.py
@@ -19,7 +19,7 @@ from yardstick.cmd.commands import change_osloobj_to_paras
from yardstick.common.utils import cliargs
-class ReportCommands(object):
+class ReportCommands(object): # pragma: no cover
"""Report commands.
Set of commands to manage benchmark tasks.
diff --git a/yardstick/cmd/commands/runner.py b/yardstick/cmd/commands/runner.py
index b99ae789b..9ee99cf44 100644
--- a/yardstick/cmd/commands/runner.py
+++ b/yardstick/cmd/commands/runner.py
@@ -17,7 +17,7 @@ from yardstick.common.utils import cliargs
from yardstick.cmd.commands import change_osloobj_to_paras
-class RunnerCommands(object):
+class RunnerCommands(object): # pragma: no cover
"""Runner commands.
Set of commands to discover and display runner types.
diff --git a/yardstick/cmd/commands/scenario.py b/yardstick/cmd/commands/scenario.py
index 618ed2915..0e3f2c3be 100644
--- a/yardstick/cmd/commands/scenario.py
+++ b/yardstick/cmd/commands/scenario.py
@@ -16,7 +16,7 @@ from yardstick.common.utils import cliargs
from yardstick.cmd.commands import change_osloobj_to_paras
-class ScenarioCommands(object):
+class ScenarioCommands(object): # pragma: no cover
"""Scenario commands.
Set of commands to discover and display scenario types.
diff --git a/yardstick/cmd/commands/task.py b/yardstick/cmd/commands/task.py
index 6384e6eb1..0f98cabdc 100644
--- a/yardstick/cmd/commands/task.py
+++ b/yardstick/cmd/commands/task.py
@@ -19,7 +19,7 @@ from yardstick.cmd.commands import change_osloobj_to_paras
output_file_default = "/tmp/yardstick.out"
-class TaskCommands(object):
+class TaskCommands(object): # pragma: no cover
"""Task commands.
Set of commands to manage benchmark tasks.
diff --git a/yardstick/common/constants.py b/yardstick/common/constants.py
index 47a519923..d251341fc 100644
--- a/yardstick/common/constants.py
+++ b/yardstick/common/constants.py
@@ -39,9 +39,11 @@ ANSIBLE_DIR = join(REPOS_DIR, 'ansible')
SAMPLE_CASE_DIR = join(REPOS_DIR, 'samples')
TESTCASE_DIR = join(YARDSTICK_ROOT_PATH, 'tests/opnfv/test_cases/')
TESTSUITE_DIR = join(YARDSTICK_ROOT_PATH, 'tests/opnfv/test_suites/')
+DOCS_DIR = join(REPOS_DIR, 'docs/testing/user/userguide/')
# file
OPENRC = get_param('file.openrc', '/etc/yardstick/openstack.creds')
+ETC_HOSTS = get_param('file.etc_hosts', '/etc/hosts')
CONF_FILE = join(CONF_DIR, 'yardstick.conf')
POD_FILE = join(CONF_DIR, 'pod.yaml')
CONF_SAMPLE_FILE = join(CONF_SAMPLE_DIR, 'yardstick.conf.sample')
@@ -80,6 +82,9 @@ SQLITE = 'sqlite:////tmp/yardstick.db'
API_SUCCESS = 1
API_ERROR = 2
+TASK_NOT_DONE = 0
+TASK_DONE = 1
+TASK_FAILED = 2
BASE_URL = 'http://localhost:5000'
ENV_ACTION_API = BASE_URL + '/yardstick/env/action'
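
The new task-status constants close the loop: TaskThread writes them into the database and the results and asynctask resources translate them back into API responses. A compact sketch of that shared mapping, mirroring the switcher pattern in results.py and asynctask.py above (the helper name is illustrative):

# Sketch of the shared status-to-response mapping (helper name is an assumption).
from api.utils.common import result_handler
from yardstick.common import constants as consts

def task_status_response(task):
    switcher = {
        consts.TASK_NOT_DONE: lambda: result_handler(consts.TASK_NOT_DONE, {}),
        consts.TASK_DONE: lambda: result_handler(consts.TASK_DONE, {}),
        consts.TASK_FAILED: lambda: result_handler(consts.TASK_FAILED, task.error),
    }
    try:
        return switcher[task.status]()
    except KeyError:
        # any other stored status is treated as an internal server error
        return result_handler(consts.API_ERROR, 'internal server error')
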