Diffstat (limited to 'api')
-rw-r--r--  api/base.py                  11
-rw-r--r--  api/resources/env_action.py  67
-rw-r--r--  api/resources/results.py     63
3 files changed, 129 insertions(+), 12 deletions(-)
diff --git a/api/base.py b/api/base.py
index 527008588..6fa2777ce 100644
--- a/api/base.py
+++ b/api/base.py
@@ -23,9 +23,16 @@ logger.setLevel(logging.DEBUG)
class ApiResource(Resource):
def _post_args(self):
- params = common_utils.translate_to_str(request.json)
- action = params.get('action', '')
+ data = request.json if request.json else {}
+ params = common_utils.translate_to_str(data)
+ action = params.get('action', request.form.get('action', ''))
args = params.get('args', {})
+
+ try:
+ args['file'] = request.files['file']
+ except KeyError:
+ pass
+
logger.debug('Input args is: action: %s, args: %s', action, args)
return action, args
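
The _post_args() change above lets the API accept multipart form posts: the action can now come from request.form, and an uploaded file is attached to args['file'] when present. A minimal client sketch, assuming the service listens on localhost:8888 and exposes an env action endpoint at /yardstick/env/action (both are assumptions, not taken from this diff):

    # Hedged sketch: exercising the new form/file handling in _post_args().
    # URL and port are assumptions; 'upload_pod_file' matches the handler added
    # in api/resources/env_action.py below.
    import requests

    resp = requests.post(
        'http://localhost:8888/yardstick/env/action',    # assumed endpoint
        data={'action': 'upload_pod_file'},               # read via request.form
        files={'file': open('pod.yaml', 'rb')},           # read via request.files
    )
    print(resp.json())
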
diff --git a/api/resources/env_action.py b/api/resources/env_action.py
index 7bfaf27a7..3536559b7 100644
--- a/api/resources/env_action.py
+++ b/api/resources/env_action.py
@@ -16,6 +16,8 @@ import threading
import time
import uuid
import glob
+import yaml
+import collections
from six.moves import configparser
from oslo_serialization import jsonutils
@@ -25,7 +27,7 @@ from api.database.handler import AsyncTaskHandler
from api.utils import influx
from api.utils.common import result_handler
from yardstick.common import constants as consts
-from yardstick.common import utils as yardstick_utils
+from yardstick.common import utils as common_utils
from yardstick.common import openstack_utils
from yardstick.common.httpClient import HttpClient
@@ -98,7 +100,9 @@ def _create_data_source():
def _create_grafana_container(client):
ports = [3000]
port_bindings = {k: k for k in ports}
- host_config = client.create_host_config(port_bindings=port_bindings)
+ restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
+ host_config = client.create_host_config(port_bindings=port_bindings,
+ restart_policy=restart_policy)
container = client.create_container(image='%s:%s' % (consts.GRAFANA_IMAGE,
consts.GRAFANA_TAG),
@@ -150,7 +154,9 @@ def _create_influxdb_container(client):
ports = [8083, 8086]
port_bindings = {k: k for k in ports}
- host_config = client.create_host_config(port_bindings=port_bindings)
+ restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
+ host_config = client.create_host_config(port_bindings=port_bindings,
+ restart_policy=restart_policy)
container = client.create_container(image='%s:%s' % (consts.INFLUXDB_IMAGE,
consts.INFLUXDB_TAG),
@@ -174,7 +180,7 @@ def _config_influxdb():
def _change_output_to_influxdb():
- yardstick_utils.makedirs(consts.CONF_DIR)
+ common_utils.makedirs(consts.CONF_DIR)
parser = configparser.ConfigParser()
parser.read(consts.CONF_SAMPLE_FILE)
@@ -230,11 +236,11 @@ def _prepare_env_daemon(task_id):
def _create_directories():
- yardstick_utils.makedirs(consts.CONF_DIR)
+ common_utils.makedirs(consts.CONF_DIR)
def _source_file(rc_file):
- yardstick_utils.source_env(rc_file)
+ common_utils.source_env(rc_file)
def _get_remote_rc_file(rc_file, installer_ip, installer_type):
@@ -307,3 +313,52 @@ def _update_task_error(task_id, error):
task = async_handler.get_task_by_taskid(task_id)
async_handler.update_status(task, 2)
async_handler.update_error(task, error)
+
+
+def update_openrc(args):
+ try:
+ openrc_vars = args['openrc']
+ except KeyError:
+ return result_handler(consts.API_ERROR, 'openrc must be provided')
+ else:
+ if not isinstance(openrc_vars, collections.Mapping):
+ return result_handler(consts.API_ERROR, 'args should be a dict')
+
+ lines = ['export {}={}\n'.format(k, v) for k, v in openrc_vars.items()]
+ logger.debug('Writing: %s', ''.join(lines))
+
+ logger.info('Writing openrc: Writing')
+ common_utils.makedirs(consts.CONF_DIR)
+
+ with open(consts.OPENRC, 'w') as f:
+ f.writelines(lines)
+ logger.info('Writing openrc: Done')
+
+ logger.info('Source openrc: Sourcing')
+ try:
+ _source_file(consts.OPENRC)
+ except Exception as e:
+ logger.exception('Failed to source openrc')
+ return result_handler(consts.API_ERROR, str(e))
+ logger.info('Source openrc: Done')
+
+ return result_handler(consts.API_SUCCESS, {'openrc': openrc_vars})
+
+
+def upload_pod_file(args):
+ try:
+ pod_file = args['file']
+ except KeyError:
+ return result_handler(consts.API_ERROR, 'file must be provided')
+
+ logger.info('Checking file')
+ data = yaml.load(pod_file.read())
+ if not isinstance(data, collections.Mapping):
+ return result_handler(consts.API_ERROR, 'invalid yaml file')
+
+ logger.info('Writing file')
+ with open(consts.POD_FILE, 'w') as f:
+ yaml.dump(data, f, default_flow_style=False)
+ logger.info('Writing finished')
+
+ return result_handler(consts.API_SUCCESS, {'pod_info': data})
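
The new update_openrc handler writes the given variables to consts.OPENRC and sources them, while upload_pod_file validates the uploaded YAML before writing it to consts.POD_FILE. A minimal sketch of calling update_openrc through the API, assuming the same env action endpoint as above (URL and credential values are placeholders, not taken from this diff):

    # Hedged sketch: the payload shape follows _post_args(): {'action': ..., 'args': {...}}.
    import requests

    payload = {
        'action': 'update_openrc',
        'args': {
            'openrc': {                               # must be a mapping, see update_openrc()
                'OS_USERNAME': 'admin',
                'OS_PASSWORD': 'secret',              # placeholder credentials
                'OS_AUTH_URL': 'http://192.168.1.1:5000/v3',
                'OS_PROJECT_NAME': 'admin',
            }
        }
    }
    resp = requests.post('http://localhost:8888/yardstick/env/action',  # assumed endpoint
                         json=payload)
    print(resp.json())
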
diff --git a/api/resources/results.py b/api/resources/results.py
index 86fc25193..a0527ed8c 100644
--- a/api/resources/results.py
+++ b/api/resources/results.py
@@ -28,12 +28,12 @@ def getResult(args):
uuid.UUID(task_id)
except KeyError:
message = 'task_id must be provided'
- return common_utils.error_handler(message)
+ return common_utils.result_handler(2, message)
task = TasksHandler().get_task_by_taskid(task_id)
def _unfinished():
- return common_utils.result_handler(0, [])
+ return common_utils.result_handler(0, {})
def _finished():
testcases = task.details.split(',')
@@ -44,7 +44,7 @@ def getResult(args):
data = common_utils.translate_to_str(influx_utils.query(query_sql))
return data
- result = {k: get_data(k) for k in testcases}
+ result = _format_data({k: get_data(k) for k in testcases})
return common_utils.result_handler(1, result)
@@ -61,4 +61,59 @@ def getResult(args):
}
return switcher.get(status, lambda: 'nothing')()
except IndexError:
- return common_utils.error_handler('no such task')
+ return common_utils.result_handler(2, 'no such task')
+
+
+def _format_data(data):
+ try:
+ first_value = data.values()[0][0]
+ except IndexError:
+ return {'criteria': 'FAIL', 'testcases': {}}
+ else:
+ info = {
+ 'deploy_scenario': first_value.get('deploy_scenario'),
+ 'installer': first_value.get('installer'),
+ 'pod_name': first_value.get('pod_name'),
+ 'version': first_value.get('version')
+ }
+ task_id = first_value.get('task_id')
+ criteria = first_value.get('criteria')
+ testcases = {k: _get_case_data(v) for k, v in data.items()}
+
+ result = {
+ 'criteria': criteria,
+ 'info': info,
+ 'task_id': task_id,
+ 'testcases': testcases
+ }
+ return result
+
+
+def _get_case_data(data):
+ try:
+ scenario = data[0]
+ except IndexError:
+ return {'tc_data': [], 'criteria': 'FAIL'}
+ else:
+ tc_data = [_get_scenario_data(s) for s in data]
+ criteria = scenario.get('criteria')
+ return {'tc_data': tc_data, 'criteria': criteria}
+
+
+def _get_scenario_data(data):
+ result = {
+ 'data': {},
+ 'timestamp': ''
+ }
+
+ blacklist = {'criteria', 'deploy_scenario', 'host', 'installer',
+ 'pod_name', 'runner_id', 'scenarios', 'target',
+ 'task_id', 'time', 'version'}
+
+ keys = set(data.keys()) - set(blacklist)
+ for k in keys:
+ result['data'][k] = data[k]
+
+ result['timestamp'] = data.get('time')
+
+ return result
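
_format_data() and its helpers reshape the per-testcase InfluxDB rows into a single summary: shared metadata goes under 'info', and each test case keeps its criteria plus a list of per-record measurements with the blacklisted bookkeeping fields stripped. A small illustration of the expected shapes, with invented values (only the keys mirror the code above):

    # Hedged illustration of the input/output shape of _format_data().
    # All field values are made up for the example.
    raw = {
        'tc002': [
            {'criteria': 'PASS', 'deploy_scenario': 'os-nosdn-nofeature-ha',
             'installer': 'compass', 'pod_name': 'pod1', 'version': 'danube',
             'task_id': 'example-task-id', 'time': '2017-01-01T00:00:00Z',
             'rtt': 0.5},
        ],
    }
    # _format_data(raw) would return roughly:
    # {'criteria': 'PASS',
    #  'info': {'deploy_scenario': 'os-nosdn-nofeature-ha', 'installer': 'compass',
    #           'pod_name': 'pod1', 'version': 'danube'},
    #  'task_id': 'example-task-id',
    #  'testcases': {'tc002': {'criteria': 'PASS',
    #                          'tc_data': [{'data': {'rtt': 0.5},
    #                                       'timestamp': '2017-01-01T00:00:00Z'}]}}}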