author     chenjiankun <chenjiankun1@huawei.com>     2017-07-07 03:13:36 +0000
committer  chenjiankun <chenjiankun1@huawei.com>     2017-07-11 09:47:30 +0000
commit     0d18f9d3299480cb43e6b335180e5cbdb58505c3 (patch)
tree       a35aa6373a88eddb3d13f4fdb7cd3db9bad1a4ee /api/resources
parent     41136dafb30d0c410e92f9f7a7c19eae60f224e2 (diff)
Yardstick API architecture improvement
JIRA: YARDSTICK-710
Since we plan to add API v2 and a GUI, we need to support PUT and DELETE
methods, so the architecture needs to be improved.
Change-Id: Ie20a79c26ef6c581897ce4e63980fa2895b162d2
Signed-off-by: chenjiankun <chenjiankun1@huawei.com>
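The refactor replaces the module-level handler functions under api/resources/ with ApiResource subclasses under api/resources/v1/, so each resource can grow its own get/post (and later put/delete) handlers. Below is a minimal sketch of that pattern; the class name V1Example is purely illustrative, and ApiResource._dispatch_post() (defined in api/__init__.py, not part of this diff) is assumed to route the posted 'action' field to the method of the same name.

```python
from api import ApiResource
from yardstick.common import constants as consts
from yardstick.common.utils import result_handler


class V1Example(ApiResource):  # illustrative name, not a class in this change

    def get(self):
        # query-style endpoints read their arguments and return a result envelope
        args = self._get_args()
        return result_handler(consts.API_SUCCESS, {'args': args})

    def post(self):
        # action-style endpoints delegate to a handler named after the posted
        # 'action' field, e.g. run_test_case or create_grafana (assumed routing)
        return self._dispatch_post()

    def run_test_case(self, args):
        # handlers receive the posted 'args' dict and reply through result_handler
        return result_handler(consts.API_SUCCESS, {'echo': args})
```

A put() or delete() handler can later be added per class, which the old flat-function modules could not express cleanly.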
Diffstat (limited to 'api/resources')
-rw-r--r--  api/resources/asynctask.py           64
-rw-r--r--  api/resources/case_docs.py           30
-rw-r--r--  api/resources/env_action.py         427
-rw-r--r--  api/resources/release_action.py      44
-rw-r--r--  api/resources/results.py             69
-rw-r--r--  api/resources/samples_action.py      45
-rw-r--r--  api/resources/testcases.py           21
-rw-r--r--  api/resources/testsuites_action.py   46
-rw-r--r--  api/resources/v1/__init__.py          0
-rw-r--r--  api/resources/v1/asynctasks.py       65
-rw-r--r--  api/resources/v1/env.py             421
-rw-r--r--  api/resources/v1/results.py          78
-rw-r--r--  api/resources/v1/testcases.py       114
-rw-r--r--  api/resources/v1/testsuites.py       64
14 files changed, 742 insertions, 746 deletions
diff --git a/api/resources/asynctask.py b/api/resources/asynctask.py deleted file mode 100644 index 39b47c0ee..000000000 --- a/api/resources/asynctask.py +++ /dev/null @@ -1,64 +0,0 @@ -# ############################################################################ -# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others. -# -# All rights reserved. This program and the accompanying materials -# are made available under the terms of the Apache License, Version 2.0 -# which accompanies this distribution, and is available at -# http://www.apache.org/licenses/LICENSE-2.0 -# ############################################################################ -import uuid -import logging - -from api.utils.common import result_handler -from api.database.v1.handlers import AsyncTaskHandler -from yardstick.common import constants as consts - -LOG = logging.getLogger(__name__) -LOG.setLevel(logging.DEBUG) - - -def default(args): - return _get_status(args) - - -def _get_status(args): - try: - task_id = args['task_id'] - except KeyError: - return result_handler(consts.API_ERROR, 'task_id must be provided') - - try: - uuid.UUID(task_id) - except ValueError: - return result_handler(consts.API_ERROR, 'invalid task_id') - - asynctask_handler = AsyncTaskHandler() - try: - asynctask = asynctask_handler.get_task_by_taskid(task_id) - except ValueError: - return result_handler(consts.API_ERROR, 'invalid task_id') - - def _unfinished(): - return result_handler(consts.TASK_NOT_DONE, {}) - - def _finished(): - return result_handler(consts.TASK_DONE, {}) - - def _error(): - return result_handler(consts.TASK_FAILED, asynctask.error) - - status = asynctask.status - LOG.debug('Task status is: %s', status) - - if status not in [consts.TASK_NOT_DONE, - consts.TASK_DONE, - consts.TASK_FAILED]: - return result_handler(consts.API_ERROR, 'internal server error') - - switcher = { - consts.TASK_NOT_DONE: _unfinished, - consts.TASK_DONE: _finished, - consts.TASK_FAILED: _error - } - - return switcher.get(status)() diff --git a/api/resources/case_docs.py b/api/resources/case_docs.py deleted file mode 100644 index 289410d2d..000000000 --- a/api/resources/case_docs.py +++ /dev/null @@ -1,30 +0,0 @@ -import os -import logging - -from api.utils.common import result_handler -from yardstick.common import constants as consts - -LOG = logging.getLogger(__name__) -LOG.setLevel(logging.DEBUG) - - -def default(args): - return get_case_docs(args) - - -def get_case_docs(args): - try: - case_name = args['case_name'] - except KeyError: - return result_handler(consts.API_ERROR, 'case_name must be provided') - - docs_path = os.path.join(consts.DOCS_DIR, '{}.rst'.format(case_name)) - - if not os.path.exists(docs_path): - return result_handler(consts.API_ERROR, 'case not exists') - - LOG.info('Reading %s', case_name) - with open(docs_path) as f: - content = f.read() - - return result_handler(consts.API_SUCCESS, {'docs': content}) diff --git a/api/resources/env_action.py b/api/resources/env_action.py deleted file mode 100644 index fed987063..000000000 --- a/api/resources/env_action.py +++ /dev/null @@ -1,427 +0,0 @@ -############################################################################## -# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others. -# -# All rights reserved. 
This program and the accompanying materials -# are made available under the terms of the Apache License, Version 2.0 -# which accompanies this distribution, and is available at -# http://www.apache.org/licenses/LICENSE-2.0 -############################################################################## -from __future__ import absolute_import - -import errno -import logging -import os -import subprocess -import threading -import time -import uuid -import glob -import yaml -import collections -from subprocess import PIPE - -from six.moves import configparser -from oslo_serialization import jsonutils -from docker import Client - -from api.database.v1.handlers import AsyncTaskHandler -from api.utils import influx -from api.utils.common import result_handler -from yardstick.common import constants as consts -from yardstick.common import utils as common_utils -from yardstick.common import openstack_utils -from yardstick.common.httpClient import HttpClient - - -LOG = logging.getLogger(__name__) -LOG.setLevel(logging.DEBUG) - -async_handler = AsyncTaskHandler() - - -def create_grafana(args): - task_id = str(uuid.uuid4()) - - thread = threading.Thread(target=_create_grafana, args=(task_id,)) - thread.start() - - return result_handler(consts.API_SUCCESS, {'task_id': task_id}) - - -def _create_grafana(task_id): - _create_task(task_id) - - client = Client(base_url=consts.DOCKER_URL) - - try: - LOG.info('Checking if grafana image exist') - image = '{}:{}'.format(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG) - if not _check_image_exist(client, image): - LOG.info('Grafana image not exist, start pulling') - client.pull(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG) - - LOG.info('Createing grafana container') - _create_grafana_container(client) - LOG.info('Grafana container is created') - - time.sleep(5) - - LOG.info('Creating data source for grafana') - _create_data_source() - - LOG.info('Creating dashboard for grafana') - _create_dashboard() - - _update_task_status(task_id) - LOG.info('Finished') - except Exception as e: - _update_task_error(task_id, str(e)) - LOG.exception('Create grafana failed') - - -def _create_dashboard(): - url = 'http://admin:admin@%s:3000/api/dashboards/db' % consts.GRAFANA_IP - path = os.path.join(consts.REPOS_DIR, 'dashboard', '*dashboard.json') - - for i in sorted(glob.iglob(path)): - with open(i) as f: - data = jsonutils.load(f) - try: - HttpClient().post(url, data) - except Exception: - LOG.exception('Create dashboard %s failed', i) - raise - - -def _create_data_source(): - url = 'http://admin:admin@%s:3000/api/datasources' % consts.GRAFANA_IP - data = { - "name": "yardstick", - "type": "influxdb", - "access": "proxy", - "url": "http://%s:8086" % consts.INFLUXDB_IP, - "password": "root", - "user": "root", - "database": "yardstick", - "basicAuth": True, - "basicAuthUser": "admin", - "basicAuthPassword": "admin", - "isDefault": False, - } - try: - HttpClient().post(url, data) - except Exception: - LOG.exception('Create datasources failed') - raise - - -def _create_grafana_container(client): - ports = [3000] - port_bindings = {k: k for k in ports} - restart_policy = {"MaximumRetryCount": 0, "Name": "always"} - host_config = client.create_host_config(port_bindings=port_bindings, - restart_policy=restart_policy) - - LOG.info('Creating container') - container = client.create_container(image='%s:%s' % (consts.GRAFANA_IMAGE, - consts.GRAFANA_TAG), - ports=ports, - detach=True, - tty=True, - host_config=host_config) - LOG.info('Starting container') - client.start(container) - - -def 
_check_image_exist(client, t): - return any(t in a['RepoTags'][0] for a in client.images() if a['RepoTags']) - - -def create_influxdb(args): - task_id = str(uuid.uuid4()) - - thread = threading.Thread(target=_create_influxdb, args=(task_id,)) - thread.start() - - return result_handler(consts.API_SUCCESS, {'task_id': task_id}) - - -def _create_influxdb(task_id): - _create_task(task_id) - - client = Client(base_url=consts.DOCKER_URL) - - try: - LOG.info('Changing output to influxdb') - _change_output_to_influxdb() - - LOG.info('Checking if influxdb image exist') - if not _check_image_exist(client, '%s:%s' % (consts.INFLUXDB_IMAGE, - consts.INFLUXDB_TAG)): - LOG.info('Influxdb image not exist, start pulling') - client.pull(consts.INFLUXDB_IMAGE, tag=consts.INFLUXDB_TAG) - - LOG.info('Createing influxdb container') - _create_influxdb_container(client) - LOG.info('Influxdb container is created') - - time.sleep(5) - - LOG.info('Config influxdb') - _config_influxdb() - - _update_task_status(task_id) - - LOG.info('Finished') - except Exception as e: - _update_task_error(task_id, str(e)) - LOG.exception('Creating influxdb failed') - - -def _create_influxdb_container(client): - - ports = [8083, 8086] - port_bindings = {k: k for k in ports} - restart_policy = {"MaximumRetryCount": 0, "Name": "always"} - host_config = client.create_host_config(port_bindings=port_bindings, - restart_policy=restart_policy) - - LOG.info('Creating container') - container = client.create_container(image='%s:%s' % (consts.INFLUXDB_IMAGE, - consts.INFLUXDB_TAG), - ports=ports, - detach=True, - tty=True, - host_config=host_config) - LOG.info('Starting container') - client.start(container) - - -def _config_influxdb(): - try: - client = influx.get_data_db_client() - client.create_user(consts.INFLUXDB_USER, - consts.INFLUXDB_PASS, - consts.INFLUXDB_DB_NAME) - client.create_database(consts.INFLUXDB_DB_NAME) - LOG.info('Success to config influxDB') - except Exception: - LOG.exception('Config influxdb failed') - - -def _change_output_to_influxdb(): - common_utils.makedirs(consts.CONF_DIR) - - parser = configparser.ConfigParser() - LOG.info('Reading output sample configuration') - parser.read(consts.CONF_SAMPLE_FILE) - - LOG.info('Set dispatcher to influxdb') - parser.set('DEFAULT', 'dispatcher', 'influxdb') - parser.set('dispatcher_influxdb', 'target', - 'http://%s:8086' % consts.INFLUXDB_IP) - - LOG.info('Writing to %s', consts.CONF_FILE) - with open(consts.CONF_FILE, 'w') as f: - parser.write(f) - - -def prepare_env(args): - task_id = str(uuid.uuid4()) - - thread = threading.Thread(target=_prepare_env_daemon, args=(task_id,)) - thread.start() - - return result_handler(consts.API_SUCCESS, {'task_id': task_id}) - - -def _already_source_openrc(): - """Check if openrc is sourced already""" - return all(os.environ.get(k) for k in ['OS_AUTH_URL', 'OS_USERNAME', - 'OS_PASSWORD', 'EXTERNAL_NETWORK']) - - -def _prepare_env_daemon(task_id): - _create_task(task_id) - - try: - _create_directories() - - rc_file = consts.OPENRC - - LOG.info('Checkout Openrc Environment variable') - if not _already_source_openrc(): - LOG.info('Openrc variable not found in Environment') - if not os.path.exists(rc_file): - LOG.info('Openrc file not found') - installer_ip = os.environ.get('INSTALLER_IP', '192.168.200.2') - installer_type = os.environ.get('INSTALLER_TYPE', 'compass') - LOG.info('Getting openrc file from %s', installer_type) - _get_remote_rc_file(rc_file, installer_ip, installer_type) - LOG.info('Source openrc file') - _source_file(rc_file) - 
LOG.info('Appending external network') - _append_external_network(rc_file) - LOG.info('Openrc file exist, source openrc file') - _source_file(rc_file) - - LOG.info('Cleaning images') - _clean_images() - - LOG.info('Loading images') - _load_images() - - _update_task_status(task_id) - LOG.info('Finished') - except Exception as e: - _update_task_error(task_id, str(e)) - LOG.exception('Prepare env failed') - - -def _create_directories(): - common_utils.makedirs(consts.CONF_DIR) - - -def _source_file(rc_file): - common_utils.source_env(rc_file) - - -def _get_remote_rc_file(rc_file, installer_ip, installer_type): - - os_fetch_script = os.path.join(consts.RELENG_DIR, consts.FETCH_SCRIPT) - - try: - cmd = [os_fetch_script, '-d', rc_file, '-i', installer_type, - '-a', installer_ip] - p = subprocess.Popen(cmd, stdout=subprocess.PIPE) - p.communicate() - - if p.returncode != 0: - LOG.error('Failed to fetch credentials from installer') - except OSError as e: - if e.errno != errno.EEXIST: - raise - - -def _append_external_network(rc_file): - neutron_client = openstack_utils.get_neutron_client() - networks = neutron_client.list_networks()['networks'] - try: - ext_network = next(n['name'] for n in networks if n['router:external']) - except StopIteration: - LOG.warning("Can't find external network") - else: - cmd = 'export EXTERNAL_NETWORK=%s' % ext_network - try: - with open(rc_file, 'a') as f: - f.write(cmd + '\n') - except OSError as e: - if e.errno != errno.EEXIST: - raise - - -def _clean_images(): - cmd = [consts.CLEAN_IMAGES_SCRIPT] - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR) - output = p.communicate()[0] - LOG.debug(output) - - -def _load_images(): - cmd = [consts.LOAD_IMAGES_SCRIPT] - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR) - output = p.communicate()[0] - LOG.debug(output) - - -def _create_task(task_id): - async_handler.insert({'status': 0, 'task_id': task_id}) - - -def _update_task_status(task_id): - async_handler.update_attr(task_id, {'status': 1}) - - -def _update_task_error(task_id, error): - async_handler.update_attr(task_id, {'status': 2, 'error': error}) - - -def update_openrc(args): - try: - openrc_vars = args['openrc'] - except KeyError: - return result_handler(consts.API_ERROR, 'openrc must be provided') - else: - if not isinstance(openrc_vars, collections.Mapping): - return result_handler(consts.API_ERROR, 'args should be a dict') - - lines = ['export {}={}\n'.format(k, v) for k, v in openrc_vars.items()] - LOG.debug('Writing: %s', ''.join(lines)) - - LOG.info('Writing openrc: Writing') - common_utils.makedirs(consts.CONF_DIR) - - with open(consts.OPENRC, 'w') as f: - f.writelines(lines) - LOG.info('Writing openrc: Done') - - LOG.info('Source openrc: Sourcing') - try: - _source_file(consts.OPENRC) - except Exception as e: - LOG.exception('Failed to source openrc') - return result_handler(consts.API_ERROR, str(e)) - LOG.info('Source openrc: Done') - - return result_handler(consts.API_SUCCESS, {'openrc': openrc_vars}) - - -def upload_pod_file(args): - try: - pod_file = args['file'] - except KeyError: - return result_handler(consts.API_ERROR, 'file must be provided') - - LOG.info('Checking file') - data = yaml.load(pod_file.read()) - if not isinstance(data, collections.Mapping): - return result_handler(consts.API_ERROR, 'invalid yaml file') - - LOG.info('Writing file') - with open(consts.POD_FILE, 'w') as f: - yaml.dump(data, f, default_flow_style=False) - LOG.info('Writing finished') - - return 
result_handler(consts.API_SUCCESS, {'pod_info': data}) - - -def update_pod_file(args): - try: - pod_dic = args['pod'] - except KeyError: - return result_handler(consts.API_ERROR, 'pod must be provided') - else: - if not isinstance(pod_dic, collections.Mapping): - return result_handler(consts.API_ERROR, 'pod should be a dict') - - LOG.info('Writing file') - with open(consts.POD_FILE, 'w') as f: - yaml.dump(pod_dic, f, default_flow_style=False) - LOG.info('Writing finished') - - return result_handler(consts.API_SUCCESS, {'pod_info': pod_dic}) - - -def update_hosts(hosts_ip): - if not isinstance(hosts_ip, dict): - return result_handler(consts.API_ERROR, 'Error, args should be a dict') - LOG.info('Writing hosts: Writing') - LOG.debug('Writing: %s', hosts_ip) - cmd = ["sudo", "python", "write_hosts.py"] - p = subprocess.Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, - cwd = os.path.join(consts.REPOS_DIR, "api/resources")) - _, err = p.communicate(jsonutils.dumps(hosts_ip)) - if p.returncode != 0 : - return result_handler(consts.API_ERROR, err) - LOG.info('Writing hosts: Done') - return result_handler(consts.API_SUCCESS, 'success') diff --git a/api/resources/release_action.py b/api/resources/release_action.py deleted file mode 100644 index 9871c1fc3..000000000 --- a/api/resources/release_action.py +++ /dev/null @@ -1,44 +0,0 @@ -############################################################################## -# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others. -# -# All rights reserved. This program and the accompanying materials -# are made available under the terms of the Apache License, Version 2.0 -# which accompanies this distribution, and is available at -# http://www.apache.org/licenses/LICENSE-2.0 -############################################################################## -from __future__ import absolute_import -import uuid -import os -import logging - -from api.utils.common import result_handler -from api.utils.thread import TaskThread -from yardstick.common import constants as consts -from yardstick.benchmark.core import Param -from yardstick.benchmark.core.task import Task - -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - - -def run_test_case(args): - try: - case_name = args['testcase'] - except KeyError: - return result_handler(consts.API_ERROR, 'testcase must be provided') - - testcase = os.path.join(consts.TESTCASE_DIR, '{}.yaml'.format(case_name)) - - task_id = str(uuid.uuid4()) - - task_args = { - 'inputfile': [testcase], - 'task_id': task_id - } - task_args.update(args.get('opts', {})) - - param = Param(task_args) - task_thread = TaskThread(Task().start, param) - task_thread.start() - - return result_handler(consts.API_SUCCESS, {'task_id': task_id}) diff --git a/api/resources/results.py b/api/resources/results.py deleted file mode 100644 index 692e00cc6..000000000 --- a/api/resources/results.py +++ /dev/null @@ -1,69 +0,0 @@ -############################################################################## -# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others. -# -# All rights reserved. 
This program and the accompanying materials -# are made available under the terms of the Apache License, Version 2.0 -# which accompanies this distribution, and is available at -# http://www.apache.org/licenses/LICENSE-2.0 -############################################################################## -from __future__ import absolute_import -import logging -import uuid -import json - -from api.utils.common import result_handler -from api.database.v1.handlers import TasksHandler -from yardstick.common import constants as consts - -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - - -def default(args): - return getResult(args) - - -def getResult(args): - try: - task_id = args['task_id'] - except KeyError: - return result_handler(consts.API_ERROR, 'task_id must be provided') - - try: - uuid.UUID(task_id) - except ValueError: - return result_handler(consts.API_ERROR, 'invalid task_id') - - task_handler = TasksHandler() - try: - task = task_handler.get_task_by_taskid(task_id) - except ValueError: - return result_handler(consts.API_ERROR, 'invalid task_id') - - def _unfinished(): - return result_handler(consts.TASK_NOT_DONE, {}) - - def _finished(): - if task.result: - return result_handler(consts.TASK_DONE, json.loads(task.result)) - else: - return result_handler(consts.TASK_DONE, {}) - - def _error(): - return result_handler(consts.TASK_FAILED, task.error) - - status = task.status - logger.debug('Task status is: %s', status) - - if status not in [consts.TASK_NOT_DONE, - consts.TASK_DONE, - consts.TASK_FAILED]: - return result_handler(consts.API_ERROR, 'internal server error') - - switcher = { - consts.TASK_NOT_DONE: _unfinished, - consts.TASK_DONE: _finished, - consts.TASK_FAILED: _error - } - - return switcher.get(status)() diff --git a/api/resources/samples_action.py b/api/resources/samples_action.py deleted file mode 100644 index 10b9980af..000000000 --- a/api/resources/samples_action.py +++ /dev/null @@ -1,45 +0,0 @@ -############################################################################## -# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others. -# -# All rights reserved. 
This program and the accompanying materials -# are made available under the terms of the Apache License, Version 2.0 -# which accompanies this distribution, and is available at -# http://www.apache.org/licenses/LICENSE-2.0 -############################################################################## -from __future__ import absolute_import -import uuid -import os -import logging - -from api.utils.common import result_handler -from api.utils.thread import TaskThread -from yardstick.common import constants as consts -from yardstick.benchmark.core import Param -from yardstick.benchmark.core.task import Task - -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - - -def run_test_case(args): - try: - case_name = args['testcase'] - except KeyError: - return result_handler(consts.API_ERROR, 'testcase must be provided') - - testcase = os.path.join(consts.SAMPLE_CASE_DIR, - '{}.yaml'.format(case_name)) - - task_id = str(uuid.uuid4()) - - task_args = { - 'inputfile': [testcase], - 'task_id': task_id - } - task_args.update(args.get('opts', {})) - - param = Param(task_args) - task_thread = TaskThread(Task().start, param) - task_thread.start() - - return result_handler(consts.API_SUCCESS, {'task_id': task_id}) diff --git a/api/resources/testcases.py b/api/resources/testcases.py deleted file mode 100644 index 6ee15efb3..000000000 --- a/api/resources/testcases.py +++ /dev/null @@ -1,21 +0,0 @@ -# ############################################################################ -# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others. -# -# All rights reserved. This program and the accompanying materials -# are made available under the terms of the Apache License, Version 2.0 -# which accompanies this distribution, and is available at -# http://www.apache.org/licenses/LICENSE-2.0 -# ############################################################################ -from yardstick.benchmark.core.testcase import Testcase -from yardstick.benchmark.core import Param -from api.utils import common as common_utils - - -def default(args): - return listAllTestcases(args) - - -def listAllTestcases(args): - param = Param(args) - testcase_list = Testcase().list_all(param) - return common_utils.result_handler(1, testcase_list) diff --git a/api/resources/testsuites_action.py b/api/resources/testsuites_action.py deleted file mode 100644 index e37eacc3e..000000000 --- a/api/resources/testsuites_action.py +++ /dev/null @@ -1,46 +0,0 @@ -############################################################################## -# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others. -# -# All rights reserved. 
This program and the accompanying materials -# are made available under the terms of the Apache License, Version 2.0 -# which accompanies this distribution, and is available at -# http://www.apache.org/licenses/LICENSE-2.0 -############################################################################## -from __future__ import absolute_import -import uuid -import os -import logging - -from api.utils.common import result_handler -from api.utils.thread import TaskThread -from yardstick.common import constants as consts -from yardstick.benchmark.core import Param -from yardstick.benchmark.core.task import Task - -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - - -def run_test_suite(args): - try: - suite_name = args['testsuite'] - except KeyError: - return result_handler(consts.API_ERROR, 'testsuite must be provided') - - testsuite = os.path.join(consts.TESTSUITE_DIR, - '{}.yaml'.format(suite_name)) - - task_id = str(uuid.uuid4()) - - task_args = { - 'inputfile': [testsuite], - 'task_id': task_id, - 'suite': True - } - task_args.update(args.get('opts', {})) - - param = Param(task_args) - task_thread = TaskThread(Task().start, param) - task_thread.start() - - return result_handler(consts.API_SUCCESS, {'task_id': task_id}) diff --git a/api/resources/v1/__init__.py b/api/resources/v1/__init__.py new file mode 100644 index 000000000..e69de29bb --- /dev/null +++ b/api/resources/v1/__init__.py diff --git a/api/resources/v1/asynctasks.py b/api/resources/v1/asynctasks.py new file mode 100644 index 000000000..759df214c --- /dev/null +++ b/api/resources/v1/asynctasks.py @@ -0,0 +1,65 @@ +# ############################################################################ +# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others. +# +# All rights reserved. 
This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# ############################################################################ +import uuid +import logging + +from api import ApiResource +from api.database.v1.handlers import AsyncTaskHandler +from yardstick.common import constants as consts +from yardstick.common.utils import result_handler + +LOG = logging.getLogger(__name__) +LOG.setLevel(logging.DEBUG) + + +class V1AsyncTask(ApiResource): + + def get(self): + args = self._get_args() + + try: + task_id = args['task_id'] + except KeyError: + return result_handler(consts.API_ERROR, 'task_id must be provided') + + try: + uuid.UUID(task_id) + except ValueError: + return result_handler(consts.API_ERROR, 'invalid task_id') + + asynctask_handler = AsyncTaskHandler() + try: + asynctask = asynctask_handler.get_task_by_taskid(task_id) + except ValueError: + return result_handler(consts.API_ERROR, 'invalid task_id') + + def _unfinished(): + return result_handler(consts.TASK_NOT_DONE, {}) + + def _finished(): + return result_handler(consts.TASK_DONE, {}) + + def _error(): + return result_handler(consts.TASK_FAILED, asynctask.error) + + status = asynctask.status + LOG.debug('Task status is: %s', status) + + if status not in [consts.TASK_NOT_DONE, + consts.TASK_DONE, + consts.TASK_FAILED]: + return result_handler(consts.API_ERROR, 'internal server error') + + switcher = { + consts.TASK_NOT_DONE: _unfinished, + consts.TASK_DONE: _finished, + consts.TASK_FAILED: _error + } + + return switcher.get(status)() diff --git a/api/resources/v1/env.py b/api/resources/v1/env.py new file mode 100644 index 000000000..4632f15fe --- /dev/null +++ b/api/resources/v1/env.py @@ -0,0 +1,421 @@ +############################################################################## +# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others. +# +# All rights reserved. 
This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## +from __future__ import absolute_import + +import errno +import logging +import os +import subprocess +import threading +import time +import uuid +import glob +import yaml +import collections + +from six.moves import configparser +from oslo_serialization import jsonutils +from docker import Client + +from api.database.v1.handlers import AsyncTaskHandler +from api.utils import influx +from api import ApiResource +from yardstick.common import constants as consts +from yardstick.common import utils +from yardstick.common.utils import result_handler +from yardstick.common import openstack_utils +from yardstick.common.httpClient import HttpClient + + +LOG = logging.getLogger(__name__) +LOG.setLevel(logging.DEBUG) + +async_handler = AsyncTaskHandler() + + +class V1Env(ApiResource): + + def post(self): + return self._dispatch_post() + + def create_grafana(self, args): + task_id = str(uuid.uuid4()) + + thread = threading.Thread(target=self._create_grafana, args=(task_id,)) + thread.start() + + return result_handler(consts.API_SUCCESS, {'task_id': task_id}) + + def _create_grafana(self, task_id): + self._create_task(task_id) + + client = Client(base_url=consts.DOCKER_URL) + + try: + LOG.info('Checking if grafana image exist') + image = '{}:{}'.format(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG) + if not self._check_image_exist(client, image): + LOG.info('Grafana image not exist, start pulling') + client.pull(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG) + + LOG.info('Createing grafana container') + self._create_grafana_container(client) + LOG.info('Grafana container is created') + + time.sleep(5) + + LOG.info('Creating data source for grafana') + self._create_data_source() + + LOG.info('Creating dashboard for grafana') + self._create_dashboard() + + self._update_task_status(task_id) + LOG.info('Finished') + except Exception as e: + self._update_task_error(task_id, str(e)) + LOG.exception('Create grafana failed') + + def _create_dashboard(self): + url = 'http://admin:admin@%s:3000/api/dashboards/db' % consts.GRAFANA_IP + path = os.path.join(consts.REPOS_DIR, 'dashboard', '*dashboard.json') + + for i in sorted(glob.iglob(path)): + with open(i) as f: + data = jsonutils.load(f) + try: + HttpClient().post(url, data) + except Exception: + LOG.exception('Create dashboard %s failed', i) + raise + + def _create_data_source(self): + url = 'http://admin:admin@%s:3000/api/datasources' % consts.GRAFANA_IP + data = { + "name": "yardstick", + "type": "influxdb", + "access": "proxy", + "url": "http://%s:8086" % consts.INFLUXDB_IP, + "password": "root", + "user": "root", + "database": "yardstick", + "basicAuth": True, + "basicAuthUser": "admin", + "basicAuthPassword": "admin", + "isDefault": False, + } + try: + HttpClient().post(url, data) + except Exception: + LOG.exception('Create datasources failed') + raise + + def _create_grafana_container(self, client): + ports = [3000] + port_bindings = {k: k for k in ports} + restart_policy = {"MaximumRetryCount": 0, "Name": "always"} + host_config = client.create_host_config(port_bindings=port_bindings, + restart_policy=restart_policy) + + LOG.info('Creating container') + container = client.create_container(image='%s:%s' % + (consts.GRAFANA_IMAGE, + consts.GRAFANA_TAG), + 
ports=ports, + detach=True, + tty=True, + host_config=host_config) + LOG.info('Starting container') + client.start(container) + + def _check_image_exist(self, client, t): + return any(t in a['RepoTags'][0] + for a in client.images() if a['RepoTags']) + + def create_influxdb(self, args): + task_id = str(uuid.uuid4()) + + thread = threading.Thread(target=self._create_influxdb, args=(task_id,)) + thread.start() + + return result_handler(consts.API_SUCCESS, {'task_id': task_id}) + + def _create_influxdb(self, task_id): + self._create_task(task_id) + + client = Client(base_url=consts.DOCKER_URL) + + try: + LOG.info('Changing output to influxdb') + self._change_output_to_influxdb() + + LOG.info('Checking if influxdb image exist') + if not self._check_image_exist(client, '%s:%s' % + (consts.INFLUXDB_IMAGE, + consts.INFLUXDB_TAG)): + LOG.info('Influxdb image not exist, start pulling') + client.pull(consts.INFLUXDB_IMAGE, tag=consts.INFLUXDB_TAG) + + LOG.info('Createing influxdb container') + self._create_influxdb_container(client) + LOG.info('Influxdb container is created') + + time.sleep(5) + + LOG.info('Config influxdb') + self._config_influxdb() + + self._update_task_status(task_id) + + LOG.info('Finished') + except Exception as e: + self._update_task_error(task_id, str(e)) + LOG.exception('Creating influxdb failed') + + def _create_influxdb_container(self, client): + + ports = [8083, 8086] + port_bindings = {k: k for k in ports} + restart_policy = {"MaximumRetryCount": 0, "Name": "always"} + host_config = client.create_host_config(port_bindings=port_bindings, + restart_policy=restart_policy) + + LOG.info('Creating container') + container = client.create_container(image='%s:%s' % + (consts.INFLUXDB_IMAGE, + consts.INFLUXDB_TAG), + ports=ports, + detach=True, + tty=True, + host_config=host_config) + LOG.info('Starting container') + client.start(container) + + def _config_influxdb(self): + try: + client = influx.get_data_db_client() + client.create_user(consts.INFLUXDB_USER, + consts.INFLUXDB_PASS, + consts.INFLUXDB_DB_NAME) + client.create_database(consts.INFLUXDB_DB_NAME) + LOG.info('Success to config influxDB') + except Exception: + LOG.exception('Config influxdb failed') + + def _change_output_to_influxdb(self): + utils.makedirs(consts.CONF_DIR) + + parser = configparser.ConfigParser() + LOG.info('Reading output sample configuration') + parser.read(consts.CONF_SAMPLE_FILE) + + LOG.info('Set dispatcher to influxdb') + parser.set('DEFAULT', 'dispatcher', 'influxdb') + parser.set('dispatcher_influxdb', 'target', + 'http://%s:8086' % consts.INFLUXDB_IP) + + LOG.info('Writing to %s', consts.CONF_FILE) + with open(consts.CONF_FILE, 'w') as f: + parser.write(f) + + def prepare_env(self, args): + task_id = str(uuid.uuid4()) + + thread = threading.Thread(target=self._prepare_env_daemon, + args=(task_id,)) + thread.start() + + return result_handler(consts.API_SUCCESS, {'task_id': task_id}) + + def _already_source_openrc(self): + """Check if openrc is sourced already""" + return all(os.environ.get(k) for k in ['OS_AUTH_URL', + 'OS_USERNAME', + 'OS_PASSWORD', + 'EXTERNAL_NETWORK']) + + def _prepare_env_daemon(self, task_id): + self._create_task(task_id) + + try: + self._create_directories() + + rc_file = consts.OPENRC + + LOG.info('Checkout Openrc Environment variable') + if not self._already_source_openrc(): + LOG.info('Openrc variable not found in Environment') + if not os.path.exists(rc_file): + LOG.info('Openrc file not found') + installer_ip = os.environ.get('INSTALLER_IP', + '192.168.200.2') + 
installer_type = os.environ.get('INSTALLER_TYPE', 'compass') + LOG.info('Getting openrc file from %s', installer_type) + self._get_remote_rc_file(rc_file, + installer_ip, + installer_type) + LOG.info('Source openrc file') + self._source_file(rc_file) + LOG.info('Appending external network') + self._append_external_network(rc_file) + LOG.info('Openrc file exist, source openrc file') + self._source_file(rc_file) + + LOG.info('Cleaning images') + self._clean_images() + + LOG.info('Loading images') + self._load_images() + + self._update_task_status(task_id) + LOG.info('Finished') + except Exception as e: + self._update_task_error(task_id, str(e)) + LOG.exception('Prepare env failed') + + def _create_directories(self): + utils.makedirs(consts.CONF_DIR) + + def _source_file(self, rc_file): + utils.source_env(rc_file) + + def _get_remote_rc_file(self, rc_file, installer_ip, installer_type): + + os_fetch_script = os.path.join(consts.RELENG_DIR, consts.FETCH_SCRIPT) + + try: + cmd = [os_fetch_script, '-d', rc_file, '-i', installer_type, + '-a', installer_ip] + p = subprocess.Popen(cmd, stdout=subprocess.PIPE) + p.communicate() + + if p.returncode != 0: + LOG.error('Failed to fetch credentials from installer') + except OSError as e: + if e.errno != errno.EEXIST: + raise + + def _append_external_network(self, rc_file): + neutron_client = openstack_utils.get_neutron_client() + networks = neutron_client.list_networks()['networks'] + try: + ext_network = next(n['name'] + for n in networks if n['router:external']) + except StopIteration: + LOG.warning("Can't find external network") + else: + cmd = 'export EXTERNAL_NETWORK=%s' % ext_network + try: + with open(rc_file, 'a') as f: + f.write(cmd + '\n') + except OSError as e: + if e.errno != errno.EEXIST: + raise + + def _clean_images(self): + cmd = [consts.CLEAN_IMAGES_SCRIPT] + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR) + output = p.communicate()[0] + LOG.debug(output) + + def _load_images(self): + cmd = [consts.LOAD_IMAGES_SCRIPT] + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=consts.REPOS_DIR) + output = p.communicate()[0] + LOG.debug(output) + + def _create_task(self, task_id): + async_handler.insert({'status': 0, 'task_id': task_id}) + + def _update_task_status(self, task_id): + async_handler.update_attr(task_id, {'status': 1}) + + def _update_task_error(self, task_id, error): + async_handler.update_attr(task_id, {'status': 2, 'error': error}) + + def update_openrc(self, args): + try: + openrc_vars = args['openrc'] + except KeyError: + return result_handler(consts.API_ERROR, 'openrc must be provided') + else: + if not isinstance(openrc_vars, collections.Mapping): + return result_handler(consts.API_ERROR, 'args should be a dict') + + lines = ['export {}={}\n'.format(k, v) for k, v in openrc_vars.items()] + LOG.debug('Writing: %s', ''.join(lines)) + + LOG.info('Writing openrc: Writing') + utils.makedirs(consts.CONF_DIR) + + with open(consts.OPENRC, 'w') as f: + f.writelines(lines) + LOG.info('Writing openrc: Done') + + LOG.info('Source openrc: Sourcing') + try: + self._source_file(consts.OPENRC) + except Exception as e: + LOG.exception('Failed to source openrc') + return result_handler(consts.API_ERROR, str(e)) + LOG.info('Source openrc: Done') + + return result_handler(consts.API_SUCCESS, {'openrc': openrc_vars}) + + def upload_pod_file(self, args): + try: + pod_file = args['file'] + except KeyError: + return result_handler(consts.API_ERROR, 'file must be provided') + + LOG.info('Checking file') + data = 
yaml.load(pod_file.read()) + if not isinstance(data, collections.Mapping): + return result_handler(consts.API_ERROR, 'invalid yaml file') + + LOG.info('Writing file') + with open(consts.POD_FILE, 'w') as f: + yaml.dump(data, f, default_flow_style=False) + LOG.info('Writing finished') + + return result_handler(consts.API_SUCCESS, {'pod_info': data}) + + def update_pod_file(self, args): + try: + pod_dic = args['pod'] + except KeyError: + return result_handler(consts.API_ERROR, 'pod must be provided') + else: + if not isinstance(pod_dic, collections.Mapping): + return result_handler(consts.API_ERROR, 'pod should be a dict') + + LOG.info('Writing file') + with open(consts.POD_FILE, 'w') as f: + yaml.dump(pod_dic, f, default_flow_style=False) + LOG.info('Writing finished') + + return result_handler(consts.API_SUCCESS, {'pod_info': pod_dic}) + + def update_hosts(self, hosts_ip): + if not isinstance(hosts_ip, collections.Mapping): + return result_handler(consts.API_ERROR, 'args should be a dict') + LOG.info('Writing hosts: Writing') + LOG.debug('Writing: %s', hosts_ip) + cmd = ["sudo", "python", "write_hosts.py"] + p = subprocess.Popen(cmd, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=os.path.join(consts.REPOS_DIR, + "api/resources")) + _, err = p.communicate(jsonutils.dumps(hosts_ip)) + if p.returncode != 0: + return result_handler(consts.API_ERROR, err) + LOG.info('Writing hosts: Done') + return result_handler(consts.API_SUCCESS, 'success') diff --git a/api/resources/v1/results.py b/api/resources/v1/results.py new file mode 100644 index 000000000..0493b43b6 --- /dev/null +++ b/api/resources/v1/results.py @@ -0,0 +1,78 @@ +############################################################################## +# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others. +# +# All rights reserved. 
This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## +from __future__ import absolute_import +import logging +import uuid +import json +import os + +from flasgger.utils import swag_from + +from api import ApiResource +from api.database.v1.handlers import TasksHandler +from yardstick.common import constants as consts +from yardstick.common.utils import result_handler +from api.swagger import models + +LOG = logging.getLogger(__name__) +LOG.setLevel(logging.DEBUG) + + +ResultModel = models.ResultModel + + +class V1Result(ApiResource): + + @swag_from(os.path.join(consts.REPOS_DIR, 'api/swagger/docs/results.yaml')) + def get(self): + args = self._get_args() + + try: + task_id = args['task_id'] + except KeyError: + return result_handler(consts.API_ERROR, 'task_id must be provided') + + try: + uuid.UUID(task_id) + except ValueError: + return result_handler(consts.API_ERROR, 'invalid task_id') + + task_handler = TasksHandler() + try: + task = task_handler.get_task_by_taskid(task_id) + except ValueError: + return result_handler(consts.API_ERROR, 'invalid task_id') + + def _unfinished(): + return result_handler(consts.TASK_NOT_DONE, {}) + + def _finished(): + if task.result: + return result_handler(consts.TASK_DONE, json.loads(task.result)) + else: + return result_handler(consts.TASK_DONE, {}) + + def _error(): + return result_handler(consts.TASK_FAILED, task.error) + + status = task.status + LOG.debug('Task status is: %s', status) + + if status not in [consts.TASK_NOT_DONE, + consts.TASK_DONE, + consts.TASK_FAILED]: + return result_handler(consts.API_ERROR, 'internal server error') + + switcher = { + consts.TASK_NOT_DONE: _unfinished, + consts.TASK_DONE: _finished, + consts.TASK_FAILED: _error + } + + return switcher.get(status)() diff --git a/api/resources/v1/testcases.py b/api/resources/v1/testcases.py new file mode 100644 index 000000000..fbeb36f31 --- /dev/null +++ b/api/resources/v1/testcases.py @@ -0,0 +1,114 @@ +# ############################################################################ +# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others. +# +# All rights reserved. 
This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# ############################################################################ + +from __future__ import absolute_import +import uuid +import os +import logging + +from flasgger.utils import swag_from + +from yardstick.benchmark.core.testcase import Testcase +from yardstick.benchmark.core.task import Task +from yardstick.benchmark.core import Param +from yardstick.common import constants as consts +from yardstick.common.utils import result_handler +from api.utils.thread import TaskThread +from api import ApiResource +from api.swagger import models + +LOG = logging.getLogger(__name__) +LOG.setLevel(logging.DEBUG) + + +class V1Testcase(ApiResource): + + def get(self): + param = Param({}) + testcase_list = Testcase().list_all(param) + return result_handler(consts.API_SUCCESS, testcase_list) + + +class V1CaseDocs(ApiResource): + + def get(self, case_name): + docs_path = os.path.join(consts.DOCS_DIR, '{}.rst'.format(case_name)) + + if not os.path.exists(docs_path): + return result_handler(consts.API_ERROR, 'case not exists') + + LOG.info('Reading %s', case_name) + with open(docs_path) as f: + content = f.read() + + return result_handler(consts.API_SUCCESS, {'docs': content}) + + +TestCaseActionModel = models.TestCaseActionModel +TestCaseActionArgsModel = models.TestCaseActionArgsModel +TestCaseActionArgsOptsModel = models.TestCaseActionArgsOptsModel +TestCaseActionArgsOptsTaskArgModel = models.TestCaseActionArgsOptsTaskArgModel + + +class V1ReleaseCase(ApiResource): + + @swag_from(os.path.join(consts.REPOS_DIR, + 'api/swagger/docs/release_action.yaml')) + def post(self): + return self._dispatch_post() + + def run_test_case(self, args): + try: + name = args['testcase'] + except KeyError: + return result_handler(consts.API_ERROR, 'testcase must be provided') + + testcase = os.path.join(consts.TESTCASE_DIR, '{}.yaml'.format(name)) + + task_id = str(uuid.uuid4()) + + task_args = { + 'inputfile': [testcase], + 'task_id': task_id + } + task_args.update(args.get('opts', {})) + + param = Param(task_args) + task_thread = TaskThread(Task().start, param) + task_thread.start() + + return result_handler(consts.API_SUCCESS, {'task_id': task_id}) + + +class V1SampleCase(ApiResource): + + def post(self): + return self._dispatch_post() + + def run_test_case(self, args): + try: + name = args['testcase'] + except KeyError: + return result_handler(consts.API_ERROR, 'testcase must be provided') + + testcase = os.path.join(consts.SAMPLE_CASE_DIR, '{}.yaml'.format(name)) + + task_id = str(uuid.uuid4()) + + task_args = { + 'inputfile': [testcase], + 'task_id': task_id + } + task_args.update(args.get('opts', {})) + + param = Param(task_args) + task_thread = TaskThread(Task().start, param) + task_thread.start() + + return result_handler(consts.API_SUCCESS, {'task_id': task_id}) diff --git a/api/resources/v1/testsuites.py b/api/resources/v1/testsuites.py new file mode 100644 index 000000000..5f72c2ea6 --- /dev/null +++ b/api/resources/v1/testsuites.py @@ -0,0 +1,64 @@ +############################################################################## +# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others. +# +# All rights reserved. 
This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## +from __future__ import absolute_import +import uuid +import os +import logging + +from flasgger.utils import swag_from + +from api import ApiResource +from api.utils.thread import TaskThread +from yardstick.common import constants as consts +from yardstick.common.utils import result_handler +from yardstick.benchmark.core import Param +from yardstick.benchmark.core.task import Task +from api.swagger import models + +LOG = logging.getLogger(__name__) +LOG.setLevel(logging.DEBUG) + + +TestSuiteActionModel = models.TestSuiteActionModel +TestSuiteActionArgsModel = models.TestSuiteActionArgsModel +TestSuiteActionArgsOptsModel = models.TestSuiteActionArgsOptsModel +TestSuiteActionArgsOptsTaskArgModel = \ + models.TestSuiteActionArgsOptsTaskArgModel + + +class V1Testsuite(ApiResource): + + @swag_from(os.path.join(consts.REPOS_DIR, + 'api/swagger/docs/testsuites_action.yaml')) + def post(self): + return self._dispatch_post() + + def run_test_suite(self, args): + try: + name = args['testsuite'] + except KeyError: + return result_handler(consts.API_ERROR, + 'testsuite must be provided') + + testsuite = os.path.join(consts.TESTSUITE_DIR, '{}.yaml'.format(name)) + + task_id = str(uuid.uuid4()) + + task_args = { + 'inputfile': [testsuite], + 'task_id': task_id, + 'suite': True + } + task_args.update(args.get('opts', {})) + + param = Param(task_args) + task_thread = TaskThread(Task().start, param) + task_thread.start() + + return result_handler(consts.API_SUCCESS, {'task_id': task_id}) |
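For context, here is a hypothetical client-side sketch of driving the refactored v1 resources. The URL paths, the {'action': ..., 'args': ...} request body, and the {'status': ..., 'result': ...} response envelope are assumptions inferred from _dispatch_post(), run_test_case(self, args) and result_handler(); the actual routes are registered in the API URL map, which is outside this diff.

```python
import time

import requests  # assumed to be available in the client environment

BASE = 'http://localhost:8888/yardstick'  # assumed API root

# Trigger a release test case; V1ReleaseCase.run_test_case reads args['testcase'].
resp = requests.post(BASE + '/testcases/release/action',
                     json={'action': 'run_test_case',
                           'args': {'testcase': 'opnfv_yardstick_tc002'}})
task_id = resp.json()['result']['task_id']

# Poll the results resource until the task leaves the not-done state; the
# numeric status codes mirror the 0/1/2 values written by the task handlers.
while True:
    result = requests.get(BASE + '/results', params={'task_id': task_id}).json()
    if result['status'] != 0:
        break
    time.sleep(5)

print(result)
```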