Diffstat (limited to 'api/resources')
-rw-r--r-- | api/resources/v1/testcases.py  |   5
-rw-r--r-- | api/resources/v2/containers.py | 383
-rw-r--r-- | api/resources/v2/images.py     |  72
-rw-r--r-- | api/resources/v2/pods.py       |  21
-rw-r--r-- | api/resources/v2/projects.py   |  97
-rw-r--r-- | api/resources/v2/tasks.py      | 245
-rw-r--r-- | api/resources/v2/testcases.py  |  62
-rw-r--r-- | api/resources/v2/testsuites.py |  81
8 files changed, 964 insertions, 2 deletions
diff --git a/api/resources/v1/testcases.py b/api/resources/v1/testcases.py
index fbeb36f31..f15947245 100644
--- a/api/resources/v1/testcases.py
+++ b/api/resources/v1/testcases.py
@@ -22,6 +22,7 @@ from yardstick.common.utils import result_handler
 from api.utils.thread import TaskThread
 from api import ApiResource
 from api.swagger import models
+from api.database.v1.handlers import TasksHandler
 
 LOG = logging.getLogger(__name__)
 LOG.setLevel(logging.DEBUG)
@@ -80,7 +81,7 @@ class V1ReleaseCase(ApiResource):
         task_args.update(args.get('opts', {}))
 
         param = Param(task_args)
-        task_thread = TaskThread(Task().start, param)
+        task_thread = TaskThread(Task().start, param, TasksHandler())
         task_thread.start()
 
         return result_handler(consts.API_SUCCESS, {'task_id': task_id})
@@ -108,7 +109,7 @@ class V1SampleCase(ApiResource):
         task_args.update(args.get('opts', {}))
 
         param = Param(task_args)
-        task_thread = TaskThread(Task().start, param)
+        task_thread = TaskThread(Task().start, param, TasksHandler())
         task_thread.start()
 
         return result_handler(consts.API_SUCCESS, {'task_id': task_id})
diff --git a/api/resources/v2/containers.py b/api/resources/v2/containers.py
new file mode 100644
index 000000000..ce7130376
--- /dev/null
+++ b/api/resources/v2/containers.py
@@ -0,0 +1,383 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from __future__ import absolute_import
+
+import logging
+import threading
+import time
+import uuid
+import os
+import glob
+
+from six.moves import configparser
+from oslo_serialization import jsonutils
+from docker import Client
+
+from api import ApiResource
+from api.utils import influx
+from api.database.v2.handlers import V2ContainerHandler
+from api.database.v2.handlers import V2EnvironmentHandler
+from yardstick.common import constants as consts
+from yardstick.common import utils
+from yardstick.common.utils import result_handler
+from yardstick.common.utils import get_free_port
+from yardstick.common.httpClient import HttpClient
+
+
+LOG = logging.getLogger(__name__)
+LOG.setLevel(logging.DEBUG)
+
+environment_handler = V2EnvironmentHandler()
+container_handler = V2ContainerHandler()
+
+
+class V2Containers(ApiResource):
+
+    def post(self):
+        return self._dispatch_post()
+
+    def create_influxdb(self, args):
+        try:
+            environment_id = args['environment_id']
+        except KeyError:
+            return result_handler(consts.API_ERROR, 'environment_id must be provided')
+
+        try:
+            uuid.UUID(environment_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'invalid environment id')
+
+        try:
+            environment = environment_handler.get_by_uuid(environment_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'no such environment id')
+
+        container_info = environment.container_id
+        container_info = jsonutils.loads(container_info) if container_info else {}
+
+        if container_info.get('influxdb'):
+            return result_handler(consts.API_ERROR, 'influxdb container already exists')
+
+        name = 'influxdb-{}'.format(environment_id[:8])
+        port = get_free_port(consts.SERVER_IP)
+        container_id = str(uuid.uuid4())
+        LOG.info('%s will launch on: %s', name, port)
+
+        LOG.info('launch influxdb in background')
+        args = (name, port, container_id)
+        thread = threading.Thread(target=self._create_influxdb, args=args)
+        thread.start()
+
+        LOG.info('record container in database')
+        container_init_data = {
+            'uuid': container_id,
+            'environment_id': environment_id,
+            'name': name,
+            'port': port,
+            'status': 0
+        }
+        container_handler.insert(container_init_data)
+
+        LOG.info('update container in environment')
+        container_info['influxdb'] = container_id
+        environment_info = {'container_id': jsonutils.dumps(container_info)}
+        environment_handler.update_attr(environment_id, environment_info)
+
+        return result_handler(consts.API_SUCCESS, {'uuid': container_id})
+
+    def _check_image_exist(self, client, t):
+        return any(t in a['RepoTags'][0]
+                   for a in client.images() if a['RepoTags'])
+
+    def _create_influxdb(self, name, port, container_id):
+        client = Client(base_url=consts.DOCKER_URL)
+
+        try:
+            LOG.info('Checking if influxdb image exists')
+            if not self._check_image_exist(client, '%s:%s' %
+                                           (consts.INFLUXDB_IMAGE,
+                                            consts.INFLUXDB_TAG)):
+                LOG.info('Influxdb image does not exist, start pulling')
+                client.pull(consts.INFLUXDB_IMAGE, tag=consts.INFLUXDB_TAG)
+
+            LOG.info('Creating influxdb container')
+            container = self._create_influxdb_container(client, name, port)
+            LOG.info('Influxdb container is created')
+
+            time.sleep(5)
+
+            container = client.inspect_container(container['Id'])
+            ip = container['NetworkSettings']['Networks']['bridge']['IPAddress']
+            LOG.debug('container ip is: %s', ip)
+
+            LOG.info('Changing output to influxdb')
+            self._change_output_to_influxdb(ip)
+
+            LOG.info('Config influxdb')
+            self._config_influxdb()
+
+            container_handler.update_attr(container_id, {'status': 1})
+
+            LOG.info('Finished')
+        except Exception:
+            container_handler.update_attr(container_id, {'status': 2})
+            LOG.exception('Creating influxdb failed')
+
+    def _create_influxdb_container(self, client, name, port):
+
+        ports = [port]
+        port_bindings = {8086: port}
+        restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
+        host_config = client.create_host_config(port_bindings=port_bindings,
+                                                restart_policy=restart_policy)
+
+        LOG.info('Creating container')
+        container = client.create_container(image='%s:%s' %
+                                            (consts.INFLUXDB_IMAGE,
+                                             consts.INFLUXDB_TAG),
+                                            ports=ports,
+                                            name=name,
+                                            detach=True,
+                                            tty=True,
+                                            host_config=host_config)
+        LOG.info('Starting container')
+        client.start(container)
+        return container
+
+    def _config_influxdb(self):
+        try:
+            client = influx.get_data_db_client()
+            client.create_user(consts.INFLUXDB_USER,
+                               consts.INFLUXDB_PASS,
+                               consts.INFLUXDB_DB_NAME)
+            client.create_database(consts.INFLUXDB_DB_NAME)
+            LOG.info('Influxdb configured successfully')
+        except Exception:
+            LOG.exception('Config influxdb failed')
+
+    def _change_output_to_influxdb(self, ip):
+        utils.makedirs(consts.CONF_DIR)
+
+        parser = configparser.ConfigParser()
+        LOG.info('Reading output sample configuration')
+        parser.read(consts.CONF_SAMPLE_FILE)
+
+        LOG.info('Set dispatcher to influxdb')
+        parser.set('DEFAULT', 'dispatcher', 'influxdb')
+        parser.set('dispatcher_influxdb', 'target',
+                   'http://{}:{}'.format(ip, 8086))
+
+        LOG.info('Writing to %s', consts.CONF_FILE)
+        with open(consts.CONF_FILE, 'w') as f:
+            parser.write(f)
+
+    def create_grafana(self, args):
+        try:
+            environment_id = args['environment_id']
+        except KeyError:
+            return result_handler(consts.API_ERROR, 'environment_id must be provided')
+
+        try:
+            uuid.UUID(environment_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'invalid environment id')
+
+        try:
+            environment = environment_handler.get_by_uuid(environment_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'no such environment id')
+
+        container_info = environment.container_id
+        container_info = jsonutils.loads(container_info) if container_info else {}
+
+        if not container_info.get('influxdb'):
+            return result_handler(consts.API_ERROR, 'influxdb not set')
+
+        if container_info.get('grafana'):
+            return result_handler(consts.API_ERROR, 'grafana container already exists')
+
+        name = 'grafana-{}'.format(environment_id[:8])
+        port = get_free_port(consts.SERVER_IP)
+        container_id = str(uuid.uuid4())
+
+        args = (name, port, container_id)
+        thread = threading.Thread(target=self._create_grafana, args=args)
+        thread.start()
+
+        container_init_data = {
+            'uuid': container_id,
+            'environment_id': environment_id,
+            'name': name,
+            'port': port,
+            'status': 0
+        }
+        container_handler.insert(container_init_data)
+
+        container_info['grafana'] = container_id
+        environment_info = {'container_id': jsonutils.dumps(container_info)}
+        environment_handler.update_attr(environment_id, environment_info)
+
+        return result_handler(consts.API_SUCCESS, {'uuid': container_id})
+
+    def _create_grafana(self, name, port, container_id):
+        client = Client(base_url=consts.DOCKER_URL)
+
+        try:
+            LOG.info('Checking if grafana image exists')
+            image = '{}:{}'.format(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
+            if not self._check_image_exist(client, image):
+                LOG.info('Grafana image does not exist, start pulling')
+                client.pull(consts.GRAFANA_IMAGE, consts.GRAFANA_TAG)
+
+            LOG.info('Creating grafana container')
+            container = self._create_grafana_container(client, name, port)
+            LOG.info('Grafana container is created')
+
+            time.sleep(5)
+
+            container = client.inspect_container(container['Id'])
+            ip = container['NetworkSettings']['Networks']['bridge']['IPAddress']
+            LOG.debug('container ip is: %s', ip)
+
+            LOG.info('Creating data source for grafana')
+            self._create_data_source(ip)
+
+            LOG.info('Creating dashboard for grafana')
+            self._create_dashboard(ip)
+
+            container_handler.update_attr(container_id, {'status': 1})
+            LOG.info('Finished')
+        except Exception:
+            container_handler.update_attr(container_id, {'status': 2})
+            LOG.exception('Create grafana failed')
+
+    def _create_dashboard(self, ip):
+        url = 'http://admin:admin@{}:{}/api/dashboards/db'.format(ip, 3000)
+        path = os.path.join(consts.REPOS_DIR, 'dashboard', '*dashboard.json')
+
+        for i in sorted(glob.iglob(path)):
+            with open(i) as f:
+                data = jsonutils.load(f)
+            try:
+                HttpClient().post(url, data)
+            except Exception:
+                LOG.exception('Create dashboard %s failed', i)
+                raise
+
+    def _create_data_source(self, ip):
+        url = 'http://admin:admin@{}:{}/api/datasources'.format(ip, 3000)
+
+        influx_conf = utils.parse_ini_file(consts.CONF_FILE)
+        try:
+            influx_url = influx_conf['dispatcher_influxdb']['target']
+        except KeyError:
+            LOG.exception('influxdb url not set in yardstick.conf')
+            raise
+
+        data = {
+            "name": "yardstick",
+            "type": "influxdb",
+            "access": "proxy",
+            "url": influx_url,
+            "password": "root",
+            "user": "root",
+            "database": "yardstick",
+            "basicAuth": True,
+            "basicAuthUser": "admin",
+            "basicAuthPassword": "admin",
+            "isDefault": False,
+        }
+        try:
+            HttpClient().post(url, data)
+        except Exception:
+            LOG.exception('Create datasources failed')
+            raise
+
+    def _create_grafana_container(self, client, name, port):
+        ports = [3000]
+        port_bindings = {3000: port}
+        restart_policy = {"MaximumRetryCount": 0, "Name": "always"}
+        host_config = client.create_host_config(port_bindings=port_bindings,
+                                                restart_policy=restart_policy)
+
+        LOG.info('Creating container')
+        container = client.create_container(image='%s:%s' %
+                                            (consts.GRAFANA_IMAGE,
+                                             consts.GRAFANA_TAG),
+                                            name=name,
+                                            ports=ports,
+                                            detach=True,
+                                            tty=True,
+                                            host_config=host_config)
+        LOG.info('Starting container')
+        client.start(container)
+        return container


+class V2Container(ApiResource):
+
+    def get(self, container_id):
+        try:
+            uuid.UUID(container_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'invalid container id')
+
+        try:
+            container = container_handler.get_by_uuid(container_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'no such container id')
+
+        name = container.name
+        client = Client(base_url=consts.DOCKER_URL)
+        info = client.inspect_container(name)
+
+        data = {
+            'name': name,
+            'status': info.get('State', {}).get('Status', 'error'),
+            'time': info.get('Created'),
+            'port': container.port
+        }
+
+        return result_handler(consts.API_SUCCESS, {'container': data})
+
+    def delete(self, container_id):
+        try:
+            uuid.UUID(container_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'invalid container id')
+
+        try:
+            container = container_handler.get_by_uuid(container_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'no such container id')
+
+        environment_id = container.environment_id
+
+        client = Client(base_url=consts.DOCKER_URL)
+        LOG.info('delete container: %s', container.name)
+        try:
+            client.remove_container(container.name, force=True)
+        except Exception:
+            LOG.exception('delete container failed')
+            return result_handler(consts.API_ERROR, 'delete container failed')
+
+        LOG.info('delete container in database')
+        container_handler.delete_by_uuid(container_id)
+
+        LOG.info('update container in environment')
+        environment = environment_handler.get_by_uuid(environment_id)
+        container_info = jsonutils.loads(environment.container_id)
+        key = next((k for k, v in container_info.items() if v == container_id))
+        container_info.pop(key)
+        environment_delete_data = {
+            'container_id': jsonutils.dumps(container_info)
+        }
+        environment_handler.update_attr(environment_id, environment_delete_data)
+
+        return result_handler(consts.API_SUCCESS, {'container': container_id})
diff --git a/api/resources/v2/images.py b/api/resources/v2/images.py
new file mode 100644
index 000000000..701818493
--- /dev/null
+++ b/api/resources/v2/images.py
@@ -0,0 +1,72 @@
+import logging
+import subprocess
+import threading
+
+from api import ApiResource
+from yardstick.common.utils import result_handler
+from yardstick.common.utils import source_env
+from yardstick.common.utils import change_obj_to_dict
+from yardstick.common.openstack_utils import get_nova_client
+from yardstick.common import constants as consts
+
+LOG = logging.getLogger(__name__)
+LOG.setLevel(logging.DEBUG)
+
+
+class V2Images(ApiResource):
+
+    def get(self):
+        try:
+            source_env(consts.OPENRC)
+        except Exception:
+            return result_handler(consts.API_ERROR, 'source openrc error')
+
+        nova_client = get_nova_client()
+        try:
+            images_list = nova_client.images.list()
+        except Exception:
+            return result_handler(consts.API_ERROR, 'get images error')
+        else:
+            images = [self.get_info(change_obj_to_dict(i)) for i in images_list]
+            status = 1 if all(i['status'] == 'ACTIVE' for i in images) else 0
+
+        return result_handler(consts.API_SUCCESS, {'status': status, 'images': images})
+
+    def post(self):
+        return self._dispatch_post()
+
+    def get_info(self, data):
+        result = {
+            'name': data.get('name', ''),
+            'size': data.get('OS-EXT-IMG-SIZE:size', ''),
+            'status': data.get('status', ''),
+            'time': data.get('updated', '')
+        }
+        return result
+
+    def load_image(self, args):
+        thread = threading.Thread(target=self._load_images)
+        thread.start()
+        return result_handler(consts.API_SUCCESS, {})
+
+    def _load_images(self):
+        LOG.info('source openrc')
+        source_env(consts.OPENRC)
+
+        LOG.info('clean images')
+        cmd = [consts.CLEAN_IMAGES_SCRIPT]
+        p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+                             cwd=consts.REPOS_DIR)
+        _, err = p.communicate()
+        if p.returncode != 0:
+            LOG.error('clean image failed: %s', err)
+
+        LOG.info('load images')
+        cmd = [consts.LOAD_IMAGES_SCRIPT]
+        p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+                             cwd=consts.REPOS_DIR)
+        _, err = p.communicate()
+        if p.returncode != 0:
+            LOG.error('load image failed: %s', err)
+
+        LOG.info('Done')
diff --git a/api/resources/v2/pods.py b/api/resources/v2/pods.py
index ffb8a6046..ebc1312da 100644
--- a/api/resources/v2/pods.py
+++ b/api/resources/v2/pods.py
@@ -77,3 +77,24 @@ class V2Pod(ApiResource):
         content = jsonutils.loads(pod.content)
 
         return result_handler(consts.API_SUCCESS, {'pod': content})
+
+    def delete(self, pod_id):
+        try:
+            uuid.UUID(pod_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'invalid pod id')
+
+        pod_handler = V2PodHandler()
+        try:
+            pod = pod_handler.get_by_uuid(pod_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'no such pod')
+
+        LOG.info('update pod in environment')
+        environment_handler = V2EnvironmentHandler()
+        environment_handler.update_attr(pod.environment_id, {'pod_id': None})
+
+        LOG.info('delete pod in database')
+        pod_handler.delete_by_uuid(pod_id)
+
+        return result_handler(consts.API_SUCCESS, {'pod': pod_id})
diff --git a/api/resources/v2/projects.py b/api/resources/v2/projects.py
new file mode 100644
index 000000000..376cf1a37
--- /dev/null
+++ b/api/resources/v2/projects.py
@@ -0,0 +1,97 @@
+import uuid
+import logging
+
+from datetime import datetime
+
+from api import ApiResource
+from api.database.v2.handlers import V2ProjectHandler
+from api.database.v2.handlers import V2TaskHandler
+from yardstick.common.utils import result_handler
+from yardstick.common.utils import change_obj_to_dict
+from yardstick.common import constants as consts
+
+LOG = logging.getLogger(__name__)
+LOG.setLevel(logging.DEBUG)
+
+
+class V2Projects(ApiResource):
+
+    def get(self):
+        project_handler = V2ProjectHandler()
+        projects = [change_obj_to_dict(p) for p in project_handler.list_all()]
+
+        for p in projects:
+            tasks = p['tasks']
+            p['tasks'] = tasks.split(',') if tasks else []
+
+        return result_handler(consts.API_SUCCESS, {'projects': projects})
+
+    def post(self):
+        return self._dispatch_post()
+
+    def create_project(self, args):
+        try:
+            name = args['name']
+        except KeyError:
+            return result_handler(consts.API_ERROR, 'name must be provided')
+
+        project_id = str(uuid.uuid4())
+        create_time = datetime.now()
+        project_handler = V2ProjectHandler()
+
+        project_init_data = {
+            'uuid': project_id,
+            'name': name,
+            'time': create_time
+        }
+        project_handler.insert(project_init_data)
+
+        return result_handler(consts.API_SUCCESS, {'uuid': project_id})
+
+
+class V2Project(ApiResource):
+
+    def get(self, project_id):
+        try:
+            uuid.UUID(project_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'invalid project id')
+
+        project_handler = V2ProjectHandler()
+        try:
+            project = project_handler.get_by_uuid(project_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'no such project id')
+
+        project_info = change_obj_to_dict(project)
+        tasks = project_info['tasks']
+        project_info['tasks'] = tasks.split(',') if tasks else []
+
+        return result_handler(consts.API_SUCCESS, {'project': project_info})
+
+    def delete(self, project_id):
+        try:
+            uuid.UUID(project_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'invalid project id')
+
+        project_handler = V2ProjectHandler()
+        try:
+            project = project_handler.get_by_uuid(project_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'no such project id')
+
+        if project.tasks:
+            LOG.info('delete related task')
+            task_handler = V2TaskHandler()
+            for task_id in project.tasks.split(','):
+                LOG.debug('delete task: %s', task_id)
+                try:
+                    task_handler.delete_by_uuid(task_id)
+                except ValueError:
+                    LOG.exception('no such task id: %s', task_id)
+
+        LOG.info('delete project in database')
+        project_handler.delete_by_uuid(project_id)
+
+        return result_handler(consts.API_SUCCESS, {'project': project_id})
diff --git a/api/resources/v2/tasks.py b/api/resources/v2/tasks.py
new file mode 100644
index 000000000..9790d7640
--- /dev/null
+++ b/api/resources/v2/tasks.py
@@ -0,0 +1,245 @@
+import uuid
+import logging
+from datetime import datetime
+
+from oslo_serialization import jsonutils
+
+from api import ApiResource
+from api.database.v2.handlers import V2TaskHandler
+from api.database.v2.handlers import V2ProjectHandler
+from api.database.v2.handlers import V2EnvironmentHandler
+from api.utils.thread import TaskThread
+from yardstick.common.utils import result_handler
+from yardstick.common.utils import change_obj_to_dict
+from yardstick.common import constants as consts
+from yardstick.benchmark.core.task import Task
+from yardstick.benchmark.core import Param
+
+LOG = logging.getLogger(__name__)
+LOG.setLevel(logging.DEBUG)
+
+
+class V2Tasks(ApiResource):
+
+    def get(self):
+        task_handler = V2TaskHandler()
+        tasks = [change_obj_to_dict(t) for t in task_handler.list_all()]
+
+        for t in tasks:
+            result = t['result']
+            t['result'] = jsonutils.loads(result) if result else None
+
+        return result_handler(consts.API_SUCCESS, {'tasks': tasks})
+
+    def post(self):
+        return self._dispatch_post()
+
+    def create_task(self, args):
+        try:
+            name = args['name']
+        except KeyError:
+            return result_handler(consts.API_ERROR, 'name must be provided')
+
+        try:
+            project_id = args['project_id']
+        except KeyError:
+            return result_handler(consts.API_ERROR, 'project_id must be provided')
+
+        task_id = str(uuid.uuid4())
+        create_time = datetime.now()
+        task_handler = V2TaskHandler()
+
+        LOG.info('create task in database')
+        task_init_data = {
+            'uuid': task_id,
+            'project_id': project_id,
+            'name': name,
+            'time': create_time,
+            'status': -1
+        }
+        task_handler.insert(task_init_data)
+
+        LOG.info('create task in project')
+        project_handler = V2ProjectHandler()
+        project_handler.append_attr(project_id, {'tasks': task_id})
+
+        return result_handler(consts.API_SUCCESS, {'uuid': task_id})
+
+
+class V2Task(ApiResource):
+
+    def get(self, task_id):
+        try:
+            uuid.UUID(task_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'invalid task id')
+
+        task_handler = V2TaskHandler()
+        try:
+            task = task_handler.get_by_uuid(task_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'no such task id')
+
+        task_info = change_obj_to_dict(task)
+        result = task_info['result']
+        task_info['result'] = jsonutils.loads(result) if result else None
+
+        return result_handler(consts.API_SUCCESS, {'task': task_info})
+
+    def delete(self, task_id):
+        try:
+            uuid.UUID(task_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'invalid task id')
+
+        task_handler = V2TaskHandler()
+        try:
+            project_id = task_handler.get_by_uuid(task_id).project_id
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'no such task id')
+
+        LOG.info('delete task in database')
+        task_handler.delete_by_uuid(task_id)
+
+        project_handler = V2ProjectHandler()
+        project = project_handler.get_by_uuid(project_id)
+
+        if project.tasks:
+            LOG.info('update tasks in project')
+            new_task_list = [t for t in project.tasks.split(',') if t != task_id]
+            if new_task_list:
+                new_tasks = ','.join(new_task_list)
+            else:
+                new_tasks = None
+            project_handler.update_attr(project_id, {'tasks': new_tasks})
+
+        return result_handler(consts.API_SUCCESS, {'task': task_id})
+
+    def put(self, task_id):
+
+        try:
+            uuid.UUID(task_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'invalid task id')
+
+        task_handler = V2TaskHandler()
+        try:
+            task_handler.get_by_uuid(task_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'no such task id')
+
+        return self._dispatch_post(task_id=task_id)
+
+    def add_environment(self, args):
+
+        task_id = args['task_id']
+        try:
+            environment_id = args['environment_id']
+        except KeyError:
+            return result_handler(consts.API_ERROR, 'environment_id must be provided')
+
+        try:
+            uuid.UUID(environment_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'invalid environment id')
+
+        environment_handler = V2EnvironmentHandler()
+        try:
+            environment_handler.get_by_uuid(environment_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'no such environment id')
+
+        LOG.info('update environment_id in task')
+        task_handler = V2TaskHandler()
+        task_handler.update_attr(task_id, {'environment_id': environment_id})
+
+        return result_handler(consts.API_SUCCESS, {'uuid': task_id})
+
+    def add_case(self, args):
+        task_id = args['task_id']
+        try:
+            name = args['case_name']
+        except KeyError:
+            return result_handler(consts.API_ERROR, 'case_name must be provided')
+
+        try:
+            content = args['case_content']
+        except KeyError:
+            return result_handler(consts.API_ERROR, 'case_content must be provided')
+
+        LOG.info('update case info in task')
+        task_handler = V2TaskHandler()
+        task_update_data = {
+            'case_name': name,
+            'content': content,
+            'suite': False
+        }
+        task_handler.update_attr(task_id, task_update_data)
+
+        return result_handler(consts.API_SUCCESS, {'uuid': task_id})
+
+    def add_suite(self, args):
+        task_id = args['task_id']
+        try:
+            name = args['suite_name']
+        except KeyError:
+            return result_handler(consts.API_ERROR, 'suite_name must be provided')
+
+        try:
+            content = args['suite_content']
+        except KeyError:
+            return result_handler(consts.API_ERROR, 'suite_content must be provided')
+
+        LOG.info('update suite info in task')
+        task_handler = V2TaskHandler()
+        task_update_data = {
+            'case_name': name,
+            'content': content,
+            'suite': True
+        }
+        task_handler.update_attr(task_id, task_update_data)
+
+        return result_handler(consts.API_SUCCESS, {'uuid': task_id})
+
+    def run(self, args):
+        try:
+            task_id = args['task_id']
+        except KeyError:
+            return result_handler(consts.API_ERROR, 'task_id must be provided')
+
+        try:
+            uuid.UUID(task_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'invalid task id')
+
+        task_handler = V2TaskHandler()
+        try:
+            task = task_handler.get_by_uuid(task_id)
+        except ValueError:
+            return result_handler(consts.API_ERROR, 'no such task id')
+
+        if not task.environment_id:
+            return result_handler(consts.API_ERROR, 'environment not set')
+
+        if not task.case_name or not task.content:
+            return result_handler(consts.API_ERROR, 'case not set')
+
+        if task.status == 0:
+            return result_handler(consts.API_ERROR, 'task is already running')
+
+        with open('/tmp/{}.yaml'.format(task.case_name), 'w') as f:
+            f.write(task.content)
+
+        data = {
+            'inputfile': ['/tmp/{}.yaml'.format(task.case_name)],
+            'task_id': task_id
+        }
+        if task.suite:
+            data.update({'suite': True})
+
+        LOG.info('start task thread')
+        param = Param(data)
+        task_thread = TaskThread(Task().start, param, task_handler)
+        task_thread.start()
+
+        return result_handler(consts.API_SUCCESS, {'uuid': task_id})
diff --git a/api/resources/v2/testcases.py b/api/resources/v2/testcases.py
new file mode 100644
index 000000000..81b4aa88c
--- /dev/null
+++ b/api/resources/v2/testcases.py
@@ -0,0 +1,62 @@
+import logging
+import errno
+import os
+
+from api import ApiResource
+from yardstick.common.utils import result_handler
+from yardstick.common import constants as consts
+from yardstick.benchmark.core import Param
+from yardstick.benchmark.core.testcase import Testcase
+
+LOG = logging.getLogger(__name__)
+LOG.setLevel(logging.DEBUG)
+
+
+class V2Testcases(ApiResource):
+
+    def get(self):
+        param = Param({})
+        testcase_list = Testcase().list_all(param)
+        return result_handler(consts.API_SUCCESS, testcase_list)
+
+    def post(self):
+        return self._dispatch_post()
+
+    def upload_case(self, args):
+        try:
+            upload_file = args['file']
+        except KeyError:
+            return result_handler(consts.API_ERROR, 'file must be provided')
+
+        case_name = os.path.join(consts.TESTCASE_DIR, upload_file.filename)
+
+        LOG.info('save case file')
+        upload_file.save(case_name)
+
+        return result_handler(consts.API_SUCCESS, {'testcase': upload_file.filename})
+
+
+class V2Testcase(ApiResource):
+
+    def get(self, case_name):
+        case_path = os.path.join(consts.TESTCASE_DIR, '{}.yaml'.format(case_name))
+
+        try:
+            with open(case_path) as f:
+                data = f.read()
+        except IOError as e:
+            if e.errno == errno.ENOENT:
+                return result_handler(consts.API_ERROR, 'case does not exist')
+
+        return result_handler(consts.API_SUCCESS, {'testcase': data})
+
+    def delete(self, case_name):
+        case_path = os.path.join(consts.TESTCASE_DIR, '{}.yaml'.format(case_name))
+
+        try:
+            os.remove(case_path)
+        except IOError as e:
+            if e.errno == errno.ENOENT:
+                return result_handler(consts.API_ERROR, 'case does not exist')
+
+        return result_handler(consts.API_SUCCESS, {'testcase': case_name})
diff --git a/api/resources/v2/testsuites.py b/api/resources/v2/testsuites.py
new file mode 100644
index 000000000..ee942eff9
--- /dev/null
+++ b/api/resources/v2/testsuites.py
@@ -0,0 +1,81 @@
+import os
+import errno
+import logging
+
+import yaml
+
+from api import ApiResource
+from yardstick.common.utils import result_handler
+from yardstick.common import constants as consts
+from yardstick.benchmark.core.testsuite import Testsuite
+from yardstick.benchmark.core import Param
+
+LOG = logging.getLogger(__name__)
+LOG.setLevel(logging.DEBUG)
+
+
+class V2Testsuites(ApiResource):
+
+    def get(self):
+        param = Param({})
+        testsuite_list = Testsuite().list_all(param)
+
+        data = {
+            'testsuites': testsuite_list
+        }
+
+        return result_handler(consts.API_SUCCESS, data)
+
+    def post(self):
+        return self._dispatch_post()
+
+    def create_suite(self, args):
+        try:
+            suite_name = args['name']
+        except KeyError:
+            return result_handler(consts.API_ERROR, 'name must be provided')
+
+        try:
+            testcases = args['testcases']
+        except KeyError:
+            return result_handler(consts.API_ERROR, 'testcases must be provided')
+
+        testcases = [{'file_name': '{}.yaml'.format(t)} for t in testcases]
+
+        suite = os.path.join(consts.TESTSUITE_DIR, '{}.yaml'.format(suite_name))
+        suite_content = {
+            'schema': 'yardstick:suite:0.1',
+            'name': suite_name,
+            'test_cases_dir': 'tests/opnfv/test_cases/',
+            'test_cases': testcases
+        }
+
+        LOG.info('write test suite')
+        with open(suite, 'w') as f:
+            yaml.dump(suite_content, f, default_flow_style=False)
+
+        return result_handler(consts.API_SUCCESS, {'suite': suite_name})
+
+
+class V2Testsuite(ApiResource):
+
+    def get(self, suite_name):
+        suite_path = os.path.join(consts.TESTSUITE_DIR, '{}.yaml'.format(suite_name))
+        try:
+            with open(suite_path) as f:
+                data = f.read()
+        except IOError as e:
+            if e.errno == errno.ENOENT:
+                return result_handler(consts.API_ERROR, 'suite does not exist')
+
+        return result_handler(consts.API_SUCCESS, {'testsuite': data})
+
+    def delete(self, suite_name):
+        suite_path = os.path.join(consts.TESTSUITE_DIR, '{}.yaml'.format(suite_name))
+        try:
+            os.remove(suite_path)
+        except IOError as e:
+            if e.errno == errno.ENOENT:
+                return result_handler(consts.API_ERROR, 'suite does not exist')
+
+        return result_handler(consts.API_SUCCESS, {'testsuite': suite_name})
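
For reviewers who want to exercise the new v2 task workflow introduced above, the sketch below shows one way it could be driven over HTTP. It is illustrative only: the base URL and route prefix, the {'action': ..., 'args': {...}} request body dispatched by _dispatch_post(), and the {'status': ..., 'result': {...}} response envelope produced by result_handler() are assumptions drawn from the surrounding yardstick API conventions, not something defined in this change.

# Illustrative sketch only -- routes, port, and payload shape are assumptions (see note above).
import requests

BASE = 'http://localhost:8888/api/v2/yardstick'   # assumed API mount point


def call(method, path, action, **args):
    # The v2 resources shown above dispatch on a JSON body of {'action': ..., 'args': {...}}.
    resp = requests.request(method, '{}/{}'.format(BASE, path),
                            json={'action': action, 'args': args})
    resp.raise_for_status()
    return resp.json()


# 1. Create a project, then a task that belongs to it (V2Projects / V2Tasks).
project_id = call('POST', 'projects', 'create_project',
                  name='demo-project')['result']['uuid']
task_id = call('POST', 'tasks', 'create_task',
               name='demo-task', project_id=project_id)['result']['uuid']

# 2. Bind an environment and a test case to the task; V2Task.put() above
#    forwards these actions (add_environment, add_case, run) to the handlers.
call('PUT', 'tasks/{}'.format(task_id), 'add_environment',
     environment_id='<environment-uuid>')
call('PUT', 'tasks/{}'.format(task_id), 'add_case',
     case_name='opnfv_yardstick_tc002',
     case_content=open('opnfv_yardstick_tc002.yaml').read())

# 3. Run the task; status and results can then be polled with GET on tasks/<task_id>.
call('PUT', 'tasks/{}'.format(task_id), 'run')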