-rw-r--r--  api/__init__.py                                                          1
-rw-r--r--  api/database/v2/handlers.py                                              3
-rw-r--r--  api/resources/v2/__init__.py                                             0
-rw-r--r--  api/resources/v2/environments.py                                       117
-rw-r--r--  api/resources/v2/openrcs.py                                            211
-rw-r--r--  api/resources/v2/pods.py                                                79
-rw-r--r--  api/server.py                                                           13
-rw-r--r--  api/urls.py                                                             35
-rw-r--r--  docs/release/release-notes/release-notes.rst                            66
-rw-r--r--  tests/unit/benchmark/scenarios/availability/test_monitor_command.py      8
-rw-r--r--  tests/unit/benchmark/scenarios/availability/test_monitor_multi.py        2
-rw-r--r--  tests/unit/benchmark/scenarios/storage/test_storperf.py                  4
-rw-r--r--  tests/unit/common/test_utils.py                                         13
-rw-r--r--  tests/unit/network_services/vnf_generic/vnf/test_vpe_vnf.py             44
-rw-r--r--  yardstick/benchmark/scenarios/availability/monitor/monitor_command.py    5
-rw-r--r--  yardstick/common/constants.py                                            2
-rw-r--r--  yardstick/common/utils.py                                               10
17 files changed, 568 insertions(+), 45 deletions(-)
diff --git a/api/__init__.py b/api/__init__.py
index 4622802df..3195c971b 100644
--- a/api/__init__.py
+++ b/api/__init__.py
@@ -34,6 +34,7 @@ class ApiResource(Resource):
except KeyError:
pass
+ args.update({k: v for k, v in request.form.items()})
LOG.debug('Input args is: action: %s, args: %s', action, args)
return action, args
diff --git a/api/database/v2/handlers.py b/api/database/v2/handlers.py
index eb732817d..095ad724c 100644
--- a/api/database/v2/handlers.py
+++ b/api/database/v2/handlers.py
@@ -24,6 +24,9 @@ class V2EnvironmentHandler(object):
db_session.commit()
return environment
+ def list_all(self):
+ return V2Environment.query.all()
+
def get_by_uuid(self, uuid):
environment = V2Environment.query.filter_by(uuid=uuid).first()
if not environment:
diff --git a/api/resources/v2/__init__.py b/api/resources/v2/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/api/resources/v2/__init__.py
diff --git a/api/resources/v2/environments.py b/api/resources/v2/environments.py
new file mode 100644
index 000000000..e4679b0d6
--- /dev/null
+++ b/api/resources/v2/environments.py
@@ -0,0 +1,117 @@
+import uuid
+import logging
+
+from oslo_serialization import jsonutils
+from docker import Client
+
+from api import ApiResource
+from api.database.v2.handlers import V2EnvironmentHandler
+from api.database.v2.handlers import V2OpenrcHandler
+from api.database.v2.handlers import V2PodHandler
+from api.database.v2.handlers import V2ContainerHandler
+from yardstick.common.utils import result_handler
+from yardstick.common.utils import change_obj_to_dict
+from yardstick.common import constants as consts
+
+LOG = logging.getLogger(__name__)
+LOG.setLevel(logging.DEBUG)
+
+
+class V2Environments(ApiResource):
+
+ def get(self):
+ environment_handler = V2EnvironmentHandler()
+ environments = [change_obj_to_dict(e) for e in environment_handler.list_all()]
+
+ for e in environments:
+ container_info = e['container_id']
+ e['container_id'] = jsonutils.loads(container_info) if container_info else {}
+
+ data = {
+ 'environments': environments
+ }
+
+ return result_handler(consts.API_SUCCESS, data)
+
+ def post(self):
+ return self._dispatch_post()
+
+ def create_environment(self, args):
+ try:
+ name = args['name']
+ except KeyError:
+ return result_handler(consts.API_ERROR, 'name must be provided')
+
+ env_id = str(uuid.uuid4())
+
+ environment_handler = V2EnvironmentHandler()
+
+ env_init_data = {
+ 'name': name,
+ 'uuid': env_id
+ }
+ environment_handler.insert(env_init_data)
+
+ return result_handler(consts.API_SUCCESS, {'uuid': env_id})
+
+
+class V2Environment(ApiResource):
+
+ def get(self, environment_id):
+ try:
+ uuid.UUID(environment_id)
+ except ValueError:
+ return result_handler(consts.API_ERROR, 'invalid environment id')
+
+ environment_handler = V2EnvironmentHandler()
+ try:
+ environment = environment_handler.get_by_uuid(environment_id)
+ except ValueError:
+ return result_handler(consts.API_ERROR, 'no such environment id')
+
+ environment = change_obj_to_dict(environment)
+ container_id = environment['container_id']
+ environment['container_id'] = jsonutils.loads(container_id) if container_id else {}
+ return result_handler(consts.API_SUCCESS, {'environment': environment})
+
+ def delete(self, environment_id):
+ try:
+ uuid.UUID(environment_id)
+ except ValueError:
+ return result_handler(consts.API_ERROR, 'invalid environment id')
+
+ environment_handler = V2EnvironmentHandler()
+ try:
+ environment = environment_handler.get_by_uuid(environment_id)
+ except ValueError:
+ return result_handler(consts.API_ERROR, 'no such environment id')
+
+ if environment.openrc_id:
+ LOG.info('delete openrc: %s', environment.openrc_id)
+ openrc_handler = V2OpenrcHandler()
+ openrc_handler.delete_by_uuid(environment.openrc_id)
+
+ if environment.pod_id:
+ LOG.info('delete pod: %s', environment.pod_id)
+ pod_handler = V2PodHandler()
+ pod_handler.delete_by_uuid(environment.pod_id)
+
+ if environment.container_id:
+ LOG.info('delete containers')
+ container_info = jsonutils.loads(environment.container_id)
+
+ container_handler = V2ContainerHandler()
+ client = Client(base_url=consts.DOCKER_URL)
+ for k, v in container_info.items():
+ LOG.info('start delete: %s', k)
+ container = container_handler.get_by_uuid(v)
+ LOG.debug('container name: %s', container.name)
+ try:
+ client.remove_container(container.name, force=True)
+ except Exception:
+ LOG.exception('remove container failed')
+ container_handler.delete_by_uuid(v)
+
+ environment_handler.delete_by_uuid(environment_id)
+
+ return result_handler(consts.API_SUCCESS, {'environment': environment_id})
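
Note (not part of the patch): a minimal usage sketch of the new v2 environments endpoints registered in api/urls.py further down. The base URL, port and payload shape are assumptions inferred from the ApiResource action dispatch, not taken from this change.

    # Hypothetical client-side sketch; assumes the API server listens on localhost:5000.
    import requests

    BASE = 'http://localhost:5000/api/v2/yardstick'

    # Create an environment; 'action' selects create_environment via _dispatch_post().
    resp = requests.post(BASE + '/environments/action',
                         json={'action': 'create_environment',
                               'args': {'name': 'demo-env'}})
    env_uuid = resp.json()['result']['uuid']

    # List all environments, then fetch the one just created.
    print(requests.get(BASE + '/environments').json())
    print(requests.get(BASE + '/environments/' + env_uuid).json())

    # Deleting it also cleans up any bound openrc, pod and containers.
    requests.delete(BASE + '/environments/' + env_uuid)
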
diff --git a/api/resources/v2/openrcs.py b/api/resources/v2/openrcs.py
new file mode 100644
index 000000000..5f3b9382f
--- /dev/null
+++ b/api/resources/v2/openrcs.py
@@ -0,0 +1,211 @@
+import uuid
+import logging
+import re
+import os
+
+import yaml
+from oslo_serialization import jsonutils
+
+from api import ApiResource
+from api.database.v2.handlers import V2OpenrcHandler
+from api.database.v2.handlers import V2EnvironmentHandler
+from yardstick.common import constants as consts
+from yardstick.common.utils import result_handler
+from yardstick.common.utils import makedirs
+from yardstick.common.utils import source_env
+
+LOG = logging.getLogger(__name__)
+LOG.setLevel(logging.DEBUG)
+
+
+class V2Openrcs(ApiResource):
+
+ def post(self):
+ return self._dispatch_post()
+
+ def upload_openrc(self, args):
+ try:
+ upload_file = args['file']
+ except KeyError:
+ return result_handler(consts.API_ERROR, 'file must be provided')
+
+ try:
+ environment_id = args['environment_id']
+ except KeyError:
+ return result_handler(consts.API_ERROR, 'environment_id must be provided')
+
+ try:
+ uuid.UUID(environment_id)
+ except ValueError:
+ return result_handler(consts.API_ERROR, 'invalid environment id')
+
+ LOG.info('writing openrc: %s', consts.OPENRC)
+ makedirs(consts.CONF_DIR)
+ upload_file.save(consts.OPENRC)
+ source_env(consts.OPENRC)
+
+ LOG.info('parsing openrc')
+ try:
+ openrc_data = self._get_openrc_dict()
+ except Exception:
+ LOG.exception('parse openrc failed')
+ return result_handler(consts.API_ERROR, 'parse openrc failed')
+
+ openrc_id = str(uuid.uuid4())
+ self._write_into_database(environment_id, openrc_id, openrc_data)
+
+ LOG.info('writing ansible cloud conf')
+ try:
+ self._generate_ansible_conf_file(openrc_data)
+ except Exception:
+ LOG.exception('write cloud conf failed')
+ return result_handler(consts.API_ERROR, 'generate ansible conf failed')
+ LOG.info('finish writing ansible cloud conf')
+
+ return result_handler(consts.API_SUCCESS, {'openrc': openrc_data, 'uuid': openrc_id})
+
+ def update_openrc(self, args):
+ try:
+ openrc_vars = args['openrc']
+ except KeyError:
+ return result_handler(consts.API_ERROR, 'openrc must be provided')
+
+ try:
+ environment_id = args['environment_id']
+ except KeyError:
+ return result_handler(consts.API_ERROR, 'environment_id must be provided')
+
+ try:
+ uuid.UUID(environment_id)
+ except ValueError:
+ return result_handler(consts.API_ERROR, 'invalid environment id')
+
+ LOG.info('writing openrc: %s', consts.OPENRC)
+ makedirs(consts.CONF_DIR)
+
+ lines = ['export {}={}\n'.format(k, v) for k, v in openrc_vars.items()]
+ LOG.debug('writing: %s', ''.join(lines))
+ with open(consts.OPENRC, 'w') as f:
+ f.writelines(lines)
+ LOG.info('writing openrc: Done')
+
+ LOG.info('source openrc: %s', consts.OPENRC)
+ try:
+ source_env(consts.OPENRC)
+ except Exception:
+ LOG.exception('source openrc failed')
+ return result_handler(consts.API_ERROR, 'source openrc failed')
+ LOG.info('source openrc: Done')
+
+ openrc_id = str(uuid.uuid4())
+ self._write_into_database(environment_id, openrc_id, openrc_vars)
+
+ LOG.info('writing ansible cloud conf')
+ try:
+ self._generate_ansible_conf_file(openrc_vars)
+ except Exception:
+ LOG.exception('write cloud conf failed')
+ return result_handler(consts.API_ERROR, 'generate ansible conf failed')
+ LOG.info('finish writing ansible cloud conf')
+
+ return result_handler(consts.API_SUCCESS, {'openrc': openrc_vars, 'uuid': openrc_id})
+
+ def _write_into_database(self, environment_id, openrc_id, openrc_data):
+ LOG.info('writing openrc to database')
+ openrc_handler = V2OpenrcHandler()
+ openrc_init_data = {
+ 'uuid': openrc_id,
+ 'environment_id': environment_id,
+ 'content': jsonutils.dumps(openrc_data)
+ }
+ openrc_handler.insert(openrc_init_data)
+
+ LOG.info('binding openrc to environment: %s', environment_id)
+ environment_handler = V2EnvironmentHandler()
+ environment_handler.update_attr(environment_id, {'openrc_id': openrc_id})
+
+ def _get_openrc_dict(self):
+ with open(consts.OPENRC) as f:
+ content = f.readlines()
+
+ result = {}
+ for line in content:
+ m = re.search(r'(\ .*)=(.*)', line)
+ if m:
+ try:
+ value = os.environ[m.group(1).strip()]
+ except KeyError:
+ pass
+ else:
+ result.update({m.group(1).strip(): value})
+
+ return result
+
+ def _generate_ansible_conf_file(self, openrc_data):
+ ansible_conf = {
+ 'clouds': {
+ 'opnfv': {
+ 'auth': {
+ }
+ }
+ }
+ }
+ black_list = ['OS_IDENTITY_API_VERSION', 'OS_IMAGE_API_VERSION']
+
+ for k, v in openrc_data.items():
+ if k.startswith('OS') and k not in black_list:
+ key = k[3:].lower()
+ ansible_conf['clouds']['opnfv']['auth'][key] = v
+
+ try:
+ value = openrc_data['OS_IDENTITY_API_VERSION']
+ except KeyError:
+ pass
+ else:
+ ansible_conf['clouds']['opnfv']['identity_api_version'] = value
+
+ makedirs(consts.OPENSTACK_CONF_DIR)
+ with open(consts.CLOUDS_CONF, 'w') as f:
+ yaml.dump(ansible_conf, f, default_flow_style=False)
+
+
+class V2Openrc(ApiResource):
+
+ def get(self, openrc_id):
+ try:
+ uuid.UUID(openrc_id)
+ except ValueError:
+ return result_handler(consts.API_ERROR, 'invalid openrc id')
+
+ LOG.info('Getting openrc: %s', openrc_id)
+ openrc_handler = V2OpenrcHandler()
+ try:
+ openrc = openrc_handler.get_by_uuid(openrc_id)
+ except ValueError:
+ return result_handler(consts.API_ERROR, 'no such openrc id')
+
+ LOG.info('load openrc content')
+ content = jsonutils.loads(openrc.content)
+
+ return result_handler(consts.API_SUCCESS, {'openrc': content})
+
+ def delete(self, openrc_id):
+ try:
+ uuid.UUID(openrc_id)
+ except ValueError:
+ return result_handler(consts.API_ERROR, 'invalid openrc id')
+
+ LOG.info('Getting openrc: %s', openrc_id)
+ openrc_handler = V2OpenrcHandler()
+ try:
+ openrc = openrc_handler.get_by_uuid(openrc_id)
+ except ValueError:
+ return result_handler(consts.API_ERROR, 'no such openrc id')
+
+ LOG.info('update openrc in environment')
+ environment_handler = V2EnvironmentHandler()
+ environment_handler.update_attr(openrc.environment_id, {'openrc_id': None})
+
+ openrc_handler.delete_by_uuid(openrc_id)
+
+ return result_handler(consts.API_SUCCESS, {'openrc': openrc_id})
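
Note (not part of the patch): _generate_ansible_conf_file above maps OS_* openrc variables into an os-client-config style clouds file. A sketch of the mapping, with made-up credential values:

    # Input (parsed openrc variables; values are illustrative only):
    openrc_data = {
        'OS_AUTH_URL': 'http://192.168.0.1:5000/v3',
        'OS_USERNAME': 'admin',
        'OS_PASSWORD': 'secret',
        'OS_PROJECT_NAME': 'admin',
        'OS_IDENTITY_API_VERSION': '3',   # black-listed from 'auth', stored separately
    }
    # Resulting YAML written to consts.CLOUDS_CONF (/etc/openstack/clouds.yml):
    #
    # clouds:
    #   opnfv:
    #     auth:
    #       auth_url: http://192.168.0.1:5000/v3
    #       username: admin
    #       password: secret
    #       project_name: admin
    #     identity_api_version: '3'
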
diff --git a/api/resources/v2/pods.py b/api/resources/v2/pods.py
new file mode 100644
index 000000000..ffb8a6046
--- /dev/null
+++ b/api/resources/v2/pods.py
@@ -0,0 +1,79 @@
+import uuid
+import yaml
+import logging
+
+from oslo_serialization import jsonutils
+
+from api import ApiResource
+from api.database.v2.handlers import V2PodHandler
+from api.database.v2.handlers import V2EnvironmentHandler
+from yardstick.common import constants as consts
+from yardstick.common.utils import result_handler
+from yardstick.common.task_template import TaskTemplate
+
+LOG = logging.getLogger(__name__)
+LOG.setLevel(logging.DEBUG)
+
+
+class V2Pods(ApiResource):
+
+ def post(self):
+ return self._dispatch_post()
+
+ def upload_pod_file(self, args):
+ try:
+ upload_file = args['file']
+ except KeyError:
+ return result_handler(consts.API_ERROR, 'file must be provided')
+
+ try:
+ environment_id = args['environment_id']
+ except KeyError:
+ return result_handler(consts.API_ERROR, 'environment_id must be provided')
+
+ try:
+ uuid.UUID(environment_id)
+ except ValueError:
+ return result_handler(consts.API_ERROR, 'invalid environment id')
+
+ LOG.info('writing pod file: %s', consts.POD_FILE)
+ upload_file.save(consts.POD_FILE)
+
+ with open(consts.POD_FILE) as f:
+ data = yaml.safe_load(TaskTemplate.render(f.read()))
+ LOG.debug('pod content is: %s', data)
+
+ LOG.info('create pod in database')
+ pod_id = str(uuid.uuid4())
+ pod_handler = V2PodHandler()
+ pod_init_data = {
+ 'uuid': pod_id,
+ 'environment_id': environment_id,
+ 'content': jsonutils.dumps(data)
+ }
+ pod_handler.insert(pod_init_data)
+
+ LOG.info('update pod in environment')
+ environment_handler = V2EnvironmentHandler()
+ environment_handler.update_attr(environment_id, {'pod_id': pod_id})
+
+ return result_handler(consts.API_SUCCESS, {'uuid': pod_id, 'pod': data})
+
+
+class V2Pod(ApiResource):
+
+ def get(self, pod_id):
+ try:
+ uuid.UUID(pod_id)
+ except ValueError:
+ return result_handler(consts.API_ERROR, 'invalid pod id')
+
+ pod_handler = V2PodHandler()
+ try:
+ pod = pod_handler.get_by_uuid(pod_id)
+ except ValueError:
+ return result_handler(consts.API_ERROR, 'no such pod')
+
+ content = jsonutils.loads(pod.content)
+
+ return result_handler(consts.API_SUCCESS, {'pod': content})
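
Note (not part of the patch): upload_pod_file expects a multipart upload, which is why api/__init__.py now merges request.form into args; a hedged client sketch, again assuming the server runs on localhost:5000:

    # Hypothetical multipart upload of a pod file to the new v2 pods endpoint.
    import requests

    env_uuid = '9a9f8a1e-...'   # uuid returned when the environment was created
    with open('pod.yaml', 'rb') as f:
        resp = requests.post(
            'http://localhost:5000/api/v2/yardstick/pods/action',
            data={'action': 'upload_pod_file', 'environment_id': env_uuid},
            files={'file': f})
    print(resp.json())
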
diff --git a/api/server.py b/api/server.py
index c1548ca10..158b8a508 100644
--- a/api/server.py
+++ b/api/server.py
@@ -32,7 +32,7 @@ try:
except ImportError:
from urllib.parse import urljoin
-logger = logging.getLogger(__name__)
+LOG = logging.getLogger(__name__)
app = Flask(__name__)
@@ -62,7 +62,7 @@ def init_db():
return False
subclses = filter(func, inspect.getmembers(models, inspect.isclass))
- logger.debug('Import models: %s', [a[1] for a in subclses])
+ LOG.debug('Import models: %s', [a[1] for a in subclses])
Base.metadata.create_all(bind=engine)
@@ -77,12 +77,15 @@ def get_endpoint(url):
for u in urlpatterns:
- api.add_resource(get_resource(u.target), u.url, endpoint=get_endpoint(u.url))
+ try:
+ api.add_resource(get_resource(u.target), u.url, endpoint=get_endpoint(u.url))
+ except StopIteration:
+ LOG.error('url resource not found: %s', u.url)
if __name__ == '__main__':
_init_logging()
- logger.setLevel(logging.DEBUG)
- logger.info('Starting server')
+ LOG.setLevel(logging.DEBUG)
+ LOG.info('Starting server')
init_db()
app.run(host='0.0.0.0')
diff --git a/api/urls.py b/api/urls.py
index 4b9d4d191..5c7e9f7b5 100644
--- a/api/urls.py
+++ b/api/urls.py
@@ -19,5 +19,38 @@ urlpatterns = [
Url('/yardstick/testcases/<case_name>/docs', 'v1_case_docs'),
Url('/yardstick/testsuites/action', 'v1_test_suite'),
Url('/yardstick/results', 'v1_result'),
- Url('/yardstick/env/action', 'v1_env')
+ Url('/yardstick/env/action', 'v1_env'),
+
+ # api v2
+ Url('/api/v2/yardstick/environments', 'v2_environments'),
+ Url('/api/v2/yardstick/environments/action', 'v2_environments'),
+ Url('/api/v2/yardstick/environments/<environment_id>', 'v2_environment'),
+
+ Url('/api/v2/yardstick/openrcs/action', 'v2_openrcs'),
+ Url('/api/v2/yardstick/openrcs/<openrc_id>', 'v2_openrc'),
+
+ Url('/api/v2/yardstick/pods/action', 'v2_pods'),
+ Url('/api/v2/yardstick/pods/<pod_id>', 'v2_pod'),
+
+ Url('/api/v2/yardstick/images', 'v2_images'),
+ Url('/api/v2/yardstick/images/action', 'v2_images'),
+
+ Url('/api/v2/yardstick/containers/action', 'v2_containers'),
+ Url('/api/v2/yardstick/containers/<container_id>', 'v2_container'),
+
+ Url('/api/v2/yardstick/projects', 'v2_projects'),
+ Url('/api/v2/yardstick/projects/action', 'v2_projects'),
+ Url('/api/v2/yardstick/projects/<project_id>', 'v2_project'),
+
+ Url('/api/v2/yardstick/tasks', 'v2_tasks'),
+ Url('/api/v2/yardstick/tasks/action', 'v2_tasks'),
+ Url('/api/v2/yardstick/tasks/<task_id>', 'v2_task'),
+
+ Url('/api/v2/yardstick/testcases', 'v2_testcases'),
+ Url('/api/v2/yardstick/testcases/action', 'v2_testcases'),
+ Url('/api/v2/yardstick/testcases/<case_name>', 'v2_testcase'),
+
+ Url('/api/v2/yardstick/testsuites', 'v2_testsuites'),
+ Url('/api/v2/yardstick/testsuites/action', 'v2_testsuites'),
+ Url('/api/v2/yardstick/testsuites/<suite_name>', 'v2_testsuite')
]
diff --git a/docs/release/release-notes/release-notes.rst b/docs/release/release-notes/release-notes.rst
index d89f9ed24..6d55ada86 100644
--- a/docs/release/release-notes/release-notes.rst
+++ b/docs/release/release-notes/release-notes.rst
@@ -38,7 +38,11 @@ Version History
| *Date* | *Version* | *Comment* |
| | | |
+----------------+--------------------+---------------------------------+
-| | 3.0 | Yardstick for Danube release |
+| | 3.1 | Yardstick for Danube release |
+| | | |
+| | | Note: The 3.1 tag is due to git |
+| | | tag issue during Danube 3.0 |
+| | | release |
| | | |
+----------------+--------------------+---------------------------------+
| May 4th, 2017 | 2.0 | Yardstick for Danube release |
@@ -139,19 +143,19 @@ Release Data
| **Project** | Yardstick |
| | |
+--------------------------------------+--------------------------------------+
-| **Repo/tag** | yardstick/Danube.2.0 |
+| **Repo/tag** | yardstick/Danube.3.1 |
| | |
+--------------------------------------+--------------------------------------+
-| **Yardstick Docker image tag** | Danube.2.0 |
+| **Yardstick Docker image tag** | Danube.3.1 |
| | |
+--------------------------------------+--------------------------------------+
| **Release designation** | Danube |
| | |
+--------------------------------------+--------------------------------------+
-| **Release date** | May 4th, 2017 |
+| **Release date** | July 14th, 2017 |
| | |
+--------------------------------------+--------------------------------------+
-| **Purpose of the delivery** | OPNFV Danube release 2.0 |
+| **Purpose of the delivery** | OPNFV Danube release 3.0 |
| | |
+--------------------------------------+--------------------------------------+
@@ -171,7 +175,7 @@ Software Deliverables
---------------------
- - The Yardstick Docker image: https://hub.docker.com/r/opnfv/yardstick (tag: danube.2.0)
+ - The Yardstick Docker image: https://hub.docker.com/r/opnfv/yardstick (tag: danube.3.1)
**Contexts**
@@ -515,7 +519,7 @@ Feature additions
Scenario Matrix
===============
-For Danube 2.0, Yardstick was tested on the following scenarios:
+For Danube 3.0, Yardstick was tested on the following scenarios:
+-------------------------+---------+---------+---------+---------+
| Scenario | Apex | Compass | Fuel | Joid |
@@ -613,10 +617,50 @@ Known Issues/Faults
Corrected Faults
----------------
+Danube.3.1:
+
++----------------------------+------------------------------------------------+
+| **JIRA REFERENCE** | **DESCRIPTION** |
+| | |
++----------------------------+------------------------------------------------+
+| JIRA: YARDSTICK-714 | Add yardstick env influxdb/grafana command for |
+| | CentOS |
++----------------------------+------------------------------------------------+
+| JIRA: YARDSTICK-655 | Monitor command in tc019 may not show the |
+| | real nova-api service status |
++----------------------------+------------------------------------------------+
+| JIRA: YARDSTICK-397 | HA testing framework improvement |
+| | |
++----------------------------+------------------------------------------------+
+| JIRA: YARDSTICK-660 | Improve monitor_process pass criteria |
+| | |
++----------------------------+------------------------------------------------+
+| JIRA: YARDSTICK-657 | HA monitor_multi bug, |
+| | KeyError: 'max_outage_time' |
++----------------------------+------------------------------------------------+
+| JIRA: YARDSTICK-647 | TC025 fault_type value is wrong when using |
+| | baremetal pod scripts |
++----------------------------+------------------------------------------------+
+| JIRA: YARDSTICK-659 | Terminate openstack service process using kill |
+| | command in HA test cases |
++----------------------------+------------------------------------------------+
+| JIRA: ARMBAND-275 | Yardstick TC005 fails with |
+| | "Cannot map zero-fill pages" error |
++----------------------------+------------------------------------------------+
+| JIRA: YARDSTICK-561 | Bugfix: AttributeError: 'dict' object has no |
+| | attribute 'split' if run sample/ping-hot.yaml |
++----------------------------+------------------------------------------------+
+| JIRA: ARMBAND-268 | ERROR No JSON object could be decoded from |
+| | LMBENCH in TC010 |
++----------------------------+------------------------------------------------+
+| JIRA: YARDSTICK-680 | storperf test case tc074 does not get results |
+| | |
++----------------------------+------------------------------------------------+
+
Danube.2.0:
+----------------------------+------------------------------------------------+
-| **JIRA REFERENCE** | **SLOGAN** |
+| **JIRA REFERENCE** | **DESCRIPTION** |
| | |
+----------------------------+------------------------------------------------+
| JIRA: YARDSTICK-608 | Set work directory in Yardstick container |
@@ -662,7 +706,7 @@ Danube.2.0:
Danube.1.0:
+----------------------------+------------------------------------------------+
-| **JIRA REFERENCE** | **SLOGAN** |
+| **JIRA REFERENCE** | **DESCRIPTION** |
| | |
+----------------------------+------------------------------------------------+
| JIRA: YARDSTICK-599 | Could not load EntryPoint.parse when using |
@@ -673,7 +717,7 @@ Danube.1.0:
+----------------------------+------------------------------------------------+
-Danube 2.0 known restrictions/issues
+Danube 3.1 known restrictions/issues
====================================
+-----------+-----------+----------------------------------------------+
| Installer | Scenario | Issue |
@@ -695,7 +739,7 @@ Open JIRA tickets
=================
+----------------------------+------------------------------------------------+
-| **JIRA REFERENCE** | **SLOGAN** |
+| **JIRA REFERENCE** | **DESCRIPTION** |
| | |
+----------------------------+------------------------------------------------+
| JIRA: YARDSTICK-626 | Fio and Lmbench don't work in Ubuntu-arm64 |
diff --git a/tests/unit/benchmark/scenarios/availability/test_monitor_command.py b/tests/unit/benchmark/scenarios/availability/test_monitor_command.py
index 2ed4be731..6a9b3b157 100644
--- a/tests/unit/benchmark/scenarios/availability/test_monitor_command.py
+++ b/tests/unit/benchmark/scenarios/availability/test_monitor_command.py
@@ -30,12 +30,14 @@ class ExecuteShellTestCase(unittest.TestCase):
exitcode, output = monitor_command._execute_shell_command(cmd)
self.assertEqual(exitcode, 0)
- def test__fun_execute_shell_command_fail_cmd_exception(self,
+ @mock.patch('yardstick.benchmark.scenarios.availability.monitor.monitor_command.LOG')
+ def test__fun_execute_shell_command_fail_cmd_exception(self, mock_log,
mock_subprocess):
cmd = "env"
mock_subprocess.check_output.side_effect = RuntimeError
exitcode, output = monitor_command._execute_shell_command(cmd)
self.assertEqual(exitcode, -1)
+ mock_log.error.assert_called_once()
@mock.patch(
@@ -67,13 +69,15 @@ class MonitorOpenstackCmdTestCase(unittest.TestCase):
instance._result = {"outage_time": 0}
instance.verify_SLA()
- def test__monitor_command_monitor_func_failure(self, mock_subprocess):
+ @mock.patch('yardstick.benchmark.scenarios.availability.monitor.monitor_command.LOG')
+ def test__monitor_command_monitor_func_failure(self, mock_log, mock_subprocess):
mock_subprocess.check_output.return_value = (1, 'unittest')
instance = monitor_command.MonitorOpenstackCmd(self.config, None, {"nova-api": 10})
instance.setup()
mock_subprocess.check_output.side_effect = RuntimeError
ret = instance.monitor_func()
self.assertEqual(ret, False)
+ mock_log.error.assert_called_once()
instance._result = {"outage_time": 10}
instance.verify_SLA()
diff --git a/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py b/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py
index f8d12bd29..b59ec6cf1 100644
--- a/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py
+++ b/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py
@@ -36,7 +36,7 @@ class MultiMonitorServiceTestCase(unittest.TestCase):
'key': 'service-status',
'monitor_key': 'service-status',
'host': 'node1',
- 'monitor_time': 3,
+ 'monitor_time': 0.1,
'parameter': {'serviceName': 'haproxy'},
'sla': {'max_outage_time': 1}
}
diff --git a/tests/unit/benchmark/scenarios/storage/test_storperf.py b/tests/unit/benchmark/scenarios/storage/test_storperf.py
index 00054d531..7b16bb37d 100644
--- a/tests/unit/benchmark/scenarios/storage/test_storperf.py
+++ b/tests/unit/benchmark/scenarios/storage/test_storperf.py
@@ -130,7 +130,7 @@ class StorPerfTestCase(unittest.TestCase):
"queue_depths": 4,
"workload": "rs",
"StorPerf_ip": "192.168.23.2",
- "query_interval": 10,
+ "query_interval": 0,
"timeout": 60
}
@@ -160,7 +160,7 @@ class StorPerfTestCase(unittest.TestCase):
"queue_depths": 4,
"workload": "rs",
"StorPerf_ip": "192.168.23.2",
- "query_interval": 10,
+ "query_interval": 0,
"timeout": 60
}
diff --git a/tests/unit/common/test_utils.py b/tests/unit/common/test_utils.py
index 7f260cfe6..664b38b13 100644
--- a/tests/unit/common/test_utils.py
+++ b/tests/unit/common/test_utils.py
@@ -163,6 +163,19 @@ class TranslateToStrTestCase(unittest.TestCase):
self.assertEqual(result, output_str)
+class ChangeObjToDictTestCase(unittest.TestCase):
+
+ def test_change_obj_to_dict(self):
+ class A(object):
+ def __init__(self):
+ self.name = 'yardstick'
+
+ obj = A()
+ obj_r = utils.change_obj_to_dict(obj)
+ obj_s = {'name': 'yardstick'}
+ self.assertEqual(obj_r, obj_s)
+
+
def main():
unittest.main()
diff --git a/tests/unit/network_services/vnf_generic/vnf/test_vpe_vnf.py b/tests/unit/network_services/vnf_generic/vnf/test_vpe_vnf.py
index b69e537aa..54934c2fe 100644
--- a/tests/unit/network_services/vnf_generic/vnf/test_vpe_vnf.py
+++ b/tests/unit/network_services/vnf_generic/vnf/test_vpe_vnf.py
@@ -16,17 +16,20 @@
#
from __future__ import absolute_import
+
+import os
import unittest
+
import mock
-import os
-from yardstick.network_services.vnf_generic.vnf.vpe_vnf import VpeApproxVnf
-from yardstick.network_services.vnf_generic.vnf import vpe_vnf
from yardstick.network_services.nfvi.resource import ResourceProfile
+from yardstick.network_services.vnf_generic.vnf import vpe_vnf
from yardstick.network_services.vnf_generic.vnf.base import \
QueueFileWrapper
+from yardstick.network_services.vnf_generic.vnf.vpe_vnf import VpeApproxVnf
+@mock.patch('yardstick.network_services.vnf_generic.vnf.vpe_vnf.time')
class TestVpeApproxVnf(unittest.TestCase):
VNFD = {'vnfd:vnfd-catalog':
{'vnfd':
@@ -218,12 +221,12 @@ class TestVpeApproxVnf(unittest.TestCase):
'password': 'r00t',
'VNF model': 'vpe_vnf.yaml'}}}
- def test___init__(self):
+ def test___init__(self, mock_time):
vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
vpe_approx_vnf = VpeApproxVnf(vnfd)
self.assertIsNone(vpe_approx_vnf._vnf_process)
- def test_collect_kpi(self):
+ def test_collect_kpi(self, mock_time):
with mock.patch("yardstick.ssh.SSH") as ssh:
vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
ssh_mock = mock.Mock(autospec=ssh.SSH)
@@ -235,15 +238,17 @@ class TestVpeApproxVnf(unittest.TestCase):
vpe_approx_vnf.resource = mock.Mock(autospec=ResourceProfile)
vpe_approx_vnf.resource.check_if_sa_running = \
mock.Mock(return_value=[0, 1])
- vpe_approx_vnf.resource.amqp_collect_nfvi_kpi= \
+ vpe_approx_vnf.resource.amqp_collect_nfvi_kpi = \
mock.Mock(return_value={})
result = {'pkt_in_down_stream': 0,
'pkt_in_up_stream': 0,
'collect_stats': {'core': {}},
'pkt_drop_down_stream': 0, 'pkt_drop_up_stream': 0}
- self.assertEqual(result, vpe_approx_vnf.collect_kpi())
+ # mock execute_command because it sleeps for 3 seconds.
+ with mock.patch.object(vpe_approx_vnf, "execute_command", return_value=""):
+ self.assertEqual(result, vpe_approx_vnf.collect_kpi())
- def test_execute_command(self):
+ def test_execute_command(self, mock_time):
with mock.patch("yardstick.ssh.SSH") as ssh:
vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
ssh_mock = mock.Mock(autospec=ssh.SSH)
@@ -255,7 +260,7 @@ class TestVpeApproxVnf(unittest.TestCase):
cmd = "quit"
self.assertEqual("", vpe_approx_vnf.execute_command(cmd))
- def test_get_stats_vpe(self):
+ def test_get_stats_vpe(self, mock_time):
with mock.patch("yardstick.ssh.SSH") as ssh:
vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
ssh_mock = mock.Mock(autospec=ssh.SSH)
@@ -270,7 +275,7 @@ class TestVpeApproxVnf(unittest.TestCase):
'pkt_drop_down_stream': 400, 'pkt_drop_up_stream': 600}
self.assertEqual(result, vpe_approx_vnf.get_stats_vpe())
- def test_run_vpe(self):
+ def test_run_vpe(self, mock_time):
with mock.patch("yardstick.ssh.SSH") as ssh:
ssh_mock = mock.Mock(autospec=ssh.SSH)
ssh_mock.execute = \
@@ -288,7 +293,7 @@ class TestVpeApproxVnf(unittest.TestCase):
self.assertEqual(None,
vpe_approx_vnf._run_vpe(queue_wrapper, vpe_vnf))
- def test_instantiate(self):
+ def test_instantiate(self, mock_time):
with mock.patch("yardstick.ssh.SSH") as ssh:
vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
ssh_mock = mock.Mock(autospec=ssh.SSH)
@@ -301,11 +306,12 @@ class TestVpeApproxVnf(unittest.TestCase):
vpe_approx_vnf._run_vpe = mock.Mock(return_value=0)
vpe_approx_vnf._resource_collect_start = mock.Mock(return_value=0)
vpe_approx_vnf.q_out.put("pipeline>")
- vpe_vnf.WAIT_TIME = 3
- self.assertEqual(0, vpe_approx_vnf.instantiate(self.scenario_cfg,
- self.context_cfg))
+ vpe_vnf.WAIT_TIME = 0.1
+ # if the process is still running, exitcode will be None
+ self.assertIn(vpe_approx_vnf.instantiate(self.scenario_cfg, self.context_cfg),
+ {0, None})
- def test_instantiate_panic(self):
+ def test_instantiate_panic(self, mock_time):
with mock.patch("yardstick.ssh.SSH") as ssh:
vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
ssh_mock = mock.Mock(autospec=ssh.SSH)
@@ -316,17 +322,17 @@ class TestVpeApproxVnf(unittest.TestCase):
vpe_approx_vnf = VpeApproxVnf(vnfd)
self.scenario_cfg['vnf_options'] = {'vpe': {'cfg': ""}}
vpe_approx_vnf._run_vpe = mock.Mock(return_value=0)
- vpe_vnf.WAIT_TIME = 1
+ vpe_vnf.WAIT_TIME = 0.1
self.assertRaises(RuntimeError, vpe_approx_vnf.instantiate,
self.scenario_cfg, self.context_cfg)
- def test_scale(self):
+ def test_scale(self, mock_time):
vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
vpe_approx_vnf = VpeApproxVnf(vnfd)
flavor = ""
self.assertRaises(NotImplementedError, vpe_approx_vnf.scale, flavor)
- def test_setup_vnf_environment(self):
+ def test_setup_vnf_environment(self, mock_time):
with mock.patch("yardstick.ssh.SSH") as ssh:
vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
ssh_mock = mock.Mock(autospec=ssh.SSH)
@@ -338,7 +344,7 @@ class TestVpeApproxVnf(unittest.TestCase):
self.assertEqual(None,
vpe_approx_vnf.setup_vnf_environment(ssh_mock))
- def test_terminate(self):
+ def test_terminate(self, mock_time):
vnfd = self.VNFD['vnfd:vnfd-catalog']['vnfd'][0]
vpe_approx_vnf = VpeApproxVnf(vnfd)
self.assertEqual(None, vpe_approx_vnf.terminate())
diff --git a/yardstick/benchmark/scenarios/availability/monitor/monitor_command.py b/yardstick/benchmark/scenarios/availability/monitor/monitor_command.py
index a0777f94e..a9488cc30 100644
--- a/yardstick/benchmark/scenarios/availability/monitor/monitor_command.py
+++ b/yardstick/benchmark/scenarios/availability/monitor/monitor_command.py
@@ -11,7 +11,6 @@ from __future__ import absolute_import
import os
import logging
import subprocess
-import traceback
import yardstick.ssh as ssh
from yardstick.benchmark.scenarios.availability.monitor import basemonitor
@@ -27,9 +26,7 @@ def _execute_shell_command(command):
output = subprocess.check_output(command, shell=True)
except Exception:
exitcode = -1
- output = traceback.format_exc()
- LOG.error("exec command '%s' error:\n ", command)
- LOG.error(traceback.format_exc())
+ LOG.error("exec command '%s' error:\n ", command, exc_info=True)
return exitcode, output
diff --git a/yardstick/common/constants.py b/yardstick/common/constants.py
index d71975af4..5cd34ba53 100644
--- a/yardstick/common/constants.py
+++ b/yardstick/common/constants.py
@@ -48,12 +48,14 @@ SAMPLE_CASE_DIR = join(REPOS_DIR, 'samples')
TESTCASE_DIR = join(YARDSTICK_ROOT_PATH, 'tests/opnfv/test_cases/')
TESTSUITE_DIR = join(YARDSTICK_ROOT_PATH, 'tests/opnfv/test_suites/')
DOCS_DIR = join(REPOS_DIR, 'docs/testing/user/userguide/')
+OPENSTACK_CONF_DIR = '/etc/openstack'
# file
OPENRC = get_param('file.openrc', '/etc/yardstick/openstack.creds')
ETC_HOSTS = get_param('file.etc_hosts', '/etc/hosts')
CONF_FILE = join(CONF_DIR, 'yardstick.conf')
POD_FILE = join(CONF_DIR, 'pod.yaml')
+CLOUDS_CONF = join(OPENSTACK_CONF_DIR, 'clouds.yml')
CONF_SAMPLE_FILE = join(CONF_SAMPLE_DIR, 'yardstick.conf.sample')
FETCH_SCRIPT = get_param('file.fetch_script', 'utils/fetch_os_creds.sh')
FETCH_SCRIPT = join(RELENG_DIR, FETCH_SCRIPT)
diff --git a/yardstick/common/utils.py b/yardstick/common/utils.py
index 1faba4d9e..0c0bac934 100644
--- a/yardstick/common/utils.py
+++ b/yardstick/common/utils.py
@@ -232,3 +232,13 @@ def result_handler(status, data):
'result': data
}
return jsonify(result)
+
+
+def change_obj_to_dict(obj):
+ dic = {}
+ for k, v in vars(obj).items():
+ try:
+ vars(v)
+ except TypeError:
+ dic.update({k: v})
+ return dic
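
Note (not part of the patch): change_obj_to_dict keeps only attributes whose values have no __dict__ of their own (vars() raises TypeError for plain values), which is presumably why nested objects such as SQLAlchemy's _sa_instance_state are dropped when dumping model rows. An illustrative sketch:

    from yardstick.common.utils import change_obj_to_dict

    class State(object):          # any value that has a __dict__
        pass

    class Row(object):
        def __init__(self):
            self.name = 'demo'    # vars('demo') raises TypeError -> kept
            self.uuid = '1234'    # kept
            self.state = State()  # vars(State()) succeeds -> dropped

    assert change_obj_to_dict(Row()) == {'name': 'demo', 'uuid': '1234'}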